PNG  IHDR pHYs   OiCCPPhotoshop ICC profilexڝSgTS=BKKoR RB&*! J!QEEȠQ, !{kּ> H3Q5 B.@ $pd!s#~<<+"x M0B\t8K@zB@F&S`cbP-`'{[! eDh;VEX0fK9-0IWfH  0Q){`##xFW<+*x<$9E[-qWW.(I+6aa@.y24x6_-"bbϫp@t~,/;m%h^ uf@Wp~<5j>{-]cK'Xto(hw?G%fIq^D$.Tʳ?D*A, `6B$BB dr`)B(Ͱ*`/@4Qhp.U=pa( Aa!ڈbX#!H$ ɈQ"K5H1RT UH=r9\F;2G1Q= C7F dt1r=6Ыhڏ>C03l0.B8, c˱" VcϱwE 6wB aAHXLXNH $4 7 Q'"K&b21XH,#/{C7$C2'ITFnR#,4H#dk9, +ȅ3![ b@qS(RjJ4e2AURݨT5ZBRQ4u9̓IKhhitݕNWGw Ljg(gwLӋT071oUX**| J&*/Tު UUT^S}FU3S ԖUPSSg;goT?~YYLOCQ_ cx,!k u5&|v*=9C3J3WRf?qtN (~))4L1e\kXHQG6EYAJ'\'GgSSݧ M=:.kDwn^Loy}/TmG X $ <5qo</QC]@Caaᄑ.ȽJtq]zۯ6iܟ4)Y3sCQ? 0k߬~OCOg#/c/Wװwa>>r><72Y_7ȷOo_C#dz%gA[z|!?:eAAA!h쐭!ΑiP~aa~ 'W?pX15wCsDDDޛg1O9-J5*>.j<74?.fYXXIlK9.*6nl {/]py.,:@LN8A*%w% yg"/6шC\*NH*Mz쑼5y$3,幄'L Lݛ:v m2=:1qB!Mggfvˬen/kY- BTZ(*geWf͉9+̳ې7ᒶKW-X潬j9(xoʿܔĹdff-[n ڴ VE/(ۻCɾUUMfeI?m]Nmq#׹=TR+Gw- 6 U#pDy  :v{vg/jBFS[b[O>zG499?rCd&ˮ/~јѡ򗓿m|x31^VwwO| (hSЧc3- cHRMz%u0`:o_F@8N ' p @8N@8}' p '#@8N@8N pQ9p!i~}|6-ӪG` VP.@*j>[ K^<֐Z]@8N'KQ<Q(`s" 'hgpKB`R@Dqj '  'P$a ( `D$Na L?u80e J,K˷NI'0eݷ(NI'؀ 2ipIIKp`:O'`ʤxB8Ѥx Ѥx $ $P6 :vRNb 'p,>NB 'P]-->P T+*^h& p '‰a ‰ (ĵt#u33;Nt̵'ޯ; [3W ~]0KH1q@8]O2]3*̧7# *p>us p _6]/}-4|t'|Smx= DoʾM×M_8!)6lq':l7!|4} '\ne t!=hnLn (~Dn\+‰_4k)0e@OhZ`F `.m1} 'vp{F`ON7Srx 'D˸nV`><;yMx!IS钦OM)Ե٥x 'DSD6bS8!" ODz#R >S8!7ّxEh0m$MIPHi$IvS8IN$I p$O8I,sk&I)$IN$Hi$I^Ah.p$MIN$IR8I·N "IF9Ah0m$MIN$IR8IN$I 3jIU;kO$ɳN$+ q.x* tEXtComment

Viewing File: /opt/cloudlinux/venv/lib/python3.11/site-packages/lvestats/lib/commons/proctitle.py

# coding=utf-8
#
# Copyright © Cloud Linux GmbH & Cloud Linux Software, Inc 2010-2019 All Rights Reserved
#
# Licensed under CLOUD LINUX LICENSE AGREEMENT
# http://cloudlinux.com/docs/LICENSE.TXT

import logging
import os
import re
import time

from lvestats.lib.commons.func import get_all_user_domains, normalize_domain


class Proctitle(object):
    """
    Class for working with mod_procurl files.

    Scans shared-memory files written by Apache (mod_procurl) under
    ``/dev/shm`` and exposes the parsed per-request information,
    optionally filtered by control-panel user.
    """

    def __init__(self, shm_dir="/dev/shm/"):
        """
        :param shm_dir: directory to scan for ``apache_title_shm_*`` files
        """
        self.now = 0  # timestamp of the last scan; stays 0 if shm_dir is absent
        self.log = logging.getLogger('Proctitle')
        self.SHM_DIR = shm_dir
        # File names look like: apache_title_shm_<pid>_<n>_<m>
        self.FILE_PATTERN = re.compile(r"apache_title_shm_[0-9]+_[0-9]+_[0-9]+$", re.IGNORECASE)
        self.parsed_data = self._get_all_data()

    def _get_all_data(self):
        """
        1) Get all files in self.SHM_DIR that match the regexp self.FILE_PATTERN
        2) Read every file up to the first \x00 symbol
        3) If the apache process is idle the file content equals "httpd",
           otherwise it should contain 5 values separated by spaces
        4) Split every interesting line, append the pid taken from the
           file name, and return the collected records
        :return:
        list of the lists
        [[Timestamp, Domain, Http type, Path, Http version, Pid],...]
        """
        result = []
        if not os.path.exists(self.SHM_DIR):
            return result
        shm_files = filter(self.FILE_PATTERN.search, os.listdir(self.SHM_DIR))
        self.now = time.time()
        for shm_file in shm_files:
            file_name = os.path.join(self.SHM_DIR, shm_file)
            try:
                with open(file_name, 'r', encoding='utf-8') as http_stats:
                    http_stats_line = http_stats.readline()
            except (OSError, UnicodeDecodeError) as e:
                # Segments may vanish between listdir() and open(), or hold
                # non-UTF-8 garbage (raw shared memory) — skip them quietly
                # instead of aborting the whole scan.
                self.log.debug(str(e))
                continue
            # The writer zero-pads the segment; only the part before the
            # first NUL byte is meaningful.
            http_stats_line = http_stats_line.split('\x00')[0]
            if http_stats_line == "httpd":
                continue  # idle worker, nothing to report
            http_stats_line_split = http_stats_line.split(" ")
            if len(http_stats_line_split) == 5:
                # apache_title_shm_<pid>_<n>_<m> -> third-from-last part is the pid
                result.append(http_stats_line_split + [shm_file.split("_")[-3]])
            else:
                self.log.debug("Number of values in file %s is not equal to 5", shm_file)
        return result

    def get_user_data(self, username):
        """
        Returns information about pages processed by the given user.
        :param username: control-panel user name
        :return:
        list of the lists
        [[Pid, Domain, Http type, Path, Http version, Time],...]
        """
        all_domains = get_all_user_domains(username)
        normalized_domains = set(map(normalize_domain, all_domains))
        result = []
        for data in self.parsed_data:
            # data layout (from _get_all_data):
            # [timestamp, domain, http type, path, http version, pid]
            if normalize_domain(data[1]) not in normalized_domains:
                continue
            result.append([
                data[5],  # pid
                data[1],  # domain
                data[2],  # http type (method)
                data[3],  # path
                data[4],  # http version
                # seconds elapsed since the request started, one decimal
                f"{self.now - float(data[0]):.1f}",
            ])
        return result
Back to Directory=ceiIENDB`