PNG  IHDR pHYs   OiCCPPhotoshop ICC profilexڝSgTS=BKKoR RB&*! J!QEEȠQ, !{kּ> H3Q5 B.@ $pd!s#~<<+"x M0B\t8K@zB@F&S`cbP-`'{[! eDh;VEX0fK9-0IWfH  0Q){`##xFW<+*x<$9E[-qWW.(I+6aa@.y24x6_-"bbϫp@t~,/;m%h^ uf@Wp~<5j>{-]cK'Xto(hw?G%fIq^D$.Tʳ?D*A, `6B$BB dr`)B(Ͱ*`/@4Qhp.U=pa( Aa!ڈbX#!H$ ɈQ"K5H1RT UH=r9\F;2G1Q= C7F dt1r=6Ыhڏ>C03l0.B8, c˱" VcϱwE 6wB aAHXLXNH $4 7 Q'"K&b21XH,#/{C7$C2'ITFnR#,4H#dk9, +ȅ3![ b@qS(RjJ4e2AURݨT5ZBRQ4u9̓IKhhitݕNWGw Ljg(gwLӋT071oUX**| J&*/Tު UUT^S}FU3S ԖUPSSg;goT?~YYLOCQ_ cx,!k u5&|v*=9C3J3WRf?qtN (~))4L1e\kXHQG6EYAJ'\'GgSSݧ M=:.kDwn^Loy}/TmG X $ <5qo</QC]@Caaᄑ.ȽJtq]zۯ6iܟ4)Y3sCQ? 0k߬~OCOg#/c/Wװwa>>r><72Y_7ȷOo_C#dz%gA[z|!?:eAAA!h쐭!ΑiP~aa~ 'W?pX15wCsDDDޛg1O9-J5*>.j<74?.fYXXIlK9.*6nl {/]py.,:@LN8A*%w% yg"/6шC\*NH*Mz쑼5y$3,幄'L Lݛ:v m2=:1qB!Mggfvˬen/kY- BTZ(*geWf͉9+̳ې7ᒶKW-X潬j9(xoʿܔĹdff-[n ڴ VE/(ۻCɾUUMfeI?m]Nmq#׹=TR+Gw- 6 U#pDy  :v{vg/jBFS[b[O>zG499?rCd&ˮ/~јѡ򗓿m|x31^VwwO| (hSЧc3- cHRMz%u0`:o_F@8N ' p @8N@8}' p '#@8N@8N pQ9p!i~}|6-ӪG` VP.@*j>[ K^<֐Z]@8N'KQ<Q(`s" 'hgpKB`R@Dqj '  'P$a ( `D$Na L?u80e J,K˷NI'0eݷ(NI'؀ 2ipIIKp`:O'`ʤxB8Ѥx Ѥx $ $P6 :vRNb 'p,>NB 'P]-->P T+*^h& p '‰a ‰ (ĵt#u33;Nt̵'ޯ; [3W ~]0KH1q@8]O2]3*̧7# *p>us p _6]/}-4|t'|Smx= DoʾM×M_8!)6lq':l7!|4} '\ne t!=hnLn (~Dn\+‰_4k)0e@OhZ`F `.m1} 'vp{F`ON7Srx 'D˸nV`><;yMx!IS钦OM)Ե٥x 'DSD6bS8!" ODz#R >S8!7ّxEh0m$MIPHi$IvS8IN$I p$O8I,sk&I)$IN$Hi$I^Ah.p$MIN$IR8I·N "IF9Ah0m$MIN$IR8IN$I 3jIU;kO$ɳN$+ q.x* tEXtComment

Viewing File: /opt/cloudlinux/venv/lib/python3.11/site-packages/lvestats/plugins/generic/dbgov_saver.py

# coding=utf-8
#
# Copyright © Cloud Linux GmbH & Cloud Linux Software, Inc 2010-2020 All Rights Reserved
#
# Licensed under CLOUD LINUX LICENSE AGREEMENT
# http://cloudlinux.com/docs/LICENSE.TXT

import logging
import os
import re

from sqlalchemy.exc import SQLAlchemyError

from clcommon.clpwd import ClPwd
from lvestats.core.plugin import LveStatsPlugin
from lvestats.lib.commons.func import reboot_lock
from lvestats.lib.commons.sizeutil import dbgov_io_bytes_value
from lvestats.orm.history_gov import history_gov

MAX_FILES_PER_TRANSACTION = 1000


class DBGovSaver(LveStatsPlugin):
    """Persist MySQL Governor (db_governor) statistics into the lvestats DB.

    The governor daemon drops files named ``governor.<unix-timestamp>`` into
    ``DBSTAT_DIR``; every line contains ';'-separated per-user accounting
    fields.  Each plugin cycle parses those files, inserts the recognised
    rows into the ``history_gov`` table and deletes the processed files.
    """

    DBSTAT_DIR = "/var/lve/dbgovernor/"
    # Dump files are named "governor.<timestamp>"; the trailing digits are
    # used as the "ts" column of every row taken from the file.
    FILE_PATTERN = re.compile(r"governor\.[0-9]+$", re.IGNORECASE)
    # Columns actually present in the history_gov table; used to filter out
    # fields the current schema does not accept.
    _history_gov_col = list(history_gov.__table__.columns.keys())

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        self.server_id = "localhost"
        self.engine = None  # SQLAlchemy engine; injected by the framework
        # Positional mapping of dump-file fields to (column name, converter).
        # ``None`` marks fields present in the dump but not stored.
        self.headers = (
            ("username", str),
            None,  # max_simultaneous_requests not support
            ("sum_cpu", float),
            ("sum_write", float),
            ("sum_read", float),
            None,  # max_cpu not support
            None,  # max_write not support
            None,  # max_read not support
            ("number_of_restricts", int),
            ("limit_cpu_on_period_end", int),
            ("limit_read_on_period_end", int),
            ("limit_write_on_period_end", int),
            ("cause_of_restrict", int),
            ("uid", int),
        )
        self._headers_len = len(self.headers)
        self.cl_pwd = ClPwd()
        # Accounts with uid below this value are treated as system users
        # and their rows are skipped.
        self.min_uid = self.cl_pwd.get_sys_min_uid(500)

    def set_config(self, config):
        """Pick up the server identifier from the plugin configuration."""
        self.server_id = config.get("server_id", self.server_id)

    def get_user_id(self, username):
        """Return the uid for *username*, or -1 when the user is unknown."""
        try:
            return self.cl_pwd.get_uid(username)
        except self.cl_pwd.NoSuchUserException as e:
            self.logger.debug('Can not obtain user id for "%s"; %s', username, e)
            return -1

    def scan_dir(self):
        """Scan the governor dump directory and yield its statistics files.

        Yields ``(absolute file name, list of lines)`` tuples.  A file that
        cannot be decoded as UTF-8 is logged (with its raw content attached
        to the log record) and yielded with an empty line list so it is
        still deleted by the caller.
        """
        if not os.path.exists(self.DBSTAT_DIR):
            return
        for name in filter(self.FILE_PATTERN.search, os.listdir(self.DBSTAT_DIR)):
            file_name = os.path.join(self.DBSTAT_DIR, name)
            try:
                with open(file_name, "r", encoding="utf-8") as f_stats:
                    f_stats_lines = f_stats.readlines()
                yield file_name, f_stats_lines
            except IOError:
                self.logger.warning("No file statistic")
            except UnicodeDecodeError:
                # Re-read with surrogateescape so the undecodable payload can
                # be attached to the log record for later inspection.
                with open(file_name, "r", errors="surrogateescape", encoding="utf-8") as file:
                    f_source = file.read()
                self.logger.error(
                    "Error while decoding the file %s",
                    name,
                    exc_info=True,
                    extra={name: f_source},
                )
                yield file_name, []

    def write_to_db(self, conn, scanned):
        """Insert parsed governor rows into ``history_gov`` and delete files.

        :type scanned: generator of (file name, [lines]) pairs (see scan_dir)
        :type conn: sqlalchemy.engine.base.Connection
        :return: per-uid usage/limit summary handed over to the CM plugin
        :rtype: dict(int, dict(str, int|float))
        """
        values_list = []
        unlink_list = []
        for n_, (file_name, lines) in enumerate(scanned):
            # Bound the amount of work per transaction.  Note ">=", so at
            # most MAX_FILES_PER_TRANSACTION files are handled (the previous
            # "MAX < n_" check let one extra file slip through).
            if n_ >= MAX_FILES_PER_TRANSACTION:
                break
            # The digit suffix of the file name (guaranteed by FILE_PATTERN)
            # is the collection timestamp; it is the same for every line.
            file_timestamp = int(file_name.split(".")[-1])
            for line in lines:
                try:
                    self.logger.debug("write: %s", line)
                    line_splited = line.strip().split(";")
                    values = {"server_id": self.server_id, "ts": file_timestamp}
                    values.update(
                        {h_[0]: h_[1](v_) for h_, v_ in zip(self.headers, line_splited) if h_}
                    )
                    if (
                        # 'uid' might be:
                        # - missing (in governor-mysql < 1.2-1 and under some Gov configurations)
                        # - negative (e.g. if Gov. account name does not coincide with any Unix user name)
                        # and it's planned for removal in the future (CLOS-3317)
                        not values.get("uid")
                        or values.get("uid") < 0
                    ):
                        values["uid"] = self.get_user_id(values["username"])  # extend dict by user id
                    if values["uid"] >= self.min_uid:  # ignoring system users and when we can't extract user id
                        values_list.append(values)
                except (IndexError, ValueError):
                    self.logger.warning("Can not parse file %s; data from file not be writen to database", file_name)
            unlink_list.append(file_name)

        # Data to transfer to CM plugin
        data_for_cm = {}

        # insert all data per one commit
        with reboot_lock():
            if values_list:
                try:
                    # filter for insert only supported columns
                    values_list_filtered = [
                        {k: v for k, v in d.items() if k in self._history_gov_col}
                        for d in values_list
                    ]
                    # form data for CM plugin
                    for dbgov_data in values_list_filtered:
                        uid = dbgov_data["uid"]
                        data_for_cm[uid] = {
                            "cpu_limit": dbgov_data["limit_cpu_on_period_end"],
                            "io_limit": dbgov_io_bytes_value(
                                dbgov_data["limit_read_on_period_end"], dbgov_data["limit_write_on_period_end"]
                            ),
                            "cpu_usage": round(dbgov_data["sum_cpu"], 1),
                            "io_usage": dbgov_io_bytes_value(dbgov_data["sum_read"], dbgov_data["sum_write"]),
                        }
                    conn.execute(history_gov.__table__.insert(), values_list_filtered)
                except (SQLAlchemyError, KeyError) as e:
                    self.logger.warning(str(e))
            # Delete processed files one by one: a failure on a single file
            # must not keep the remaining ones around to be double-counted
            # on the next cycle (the old single try around map() did).
            for processed in unlink_list:
                try:
                    os.unlink(processed)
                except OSError:
                    pass

        return data_for_cm

    def execute(self, lve_data):
        """Plugin entry point: collect governor stats and store them.

        :type lve_data: dict
        Sets ``lve_data["dbgov_data_for_cm"]`` when fresh data is available.
        """
        if "dbgov_data" not in lve_data:
            lve_data["dbgov_data"] = []
        conn = self.engine.connect()
        try:
            scanned = self.scan_dir()
            dbgov_data_for_cm = self.write_to_db(conn, scanned)
            if dbgov_data_for_cm:
                lve_data["dbgov_data_for_cm"] = dbgov_data_for_cm
        finally:
            conn.close()
Back to Directory=ceiIENDB`