Current Path: /opt/cloudlinux/venv/lib64/python3.11/site-packages/lvestats/lib/commons
Operation  : Linux premium131.web-hosting.com 4.18.0-553.44.1.lve.el8.x86_64 #1 SMP Thu Mar 13 14:29:12 UTC 2025 x86_64
Software   : Apache
Server IP  : 162.0.232.56 | Your IP: 216.73.216.111
Domains    : 1034 Domain(s)
Permission : [ 0755 ]
Name | Type | Size | Last Modified
---|---|---|---
__pycache__ | Directory | - | -
__init__.py | File | 219 bytes | May 30 2025 10:30:46
argparse_utils.py | File | 11516 bytes | May 30 2025 10:30:46
dateutil.py | File | 5738 bytes | May 30 2025 10:30:46
decorators.py | File | 893 bytes | May 30 2025 10:30:46
func.py | File | 16268 bytes | May 30 2025 10:30:46
htpasswd.py | File | 2304 bytes | May 30 2025 10:30:46
litespeed.py | File | 6833 bytes | May 30 2025 10:30:46
logsetup.py | File | 4610 bytes | May 30 2025 10:30:46
proctitle.py | File | 2966 bytes | May 30 2025 10:30:46
profiler.py | File | 575 bytes | May 30 2025 10:30:46
progress.py | File | 1016 bytes | May 30 2025 10:30:46
sentry.py | File | 6313 bytes | May 30 2025 10:30:46
server_status.py | File | 1346 bytes | May 30 2025 10:30:46
sizeutil.py | File | 2653 bytes | May 30 2025 10:30:46
users_manager.py | File | 3042 bytes | May 30 2025 10:30:46
proctitle.py:

# coding=utf-8
#
# Copyright © Cloud Linux GmbH & Cloud Linux Software, Inc 2010-2019 All Rights Reserved
#
# Licensed under CLOUD LINUX LICENSE AGREEMENT
# http://cloudlinux.com/docs/LICENSE.TXT

import logging
import os
import re
import time

from lvestats.lib.commons.func import get_all_user_domains, normalize_domain


class Proctitle(object):
    """
    Class for working with mod_procurl files
    """

    def __init__(self, shm_dir="/dev/shm/"):
        self.now = 0
        self.log = logging.getLogger('Proctitle')
        self.SHM_DIR = shm_dir
        self.FILE_PATTERN = re.compile(r"apache_title_shm_[0-9]+_[0-9]+_[0-9]+$", re.IGNORECASE)
        self.parsed_data = self._get_all_data()

    def _get_all_data(self):
        """
        1) Get all files in self.SHM_DIR that match the regexp self.FILE_PATTERN
        2) Read every file up to the first \x00 symbol
        3) If the Apache process is idle the file content equals "httpd",
           otherwise it should contain 5 values separated by spaces
        4) Split every interesting line and return the result
        :return: list of lists [[Timestamp, Domain, Http type, Path, Http version, Pid], ...]
        """
        result = []
        if os.path.exists(self.SHM_DIR):
            files = filter(self.FILE_PATTERN.search, os.listdir(self.SHM_DIR))
            self.now = time.time()
            for file in files:
                try:
                    file_name = os.path.join(self.SHM_DIR, file)
                    with open(file_name, 'r', encoding='utf-8') as http_stats:
                        http_stats_line = http_stats.readline()
                        # Keep only the data before the first NUL byte.
                        http_stats_line = http_stats_line.split('\x00')[0]
                        if http_stats_line != "httpd":
                            http_stats_line_split = http_stats_line.split(" ")
                            if len(http_stats_line_split) == 5:
                                # The PID is taken from the shm file name.
                                result.append(http_stats_line_split + [file.split("_")[-3]])
                            else:
                                self.log.debug("Number of values in file %s is not equal to 5", file)
                except IOError as e:
                    self.log.debug(str(e))
        return result

    def get_user_data(self, username):
        """
        Returns information about pages processed for the given user.
        :param username: user whose domains should be matched
        :return: list of lists [[Pid, Domain, Http type, Path, Http version, Time], ...]
        """
        all_domains = get_all_user_domains(username)
        normalized_domains = set(map(normalize_domain, all_domains))
        result = []
        for data in self.parsed_data:
            if normalize_domain(data[1]) in normalized_domains:
                result.append([
                    data[5],
                    data[1],
                    data[2],
                    data[3],
                    data[4],
                    f"{self.now - float(data[0]):.1f}",
                ])
        return result
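To illustrate how the parsing above behaves, here is a minimal, hypothetical sketch. It assumes lvestats is installed so that this module imports as `lvestats.lib.commons.proctitle`, and it fabricates one shm file whose name matches `FILE_PATTERN`; the five space-separated fields are inferred from the `_get_all_data()` docstring, not from CloudLinux documentation.

```python
# Hypothetical usage sketch: fabricates one mod_procurl-style shm file in a
# temporary directory and lets Proctitle parse it. The file name and the
# "<timestamp> <domain> <http type> <path> <http version>" layout are
# assumptions inferred from FILE_PATTERN and the _get_all_data() docstring.
import os
import tempfile
import time

from lvestats.lib.commons.proctitle import Proctitle

with tempfile.TemporaryDirectory() as shm_dir:
    entry = f"{time.time()} example.com GET /index.php HTTP/1.1"
    with open(os.path.join(shm_dir, "apache_title_shm_12345_0_0"), "w") as f:
        # Content is read up to the first NUL byte, mimicking the real shm files.
        f.write(entry + "\x00")

    pt = Proctitle(shm_dir=shm_dir)   # parsing happens in __init__
    print(pt.parsed_data)
    # -> [['<timestamp>', 'example.com', 'GET', '/index.php', 'HTTP/1.1', '12345']]
    #    where '12345' is the PID pulled from the file name.
```

On a live server you would normally just call `Proctitle().get_user_data(username)`; note that `get_all_user_domains()` resolves the user's domains from the system, so the synthetic `example.com` entry above would not match an arbitrary user, and the last field of each returned row is the elapsed time computed from the timestamp stored in the shm file.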