Current Path: /opt/cloudlinux/venv/lib64/python3.11/site-packages/coverage
Operating System: Linux premium131.web-hosting.com 4.18.0-553.44.1.lve.el8.x86_64 #1 SMP Thu Mar 13 14:29:12 UTC 2025 x86_64 | Software: Apache | Server IP: 162.0.232.56 | Your IP: 216.73.216.111 | Domains: 1034 domain(s) | Permission: [ 0755 ]
Name | Type | Size | Last Modified
---|---|---|---
__pycache__ | Directory | - | -
fullcoverage | Directory | - | -
htmlfiles | Directory | - | -
__init__.py | File | 1284 bytes | April 17 2025 13:10:59
__main__.py | File | 257 bytes | April 17 2025 13:10:59
annotate.py | File | 3758 bytes | April 17 2025 13:10:59
bytecode.py | File | 713 bytes | April 17 2025 13:10:59
cmdline.py | File | 34427 bytes | April 17 2025 13:10:59
collector.py | File | 20558 bytes | April 17 2025 13:10:59
config.py | File | 21989 bytes | April 17 2025 13:10:59
context.py | File | 2483 bytes | April 17 2025 13:10:59
control.py | File | 51705 bytes | April 17 2025 13:10:59
data.py | File | 7508 bytes | April 17 2025 13:10:59
debug.py | File | 17805 bytes | April 17 2025 13:10:59
disposition.py | File | 1916 bytes | April 17 2025 13:10:59
env.py | File | 6081 bytes | April 17 2025 13:10:59
exceptions.py | File | 1362 bytes | April 17 2025 13:10:59
execfile.py | File | 12133 bytes | April 17 2025 13:10:59
files.py | File | 19374 bytes | April 17 2025 13:10:59
html.py | File | 23165 bytes | April 17 2025 13:10:59
inorout.py | File | 23900 bytes | April 17 2025 13:10:59
jsonreport.py | File | 4754 bytes | April 17 2025 13:10:59
lcovreport.py | File | 4939 bytes | April 17 2025 13:10:59
misc.py | File | 12172 bytes | April 17 2025 13:10:59
multiproc.py | File | 3846 bytes | April 17 2025 13:10:59
numbits.py | File | 4669 bytes | April 17 2025 13:10:59
parser.py | File | 56720 bytes | April 17 2025 13:10:59
phystokens.py | File | 7805 bytes | April 17 2025 13:10:59
plugin.py | File | 19525 bytes | April 17 2025 13:10:59
plugin_support.py | File | 10351 bytes | April 17 2025 13:10:59
py.typed | File | 72 bytes | April 17 2025 13:10:59
python.py | File | 8066 bytes | April 17 2025 13:10:59
pytracer.py | File | 14420 bytes | April 17 2025 13:10:59
report.py | File | 10623 bytes | April 17 2025 13:10:59
report_core.py | File | 4068 bytes | April 17 2025 13:10:59
results.py | File | 13384 bytes | April 17 2025 13:10:59
sqldata.py | File | 51295 bytes | April 17 2025 13:10:59
templite.py | File | 10952 bytes | April 17 2025 13:10:59
tomlconfig.py | File | 7569 bytes | April 17 2025 13:10:59
tracer.cpython-311-x86_64-linux-gnu.so | File | 29464 bytes | April 17 2025 13:11:30
types.py | File | 5526 bytes | April 17 2025 13:10:59
version.py | File | 1431 bytes | April 17 2025 13:10:59
xmlreport.py | File | 9795 bytes | April 17 2025 13:10:59
```python
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt

"""Coverage data for coverage.py.

This file had the 4.x JSON data support, which is now gone.  This file still
has storage-agnostic helpers, and is kept to avoid changing too many imports.
CoverageData is now defined in sqldata.py, and imported here to keep the
imports working.

"""

from __future__ import annotations

import glob
import hashlib
import os.path

from typing import Callable, Dict, Iterable, List, Optional

from coverage.exceptions import CoverageException, NoDataError
from coverage.files import PathAliases
from coverage.misc import Hasher, file_be_gone, human_sorted, plural
from coverage.sqldata import CoverageData


def line_counts(data: CoverageData, fullpath: bool = False) -> Dict[str, int]:
    """Return a dict summarizing the line coverage data.

    Keys are based on the file names, and values are the number of executed
    lines.  If `fullpath` is true, then the keys are the full pathnames of
    the files, otherwise they are the basenames of the files.

    Returns a dict mapping file names to counts of lines.

    """
    summ = {}
    filename_fn: Callable[[str], str]
    if fullpath:
        # pylint: disable=unnecessary-lambda-assignment
        filename_fn = lambda f: f
    else:
        filename_fn = os.path.basename
    for filename in data.measured_files():
        lines = data.lines(filename)
        assert lines is not None
        summ[filename_fn(filename)] = len(lines)
    return summ


def add_data_to_hash(data: CoverageData, filename: str, hasher: Hasher) -> None:
    """Contribute `filename`'s data to the `hasher`.

    `hasher` is a `coverage.misc.Hasher` instance to be updated with the
    file's data.  It should only get the results data, not the run data.

    """
    if data.has_arcs():
        hasher.update(sorted(data.arcs(filename) or []))
    else:
        hasher.update(sorted_lines(data, filename))
    hasher.update(data.file_tracer(filename))


def combinable_files(data_file: str, data_paths: Optional[Iterable[str]] = None) -> List[str]:
    """Make a list of data files to be combined.

    `data_file` is a path to a data file.  `data_paths` is a list of files or
    directories of files.

    Returns a list of absolute file paths.
    """
    data_dir, local = os.path.split(os.path.abspath(data_file))

    data_paths = data_paths or [data_dir]
    files_to_combine = []
    for p in data_paths:
        if os.path.isfile(p):
            files_to_combine.append(os.path.abspath(p))
        elif os.path.isdir(p):
            pattern = glob.escape(os.path.join(os.path.abspath(p), local)) + ".*"
            files_to_combine.extend(glob.glob(pattern))
        else:
            raise NoDataError(f"Couldn't combine from non-existent path '{p}'")
    return files_to_combine


def combine_parallel_data(
    data: CoverageData,
    aliases: Optional[PathAliases] = None,
    data_paths: Optional[Iterable[str]] = None,
    strict: bool = False,
    keep: bool = False,
    message: Optional[Callable[[str], None]] = None,
) -> None:
    """Combine a number of data files together.

    `data` is a CoverageData.

    Treat `data.filename` as a file prefix, and combine the data from all
    of the data files starting with that prefix plus a dot.

    If `aliases` is provided, it's a `PathAliases` object that is used to
    re-map paths to match the local machine's.

    If `data_paths` is provided, it is a list of directories or files to
    combine.  Directories are searched for files that start with
    `data.filename` plus dot as a prefix, and those files are combined.

    If `data_paths` is not provided, then the directory portion of
    `data.filename` is used as the directory to search for data files.

    Unless `keep` is True every data file found and combined is then deleted
    from disk.

    If a file cannot be read, a warning will be issued, and the file will
    not be deleted.

    If `strict` is true, and no files are found to combine, an error is
    raised.

    `message` is a function to use for printing messages to the user.

    """
    files_to_combine = combinable_files(data.base_filename(), data_paths)

    if strict and not files_to_combine:
        raise NoDataError("No data to combine")

    file_hashes = set()
    combined_any = False

    for f in files_to_combine:
        if f == data.data_filename():
            # Sometimes we are combining into a file which is one of the
            # parallel files.  Skip that file.
            if data._debug.should("dataio"):
                data._debug.write(f"Skipping combining ourself: {f!r}")
            continue

        try:
            rel_file_name = os.path.relpath(f)
        except ValueError:
            # ValueError can be raised under Windows when os.getcwd() returns a
            # folder from a different drive than the drive of f, in which case
            # we print the original value of f instead of its relative path.
            rel_file_name = f

        with open(f, "rb") as fobj:
            hasher = hashlib.new("sha3_256")
            hasher.update(fobj.read())
            sha = hasher.digest()
        combine_this_one = sha not in file_hashes

        delete_this_one = not keep
        if combine_this_one:
            if data._debug.should("dataio"):
                data._debug.write(f"Combining data file {f!r}")
            file_hashes.add(sha)
            try:
                new_data = CoverageData(f, debug=data._debug)
                new_data.read()
            except CoverageException as exc:
                if data._warn:
                    # The CoverageException has the file name in it, so just
                    # use the message as the warning.
                    data._warn(str(exc))
                if message:
                    message(f"Couldn't combine data file {rel_file_name}: {exc}")
                delete_this_one = False
            else:
                data.update(new_data, aliases=aliases)
                combined_any = True
                if message:
                    message(f"Combined data file {rel_file_name}")
        else:
            if message:
                message(f"Skipping duplicate data {rel_file_name}")

        if delete_this_one:
            if data._debug.should("dataio"):
                data._debug.write(f"Deleting data file {f!r}")
            file_be_gone(f)

    if strict and not combined_any:
        raise NoDataError("No usable data files")


def debug_data_file(filename: str) -> None:
    """Implementation of 'coverage debug data'."""
    data = CoverageData(filename)
    filename = data.data_filename()
    print(f"path: {filename}")
    if not os.path.exists(filename):
        print("No data collected: file doesn't exist")
        return
    data.read()
    print(f"has_arcs: {data.has_arcs()!r}")
    summary = line_counts(data, fullpath=True)
    filenames = human_sorted(summary.keys())
    nfiles = len(filenames)
    print(f"{nfiles} file{plural(nfiles)}:")
    for f in filenames:
        line = f"{f}: {summary[f]} line{plural(summary[f])}"
        plugin = data.file_tracer(f)
        if plugin:
            line += f" [{plugin}]"
        print(line)


def sorted_lines(data: CoverageData, filename: str) -> List[int]:
    """Get the sorted lines for a file, for tests."""
    lines = data.lines(filename)
    return sorted(lines or [])
```
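The module above provides the helpers coverage.py uses to merge parallel data files and summarize them. Below is a minimal usage sketch, assuming coverage.py 7.x is installed and that one or more `.coverage.*` parallel data files sit next to a `.coverage` file; the file names are illustrative assumptions, not taken from the listing above.

```python
# Minimal sketch: combine parallel coverage data files and summarize them.
# Assumes coverage.py 7.x; the ".coverage" path is illustrative.
from coverage.data import CoverageData, combine_parallel_data, line_counts

data = CoverageData(".coverage")

# Merge every sibling ".coverage.<suffix>" file into `data`.
# keep=True leaves the input files on disk instead of deleting them.
combine_parallel_data(data, keep=True)
data.write()

# line_counts() maps each measured file to its number of executed lines.
for filename, nlines in sorted(line_counts(data, fullpath=True).items()):
    print(f"{filename}: {nlines} lines")
```

If no combinable files are found and `strict=True` is passed, `combine_parallel_data()` raises `NoDataError`, matching the behaviour shown in the source above.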