SILENT KILLER Panel

Operating System : Linux premium131.web-hosting.com 4.18.0-553.44.1.lve.el8.x86_64 #1 SMP Thu Mar 13 14:29:12 UTC 2025 x86_64
Software         : Apache
Server IP        : 162.0.232.56 | Your IP: 216.73.216.111
Domains          : 1034 domain(s)
Permission       : [ 0755 ]

Files and Folders in: /opt/hc_python/lib/python3.12/site-packages/sentry_sdk/integrations

Name    Type    Size    Last Modified    Actions
__pycache__ Directory - -
celery Directory - -
django Directory - -
grpc Directory - -
opentelemetry Directory - -
redis Directory - -
spark Directory - -
__init__.py File 10218 bytes May 23 2025 10:34:44.
_asgi_common.py File 3187 bytes May 23 2025 10:34:44.
_wsgi_common.py File 7558 bytes May 23 2025 10:34:44.
aiohttp.py File 12895 bytes May 23 2025 10:34:44.
anthropic.py File 9426 bytes May 23 2025 10:34:44.
argv.py File 911 bytes May 23 2025 10:34:44.
ariadne.py File 5834 bytes May 23 2025 10:34:44.
arq.py File 7857 bytes May 23 2025 10:34:44.
asgi.py File 12779 bytes May 23 2025 10:34:44.
asyncio.py File 4034 bytes May 23 2025 10:34:44.
asyncpg.py File 6521 bytes May 23 2025 10:34:44.
atexit.py File 1652 bytes May 23 2025 10:34:44.
aws_lambda.py File 17954 bytes May 23 2025 10:34:44.
beam.py File 5182 bytes May 23 2025 10:34:44.
boto3.py File 4411 bytes May 23 2025 10:34:44.
bottle.py File 6615 bytes May 23 2025 10:34:44.
chalice.py File 4699 bytes May 23 2025 10:34:44.
clickhouse_driver.py File 5247 bytes May 23 2025 10:34:44.
cloud_resource_context.py File 7780 bytes May 23 2025 10:34:44.
cohere.py File 9333 bytes May 23 2025 10:34:44.
dedupe.py File 1418 bytes May 23 2025 10:34:44.
dramatiq.py File 5583 bytes May 23 2025 10:34:44.
excepthook.py File 2408 bytes May 23 2025 10:34:44.
executing.py File 1994 bytes May 23 2025 10:34:44.
falcon.py File 9501 bytes May 23 2025 10:34:44.
fastapi.py File 4718 bytes May 23 2025 10:34:44.
flask.py File 8740 bytes May 23 2025 10:34:44.
gcp.py File 8274 bytes May 23 2025 10:34:44.
gnu_backtrace.py File 2894 bytes May 23 2025 10:34:44.
gql.py File 4179 bytes May 23 2025 10:34:44.
graphene.py File 5042 bytes May 23 2025 10:34:44.
httpx.py File 5866 bytes May 23 2025 10:34:44.
huey.py File 5443 bytes May 23 2025 10:34:44.
huggingface_hub.py File 6551 bytes May 23 2025 10:34:44.
langchain.py File 17718 bytes May 23 2025 10:34:44.
launchdarkly.py File 1935 bytes May 23 2025 10:34:44.
litestar.py File 11569 bytes May 23 2025 10:34:44.
logging.py File 13506 bytes May 23 2025 10:34:44.
loguru.py File 3620 bytes May 23 2025 10:34:44.
modules.py File 820 bytes May 23 2025 10:34:44.
openai.py File 15585 bytes May 23 2025 10:34:44.
openfeature.py File 1235 bytes May 23 2025 10:34:44.
pure_eval.py File 4581 bytes May 23 2025 10:34:44.
pymongo.py File 6380 bytes May 23 2025 10:34:44.
pyramid.py File 7364 bytes May 23 2025 10:34:44.
quart.py File 7437 bytes May 23 2025 10:34:44.
ray.py File 4162 bytes May 23 2025 10:34:44.
rq.py File 5307 bytes May 23 2025 10:34:44.
rust_tracing.py File 9078 bytes May 23 2025 10:34:44.
sanic.py File 12960 bytes May 23 2025 10:34:44.
serverless.py File 1804 bytes May 23 2025 10:34:44.
socket.py File 3169 bytes May 23 2025 10:34:44.
sqlalchemy.py File 4372 bytes May 23 2025 10:34:44.
starlette.py File 26413 bytes May 23 2025 10:34:44.
starlite.py File 10620 bytes May 23 2025 10:34:44.
statsig.py File 1227 bytes May 23 2025 10:34:44.
stdlib.py File 8831 bytes May 23 2025 10:34:44.
strawberry.py File 14126 bytes May 23 2025 10:34:44.
sys_exit.py File 2493 bytes May 23 2025 10:34:44.
threading.py File 5392 bytes May 23 2025 10:34:44.
tornado.py File 7222 bytes May 23 2025 10:34:44.
trytond.py File 1651 bytes May 23 2025 10:34:44.
typer.py File 1815 bytes May 23 2025 10:34:44.
unleash.py File 1058 bytes May 23 2025 10:34:44.
wsgi.py File 10747 bytes May 23 2025 10:34:44.

Reading File: /opt/hc_python/lib/python3.12/site-packages/sentry_sdk/integrations/arq.py

import sys

import sentry_sdk
from sentry_sdk.consts import OP, SPANSTATUS
from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.tracing import Transaction, TransactionSource
from sentry_sdk.utils import (
    capture_internal_exceptions,
    ensure_integration_enabled,
    event_from_exception,
    SENSITIVE_DATA_SUBSTITUTE,
    parse_version,
    reraise,
)

try:
    import arq.worker
    from arq.version import VERSION as ARQ_VERSION
    from arq.connections import ArqRedis
    from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
except ImportError:
    raise DidNotEnable("Arq is not installed")

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any, Dict, Optional, Union

    from sentry_sdk._types import EventProcessor, Event, ExcInfo, Hint

    from arq.cron import CronJob
    from arq.jobs import Job
    from arq.typing import WorkerCoroutine
    from arq.worker import Function

ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)
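# arq uses these exceptions for job control flow (retry requests and failure
# signalling); _capture_exception below marks the transaction as aborted for
# them instead of reporting an error event.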


class ArqIntegration(Integration):
    identifier = "arq"
    origin = f"auto.queue.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None

        try:
            if isinstance(ARQ_VERSION, str):
                version = parse_version(ARQ_VERSION)
            else:
                version = ARQ_VERSION.version[:2]

        except (TypeError, ValueError):
            version = None

        _check_minimum_version(ArqIntegration, version)

        patch_enqueue_job()
        patch_run_job()
        patch_create_worker()

        # arq's worker logs job exceptions itself; ignoring that logger keeps
        # them from being reported twice alongside the events captured by this
        # integration.
        ignore_logger("arq.worker")


def patch_enqueue_job():
    # type: () -> None
    old_enqueue_job = ArqRedis.enqueue_job
    original_kwdefaults = old_enqueue_job.__kwdefaults__
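    # The original keyword-only defaults are copied onto the wrapper below so
    # introspection of enqueue_job keeps working; the wrapper itself just opens
    # a queue-submit span, named after the enqueued function, around the call.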

    async def _sentry_enqueue_job(self, function, *args, **kwargs):
        # type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
        if integration is None:
            return await old_enqueue_job(self, function, *args, **kwargs)

        with sentry_sdk.start_span(
            op=OP.QUEUE_SUBMIT_ARQ, name=function, origin=ArqIntegration.origin
        ):
            return await old_enqueue_job(self, function, *args, **kwargs)

    _sentry_enqueue_job.__kwdefaults__ = original_kwdefaults
    ArqRedis.enqueue_job = _sentry_enqueue_job


def patch_run_job():
    # type: () -> None
    old_run_job = Worker.run_job
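    # Each job runs in its own isolation scope with fresh breadcrumbs and a
    # placeholder transaction ("unknown arq task"); the event processor added
    # in _wrap_coroutine renames it to the real job name.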

    async def _sentry_run_job(self, job_id, score):
        # type: (Worker, str, int) -> None
        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
        if integration is None:
            return await old_run_job(self, job_id, score)

        with sentry_sdk.isolation_scope() as scope:
            scope._name = "arq"
            scope.clear_breadcrumbs()

            transaction = Transaction(
                name="unknown arq task",
                status="ok",
                op=OP.QUEUE_TASK_ARQ,
                source=TransactionSource.TASK,
                origin=ArqIntegration.origin,
            )

            with sentry_sdk.start_transaction(transaction):
                return await old_run_job(self, job_id, score)

    Worker.run_job = _sentry_run_job


def _capture_exception(exc_info):
    # type: (ExcInfo) -> None
    scope = sentry_sdk.get_current_scope()

    if scope.transaction is not None:
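        # Control-flow exceptions only abort the transaction and are not
        # reported; anything else marks it as an internal error before the
        # event is captured below.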
        if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
            scope.transaction.set_status(SPANSTATUS.ABORTED)
            return

        scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR)

    event, hint = event_from_exception(
        exc_info,
        client_options=sentry_sdk.get_client().options,
        mechanism={"type": ArqIntegration.identifier, "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)


def _make_event_processor(ctx, *args, **kwargs):
    # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor
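    # Runs for every event captured while the job executes: renames the
    # placeholder transaction to the job name and attaches the job id, retry
    # count, and (only when sending PII is allowed) the task arguments.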
    def event_processor(event, hint):
        # type: (Event, Hint) -> Optional[Event]

        with capture_internal_exceptions():
            scope = sentry_sdk.get_current_scope()
            if scope.transaction is not None:
                scope.transaction.name = ctx["job_name"]
                event["transaction"] = ctx["job_name"]

            tags = event.setdefault("tags", {})
            tags["arq_task_id"] = ctx["job_id"]
            tags["arq_task_retry"] = ctx["job_try"] > 1
            extra = event.setdefault("extra", {})
            extra["arq-job"] = {
                "task": ctx["job_name"],
                "args": (
                    args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
                ),
                "kwargs": (
                    kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
                ),
                "retry": ctx["job_try"],
            }

        return event

    return event_processor


def _wrap_coroutine(name, coroutine):
    # type: (str, WorkerCoroutine) -> WorkerCoroutine
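    # Wraps the task coroutine itself: registers the event processor above on
    # the isolation scope, then captures any unexpected exception before
    # re-raising it so arq's own error handling still runs.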

    async def _sentry_coroutine(ctx, *args, **kwargs):
        # type: (Dict[Any, Any], *Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
        if integration is None:
            return await coroutine(ctx, *args, **kwargs)

        sentry_sdk.get_isolation_scope().add_event_processor(
            _make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
        )

        try:
            result = await coroutine(ctx, *args, **kwargs)
        except Exception:
            exc_info = sys.exc_info()
            _capture_exception(exc_info)
            reraise(*exc_info)

        return result

    return _sentry_coroutine


def patch_create_worker():
    # type: () -> None
    old_create_worker = arq.worker.create_worker
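    # The worker's functions and cron jobs can arrive as a settings mapping,
    # as attributes on a settings class/object, or as keyword arguments; all
    # three paths below swap in instrumented coroutines before the worker is
    # built.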

    @ensure_integration_enabled(ArqIntegration, old_create_worker)
    def _sentry_create_worker(*args, **kwargs):
        # type: (*Any, **Any) -> Worker
        settings_cls = args[0]

        if isinstance(settings_cls, dict):
            if "functions" in settings_cls:
                settings_cls["functions"] = [
                    _get_arq_function(func)
                    for func in settings_cls.get("functions", [])
                ]
            if "cron_jobs" in settings_cls:
                settings_cls["cron_jobs"] = [
                    _get_arq_cron_job(cron_job)
                    for cron_job in settings_cls.get("cron_jobs", [])
                ]

        if hasattr(settings_cls, "functions"):
            settings_cls.functions = [
                _get_arq_function(func) for func in settings_cls.functions
            ]
        if hasattr(settings_cls, "cron_jobs"):
            settings_cls.cron_jobs = [
                _get_arq_cron_job(cron_job) for cron_job in settings_cls.cron_jobs
            ]

        if "functions" in kwargs:
            kwargs["functions"] = [
                _get_arq_function(func) for func in kwargs.get("functions", [])
            ]
        if "cron_jobs" in kwargs:
            kwargs["cron_jobs"] = [
                _get_arq_cron_job(cron_job) for cron_job in kwargs.get("cron_jobs", [])
            ]

        return old_create_worker(*args, **kwargs)

    arq.worker.create_worker = _sentry_create_worker


def _get_arq_function(func):
    # type: (Union[str, Function, WorkerCoroutine]) -> Function
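    # arq.worker.func() normalizes a string, coroutine, or existing Function
    # (per the annotation above) into a Function whose coroutine can then be
    # swapped for the wrapped version.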
    arq_func = arq.worker.func(func)
    arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine)

    return arq_func


def _get_arq_cron_job(cron_job):
    # type: (CronJob) -> CronJob
    cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine)

    return cron_job
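
For reference, a minimal sketch of how this integration is typically enabled in
application code; it assumes arq and sentry-sdk are installed, and the DSN is a
placeholder rather than a real project key:

import sentry_sdk
from sentry_sdk.integrations.arq import ArqIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[ArqIntegration()],
    traces_sample_rate=1.0,  # sample transactions so job runs are traced
)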
