Current Path: > > opt > > hc_python > > lib > python3.12 > site-packages > sentry_sdk > > integrations
Operation : Linux premium131.web-hosting.com 4.18.0-553.44.1.lve.el8.x86_64 #1 SMP Thu Mar 13 14:29:12 UTC 2025 x86_64 Software : Apache Server IP : 162.0.232.56 | Your IP: 216.73.216.111 Domains : 1034 Domain(s) Permission : [ 0755 ]
Name | Type | Size | Last Modified | Actions |
---|---|---|---|---|
__pycache__ | Directory | - | - | |
celery | Directory | - | - | |
django | Directory | - | - | |
grpc | Directory | - | - | |
opentelemetry | Directory | - | - | |
redis | Directory | - | - | |
spark | Directory | - | - | |
__init__.py | File | 10218 bytes | May 23 2025 10:34:44. | |
_asgi_common.py | File | 3187 bytes | May 23 2025 10:34:44. | |
_wsgi_common.py | File | 7558 bytes | May 23 2025 10:34:44. | |
aiohttp.py | File | 12895 bytes | May 23 2025 10:34:44. | |
anthropic.py | File | 9426 bytes | May 23 2025 10:34:44. | |
argv.py | File | 911 bytes | May 23 2025 10:34:44. | |
ariadne.py | File | 5834 bytes | May 23 2025 10:34:44. | |
arq.py | File | 7857 bytes | May 23 2025 10:34:44. | |
asgi.py | File | 12779 bytes | May 23 2025 10:34:44. | |
asyncio.py | File | 4034 bytes | May 23 2025 10:34:44. | |
asyncpg.py | File | 6521 bytes | May 23 2025 10:34:44. | |
atexit.py | File | 1652 bytes | May 23 2025 10:34:44. | |
aws_lambda.py | File | 17954 bytes | May 23 2025 10:34:44. | |
beam.py | File | 5182 bytes | May 23 2025 10:34:44. | |
boto3.py | File | 4411 bytes | May 23 2025 10:34:44. | |
bottle.py | File | 6615 bytes | May 23 2025 10:34:44. | |
chalice.py | File | 4699 bytes | May 23 2025 10:34:44. | |
clickhouse_driver.py | File | 5247 bytes | May 23 2025 10:34:44. | |
cloud_resource_context.py | File | 7780 bytes | May 23 2025 10:34:44. | |
cohere.py | File | 9333 bytes | May 23 2025 10:34:44. | |
dedupe.py | File | 1418 bytes | May 23 2025 10:34:44. | |
dramatiq.py | File | 5583 bytes | May 23 2025 10:34:44. | |
excepthook.py | File | 2408 bytes | May 23 2025 10:34:44. | |
executing.py | File | 1994 bytes | May 23 2025 10:34:44. | |
falcon.py | File | 9501 bytes | May 23 2025 10:34:44. | |
fastapi.py | File | 4718 bytes | May 23 2025 10:34:44. | |
flask.py | File | 8740 bytes | May 23 2025 10:34:44. | |
gcp.py | File | 8274 bytes | May 23 2025 10:34:44. | |
gnu_backtrace.py | File | 2894 bytes | May 23 2025 10:34:44. | |
gql.py | File | 4179 bytes | May 23 2025 10:34:44. | |
graphene.py | File | 5042 bytes | May 23 2025 10:34:44. | |
httpx.py | File | 5866 bytes | May 23 2025 10:34:44. | |
huey.py | File | 5443 bytes | May 23 2025 10:34:44. | |
huggingface_hub.py | File | 6551 bytes | May 23 2025 10:34:44. | |
langchain.py | File | 17718 bytes | May 23 2025 10:34:44. | |
launchdarkly.py | File | 1935 bytes | May 23 2025 10:34:44. | |
litestar.py | File | 11569 bytes | May 23 2025 10:34:44. | |
logging.py | File | 13506 bytes | May 23 2025 10:34:44. | |
loguru.py | File | 3620 bytes | May 23 2025 10:34:44. | |
modules.py | File | 820 bytes | May 23 2025 10:34:44. | |
openai.py | File | 15585 bytes | May 23 2025 10:34:44. | |
openfeature.py | File | 1235 bytes | May 23 2025 10:34:44. | |
pure_eval.py | File | 4581 bytes | May 23 2025 10:34:44. | |
pymongo.py | File | 6380 bytes | May 23 2025 10:34:44. | |
pyramid.py | File | 7364 bytes | May 23 2025 10:34:44. | |
quart.py | File | 7437 bytes | May 23 2025 10:34:44. | |
ray.py | File | 4162 bytes | May 23 2025 10:34:44. | |
rq.py | File | 5307 bytes | May 23 2025 10:34:44. | |
rust_tracing.py | File | 9078 bytes | May 23 2025 10:34:44. | |
sanic.py | File | 12960 bytes | May 23 2025 10:34:44. | |
serverless.py | File | 1804 bytes | May 23 2025 10:34:44. | |
socket.py | File | 3169 bytes | May 23 2025 10:34:44. | |
sqlalchemy.py | File | 4372 bytes | May 23 2025 10:34:44. | |
starlette.py | File | 26413 bytes | May 23 2025 10:34:44. | |
starlite.py | File | 10620 bytes | May 23 2025 10:34:44. | |
statsig.py | File | 1227 bytes | May 23 2025 10:34:44. | |
stdlib.py | File | 8831 bytes | May 23 2025 10:34:44. | |
strawberry.py | File | 14126 bytes | May 23 2025 10:34:44. | |
sys_exit.py | File | 2493 bytes | May 23 2025 10:34:44. | |
threading.py | File | 5392 bytes | May 23 2025 10:34:44. | |
tornado.py | File | 7222 bytes | May 23 2025 10:34:44. | |
trytond.py | File | 1651 bytes | May 23 2025 10:34:44. | |
typer.py | File | 1815 bytes | May 23 2025 10:34:44. | |
unleash.py | File | 1058 bytes | May 23 2025 10:34:44. | |
wsgi.py | File | 10747 bytes | May 23 2025 10:34:44. |
# Sentry integration for the Cohere Python SDK.
# Monkey-patches ``BaseCohere.chat`` / ``BaseCohere.chat_stream`` and
# ``Client.embed`` so every call is traced as a Sentry span carrying model
# parameters, token usage, and — only when PII sending is allowed — prompts
# and responses.

from functools import wraps

from sentry_sdk import consts
from sentry_sdk.ai.monitoring import record_token_usage
from sentry_sdk.consts import SPANDATA
from sentry_sdk.ai.utils import set_data_normalized

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any, Callable, Iterator
    from sentry_sdk.tracing import Span

import sentry_sdk
from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception

try:
    from cohere.client import Client
    from cohere.base_client import BaseCohere
    from cohere import (
        ChatStreamEndEvent,
        NonStreamedChatResponse,
    )

    if TYPE_CHECKING:
        from cohere import StreamedChatResponse
except ImportError:
    # Standard integration contract: signal that the integration cannot be
    # enabled when the instrumented package is absent.
    raise DidNotEnable("Cohere not installed")

# The stream-end response class was renamed in cohere 5.9.3; accept both names.
try:
    # cohere 5.9.3+
    from cohere import StreamEndStreamedChatResponse
except ImportError:
    from cohere import StreamedChatResponse_StreamEnd as StreamEndStreamedChatResponse


# Chat kwargs that are always safe to record, mapped to their span-data keys.
COLLECTED_CHAT_PARAMS = {
    "model": SPANDATA.AI_MODEL_ID,
    "k": SPANDATA.AI_TOP_K,
    "p": SPANDATA.AI_TOP_P,
    "seed": SPANDATA.AI_SEED,
    "frequency_penalty": SPANDATA.AI_FREQUENCY_PENALTY,
    "presence_penalty": SPANDATA.AI_PRESENCE_PENALTY,
    "raw_prompting": SPANDATA.AI_RAW_PROMPTING,
}

# Chat kwargs recorded only when PII sending is enabled (may contain prompts).
COLLECTED_PII_CHAT_PARAMS = {
    "tools": SPANDATA.AI_TOOLS,
    "preamble": SPANDATA.AI_PREAMBLE,
}

# Chat-response attributes always safe to record (stored under "ai.<attr>").
COLLECTED_CHAT_RESP_ATTRS = {
    "generation_id": SPANDATA.AI_GENERATION_ID,
    "is_search_required": SPANDATA.AI_SEARCH_REQUIRED,
    "finish_reason": SPANDATA.AI_FINISH_REASON,
}

# Chat-response attributes recorded only when PII sending is enabled.
COLLECTED_PII_CHAT_RESP_ATTRS = {
    "citations": SPANDATA.AI_CITATIONS,
    "documents": SPANDATA.AI_DOCUMENTS,
    "search_queries": SPANDATA.AI_SEARCH_QUERIES,
    "search_results": SPANDATA.AI_SEARCH_RESULTS,
    "tool_calls": SPANDATA.AI_TOOL_CALLS,
}


class CohereIntegration(Integration):
    # Sentry integration descriptor for Cohere; ``setup_once`` installs the
    # wrappers a single time per process.
    identifier = "cohere"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (CohereIntegration, bool) -> None
        # include_prompts: when False, prompts/responses are never attached to
        # spans even if the SDK is otherwise allowed to send default PII.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        # Patch the three Cohere client entry points with tracing wrappers.
        BaseCohere.chat = _wrap_chat(BaseCohere.chat, streaming=False)
        Client.embed = _wrap_embed(Client.embed)
        BaseCohere.chat_stream = _wrap_chat(BaseCohere.chat_stream, streaming=True)


def _capture_exception(exc):
    # type: (Any) -> None
    """Report an exception raised by a Cohere call as an unhandled Sentry event."""
    event, hint = event_from_exception(
        exc,
        client_options=sentry_sdk.get_client().options,
        mechanism={"type": "cohere", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)


def _wrap_chat(f, streaming):
    # type: (Callable[..., Any], bool) -> Callable[..., Any]
    """Wrap ``chat``/``chat_stream`` so each call runs inside a Sentry span.

    ``streaming`` selects the code path for the iterator-returning
    ``chat_stream`` versus the plain ``chat`` call.
    """

    def collect_chat_response_fields(span, res, include_pii):
        # type: (Span, NonStreamedChatResponse, bool) -> None
        # Copy attributes of the final chat response (non-streamed response or
        # stream-end payload) plus token usage onto the span.
        if include_pii:
            if hasattr(res, "text"):
                set_data_normalized(
                    span,
                    SPANDATA.AI_RESPONSES,
                    [res.text],
                )
            for pii_attr in COLLECTED_PII_CHAT_RESP_ATTRS:
                if hasattr(res, pii_attr):
                    set_data_normalized(span, "ai." + pii_attr, getattr(res, pii_attr))

        for attr in COLLECTED_CHAT_RESP_ATTRS:
            if hasattr(res, attr):
                set_data_normalized(span, "ai." + attr, getattr(res, attr))

        if hasattr(res, "meta"):
            # Some API versions report usage under billed_units, others under
            # tokens; prefer billed_units when both exist.
            if hasattr(res.meta, "billed_units"):
                record_token_usage(
                    span,
                    prompt_tokens=res.meta.billed_units.input_tokens,
                    completion_tokens=res.meta.billed_units.output_tokens,
                )
            elif hasattr(res.meta, "tokens"):
                record_token_usage(
                    span,
                    prompt_tokens=res.meta.tokens.input_tokens,
                    completion_tokens=res.meta.tokens.output_tokens,
                )

            if hasattr(res.meta, "warnings"):
                set_data_normalized(span, SPANDATA.AI_WARNINGS, res.meta.warnings)

    @wraps(f)
    def new_chat(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(CohereIntegration)

        # Only instrument calls that pass a plain-string ``message`` kwarg;
        # everything else is forwarded to the original method untouched.
        if (
            integration is None
            or "message" not in kwargs
            or not isinstance(kwargs.get("message"), str)
        ):
            return f(*args, **kwargs)

        message = kwargs.get("message")

        # The span is entered/exited manually (not via ``with``) because in
        # the streaming case it must stay open until the returned iterator is
        # exhausted, which happens after this function returns.
        span = sentry_sdk.start_span(
            op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE,
            name="cohere.client.Chat",
            origin=CohereIntegration.origin,
        )
        span.__enter__()
        try:
            res = f(*args, **kwargs)
        except Exception as e:
            _capture_exception(e)
            # NOTE(review): the span is closed without exception info here,
            # so it is not marked failed — confirm whether passing
            # (type(e), e, e.__traceback__) was intended.
            span.__exit__(None, None, None)
            raise e from None
        with capture_internal_exceptions():
            if should_send_default_pii() and integration.include_prompts:
                # Record the prior chat history plus the current user message.
                set_data_normalized(
                    span,
                    SPANDATA.AI_INPUT_MESSAGES,
                    list(
                        map(
                            lambda x: {
                                "role": getattr(x, "role", "").lower(),
                                "content": getattr(x, "message", ""),
                            },
                            kwargs.get("chat_history", []),
                        )
                    )
                    + [{"role": "user", "content": message}],
                )

                for k, v in COLLECTED_PII_CHAT_PARAMS.items():
                    if k in kwargs:
                        set_data_normalized(span, v, kwargs[k])

            for k, v in COLLECTED_CHAT_PARAMS.items():
                if k in kwargs:
                    set_data_normalized(span, v, kwargs[k])
            # NOTE(review): AI_STREAMING is set to False even when
            # ``streaming`` is True — looks like a possible oversight; verify
            # against the intended span schema before changing.
            set_data_normalized(span, SPANDATA.AI_STREAMING, False)

            if streaming:
                # Wrap the stream: response fields are collected when the
                # stream-end event passes through, and the span is closed only
                # once the consumer exhausts the iterator.
                old_iterator = res

                def new_iterator():
                    # type: () -> Iterator[StreamedChatResponse]
                    with capture_internal_exceptions():
                        for x in old_iterator:
                            if isinstance(x, ChatStreamEndEvent) or isinstance(
                                x, StreamEndStreamedChatResponse
                            ):
                                collect_chat_response_fields(
                                    span,
                                    x.response,
                                    include_pii=should_send_default_pii()
                                    and integration.include_prompts,
                                )
                            yield x
                    span.__exit__(None, None, None)

                return new_iterator()
            elif isinstance(res, NonStreamedChatResponse):
                collect_chat_response_fields(
                    span,
                    res,
                    include_pii=should_send_default_pii()
                    and integration.include_prompts,
                )
                span.__exit__(None, None, None)
            else:
                # Unknown response shape: flag it on the span and close.
                set_data_normalized(span, "unknown_response", True)
                span.__exit__(None, None, None)

            return res

    return new_chat


def _wrap_embed(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap ``Client.embed`` so each call runs inside a Sentry span."""

    @wraps(f)
    def new_embed(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(CohereIntegration)
        if integration is None:
            return f(*args, **kwargs)

        # No streaming here, so a plain context-managed span suffices.
        with sentry_sdk.start_span(
            op=consts.OP.COHERE_EMBEDDINGS_CREATE,
            name="Cohere Embedding Creation",
            origin=CohereIntegration.origin,
        ) as span:
            if "texts" in kwargs and (
                should_send_default_pii() and integration.include_prompts
            ):
                # NOTE(review): a single string is stored under AI_TEXTS but a
                # list of strings under AI_INPUT_MESSAGES — confirm whether
                # both should use the same key.
                if isinstance(kwargs["texts"], str):
                    set_data_normalized(span, SPANDATA.AI_TEXTS, [kwargs["texts"]])
                elif (
                    isinstance(kwargs["texts"], list)
                    and len(kwargs["texts"]) > 0
                    and isinstance(kwargs["texts"][0], str)
                ):
                    set_data_normalized(
                        span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"]
                    )

            if "model" in kwargs:
                set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])
            try:
                res = f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None
            if (
                hasattr(res, "meta")
                and hasattr(res.meta, "billed_units")
                and hasattr(res.meta.billed_units, "input_tokens")
            ):
                # Embeddings only consume input tokens, so prompt == total.
                record_token_usage(
                    span,
                    prompt_tokens=res.meta.billed_units.input_tokens,
                    total_tokens=res.meta.billed_units.input_tokens,
                )
            return res

    return new_embed
SILENT KILLER Tool