diff --git a/.env.otlp.example b/.env.otlp.example new file mode 100644 index 0000000000..0072b4e153 --- /dev/null +++ b/.env.otlp.example @@ -0,0 +1,20 @@ + +OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="http://localhost:9411/api/v2/spans" +OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="http://localhost:9411/api/v2/spans" +USE_ZIPKIN=true + +# Service identification +OTEL_SERVICE_NAME="my-graphrag-app" +OTEL_SERVICE_VERSION="1.0.0" +OTEL_SERVICE_NAMESPACE="my-company" + +# Tracing configuration +OTEL_ENABLE_TRACING="true" +OTEL_ENABLE_METRICS="true" +OTEL_TRACE_SAMPLE_RATE="1.0" # Sample 100% of traces + +# Environment +OTEL_DEPLOYMENT_ENVIRONMENT="production" + +# Disable telemetry completely (if needed) +DISABLE_TELEMETRY="false" \ No newline at end of file diff --git a/graphrag/__init__.py b/graphrag/__init__.py index a1e9b589bf..2fb6526be8 100644 --- a/graphrag/__init__.py +++ b/graphrag/__init__.py @@ -2,3 +2,19 @@ # Licensed under the MIT License """The GraphRAG package.""" +import logging + +from .telemetry import setup_telemetry, is_telemetry_disabled + +logger = logging.getLogger(__name__) + +# Initialize telemetry automatically when the package is imported +# unless explicitly disabled +if not is_telemetry_disabled(): + try: + setup_telemetry() + logger.info("Telemetry initialized automatically") + except Exception as e: + logger.warning(f"Failed to initialize telemetry: {e}") +else: + logger.info("Telemetry is disabled (set DISABLE_TELEMETRY=false to enable it)") diff --git a/graphrag/api/index.py b/graphrag/api/index.py index 7265e46187..41cd01ee49 100644 --- a/graphrag/api/index.py +++ b/graphrag/api/index.py @@ -22,10 +22,17 @@ from graphrag.index.typing.pipeline_run_result import PipelineRunResult from graphrag.index.workflows.factory import PipelineFactory from graphrag.logger.standard_logging import init_loggers +from graphrag.telemetry.decorators import add_trace logger = logging.getLogger(__name__) - +@add_trace( operation_name="build_index", attributes={ "component": "indexing", "operation": "build_index", } ) async def build_index( config: GraphRagConfig, method: IndexingMethod | str = IndexingMethod.Standard, diff --git a/graphrag/config/load_config.py b/graphrag/config/load_config.py index de9026037d..d95b6cfd34 100644 --- a/graphrag/config/load_config.py +++ b/graphrag/config/load_config.py @@ -14,6 +14,7 @@ from graphrag.config.create_graphrag_config import create_graphrag_config from graphrag.config.models.graph_rag_config import GraphRagConfig +from graphrag.telemetry.decorators import add_trace _default_config_files = ["settings.yaml", "settings.yml", "settings.json"] @@ -142,7 +143,9 @@ def _parse(file_extension: str, contents: str) -> dict[str, Any]: ) raise ValueError(msg) - +@add_trace( operation_name="graphrag.config.load_config", attributes={"component": "config"}) def load_config( root_dir: Path, config_filepath: Path | None = None, diff --git a/graphrag/index/operations/build_noun_graph/np_extractors/factory.py b/graphrag/index/operations/build_noun_graph/np_extractors/factory.py index b790c5a03c..7c880fc684 100644 --- a/graphrag/index/operations/build_noun_graph/np_extractors/factory.py +++ b/graphrag/index/operations/build_noun_graph/np_extractors/factory.py @@ -22,7 +22,7 @@ from graphrag.index.operations.build_noun_graph.np_extractors.syntactic_parsing_extractor import ( SyntacticNounPhraseExtractor, ) - +from
graphrag.telemetry.decorators import add_trace class NounPhraseExtractorFactory: """A factory class for creating noun phrase extractor.""" @@ -74,7 +74,10 @@ def get_np_extractor(cls, config: TextAnalyzerConfig) -> BaseNounPhraseExtractor word_delimiter=config.word_delimiter, ) - +@add_trace( + operation_name="np_extractor.factory.create_noun_phrase_extractor", + attributes={"component": "np_extractor_factory"}, +) def create_noun_phrase_extractor( analyzer_config: TextAnalyzerConfig, ) -> BaseNounPhraseExtractor: diff --git a/graphrag/index/workflows/create_communities.py b/graphrag/index/workflows/create_communities.py index c06d5f4b28..2d03eb2795 100644 --- a/graphrag/index/workflows/create_communities.py +++ b/graphrag/index/workflows/create_communities.py @@ -17,11 +17,13 @@ from graphrag.index.operations.create_graph import create_graph from graphrag.index.typing.context import PipelineRunContext from graphrag.index.typing.workflow import WorkflowFunctionOutput +from graphrag.telemetry.decorators import trace_workflow from graphrag.utils.storage import load_table_from_storage, write_table_to_storage logger = logging.getLogger(__name__) +@trace_workflow("create_communities") async def run_workflow( config: GraphRagConfig, context: PipelineRunContext, diff --git a/graphrag/index/workflows/load_input_documents.py b/graphrag/index/workflows/load_input_documents.py index 33e14d0cb2..81d05ed4fd 100644 --- a/graphrag/index/workflows/load_input_documents.py +++ b/graphrag/index/workflows/load_input_documents.py @@ -13,11 +13,13 @@ from graphrag.index.typing.context import PipelineRunContext from graphrag.index.typing.workflow import WorkflowFunctionOutput from graphrag.storage.pipeline_storage import PipelineStorage +from graphrag.telemetry.decorators import trace_workflow from graphrag.utils.storage import write_table_to_storage logger = logging.getLogger(__name__) +@trace_workflow("load_input_documents") async def run_workflow( config: GraphRagConfig, context: PipelineRunContext, diff --git a/graphrag/query/structured_search/global_search/search.py b/graphrag/query/structured_search/global_search/search.py index 86b95d0088..e94a7facf4 100644 --- a/graphrag/query/structured_search/global_search/search.py +++ b/graphrag/query/structured_search/global_search/search.py @@ -31,6 +31,7 @@ ) from graphrag.query.llm.text_utils import try_parse_json_object from graphrag.query.structured_search.base import BaseSearch, SearchResult +from graphrag.telemetry.decorators import trace_search_operation from graphrag.tokenizer.tokenizer import Tokenizer logger = logging.getLogger(__name__) @@ -132,6 +133,7 @@ async def stream_search( ): yield response + @trace_search_operation("global_search") async def search( self, query: str, diff --git a/graphrag/query/structured_search/local_search/search.py b/graphrag/query/structured_search/local_search/search.py index fdd72949da..6292bf0622 100644 --- a/graphrag/query/structured_search/local_search/search.py +++ b/graphrag/query/structured_search/local_search/search.py @@ -18,6 +18,7 @@ ConversationHistory, ) from graphrag.query.structured_search.base import BaseSearch, SearchResult +from graphrag.telemetry.decorators import trace_search_operation from graphrag.tokenizer.tokenizer import Tokenizer logger = logging.getLogger(__name__) @@ -48,6 +49,7 @@ def __init__( self.callbacks = callbacks or [] self.response_type = response_type + @trace_search_operation("local_search") async def search( self, query: str, diff --git a/graphrag/telemetry/README.md 
b/graphrag/telemetry/README.md new file mode 100644 index 0000000000..0f4d67ee19 --- /dev/null +++ b/graphrag/telemetry/README.md @@ -0,0 +1,339 @@ +# OpenTelemetry and Zipkin Integration for GraphRAG + +This module provides comprehensive observability for GraphRAG using OpenTelemetry with Zipkin as the tracing backend. + +## Features + +- **Automatic tracing** of API calls, workflows, and storage operations +- **Zipkin integration** for distributed tracing visualization +- **Configurable sampling** and filtering +- **Sensitive data protection** in trace attributes +- **Easy setup** with environment variable configuration +- **Graceful degradation** if telemetry setup fails + +## Quick Start + +### 1. Install Dependencies + +Dependencies are automatically included in the project via `pyproject.toml`: + +```toml +dependencies = [ + # ... other dependencies + "opentelemetry-api>=1.23.0", + "opentelemetry-sdk>=1.23.0", + "opentelemetry-exporter-zipkin-json>=1.23.0", + "opentelemetry-instrumentation>=0.44b0", + "opentelemetry-instrumentation-httpx>=0.44b0", + "opentelemetry-instrumentation-aiohttp-client>=0.44b0" +] +``` + +### 2. Start Zipkin + +Run Zipkin using Docker: + +```bash +docker run -d -p 9411:9411 openzipkin/zipkin +``` + +Zipkin UI will be available at: http://localhost:9411/zipkin + +### 3. Use GraphRAG (Automatic Setup) + +Telemetry is automatically enabled when you import GraphRAG: + +```python +from graphrag.api.index import build_index +from graphrag.api.query import lazy_search + +# Telemetry is automatically set up! +# Your operations will be traced to Zipkin +``` + +### 4. View Traces + +Open http://localhost:9411/zipkin and look for traces with service name `lazy-graphrag`. + + +## Configure Environment Variables + +Configure telemetry using environment variables: + +### Copy .env.otlp.example to .env +Update the variables as needed. + +### Programmatic Configuration + +```python +from graphrag.telemetry import TelemetryConfig, setup_telemetry + +config = TelemetryConfig( + service_name="my-graphrag-service", + service_version="2.0.0", + obs_endpoint="http://my-zipkin:9411/api/v2/spans", + trace_sample_rate=0.1, # Sample 10% of traces + deployment_environment="production" +) + +setup_telemetry(config) +``` + +### Custom Tracing + +Add tracing to your own functions: + +```python +from graphrag.telemetry.decorators import ( + add_trace, + trace_workflow, + trace_vector_store_operation, + trace_llm_operation, + trace_search_operation +) + +@add_trace("my_custom_function") +async def my_function(): + """This function will be traced.""" + pass + +@trace_workflow("data_processing") +async def process_data(): + """Workflow-specific tracing for indexing workflows.""" + pass + +@trace_search_operation("custom_search") +async def custom_search(): + """Search operation tracing.""" + pass + +@trace_vector_store_operation("search") +async def vector_search(): + """Vector store operation tracing.""" + pass + +@trace_llm_operation("gpt-4") +async def call_llm(): + """LLM operation tracing.""" + pass +``` + +### Adding Telemetry to New Workflows + +To add telemetry to a new workflow in `graphrag/index/workflows/`: + +1. Import the decorator: + ```python + from graphrag.telemetry.decorators import trace_workflow + ``` + +2. 
Add the decorator to the `run_workflow` function: + ```python + @trace_workflow("workflow_name") + async def run_workflow(config: GraphRagConfig, context: PipelineRunContext) -> WorkflowFunctionOutput: + # Your workflow code here + pass + ``` + +### Adding Telemetry to New Search Operations + +To add telemetry to a new search operation in `graphrag/query/structured_search/`: + +1. Import the decorator: + ```python + from graphrag.telemetry.decorators import trace_search_operation + ``` + +2. Add the decorator to the `search` method: + ```python + @trace_search_operation("search_type_name") + async def search(self, query: str, **kwargs) -> SearchResult: + # Your search code here + pass + ``` + +## Trace Attributes + +Traces include rich metadata: + +### Automatic Attributes + +- `service.name` - Service name +- `service.version` - Service version +- `service.namespace` - Service namespace +- `deployment.environment` - Environment (dev/staging/prod) +- `function.name` - Function being traced +- `function.module` - Module containing the function +- `component` - Component type (api, workflow, storage, etc.) + +### Function Arguments + +Function arguments are automatically captured as attributes: + +- **Simple types** (str, int, float, bool) - Full value +- **Collections** (list, tuple, dict) - Length/count only +- **Sensitive data** - Automatically redacted (passwords, tokens, keys, etc.) + +### Custom Attributes + +Add custom attributes to traces: + +```python +from graphrag.telemetry.setup import get_tracer +from opentelemetry import trace + +tracer = get_tracer(__name__) + +with tracer.start_as_current_span("my_operation") as span: + span.set_attribute("custom.attribute", "value") + span.set_attribute("document.count", 150) + # Your code here +``` + +## Security and Privacy + +### Automatic Data Protection + +The telemetry system automatically protects sensitive data: + +- **Redacted parameters**: Any parameter containing `password`, `token`, `key`, `secret`, `auth`, or `credential` +- **Redacted values**: Replaced with `[REDACTED]` in traces +- **Collection sizes only**: Lists/dicts show count, not contents + +### Disabling Telemetry + +Completely disable telemetry: + +```bash +# In .env +DISABLE_TELEMETRY="true" +``` + +Or disable specific features: + +```bash +# In .env +OTEL_ENABLE_TRACING="false" +OTEL_ENABLE_METRICS="false" +``` + +## Troubleshooting + +### Common Issues + +1. **No traces in Zipkin** + - Check Zipkin is running: `curl http://localhost:9411/api/v2/services` + - Verify endpoint: `echo $OTEL_EXPORTER_OTLP_TRACES_ENDPOINT` + - Check sampling rate: `echo $OTEL_TRACE_SAMPLE_RATE` + +2. **Telemetry setup failures** + - Check logs for warning messages + - Telemetry failures don't stop GraphRAG execution + - Verify OpenTelemetry dependencies are installed + +3. 
**High overhead** + - Reduce sampling rate: `OTEL_TRACE_SAMPLE_RATE="0.1"` + - Disable if not needed: `DISABLE_TELEMETRY="true"` + +### Debug Mode + +Enable debug logging: + +```python +import logging +logging.getLogger("graphrag.telemetry").setLevel(logging.DEBUG) +``` + +### Health Check + +Verify telemetry setup: + +```python +from graphrag.telemetry.setup import get_tracer + +tracer = get_tracer("test") +with tracer.start_as_current_span("health_check") as span: + span.set_attribute("test", "working") + print("Telemetry is working!") +``` + +## Architecture + +### Components + +- **`config.py`** - Configuration management +- **`setup.py`** - OpenTelemetry initialization and providers +- **`decorators.py`** - Tracing decorators and utilities +- **`__init__.py`** - Public API and auto-setup + +### Flow + +1. **Import** → Auto-setup telemetry (unless disabled) +2. **Function Call** → Decorator creates span +3. **Execution** → Attributes added, exceptions recorded +4. **Completion** → Span exported to Zipkin +5. **Visualization** → View in Zipkin UI + +## Example: Complete Workflow + +```python +import asyncio +from pathlib import Path +from graphrag.api.index import build_index +from graphrag.config.load_config import load_config +async def main(): + # Load config + config = load_config(Path("."), Path("config.yaml")) + + # Build index (automatically traced) + results = await build_index(config=config) + + print("Check Zipkin for traces: http://localhost:9411/zipkin") + +if __name__ == "__main__": + asyncio.run(main()) +``` + +## Advanced Usage + +### Custom Exporter + +Use a different exporter: + +```python +from opentelemetry.exporter.jaeger.thrift import JaegerExporter +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +# In your setup code +jaeger_exporter = JaegerExporter( + agent_host_name="localhost", + agent_port=6831, +) +span_processor = BatchSpanProcessor(jaeger_exporter) +``` + +### Metrics Integration + +Add custom metrics: + +```python +from graphrag.telemetry.setup import get_meter + +meter = get_meter(__name__) +counter = meter.create_counter("custom_operations") + +counter.add(1, {"operation": "data_load"}) +``` + +### Sampling Strategies + +Implement custom sampling: + +```python +from opentelemetry.sdk.trace.sampling import TraceIdRatioBased, ParentBased + +# Custom sampler +sampler = ParentBased(root=TraceIdRatioBased(0.1)) # 10% sampling +``` + +For more information, see the [OpenTelemetry Python documentation](https://opentelemetry.io/docs/instrumentation/python/).
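### Logging Integration (sketch)

The package also exports the `logging_integration` helpers (`setup_trace_logging`, `TracedLogger`, `add_log_to_span`, `trace_with_logs`), which attach standard `logging` records to the current span as events, but the README above does not show them in use. A minimal sketch of how they could be wired together, assuming telemetry is enabled; the logger name and the `"ingest_documents"` span name are illustrative only:

```python
import logging

from graphrag.telemetry import (
    TracedLogger,
    get_tracer,
    setup_telemetry,
    setup_trace_logging,
    shutdown_telemetry,
)

setup_telemetry()  # no-op if telemetry was already initialized on import
setup_trace_logging("graphrag", level=logging.INFO)  # mirror graphrag log records into span events

tracer = get_tracer(__name__)
traced_logger = TracedLogger(__name__)

with tracer.start_as_current_span("ingest_documents"):  # illustrative span name
    # Recorded as a normal log line and as an event on the active span
    traced_logger.info("Loaded %d documents", 42)

shutdown_telemetry()  # flush buffered spans before the process exits
```

`setup_trace_logging` attaches a `TracingLogHandler` to the named logger, so anything logged while a span is recording also appears on that span's timeline in the Zipkin UI.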
diff --git a/graphrag/telemetry/__init__.py b/graphrag/telemetry/__init__.py new file mode 100644 index 0000000000..39f8fc4415 --- /dev/null +++ b/graphrag/telemetry/__init__.py @@ -0,0 +1,31 @@ +"""OpenTelemetry integration for LazyGraphRAG.""" + +from .setup import setup_telemetry, shutdown_telemetry, get_tracer, get_meter +from .decorators import add_trace +from .config import TelemetryConfig, is_telemetry_disabled +from .logging_integration import ( + setup_trace_logging, + TracedLogger, + add_log_to_span, + trace_with_logs, + add_span_annotations, + set_span_tag, + mark_span_error, +) + +__all__ = [ + "setup_telemetry", + "shutdown_telemetry", + "get_tracer", + "get_meter", + "add_trace", + "TelemetryConfig", + "is_telemetry_disabled", + "setup_trace_logging", + "TracedLogger", + "add_log_to_span", + "trace_with_logs", + "add_span_annotations", + "set_span_tag", + "mark_span_error", +] diff --git a/graphrag/telemetry/config.py b/graphrag/telemetry/config.py new file mode 100644 index 0000000000..eeba387638 --- /dev/null +++ b/graphrag/telemetry/config.py @@ -0,0 +1,57 @@ +"""Telemetry configuration for OpenTelemetry.""" + +import os +from dataclasses import dataclass +from dotenv import load_dotenv +load_dotenv() # Load environment variables from .env file if present +OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = os.getenv("OTEL_EXPORTER_OTLP_TRACES_ENDPOINT", "http://localhost:9411/api/v2/spans") +EXTENSION_ARM_ID = os.getenv("EXTENSION_ARM_ID", "") + +@dataclass +class TelemetryConfig: + """Configuration for OpenTelemetry telemetry.""" + + # Telemetry control + telemetry_disabled: bool = False + # Service information + service_name: str = "graphrag" + service_version: str = "1.0.0" + service_namespace: str = "microsoft.research" + microsoft_resource_id: str = "" + cloud_role_instance: str = "graphrag-instance" + cloud_role: str = "graphrag-service" + # observability configuration + obs_endpoint: str = OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + # Tracing configuration + enable_tracing: bool = True + enable_metrics: bool = True + enable_logging: bool = True + + # Sampling configuration + trace_sample_rate: float = 1.0 # Sample 100% of traces by default + + # Resource attributes + deployment_environment: str = "development" + + @classmethod + def from_env(cls) -> "TelemetryConfig": + """Create configuration from environment variables.""" + return cls( + microsoft_resource_id=os.getenv("EXTENSION_ARM_ID", ""), + cloud_role_instance=os.getenv("OTEL_CLOUD_ROLE_INSTANCE", "graphrag-instance"), + cloud_role=os.getenv("OTEL_CLOUD_ROLE", "graphrag-service"), + service_name=os.getenv("OTEL_SERVICE_NAME", "graphrag"), + service_version=os.getenv("OTEL_SERVICE_VERSION", "1.0.0"), + service_namespace=os.getenv("OTEL_SERVICE_NAMESPACE", "microsoft.research"), + obs_endpoint=os.getenv("OTEL_EXPORTER_OTLP_TRACES_ENDPOINT", "http://localhost:9411/api/v2/spans"), + enable_tracing=os.getenv("OTEL_ENABLE_TRACING", "true").lower() == "true", + enable_metrics=os.getenv("OTEL_ENABLE_METRICS", "true").lower() == "true", + enable_logging=os.getenv("OTEL_ENABLE_LOGGING", "true").lower() == "true", + trace_sample_rate=float(os.getenv("OTEL_TRACE_SAMPLE_RATE", "1.0")), + deployment_environment=os.getenv("OTEL_DEPLOYMENT_ENVIRONMENT", "development"), + ) + + +def is_telemetry_disabled() -> bool: + """Check if telemetry is disabled via environment variable.""" + return os.getenv("DISABLE_TELEMETRY", "true").lower() == "true" \ No newline at end of file diff --git a/graphrag/telemetry/decorators.py 
b/graphrag/telemetry/decorators.py new file mode 100644 index 0000000000..e39bc3cd0b --- /dev/null +++ b/graphrag/telemetry/decorators.py @@ -0,0 +1,219 @@ +"""Decorators for OpenTelemetry tracing.""" + +import functools +import inspect +import logging +from typing import Any, Callable, Dict, Optional, TypeVar + +from opentelemetry import trace +from opentelemetry.trace import Status, StatusCode + +from .setup import get_tracer + +logger = logging.getLogger(__name__) + +F = TypeVar('F', bound=Callable[..., Any]) + +def add_trace( + operation_name: Optional[str] = None, + attributes: Optional[Dict[str, Any]] = None, + record_exception: bool = True, +) -> Callable[[F], F]: + """ + Decorator to trace function or method execution. + + Args: + operation_name: Name for the span. If None, uses function name. + attributes: Additional attributes to set on the span. + record_exception: Whether to record exceptions in the span. + + Returns: + Decorated function with tracing. + """ + def decorator(func: F) -> F: + tracer = get_tracer(__name__) + + # Determine the span name + span_name = operation_name or f"{func.__module__}.{func.__qualname__}" + + if inspect.iscoroutinefunction(func): + @functools.wraps(func) + async def async_wrapper(*args, **kwargs): + with tracer.start_as_current_span(span_name) as span: + # Set default attributes + span.set_attribute("function.name", func.__name__) + span.set_attribute("function.module", func.__module__) + + # Set custom attributes + if attributes: + for key, value in attributes.items(): + span.set_attribute(key, value) + + # Set function arguments as attributes (be careful with sensitive data) + # _set_function_arguments(span, func, args, kwargs) + + try: + result = await func(*args, **kwargs) + span.set_status(Status(StatusCode.OK)) + return result + except Exception as e: + if record_exception: + span.record_exception(e) + span.set_status(Status(StatusCode.ERROR, str(e))) + raise + + return async_wrapper + else: + @functools.wraps(func) + def sync_wrapper(*args, **kwargs): + with tracer.start_as_current_span(span_name) as span: + # Set default attributes + span.set_attribute("function.name", func.__name__) + span.set_attribute("function.module", func.__module__) + + # Set custom attributes + if attributes: + for key, value in attributes.items(): + span.set_attribute(key, value) + + # Set function arguments as attributes (be careful with sensitive data) + # _set_function_arguments(span, func, args, kwargs) + + try: + result = func(*args, **kwargs) + span.set_status(Status(StatusCode.OK)) + return result + except Exception as e: + if record_exception: + span.record_exception(e) + span.set_status(Status(StatusCode.ERROR, str(e))) + raise + + return sync_wrapper + + return decorator + +def _set_function_arguments(span: trace.Span, func: Callable, args: tuple, kwargs: dict) -> None: + """Set function arguments as span attributes, filtering sensitive data.""" + try: + # Get function signature + sig = inspect.signature(func) + bound_args = sig.bind(*args, **kwargs) + bound_args.apply_defaults() + + # Filter out sensitive arguments + sensitive_keys = {'password', 'token', 'key', 'secret', 'auth', 'credential'} + + for param_name, value in bound_args.arguments.items(): + # Skip sensitive parameters + if any(sensitive in param_name.lower() for sensitive in sensitive_keys): + span.set_attribute(f"function.arg.{param_name}", "[REDACTED]") + continue + + # Set attribute based on type + if isinstance(value, (str, int, float, bool)): + 
span.set_attribute(f"function.arg.{param_name}", value) + elif isinstance(value, (list, tuple)): + span.set_attribute(f"function.arg.{param_name}.length", len(value)) + elif isinstance(value, dict): + span.set_attribute(f"function.arg.{param_name}.keys_count", len(value)) + elif value is None: + span.set_attribute(f"function.arg.{param_name}", "None") + else: + span.set_attribute(f"function.arg.{param_name}.type", type(value).__name__) + + except Exception as e: + logger.debug(f"Failed to set function arguments as span attributes: {e}") + +def trace_workflow(workflow_name: str) -> Callable[[F], F]: + """ + Decorator specifically for GraphRAG workflow functions. + + Args: + workflow_name: Name of the workflow for the span. + + Returns: + Decorated function with workflow-specific tracing. + """ + return add_trace( + operation_name=f"workflow.{workflow_name}", + attributes={ + "component": "workflow", + "workflow.name": workflow_name, + } + ) + +def trace_vector_store_operation(operation_type: str) -> Callable[[F], F]: + """ + Decorator for vector store operations. + + Args: + operation_type: Type of vector store operation (search, insert, etc.) + + Returns: + Decorated function with vector store-specific tracing. + """ + return add_trace( + operation_name=f"vector_store.{operation_type}", + attributes={ + "component": "vector_store", + "vector_store.operation": operation_type, + } + ) + +def trace_llm_operation(model_name: Optional[str] = None, operation_name: Optional[str] = "llm.request") -> Callable[[F], F]: + """ + Decorator for LLM operations. + + Args: + model_name: Name of the LLM model being used. + + Returns: + Decorated function with LLM-specific tracing. + """ + attributes = { + "component": "llm", + } + if model_name: + attributes["llm.model"] = model_name + + return add_trace( + operation_name=operation_name, + attributes=attributes, + ) + +def trace_search_operation(operation_type: str) -> Callable[[F], F]: + """ + Decorator for search operations. + + Args: + operation_type: Type of search operation (query_decomposition, context_building, map_response, reduce_response, etc.) + + Returns: + Decorated function with search-specific tracing. + """ + return add_trace( + operation_name=f"search.{operation_type}", + attributes={ + "component": "search", + "search.operation": operation_type, + } + ) + +def trace_retrieval_operation(operation_type: str) -> Callable[[F], F]: + """ + Decorator for retrieval operations. + + Args: + operation_type: Type of retrieval operation (l1_ranking, l2_ranking, relevance_assessment, etc.) + + Returns: + Decorated function with retrieval-specific tracing. + """ + return add_trace( + operation_name=f"retrieval.{operation_type}", + attributes={ + "component": "retrieval", + "retrieval.operation": operation_type, + } + ) diff --git a/graphrag/telemetry/logging_integration.py b/graphrag/telemetry/logging_integration.py new file mode 100644 index 0000000000..d6d8038486 --- /dev/null +++ b/graphrag/telemetry/logging_integration.py @@ -0,0 +1,174 @@ +"""Integration between Python logging and OpenTelemetry tracing for Observability visibility.""" + +import logging +import time +from typing import Any, Optional + +from opentelemetry import trace +from opentelemetry.trace import Status, StatusCode + + +class TracingLogHandler(logging.Handler): + """ + Custom log handler that adds log messages as span events in OpenTelemetry traces. + + This allows you to see log.info(), log.warning(), etc. messages directly in Observability + as events within the trace spans. 
+ """ + + def emit(self, record: logging.LogRecord) -> None: + """Add log record as an event to the current span.""" + try: + current_span = trace.get_current_span() + if current_span and current_span.is_recording(): + # Format the log message + message = self.format(record) + + # Create event attributes + attributes = { + "log.level": record.levelname, + "log.logger": record.name, + "log.message": message, + } + + # Add exception info if present + if record.exc_info: + attributes["log.exception"] = self.formatException(record.exc_info) + + # Add file/line info + if hasattr(record, 'pathname'): + attributes["log.file"] = record.pathname + attributes["log.line"] = record.lineno + attributes["log.function"] = record.funcName + + # Add the log as a span event + current_span.add_event( + name=f"log.{record.levelname.lower()}", + attributes=attributes, + timestamp=int(record.created * 1_000_000_000) # Convert to nanoseconds + ) + + except Exception: + # Don't let logging errors break the application + pass + + +def setup_trace_logging(logger_name: Optional[str] = None, level: int = logging.INFO) -> None: + """ + Set up integration between logging and tracing. + + After calling this, all log messages from the specified logger (or root logger) + will appear as events in Observability traces. + + Args: + logger_name: Name of logger to integrate. If None, uses root logger. + level: Minimum log level to capture in traces. + """ + logger = logging.getLogger(logger_name) + + # Create and configure the tracing handler + tracing_handler = TracingLogHandler() + tracing_handler.setLevel(level) + + # Use a simple format since detailed info goes in span attributes + formatter = logging.Formatter('%(levelname)s: %(message)s') + tracing_handler.setFormatter(formatter) + + # Add the handler to the logger + logger.addHandler(tracing_handler) + + +def add_log_to_span(message: str, level: str = "info", **attributes) -> None: + """ + Manually add a log message to the current span as an event. + + Args: + message: Log message to add + level: Log level (info, warning, error, etc.) + **attributes: Additional attributes to include + """ + current_span = trace.get_current_span() + if current_span and current_span.is_recording(): + event_attributes = { + "log.level": level.upper(), + "log.message": message, + **attributes + } + + current_span.add_event( + name=f"log.{level}", + attributes=event_attributes + ) + + +def trace_with_logs(span_name: str): + """ + Context manager that creates a span and captures logs within it. + + Usage: + with trace_with_logs("my_operation") as span: + logger.info("This will appear in Observability!") + # Your code here + """ + tracer = trace.get_tracer(__name__) + return tracer.start_as_current_span(span_name) + + +class TracedLogger: + """ + A logger wrapper that automatically adds logs to the current trace span. 
+ + Usage: + traced_logger = TracedLogger("my_module") + traced_logger.info("This message will appear in Observability!") + """ + + def __init__(self, name: str): + self.logger = logging.getLogger(name) + self.name = name + + def _log_with_trace(self, level: str, message: str, *args, **kwargs): + """Log message both to regular logging and as a span event.""" + # Regular logging + getattr(self.logger, level)(message, *args, **kwargs) + + # Add to current span if available + add_log_to_span(message % args if args else message, level) + + def info(self, message: str, *args, **kwargs): + self._log_with_trace("info", message, *args, **kwargs) + + def warning(self, message: str, *args, **kwargs): + self._log_with_trace("warning", message, *args, **kwargs) + + def error(self, message: str, *args, **kwargs): + self._log_with_trace("error", message, *args, **kwargs) + + def debug(self, message: str, *args, **kwargs): + self._log_with_trace("debug", message, *args, **kwargs) + + +def add_span_annotations(*messages: str) -> None: + """ + Add multiple messages as annotations to the current span. + These will appear as timeline events in Observability. + """ + current_span = trace.get_current_span() + if current_span and current_span.is_recording(): + for message in messages: + current_span.add_event(message) + + +def set_span_tag(key: str, value: Any) -> None: + """Add a tag/attribute to the current span.""" + current_span = trace.get_current_span() + if current_span and current_span.is_recording(): + current_span.set_attribute(key, str(value)) + + +def mark_span_error(error_message: str) -> None: + """Mark the current span as having an error.""" + current_span = trace.get_current_span() + if current_span and current_span.is_recording(): + current_span.set_status(Status(StatusCode.ERROR, error_message)) + current_span.add_event("error", {"error.message": error_message}) diff --git a/graphrag/telemetry/setup.py b/graphrag/telemetry/setup.py new file mode 100644 index 0000000000..8979f3950f --- /dev/null +++ b/graphrag/telemetry/setup.py @@ -0,0 +1,193 @@ +"""OpenTelemetry setup and configuration.""" + +import logging +from typing import Optional + +from opentelemetry import trace, metrics +from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor +from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.sdk.trace.sampling import TraceIdRatioBased +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +try: + from opentelemetry.exporter.zipkin.json import ZipkinExporter + ZIPKIN_AVAILABLE = True +except ImportError: + ZIPKIN_AVAILABLE = False +from dotenv import load_dotenv +load_dotenv() # Load environment variables from .env file if present +import os, grpc +USE_ZIPKIN = os.getenv("USE_ZIPKIN", "false") + +from .config import TelemetryConfig + +logger = logging.getLogger(__name__) + +_telemetry_initialized = False +_tracer_provider: Optional[TracerProvider] = None +_meter_provider: Optional[MeterProvider] = None + + +def setup_telemetry(config: Optional[TelemetryConfig] = None) -> None: + """Set up OpenTelemetry tracing and metrics.""" + global _telemetry_initialized, _tracer_provider, _meter_provider + + if _telemetry_initialized: + logger.warning("Telemetry already initialized, skipping setup") + 
return + + if config is None: + config = TelemetryConfig.from_env() + + logger.info(f"Setting up telemetry for service: {config.service_name}") + + # Create resource with service information + resource = Resource.create({ + "microsoft.resourceId": config.microsoft_resource_id, + "service.name": config.service_name, + "service.version": config.service_version, + "service.namespace": config.service_namespace, + "deployment.environment": config.deployment_environment, + }) + + # Set up tracing + if config.enable_tracing: + _setup_tracing(config, resource) + + # Set up metrics + if config.enable_metrics: + _setup_metrics(config, resource) + + # Set up automatic instrumentation + _setup_instrumentation() + + _telemetry_initialized = True + logger.info("Telemetry setup completed successfully") + +def _get_credentials(): + """Get credentials for OpenTelemetry exporter.""" + # When Zipkin is used, skip TLS credentials and use an insecure channel + if USE_ZIPKIN.lower() == "true": + isInSecure = True + credentials = None + return credentials, isInSecure + + # Load the root certificate for TLS + try: + with open('./certs/root-certs.pem', 'rb') as cert_file: + root_cert = cert_file.read() + except FileNotFoundError as e: + logger.error(f"Failed to load root certificate: {e}") + root_cert = None + + # Create TLS credentials if the certificate was loaded, otherwise log an error + if root_cert: + credentials = grpc.ssl_channel_credentials(root_cert) + isInSecure = False + else: + logger.error("Root certificate not found. Continuing without Observability") + credentials = None + isInSecure = True + # raise RuntimeError("Root certificate not found.") + return credentials, isInSecure + +def _setup_tracing(config: TelemetryConfig, resource: Resource) -> None: + """Set up OpenTelemetry tracing.""" + global _tracer_provider + + # Create tracer provider with sampling + sampler = TraceIdRatioBased(config.trace_sample_rate) + _tracer_provider = TracerProvider(resource=resource, sampler=sampler) + + # Choose exporter based on USE_ZIPKIN setting + if USE_ZIPKIN.lower() == "true": + # Use Zipkin exporter for local development/testing + if ZIPKIN_AVAILABLE: + exporter = ZipkinExporter(endpoint=config.obs_endpoint) + logger.info(f"Tracing configured with Zipkin endpoint: {config.obs_endpoint}") + else: + logger.warning("Zipkin exporter not available.
Please install with: pip install opentelemetry-exporter-zipkin-json") + logger.info("Falling back to OTLP exporter") + credentials, isInSecure = _get_credentials() + exporter = OTLPSpanExporter( + endpoint=config.obs_endpoint, + credentials=credentials, + insecure=isInSecure + ) + else: + # Use OTLP exporter for production + credentials, isInSecure = _get_credentials() + exporter = OTLPSpanExporter( + endpoint=config.obs_endpoint, + credentials=credentials, + insecure=isInSecure + ) + logger.info(f"Tracing configured with OTLP endpoint: {config.obs_endpoint}") + + # Create span processor with the chosen exporter + span_processor = BatchSpanProcessor(exporter) + _tracer_provider.add_span_processor(span_processor) + + # Set the global tracer provider + trace.set_tracer_provider(_tracer_provider) + + +def _setup_metrics(config: TelemetryConfig, resource: Resource) -> None: + """Set up OpenTelemetry metrics.""" + global _meter_provider + + # Create meter provider + _meter_provider = MeterProvider(resource=resource) + + # Set the global meter provider + metrics.set_meter_provider(_meter_provider) + + logger.info("Metrics configured") + + +def _setup_instrumentation() -> None: + """Set up automatic instrumentation for common libraries.""" + try: + # Instrument HTTP clients + HTTPXClientInstrumentor().instrument() + AioHttpClientInstrumentor().instrument() + + logger.info("Automatic instrumentation configured") + except Exception as e: + logger.warning(f"Failed to set up some instrumentation: {e}") + + +def shutdown_telemetry() -> None: + """Shutdown telemetry providers and flush any remaining data.""" + global _telemetry_initialized, _tracer_provider, _meter_provider + + if not _telemetry_initialized: + return + + logger.info("Shutting down telemetry") + + # Shutdown tracer provider + if _tracer_provider: + _tracer_provider.shutdown() + _tracer_provider = None + + # Shutdown meter provider + if _meter_provider: + _meter_provider.shutdown() + _meter_provider = None + + _telemetry_initialized = False + logger.info("Telemetry shutdown completed") + + +def get_tracer(name: str) -> trace.Tracer: + """Get a tracer instance.""" + return trace.get_tracer(name) + + +def get_meter(name: str) -> metrics.Meter: + """Get a meter instance.""" + return metrics.get_meter(name) diff --git a/graphrag/tokenizer/get_tokenizer.py b/graphrag/tokenizer/get_tokenizer.py index 5d1ef40f0c..981ed0647c 100644 --- a/graphrag/tokenizer/get_tokenizer.py +++ b/graphrag/tokenizer/get_tokenizer.py @@ -8,8 +8,12 @@ from graphrag.tokenizer.litellm_tokenizer import LitellmTokenizer from graphrag.tokenizer.tiktoken_tokenizer import TiktokenTokenizer from graphrag.tokenizer.tokenizer import Tokenizer +from graphrag.telemetry.decorators import add_trace - +@add_trace( + operation_name="tokenizer.get_tokenizer", + attributes={"component": "tokenizer"}, +) def get_tokenizer( model_config: LanguageModelConfig | None = None, encoding_model: str = ENCODING_MODEL, diff --git a/graphrag/utils/api.py b/graphrag/utils/api.py index db3d94790d..9d08c17775 100644 --- a/graphrag/utils/api.py +++ b/graphrag/utils/api.py @@ -21,6 +21,9 @@ VectorStoreSearchResult, ) from graphrag.vector_stores.factory import VectorStoreFactory +from graphrag.telemetry.decorators import trace_retrieval_operation +import logging +logger = logging.getLogger(__name__) class MultiVectorStore(BaseVectorStore): @@ -93,7 +96,7 @@ def similarity_search_by_text( ) return [] - +@trace_retrieval_operation(operation_type="get_embedding_store") def get_embedding_store( 
config_args: dict[str, dict], embedding_name: str, diff --git a/pyproject.toml b/pyproject.toml index 9c299bbefe..30bde23d31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,6 +67,14 @@ dependencies = [ "textblob>=0.18.0.post0", "spacy>=3.8.4", "litellm>=1.77.1", + "opentelemetry-api>=1.23.0", + "opentelemetry-sdk>=1.23.0", + "opentelemetry-exporter-zipkin-json>=1.23.0", + "opentelemetry.exporter.otlp>=1.20b0", + "opentelemetry-instrumentation>=0.44b0", + "opentelemetry-instrumentation-httpx>=0.44b0", + "opentelemetry-instrumentation-aiohttp-client>=0.44b0", + "dotenv>=0.9.9", ] [dependency-groups] diff --git a/uv.lock b/uv.lock index a4f7b6a1f5..176d3c8046 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10, <3.13" resolution-markers = [ "python_full_version >= '3.12'", @@ -242,6 +242,41 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, ] +[[package]] +name = "asyncpg" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/07/1650a8c30e3a5c625478fa8aafd89a8dd7d85999bf7169b16f54973ebf2c/asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e", size = 673143, upload-time = "2024-10-20T00:29:08.846Z" }, + { url = "https://files.pythonhosted.org/packages/a0/9a/568ff9b590d0954553c56806766914c149609b828c426c5118d4869111d3/asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0", size = 645035, upload-time = "2024-10-20T00:29:12.02Z" }, + { url = "https://files.pythonhosted.org/packages/de/11/6f2fa6c902f341ca10403743701ea952bca896fc5b07cc1f4705d2bb0593/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f", size = 2912384, upload-time = "2024-10-20T00:29:13.644Z" }, + { url = "https://files.pythonhosted.org/packages/83/83/44bd393919c504ffe4a82d0aed8ea0e55eb1571a1dea6a4922b723f0a03b/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af", size = 2947526, upload-time = "2024-10-20T00:29:15.871Z" }, + { url = "https://files.pythonhosted.org/packages/08/85/e23dd3a2b55536eb0ded80c457b0693352262dc70426ef4d4a6fc994fa51/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75", size = 2895390, upload-time = "2024-10-20T00:29:19.346Z" }, + { url = "https://files.pythonhosted.org/packages/9b/26/fa96c8f4877d47dc6c1864fef5500b446522365da3d3d0ee89a5cce71a3f/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f", size = 3015630, 
upload-time = "2024-10-20T00:29:21.186Z" }, + { url = "https://files.pythonhosted.org/packages/34/00/814514eb9287614188a5179a8b6e588a3611ca47d41937af0f3a844b1b4b/asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf", size = 568760, upload-time = "2024-10-20T00:29:22.769Z" }, + { url = "https://files.pythonhosted.org/packages/f0/28/869a7a279400f8b06dd237266fdd7220bc5f7c975348fea5d1e6909588e9/asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50", size = 625764, upload-time = "2024-10-20T00:29:25.882Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506, upload-time = "2024-10-20T00:29:27.988Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922, upload-time = "2024-10-20T00:29:29.391Z" }, + { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565, upload-time = "2024-10-20T00:29:30.832Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962, upload-time = "2024-10-20T00:29:33.114Z" }, + { url = "https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791, upload-time = "2024-10-20T00:29:34.677Z" }, + { url = "https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696, upload-time = "2024-10-20T00:29:36.389Z" }, + { url = "https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358, upload-time = "2024-10-20T00:29:37.915Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375, upload-time = "2024-10-20T00:29:39.987Z" }, + { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162, upload-time = "2024-10-20T00:29:41.88Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025, upload-time = "2024-10-20T00:29:43.352Z" }, + { url = "https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243, upload-time = "2024-10-20T00:29:44.922Z" }, + { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059, upload-time = "2024-10-20T00:29:46.891Z" }, + { url = "https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596, upload-time = "2024-10-20T00:29:49.201Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632, upload-time = "2024-10-20T00:29:50.768Z" }, + { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186, upload-time = "2024-10-20T00:29:52.394Z" }, + { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064, upload-time = "2024-10-20T00:29:53.757Z" }, +] + [[package]] name = "attrs" version = "25.3.0" @@ -936,6 +971,17 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] +[[package]] +name = "dotenv" +version = "0.9.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dotenv" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/b7/545d2c10c1fc15e48653c91efde329a790f2eecfbbf2bd16003b5db2bab0/dotenv-0.9.9-py2.py3-none-any.whl", hash = "sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9", size = 1892, upload-time = "2025-02-19T22:15:01.647Z" }, +] + [[package]] name = "environs" version = "14.2.0" @@ -1195,17 +1241,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" }, ] +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + 
{ name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" }, +] + [[package]] name = "graphrag" version = "2.7.0" source = { editable = "." } dependencies = [ { name = "aiofiles" }, + { name = "asyncpg" }, { name = "azure-cosmos" }, { name = "azure-identity" }, { name = "azure-search-documents" }, { name = "azure-storage-blob" }, { name = "devtools" }, + { name = "dotenv" }, { name = "environs" }, { name = "fnllm", extra = ["azure", "openai"] }, { name = "future" }, @@ -1218,6 +1278,12 @@ dependencies = [ { name = "nltk" }, { name = "numpy" }, { name = "openai" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-instrumentation-aiohttp-client" }, + { name = "opentelemetry-instrumentation-httpx" }, + { name = "opentelemetry-sdk" }, { name = "pandas" }, { name = "pyarrow" }, { name = "pydantic" }, @@ -1258,11 +1324,13 @@ dev = [ [package.metadata] requires-dist = [ { name = "aiofiles", specifier = ">=24.1.0" }, + { name = "asyncpg", specifier = ">=0.30.0" }, { name = "azure-cosmos", specifier = ">=4.9.0" }, { name = "azure-identity", specifier = ">=1.19.0" }, { name = "azure-search-documents", specifier = ">=11.5.2" }, { name = "azure-storage-blob", specifier = ">=12.24.0" }, { name = "devtools", specifier = ">=0.12.2" }, + { name = "dotenv", specifier = ">=0.9.9" }, { name = "environs", specifier = ">=11.0.0" }, { name = "fnllm", extras = ["azure", "openai"], specifier = ">=0.4.1" }, { name = "future", specifier = ">=1.0.0" }, @@ -1274,6 +1342,12 @@ requires-dist = [ { name = "nltk", specifier = "==3.9.1" }, { name = "numpy", specifier = ">=1.25.2" }, { name = "openai", specifier = ">=1.68.0" }, + { name = "opentelemetry-api", specifier = ">=1.23.0" }, + { name = "opentelemetry-exporter-otlp", specifier = ">=1.20b0" }, + { name = "opentelemetry-instrumentation", specifier = ">=0.44b0" }, + { name = "opentelemetry-instrumentation-aiohttp-client", specifier = ">=0.44b0" }, + { name = "opentelemetry-instrumentation-httpx", specifier = ">=0.44b0" }, + { name = "opentelemetry-sdk", specifier = ">=1.23.0" }, { name = "pandas", specifier = ">=2.2.3" }, { name = "pyarrow", specifier = ">=17.0.0" }, { name = "pydantic", specifier = ">=2.10.3" }, @@ -1351,6 +1425,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3b/51/21097af79f3d68626539ab829bdbf6cc42933f020e161972927d916e394c/graspologic_native-1.2.5-cp38-abi3-win_amd64.whl", hash = "sha256:c3ef2172d774083d7e2c8e77daccd218571ddeebeb2c1703cebb1a2cc4c56e07", size = 210438, upload-time = "2025-04-02T19:34:21.139Z" }, ] +[[package]] +name = "grpcio" +version = "1.75.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = 
"sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/57/89fd829fb00a6d0bee3fbcb2c8a7aa0252d908949b6ab58bfae99d39d77e/grpcio-1.75.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:1712b5890b22547dd29f3215c5788d8fc759ce6dd0b85a6ba6e2731f2d04c088", size = 5705534, upload-time = "2025-09-26T09:00:52.225Z" }, + { url = "https://files.pythonhosted.org/packages/76/dd/2f8536e092551cf804e96bcda79ecfbc51560b214a0f5b7ebc253f0d4664/grpcio-1.75.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8d04e101bba4b55cea9954e4aa71c24153ba6182481b487ff376da28d4ba46cf", size = 11484103, upload-time = "2025-09-26T09:00:59.457Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3d/affe2fb897804c98d56361138e73786af8f4dd876b9d9851cfe6342b53c8/grpcio-1.75.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:683cfc70be0c1383449097cba637317e4737a357cfc185d887fd984206380403", size = 6289953, upload-time = "2025-09-26T09:01:03.699Z" }, + { url = "https://files.pythonhosted.org/packages/87/aa/0f40b7f47a0ff10d7e482bc3af22dac767c7ff27205915f08962d5ca87a2/grpcio-1.75.1-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:491444c081a54dcd5e6ada57314321ae526377f498d4aa09d975c3241c5b9e1c", size = 6949785, upload-time = "2025-09-26T09:01:07.504Z" }, + { url = "https://files.pythonhosted.org/packages/a5/45/b04407e44050781821c84f26df71b3f7bc469923f92f9f8bc27f1406dbcc/grpcio-1.75.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce08d4e112d0d38487c2b631ec8723deac9bc404e9c7b1011426af50a79999e4", size = 6465708, upload-time = "2025-09-26T09:01:11.028Z" }, + { url = "https://files.pythonhosted.org/packages/09/3e/4ae3ec0a4d20dcaafbb6e597defcde06399ccdc5b342f607323f3b47f0a3/grpcio-1.75.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5a2acda37fc926ccc4547977ac3e56b1df48fe200de968e8c8421f6e3093df6c", size = 7100912, upload-time = "2025-09-26T09:01:14.393Z" }, + { url = "https://files.pythonhosted.org/packages/34/3f/a9085dab5c313bb0cb853f222d095e2477b9b8490a03634cdd8d19daa5c3/grpcio-1.75.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:745c5fe6bf05df6a04bf2d11552c7d867a2690759e7ab6b05c318a772739bd75", size = 8042497, upload-time = "2025-09-26T09:01:17.759Z" }, + { url = "https://files.pythonhosted.org/packages/c3/87/ea54eba931ab9ed3f999ba95f5d8d01a20221b664725bab2fe93e3dee848/grpcio-1.75.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:259526a7159d39e2db40d566fe3e8f8e034d0fb2db5bf9c00e09aace655a4c2b", size = 7493284, upload-time = "2025-09-26T09:01:20.896Z" }, + { url = "https://files.pythonhosted.org/packages/b7/5e/287f1bf1a998f4ac46ef45d518de3b5da08b4e86c7cb5e1108cee30b0282/grpcio-1.75.1-cp310-cp310-win32.whl", hash = "sha256:f4b29b9aabe33fed5df0a85e5f13b09ff25e2c05bd5946d25270a8bd5682dac9", size = 3950809, upload-time = "2025-09-26T09:01:23.695Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/3cbfc06a4ec160dc77403b29ecb5cf76ae329eb63204fea6a7c715f1dfdb/grpcio-1.75.1-cp310-cp310-win_amd64.whl", hash = "sha256:cf2e760978dcce7ff7d465cbc7e276c3157eedc4c27aa6de7b594c7a295d3d61", size = 4644704, upload-time = "2025-09-26T09:01:25.763Z" }, + { url = "https://files.pythonhosted.org/packages/0c/3c/35ca9747473a306bfad0cee04504953f7098527cd112a4ab55c55af9e7bd/grpcio-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:573855ca2e58e35032aff30bfbd1ee103fbcf4472e4b28d4010757700918e326", 
size = 5709761, upload-time = "2025-09-26T09:01:28.528Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2c/ecbcb4241e4edbe85ac2663f885726fea0e947767401288b50d8fdcb9200/grpcio-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6a4996a2c8accc37976dc142d5991adf60733e223e5c9a2219e157dc6a8fd3a2", size = 11496691, upload-time = "2025-09-26T09:01:31.214Z" }, + { url = "https://files.pythonhosted.org/packages/81/40/bc07aee2911f0d426fa53fe636216100c31a8ea65a400894f280274cb023/grpcio-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1ea1bbe77ecbc1be00af2769f4ae4a88ce93be57a4f3eebd91087898ed749f9", size = 6296084, upload-time = "2025-09-26T09:01:34.596Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d1/10c067f6c67396cbf46448b80f27583b5e8c4b46cdfbe18a2a02c2c2f290/grpcio-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e5b425aee54cc5e3e3c58f00731e8a33f5567965d478d516d35ef99fd648ab68", size = 6950403, upload-time = "2025-09-26T09:01:36.736Z" }, + { url = "https://files.pythonhosted.org/packages/3f/42/5f628abe360b84dfe8dd8f32be6b0606dc31dc04d3358eef27db791ea4d5/grpcio-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0049a7bf547dafaeeb1db17079ce79596c298bfe308fc084d023c8907a845b9a", size = 6470166, upload-time = "2025-09-26T09:01:39.474Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/a24035080251324019882ee2265cfde642d6476c0cf8eb207fc693fcebdc/grpcio-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b8ea230c7f77c0a1a3208a04a1eda164633fb0767b4cefd65a01079b65e5b1f", size = 7107828, upload-time = "2025-09-26T09:01:41.782Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/d18b984c1c9ba0318e3628dbbeb6af77a5007f02abc378c845070f2d3edd/grpcio-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:36990d629c3c9fb41e546414e5af52d0a7af37ce7113d9682c46d7e2919e4cca", size = 8045421, upload-time = "2025-09-26T09:01:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/4bf9aacff45deca5eac5562547ed212556b831064da77971a4e632917da3/grpcio-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b10ad908118d38c2453ade7ff790e5bce36580c3742919007a2a78e3a1e521ca", size = 7503290, upload-time = "2025-09-26T09:01:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/d8d69d10223cb54c887a2180bd29fe5fa2aec1d4995c8821f7aa6eaf72e4/grpcio-1.75.1-cp311-cp311-win32.whl", hash = "sha256:d6be2b5ee7bea656c954dcf6aa8093c6f0e6a3ef9945c99d99fcbfc88c5c0bfe", size = 3950631, upload-time = "2025-09-26T09:01:51.23Z" }, + { url = "https://files.pythonhosted.org/packages/8a/40/7b8642d45fff6f83300c24eaac0380a840e5e7fe0e8d80afd31b99d7134e/grpcio-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:61c692fb05956b17dd6d1ab480f7f10ad0536dba3bc8fd4e3c7263dc244ed772", size = 4646131, upload-time = "2025-09-26T09:01:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, + { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -2778,6 +2893,175 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/00/e1/47887212baa7bc0532880d33d5eafbdb46fcc4b53789b903282a74a85b5b/openai-1.106.1-py3-none-any.whl", hash = "sha256:bfdef37c949f80396c59f2c17e0eda35414979bc07ef3379596a93c9ed044f3a", size = 930768, upload-time = "2025-09-04T18:17:13.349Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/04/05040d7ce33a907a2a02257e601992f0cdf11c73b33f13c4492bf6c3d6d5/opentelemetry_api-1.37.0.tar.gz", hash = "sha256:540735b120355bd5112738ea53621f8d5edb35ebcd6fe21ada3ab1c61d1cd9a7", size = 64923, upload-time = "2025-09-11T10:29:01.662Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/91/48/28ed9e55dcf2f453128df738210a980e09f4e468a456fa3c763dbc8be70a/opentelemetry_api-1.37.0-py3-none-any.whl", hash = "sha256:accf2024d3e89faec14302213bc39550ec0f4095d1cf5ca688e1bfb1c8612f47", size = 65732, upload-time = "2025-09-11T10:28:41.826Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp" +version = "1.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-exporter-otlp-proto-grpc" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/df/47fde1de15a3d5ad410e98710fac60cd3d509df5dc7ec1359b71d6bf7e70/opentelemetry_exporter_otlp-1.37.0.tar.gz", hash = "sha256:f85b1929dd0d750751cc9159376fb05aa88bb7a08b6cdbf84edb0054d93e9f26", size = 6145, upload-time = "2025-09-11T10:29:03.075Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/23/7e35e41111e3834d918e414eca41555d585e8860c9149507298bb3b9b061/opentelemetry_exporter_otlp-1.37.0-py3-none-any.whl", hash = "sha256:bd44592c6bc7fc3e5c0a9b60f2ee813c84c2800c449e59504ab93f356cc450fc", size = 7019, upload-time = "2025-09-11T10:28:44.094Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-proto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/6c/10018cbcc1e6fff23aac67d7fd977c3d692dbe5f9ef9bb4db5c1268726cc/opentelemetry_exporter_otlp_proto_common-1.37.0.tar.gz", hash = "sha256:c87a1bdd9f41fdc408d9cc9367bb53f8d2602829659f2b90be9f9d79d0bfe62c", size = 20430, upload-time = "2025-09-11T10:29:03.605Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/13/b4ef09837409a777f3c0af2a5b4ba9b7af34872bc43609dda0c209e4060d/opentelemetry_exporter_otlp_proto_common-1.37.0-py3-none-any.whl", hash = "sha256:53038428449c559b0c564b8d718df3314da387109c4d36bd1b94c9a641b0292e", size = 18359, upload-time = "2025-09-11T10:28:44.939Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/11/4ad0979d0bb13ae5a845214e97c8d42da43980034c30d6f72d8e0ebe580e/opentelemetry_exporter_otlp_proto_grpc-1.37.0.tar.gz", hash = "sha256:f55bcb9fc848ce05ad3dd954058bc7b126624d22c4d9e958da24d8537763bec5", size = 24465, upload-time = "2025-09-11T10:29:04.172Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/17/46630b74751031a658706bef23ac99cdc2953cd3b2d28ec90590a0766b3e/opentelemetry_exporter_otlp_proto_grpc-1.37.0-py3-none-any.whl", hash = "sha256:aee5104835bf7993b7ddaaf380b6467472abaedb1f1dbfcc54a52a7d781a3890", size = 19305, upload-time = "2025-09-11T10:28:45.776Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/5d/e3/6e320aeb24f951449e73867e53c55542bebbaf24faeee7623ef677d66736/opentelemetry_exporter_otlp_proto_http-1.37.0.tar.gz", hash = "sha256:e52e8600f1720d6de298419a802108a8f5afa63c96809ff83becb03f874e44ac", size = 17281, upload-time = "2025-09-11T10:29:04.844Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/e9/70d74a664d83976556cec395d6bfedd9b85ec1498b778367d5f93e373397/opentelemetry_exporter_otlp_proto_http-1.37.0-py3-none-any.whl", hash = "sha256:54c42b39945a6cc9d9a2a33decb876eabb9547e0dcb49df090122773447f1aef", size = 19576, upload-time = "2025-09-11T10:28:46.726Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.58b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/36/7c307d9be8ce4ee7beb86d7f1d31027f2a6a89228240405a858d6e4d64f9/opentelemetry_instrumentation-0.58b0.tar.gz", hash = "sha256:df640f3ac715a3e05af145c18f527f4422c6ab6c467e40bd24d2ad75a00cb705", size = 31549, upload-time = "2025-09-11T11:42:14.084Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/db/5ff1cd6c5ca1d12ecf1b73be16fbb2a8af2114ee46d4b0e6d4b23f4f4db7/opentelemetry_instrumentation-0.58b0-py3-none-any.whl", hash = "sha256:50f97ac03100676c9f7fc28197f8240c7290ca1baa12da8bfbb9a1de4f34cc45", size = 33019, upload-time = "2025-09-11T11:41:00.624Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-aiohttp-client" +version = "0.58b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4a/5d/de936b63cde9fba615ca0a3b1e1d2a986a400fe53890f52760df0955596e/opentelemetry_instrumentation_aiohttp_client-0.58b0.tar.gz", hash = "sha256:aab610f90a1be67ae66d3781bcad2b484774f043f73e8519156ca04d05019b2a", size = 15039, upload-time = "2025-09-11T11:42:15.502Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/2d/0fe2d0c4af6d23bf51dea5b2324fe06a91ec0b4e82a1ebbc4cbf3a8e1067/opentelemetry_instrumentation_aiohttp_client-0.58b0-py3-none-any.whl", hash = "sha256:14b805bd3da73579bb8484f798800af15bc8218dd775275c207cdf5b8abae586", size = 12377, upload-time = "2025-09-11T11:41:03.021Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-httpx" +version = "0.58b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/21/ba3a0106795337716e5e324f58fd3c04f5967e330c0408d0d68d873454db/opentelemetry_instrumentation_httpx-0.58b0.tar.gz", hash = "sha256:3cd747e7785a06d06bd58875e8eb11595337c98c4341f4fe176ff1f734a90db7", size = 19887, upload-time = "2025-09-11T11:42:37.926Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/e7/6dc8ee4881889993fa4a7d3da225e5eded239c975b9831eff392abd5a5e4/opentelemetry_instrumentation_httpx-0.58b0-py3-none-any.whl", hash = "sha256:d3f5a36c7fed08c245f1b06d1efd91f624caf2bff679766df80981486daaccdb", size = 15197, upload-time = 
"2025-09-11T11:41:32.66Z" }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/ea/a75f36b463a36f3c5a10c0b5292c58b31dbdde74f6f905d3d0ab2313987b/opentelemetry_proto-1.37.0.tar.gz", hash = "sha256:30f5c494faf66f77faeaefa35ed4443c5edb3b0aa46dad073ed7210e1a789538", size = 46151, upload-time = "2025-09-11T10:29:11.04Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/25/f89ea66c59bd7687e218361826c969443c4fa15dfe89733f3bf1e2a9e971/opentelemetry_proto-1.37.0-py3-none-any.whl", hash = "sha256:8ed8c066ae8828bbf0c39229979bdf583a126981142378a9cbe9d6fd5701c6e2", size = 72534, upload-time = "2025-09-11T10:28:56.831Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/62/2e0ca80d7fe94f0b193135375da92c640d15fe81f636658d2acf373086bc/opentelemetry_sdk-1.37.0.tar.gz", hash = "sha256:cc8e089c10953ded765b5ab5669b198bbe0af1b3f89f1007d19acd32dc46dda5", size = 170404, upload-time = "2025-09-11T10:29:11.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/62/9f4ad6a54126fb00f7ed4bb5034964c6e4f00fcd5a905e115bd22707e20d/opentelemetry_sdk-1.37.0-py3-none-any.whl", hash = "sha256:8f3c3c22063e52475c5dbced7209495c2c16723d016d39287dfc215d1771257c", size = 131941, upload-time = "2025-09-11T10:28:57.83Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.58b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/1b/90701d91e6300d9f2fb352153fb1721ed99ed1f6ea14fa992c756016e63a/opentelemetry_semantic_conventions-0.58b0.tar.gz", hash = "sha256:6bd46f51264279c433755767bb44ad00f1c9e2367e1b42af563372c5a6fa0c25", size = 129867, upload-time = "2025-09-11T10:29:12.597Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/90/68152b7465f50285d3ce2481b3aec2f82822e3f52e5152eeeaf516bab841/opentelemetry_semantic_conventions-0.58b0-py3-none-any.whl", hash = "sha256:5564905ab1458b96684db1340232729fce3b5375a06e140e8904c78e4f815b28", size = 207954, upload-time = "2025-09-11T10:28:59.218Z" }, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.58b0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/5f/02f31530faf50ef8a41ab34901c05cbbf8e9d76963ba2fb852b0b4065f4e/opentelemetry_util_http-0.58b0.tar.gz", hash = "sha256:de0154896c3472c6599311c83e0ecee856c4da1b17808d39fdc5cce5312e4d89", size = 9411, upload-time = "2025-09-11T11:43:05.602Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/a3/0a1430c42c6d34d8372a16c104e7408028f0c30270d8f3eb6cccf2e82934/opentelemetry_util_http-0.58b0-py3-none-any.whl", hash = "sha256:6c6b86762ed43025fbd593dc5f700ba0aa3e09711aedc36fd48a13b23d8cb1e7", size = 7652, upload-time = "2025-09-11T11:42:09.682Z" }, +] + [[package]] name = "overrides" version = "7.7.0" @@ -3144,6 +3428,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = 
"sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, ] +[[package]] +name = "protobuf" +version = "6.33.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/ff/64a6c8f420818bb873713988ca5492cba3a7946be57e027ac63495157d97/protobuf-6.33.0.tar.gz", hash = "sha256:140303d5c8d2037730c548f8c7b93b20bb1dc301be280c378b82b8894589c954", size = 443463, upload-time = "2025-10-15T20:39:52.159Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/ee/52b3fa8feb6db4a833dfea4943e175ce645144532e8a90f72571ad85df4e/protobuf-6.33.0-cp310-abi3-win32.whl", hash = "sha256:d6101ded078042a8f17959eccd9236fb7a9ca20d3b0098bbcb91533a5680d035", size = 425593, upload-time = "2025-10-15T20:39:40.29Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c6/7a465f1825872c55e0341ff4a80198743f73b69ce5d43ab18043699d1d81/protobuf-6.33.0-cp310-abi3-win_amd64.whl", hash = "sha256:9a031d10f703f03768f2743a1c403af050b6ae1f3480e9c140f39c45f81b13ee", size = 436882, upload-time = "2025-10-15T20:39:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a9/b6eee662a6951b9c3640e8e452ab3e09f117d99fc10baa32d1581a0d4099/protobuf-6.33.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:905b07a65f1a4b72412314082c7dbfae91a9e8b68a0cc1577515f8df58ecf455", size = 427521, upload-time = "2025-10-15T20:39:43.803Z" }, + { url = "https://files.pythonhosted.org/packages/10/35/16d31e0f92c6d2f0e77c2a3ba93185130ea13053dd16200a57434c882f2b/protobuf-6.33.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e0697ece353e6239b90ee43a9231318302ad8353c70e6e45499fa52396debf90", size = 324445, upload-time = "2025-10-15T20:39:44.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/eb/2a981a13e35cda8b75b5585aaffae2eb904f8f351bdd3870769692acbd8a/protobuf-6.33.0-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:e0a1715e4f27355afd9570f3ea369735afc853a6c3951a6afe1f80d8569ad298", size = 339159, upload-time = "2025-10-15T20:39:46.186Z" }, + { url = "https://files.pythonhosted.org/packages/21/51/0b1cbad62074439b867b4e04cc09b93f6699d78fd191bed2bbb44562e077/protobuf-6.33.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:35be49fd3f4fefa4e6e2aacc35e8b837d6703c37a2168a55ac21e9b1bc7559ef", size = 323172, upload-time = "2025-10-15T20:39:47.465Z" }, + { url = "https://files.pythonhosted.org/packages/07/d1/0a28c21707807c6aacd5dc9c3704b2aa1effbf37adebd8caeaf68b17a636/protobuf-6.33.0-py3-none-any.whl", hash = "sha256:25c9e1963c6734448ea2d308cfa610e692b801304ba0908d7bfa564ac5132995", size = 170477, upload-time = "2025-10-15T20:39:51.311Z" }, +] + [[package]] name = "psutil" version = "7.1.0"