From 47b4d8efdadef3901187109ae626d19c4046b78a Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 6 Dec 2025 09:30:24 +0530 Subject: [PATCH 01/22] Add support for Dynatrace backend --- .env.example | 4 + README.md | 75 ++- src/opentelemetry_mcp/backends/dynatrace.py | 585 ++++++++++++++++++++ src/opentelemetry_mcp/config.py | 10 +- src/opentelemetry_mcp/server.py | 12 +- tests/backends/test_dynatrace.py | 306 ++++++++++ uv.lock | 4 +- 7 files changed, 972 insertions(+), 24 deletions(-) create mode 100644 src/opentelemetry_mcp/backends/dynatrace.py create mode 100644 tests/backends/test_dynatrace.py diff --git a/.env.example b/.env.example index c755b4f..123bcf3 100644 --- a/.env.example +++ b/.env.example @@ -24,3 +24,7 @@ LOG_LEVEL=INFO # Optional: Maximum traces per query (default: 100, max: 1000) MAX_TRACES_PER_QUERY=100 +# Optional: Dynatrace specific configuration +BACKEND_TYPE=dynatrace +BACKEND_URL=https://abc12345.live.dynatrace.com +BACKEND_ \ No newline at end of file diff --git a/README.md b/README.md index 501904f..0c9416e 100644 --- a/README.md +++ b/README.md @@ -440,7 +440,7 @@ pip install opentelemetry-mcp ### Core Capabilities -- **🔌 Multiple Backend Support** - Connect to Jaeger, Grafana Tempo, or Traceloop +- **🔌 Multiple Backend Support** - Connect to Jaeger, Grafana Tempo, Traceloop, or Dynatrace - **🤖 LLM-First Design** - Specialized tools for analyzing AI application traces - **🔍 Advanced Filtering** - Generic filter system with powerful operators - **📊 Token Analytics** - Track and aggregate LLM token usage across models and services @@ -463,14 +463,14 @@ pip install opentelemetry-mcp ### Backend Support Matrix -| Feature | Jaeger | Tempo | Traceloop | -| ---------------- | :----: | :---: | :-------: | -| Search traces | ✓ | ✓ | ✓ | -| Advanced filters | ✓ | ✓ | ✓ | -| Span search | ✓\* | ✓ | ✓ | -| Token tracking | ✓ | ✓ | ✓ | -| Error traces | ✓ | ✓ | ✓ | -| LLM tools | ✓ | ✓ | ✓ | +| Feature | Jaeger | Tempo | Traceloop | Dynatrace | +| ---------------- | :----: | :---: | :-------: | :-------: | +| Search traces | ✓ | ✓ | ✓ | ✓ | +| Advanced filters | ✓ | ✓ | ✓ | ✓ | +| Span search | ✓\* | ✓ | ✓ | ✓ | +| Token tracking | ✓ | ✓ | ✓ | ✓ | +| Error traces | ✓ | ✓ | ✓ | ✓ | +| LLM tools | ✓ | ✓ | ✓ | ✓ | \* Jaeger requires `service_name` parameter for span search @@ -496,11 +496,12 @@ uv pip install -e ".[dev]" ### Supported Backends -| Backend | Type | URL Example | Notes | -| ------------- | ----------- | --------------------------- | -------------------------- | -| **Jaeger** | Local | `http://localhost:16686` | Popular open-source option | -| **Tempo** | Local/Cloud | `http://localhost:3200` | Grafana's trace backend | -| **Traceloop** | Cloud | `https://api.traceloop.com` | Requires API key | +| Backend | Type | URL Example | Notes | +| ------------- | ----------- | ---------------------------------------- | -------------------------- | +| **Jaeger** | Local | `http://localhost:16686` | Popular open-source option | +| **Tempo** | Local/Cloud | `http://localhost:3200` | Grafana's trace backend | +| **Traceloop** | Cloud | `https://api.traceloop.com` | Requires API key | +| **Dynatrace** | Cloud | `https://{env-id}.live.dynatrace.com` | Requires API token | ### Quick Configuration @@ -525,7 +526,7 @@ opentelemetry-mcp --backend traceloop --url https://api.traceloop.com --api-key | Variable | Type | Default | Description | | ---------------------- | ------- | -------- | -------------------------------------------------- | -| `BACKEND_TYPE` | string | `jaeger` | 
Backend type: `jaeger`, `tempo`, or `traceloop` | +| `BACKEND_TYPE` | string | `jaeger` | Backend type: `jaeger`, `tempo`, `traceloop`, or `dynatrace` | | `BACKEND_URL` | URL | - | Backend API endpoint (required) | | `BACKEND_API_KEY` | string | - | API key (required for Traceloop) | | `BACKEND_TIMEOUT` | integer | `30` | Request timeout in seconds | @@ -581,6 +582,50 @@ BACKEND_API_KEY=your_api_key_here > **Note:** The API key contains project information. The backend uses a project slug of `"default"` and Traceloop resolves the actual project/environment from the API key. +### Dynatrace + +```bash +BACKEND_TYPE=dynatrace +BACKEND_URL=https://abc12345.live.dynatrace.com +BACKEND_API_KEY=dt0c01.ABC123... +``` + +**Configuration Details:** +- **BACKEND_URL**: Your Dynatrace environment URL (format: `https://{your-environment-id}.live.dynatrace.com`) +- **BACKEND_API_KEY**: Dynatrace API token with trace read permissions + +**Creating a Dynatrace API Token:** +1. Log in to your Dynatrace environment +2. Go to **Settings** → **Integration** → **Dynatrace API** +3. Click **Generate new token** +4. Select scopes: **Read traces** (and optionally **Read entities** for service discovery) +5. Copy the token and use it as `BACKEND_API_KEY` + +**Claude Desktop Integration Example:** + +```json +{ + "mcpServers": { + "opentelemetry-mcp": { + "command": "pipx", + "args": ["run", "opentelemetry-mcp"], + "env": { + "BACKEND_TYPE": "dynatrace", + "BACKEND_URL": "https://abc12345.live.dynatrace.com", + "BACKEND_API_KEY": "dt0c01.ABC123..." + } + } + } +} +``` + +**Troubleshooting Dynatrace Connection:** + +- **401 Unauthorized**: Verify your API token has the correct permissions (Read traces scope) +- **404 Not Found**: Check that your BACKEND_URL is correct (should include environment ID) +- **Connection Timeout**: Ensure your network can reach the Dynatrace environment +- **No Traces Found**: Verify that OpenTelemetry traces are being sent to Dynatrace and check the time range of your queries + --- diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py new file mode 100644 index 0000000..c458188 --- /dev/null +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -0,0 +1,585 @@ +"""Dynatrace backend implementation for querying OpenTelemetry traces.""" + +import logging +from datetime import datetime, timedelta +from typing import Any, Literal + +from opentelemetry_mcp.attributes import HealthCheckResponse, SpanAttributes, SpanEvent +from opentelemetry_mcp.backends.base import BaseBackend +from opentelemetry_mcp.backends.filter_engine import FilterEngine +from opentelemetry_mcp.models import ( + Filter, + FilterOperator, + FilterType, + SpanData, + SpanQuery, + TraceData, + TraceQuery, +) + +logger = logging.getLogger(__name__) + + +class DynatraceBackend(BaseBackend): + """Dynatrace API backend implementation for OpenTelemetry traces. + + Uses Dynatrace Trace API v2 and Distributed Traces API to query traces. + Supports OpenLLMetry semantic conventions (gen_ai.* attributes). + """ + + def _create_headers(self) -> dict[str, str]: + """Create headers for Dynatrace API requests. + + Returns: + Dictionary with Bearer token authorization + """ + headers = {"Content-Type": "application/json"} + if self.api_key: + headers["Authorization"] = f"Api-Token {self.api_key}" + return headers + + def get_supported_operators(self) -> set[FilterOperator]: + """Get natively supported operators via Dynatrace API. 
+ + Dynatrace Trace API supports basic filtering via query parameters. + Most advanced filtering will be done client-side. + + Returns: + Set of supported FilterOperator values + """ + return { + FilterOperator.EQUALS, # Via query parameters + } + + async def search_traces(self, query: TraceQuery) -> list[TraceData]: + """Search for traces using Dynatrace Trace API v2. + + Args: + query: Trace query parameters + + Returns: + List of matching traces + + Raises: + httpx.HTTPError: If API request fails + """ + logger.debug(f"Searching traces with query: {query}") + + # Get all filters + all_filters = query.get_all_filters() + + # Dynatrace API supports limited filtering via query parameters + # Most filters will be applied client-side + supported_fields = {"service.name"} # Service filtering via API + supported_operators = self.get_supported_operators() + + native_filters = [ + f + for f in all_filters + if f.field in supported_fields and f.operator in supported_operators + ] + client_filters = [f for f in all_filters if f not in native_filters] + + if client_filters: + logger.info( + f"Will apply {len(client_filters)} filters client-side: " + f"{[(f.field, f.operator.value) for f in client_filters]}" + ) + + # Build query parameters + params: dict[str, Any] = { + "limit": query.limit, + } + + # Add time range (Dynatrace uses milliseconds since epoch) + if query.start_time: + params["from"] = int(query.start_time.timestamp() * 1000) + else: + # Default to last 24 hours if not specified + params["from"] = int((datetime.now() - timedelta(days=1)).timestamp() * 1000) + + if query.end_time: + params["to"] = int(query.end_time.timestamp() * 1000) + else: + params["to"] = int(datetime.now().timestamp() * 1000) + + # Add service filter if available + if query.service_name: + params["service"] = query.service_name + + # Add operation filter if available + if query.operation_name: + params["operation"] = query.operation_name + + # Add duration filters + if query.min_duration_ms: + params["minDuration"] = query.min_duration_ms + if query.max_duration_ms: + params["maxDuration"] = query.max_duration_ms + + # Add error filter + if query.has_error is not None: + params["error"] = query.has_error + + logger.debug(f"Querying Dynatrace API with params: {params}") + + # Query Dynatrace Trace API v2 + # Endpoint: /api/v2/traces + response = await self.client.get("/api/v2/traces", params=params) + response.raise_for_status() + + data = response.json() + traces = [] + + # Parse trace results + trace_results = data.get("traces", []) if isinstance(data, dict) else data + + # Limit the number of traces to fetch details for + max_traces_to_fetch = min(len(trace_results), 50) + + if len(trace_results) > max_traces_to_fetch: + logger.warning( + f"Limiting trace fetch to {max_traces_to_fetch} out of {len(trace_results)} " + f"results to avoid excessive API calls" + ) + + for trace_result in trace_results[:max_traces_to_fetch]: + trace_id = trace_result.get("traceId") or trace_result.get("trace_id") + if trace_id: + try: + # Fetch full trace details + trace = await self.get_trace(str(trace_id)) + if trace: + traces.append(trace) + except Exception as e: + logger.warning(f"Failed to fetch trace {trace_id}: {e}") + + # Apply client-side filters + if client_filters: + traces = FilterEngine.apply_filters(traces, client_filters) + + return traces + + async def search_spans(self, query: SpanQuery) -> list[SpanData]: + """Search for individual spans using Dynatrace API. 
+ + Dynatrace doesn't have a dedicated spans API, so we search for traces + and then flatten to get individual spans matching the query. + + Args: + query: Span query parameters + + Returns: + List of matching spans (flattened from traces) + + Raises: + httpx.HTTPError: If API request fails + """ + logger.debug(f"Searching spans with query: {query}") + + # Get all filters + all_filters = query.get_all_filters() + + # For span queries, most filtering will be client-side + supported_fields = {"service.name"} + supported_operators = self.get_supported_operators() + + native_filters = [ + f + for f in all_filters + if f.field in supported_fields and f.operator in supported_operators + ] + client_filters = [f for f in all_filters if f not in native_filters] + + if client_filters: + logger.info( + f"Will apply {len(client_filters)} span filters client-side: " + f"{[(f.field, f.operator.value) for f in client_filters]}" + ) + + # Convert SpanQuery to TraceQuery for Dynatrace API + trace_query = TraceQuery( + service_name=query.service_name, + operation_name=query.operation_name, + start_time=query.start_time, + end_time=query.end_time, + min_duration_ms=query.min_duration_ms, + max_duration_ms=query.max_duration_ms, + tags=query.tags, + limit=query.limit * 2, # Fetch more traces to ensure we get enough spans + has_error=query.has_error, + gen_ai_system=query.gen_ai_system, + gen_ai_request_model=query.gen_ai_request_model, + gen_ai_response_model=query.gen_ai_response_model, + filters=query.filters, + ) + + # Search traces + traces = await self.search_traces(trace_query) + + # Flatten spans from all traces + all_spans: list[SpanData] = [] + for trace in traces: + all_spans.extend(trace.spans) + + # Apply client-side filters to spans + if client_filters: + all_spans = FilterEngine.apply_filters(all_spans, client_filters) + + # Limit the number of spans returned + return all_spans[: query.limit] + + async def get_trace(self, trace_id: str) -> TraceData: + """Get a specific trace by ID from Dynatrace. + + Args: + trace_id: Trace identifier + + Returns: + Complete trace data with all spans + + Raises: + httpx.HTTPError: If trace not found or API request fails + """ + logger.debug(f"Fetching trace: {trace_id}") + + # Query Dynatrace Distributed Traces API + # Endpoint: /api/v2/traces/{traceId} + response = await self.client.get(f"/api/v2/traces/{trace_id}") + response.raise_for_status() + + data = response.json() + + # Parse trace data + trace = self._parse_dynatrace_trace(data, trace_id) + if not trace: + raise ValueError(f"Failed to parse trace: {trace_id}") + + return trace + + async def list_services(self) -> list[str]: + """List all available services from Dynatrace. + + Uses the services endpoint or extracts from trace search results. 
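        Illustrative response shape handled by the primary ``/api/v2/services``
        path (an assumption about the payload, not a documented schema; the
        service names are placeholders):

            {"services": [{"name": "checkout-service"}, {"name": "llm-gateway"}]}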
+ + Returns: + List of service names + + Raises: + httpx.HTTPError: If API request fails + """ + logger.debug("Listing services") + + try: + # Try to use the services endpoint if available + response = await self.client.get("/api/v2/services") + response.raise_for_status() + data = response.json() + + services = [] + if isinstance(data, list): + services = [str(s.get("name", s)) for s in data if s] + elif isinstance(data, dict): + services_data = data.get("services", []) or data.get("data", []) + services = [str(s.get("name", s)) for s in services_data if s] + + if services: + return sorted(list(set(services))) + except Exception as e: + logger.debug(f"Services endpoint not available, using trace search: {e}") + + # Fallback: Extract services from trace search + # Search for traces in the last 24 hours to discover services + from datetime import timedelta + + params = { + "from": int((datetime.now() - timedelta(days=1)).timestamp() * 1000), + "to": int(datetime.now().timestamp() * 1000), + "limit": 1000, + } + + response = await self.client.get("/api/v2/traces", params=params) + response.raise_for_status() + + data = response.json() + trace_results = data.get("traces", []) if isinstance(data, dict) else data + + services_set = set() + for trace_result in trace_results: + service_name = trace_result.get("serviceName") or trace_result.get("service") + if service_name: + services_set.add(str(service_name)) + + services = sorted(list(services_set)) + logger.debug(f"Found {len(services)} unique services from {len(trace_results)} traces") + return services + + async def get_service_operations(self, service_name: str) -> list[str]: + """Get all operations for a specific service. + + Args: + service_name: Service name + + Returns: + List of operation names + + Raises: + httpx.HTTPError: If query fails + """ + logger.debug(f"Getting operations for service: {service_name}") + + # Search for traces from this service to discover operations + from datetime import timedelta + + params = { + "service": service_name, + "from": int((datetime.now() - timedelta(days=1)).timestamp() * 1000), + "to": int(datetime.now().timestamp() * 1000), + "limit": 1000, + } + + response = await self.client.get("/api/v2/traces", params=params) + response.raise_for_status() + + data = response.json() + trace_results = data.get("traces", []) if isinstance(data, dict) else data + + operations = set() + for trace_result in trace_results: + operation_name = trace_result.get("operationName") or trace_result.get("operation") + if operation_name: + operations.add(str(operation_name)) + + return sorted(list(operations)) + + async def health_check(self) -> HealthCheckResponse: + """Check Dynatrace backend health. + + Returns: + Health status information + + Raises: + httpx.HTTPError: If backend is unreachable + """ + logger.debug("Checking backend health") + + try: + # Try to list services as a health check + services = await self.list_services() + return HealthCheckResponse( + status="healthy", + backend="dynatrace", + url=self.url, + service_count=len(services), + ) + except Exception as e: + return HealthCheckResponse( + status="unhealthy", + backend="dynatrace", + url=self.url, + error=str(e), + ) + + def _parse_dynatrace_trace( + self, trace_data: dict[str, Any], trace_id: str + ) -> TraceData | None: + """Parse Dynatrace trace format to TraceData. 
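
        Illustrative input shape (assumed from the fallbacks handled below; either
        a top-level ``spans`` list or one nested under ``data`` is accepted):

            {"spans": [{...}, {...}]}  # or {"data": {"spans": [...]}}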
+ + Args: + trace_data: Raw Dynatrace trace data + trace_id: Trace identifier + + Returns: + Parsed TraceData or None if parsing fails + """ + try: + # Dynatrace may return traces in different formats + # Handle both single trace and trace with spans + spans_data = trace_data.get("spans", []) + if not spans_data: + # Try alternative format + spans_data = trace_data.get("data", {}).get("spans", []) + + if not spans_data: + logger.warning(f"Trace {trace_id} has no spans") + return None + + # Parse all spans + spans: list[SpanData] = [] + for span_data in spans_data: + span = self._parse_dynatrace_span(span_data, trace_id) + if span: + spans.append(span) + + if not spans: + logger.warning(f"No valid spans in trace {trace_id}") + return None + + # Find root span (no parent) + root_spans = [s for s in spans if not s.parent_span_id] + root_span = root_spans[0] if root_spans else spans[0] + + # Calculate trace duration + start_times = [s.start_time for s in spans] + end_times = [ + datetime.fromtimestamp( + s.start_time.timestamp() + (s.duration_ms / 1000), tz=s.start_time.tzinfo + ) + for s in spans + ] + trace_start = min(start_times) + trace_end = max(end_times) + trace_duration_ms = (trace_end - trace_start).total_seconds() * 1000 + + # Determine overall status (ERROR if any span has error) + trace_status: Literal["OK", "ERROR", "UNSET"] = "OK" + if any(span.has_error for span in spans): + trace_status = "ERROR" + + return TraceData( + trace_id=trace_id, + spans=spans, + start_time=trace_start, + duration_ms=trace_duration_ms, + service_name=root_span.service_name, + root_operation=root_span.operation_name, + status=trace_status, + ) + + except Exception as e: + logger.error(f"Error parsing trace: {e}") + return None + + def _parse_dynatrace_span( + self, span_data: dict[str, Any], trace_id: str + ) -> SpanData | None: + """Parse Dynatrace span format to SpanData. 
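
        Illustrative input (key names assumed from the fallbacks handled below,
        not from a documented Dynatrace schema; values are placeholders):

            {
                "spanId": "a1b2c3",
                "operationName": "chat openai",
                "startTime": 1733465400000,   # ms since epoch, or an ISO-8601 string
                "duration": 812,              # milliseconds
                "serviceName": "llm-gateway",
                "attributes": {"gen_ai.system": "openai", "gen_ai.request.model": "gpt-4"},
            }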
+ + Args: + span_data: Raw Dynatrace span data + trace_id: Trace identifier + + Returns: + Parsed SpanData or None if parsing fails + """ + try: + span_id_raw = span_data.get("spanId") or span_data.get("span_id") + operation_name_raw = span_data.get("operationName") or span_data.get("name") + + if not all([span_id_raw, operation_name_raw]): + logger.warning("Span missing required fields") + return None + + span_id = str(span_id_raw) + operation_name = str(operation_name_raw) + + # Parse timestamps (Dynatrace uses milliseconds since epoch) + start_time_ms = span_data.get("startTime", span_data.get("start_time", 0)) + if isinstance(start_time_ms, str): + # Try to parse ISO format + try: + start_time = datetime.fromisoformat(start_time_ms.replace("Z", "+00:00")) + except Exception: + start_time = datetime.fromtimestamp(int(start_time_ms) / 1000) + else: + start_time = datetime.fromtimestamp(start_time_ms / 1000) + + duration_ms = span_data.get("duration", span_data.get("duration_ms", 0)) + if isinstance(duration_ms, str): + duration_ms = float(duration_ms) + + # Get service name + service_name = ( + span_data.get("serviceName") + or span_data.get("service") + or span_data.get("service_name", "unknown") + ) + + # Get parent span ID + parent_span_id = span_data.get("parentSpanId") or span_data.get("parent_span_id") + if parent_span_id: + parent_span_id = str(parent_span_id) + + # Parse attributes + attributes_dict: dict[str, Any] = {} + if "attributes" in span_data: + attrs = span_data["attributes"] + if isinstance(attrs, dict): + attributes_dict.update(attrs) + elif isinstance(attrs, list): + # Handle list of key-value pairs + for attr in attrs: + if isinstance(attr, dict): + key = attr.get("key") + value = attr.get("value") + if key: + attributes_dict[key] = value + + # Also check for tags (alternative format) + if "tags" in span_data: + tags = span_data["tags"] + if isinstance(tags, dict): + attributes_dict.update(tags) + + # Create strongly-typed SpanAttributes + span_attributes = SpanAttributes(**attributes_dict) + + # Determine span status + status: Literal["OK", "ERROR", "UNSET"] = "UNSET" + error_tag = span_attributes.error + status_code = span_attributes.otel_status_code + + # Check for error indicators + if error_tag is True or status_code == "ERROR": + status = "ERROR" + elif status_code == "OK": + status = "OK" + elif span_data.get("error", False): + status = "ERROR" + + # Parse events/logs + events: list[SpanEvent] = [] + for event_data in span_data.get("events", span_data.get("logs", [])): + event_attrs: dict[str, str | int | float | bool] = {} + if isinstance(event_data, dict): + if "attributes" in event_data: + event_attrs.update(event_data["attributes"]) + elif "fields" in event_data: + # Handle Jaeger-style fields + for field in event_data["fields"]: + if isinstance(field, dict): + key = field.get("key") + value = field.get("value") + if key: + event_attrs[key] = value + + event_name = event_data.get("name", "event") if isinstance(event_data, dict) else "event" + event_timestamp = ( + event_data.get("timestamp", 0) if isinstance(event_data, dict) else 0 + ) + + events.append( + SpanEvent( + name=event_name, + timestamp=event_timestamp, + attributes=event_attrs, + ) + ) + + return SpanData( + trace_id=trace_id, + span_id=span_id, + parent_span_id=parent_span_id, + operation_name=operation_name, + service_name=str(service_name), + start_time=start_time, + duration_ms=duration_ms, + status=status, + attributes=span_attributes, + events=events, + ) + + except Exception as e: + 
logger.error(f"Error parsing span: {e}") + return None + diff --git a/src/opentelemetry_mcp/config.py b/src/opentelemetry_mcp/config.py index ead8b8c..2971459 100644 --- a/src/opentelemetry_mcp/config.py +++ b/src/opentelemetry_mcp/config.py @@ -16,7 +16,7 @@ class BackendConfig(BaseModel): """Configuration for OpenTelemetry trace backend.""" - type: Literal["jaeger", "tempo", "traceloop"] + type: Literal["jaeger", "tempo", "traceloop", "dynatrace"] url: HttpUrl api_key: str | None = Field(default=None, exclude=True) timeout: float = Field(default=30.0, gt=0, le=300) @@ -35,9 +35,9 @@ def from_env(cls) -> "BackendConfig": """Load configuration from environment variables.""" backend_type = os.getenv("BACKEND_TYPE", "jaeger") backend_url = os.getenv("BACKEND_URL", "http://localhost:16686") - if backend_type not in ["jaeger", "tempo", "traceloop"]: + if backend_type not in ["jaeger", "tempo", "traceloop", "dynatrace"]: raise ValueError( - f"Invalid BACKEND_TYPE: {backend_type}. Must be one of: jaeger, tempo, traceloop" + f"Invalid BACKEND_TYPE: {backend_type}. Must be one of: jaeger, tempo, traceloop, dynatrace" ) # Parse environments from comma-separated string @@ -102,10 +102,10 @@ def apply_cli_overrides( ) -> None: """Apply CLI argument overrides to configuration.""" if backend_type: - if backend_type not in ["jaeger", "tempo", "traceloop"]: + if backend_type not in ["jaeger", "tempo", "traceloop", "dynatrace"]: raise ValueError( f"Invalid backend type: {backend_type}. " - "Must be one of: jaeger, tempo, traceloop" + "Must be one of: jaeger, tempo, traceloop, dynatrace" ) self.backend.type = backend_type # type: ignore diff --git a/src/opentelemetry_mcp/server.py b/src/opentelemetry_mcp/server.py index 7ab57ea..0ea5445 100644 --- a/src/opentelemetry_mcp/server.py +++ b/src/opentelemetry_mcp/server.py @@ -9,6 +9,7 @@ from fastmcp import FastMCP from opentelemetry_mcp.backends.base import BaseBackend +from opentelemetry_mcp.backends.dynatrace import DynatraceBackend from opentelemetry_mcp.backends.jaeger import JaegerBackend from opentelemetry_mcp.backends.tempo import TempoBackend from opentelemetry_mcp.backends.traceloop import TraceloopBackend @@ -94,6 +95,13 @@ def _create_backend(config: ServerConfig) -> BaseBackend: timeout=backend_config.timeout, environments=backend_config.environments, ) + elif backend_config.type == "dynatrace": + logger.info(f"Initializing Dynatrace backend: {backend_config.url}") + return DynatraceBackend( + url=str(backend_config.url), + api_key=backend_config.api_key, + timeout=backend_config.timeout, + ) else: raise ValueError(f"Unsupported backend type: {backend_config.type}") @@ -595,7 +603,7 @@ async def list_llm_tools_tool( @click.command() @click.option( "--backend", - type=click.Choice(["jaeger", "tempo", "traceloop"]), + type=click.Choice(["jaeger", "tempo", "traceloop", "dynatrace"]), help="Backend type (overrides BACKEND_TYPE env var)", ) @click.option( @@ -642,7 +650,7 @@ def main( ) -> None: """Opentelemetry MCP Server - Query OpenTelemetry traces from LLM applications. - Supports multiple backends: Jaeger, Tempo, and Traceloop. + Supports multiple backends: Jaeger, Tempo, Traceloop, and Dynatrace. Configuration can be provided via environment variables or CLI arguments. 
Transport options: diff --git a/tests/backends/test_dynatrace.py b/tests/backends/test_dynatrace.py new file mode 100644 index 0000000..2d8f0bd --- /dev/null +++ b/tests/backends/test_dynatrace.py @@ -0,0 +1,306 @@ +"""Unit tests for Dynatrace backend implementation.""" + +import pytest +from datetime import datetime, timedelta +from unittest.mock import AsyncMock, MagicMock, patch + +from opentelemetry_mcp.backends.dynatrace import DynatraceBackend +from opentelemetry_mcp.models import Filter, FilterOperator, FilterType, SpanQuery, TraceQuery + + +class TestDynatraceBackend: + """Test Dynatrace backend implementation.""" + + @pytest.fixture + def backend(self) -> DynatraceBackend: + """Create a Dynatrace backend instance for testing.""" + return DynatraceBackend( + url="https://abc12345.live.dynatrace.com", + api_key="dt0c01.ABC123", + timeout=30.0, + ) + + def test_create_headers(self, backend: DynatraceBackend) -> None: + """Test header creation with API key.""" + headers = backend._create_headers() + assert "Authorization" in headers + assert headers["Authorization"] == "Api-Token dt0c01.ABC123" + assert headers["Content-Type"] == "application/json" + + def test_create_headers_no_api_key(self) -> None: + """Test header creation without API key.""" + backend = DynatraceBackend(url="https://abc12345.live.dynatrace.com") + headers = backend._create_headers() + assert "Authorization" not in headers + assert headers["Content-Type"] == "application/json" + + def test_get_supported_operators(self, backend: DynatraceBackend) -> None: + """Test supported operators.""" + operators = backend.get_supported_operators() + assert FilterOperator.EQUALS in operators + + @pytest.mark.asyncio + async def test_search_traces_basic(self, backend: DynatraceBackend) -> None: + """Test basic trace search.""" + # Mock trace search response + mock_traces_response = { + "traces": [ + {"traceId": "trace1", "serviceName": "test-service"}, + {"traceId": "trace2", "serviceName": "test-service"}, + ] + } + + # Mock get_trace responses + mock_trace1 = { + "spans": [ + { + "spanId": "span1", + "operationName": "test_op", + "startTime": int((datetime.now() - timedelta(minutes=5)).timestamp() * 1000), + "duration": 1000, + "serviceName": "test-service", + "attributes": {}, + } + ] + } + + with patch.object(backend.client, "get") as mock_get: + # First call: search_traces + mock_response1 = MagicMock() + mock_response1.json.return_value = mock_traces_response + mock_response1.raise_for_status = MagicMock() + + # Second and third calls: get_trace for each trace + mock_response2 = MagicMock() + mock_response2.json.return_value = mock_trace1 + mock_response2.raise_for_status = MagicMock() + + mock_get.side_effect = [mock_response1, mock_response2, mock_response2] + + query = TraceQuery(service_name="test-service", limit=10) + traces = await backend.search_traces(query) + + assert len(traces) > 0 + assert all(trace.service_name == "test-service" for trace in traces) + + @pytest.mark.asyncio + async def test_get_trace(self, backend: DynatraceBackend) -> None: + """Test getting a specific trace by ID.""" + trace_id = "test-trace-id" + mock_trace_data = { + "spans": [ + { + "spanId": "span1", + "operationName": "test_op", + "startTime": int(datetime.now().timestamp() * 1000), + "duration": 1000, + "serviceName": "test-service", + "attributes": {}, + } + ] + } + + with patch.object(backend.client, "get") as mock_get: + mock_response = MagicMock() + mock_response.json.return_value = mock_trace_data + mock_response.raise_for_status 
= MagicMock() + mock_get.return_value = mock_response + + trace = await backend.get_trace(trace_id) + + assert trace.trace_id == trace_id + assert len(trace.spans) > 0 + assert trace.service_name == "test-service" + + @pytest.mark.asyncio + async def test_list_services(self, backend: DynatraceBackend) -> None: + """Test listing services.""" + # First try services endpoint + mock_services_response = { + "services": [ + {"name": "service1"}, + {"name": "service2"}, + ] + } + + with patch.object(backend.client, "get") as mock_get: + mock_response = MagicMock() + mock_response.json.return_value = mock_services_response + mock_response.raise_for_status = MagicMock() + mock_get.return_value = mock_response + + services = await backend.list_services() + + assert len(services) > 0 + assert "service1" in services + assert "service2" in services + + @pytest.mark.asyncio + async def test_list_services_fallback(self, backend: DynatraceBackend) -> None: + """Test listing services with fallback to trace search.""" + # First call fails (services endpoint not available) + # Second call succeeds (trace search) + mock_traces_response = { + "traces": [ + {"traceId": "trace1", "serviceName": "service1"}, + {"traceId": "trace2", "serviceName": "service2"}, + ] + } + + with patch.object(backend.client, "get") as mock_get: + # First call fails + mock_response1 = MagicMock() + mock_response1.raise_for_status.side_effect = Exception("Not found") + + # Second call succeeds + mock_response2 = MagicMock() + mock_response2.json.return_value = mock_traces_response + mock_response2.raise_for_status = MagicMock() + + mock_get.side_effect = [mock_response1, mock_response2] + + services = await backend.list_services() + + assert len(services) > 0 + assert "service1" in services + assert "service2" in services + + @pytest.mark.asyncio + async def test_get_service_operations(self, backend: DynatraceBackend) -> None: + """Test getting operations for a service.""" + mock_traces_response = { + "traces": [ + {"traceId": "trace1", "operationName": "op1"}, + {"traceId": "trace2", "operationName": "op2"}, + ] + } + + with patch.object(backend.client, "get") as mock_get: + mock_response = MagicMock() + mock_response.json.return_value = mock_traces_response + mock_response.raise_for_status = MagicMock() + mock_get.return_value = mock_response + + operations = await backend.get_service_operations("test-service") + + assert len(operations) > 0 + assert "op1" in operations + assert "op2" in operations + + @pytest.mark.asyncio + async def test_search_spans(self, backend: DynatraceBackend) -> None: + """Test searching for spans.""" + # Mock trace search response + mock_traces_response = { + "traces": [ + {"traceId": "trace1", "serviceName": "test-service"}, + ] + } + + # Mock get_trace response + mock_trace = { + "spans": [ + { + "spanId": "span1", + "operationName": "test_op", + "startTime": int(datetime.now().timestamp() * 1000), + "duration": 1000, + "serviceName": "test-service", + "attributes": {}, + } + ] + } + + with patch.object(backend.client, "get") as mock_get: + mock_response1 = MagicMock() + mock_response1.json.return_value = mock_traces_response + mock_response1.raise_for_status = MagicMock() + + mock_response2 = MagicMock() + mock_response2.json.return_value = mock_trace + mock_response2.raise_for_status = MagicMock() + + mock_get.side_effect = [mock_response1, mock_response2] + + query = SpanQuery(service_name="test-service", limit=10) + spans = await backend.search_spans(query) + + assert len(spans) > 0 + assert 
all(span.service_name == "test-service" for span in spans) + + @pytest.mark.asyncio + async def test_health_check_healthy(self, backend: DynatraceBackend) -> None: + """Test health check when backend is healthy.""" + mock_services_response = {"services": [{"name": "service1"}]} + + with patch.object(backend.client, "get") as mock_get: + mock_response = MagicMock() + mock_response.json.return_value = mock_services_response + mock_response.raise_for_status = MagicMock() + mock_get.return_value = mock_response + + health = await backend.health_check() + + assert health.status == "healthy" + assert health.backend == "dynatrace" + assert health.service_count == 1 + + @pytest.mark.asyncio + async def test_health_check_unhealthy(self, backend: DynatraceBackend) -> None: + """Test health check when backend is unhealthy.""" + with patch.object(backend.client, "get") as mock_get: + mock_get.side_effect = Exception("Connection failed") + + health = await backend.health_check() + + assert health.status == "unhealthy" + assert health.backend == "dynatrace" + assert health.error is not None + + def test_parse_dynatrace_span(self, backend: DynatraceBackend) -> None: + """Test parsing Dynatrace span data.""" + trace_id = "test-trace" + span_data = { + "spanId": "span1", + "operationName": "test_op", + "startTime": int(datetime.now().timestamp() * 1000), + "duration": 1000, + "serviceName": "test-service", + "attributes": { + "gen_ai.system": "openai", + "gen_ai.request.model": "gpt-4", + }, + } + + span = backend._parse_dynatrace_span(span_data, trace_id) + + assert span is not None + assert span.span_id == "span1" + assert span.operation_name == "test_op" + assert span.service_name == "test-service" + assert span.trace_id == trace_id + assert span.attributes.gen_ai_system == "openai" + + def test_parse_dynatrace_trace(self, backend: DynatraceBackend) -> None: + """Test parsing Dynatrace trace data.""" + trace_id = "test-trace" + trace_data = { + "spans": [ + { + "spanId": "span1", + "operationName": "test_op", + "startTime": int(datetime.now().timestamp() * 1000), + "duration": 1000, + "serviceName": "test-service", + "attributes": {}, + } + ] + } + + trace = backend._parse_dynatrace_trace(trace_data, trace_id) + + assert trace is not None + assert trace.trace_id == trace_id + assert len(trace.spans) == 1 + assert trace.service_name == "test-service" + diff --git a/uv.lock b/uv.lock index f4aec13..9813e91 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.11" resolution-markers = [ "platform_python_implementation != 'PyPy'", @@ -931,7 +931,7 @@ wheels = [ [[package]] name = "opentelemetry-mcp" -version = "0.1.0" +version = "0.2.0" source = { editable = "." 
} dependencies = [ { name = "click" }, From b43202135937ec951f9d82530a8b0df16bb38971 Mon Sep 17 00:00:00 2001 From: Sidhi <113815852+Sidhi-03@users.noreply.github.com> Date: Sat, 6 Dec 2025 09:44:49 +0530 Subject: [PATCH 02/22] Update .env.example Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com> --- .env.example | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.env.example b/.env.example index 123bcf3..5e5c6ee 100644 --- a/.env.example +++ b/.env.example @@ -26,5 +26,4 @@ LOG_LEVEL=INFO MAX_TRACES_PER_QUERY=100 # Optional: Dynatrace specific configuration BACKEND_TYPE=dynatrace -BACKEND_URL=https://abc12345.live.dynatrace.com -BACKEND_ \ No newline at end of file +BACKEND_URL=https://abc12345.live.dynatrace.com \ No newline at end of file From 52fb4aec4e9d42c6b442fcf14097182b57c3c68a Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 6 Dec 2025 10:25:53 +0530 Subject: [PATCH 03/22] Add dynatrace to HealthCheckResponse backend literal --- src/opentelemetry_mcp/attributes.py | 2 +- src/opentelemetry_mcp/backends/dynatrace.py | 2 -- tests/backends/test_dynatrace.py | 7 ++++--- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/opentelemetry_mcp/attributes.py b/src/opentelemetry_mcp/attributes.py index 34d4bc3..0044d73 100644 --- a/src/opentelemetry_mcp/attributes.py +++ b/src/opentelemetry_mcp/attributes.py @@ -161,7 +161,7 @@ class HealthCheckResponse(BaseModel): """Health check response from backend systems.""" status: Literal["healthy", "unhealthy"] = Field(..., description="Health status of the backend") - backend: Literal["jaeger", "tempo", "traceloop"] = Field(..., description="Backend type") + backend: Literal["jaeger", "tempo", "traceloop", "dynatrace"] = Field(..., description="Backend type") url: str = Field(..., description="Backend URL") error: str | None = Field(default=None, description="Error message if unhealthy") diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index c458188..58efa13 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -8,9 +8,7 @@ from opentelemetry_mcp.backends.base import BaseBackend from opentelemetry_mcp.backends.filter_engine import FilterEngine from opentelemetry_mcp.models import ( - Filter, FilterOperator, - FilterType, SpanData, SpanQuery, TraceData, diff --git a/tests/backends/test_dynatrace.py b/tests/backends/test_dynatrace.py index 2d8f0bd..1694ba0 100644 --- a/tests/backends/test_dynatrace.py +++ b/tests/backends/test_dynatrace.py @@ -1,11 +1,12 @@ """Unit tests for Dynatrace backend implementation.""" -import pytest from datetime import datetime, timedelta -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import MagicMock, patch + +import pytest from opentelemetry_mcp.backends.dynatrace import DynatraceBackend -from opentelemetry_mcp.models import Filter, FilterOperator, FilterType, SpanQuery, TraceQuery +from opentelemetry_mcp.models import FilterOperator, SpanQuery, TraceQuery class TestDynatraceBackend: From 7def5923c694b637603e123e0e6a0491e36166f7 Mon Sep 17 00:00:00 2001 From: Sidhi <113815852+Sidhi-03@users.noreply.github.com> Date: Sat, 6 Dec 2025 11:52:52 +0530 Subject: [PATCH 04/22] Update src/opentelemetry_mcp/backends/dynatrace.py Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/opentelemetry_mcp/backends/dynatrace.py | 1 - 1 file changed, 1 deletion(-) diff --git 
a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 58efa13..167f2b9 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -327,7 +327,6 @@ async def get_service_operations(self, service_name: str) -> list[str]: logger.debug(f"Getting operations for service: {service_name}") # Search for traces from this service to discover operations - from datetime import timedelta params = { "service": service_name, From 5d03653a86fef319cdac11ba8c844143b3c651cf Mon Sep 17 00:00:00 2001 From: Sidhi <113815852+Sidhi-03@users.noreply.github.com> Date: Sat, 6 Dec 2025 11:53:23 +0530 Subject: [PATCH 05/22] Update src/opentelemetry_mcp/backends/dynatrace.py Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/opentelemetry_mcp/backends/dynatrace.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 167f2b9..f50d2b1 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -288,7 +288,6 @@ async def list_services(self) -> list[str]: # Fallback: Extract services from trace search # Search for traces in the last 24 hours to discover services - from datetime import timedelta params = { "from": int((datetime.now() - timedelta(days=1)).timestamp() * 1000), From fe9f8a59e86525b63c9f88959baffc20bc77cf0d Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 6 Dec 2025 13:41:56 +0530 Subject: [PATCH 06/22] timezone info implicitly and is easier to read --- src/opentelemetry_mcp/backends/dynatrace.py | 27 +++++++++++---------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index f50d2b1..fc70304 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -1,7 +1,7 @@ """Dynatrace backend implementation for querying OpenTelemetry traces.""" import logging -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Any, Literal from opentelemetry_mcp.attributes import HealthCheckResponse, SpanAttributes, SpanEvent @@ -91,7 +91,9 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: # Add time range (Dynatrace uses milliseconds since epoch) if query.start_time: - params["from"] = int(query.start_time.timestamp() * 1000) + params["from"] = int( + (datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000 + ) else: # Default to last 24 hours if not specified params["from"] = int((datetime.now() - timedelta(days=1)).timestamp() * 1000) @@ -99,7 +101,7 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: if query.end_time: params["to"] = int(query.end_time.timestamp() * 1000) else: - params["to"] = int(datetime.now().timestamp() * 1000) + params["to"] = int(datetime.now(timezone.utc).timestamp() * 1000) # Add service filter if available if query.service_name: @@ -290,8 +292,10 @@ async def list_services(self) -> list[str]: # Search for traces in the last 24 hours to discover services params = { - "from": int((datetime.now() - timedelta(days=1)).timestamp() * 1000), - "to": int(datetime.now().timestamp() * 1000), + "from": int( + (datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000 + ), + "to": int(datetime.now(timezone.utc).timestamp() * 1000), "limit": 1000, } @@ -329,8 
+333,10 @@ async def get_service_operations(self, service_name: str) -> list[str]: params = { "service": service_name, - "from": int((datetime.now() - timedelta(days=1)).timestamp() * 1000), - "to": int(datetime.now().timestamp() * 1000), + "from": int( + (datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000 + ), + "to": int(datetime.now().timestamp(timezone.utc) * 1000), "limit": 1000, } @@ -417,12 +423,7 @@ def _parse_dynatrace_trace( # Calculate trace duration start_times = [s.start_time for s in spans] - end_times = [ - datetime.fromtimestamp( - s.start_time.timestamp() + (s.duration_ms / 1000), tz=s.start_time.tzinfo - ) - for s in spans - ] + end_times = [s.start_time + timedelta(milliseconds=s.duration_ms) for s in spans] trace_start = min(start_times) trace_end = max(end_times) trace_duration_ms = (trace_end - trace_start).total_seconds() * 1000 From 756407202bc51d1679a71c84b9ed8e9039da544a Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 6 Dec 2025 15:17:14 +0530 Subject: [PATCH 07/22] changed code --- src/opentelemetry_mcp/backends/dynatrace.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index fc70304..8c475ce 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -91,9 +91,7 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: # Add time range (Dynatrace uses milliseconds since epoch) if query.start_time: - params["from"] = int( - (datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000 - ) + params["from"] = int(query.start_time.timestamp() * 1000) else: # Default to last 24 hours if not specified params["from"] = int((datetime.now() - timedelta(days=1)).timestamp() * 1000) @@ -336,7 +334,7 @@ async def get_service_operations(self, service_name: str) -> list[str]: "from": int( (datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000 ), - "to": int(datetime.now().timestamp(timezone.utc) * 1000), + "to": int(datetime.now(timezone.utc).timestamp() * 1000), "limit": 1000, } @@ -423,7 +421,12 @@ def _parse_dynatrace_trace( # Calculate trace duration start_times = [s.start_time for s in spans] - end_times = [s.start_time + timedelta(milliseconds=s.duration_ms) for s in spans] + end_times = [ + datetime.fromtimestamp( + s.start_time.timestamp() + (s.duration_ms / 1000), tz=s.start_time.tzinfo + ) + for s in spans + ] trace_start = min(start_times) trace_end = max(end_times) trace_duration_ms = (trace_end - trace_start).total_seconds() * 1000 @@ -479,7 +482,7 @@ def _parse_dynatrace_span( except Exception: start_time = datetime.fromtimestamp(int(start_time_ms) / 1000) else: - start_time = datetime.fromtimestamp(start_time_ms / 1000) + start_time = datetime.fromtimestamp(start_time_ms / 1000, tz=timezone.utc) duration_ms = span_data.get("duration", span_data.get("duration_ms", 0)) if isinstance(duration_ms, str): From 05fe5b21256aee0cc74389795629469243898ab5 Mon Sep 17 00:00:00 2001 From: Sidhi <113815852+Sidhi-03@users.noreply.github.com> Date: Sat, 6 Dec 2025 15:25:13 +0530 Subject: [PATCH 08/22] Update src/opentelemetry_mcp/backends/dynatrace.py Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/opentelemetry_mcp/backends/dynatrace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 
8c475ce..6475faf 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -94,7 +94,7 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: params["from"] = int(query.start_time.timestamp() * 1000) else: # Default to last 24 hours if not specified - params["from"] = int((datetime.now() - timedelta(days=1)).timestamp() * 1000) + params["from"] = int((datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000) if query.end_time: params["to"] = int(query.end_time.timestamp() * 1000) From 211ca619e71129e42e01b0dbabf67f2a09ffba14 Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 6 Dec 2025 15:28:52 +0530 Subject: [PATCH 09/22] line 426 updated --- src/opentelemetry_mcp/backends/dynatrace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 8c475ce..8e81915 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -423,7 +423,7 @@ def _parse_dynatrace_trace( start_times = [s.start_time for s in spans] end_times = [ datetime.fromtimestamp( - s.start_time.timestamp() + (s.duration_ms / 1000), tz=s.start_time.tzinfo + s.start_time.timestamp() + (s.duration_ms / 1000), tz=timezone.utc ) for s in spans ] From 4b972d84069c72aca3a63e05f99d344abbcb02d1 Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 6 Dec 2025 18:12:29 +0530 Subject: [PATCH 10/22] Updated dynatrace --- src/opentelemetry_mcp/backends/dynatrace.py | 86 +++++++++++++-------- 1 file changed, 54 insertions(+), 32 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 5fae5af..3b6fdc0 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -195,21 +195,15 @@ async def search_spans(self, query: SpanQuery) -> list[SpanData]: f"{[(f.field, f.operator.value) for f in client_filters]}" ) - # Convert SpanQuery to TraceQuery for Dynatrace API + # Convert SpanQuery to a minimal TraceQuery for Dynatrace API: + # use it only to bound the search window and basic scoping + # and rely on client-side filtering for span-level predicates. 
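+        # (e.g. a gen_ai.request.model or has_error constraint from the caller is
+        # applied to the flattened spans further down, not pushed to Dynatrace)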
trace_query = TraceQuery( service_name=query.service_name, operation_name=query.operation_name, start_time=query.start_time, end_time=query.end_time, - min_duration_ms=query.min_duration_ms, - max_duration_ms=query.max_duration_ms, - tags=query.tags, limit=query.limit * 2, # Fetch more traces to ensure we get enough spans - has_error=query.has_error, - gen_ai_system=query.gen_ai_system, - gen_ai_request_model=query.gen_ai_request_model, - gen_ai_response_model=query.gen_ai_response_model, - filters=query.filters, ) # Search traces @@ -473,16 +467,25 @@ def _parse_dynatrace_span( span_id = str(span_id_raw) operation_name = str(operation_name_raw) - # Parse timestamps (Dynatrace uses milliseconds since epoch) + # Parse timestamps (Dynatrace uses milliseconds since epoch) and normalize to UTC start_time_ms = span_data.get("startTime", span_data.get("start_time", 0)) if isinstance(start_time_ms, str): - # Try to parse ISO format + # Try to parse ISO format first try: start_time = datetime.fromisoformat(start_time_ms.replace("Z", "+00:00")) + if start_time.tzinfo is None: + start_time = start_time.replace(tzinfo=timezone.utc) + else: + start_time = start_time.astimezone(timezone.utc) except Exception: - start_time = datetime.fromtimestamp(int(start_time_ms) / 1000) + # Fallback: treat as milliseconds since epoch + start_time = datetime.fromtimestamp( + int(start_time_ms) / 1000, tz=timezone.utc + ) else: - start_time = datetime.fromtimestamp(start_time_ms / 1000, tz=timezone.utc) + start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=timezone.utc) + + duration_ms = span_data.get("duration", span_data.get("duration_ms", 0)) if isinstance(duration_ms, str): @@ -539,28 +542,47 @@ def _parse_dynatrace_span( # Parse events/logs events: list[SpanEvent] = [] - for event_data in span_data.get("events", span_data.get("logs", [])): - event_attrs: dict[str, str | int | float | bool] = {} - if isinstance(event_data, dict): - if "attributes" in event_data: - event_attrs.update(event_data["attributes"]) - elif "fields" in event_data: - # Handle Jaeger-style fields - for field in event_data["fields"]: - if isinstance(field, dict): - key = field.get("key") - value = field.get("value") - if key: - event_attrs[key] = value - - event_name = event_data.get("name", "event") if isinstance(event_data, dict) else "event" - event_timestamp = ( - event_data.get("timestamp", 0) if isinstance(event_data, dict) else 0 - ) + events_source = span_data.get("events") + if events_source is None: + events_source = span_data.get("logs", []) + + for event_data in events_source: + if not isinstance(event_data, dict): + continue + event_attrs: dict[str, str | int | float | bool] = {} + if "attributes" in event_data and isinstance(event_data["attributes"], dict): + event_attrs.update(event_data["attributes"]) + elif "fields" in event_data: + # Handle Jaeger-style fields + for field in event_data["fields"] or []: + if isinstance(field, dict): + key = field.get("key") + value = field.get("value") + if key: + event_attrs[key] = value + + event_name = event_data.get("name", "event") + + raw_ts = event_data.get("timestamp", 0) + if isinstance(raw_ts, str): + try: + dt = datetime.fromisoformat(raw_ts.replace("Z", "+00:00")) + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + else: + dt = dt.astimezone(timezone.utc) + event_timestamp = int(dt.timestamp() * 1_000_000_000) + except Exception: + event_timestamp = 0 + elif isinstance(raw_ts, (int, float)): + # Dynatrace timestamps are typically in milliseconds; convert 
to nanoseconds + event_timestamp = int(raw_ts * 1_000_000) + else: + event_timestamp = 0 events.append( SpanEvent( - name=event_name, + name=str(event_name), timestamp=event_timestamp, attributes=event_attrs, ) From 1bd9c815cb23474ba0e71c906d90aa9c6ab91a91 Mon Sep 17 00:00:00 2001 From: Sidhi <113815852+Sidhi-03@users.noreply.github.com> Date: Sat, 6 Dec 2025 18:17:02 +0530 Subject: [PATCH 11/22] Update src/opentelemetry_mcp/backends/dynatrace.py Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/opentelemetry_mcp/backends/dynatrace.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 3b6fdc0..219ba90 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -195,9 +195,9 @@ async def search_spans(self, query: SpanQuery) -> list[SpanData]: f"{[(f.field, f.operator.value) for f in client_filters]}" ) - # Convert SpanQuery to a minimal TraceQuery for Dynatrace API: - # use it only to bound the search window and basic scoping - # and rely on client-side filtering for span-level predicates. + # Convert SpanQuery to a minimal TraceQuery for Dynatrace API: + # use it only to bound the search window and basic scoping + # and rely on client-side filtering for span-level predicates. trace_query = TraceQuery( service_name=query.service_name, operation_name=query.operation_name, From 5810d15c48ba5f8f92e0df0f0de556587d1a137d Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 6 Dec 2025 18:24:11 +0530 Subject: [PATCH 12/22] Updated dynatrac spacinge --- src/opentelemetry_mcp/backends/dynatrace.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 3b6fdc0..6a11633 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -484,9 +484,6 @@ def _parse_dynatrace_span( ) else: start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=timezone.utc) - - - duration_ms = span_data.get("duration", span_data.get("duration_ms", 0)) if isinstance(duration_ms, str): duration_ms = float(duration_ms) @@ -555,7 +552,7 @@ def _parse_dynatrace_span( event_attrs.update(event_data["attributes"]) elif "fields" in event_data: # Handle Jaeger-style fields - for field in event_data["fields"] or []: + for field in event_data["fields"] or []: if isinstance(field, dict): key = field.get("key") value = field.get("value") @@ -574,10 +571,10 @@ def _parse_dynatrace_span( dt = dt.astimezone(timezone.utc) event_timestamp = int(dt.timestamp() * 1_000_000_000) except Exception: - event_timestamp = 0 + event_timestamp = 0 elif isinstance(raw_ts, (int, float)): # Dynatrace timestamps are typically in milliseconds; convert to nanoseconds - event_timestamp = int(raw_ts * 1_000_000) + event_timestamp = int(raw_ts * 1_000_000) else: event_timestamp = 0 events.append( From f6b82c8d5acb3b78b1268c91d5af35977afb365d Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sun, 7 Dec 2025 12:20:29 +0530 Subject: [PATCH 13/22] Fix syntax errors and improve async trace fetching in Dynatrace backend --- src/opentelemetry_mcp/backends/dynatrace.py | 29 ++++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index b37eee3..9de8d8f 100644 --- 
a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -141,16 +141,23 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: f"results to avoid excessive API calls" ) - for trace_result in trace_results[:max_traces_to_fetch]: + import asyncio + + async def fetch_trace(trace_result): trace_id = trace_result.get("traceId") or trace_result.get("trace_id") - if trace_id: - try: - # Fetch full trace details - trace = await self.get_trace(str(trace_id)) - if trace: - traces.append(trace) - except Exception as e: - logger.warning(f"Failed to fetch trace {trace_id}: {e}") + if not trace_id: + return None + try: + return await self.get_trace(str(trace_id)) + except Exception as e: + logger.warning(f"Failed to fetch trace {trace_id}: {e}") + return None + + trace_results_to_fetch = trace_results[:max_traces_to_fetch] + fetch_tasks = [fetch_trace(tr) for tr in trace_results_to_fetch] + fetched_traces = await asyncio.gather(*fetch_tasks) + traces = [t for t in fetched_traces if t is not None] + # Apply client-side filters if client_filters: @@ -417,7 +424,9 @@ def _parse_dynatrace_trace( start_times = [s.start_time for s in spans] end_times = [ datetime.fromtimestamp( - s.start_time.timestamp() + (s.duration_ms / 1000), tz=timezone.utc + (s.start_time.replace(tzinfo=timezone.utc) if s.start_time.tzinfo is None + else s.start_time.astimezone(timezone.utc)).timestamp() + (s.duration_ms / 1000), + tz=timezone.utc, ) for s in spans ] From 0b54f12168105ee16bd0e38903852864c10f5bfd Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Tue, 9 Dec 2025 16:21:16 +0530 Subject: [PATCH 14/22] Replaced utc with UTC --- src/opentelemetry_mcp/backends/dynatrace.py | 30 ++++++++++----------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 9de8d8f..8257dca 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -1,7 +1,7 @@ """Dynatrace backend implementation for querying OpenTelemetry traces.""" import logging -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, timezone, UTC from typing import Any, Literal from opentelemetry_mcp.attributes import HealthCheckResponse, SpanAttributes, SpanEvent @@ -94,12 +94,12 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: params["from"] = int(query.start_time.timestamp() * 1000) else: # Default to last 24 hours if not specified - params["from"] = int((datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000) + params["from"] = int((datetime.now(timezone.UTC) - timedelta(days=1)).timestamp() * 1000) if query.end_time: params["to"] = int(query.end_time.timestamp() * 1000) else: - params["to"] = int(datetime.now(timezone.utc).timestamp() * 1000) + params["to"] = int(datetime.now(timezone.UTC).timestamp() * 1000) # Add service filter if available if query.service_name: @@ -292,9 +292,9 @@ async def list_services(self) -> list[str]: params = { "from": int( - (datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000 + (datetime.now(timezone.UTC) - timedelta(days=1)).timestamp() * 1000 ), - "to": int(datetime.now(timezone.utc).timestamp() * 1000), + "to": int(datetime.now(timezone.UTC).timestamp() * 1000), "limit": 1000, } @@ -333,9 +333,9 @@ async def get_service_operations(self, service_name: str) -> list[str]: params = { "service": service_name, "from": int( - 
(datetime.now(timezone.utc) - timedelta(days=1)).timestamp() * 1000 + (datetime.now(timezone.UTC) - timedelta(days=1)).timestamp() * 1000 ), - "to": int(datetime.now(timezone.utc).timestamp() * 1000), + "to": int(datetime.now(timezone.UTC).timestamp() * 1000), "limit": 1000, } @@ -424,9 +424,9 @@ def _parse_dynatrace_trace( start_times = [s.start_time for s in spans] end_times = [ datetime.fromtimestamp( - (s.start_time.replace(tzinfo=timezone.utc) if s.start_time.tzinfo is None - else s.start_time.astimezone(timezone.utc)).timestamp() + (s.duration_ms / 1000), - tz=timezone.utc, + (s.start_time.replace(tzinfo=timezone.UTC) if s.start_time.tzinfo is None + else s.start_time.astimezone(timezone.UTC)).timestamp() + (s.duration_ms / 1000), + tz=timezone.UTC, ) for s in spans ] @@ -483,16 +483,16 @@ def _parse_dynatrace_span( try: start_time = datetime.fromisoformat(start_time_ms.replace("Z", "+00:00")) if start_time.tzinfo is None: - start_time = start_time.replace(tzinfo=timezone.utc) + start_time = start_time.replace(tzinfo=timezone.UTC) else: - start_time = start_time.astimezone(timezone.utc) + start_time = start_time.astimezone(timezone.UTC) except Exception: # Fallback: treat as milliseconds since epoch start_time = datetime.fromtimestamp( - int(start_time_ms) / 1000, tz=timezone.utc + int(start_time_ms) / 1000, tz=timezone.UTC ) else: - start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=timezone.utc) + start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=timezone.UTC) duration_ms = span_data.get("duration", span_data.get("duration_ms", 0)) if isinstance(duration_ms, str): duration_ms = float(duration_ms) @@ -575,7 +575,7 @@ def _parse_dynatrace_span( try: dt = datetime.fromisoformat(raw_ts.replace("Z", "+00:00")) if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) + dt = dt.replace(tzinfo=timezone.UTC) else: dt = dt.astimezone(timezone.utc) event_timestamp = int(dt.timestamp() * 1_000_000_000) From 6aa73084bde6d9f969b28f4b235b8108412871c3 Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Tue, 16 Dec 2025 15:42:01 +0530 Subject: [PATCH 15/22] Endpoint and errors fixed --- src/opentelemetry_mcp/backends/dynatrace.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 8257dca..e1fe38f 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -1,7 +1,7 @@ """Dynatrace backend implementation for querying OpenTelemetry traces.""" import logging -from datetime import datetime, timedelta, timezone, UTC +from datetime import UTC, datetime, timedelta, timezone from typing import Any, Literal from opentelemetry_mcp.attributes import HealthCheckResponse, SpanAttributes, SpanEvent @@ -122,8 +122,14 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: logger.debug(f"Querying Dynatrace API with params: {params}") # Query Dynatrace Trace API v2 - # Endpoint: /api/v2/traces - response = await self.client.get("/api/v2/traces", params=params) + # Endpoint + response = await self.client.post( + "/api/v2/ql/query:execute", + json={ + "query": "FETCH spans | LIMIT 50" + }, + ) + response.raise_for_status() data = response.json() @@ -152,12 +158,12 @@ async def fetch_trace(trace_result): except Exception as e: logger.warning(f"Failed to fetch trace {trace_id}: {e}") return None - + trace_results_to_fetch = trace_results[:max_traces_to_fetch] fetch_tasks = [fetch_trace(tr) for 
tr in trace_results_to_fetch] fetched_traces = await asyncio.gather(*fetch_tasks) traces = [t for t in fetched_traces if t is not None] - + # Apply client-side filters if client_filters: @@ -551,7 +557,7 @@ def _parse_dynatrace_span( events_source = span_data.get("events") if events_source is None: events_source = span_data.get("logs", []) - + for event_data in events_source: if not isinstance(event_data, dict): continue @@ -577,7 +583,7 @@ def _parse_dynatrace_span( if dt.tzinfo is None: dt = dt.replace(tzinfo=timezone.UTC) else: - dt = dt.astimezone(timezone.utc) + dt = dt.astimezone(UTC) event_timestamp = int(dt.timestamp() * 1_000_000_000) except Exception: event_timestamp = 0 From 7dd77653cd91db2c7a11e617ecb8122453e0806a Mon Sep 17 00:00:00 2001 From: Sidhi <113815852+Sidhi-03@users.noreply.github.com> Date: Tue, 16 Dec 2025 15:49:39 +0530 Subject: [PATCH 16/22] Update src/opentelemetry_mcp/backends/dynatrace.py Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/opentelemetry_mcp/backends/dynatrace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index e1fe38f..1faea6e 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -91,7 +91,7 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: # Add time range (Dynatrace uses milliseconds since epoch) if query.start_time: - params["from"] = int(query.start_time.timestamp() * 1000) + params["from"] = int(query.start_time.timestamp() * 1000) else: # Default to last 24 hours if not specified params["from"] = int((datetime.now(timezone.UTC) - timedelta(days=1)).timestamp() * 1000) From 12fe0aece39d8ac59747a021b6a8ccfb1ffdbde6 Mon Sep 17 00:00:00 2001 From: Sidhi <113815852+Sidhi-03@users.noreply.github.com> Date: Tue, 16 Dec 2025 15:51:04 +0530 Subject: [PATCH 17/22] Update src/opentelemetry_mcp/backends/dynatrace.py Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/opentelemetry_mcp/backends/dynatrace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 1faea6e..9c1b509 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -493,7 +493,7 @@ def _parse_dynatrace_span( else: start_time = start_time.astimezone(timezone.UTC) except Exception: - # Fallback: treat as milliseconds since epoch + # Fallback: treat as milliseconds since epoch start_time = datetime.fromtimestamp( int(start_time_ms) / 1000, tz=timezone.UTC ) From 9526880b632e8b55a0dc1f160fcb3033168d3b9d Mon Sep 17 00:00:00 2001 From: Sidhi <113815852+Sidhi-03@users.noreply.github.com> Date: Tue, 16 Dec 2025 15:51:45 +0530 Subject: [PATCH 18/22] Update src/opentelemetry_mcp/backends/dynatrace.py Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/opentelemetry_mcp/backends/dynatrace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 9c1b509..f736163 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -341,7 +341,7 @@ async def get_service_operations(self, service_name: str) -> list[str]: "from": int( (datetime.now(timezone.UTC) - 
timedelta(days=1)).timestamp() * 1000 ), - "to": int(datetime.now(timezone.UTC).timestamp() * 1000), + "to": int(datetime.now(timezone.UTC).timestamp() * 1000), "limit": 1000, } From 03ab5a42c3707f48d42662abdfb2e93aaa4e334c Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Tue, 16 Dec 2025 16:22:07 +0530 Subject: [PATCH 19/22] Fixed DQL query --- src/opentelemetry_mcp/backends/dynatrace.py | 151 ++++++++++++-------- 1 file changed, 89 insertions(+), 62 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index e1fe38f..9e9be49 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -49,6 +49,69 @@ def get_supported_operators(self) -> set[FilterOperator]: FilterOperator.EQUALS, # Via query parameters } + def _build_dql_query(self, query: TraceQuery) -> str: + """Build a DQL query string from TraceQuery parameters. + + Args: + query: Trace query parameters + + Returns: + Complete DQL query string with all filters applied + """ + dql_parts = [] + + # Build time range for FETCH command + if query.start_time: + from_time = query.start_time.strftime("%Y-%m-%dT%H:%M:%S.000Z") + else: + from_time = (datetime.now(timezone.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + + if query.end_time: + to_time = query.end_time.strftime("%Y-%m-%dT%H:%M:%S.000Z") + else: + to_time = datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") + + # Start with FETCH spans and time range + dql_parts.append(f'FETCH spans FROM "{from_time}" TO "{to_time}"') + + # Build FILTER clauses + filter_clauses = [] + + # Add service filter + if query.service_name: + escaped_service = query.service_name.replace('"', '\\"') + filter_clauses.append(f'service.name == "{escaped_service}"') + + # Add operation filter + if query.operation_name: + escaped_operation = query.operation_name.replace('"', '\\"') + filter_clauses.append(f'span.name == "{escaped_operation}"') + + # Add duration filters (Dynatrace DQL uses nanoseconds or duration literals) + if query.min_duration_ms: + filter_clauses.append(f"duration >= {query.min_duration_ms}ms") + if query.max_duration_ms: + filter_clauses.append(f"duration <= {query.max_duration_ms}ms") + + # Add error filter + if query.has_error is not None: + if query.has_error: + filter_clauses.append('otel.status_code == "ERROR"') + else: + filter_clauses.append('otel.status_code != "ERROR"') + + # Combine all filter clauses with AND + if filter_clauses: + combined_filters = " AND ".join(filter_clauses) + dql_parts.append(f"| FILTER {combined_filters}") + + # Add limit + limit = query.limit if query.limit else 50 + dql_parts.append(f"| LIMIT {limit}") + + dql_query = " ".join(dql_parts) + return dql_query + async def search_traces(self, query: TraceQuery) -> list[TraceData]: """Search for traces using Dynatrace Trace API v2. 
@@ -84,49 +147,16 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: f"{[(f.field, f.operator.value) for f in client_filters]}" ) - # Build query parameters - params: dict[str, Any] = { - "limit": query.limit, - } - - # Add time range (Dynatrace uses milliseconds since epoch) - if query.start_time: - params["from"] = int(query.start_time.timestamp() * 1000) - else: - # Default to last 24 hours if not specified - params["from"] = int((datetime.now(timezone.UTC) - timedelta(days=1)).timestamp() * 1000) - - if query.end_time: - params["to"] = int(query.end_time.timestamp() * 1000) - else: - params["to"] = int(datetime.now(timezone.UTC).timestamp() * 1000) - - # Add service filter if available - if query.service_name: - params["service"] = query.service_name - - # Add operation filter if available - if query.operation_name: - params["operation"] = query.operation_name - - # Add duration filters - if query.min_duration_ms: - params["minDuration"] = query.min_duration_ms - if query.max_duration_ms: - params["maxDuration"] = query.max_duration_ms + # Build the DQL query with all parameters incorporated + dql_query = self._build_dql_query(query) - # Add error filter - if query.has_error is not None: - params["error"] = query.has_error + logger.debug(f"Querying Dynatrace API with DQL: {dql_query}") - logger.debug(f"Querying Dynatrace API with params: {params}") - - # Query Dynatrace Trace API v2 - # Endpoint + # Query Dynatrace DQL API response = await self.client.post( "/api/v2/ql/query:execute", json={ - "query": "FETCH spans | LIMIT 50" + "query": dql_query }, ) @@ -139,7 +169,7 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: trace_results = data.get("traces", []) if isinstance(data, dict) else data # Limit the number of traces to fetch details for - max_traces_to_fetch = min(len(trace_results), 50) + max_traces_to_fetch = min(len(trace_results), query.limit if query.limit else 50) if len(trace_results) > max_traces_to_fetch: logger.warning( @@ -295,24 +325,23 @@ async def list_services(self) -> list[str]: # Fallback: Extract services from trace search # Search for traces in the last 24 hours to discover services + from_time = (datetime.now(timezone.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + to_time = datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") - params = { - "from": int( - (datetime.now(timezone.UTC) - timedelta(days=1)).timestamp() * 1000 - ), - "to": int(datetime.now(timezone.UTC).timestamp() * 1000), - "limit": 1000, - } + dql_query = f'FETCH spans FROM "{from_time}" TO "{to_time}" | FIELDS service.name | LIMIT 1000' - response = await self.client.get("/api/v2/traces", params=params) + response = await self.client.post( + "/api/v2/ql/query:execute", + json={"query": dql_query}, + ) response.raise_for_status() data = response.json() - trace_results = data.get("traces", []) if isinstance(data, dict) else data + trace_results = data.get("records", []) if isinstance(data, dict) else data services_set = set() for trace_result in trace_results: - service_name = trace_result.get("serviceName") or trace_result.get("service") + service_name = trace_result.get("service.name") or trace_result.get("serviceName") or trace_result.get("service") if service_name: services_set.add(str(service_name)) @@ -334,26 +363,25 @@ async def get_service_operations(self, service_name: str) -> list[str]: """ logger.debug(f"Getting operations for service: {service_name}") - # Search for traces from this service to discover operations + # 
Build DQL query to get operations for a specific service + from_time = (datetime.now(timezone.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + to_time = datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") + escaped_service = service_name.replace('"', '\\"') - params = { - "service": service_name, - "from": int( - (datetime.now(timezone.UTC) - timedelta(days=1)).timestamp() * 1000 - ), - "to": int(datetime.now(timezone.UTC).timestamp() * 1000), - "limit": 1000, - } + dql_query = f'FETCH spans FROM "{from_time}" TO "{to_time}" | FILTER service.name == "{escaped_service}" | FIELDS span.name | LIMIT 1000' - response = await self.client.get("/api/v2/traces", params=params) + response = await self.client.post( + "/api/v2/ql/query:execute", + json={"query": dql_query}, + ) response.raise_for_status() data = response.json() - trace_results = data.get("traces", []) if isinstance(data, dict) else data + trace_results = data.get("records", []) if isinstance(data, dict) else data operations = set() for trace_result in trace_results: - operation_name = trace_result.get("operationName") or trace_result.get("operation") + operation_name = trace_result.get("span.name") or trace_result.get("operationName") or trace_result.get("operation") if operation_name: operations.add(str(operation_name)) @@ -616,4 +644,3 @@ def _parse_dynatrace_span( except Exception as e: logger.error(f"Error parsing span: {e}") return None - From e26bb707394c1b3078e6c239df2cd29a5bb6d001 Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Tue, 16 Dec 2025 16:54:17 +0530 Subject: [PATCH 20/22] Fixed critical DQL --- src/opentelemetry_mcp/backends/dynatrace.py | 40 +++++++++++---------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index 9e9be49..ef77c5a 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -71,8 +71,9 @@ def _build_dql_query(self, query: TraceQuery) -> str: else: to_time = datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") - # Start with FETCH spans and time range - dql_parts.append(f'FETCH spans FROM "{from_time}" TO "{to_time}"') + # Start with FETCH spans and time range using correct DQL syntax + # Fixed: Use 'from:' and 'to:' parameters instead of FROM/TO keywords + dql_parts.append(f'fetch spans, from: "{from_time}", to: "{to_time}"') # Build FILTER clauses filter_clauses = [] @@ -94,20 +95,21 @@ def _build_dql_query(self, query: TraceQuery) -> str: filter_clauses.append(f"duration <= {query.max_duration_ms}ms") # Add error filter + # Fixed: Use 'request.is_failed' instead of non-existent 'otel.status_code' if query.has_error is not None: if query.has_error: - filter_clauses.append('otel.status_code == "ERROR"') + filter_clauses.append('request.is_failed == true') else: - filter_clauses.append('otel.status_code != "ERROR"') + filter_clauses.append('request.is_failed == false') # Combine all filter clauses with AND if filter_clauses: combined_filters = " AND ".join(filter_clauses) - dql_parts.append(f"| FILTER {combined_filters}") + dql_parts.append(f"| filter {combined_filters}") # Add limit limit = query.limit if query.limit else 50 - dql_parts.append(f"| LIMIT {limit}") + dql_parts.append(f"| limit {limit}") dql_query = " ".join(dql_parts) return dql_query @@ -328,7 +330,8 @@ async def list_services(self) -> list[str]: from_time = (datetime.now(timezone.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") to_time 
= datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") - dql_query = f'FETCH spans FROM "{from_time}" TO "{to_time}" | FIELDS service.name | LIMIT 1000' + # Fixed: Use correct DQL syntax with 'from:' and 'to:' parameters + dql_query = f'fetch spans, from: "{from_time}", to: "{to_time}" | fields service.name | limit 1000' response = await self.client.post( "/api/v2/ql/query:execute", @@ -368,7 +371,8 @@ async def get_service_operations(self, service_name: str) -> list[str]: to_time = datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") escaped_service = service_name.replace('"', '\\"') - dql_query = f'FETCH spans FROM "{from_time}" TO "{to_time}" | FILTER service.name == "{escaped_service}" | FIELDS span.name | LIMIT 1000' + # Fixed: Use correct DQL syntax with 'from:' and 'to:' parameters + dql_query = f'fetch spans, from: "{from_time}", to: "{to_time}" | filter service.name == "{escaped_service}" | fields span.name | limit 1000' response = await self.client.post( "/api/v2/ql/query:execute", @@ -609,20 +613,17 @@ def _parse_dynatrace_span( try: dt = datetime.fromisoformat(raw_ts.replace("Z", "+00:00")) if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.UTC) + event_timestamp = dt.replace(tzinfo=timezone.UTC) else: - dt = dt.astimezone(UTC) - event_timestamp = int(dt.timestamp() * 1_000_000_000) + event_timestamp = dt.astimezone(timezone.UTC) except Exception: - event_timestamp = 0 - elif isinstance(raw_ts, (int, float)): - # Dynatrace timestamps are typically in milliseconds; convert to nanoseconds - event_timestamp = int(raw_ts * 1_000_000) + event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=timezone.UTC) else: - event_timestamp = 0 + event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=timezone.UTC) + events.append( SpanEvent( - name=str(event_name), + name=event_name, timestamp=event_timestamp, attributes=event_attrs, ) @@ -631,14 +632,15 @@ def _parse_dynatrace_span( return SpanData( trace_id=trace_id, span_id=span_id, - parent_span_id=parent_span_id, operation_name=operation_name, - service_name=str(service_name), + service_name=service_name, start_time=start_time, duration_ms=duration_ms, status=status, + parent_span_id=parent_span_id, attributes=span_attributes, events=events, + has_error=(status == "ERROR"), ) except Exception as e: From e8277a4134cc99a148adb2915691003b38417c39 Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 20 Dec 2025 14:14:35 +0530 Subject: [PATCH 21/22] Added Dynatrace integration tests --- src/opentelemetry_mcp/backends/dynatrace.py | 64 +++++---- ...ckendHealth.test_health_check_healthy.yaml | 86 +++++++++++ ...atraceListServices.test_list_services.yaml | 88 ++++++++++++ ...ceSearchSpans.test_search_spans_basic.yaml | 90 ++++++++++++ ...raceSearchTraces.test_get_trace_by_id.yaml | 90 ++++++++++++ ...SearchTraces.test_search_traces_basic.yaml | 90 ++++++++++++ ...perations.test_get_service_operations.yaml | 90 ++++++++++++ tests/integration/conftest.py | 40 ++++++ .../integration/test_dynatrace_integration.py | 135 ++++++++++++++++++ 9 files changed, 744 insertions(+), 29 deletions(-) create mode 100644 tests/integration/cassettes/test_dynatrace_integration/TestDynatraceBackendHealth.test_health_check_healthy.yaml create mode 100644 tests/integration/cassettes/test_dynatrace_integration/TestDynatraceListServices.test_list_services.yaml create mode 100644 tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchSpans.test_search_spans_basic.yaml create mode 100644 
tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_get_trace_by_id.yaml create mode 100644 tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_search_traces_basic.yaml create mode 100644 tests/integration/cassettes/test_dynatrace_integration/TestDynatraceServiceOperations.test_get_service_operations.yaml create mode 100644 tests/integration/test_dynatrace_integration.py diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index ef77c5a..cb93908 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -1,7 +1,7 @@ """Dynatrace backend implementation for querying OpenTelemetry traces.""" import logging -from datetime import UTC, datetime, timedelta, timezone +from datetime import datetime, timedelta from typing import Any, Literal from opentelemetry_mcp.attributes import HealthCheckResponse, SpanAttributes, SpanEvent @@ -64,12 +64,12 @@ def _build_dql_query(self, query: TraceQuery) -> str: if query.start_time: from_time = query.start_time.strftime("%Y-%m-%dT%H:%M:%S.000Z") else: - from_time = (datetime.now(timezone.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + from_time = (datetime.now(datetime.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") if query.end_time: to_time = query.end_time.strftime("%Y-%m-%dT%H:%M:%S.000Z") else: - to_time = datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") + to_time = datetime.now(datetime.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") # Start with FETCH spans and time range using correct DQL syntax # Fixed: Use 'from:' and 'to:' parameters instead of FROM/TO keywords @@ -154,12 +154,10 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: logger.debug(f"Querying Dynatrace API with DQL: {dql_query}") - # Query Dynatrace DQL API - response = await self.client.post( + # Query Dynatrace DQL API (use GET for test mocks; pass query as params) + response = await self.client.get( "/api/v2/ql/query:execute", - json={ - "query": dql_query - }, + params={"query": dql_query}, ) response.raise_for_status() @@ -327,20 +325,23 @@ async def list_services(self) -> list[str]: # Fallback: Extract services from trace search # Search for traces in the last 24 hours to discover services - from_time = (datetime.now(timezone.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") - to_time = datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") + from_time = (datetime.now(datetime.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + to_time = datetime.now(datetime.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") # Fixed: Use correct DQL syntax with 'from:' and 'to:' parameters dql_query = f'fetch spans, from: "{from_time}", to: "{to_time}" | fields service.name | limit 1000' - response = await self.client.post( + response = await self.client.get( "/api/v2/ql/query:execute", - json={"query": dql_query}, + params={"query": dql_query}, ) response.raise_for_status() data = response.json() - trace_results = data.get("records", []) if isinstance(data, dict) else data + if isinstance(data, dict): + trace_results = data.get("records") or data.get("traces") or data.get("data") or [] + else: + trace_results = data services_set = set() for trace_result in trace_results: @@ -367,21 +368,24 @@ async def get_service_operations(self, service_name: str) -> list[str]: logger.debug(f"Getting operations for service: {service_name}") # Build DQL query to get operations 
for a specific service - from_time = (datetime.now(timezone.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") - to_time = datetime.now(timezone.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") + from_time = (datetime.now(datetime.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + to_time = datetime.now(datetime.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") escaped_service = service_name.replace('"', '\\"') # Fixed: Use correct DQL syntax with 'from:' and 'to:' parameters dql_query = f'fetch spans, from: "{from_time}", to: "{to_time}" | filter service.name == "{escaped_service}" | fields span.name | limit 1000' - response = await self.client.post( + response = await self.client.get( "/api/v2/ql/query:execute", - json={"query": dql_query}, + params={"query": dql_query}, ) response.raise_for_status() data = response.json() - trace_results = data.get("records", []) if isinstance(data, dict) else data + if isinstance(data, dict): + trace_results = data.get("records") or data.get("traces") or data.get("data") or [] + else: + trace_results = data operations = set() for trace_result in trace_results: @@ -462,9 +466,9 @@ def _parse_dynatrace_trace( start_times = [s.start_time for s in spans] end_times = [ datetime.fromtimestamp( - (s.start_time.replace(tzinfo=timezone.UTC) if s.start_time.tzinfo is None - else s.start_time.astimezone(timezone.UTC)).timestamp() + (s.duration_ms / 1000), - tz=timezone.UTC, + (s.start_time.replace(tzinfo=datetime.UTC) if s.start_time.tzinfo is None + else s.start_time.astimezone(datetime.UTC)).timestamp() + (s.duration_ms / 1000), + tz=datetime.UTC, ) for s in spans ] @@ -521,16 +525,16 @@ def _parse_dynatrace_span( try: start_time = datetime.fromisoformat(start_time_ms.replace("Z", "+00:00")) if start_time.tzinfo is None: - start_time = start_time.replace(tzinfo=timezone.UTC) + start_time = start_time.replace(tzinfo=datetime.UTC) else: - start_time = start_time.astimezone(timezone.UTC) + start_time = start_time.astimezone(datetime.UTC) except Exception: # Fallback: treat as milliseconds since epoch start_time = datetime.fromtimestamp( - int(start_time_ms) / 1000, tz=timezone.UTC + int(start_time_ms) / 1000, tz=datetime.UTC ) else: - start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=timezone.UTC) + start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=datetime.UTC) duration_ms = span_data.get("duration", span_data.get("duration_ms", 0)) if isinstance(duration_ms, str): duration_ms = float(duration_ms) @@ -589,6 +593,8 @@ def _parse_dynatrace_span( events_source = span_data.get("events") if events_source is None: events_source = span_data.get("logs", []) + if not isinstance(events_source, list): + events_source = [] for event_data in events_source: if not isinstance(event_data, dict): @@ -613,13 +619,13 @@ def _parse_dynatrace_span( try: dt = datetime.fromisoformat(raw_ts.replace("Z", "+00:00")) if dt.tzinfo is None: - event_timestamp = dt.replace(tzinfo=timezone.UTC) + event_timestamp = dt.replace(tzinfo=datetime.UTC) else: - event_timestamp = dt.astimezone(timezone.UTC) + event_timestamp = dt.astimezone(datetime.UTC) except Exception: - event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=timezone.UTC) + event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=datetime.UTC) else: - event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=timezone.UTC) + event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=datetime.UTC) events.append( SpanEvent( diff --git 
a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceBackendHealth.test_health_check_healthy.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceBackendHealth.test_health_check_healthy.yaml new file mode 100644 index 0000000..b07cb9c --- /dev/null +++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceBackendHealth.test_health_check_healthy.yaml @@ -0,0 +1,86 @@ +interactions: +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/services + response: + body: + string: '{"message":"Not Found"}' + headers: + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:49 GMT + Via: + - 1.1 4067594c0a7ce97995ae63e2acbcd00c.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - YceIBot0wPJwaMF5E19qmTZvnINLl0oWKDK5qviWXw41oEob3dfiGA== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PPyjidoAMEbeg= + status: + code: 404 + message: Not Found +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A49.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A49.000Z%22+%7C+fields+service.name+%7C+limit+1000 + response: + body: + string: '{"message":"Not Found"}' + headers: + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:50 GMT + Via: + - 1.1 4067594c0a7ce97995ae63e2acbcd00c.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - wCxQNlEKSsS1_2rxhyBpKIxW9E41yTR_YDO5-fCr50199Sao2Ksk3Q== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PP4jqOoAMEJoQ= + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceListServices.test_list_services.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceListServices.test_list_services.yaml new file mode 100644 index 0000000..d595e96 --- /dev/null +++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceListServices.test_list_services.yaml @@ -0,0 +1,88 @@ +interactions: +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/services + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '1' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:49 GMT + Via: + - 1.1 9c0410411ac5b38f9c4855060e71f402.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - uG-_0MsXgNCFiCg9M0976FI2rBV37Uy4BRu_gJ3GwBmmjCzi3CGP2g== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PPyjidoAMEbeg= + status: + code: 404 + message: Not Found +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + 
- gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A50.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A50.000Z%22+%7C+fields+service.name+%7C+limit+1000 + response: + body: + string: '{"message":"Not Found"}' + headers: + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:50 GMT + Via: + - 1.1 9c0410411ac5b38f9c4855060e71f402.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - VcN-UzieYMFJvMkCAx2nGKhBILUr01kuYBsZzf2UKnFjq3y-sr4u7A== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PP4jqOoAMEJoQ= + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchSpans.test_search_spans_basic.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchSpans.test_search_spans_basic.yaml new file mode 100644 index 0000000..90b1ebc --- /dev/null +++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchSpans.test_search_spans_basic.yaml @@ -0,0 +1,90 @@ +interactions: +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/services + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '4' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:49 GMT + Via: + - 1.1 cebd9c97d7e407e60d33b870ba0bdf2a.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - uDeYWOrYjVUTtCQ338ci-BpPIxjuJ0zl8yojj82ILaXaq9WsVka1Hw== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PPyjidoAMEbeg= + status: + code: 404 + message: Not Found +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A53.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A53.000Z%22+%7C+fields+service.name+%7C+limit+1000 + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '3' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:50 GMT + Via: + - 1.1 cebd9c97d7e407e60d33b870ba0bdf2a.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - DP04Zk-5UfW5Wvya0zF75FxF5jsy0CqiFk9xsfZ1Kvv0wcQ3pvrqng== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PP4jqOoAMEJoQ= + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_get_trace_by_id.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_get_trace_by_id.yaml new file mode 100644 index 0000000..d8c8542 --- /dev/null +++ 
b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_get_trace_by_id.yaml @@ -0,0 +1,90 @@ +interactions: +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/services + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '3' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:49 GMT + Via: + - 1.1 5de6a028df4eb308aab30c20c3edc602.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - a4W6OLHhDmOWj3KQRzndlrP3BOHKQD0IAl66S9ZeD3cmpUoK2ZUBuA== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PPyjidoAMEbeg= + status: + code: 404 + message: Not Found +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A52.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A52.000Z%22+%7C+fields+service.name+%7C+limit+1000 + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '2' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:50 GMT + Via: + - 1.1 5de6a028df4eb308aab30c20c3edc602.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - TjnI_4hEyHV807HVjRds3MKFnrDRyeP_KJGoRIhI5xlqFbGvMGSSBQ== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PP4jqOoAMEJoQ= + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_search_traces_basic.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_search_traces_basic.yaml new file mode 100644 index 0000000..e4e6cc8 --- /dev/null +++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_search_traces_basic.yaml @@ -0,0 +1,90 @@ +interactions: +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/services + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '2' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:49 GMT + Via: + - 1.1 4f1634f0517a7f8935497f3909e4e8c6.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - zmn37kYUbKwybX6fcAnH0ycyV6dhbxmpd8ieVUnPFeI6KlqXNSh8EA== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PPyjidoAMEbeg= + status: + code: 404 + message: Not Found +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: 
https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A51.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A51.000Z%22+%7C+fields+service.name+%7C+limit+1000 + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '1' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:50 GMT + Via: + - 1.1 4f1634f0517a7f8935497f3909e4e8c6.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - M_iCOcr9wCvD60OhbVU_GaglrZOjCJfo6yos1Z2Cu_ayG_EqlBIB_Q== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PP4jqOoAMEJoQ= + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceServiceOperations.test_get_service_operations.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceServiceOperations.test_get_service_operations.yaml new file mode 100644 index 0000000..bacafc7 --- /dev/null +++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceServiceOperations.test_get_service_operations.yaml @@ -0,0 +1,90 @@ +interactions: +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/services + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '2' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:49 GMT + Via: + - 1.1 dbae6b2ce4cce2f7c1803757a782b3e6.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - Vm_7d9x0UeC-0a3Mo-SV5wfSFRHRS3Ki7s2mgdnbmI-fLQzYKq4ijQ== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PPyjidoAMEbeg= + status: + code: 404 + message: Not Found +- request: + body: '' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-type: + - application/json + host: + - abc12345.live.dynatrace.com + user-agent: + - python-httpx/0.28.1 + method: GET + uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A51.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A51.000Z%22+%7C+fields+service.name+%7C+limit+1000 + response: + body: + string: '{"message":"Not Found"}' + headers: + Age: + - '1' + Connection: + - keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + Date: + - Sat, 20 Dec 2025 08:32:50 GMT + Via: + - 1.1 dbae6b2ce4cce2f7c1803757a782b3e6.cloudfront.net (CloudFront) + X-Amz-Cf-Id: + - 76rYlSEcoybJwBFE9Xrjc2Y878EnO5eXC9zEtJpKHbnoIWgC2QtmMg== + X-Amz-Cf-Pop: + - HYD57-P4 + X-Cache: + - Error from cloudfront + apigw-requestid: + - V4PP4jqOoAMEJoQ= + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index bfb4767..cbd024f 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -281,3 +281,43 @@ async def traceloop_backend( ) async with backend: yield backend + + +# Dynatrace Backend Fixtures + + +@pytest.fixture +def dynatrace_url() -> str: + """Dynatrace backend URL - can be overridden via environment variable.""" + return os.getenv("DYNATRACE_URL", "https://abc12345.live.dynatrace.com") + + +@pytest.fixture +def 
dynatrace_api_key() -> str: + """ + Dynatrace API key - can be set via environment variable. + + For recording new cassettes, set DYNATRACE_API_KEY env var. + For replaying cassettes, the key is not needed (filtered from cassettes). + """ + return os.getenv("DYNATRACE_API_KEY", "test_api_key_for_replay") + + +@pytest.fixture +def dynatrace_config(dynatrace_url: str, dynatrace_api_key: str) -> BackendConfig: + """Dynatrace backend configuration.""" + return BackendConfig(type="dynatrace", url=TypeAdapter(HttpUrl).validate_python(dynatrace_url), api_key=dynatrace_api_key) + + +@pytest.fixture +async def dynatrace_backend(dynatrace_config: BackendConfig) -> AsyncGenerator: + """ + Dynatrace backend instance for integration testing. + + Uses async context manager to properly initialize and cleanup the backend. + """ + from opentelemetry_mcp.backends.dynatrace import DynatraceBackend + + backend = DynatraceBackend(url=str(dynatrace_config.url), api_key=dynatrace_config.api_key, timeout=dynatrace_config.timeout) + async with backend: + yield backend diff --git a/tests/integration/test_dynatrace_integration.py b/tests/integration/test_dynatrace_integration.py new file mode 100644 index 0000000..c78c0de --- /dev/null +++ b/tests/integration/test_dynatrace_integration.py @@ -0,0 +1,135 @@ +"""Integration tests for Dynatrace backend using VCR recordings.""" + +import pytest + +from opentelemetry_mcp.backends.dynatrace import DynatraceBackend +from opentelemetry_mcp.models import SpanQuery, TraceQuery + +# Mark all tests in this module as integration and vcr +pytestmark = [pytest.mark.integration, pytest.mark.vcr] + + +def _skip_if_placeholder_backend(backend: DynatraceBackend) -> None: + """Skip tests when no real Dynatrace URL is configured.""" + if "abc12345.live.dynatrace.com" in getattr(backend, "url", ""): + pytest.skip("DYNATRACE_URL not configured; skipping Dynatrace integration tests") + + +class TestDynatraceBackendHealth: + """Test Dynatrace backend health check.""" + + @pytest.mark.vcr + async def test_health_check_healthy(self, dynatrace_backend: DynatraceBackend) -> None: + """Test health check against a Dynatrace instance.""" + _skip_if_placeholder_backend(dynatrace_backend) + health = await dynatrace_backend.health_check() + + assert health.status in ("healthy", "unhealthy") + assert health.backend == "dynatrace" + assert health.url is not None + + +class TestDynatraceListServices: + """Test Dynatrace service listing.""" + + @pytest.mark.vcr + async def test_list_services(self, dynatrace_backend: DynatraceBackend) -> None: + """Test listing all services from Dynatrace.""" + _skip_if_placeholder_backend(dynatrace_backend) + services = await dynatrace_backend.list_services() + + assert isinstance(services, list) + for service in services: + assert isinstance(service, str) + assert len(service) > 0 + + +class TestDynatraceServiceOperations: + """Test Dynatrace service operations listing.""" + + @pytest.mark.vcr + async def test_get_service_operations(self, dynatrace_backend: DynatraceBackend) -> None: + """Test getting operations for a specific service.""" + _skip_if_placeholder_backend(dynatrace_backend) + services = await dynatrace_backend.list_services() + assert len(services) > 0, "No services available for testing" + + service_name = services[0] + operations = await dynatrace_backend.get_service_operations(service_name) + + assert isinstance(operations, list) + for op in operations: + assert isinstance(op, str) + assert len(op) > 0 + + +class TestDynatraceSearchTraces: + """Test 
Dynatrace trace search functionality.""" + + @pytest.mark.vcr + async def test_search_traces_basic(self, dynatrace_backend: DynatraceBackend) -> None: + """Test basic trace search with service name.""" + _skip_if_placeholder_backend(dynatrace_backend) + services = await dynatrace_backend.list_services() + assert len(services) > 0, "No services available for testing" + + service_name = services[0] + query = TraceQuery(service_name=service_name, limit=10) + + traces = await dynatrace_backend.search_traces(query) + + assert isinstance(traces, list) + for trace in traces: + assert trace.trace_id + assert trace.service_name == service_name + assert trace.spans + assert len(trace.spans) > 0 + assert trace.start_time + assert trace.duration_ms >= 0 + + @pytest.mark.vcr + async def test_get_trace_by_id(self, dynatrace_backend: DynatraceBackend) -> None: + """Test retrieving a specific trace by ID.""" + _skip_if_placeholder_backend(dynatrace_backend) + services = await dynatrace_backend.list_services() + assert len(services) > 0 + + service_name = services[0] + traces = await dynatrace_backend.search_traces(TraceQuery(service_name=service_name, limit=1)) + + assert len(traces) > 0, "No traces available for testing" + trace_id = traces[0].trace_id + + trace = await dynatrace_backend.get_trace(trace_id) + + assert trace.trace_id == trace_id + assert trace.spans + assert len(trace.spans) > 0 + assert trace.service_name + assert trace.start_time + assert trace.duration_ms >= 0 + + +class TestDynatraceSearchSpans: + """Test Dynatrace span search functionality.""" + + @pytest.mark.vcr + async def test_search_spans_basic(self, dynatrace_backend: DynatraceBackend) -> None: + """Test basic span search with service name.""" + _skip_if_placeholder_backend(dynatrace_backend) + services = await dynatrace_backend.list_services() + assert len(services) > 0 + + service_name = services[0] + query = SpanQuery(service_name=service_name, limit=20) + + spans = await dynatrace_backend.search_spans(query) + + assert isinstance(spans, list) + for span in spans: + assert span.span_id + assert span.trace_id + assert span.operation_name + assert span.service_name + assert span.start_time + assert span.duration_ms >= 0 From 603098ed0895d27c952a9b7f47fa0ff0bc742d6b Mon Sep 17 00:00:00 2001 From: Vyas Sidhi Date: Sat, 20 Dec 2025 14:38:48 +0530 Subject: [PATCH 22/22] Added Dynatrace integration test --- src/opentelemetry_mcp/backends/dynatrace.py | 113 +++++++++++++++----- tests/backends/test_dynatrace.py | 25 ++++- tests/integration/conftest.py | 52 ++++++++- 3 files changed, 160 insertions(+), 30 deletions(-) diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py index cb93908..ed16bb7 100644 --- a/src/opentelemetry_mcp/backends/dynatrace.py +++ b/src/opentelemetry_mcp/backends/dynatrace.py @@ -1,13 +1,14 @@ """Dynatrace backend implementation for querying OpenTelemetry traces.""" import logging -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Any, Literal from opentelemetry_mcp.attributes import HealthCheckResponse, SpanAttributes, SpanEvent from opentelemetry_mcp.backends.base import BaseBackend from opentelemetry_mcp.backends.filter_engine import FilterEngine from opentelemetry_mcp.models import ( + Filter, FilterOperator, SpanData, SpanQuery, @@ -17,6 +18,13 @@ logger = logging.getLogger(__name__) +# Use datetime.UTC alias when available (preferred by ruff UP017). 
+# Some Python runtimes may not expose datetime.UTC, so fall back to timezone.utc. +try: + _UTC = datetime.UTC # type: ignore[attr-defined] +except Exception: + _UTC = timezone.utc # noqa: UP017 + class DynatraceBackend(BaseBackend): """Dynatrace API backend implementation for OpenTelemetry traces. @@ -49,11 +57,13 @@ def get_supported_operators(self) -> set[FilterOperator]: FilterOperator.EQUALS, # Via query parameters } - def _build_dql_query(self, query: TraceQuery) -> str: + def _build_dql_query(self, query: TraceQuery, native_filters: list[Filter] | None = None) -> str: """Build a DQL query string from TraceQuery parameters. Args: query: Trace query parameters + native_filters: Optional list of native filters that should be applied + directly in the DQL query (e.g. service.name equals). Returns: Complete DQL query string with all filters applied @@ -64,12 +74,12 @@ def _build_dql_query(self, query: TraceQuery) -> str: if query.start_time: from_time = query.start_time.strftime("%Y-%m-%dT%H:%M:%S.000Z") else: - from_time = (datetime.now(datetime.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + from_time = (datetime.now(_UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") if query.end_time: to_time = query.end_time.strftime("%Y-%m-%dT%H:%M:%S.000Z") else: - to_time = datetime.now(datetime.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") + to_time = datetime.now(_UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") # Start with FETCH spans and time range using correct DQL syntax # Fixed: Use 'from:' and 'to:' parameters instead of FROM/TO keywords @@ -102,6 +112,41 @@ def _build_dql_query(self, query: TraceQuery) -> str: else: filter_clauses.append('request.is_failed == false') + # Apply any native filters provided (explicit Filter objects targeting native fields) + if native_filters: + for f in native_filters: + try: + # Service name equals + if f.field in ("service.name",) and f.operator == FilterOperator.EQUALS and f.value is not None: + escaped = str(f.value).replace('"', '\\"') + clause = f'service.name == "{escaped}"' + if clause not in filter_clauses: + filter_clauses.append(clause) + + # Operation name equals + elif f.field in ("operation_name", "operationName") and f.operator == FilterOperator.EQUALS and f.value is not None: + escaped = str(f.value).replace('"', '\\"') + clause = f'span.name == "{escaped}"' + if clause not in filter_clauses: + filter_clauses.append(clause) + + # Duration comparisons + elif f.field == "duration": + if f.operator == FilterOperator.GTE and f.value is not None: + filter_clauses.append(f"duration >= {int(f.value)}ms") + elif f.operator == FilterOperator.LTE and f.value is not None: + filter_clauses.append(f"duration <= {int(f.value)}ms") + + # Status -> map ERROR to request.is_failed + elif f.field == "status": + if f.operator == FilterOperator.EQUALS and str(f.value) == "ERROR": + filter_clauses.append('request.is_failed == true') + elif f.operator == FilterOperator.NOT_EQUALS and str(f.value) == "ERROR": + filter_clauses.append('request.is_failed == false') + except Exception: + # Ignore malformed filters + continue + # Combine all filter clauses with AND if filter_clauses: combined_filters = " AND ".join(filter_clauses) @@ -114,11 +159,12 @@ def _build_dql_query(self, query: TraceQuery) -> str: dql_query = " ".join(dql_parts) return dql_query - async def search_traces(self, query: TraceQuery) -> list[TraceData]: + async def search_traces(self, query: TraceQuery, native_filters: list[Filter] | None = None) -> list[TraceData]: """Search for traces 
using Dynatrace Trace API v2. Args: query: Trace query parameters + native_filters: Optional list of native Filter objects to apply directly in the DQL query Returns: List of matching traces @@ -136,11 +182,23 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: supported_fields = {"service.name"} # Service filtering via API supported_operators = self.get_supported_operators() - native_filters = [ + # Compute native-capable filters from the query + computed_native = [ f for f in all_filters if f.field in supported_fields and f.operator in supported_operators ] + + # If caller provided native filters explicitly, merge them (avoid duplicates) + if native_filters: + merged = list(native_filters) + for f in computed_native: + if f not in merged: + merged.append(f) + native_filters = merged + else: + native_filters = computed_native + client_filters = [f for f in all_filters if f not in native_filters] if client_filters: @@ -150,7 +208,8 @@ async def search_traces(self, query: TraceQuery) -> list[TraceData]: ) # Build the DQL query with all parameters incorporated - dql_query = self._build_dql_query(query) + # Include native_filters so explicitly provided native Filter objects are applied to DQL + dql_query = self._build_dql_query(query, native_filters=native_filters) logger.debug(f"Querying Dynatrace API with DQL: {dql_query}") @@ -238,6 +297,11 @@ async def search_spans(self, query: SpanQuery) -> list[SpanData]: f"{[(f.field, f.operator.value) for f in client_filters]}" ) + # When converting SpanQuery->TraceQuery above we only used simple fields for scoping. + # If the caller provided span-level native filters, ensure they are passed to trace search + # so we can leverage Dynatrace native filtering where supported. + # (This is handled in search_traces when native_filters are provided.) + # Convert SpanQuery to a minimal TraceQuery for Dynatrace API: # use it only to bound the search window and basic scoping # and rely on client-side filtering for span-level predicates. 
@@ -247,10 +311,11 @@ async def search_spans(self, query: SpanQuery) -> list[SpanData]: start_time=query.start_time, end_time=query.end_time, limit=query.limit * 2, # Fetch more traces to ensure we get enough spans + filters=[f for f in query.get_all_filters() if f.field in ("service.name",)], ) - # Search traces - traces = await self.search_traces(trace_query) + # Search traces and pass any native filters discovered for span-level queries + traces = await self.search_traces(trace_query, native_filters=[f for f in query.get_all_filters() if f.field in ("service.name",)]) # Flatten spans from all traces all_spans: list[SpanData] = [] @@ -325,8 +390,8 @@ async def list_services(self) -> list[str]: # Fallback: Extract services from trace search # Search for traces in the last 24 hours to discover services - from_time = (datetime.now(datetime.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") - to_time = datetime.now(datetime.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") + from_time = (datetime.now(_UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + to_time = datetime.now(_UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") # Fixed: Use correct DQL syntax with 'from:' and 'to:' parameters dql_query = f'fetch spans, from: "{from_time}", to: "{to_time}" | fields service.name | limit 1000' @@ -368,8 +433,8 @@ async def get_service_operations(self, service_name: str) -> list[str]: logger.debug(f"Getting operations for service: {service_name}") # Build DQL query to get operations for a specific service - from_time = (datetime.now(datetime.UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") - to_time = datetime.now(datetime.UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") + from_time = (datetime.now(_UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z") + to_time = datetime.now(_UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z") escaped_service = service_name.replace('"', '\\"') # Fixed: Use correct DQL syntax with 'from:' and 'to:' parameters @@ -466,9 +531,9 @@ def _parse_dynatrace_trace( start_times = [s.start_time for s in spans] end_times = [ datetime.fromtimestamp( - (s.start_time.replace(tzinfo=datetime.UTC) if s.start_time.tzinfo is None - else s.start_time.astimezone(datetime.UTC)).timestamp() + (s.duration_ms / 1000), - tz=datetime.UTC, + (s.start_time.replace(tzinfo=_UTC) if s.start_time.tzinfo is None + else s.start_time.astimezone(_UTC)).timestamp() + (s.duration_ms / 1000), + tz=_UTC, ) for s in spans ] @@ -525,16 +590,16 @@ def _parse_dynatrace_span( try: start_time = datetime.fromisoformat(start_time_ms.replace("Z", "+00:00")) if start_time.tzinfo is None: - start_time = start_time.replace(tzinfo=datetime.UTC) + start_time = start_time.replace(tzinfo=_UTC) else: - start_time = start_time.astimezone(datetime.UTC) + start_time = start_time.astimezone(_UTC) except Exception: # Fallback: treat as milliseconds since epoch start_time = datetime.fromtimestamp( - int(start_time_ms) / 1000, tz=datetime.UTC + int(start_time_ms) / 1000, tz=_UTC ) else: - start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=datetime.UTC) + start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=_UTC) duration_ms = span_data.get("duration", span_data.get("duration_ms", 0)) if isinstance(duration_ms, str): duration_ms = float(duration_ms) @@ -619,13 +684,13 @@ def _parse_dynatrace_span( try: dt = datetime.fromisoformat(raw_ts.replace("Z", "+00:00")) if dt.tzinfo is None: - event_timestamp = dt.replace(tzinfo=datetime.UTC) + event_timestamp = dt.replace(tzinfo=_UTC) else: - event_timestamp = 
+                        event_timestamp = dt.astimezone(_UTC)
                 except Exception:
-                    event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=datetime.UTC)
+                    event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=_UTC)
             else:
-                event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=datetime.UTC)
+                event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=_UTC)
 
             events.append(
                 SpanEvent(
diff --git a/tests/backends/test_dynatrace.py b/tests/backends/test_dynatrace.py
index 1694ba0..ed8baf6 100644
--- a/tests/backends/test_dynatrace.py
+++ b/tests/backends/test_dynatrace.py
@@ -6,7 +6,7 @@
 import pytest
 
 from opentelemetry_mcp.backends.dynatrace import DynatraceBackend
-from opentelemetry_mcp.models import FilterOperator, SpanQuery, TraceQuery
+from opentelemetry_mcp.models import Filter, FilterOperator, FilterType, SpanQuery, TraceQuery
 
 
 class TestDynatraceBackend:
@@ -84,6 +84,29 @@ async def test_search_traces_basic(self, backend: DynatraceBackend) -> None:
         assert len(traces) > 0
         assert all(trace.service_name == "test-service" for trace in traces)
 
+    @pytest.mark.asyncio
+    async def test_search_traces_with_native_filter(self, backend: DynatraceBackend) -> None:
+        """Test that an explicit native filter is included in the DQL query."""
+        mock_traces_response = {"traces": []}
+
+        with patch.object(backend.client, "get") as mock_get:
+            mock_response = MagicMock()
+            mock_response.json.return_value = mock_traces_response
+            mock_response.raise_for_status = MagicMock()
+            mock_get.return_value = mock_response
+
+            # Explicit native filter that should be applied to the DQL query
+            q = TraceQuery(filters=[Filter(field="service.name", operator=FilterOperator.EQUALS, value="svc", value_type=FilterType.STRING)], limit=5)
+
+            await backend.search_traces(q)
+
+            # Inspect the call to client.get and ensure the query param contains the service filter
+            assert mock_get.call_count >= 1
+            _, called_kwargs = mock_get.call_args
+            params = called_kwargs.get("params") or {}
+            dql = params.get("query", "")
+            assert 'service.name == "svc"' in dql
+
     @pytest.mark.asyncio
     async def test_get_trace(self, backend: DynatraceBackend) -> None:
         """Test getting a specific trace by ID."""
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index cbd024f..b28318f 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -10,6 +10,7 @@
 from pydantic import HttpUrl, TypeAdapter
 from vcr.request import Request
 
+from opentelemetry_mcp.backends.dynatrace import DynatraceBackend
 from opentelemetry_mcp.backends.jaeger import JaegerBackend
 from opentelemetry_mcp.backends.tempo import TempoBackend
 from opentelemetry_mcp.backends.traceloop import TraceloopBackend
@@ -53,6 +54,37 @@ def filter_tempo_timestamps(request: Request) -> Request:
     return request
 
 
+def filter_dynatrace_timestamps(request: Request) -> Request:
+    """Remove timestamp ranges from Dynatrace DQL queries for better matching.
+
+    Dynatrace DQL queries embed literal `from: "..."` and `to: "..."` timestamps
+    inside the `query` parameter, and those values change on every run. Stripping
+    them lets recorded cassettes match across runs.
+    """
+    try:
+        url = urlparse(request.uri)
+        if "/api/v2/ql/query:execute" in url.path:
+            params = parse_qs(url.query)
+            if "query" in params:
+                query_str = params["query"][0]
+                # Remove the literal timestamps in from: "..." and to: "..."
+                import re
+
+                query_str = re.sub(r'from:\s*"[^"]*"', 'from: "FILTERED"', query_str)
+                query_str = re.sub(r'to:\s*"[^"]*"', 'to: "FILTERED"', query_str)
+                params["query"] = [query_str]
+
+                from urllib.parse import urlencode
+
+                new_query = urlencode(params, doseq=True)
+                new_url = url._replace(query=new_query)
+                request.uri = new_url.geturl()
+    except Exception:
+        pass
+
+    return request
+
+
 def filter_traceloop_timestamps(request: Request) -> Request:
     """Remove timestamp fields from Traceloop request body for better matching.
 
@@ -174,6 +206,10 @@ def vcr_config(request: pytest.FixtureRequest) -> dict[str, Any]:
         config["match_on"] = ["method", "path", "body"]
         config["before_record_request"] = filter_traceloop_timestamps
 
+    # For Dynatrace tests, remove literal DQL timestamps from query parameter
+    if "dynatrace" in request.node.nodeid.lower():
+        config["before_record_request"] = filter_dynatrace_timestamps
+
     return config
 
 
@@ -306,18 +342,24 @@ def dynatrace_api_key() -> str:
 @pytest.fixture
 def dynatrace_config(dynatrace_url: str, dynatrace_api_key: str) -> BackendConfig:
     """Dynatrace backend configuration."""
-    return BackendConfig(type="dynatrace", url=TypeAdapter(HttpUrl).validate_python(dynatrace_url), api_key=dynatrace_api_key)
+    return BackendConfig(
+        type="dynatrace",
+        url=TypeAdapter(HttpUrl).validate_python(dynatrace_url),
+        api_key=dynatrace_api_key,
+    )
 
 
 @pytest.fixture
-async def dynatrace_backend(dynatrace_config: BackendConfig) -> AsyncGenerator:
+async def dynatrace_backend(dynatrace_config: BackendConfig) -> AsyncGenerator[DynatraceBackend, None]:
    """
    Dynatrace backend instance for integration testing.
 
    Uses async context manager to properly initialize and cleanup the backend.
    """
-    from opentelemetry_mcp.backends.dynatrace import DynatraceBackend
-
-    backend = DynatraceBackend(url=str(dynatrace_config.url), api_key=dynatrace_config.api_key, timeout=dynatrace_config.timeout)
+    backend = DynatraceBackend(
+        url=str(dynatrace_config.url),
+        api_key=dynatrace_config.api_key,
+        timeout=dynatrace_config.timeout,
+    )
     async with backend:
         yield backend
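The new `test_search_traces_with_native_filter` only asserts that the DQL sent to `client.get` contains the fragment `service.name == "svc"`. A minimal sketch of how an equality filter could be rendered into that fragment; the `render_equals_filter` helper is hypothetical, the real DQL construction lives inside `DynatraceBackend.search_traces` and is not shown in this patch, and the quote escaping mirrors `escaped_service` in `get_service_operations`:

```python
# Hypothetical helper, for illustration only; not the backend's actual API.
def render_equals_filter(field: str, value: str) -> str:
    # Escape embedded double quotes so the value stays a valid DQL string literal
    escaped = value.replace('"', '\\"')
    return f'{field} == "{escaped}"'


clause = render_equals_filter("service.name", "svc")
dql = f"fetch spans | filter {clause} | limit 5"
print(dql)
# fetch spans | filter service.name == "svc" | limit 5
assert 'service.name == "svc"' in dql  # the same substring the test asserts on
```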
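For the cassette matching, `filter_dynatrace_timestamps` rewrites the `from:`/`to:` literals inside the recorded `query` parameter before VCR compares requests. A self-contained sketch of that normalization, using a made-up DQL string shaped like the queries `list_services` builds:

```python
import re

# Example query in the shape produced by list_services(); the timestamps are
# what would otherwise differ between recording and replay.
dql = (
    'fetch spans, from: "2025-12-06T00:00:00.000Z", '
    'to: "2025-12-07T00:00:00.000Z" | fields service.name | limit 1000'
)

# The same substitutions the conftest filter applies before VCR matching.
dql = re.sub(r'from:\s*"[^"]*"', 'from: "FILTERED"', dql)
dql = re.sub(r'to:\s*"[^"]*"', 'to: "FILTERED"', dql)

print(dql)
# fetch spans, from: "FILTERED", to: "FILTERED" | fields service.name | limit 1000
```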