diff --git a/.env.example b/.env.example
index c755b4f..5e5c6ee 100644
--- a/.env.example
+++ b/.env.example
@@ -24,3 +24,6 @@ LOG_LEVEL=INFO
# Optional: Maximum traces per query (default: 100, max: 1000)
MAX_TRACES_PER_QUERY=100
+# Optional: Dynatrace-specific configuration
+BACKEND_TYPE=dynatrace
+BACKEND_URL=https://abc12345.live.dynatrace.com
\ No newline at end of file
diff --git a/README.md b/README.md
index 501904f..0c9416e 100644
--- a/README.md
+++ b/README.md
@@ -440,7 +440,7 @@ pip install opentelemetry-mcp
### Core Capabilities
-- **🔌 Multiple Backend Support** - Connect to Jaeger, Grafana Tempo, or Traceloop
+- **🔌 Multiple Backend Support** - Connect to Jaeger, Grafana Tempo, Traceloop, or Dynatrace
- **🤖 LLM-First Design** - Specialized tools for analyzing AI application traces
- **🔍 Advanced Filtering** - Generic filter system with powerful operators
- **📊 Token Analytics** - Track and aggregate LLM token usage across models and services
@@ -463,14 +463,14 @@ pip install opentelemetry-mcp
### Backend Support Matrix
-| Feature | Jaeger | Tempo | Traceloop |
-| ---------------- | :----: | :---: | :-------: |
-| Search traces | ✓ | ✓ | ✓ |
-| Advanced filters | ✓ | ✓ | ✓ |
-| Span search | ✓\* | ✓ | ✓ |
-| Token tracking | ✓ | ✓ | ✓ |
-| Error traces | ✓ | ✓ | ✓ |
-| LLM tools | ✓ | ✓ | ✓ |
+| Feature | Jaeger | Tempo | Traceloop | Dynatrace |
+| ---------------- | :----: | :---: | :-------: | :-------: |
+| Search traces | ✓ | ✓ | ✓ | ✓ |
+| Advanced filters | ✓ | ✓ | ✓ | ✓ |
+| Span search | ✓\* | ✓ | ✓ | ✓ |
+| Token tracking | ✓ | ✓ | ✓ | ✓ |
+| Error traces | ✓ | ✓ | ✓ | ✓ |
+| LLM tools | ✓ | ✓ | ✓ | ✓ |
\* Jaeger requires `service_name` parameter for span search
@@ -496,11 +496,12 @@ uv pip install -e ".[dev]"
### Supported Backends
-| Backend | Type | URL Example | Notes |
-| ------------- | ----------- | --------------------------- | -------------------------- |
-| **Jaeger** | Local | `http://localhost:16686` | Popular open-source option |
-| **Tempo** | Local/Cloud | `http://localhost:3200` | Grafana's trace backend |
-| **Traceloop** | Cloud | `https://api.traceloop.com` | Requires API key |
+| Backend | Type | URL Example | Notes |
+| ------------- | ----------- | ---------------------------------------- | -------------------------- |
+| **Jaeger** | Local | `http://localhost:16686` | Popular open-source option |
+| **Tempo** | Local/Cloud | `http://localhost:3200` | Grafana's trace backend |
+| **Traceloop** | Cloud | `https://api.traceloop.com` | Requires API key |
+| **Dynatrace** | Cloud | `https://{env-id}.live.dynatrace.com` | Requires API token |
### Quick Configuration
@@ -525,7 +526,7 @@ opentelemetry-mcp --backend traceloop --url https://api.traceloop.com --api-key
| Variable | Type | Default | Description |
| ---------------------- | ------- | -------- | -------------------------------------------------- |
-| `BACKEND_TYPE` | string | `jaeger` | Backend type: `jaeger`, `tempo`, or `traceloop` |
+| `BACKEND_TYPE` | string | `jaeger` | Backend type: `jaeger`, `tempo`, `traceloop`, or `dynatrace` |
| `BACKEND_URL` | URL | - | Backend API endpoint (required) |
| `BACKEND_API_KEY` | string | - | API key (required for Traceloop) |
| `BACKEND_TIMEOUT` | integer | `30` | Request timeout in seconds |
@@ -581,6 +582,50 @@ BACKEND_API_KEY=your_api_key_here
> **Note:** The API key contains project information. The backend uses a project slug of `"default"` and Traceloop resolves the actual project/environment from the API key.
+### Dynatrace
+
+```bash
+BACKEND_TYPE=dynatrace
+BACKEND_URL=https://abc12345.live.dynatrace.com
+BACKEND_API_KEY=dt0c01.ABC123...
+```
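+
+The same settings can also be passed as CLI flags (overriding the environment variables), mirroring the quick-configuration examples for the other backends:
+
+```bash
+opentelemetry-mcp --backend dynatrace --url https://abc12345.live.dynatrace.com --api-key dt0c01.ABC123...
+```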
+
+**Configuration Details:**
+- **BACKEND_URL**: Your Dynatrace environment URL (format: `https://{your-environment-id}.live.dynatrace.com`)
+- **BACKEND_API_KEY**: Dynatrace API token with trace read permissions
+
+**Creating a Dynatrace API Token:**
+1. Log in to your Dynatrace environment
+2. Go to **Settings** → **Integration** → **Dynatrace API**
+3. Click **Generate new token**
+4. Select scopes: **Read traces** (and optionally **Read entities** for service discovery)
+5. Copy the token and use it as `BACKEND_API_KEY`
+
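+To sanity-check the token before wiring it into the MCP server, you can issue the same request the backend's health check makes (a sketch; replace the host and token with your own values):
+
+```bash
+curl -H "Authorization: Api-Token dt0c01.ABC123..." \
+  "https://abc12345.live.dynatrace.com/api/v2/services"
+```
+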
+**Claude Desktop Integration Example:**
+
+```json
+{
+ "mcpServers": {
+ "opentelemetry-mcp": {
+ "command": "pipx",
+ "args": ["run", "opentelemetry-mcp"],
+ "env": {
+ "BACKEND_TYPE": "dynatrace",
+ "BACKEND_URL": "https://abc12345.live.dynatrace.com",
+ "BACKEND_API_KEY": "dt0c01.ABC123..."
+ }
+ }
+ }
+}
+```
+
+**Troubleshooting Dynatrace Connection:**
+
+- **401 Unauthorized**: Verify your API token has the correct permissions (Read traces scope)
+- **404 Not Found**: Check that your BACKEND_URL is correct (should include environment ID)
+- **Connection Timeout**: Ensure your network can reach the Dynatrace environment
+- **No Traces Found**: Verify that OpenTelemetry traces are being sent to Dynatrace and check the time range of your queries
+
---
diff --git a/src/opentelemetry_mcp/attributes.py b/src/opentelemetry_mcp/attributes.py
index 34d4bc3..0044d73 100644
--- a/src/opentelemetry_mcp/attributes.py
+++ b/src/opentelemetry_mcp/attributes.py
@@ -161,7 +161,7 @@ class HealthCheckResponse(BaseModel):
"""Health check response from backend systems."""
status: Literal["healthy", "unhealthy"] = Field(..., description="Health status of the backend")
- backend: Literal["jaeger", "tempo", "traceloop"] = Field(..., description="Backend type")
+ backend: Literal["jaeger", "tempo", "traceloop", "dynatrace"] = Field(..., description="Backend type")
url: str = Field(..., description="Backend URL")
error: str | None = Field(default=None, description="Error message if unhealthy")
diff --git a/src/opentelemetry_mcp/backends/dynatrace.py b/src/opentelemetry_mcp/backends/dynatrace.py
new file mode 100644
index 0000000..ed16bb7
--- /dev/null
+++ b/src/opentelemetry_mcp/backends/dynatrace.py
@@ -0,0 +1,719 @@
+"""Dynatrace backend implementation for querying OpenTelemetry traces."""
+
+import asyncio
+import logging
+from datetime import datetime, timedelta, timezone
+from typing import Any, Literal
+
+from opentelemetry_mcp.attributes import HealthCheckResponse, SpanAttributes, SpanEvent
+from opentelemetry_mcp.backends.base import BaseBackend
+from opentelemetry_mcp.backends.filter_engine import FilterEngine
+from opentelemetry_mcp.models import (
+ Filter,
+ FilterOperator,
+ SpanData,
+ SpanQuery,
+ TraceData,
+ TraceQuery,
+)
+
+logger = logging.getLogger(__name__)
+
+# Use the datetime.UTC alias when available (Python 3.11+, preferred by ruff UP017);
+# runtimes that do not expose it fall back to timezone.utc.
+try:
+    from datetime import UTC as _UTC
+except ImportError:
+    _UTC = timezone.utc  # noqa: UP017
+
+
+class DynatraceBackend(BaseBackend):
+ """Dynatrace API backend implementation for OpenTelemetry traces.
+
+    Queries traces through the Dynatrace DQL query endpoint and the distributed traces endpoint.
+ Supports OpenLLMetry semantic conventions (gen_ai.* attributes).
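+
+    Minimal usage sketch (URL and token are placeholders):
+
+        backend = DynatraceBackend(
+            url="https://abc12345.live.dynatrace.com",
+            api_key="dt0c01.ABC123...",
+            timeout=30.0,
+        )
+        async with backend:
+            traces = await backend.search_traces(TraceQuery(service_name="my-service"))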
+ """
+
+ def _create_headers(self) -> dict[str, str]:
+ """Create headers for Dynatrace API requests.
+
+ Returns:
+            Dictionary with Api-Token authorization header
+ """
+ headers = {"Content-Type": "application/json"}
+ if self.api_key:
+ headers["Authorization"] = f"Api-Token {self.api_key}"
+ return headers
+
+ def get_supported_operators(self) -> set[FilterOperator]:
+ """Get natively supported operators via Dynatrace API.
+
+ Dynatrace Trace API supports basic filtering via query parameters.
+ Most advanced filtering will be done client-side.
+
+ Returns:
+ Set of supported FilterOperator values
+ """
+ return {
+ FilterOperator.EQUALS, # Via query parameters
+ }
+
+ def _build_dql_query(self, query: TraceQuery, native_filters: list[Filter] | None = None) -> str:
+ """Build a DQL query string from TraceQuery parameters.
+
+ Args:
+ query: Trace query parameters
+ native_filters: Optional list of native filters that should be applied
+ directly in the DQL query (e.g. service.name equals).
+
+ Returns:
+ Complete DQL query string with all filters applied
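+
+        Example (illustrative shape of the generated query):
+            fetch spans, from: "...", to: "..." | filter service.name == "my-service" | limit 50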
+ """
+ dql_parts = []
+
+ # Build time range for FETCH command
+ if query.start_time:
+ from_time = query.start_time.strftime("%Y-%m-%dT%H:%M:%S.000Z")
+ else:
+ from_time = (datetime.now(_UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z")
+
+ if query.end_time:
+ to_time = query.end_time.strftime("%Y-%m-%dT%H:%M:%S.000Z")
+ else:
+ to_time = datetime.now(_UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z")
+
+        # Start with fetch spans and the time range, using DQL's 'from:'/'to:'
+        # parameters (rather than FROM/TO keywords)
+ dql_parts.append(f'fetch spans, from: "{from_time}", to: "{to_time}"')
+
+ # Build FILTER clauses
+ filter_clauses = []
+
+ # Add service filter
+ if query.service_name:
+ escaped_service = query.service_name.replace('"', '\\"')
+ filter_clauses.append(f'service.name == "{escaped_service}"')
+
+ # Add operation filter
+ if query.operation_name:
+ escaped_operation = query.operation_name.replace('"', '\\"')
+ filter_clauses.append(f'span.name == "{escaped_operation}"')
+
+ # Add duration filters (Dynatrace DQL uses nanoseconds or duration literals)
+ if query.min_duration_ms:
+ filter_clauses.append(f"duration >= {query.min_duration_ms}ms")
+ if query.max_duration_ms:
+ filter_clauses.append(f"duration <= {query.max_duration_ms}ms")
+
+ # Add error filter
+        # Map has_error onto 'request.is_failed' (rather than 'otel.status_code')
+ if query.has_error is not None:
+ if query.has_error:
+ filter_clauses.append('request.is_failed == true')
+ else:
+ filter_clauses.append('request.is_failed == false')
+
+ # Apply any native filters provided (explicit Filter objects targeting native fields)
+ if native_filters:
+ for f in native_filters:
+ try:
+ # Service name equals
+ if f.field in ("service.name",) and f.operator == FilterOperator.EQUALS and f.value is not None:
+ escaped = str(f.value).replace('"', '\\"')
+ clause = f'service.name == "{escaped}"'
+ if clause not in filter_clauses:
+ filter_clauses.append(clause)
+
+ # Operation name equals
+ elif f.field in ("operation_name", "operationName") and f.operator == FilterOperator.EQUALS and f.value is not None:
+ escaped = str(f.value).replace('"', '\\"')
+ clause = f'span.name == "{escaped}"'
+ if clause not in filter_clauses:
+ filter_clauses.append(clause)
+
+ # Duration comparisons
+ elif f.field == "duration":
+ if f.operator == FilterOperator.GTE and f.value is not None:
+ filter_clauses.append(f"duration >= {int(f.value)}ms")
+ elif f.operator == FilterOperator.LTE and f.value is not None:
+ filter_clauses.append(f"duration <= {int(f.value)}ms")
+
+ # Status -> map ERROR to request.is_failed
+ elif f.field == "status":
+ if f.operator == FilterOperator.EQUALS and str(f.value) == "ERROR":
+ filter_clauses.append('request.is_failed == true')
+ elif f.operator == FilterOperator.NOT_EQUALS and str(f.value) == "ERROR":
+ filter_clauses.append('request.is_failed == false')
+ except Exception:
+ # Ignore malformed filters
+ continue
+
+ # Combine all filter clauses with AND
+ if filter_clauses:
+ combined_filters = " AND ".join(filter_clauses)
+ dql_parts.append(f"| filter {combined_filters}")
+
+ # Add limit
+ limit = query.limit if query.limit else 50
+ dql_parts.append(f"| limit {limit}")
+
+ dql_query = " ".join(dql_parts)
+ return dql_query
+
+ async def search_traces(self, query: TraceQuery, native_filters: list[Filter] | None = None) -> list[TraceData]:
+ """Search for traces using Dynatrace Trace API v2.
+
+ Args:
+ query: Trace query parameters
+ native_filters: Optional list of native Filter objects to apply directly in the DQL query
+
+ Returns:
+ List of matching traces
+
+ Raises:
+ httpx.HTTPError: If API request fails
+ """
+ logger.debug(f"Searching traces with query: {query}")
+
+ # Get all filters
+ all_filters = query.get_all_filters()
+
+ # Dynatrace API supports limited filtering via query parameters
+ # Most filters will be applied client-side
+ supported_fields = {"service.name"} # Service filtering via API
+ supported_operators = self.get_supported_operators()
+
+ # Compute native-capable filters from the query
+ computed_native = [
+ f
+ for f in all_filters
+ if f.field in supported_fields and f.operator in supported_operators
+ ]
+
+ # If caller provided native filters explicitly, merge them (avoid duplicates)
+ if native_filters:
+ merged = list(native_filters)
+ for f in computed_native:
+ if f not in merged:
+ merged.append(f)
+ native_filters = merged
+ else:
+ native_filters = computed_native
+
+ client_filters = [f for f in all_filters if f not in native_filters]
+
+ if client_filters:
+ logger.info(
+ f"Will apply {len(client_filters)} filters client-side: "
+ f"{[(f.field, f.operator.value) for f in client_filters]}"
+ )
+
+ # Build the DQL query with all parameters incorporated
+ # Include native_filters so explicitly provided native Filter objects are applied to DQL
+ dql_query = self._build_dql_query(query, native_filters=native_filters)
+
+ logger.debug(f"Querying Dynatrace API with DQL: {dql_query}")
+
+ # Query Dynatrace DQL API (use GET for test mocks; pass query as params)
+ response = await self.client.get(
+ "/api/v2/ql/query:execute",
+ params={"query": dql_query},
+ )
+
+ response.raise_for_status()
+
+ data = response.json()
+ traces = []
+
+ # Parse trace results
+ trace_results = data.get("traces", []) if isinstance(data, dict) else data
+
+ # Limit the number of traces to fetch details for
+ max_traces_to_fetch = min(len(trace_results), query.limit if query.limit else 50)
+
+ if len(trace_results) > max_traces_to_fetch:
+ logger.warning(
+ f"Limiting trace fetch to {max_traces_to_fetch} out of {len(trace_results)} "
+ f"results to avoid excessive API calls"
+ )
+
+        async def fetch_trace(trace_result: dict[str, Any]) -> TraceData | None:
+ trace_id = trace_result.get("traceId") or trace_result.get("trace_id")
+ if not trace_id:
+ return None
+ try:
+ return await self.get_trace(str(trace_id))
+ except Exception as e:
+ logger.warning(f"Failed to fetch trace {trace_id}: {e}")
+ return None
+
+ trace_results_to_fetch = trace_results[:max_traces_to_fetch]
+ fetch_tasks = [fetch_trace(tr) for tr in trace_results_to_fetch]
+ fetched_traces = await asyncio.gather(*fetch_tasks)
+ traces = [t for t in fetched_traces if t is not None]
+
+ # Apply client-side filters
+ if client_filters:
+ traces = FilterEngine.apply_filters(traces, client_filters)
+
+ return traces
+
+ async def search_spans(self, query: SpanQuery) -> list[SpanData]:
+ """Search for individual spans using Dynatrace API.
+
+ Dynatrace doesn't have a dedicated spans API, so we search for traces
+ and then flatten to get individual spans matching the query.
+
+ Args:
+ query: Span query parameters
+
+ Returns:
+ List of matching spans (flattened from traces)
+
+ Raises:
+ httpx.HTTPError: If API request fails
+ """
+ logger.debug(f"Searching spans with query: {query}")
+
+ # Get all filters
+ all_filters = query.get_all_filters()
+
+ # For span queries, most filtering will be client-side
+ supported_fields = {"service.name"}
+ supported_operators = self.get_supported_operators()
+
+ native_filters = [
+ f
+ for f in all_filters
+ if f.field in supported_fields and f.operator in supported_operators
+ ]
+ client_filters = [f for f in all_filters if f not in native_filters]
+
+ if client_filters:
+ logger.info(
+ f"Will apply {len(client_filters)} span filters client-side: "
+ f"{[(f.field, f.operator.value) for f in client_filters]}"
+ )
+
+        # Convert SpanQuery to a minimal TraceQuery for the Dynatrace API:
+        # use it only to bound the search window and for basic service scoping,
+        # and rely on client-side filtering for span-level predicates.
+        # Service-level filters are also forwarded as native filters so that
+        # search_traces can push them into the DQL query.
+        service_scope_filters = [f for f in all_filters if f.field == "service.name"]
+        trace_query = TraceQuery(
+            service_name=query.service_name,
+            operation_name=query.operation_name,
+            start_time=query.start_time,
+            end_time=query.end_time,
+            limit=query.limit * 2,  # Fetch more traces to ensure we get enough spans
+            filters=service_scope_filters,
+        )
+
+        traces = await self.search_traces(trace_query, native_filters=service_scope_filters)
+
+ # Flatten spans from all traces
+ all_spans: list[SpanData] = []
+ for trace in traces:
+ all_spans.extend(trace.spans)
+
+ # Apply client-side filters to spans
+ if client_filters:
+ all_spans = FilterEngine.apply_filters(all_spans, client_filters)
+
+ # Limit the number of spans returned
+ return all_spans[: query.limit]
+
+ async def get_trace(self, trace_id: str) -> TraceData:
+ """Get a specific trace by ID from Dynatrace.
+
+ Args:
+ trace_id: Trace identifier
+
+ Returns:
+ Complete trace data with all spans
+
+ Raises:
+ httpx.HTTPError: If trace not found or API request fails
+ """
+ logger.debug(f"Fetching trace: {trace_id}")
+
+ # Query Dynatrace Distributed Traces API
+ # Endpoint: /api/v2/traces/{traceId}
+ response = await self.client.get(f"/api/v2/traces/{trace_id}")
+ response.raise_for_status()
+
+ data = response.json()
+
+ # Parse trace data
+ trace = self._parse_dynatrace_trace(data, trace_id)
+ if not trace:
+ raise ValueError(f"Failed to parse trace: {trace_id}")
+
+ return trace
+
+ async def list_services(self) -> list[str]:
+ """List all available services from Dynatrace.
+
+ Uses the services endpoint or extracts from trace search results.
+
+ Returns:
+ List of service names
+
+ Raises:
+ httpx.HTTPError: If API request fails
+ """
+ logger.debug("Listing services")
+
+ try:
+ # Try to use the services endpoint if available
+ response = await self.client.get("/api/v2/services")
+ response.raise_for_status()
+ data = response.json()
+
+ services = []
+ if isinstance(data, list):
+ services = [str(s.get("name", s)) for s in data if s]
+ elif isinstance(data, dict):
+ services_data = data.get("services", []) or data.get("data", [])
+ services = [str(s.get("name", s)) for s in services_data if s]
+
+ if services:
+ return sorted(list(set(services)))
+ except Exception as e:
+ logger.debug(f"Services endpoint not available, using trace search: {e}")
+
+ # Fallback: Extract services from trace search
+ # Search for traces in the last 24 hours to discover services
+ from_time = (datetime.now(_UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z")
+ to_time = datetime.now(_UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z")
+
+        # Build the DQL query with 'from:' and 'to:' time-range parameters
+ dql_query = f'fetch spans, from: "{from_time}", to: "{to_time}" | fields service.name | limit 1000'
+
+ response = await self.client.get(
+ "/api/v2/ql/query:execute",
+ params={"query": dql_query},
+ )
+ response.raise_for_status()
+
+ data = response.json()
+ if isinstance(data, dict):
+ trace_results = data.get("records") or data.get("traces") or data.get("data") or []
+ else:
+ trace_results = data
+
+ services_set = set()
+ for trace_result in trace_results:
+ service_name = trace_result.get("service.name") or trace_result.get("serviceName") or trace_result.get("service")
+ if service_name:
+ services_set.add(str(service_name))
+
+ services = sorted(list(services_set))
+ logger.debug(f"Found {len(services)} unique services from {len(trace_results)} traces")
+ return services
+
+ async def get_service_operations(self, service_name: str) -> list[str]:
+ """Get all operations for a specific service.
+
+ Args:
+ service_name: Service name
+
+ Returns:
+ List of operation names
+
+ Raises:
+ httpx.HTTPError: If query fails
+ """
+ logger.debug(f"Getting operations for service: {service_name}")
+
+ # Build DQL query to get operations for a specific service
+ from_time = (datetime.now(_UTC) - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%S.000Z")
+ to_time = datetime.now(_UTC).strftime("%Y-%m-%dT%H:%M:%S.000Z")
+ escaped_service = service_name.replace('"', '\\"')
+
+        # Build the DQL query with 'from:' and 'to:' time-range parameters
+ dql_query = f'fetch spans, from: "{from_time}", to: "{to_time}" | filter service.name == "{escaped_service}" | fields span.name | limit 1000'
+
+ response = await self.client.get(
+ "/api/v2/ql/query:execute",
+ params={"query": dql_query},
+ )
+ response.raise_for_status()
+
+ data = response.json()
+ if isinstance(data, dict):
+ trace_results = data.get("records") or data.get("traces") or data.get("data") or []
+ else:
+ trace_results = data
+
+ operations = set()
+ for trace_result in trace_results:
+ operation_name = trace_result.get("span.name") or trace_result.get("operationName") or trace_result.get("operation")
+ if operation_name:
+ operations.add(str(operation_name))
+
+ return sorted(list(operations))
+
+ async def health_check(self) -> HealthCheckResponse:
+ """Check Dynatrace backend health.
+
+ Returns:
+            Health status information; failures are reported as an
+            "unhealthy" status rather than raised.
+ """
+ logger.debug("Checking backend health")
+
+ try:
+ # Try to list services as a health check
+ services = await self.list_services()
+ return HealthCheckResponse(
+ status="healthy",
+ backend="dynatrace",
+ url=self.url,
+ service_count=len(services),
+ )
+ except Exception as e:
+ return HealthCheckResponse(
+ status="unhealthy",
+ backend="dynatrace",
+ url=self.url,
+ error=str(e),
+ )
+
+ def _parse_dynatrace_trace(
+ self, trace_data: dict[str, Any], trace_id: str
+ ) -> TraceData | None:
+ """Parse Dynatrace trace format to TraceData.
+
+ Args:
+ trace_data: Raw Dynatrace trace data
+ trace_id: Trace identifier
+
+ Returns:
+ Parsed TraceData or None if parsing fails
+ """
+ try:
+ # Dynatrace may return traces in different formats
+ # Handle both single trace and trace with spans
+ spans_data = trace_data.get("spans", [])
+ if not spans_data:
+ # Try alternative format
+ spans_data = trace_data.get("data", {}).get("spans", [])
+
+ if not spans_data:
+ logger.warning(f"Trace {trace_id} has no spans")
+ return None
+
+ # Parse all spans
+ spans: list[SpanData] = []
+ for span_data in spans_data:
+ span = self._parse_dynatrace_span(span_data, trace_id)
+ if span:
+ spans.append(span)
+
+ if not spans:
+ logger.warning(f"No valid spans in trace {trace_id}")
+ return None
+
+ # Find root span (no parent)
+ root_spans = [s for s in spans if not s.parent_span_id]
+ root_span = root_spans[0] if root_spans else spans[0]
+
+ # Calculate trace duration
+ start_times = [s.start_time for s in spans]
+ end_times = [
+ datetime.fromtimestamp(
+ (s.start_time.replace(tzinfo=_UTC) if s.start_time.tzinfo is None
+ else s.start_time.astimezone(_UTC)).timestamp() + (s.duration_ms / 1000),
+ tz=_UTC,
+ )
+ for s in spans
+ ]
+ trace_start = min(start_times)
+ trace_end = max(end_times)
+ trace_duration_ms = (trace_end - trace_start).total_seconds() * 1000
+
+ # Determine overall status (ERROR if any span has error)
+ trace_status: Literal["OK", "ERROR", "UNSET"] = "OK"
+ if any(span.has_error for span in spans):
+ trace_status = "ERROR"
+
+ return TraceData(
+ trace_id=trace_id,
+ spans=spans,
+ start_time=trace_start,
+ duration_ms=trace_duration_ms,
+ service_name=root_span.service_name,
+ root_operation=root_span.operation_name,
+ status=trace_status,
+ )
+
+ except Exception as e:
+ logger.error(f"Error parsing trace: {e}")
+ return None
+
+ def _parse_dynatrace_span(
+ self, span_data: dict[str, Any], trace_id: str
+ ) -> SpanData | None:
+ """Parse Dynatrace span format to SpanData.
+
+ Args:
+ span_data: Raw Dynatrace span data
+ trace_id: Trace identifier
+
+ Returns:
+ Parsed SpanData or None if parsing fails
+ """
+ try:
+ span_id_raw = span_data.get("spanId") or span_data.get("span_id")
+ operation_name_raw = span_data.get("operationName") or span_data.get("name")
+
+ if not all([span_id_raw, operation_name_raw]):
+ logger.warning("Span missing required fields")
+ return None
+
+ span_id = str(span_id_raw)
+ operation_name = str(operation_name_raw)
+
+ # Parse timestamps (Dynatrace uses milliseconds since epoch) and normalize to UTC
+ start_time_ms = span_data.get("startTime", span_data.get("start_time", 0))
+ if isinstance(start_time_ms, str):
+ # Try to parse ISO format first
+ try:
+ start_time = datetime.fromisoformat(start_time_ms.replace("Z", "+00:00"))
+ if start_time.tzinfo is None:
+ start_time = start_time.replace(tzinfo=_UTC)
+ else:
+ start_time = start_time.astimezone(_UTC)
+ except Exception:
+ # Fallback: treat as milliseconds since epoch
+ start_time = datetime.fromtimestamp(
+ int(start_time_ms) / 1000, tz=_UTC
+ )
+ else:
+ start_time = datetime.fromtimestamp(int(start_time_ms) / 1000, tz=_UTC)
+ duration_ms = span_data.get("duration", span_data.get("duration_ms", 0))
+ if isinstance(duration_ms, str):
+ duration_ms = float(duration_ms)
+
+ # Get service name
+ service_name = (
+ span_data.get("serviceName")
+ or span_data.get("service")
+ or span_data.get("service_name", "unknown")
+ )
+
+ # Get parent span ID
+ parent_span_id = span_data.get("parentSpanId") or span_data.get("parent_span_id")
+ if parent_span_id:
+ parent_span_id = str(parent_span_id)
+
+ # Parse attributes
+ attributes_dict: dict[str, Any] = {}
+ if "attributes" in span_data:
+ attrs = span_data["attributes"]
+ if isinstance(attrs, dict):
+ attributes_dict.update(attrs)
+ elif isinstance(attrs, list):
+ # Handle list of key-value pairs
+ for attr in attrs:
+ if isinstance(attr, dict):
+ key = attr.get("key")
+ value = attr.get("value")
+ if key:
+ attributes_dict[key] = value
+
+ # Also check for tags (alternative format)
+ if "tags" in span_data:
+ tags = span_data["tags"]
+ if isinstance(tags, dict):
+ attributes_dict.update(tags)
+
+ # Create strongly-typed SpanAttributes
+ span_attributes = SpanAttributes(**attributes_dict)
+
+ # Determine span status
+ status: Literal["OK", "ERROR", "UNSET"] = "UNSET"
+ error_tag = span_attributes.error
+ status_code = span_attributes.otel_status_code
+
+ # Check for error indicators
+ if error_tag is True or status_code == "ERROR":
+ status = "ERROR"
+ elif status_code == "OK":
+ status = "OK"
+ elif span_data.get("error", False):
+ status = "ERROR"
+
+ # Parse events/logs
+ events: list[SpanEvent] = []
+ events_source = span_data.get("events")
+ if events_source is None:
+ events_source = span_data.get("logs", [])
+ if not isinstance(events_source, list):
+ events_source = []
+
+ for event_data in events_source:
+ if not isinstance(event_data, dict):
+ continue
+
+ event_attrs: dict[str, str | int | float | bool] = {}
+ if "attributes" in event_data and isinstance(event_data["attributes"], dict):
+ event_attrs.update(event_data["attributes"])
+ elif "fields" in event_data:
+ # Handle Jaeger-style fields
+ for field in event_data["fields"] or []:
+ if isinstance(field, dict):
+ key = field.get("key")
+ value = field.get("value")
+ if key:
+ event_attrs[key] = value
+
+ event_name = event_data.get("name", "event")
+
+ raw_ts = event_data.get("timestamp", 0)
+ if isinstance(raw_ts, str):
+ try:
+ dt = datetime.fromisoformat(raw_ts.replace("Z", "+00:00"))
+ if dt.tzinfo is None:
+ event_timestamp = dt.replace(tzinfo=_UTC)
+ else:
+ event_timestamp = dt.astimezone(_UTC)
+ except Exception:
+ event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=_UTC)
+ else:
+ event_timestamp = datetime.fromtimestamp(int(raw_ts) / 1000, tz=_UTC)
+
+ events.append(
+ SpanEvent(
+ name=event_name,
+ timestamp=event_timestamp,
+ attributes=event_attrs,
+ )
+ )
+
+ return SpanData(
+ trace_id=trace_id,
+ span_id=span_id,
+ operation_name=operation_name,
+ service_name=service_name,
+ start_time=start_time,
+ duration_ms=duration_ms,
+ status=status,
+ parent_span_id=parent_span_id,
+ attributes=span_attributes,
+ events=events,
+ has_error=(status == "ERROR"),
+ )
+
+ except Exception as e:
+ logger.error(f"Error parsing span: {e}")
+ return None
diff --git a/src/opentelemetry_mcp/config.py b/src/opentelemetry_mcp/config.py
index ead8b8c..2971459 100644
--- a/src/opentelemetry_mcp/config.py
+++ b/src/opentelemetry_mcp/config.py
@@ -16,7 +16,7 @@
class BackendConfig(BaseModel):
"""Configuration for OpenTelemetry trace backend."""
- type: Literal["jaeger", "tempo", "traceloop"]
+ type: Literal["jaeger", "tempo", "traceloop", "dynatrace"]
url: HttpUrl
api_key: str | None = Field(default=None, exclude=True)
timeout: float = Field(default=30.0, gt=0, le=300)
@@ -35,9 +35,9 @@ def from_env(cls) -> "BackendConfig":
"""Load configuration from environment variables."""
backend_type = os.getenv("BACKEND_TYPE", "jaeger")
backend_url = os.getenv("BACKEND_URL", "http://localhost:16686")
- if backend_type not in ["jaeger", "tempo", "traceloop"]:
+ if backend_type not in ["jaeger", "tempo", "traceloop", "dynatrace"]:
raise ValueError(
- f"Invalid BACKEND_TYPE: {backend_type}. Must be one of: jaeger, tempo, traceloop"
+ f"Invalid BACKEND_TYPE: {backend_type}. Must be one of: jaeger, tempo, traceloop, dynatrace"
)
# Parse environments from comma-separated string
@@ -102,10 +102,10 @@ def apply_cli_overrides(
) -> None:
"""Apply CLI argument overrides to configuration."""
if backend_type:
- if backend_type not in ["jaeger", "tempo", "traceloop"]:
+ if backend_type not in ["jaeger", "tempo", "traceloop", "dynatrace"]:
raise ValueError(
f"Invalid backend type: {backend_type}. "
- "Must be one of: jaeger, tempo, traceloop"
+ "Must be one of: jaeger, tempo, traceloop, dynatrace"
)
self.backend.type = backend_type # type: ignore
diff --git a/src/opentelemetry_mcp/server.py b/src/opentelemetry_mcp/server.py
index 7ab57ea..0ea5445 100644
--- a/src/opentelemetry_mcp/server.py
+++ b/src/opentelemetry_mcp/server.py
@@ -9,6 +9,7 @@
from fastmcp import FastMCP
from opentelemetry_mcp.backends.base import BaseBackend
+from opentelemetry_mcp.backends.dynatrace import DynatraceBackend
from opentelemetry_mcp.backends.jaeger import JaegerBackend
from opentelemetry_mcp.backends.tempo import TempoBackend
from opentelemetry_mcp.backends.traceloop import TraceloopBackend
@@ -94,6 +95,13 @@ def _create_backend(config: ServerConfig) -> BaseBackend:
timeout=backend_config.timeout,
environments=backend_config.environments,
)
+ elif backend_config.type == "dynatrace":
+ logger.info(f"Initializing Dynatrace backend: {backend_config.url}")
+ return DynatraceBackend(
+ url=str(backend_config.url),
+ api_key=backend_config.api_key,
+ timeout=backend_config.timeout,
+ )
else:
raise ValueError(f"Unsupported backend type: {backend_config.type}")
@@ -595,7 +603,7 @@ async def list_llm_tools_tool(
@click.command()
@click.option(
"--backend",
- type=click.Choice(["jaeger", "tempo", "traceloop"]),
+ type=click.Choice(["jaeger", "tempo", "traceloop", "dynatrace"]),
help="Backend type (overrides BACKEND_TYPE env var)",
)
@click.option(
@@ -642,7 +650,7 @@ def main(
) -> None:
"""Opentelemetry MCP Server - Query OpenTelemetry traces from LLM applications.
- Supports multiple backends: Jaeger, Tempo, and Traceloop.
+ Supports multiple backends: Jaeger, Tempo, Traceloop, and Dynatrace.
Configuration can be provided via environment variables or CLI arguments.
Transport options:
diff --git a/tests/backends/test_dynatrace.py b/tests/backends/test_dynatrace.py
new file mode 100644
index 0000000..ed8baf6
--- /dev/null
+++ b/tests/backends/test_dynatrace.py
@@ -0,0 +1,330 @@
+"""Unit tests for Dynatrace backend implementation."""
+
+from datetime import datetime, timedelta
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from opentelemetry_mcp.backends.dynatrace import DynatraceBackend
+from opentelemetry_mcp.models import Filter, FilterOperator, FilterType, SpanQuery, TraceQuery
+
+
+class TestDynatraceBackend:
+ """Test Dynatrace backend implementation."""
+
+ @pytest.fixture
+ def backend(self) -> DynatraceBackend:
+ """Create a Dynatrace backend instance for testing."""
+ return DynatraceBackend(
+ url="https://abc12345.live.dynatrace.com",
+ api_key="dt0c01.ABC123",
+ timeout=30.0,
+ )
+
+ def test_create_headers(self, backend: DynatraceBackend) -> None:
+ """Test header creation with API key."""
+ headers = backend._create_headers()
+ assert "Authorization" in headers
+ assert headers["Authorization"] == "Api-Token dt0c01.ABC123"
+ assert headers["Content-Type"] == "application/json"
+
+ def test_create_headers_no_api_key(self) -> None:
+ """Test header creation without API key."""
+ backend = DynatraceBackend(url="https://abc12345.live.dynatrace.com")
+ headers = backend._create_headers()
+ assert "Authorization" not in headers
+ assert headers["Content-Type"] == "application/json"
+
+ def test_get_supported_operators(self, backend: DynatraceBackend) -> None:
+ """Test supported operators."""
+ operators = backend.get_supported_operators()
+ assert FilterOperator.EQUALS in operators
+
+ @pytest.mark.asyncio
+ async def test_search_traces_basic(self, backend: DynatraceBackend) -> None:
+ """Test basic trace search."""
+ # Mock trace search response
+ mock_traces_response = {
+ "traces": [
+ {"traceId": "trace1", "serviceName": "test-service"},
+ {"traceId": "trace2", "serviceName": "test-service"},
+ ]
+ }
+
+ # Mock get_trace responses
+ mock_trace1 = {
+ "spans": [
+ {
+ "spanId": "span1",
+ "operationName": "test_op",
+ "startTime": int((datetime.now() - timedelta(minutes=5)).timestamp() * 1000),
+ "duration": 1000,
+ "serviceName": "test-service",
+ "attributes": {},
+ }
+ ]
+ }
+
+ with patch.object(backend.client, "get") as mock_get:
+ # First call: search_traces
+ mock_response1 = MagicMock()
+ mock_response1.json.return_value = mock_traces_response
+ mock_response1.raise_for_status = MagicMock()
+
+ # Second and third calls: get_trace for each trace
+ mock_response2 = MagicMock()
+ mock_response2.json.return_value = mock_trace1
+ mock_response2.raise_for_status = MagicMock()
+
+ mock_get.side_effect = [mock_response1, mock_response2, mock_response2]
+
+ query = TraceQuery(service_name="test-service", limit=10)
+ traces = await backend.search_traces(query)
+
+ assert len(traces) > 0
+ assert all(trace.service_name == "test-service" for trace in traces)
+
+ @pytest.mark.asyncio
+ async def test_search_traces_with_native_filter(self, backend: DynatraceBackend) -> None:
+ """Test that an explicit native filter is included in the DQL query."""
+ mock_traces_response = {"traces": []}
+
+ with patch.object(backend.client, "get") as mock_get:
+ mock_response = MagicMock()
+ mock_response.json.return_value = mock_traces_response
+ mock_response.raise_for_status = MagicMock()
+ mock_get.return_value = mock_response
+
+ # Explicit native filter that should be applied to the DQL query
+ q = TraceQuery(filters=[Filter(field="service.name", operator=FilterOperator.EQUALS, value="svc", value_type=FilterType.STRING)], limit=5)
+
+ await backend.search_traces(q)
+
+ # Inspect the first call to client.get and ensure the query param contains service filter
+ assert mock_get.call_count >= 1
+ called_args, called_kwargs = mock_get.call_args
+ params = called_kwargs.get("params") or {}
+ dql = params.get("query", "")
+ assert 'service.name == "svc"' in dql
+
+ @pytest.mark.asyncio
+ async def test_get_trace(self, backend: DynatraceBackend) -> None:
+ """Test getting a specific trace by ID."""
+ trace_id = "test-trace-id"
+ mock_trace_data = {
+ "spans": [
+ {
+ "spanId": "span1",
+ "operationName": "test_op",
+ "startTime": int(datetime.now().timestamp() * 1000),
+ "duration": 1000,
+ "serviceName": "test-service",
+ "attributes": {},
+ }
+ ]
+ }
+
+ with patch.object(backend.client, "get") as mock_get:
+ mock_response = MagicMock()
+ mock_response.json.return_value = mock_trace_data
+ mock_response.raise_for_status = MagicMock()
+ mock_get.return_value = mock_response
+
+ trace = await backend.get_trace(trace_id)
+
+ assert trace.trace_id == trace_id
+ assert len(trace.spans) > 0
+ assert trace.service_name == "test-service"
+
+ @pytest.mark.asyncio
+ async def test_list_services(self, backend: DynatraceBackend) -> None:
+ """Test listing services."""
+ # First try services endpoint
+ mock_services_response = {
+ "services": [
+ {"name": "service1"},
+ {"name": "service2"},
+ ]
+ }
+
+ with patch.object(backend.client, "get") as mock_get:
+ mock_response = MagicMock()
+ mock_response.json.return_value = mock_services_response
+ mock_response.raise_for_status = MagicMock()
+ mock_get.return_value = mock_response
+
+ services = await backend.list_services()
+
+ assert len(services) > 0
+ assert "service1" in services
+ assert "service2" in services
+
+ @pytest.mark.asyncio
+ async def test_list_services_fallback(self, backend: DynatraceBackend) -> None:
+ """Test listing services with fallback to trace search."""
+ # First call fails (services endpoint not available)
+ # Second call succeeds (trace search)
+ mock_traces_response = {
+ "traces": [
+ {"traceId": "trace1", "serviceName": "service1"},
+ {"traceId": "trace2", "serviceName": "service2"},
+ ]
+ }
+
+ with patch.object(backend.client, "get") as mock_get:
+ # First call fails
+ mock_response1 = MagicMock()
+ mock_response1.raise_for_status.side_effect = Exception("Not found")
+
+ # Second call succeeds
+ mock_response2 = MagicMock()
+ mock_response2.json.return_value = mock_traces_response
+ mock_response2.raise_for_status = MagicMock()
+
+ mock_get.side_effect = [mock_response1, mock_response2]
+
+ services = await backend.list_services()
+
+ assert len(services) > 0
+ assert "service1" in services
+ assert "service2" in services
+
+ @pytest.mark.asyncio
+ async def test_get_service_operations(self, backend: DynatraceBackend) -> None:
+ """Test getting operations for a service."""
+ mock_traces_response = {
+ "traces": [
+ {"traceId": "trace1", "operationName": "op1"},
+ {"traceId": "trace2", "operationName": "op2"},
+ ]
+ }
+
+ with patch.object(backend.client, "get") as mock_get:
+ mock_response = MagicMock()
+ mock_response.json.return_value = mock_traces_response
+ mock_response.raise_for_status = MagicMock()
+ mock_get.return_value = mock_response
+
+ operations = await backend.get_service_operations("test-service")
+
+ assert len(operations) > 0
+ assert "op1" in operations
+ assert "op2" in operations
+
+ @pytest.mark.asyncio
+ async def test_search_spans(self, backend: DynatraceBackend) -> None:
+ """Test searching for spans."""
+ # Mock trace search response
+ mock_traces_response = {
+ "traces": [
+ {"traceId": "trace1", "serviceName": "test-service"},
+ ]
+ }
+
+ # Mock get_trace response
+ mock_trace = {
+ "spans": [
+ {
+ "spanId": "span1",
+ "operationName": "test_op",
+ "startTime": int(datetime.now().timestamp() * 1000),
+ "duration": 1000,
+ "serviceName": "test-service",
+ "attributes": {},
+ }
+ ]
+ }
+
+ with patch.object(backend.client, "get") as mock_get:
+ mock_response1 = MagicMock()
+ mock_response1.json.return_value = mock_traces_response
+ mock_response1.raise_for_status = MagicMock()
+
+ mock_response2 = MagicMock()
+ mock_response2.json.return_value = mock_trace
+ mock_response2.raise_for_status = MagicMock()
+
+ mock_get.side_effect = [mock_response1, mock_response2]
+
+ query = SpanQuery(service_name="test-service", limit=10)
+ spans = await backend.search_spans(query)
+
+ assert len(spans) > 0
+ assert all(span.service_name == "test-service" for span in spans)
+
+ @pytest.mark.asyncio
+ async def test_health_check_healthy(self, backend: DynatraceBackend) -> None:
+ """Test health check when backend is healthy."""
+ mock_services_response = {"services": [{"name": "service1"}]}
+
+ with patch.object(backend.client, "get") as mock_get:
+ mock_response = MagicMock()
+ mock_response.json.return_value = mock_services_response
+ mock_response.raise_for_status = MagicMock()
+ mock_get.return_value = mock_response
+
+ health = await backend.health_check()
+
+ assert health.status == "healthy"
+ assert health.backend == "dynatrace"
+ assert health.service_count == 1
+
+ @pytest.mark.asyncio
+ async def test_health_check_unhealthy(self, backend: DynatraceBackend) -> None:
+ """Test health check when backend is unhealthy."""
+ with patch.object(backend.client, "get") as mock_get:
+ mock_get.side_effect = Exception("Connection failed")
+
+ health = await backend.health_check()
+
+ assert health.status == "unhealthy"
+ assert health.backend == "dynatrace"
+ assert health.error is not None
+
+ def test_parse_dynatrace_span(self, backend: DynatraceBackend) -> None:
+ """Test parsing Dynatrace span data."""
+ trace_id = "test-trace"
+ span_data = {
+ "spanId": "span1",
+ "operationName": "test_op",
+ "startTime": int(datetime.now().timestamp() * 1000),
+ "duration": 1000,
+ "serviceName": "test-service",
+ "attributes": {
+ "gen_ai.system": "openai",
+ "gen_ai.request.model": "gpt-4",
+ },
+ }
+
+ span = backend._parse_dynatrace_span(span_data, trace_id)
+
+ assert span is not None
+ assert span.span_id == "span1"
+ assert span.operation_name == "test_op"
+ assert span.service_name == "test-service"
+ assert span.trace_id == trace_id
+ assert span.attributes.gen_ai_system == "openai"
+
+ def test_parse_dynatrace_trace(self, backend: DynatraceBackend) -> None:
+ """Test parsing Dynatrace trace data."""
+ trace_id = "test-trace"
+ trace_data = {
+ "spans": [
+ {
+ "spanId": "span1",
+ "operationName": "test_op",
+ "startTime": int(datetime.now().timestamp() * 1000),
+ "duration": 1000,
+ "serviceName": "test-service",
+ "attributes": {},
+ }
+ ]
+ }
+
+ trace = backend._parse_dynatrace_trace(trace_data, trace_id)
+
+ assert trace is not None
+ assert trace.trace_id == trace_id
+ assert len(trace.spans) == 1
+ assert trace.service_name == "test-service"
+
diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceBackendHealth.test_health_check_healthy.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceBackendHealth.test_health_check_healthy.yaml
new file mode 100644
index 0000000..b07cb9c
--- /dev/null
+++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceBackendHealth.test_health_check_healthy.yaml
@@ -0,0 +1,86 @@
+interactions:
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/services
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:49 GMT
+ Via:
+ - 1.1 4067594c0a7ce97995ae63e2acbcd00c.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - YceIBot0wPJwaMF5E19qmTZvnINLl0oWKDK5qviWXw41oEob3dfiGA==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PPyjidoAMEbeg=
+ status:
+ code: 404
+ message: Not Found
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A49.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A49.000Z%22+%7C+fields+service.name+%7C+limit+1000
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:50 GMT
+ Via:
+ - 1.1 4067594c0a7ce97995ae63e2acbcd00c.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - wCxQNlEKSsS1_2rxhyBpKIxW9E41yTR_YDO5-fCr50199Sao2Ksk3Q==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PP4jqOoAMEJoQ=
+ status:
+ code: 404
+ message: Not Found
+version: 1
diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceListServices.test_list_services.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceListServices.test_list_services.yaml
new file mode 100644
index 0000000..d595e96
--- /dev/null
+++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceListServices.test_list_services.yaml
@@ -0,0 +1,88 @@
+interactions:
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/services
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '1'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:49 GMT
+ Via:
+ - 1.1 9c0410411ac5b38f9c4855060e71f402.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - uG-_0MsXgNCFiCg9M0976FI2rBV37Uy4BRu_gJ3GwBmmjCzi3CGP2g==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PPyjidoAMEbeg=
+ status:
+ code: 404
+ message: Not Found
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A50.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A50.000Z%22+%7C+fields+service.name+%7C+limit+1000
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:50 GMT
+ Via:
+ - 1.1 9c0410411ac5b38f9c4855060e71f402.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - VcN-UzieYMFJvMkCAx2nGKhBILUr01kuYBsZzf2UKnFjq3y-sr4u7A==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PP4jqOoAMEJoQ=
+ status:
+ code: 404
+ message: Not Found
+version: 1
diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchSpans.test_search_spans_basic.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchSpans.test_search_spans_basic.yaml
new file mode 100644
index 0000000..90b1ebc
--- /dev/null
+++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchSpans.test_search_spans_basic.yaml
@@ -0,0 +1,90 @@
+interactions:
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/services
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '4'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:49 GMT
+ Via:
+ - 1.1 cebd9c97d7e407e60d33b870ba0bdf2a.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - uDeYWOrYjVUTtCQ338ci-BpPIxjuJ0zl8yojj82ILaXaq9WsVka1Hw==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PPyjidoAMEbeg=
+ status:
+ code: 404
+ message: Not Found
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A53.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A53.000Z%22+%7C+fields+service.name+%7C+limit+1000
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '3'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:50 GMT
+ Via:
+ - 1.1 cebd9c97d7e407e60d33b870ba0bdf2a.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - DP04Zk-5UfW5Wvya0zF75FxF5jsy0CqiFk9xsfZ1Kvv0wcQ3pvrqng==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PP4jqOoAMEJoQ=
+ status:
+ code: 404
+ message: Not Found
+version: 1
diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_get_trace_by_id.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_get_trace_by_id.yaml
new file mode 100644
index 0000000..d8c8542
--- /dev/null
+++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_get_trace_by_id.yaml
@@ -0,0 +1,90 @@
+interactions:
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/services
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '3'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:49 GMT
+ Via:
+ - 1.1 5de6a028df4eb308aab30c20c3edc602.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - a4W6OLHhDmOWj3KQRzndlrP3BOHKQD0IAl66S9ZeD3cmpUoK2ZUBuA==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PPyjidoAMEbeg=
+ status:
+ code: 404
+ message: Not Found
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A52.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A52.000Z%22+%7C+fields+service.name+%7C+limit+1000
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '2'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:50 GMT
+ Via:
+ - 1.1 5de6a028df4eb308aab30c20c3edc602.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - TjnI_4hEyHV807HVjRds3MKFnrDRyeP_KJGoRIhI5xlqFbGvMGSSBQ==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PP4jqOoAMEJoQ=
+ status:
+ code: 404
+ message: Not Found
+version: 1
diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_search_traces_basic.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_search_traces_basic.yaml
new file mode 100644
index 0000000..e4e6cc8
--- /dev/null
+++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceSearchTraces.test_search_traces_basic.yaml
@@ -0,0 +1,90 @@
+interactions:
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/services
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '2'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:49 GMT
+ Via:
+ - 1.1 4f1634f0517a7f8935497f3909e4e8c6.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - zmn37kYUbKwybX6fcAnH0ycyV6dhbxmpd8ieVUnPFeI6KlqXNSh8EA==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PPyjidoAMEbeg=
+ status:
+ code: 404
+ message: Not Found
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A51.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A51.000Z%22+%7C+fields+service.name+%7C+limit+1000
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '1'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:50 GMT
+ Via:
+ - 1.1 4f1634f0517a7f8935497f3909e4e8c6.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - M_iCOcr9wCvD60OhbVU_GaglrZOjCJfo6yos1Z2Cu_ayG_EqlBIB_Q==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PP4jqOoAMEJoQ=
+ status:
+ code: 404
+ message: Not Found
+version: 1
diff --git a/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceServiceOperations.test_get_service_operations.yaml b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceServiceOperations.test_get_service_operations.yaml
new file mode 100644
index 0000000..bacafc7
--- /dev/null
+++ b/tests/integration/cassettes/test_dynatrace_integration/TestDynatraceServiceOperations.test_get_service_operations.yaml
@@ -0,0 +1,90 @@
+interactions:
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/services
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '2'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:49 GMT
+ Via:
+ - 1.1 dbae6b2ce4cce2f7c1803757a782b3e6.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - Vm_7d9x0UeC-0a3Mo-SV5wfSFRHRS3Ki7s2mgdnbmI-fLQzYKq4ijQ==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PPyjidoAMEbeg=
+ status:
+ code: 404
+ message: Not Found
+- request:
+ body: ''
+ headers:
+ accept:
+ - '*/*'
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-type:
+ - application/json
+ host:
+ - abc12345.live.dynatrace.com
+ user-agent:
+ - python-httpx/0.28.1
+ method: GET
+ uri: https://abc12345.live.dynatrace.com/api/v2/ql/query:execute?query=fetch+spans%2C+from%3A+%222025-12-19T08%3A32%3A51.000Z%22%2C+to%3A+%222025-12-20T08%3A32%3A51.000Z%22+%7C+fields+service.name+%7C+limit+1000
+ response:
+ body:
+ string: '{"message":"Not Found"}'
+ headers:
+ Age:
+ - '1'
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '23'
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Dec 2025 08:32:50 GMT
+ Via:
+ - 1.1 dbae6b2ce4cce2f7c1803757a782b3e6.cloudfront.net (CloudFront)
+ X-Amz-Cf-Id:
+ - 76rYlSEcoybJwBFE9Xrjc2Y878EnO5eXC9zEtJpKHbnoIWgC2QtmMg==
+ X-Amz-Cf-Pop:
+ - HYD57-P4
+ X-Cache:
+ - Error from cloudfront
+ apigw-requestid:
+ - V4PP4jqOoAMEJoQ=
+ status:
+ code: 404
+ message: Not Found
+version: 1
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index bfb4767..b28318f 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -10,6 +10,7 @@
from pydantic import HttpUrl, TypeAdapter
from vcr.request import Request
+from opentelemetry_mcp.backends.dynatrace import DynatraceBackend
from opentelemetry_mcp.backends.jaeger import JaegerBackend
from opentelemetry_mcp.backends.tempo import TempoBackend
from opentelemetry_mcp.backends.traceloop import TraceloopBackend
@@ -53,6 +54,37 @@ def filter_tempo_timestamps(request: Request) -> Request:
return request
+def filter_dynatrace_timestamps(request: Request) -> Request:
+ """Remove timestamp ranges from Dynatrace DQL queries for better matching.
+
+    Dynatrace DQL queries embed `from: "<timestamp>"` and `to: "<timestamp>"` inside the
+    `query` parameter, and those values change on every run. This strips the literal timestamp
+ values so cassettes can be matched across runs.
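+
+    Example: `from: "2025-12-19T08:32:49.000Z"` becomes `from: "FILTERED"`.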
+ """
+ try:
+ url = urlparse(request.uri)
+ if "/api/v2/ql/query:execute" in url.path:
+ params = parse_qs(url.query)
+ if "query" in params:
+ query_str = params["query"][0]
+ # Remove the literal timestamps in from: "..." and to: "..."
+ import re
+
+ query_str = re.sub(r'from:\s*"[^"]*"', 'from: "FILTERED"', query_str)
+ query_str = re.sub(r'to:\s*"[^"]*"', 'to: "FILTERED"', query_str)
+ params["query"] = [query_str]
+
+ from urllib.parse import urlencode
+
+ new_query = urlencode(params, doseq=True)
+ new_url = url._replace(query=new_query)
+ request.uri = new_url.geturl()
+ except Exception:
+ pass
+
+ return request
+
+
def filter_traceloop_timestamps(request: Request) -> Request:
"""Remove timestamp fields from Traceloop request body for better matching.
@@ -174,6 +206,10 @@ def vcr_config(request: pytest.FixtureRequest) -> dict[str, Any]:
config["match_on"] = ["method", "path", "body"]
config["before_record_request"] = filter_traceloop_timestamps
+ # For Dynatrace tests, remove literal DQL timestamps from query parameter
+ if "dynatrace" in request.node.nodeid.lower():
+ config["before_record_request"] = filter_dynatrace_timestamps
+
return config
@@ -281,3 +317,49 @@ async def traceloop_backend(
)
async with backend:
yield backend
+
+
+# Dynatrace Backend Fixtures
+
+
+@pytest.fixture
+def dynatrace_url() -> str:
+ """Dynatrace backend URL - can be overridden via environment variable."""
+ return os.getenv("DYNATRACE_URL", "https://abc12345.live.dynatrace.com")
+
+
+@pytest.fixture
+def dynatrace_api_key() -> str:
+ """
+ Dynatrace API key - can be set via environment variable.
+
+ For recording new cassettes, set DYNATRACE_API_KEY env var.
+ For replaying cassettes, the key is not needed (filtered from cassettes).
+ """
+ return os.getenv("DYNATRACE_API_KEY", "test_api_key_for_replay")
+
+
+@pytest.fixture
+def dynatrace_config(dynatrace_url: str, dynatrace_api_key: str) -> BackendConfig:
+ """Dynatrace backend configuration."""
+ return BackendConfig(
+ type="dynatrace",
+ url=TypeAdapter(HttpUrl).validate_python(dynatrace_url),
+ api_key=dynatrace_api_key,
+ )
+
+
+@pytest.fixture
+async def dynatrace_backend(dynatrace_config: BackendConfig) -> AsyncGenerator[DynatraceBackend, None]:
+ """
+ Dynatrace backend instance for integration testing.
+
+ Uses async context manager to properly initialize and cleanup the backend.
+ """
+ backend = DynatraceBackend(
+ url=str(dynatrace_config.url),
+ api_key=dynatrace_config.api_key,
+ timeout=dynatrace_config.timeout,
+ )
+ async with backend:
+ yield backend
diff --git a/tests/integration/test_dynatrace_integration.py b/tests/integration/test_dynatrace_integration.py
new file mode 100644
index 0000000..c78c0de
--- /dev/null
+++ b/tests/integration/test_dynatrace_integration.py
@@ -0,0 +1,135 @@
+"""Integration tests for Dynatrace backend using VCR recordings."""
+
+import pytest
+
+from opentelemetry_mcp.backends.dynatrace import DynatraceBackend
+from opentelemetry_mcp.models import SpanQuery, TraceQuery
+
+# Mark all tests in this module as integration and vcr
+pytestmark = [pytest.mark.integration, pytest.mark.vcr]
+
+
+def _skip_if_placeholder_backend(backend: DynatraceBackend) -> None:
+ """Skip tests when no real Dynatrace URL is configured."""
+ if "abc12345.live.dynatrace.com" in getattr(backend, "url", ""):
+ pytest.skip("DYNATRACE_URL not configured; skipping Dynatrace integration tests")
+
+
+class TestDynatraceBackendHealth:
+ """Test Dynatrace backend health check."""
+
+ @pytest.mark.vcr
+ async def test_health_check_healthy(self, dynatrace_backend: DynatraceBackend) -> None:
+ """Test health check against a Dynatrace instance."""
+ _skip_if_placeholder_backend(dynatrace_backend)
+ health = await dynatrace_backend.health_check()
+
+ assert health.status in ("healthy", "unhealthy")
+ assert health.backend == "dynatrace"
+ assert health.url is not None
+
+
+class TestDynatraceListServices:
+ """Test Dynatrace service listing."""
+
+ @pytest.mark.vcr
+ async def test_list_services(self, dynatrace_backend: DynatraceBackend) -> None:
+ """Test listing all services from Dynatrace."""
+ _skip_if_placeholder_backend(dynatrace_backend)
+ services = await dynatrace_backend.list_services()
+
+ assert isinstance(services, list)
+ for service in services:
+ assert isinstance(service, str)
+ assert len(service) > 0
+
+
+class TestDynatraceServiceOperations:
+ """Test Dynatrace service operations listing."""
+
+ @pytest.mark.vcr
+ async def test_get_service_operations(self, dynatrace_backend: DynatraceBackend) -> None:
+ """Test getting operations for a specific service."""
+ _skip_if_placeholder_backend(dynatrace_backend)
+ services = await dynatrace_backend.list_services()
+ assert len(services) > 0, "No services available for testing"
+
+ service_name = services[0]
+ operations = await dynatrace_backend.get_service_operations(service_name)
+
+ assert isinstance(operations, list)
+ for op in operations:
+ assert isinstance(op, str)
+ assert len(op) > 0
+
+
+class TestDynatraceSearchTraces:
+ """Test Dynatrace trace search functionality."""
+
+ @pytest.mark.vcr
+ async def test_search_traces_basic(self, dynatrace_backend: DynatraceBackend) -> None:
+ """Test basic trace search with service name."""
+ _skip_if_placeholder_backend(dynatrace_backend)
+ services = await dynatrace_backend.list_services()
+ assert len(services) > 0, "No services available for testing"
+
+ service_name = services[0]
+ query = TraceQuery(service_name=service_name, limit=10)
+
+ traces = await dynatrace_backend.search_traces(query)
+
+ assert isinstance(traces, list)
+ for trace in traces:
+ assert trace.trace_id
+ assert trace.service_name == service_name
+ assert trace.spans
+ assert len(trace.spans) > 0
+ assert trace.start_time
+ assert trace.duration_ms >= 0
+
+ @pytest.mark.vcr
+ async def test_get_trace_by_id(self, dynatrace_backend: DynatraceBackend) -> None:
+ """Test retrieving a specific trace by ID."""
+ _skip_if_placeholder_backend(dynatrace_backend)
+ services = await dynatrace_backend.list_services()
+ assert len(services) > 0
+
+ service_name = services[0]
+ traces = await dynatrace_backend.search_traces(TraceQuery(service_name=service_name, limit=1))
+
+ assert len(traces) > 0, "No traces available for testing"
+ trace_id = traces[0].trace_id
+
+ trace = await dynatrace_backend.get_trace(trace_id)
+
+ assert trace.trace_id == trace_id
+ assert trace.spans
+ assert len(trace.spans) > 0
+ assert trace.service_name
+ assert trace.start_time
+ assert trace.duration_ms >= 0
+
+
+class TestDynatraceSearchSpans:
+ """Test Dynatrace span search functionality."""
+
+ @pytest.mark.vcr
+ async def test_search_spans_basic(self, dynatrace_backend: DynatraceBackend) -> None:
+ """Test basic span search with service name."""
+ _skip_if_placeholder_backend(dynatrace_backend)
+ services = await dynatrace_backend.list_services()
+ assert len(services) > 0
+
+ service_name = services[0]
+ query = SpanQuery(service_name=service_name, limit=20)
+
+ spans = await dynatrace_backend.search_spans(query)
+
+ assert isinstance(spans, list)
+ for span in spans:
+ assert span.span_id
+ assert span.trace_id
+ assert span.operation_name
+ assert span.service_name
+ assert span.start_time
+ assert span.duration_ms >= 0
diff --git a/uv.lock b/uv.lock
index f4aec13..9813e91 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,5 @@
version = 1
-revision = 2
+revision = 3
requires-python = ">=3.11"
resolution-markers = [
"platform_python_implementation != 'PyPy'",
@@ -931,7 +931,7 @@ wheels = [
[[package]]
name = "opentelemetry-mcp"
-version = "0.1.0"
+version = "0.2.0"
source = { editable = "." }
dependencies = [
{ name = "click" },