diff --git a/packages/opentelemetry-instrumentation-anthropic/opentelemetry/instrumentation/anthropic/__init__.py b/packages/opentelemetry-instrumentation-anthropic/opentelemetry/instrumentation/anthropic/__init__.py
index 841b57b64..017bdecd8 100644
--- a/packages/opentelemetry-instrumentation-anthropic/opentelemetry/instrumentation/anthropic/__init__.py
+++ b/packages/opentelemetry-instrumentation-anthropic/opentelemetry/instrumentation/anthropic/__init__.py
@@ -586,12 +586,14 @@ def _wrap(
         return wrapped(*args, **kwargs)
 
     name = to_wrap.get("span_name")
+    peer_service = to_wrap.get("object")
     span = tracer.start_span(
         name,
         kind=SpanKind.CLIENT,
         attributes={
             SpanAttributes.LLM_SYSTEM: "Anthropic",
             SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.COMPLETION.value,
+            SpanAttributes.PEER_SERVICE: peer_service
         },
     )
 
@@ -682,12 +684,14 @@ async def _awrap(
         return await wrapped(*args, **kwargs)
 
     name = to_wrap.get("span_name")
+    peer_service = to_wrap.get("object")
     span = tracer.start_span(
         name,
         kind=SpanKind.CLIENT,
         attributes={
             SpanAttributes.LLM_SYSTEM: "Anthropic",
             SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.COMPLETION.value,
+            SpanAttributes.PEER_SERVICE: peer_service
         },
     )
     try:
diff --git a/packages/opentelemetry-instrumentation-bedrock/opentelemetry/instrumentation/bedrock/__init__.py b/packages/opentelemetry-instrumentation-bedrock/opentelemetry/instrumentation/bedrock/__init__.py
index 0df1349c3..a95544fc3 100644
--- a/packages/opentelemetry-instrumentation-bedrock/opentelemetry/instrumentation/bedrock/__init__.py
+++ b/packages/opentelemetry-instrumentation-bedrock/opentelemetry/instrumentation/bedrock/__init__.py
@@ -157,18 +157,18 @@ def _wrap(
             metric_params.start_time = time.time()
             client = wrapped(*args, **kwargs)
             client.invoke_model = _instrumented_model_invoke(
-                client.invoke_model, tracer, metric_params
+                client.invoke_model, tracer, metric_params, to_wrap
             )
             client.invoke_model_with_response_stream = (
                 _instrumented_model_invoke_with_response_stream(
-                    client.invoke_model_with_response_stream, tracer, metric_params
+                    client.invoke_model_with_response_stream, tracer, metric_params, to_wrap
                 )
             )
             client.converse = _instrumented_converse(
-                client.converse, tracer, metric_params
+                client.converse, tracer, metric_params, to_wrap
             )
             client.converse_stream = _instrumented_converse_stream(
-                client.converse_stream, tracer, metric_params
+                client.converse_stream, tracer, metric_params, to_wrap
             )
             return client
         except Exception as e:
@@ -189,14 +189,16 @@ def _wrap(
     return wrapped(*args, **kwargs)
 
 
-def _instrumented_model_invoke(fn, tracer, metric_params):
+def _instrumented_model_invoke(fn, tracer, metric_params, to_wrap):
     @wraps(fn)
     def with_instrumentation(*args, **kwargs):
         if context_api.get_value(SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY):
             return fn(*args, **kwargs)
 
         with tracer.start_as_current_span(
-            _BEDROCK_INVOKE_SPAN_NAME, kind=SpanKind.CLIENT
+            _BEDROCK_INVOKE_SPAN_NAME, kind=SpanKind.CLIENT, attributes={
+                SpanAttributes.PEER_SERVICE: to_wrap.get("object"),
+            }
         ) as span:
             response = fn(*args, **kwargs)
 
@@ -208,13 +210,15 @@ def with_instrumentation(*args, **kwargs):
     return with_instrumentation
 
 
-def _instrumented_model_invoke_with_response_stream(fn, tracer, metric_params):
+def _instrumented_model_invoke_with_response_stream(fn, tracer, metric_params, to_wrap):
     @wraps(fn)
     def with_instrumentation(*args, **kwargs):
         if context_api.get_value(SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY):
             return fn(*args, **kwargs)
 
-        span = tracer.start_span(_BEDROCK_INVOKE_SPAN_NAME, kind=SpanKind.CLIENT)
+        span = tracer.start_span(_BEDROCK_INVOKE_SPAN_NAME, kind=SpanKind.CLIENT, attributes={
+            SpanAttributes.PEER_SERVICE: to_wrap.get("object"),
+        })
         response = fn(*args, **kwargs)
 
         if span.is_recording():
@@ -225,7 +229,7 @@ def with_instrumentation(*args, **kwargs):
     return with_instrumentation
 
 
-def _instrumented_converse(fn, tracer, metric_params):
+def _instrumented_converse(fn, tracer, metric_params, to_wrap):
     # see
     # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock-runtime/client/converse.html
     # for the request/response format
@@ -235,7 +239,9 @@ def with_instrumentation(*args, **kwargs):
             return fn(*args, **kwargs)
 
         with tracer.start_as_current_span(
-            _BEDROCK_CONVERSE_SPAN_NAME, kind=SpanKind.CLIENT
+            _BEDROCK_CONVERSE_SPAN_NAME, kind=SpanKind.CLIENT, attributes={
+                SpanAttributes.PEER_SERVICE: to_wrap.get("object"),
+            }
         ) as span:
             response = fn(*args, **kwargs)
             _handle_converse(span, kwargs, response, metric_params)
@@ -245,13 +251,15 @@ def with_instrumentation(*args, **kwargs):
     return with_instrumentation
 
 
-def _instrumented_converse_stream(fn, tracer, metric_params):
+def _instrumented_converse_stream(fn, tracer, metric_params, to_wrap):
     @wraps(fn)
     def with_instrumentation(*args, **kwargs):
         if context_api.get_value(SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY):
             return fn(*args, **kwargs)
 
-        span = tracer.start_span(_BEDROCK_CONVERSE_SPAN_NAME, kind=SpanKind.CLIENT)
+        span = tracer.start_span(_BEDROCK_CONVERSE_SPAN_NAME, kind=SpanKind.CLIENT, attributes={
+            SpanAttributes.PEER_SERVICE: to_wrap.get("object"),
+        })
         response = fn(*args, **kwargs)
         if span.is_recording():
             _handle_converse_stream(span, kwargs, response, metric_params)
diff --git a/packages/opentelemetry-instrumentation-crewai/opentelemetry/instrumentation/crewai/instrumentation.py b/packages/opentelemetry-instrumentation-crewai/opentelemetry/instrumentation/crewai/instrumentation.py
index b5404144c..4b72885c1 100644
--- a/packages/opentelemetry-instrumentation-crewai/opentelemetry/instrumentation/crewai/instrumentation.py
+++ b/packages/opentelemetry-instrumentation-crewai/opentelemetry/instrumentation/crewai/instrumentation.py
@@ -99,6 +99,8 @@ def wrap_agent_execute_task(tracer, duration_histogram, token_histogram, wrapped
         kind=SpanKind.CLIENT,
         attributes={
             SpanAttributes.TRACELOOP_SPAN_KIND: TraceloopSpanKindValues.AGENT.value,
+            SpanAttributes.LLM_SYSTEM: "crewai",
+            SpanAttributes.PEER_SERVICE: "Agent"
         }
     ) as span:
         try:
@@ -140,6 +142,8 @@ def wrap_task_execute(tracer, duration_histogram, token_histogram, wrapped, inst
         kind=SpanKind.CLIENT,
         attributes={
             SpanAttributes.TRACELOOP_SPAN_KIND: TraceloopSpanKindValues.TASK.value,
+            SpanAttributes.LLM_SYSTEM: "crewai",
+            SpanAttributes.PEER_SERVICE: "Task"
         }
     ) as span:
         try:
@@ -160,6 +164,8 @@ def wrap_llm_call(tracer, duration_histogram, token_histogram, wrapped, instance
         f"{llm}.llm",
         kind=SpanKind.CLIENT,
         attributes={
+            SpanAttributes.LLM_SYSTEM: "crewai",
+            SpanAttributes.PEER_SERVICE: "LLM"
         }
     ) as span:
         start_time = time.time()
diff --git a/packages/opentelemetry-instrumentation-groq/opentelemetry/instrumentation/groq/__init__.py b/packages/opentelemetry-instrumentation-groq/opentelemetry/instrumentation/groq/__init__.py
index 7d7dd406f..82578b96f 100644
--- a/packages/opentelemetry-instrumentation-groq/opentelemetry/instrumentation/groq/__init__.py
+++ b/packages/opentelemetry-instrumentation-groq/opentelemetry/instrumentation/groq/__init__.py
@@ -402,12 +402,14 @@ def _wrap(
         return wrapped(*args, **kwargs)
 
     name = to_wrap.get("span_name")
+    peer_service = to_wrap.get("object")
     span = tracer.start_span(
         name,
         kind=SpanKind.CLIENT,
         attributes={
             SpanAttributes.LLM_SYSTEM: "Groq",
             SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.COMPLETION.value,
+            SpanAttributes.PEER_SERVICE: peer_service
         },
     )
 
@@ -484,12 +486,14 @@ async def _awrap(
         return await wrapped(*args, **kwargs)
 
     name = to_wrap.get("span_name")
+    peer_service = to_wrap.get("object")
     span = tracer.start_span(
         name,
         kind=SpanKind.CLIENT,
         attributes={
             SpanAttributes.LLM_SYSTEM: "Groq",
             SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.COMPLETION.value,
+            SpanAttributes.PEER_SERVICE: peer_service
         },
     )
     try:
diff --git a/packages/opentelemetry-instrumentation-langchain/opentelemetry/instrumentation/langchain/callback_handler.py b/packages/opentelemetry-instrumentation-langchain/opentelemetry/instrumentation/langchain/callback_handler.py
index 3799be857..af6e5a012 100644
--- a/packages/opentelemetry-instrumentation-langchain/opentelemetry/instrumentation/langchain/callback_handler.py
+++ b/packages/opentelemetry-instrumentation-langchain/opentelemetry/instrumentation/langchain/callback_handler.py
@@ -475,6 +475,7 @@ def _create_llm_span(
     )
     _set_span_attribute(span, SpanAttributes.LLM_SYSTEM, "Langchain")
     _set_span_attribute(span, SpanAttributes.LLM_REQUEST_TYPE, request_type.value)
+    _set_span_attribute(span, SpanAttributes.PEER_SERVICE, name)
 
     return span
 
diff --git a/packages/opentelemetry-instrumentation-sagemaker/opentelemetry/instrumentation/sagemaker/__init__.py b/packages/opentelemetry-instrumentation-sagemaker/opentelemetry/instrumentation/sagemaker/__init__.py
index ca28515de..cbfc53410 100644
--- a/packages/opentelemetry-instrumentation-sagemaker/opentelemetry/instrumentation/sagemaker/__init__.py
+++ b/packages/opentelemetry-instrumentation-sagemaker/opentelemetry/instrumentation/sagemaker/__init__.py
@@ -76,11 +76,11 @@ def _wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
     if kwargs.get("service_name") == "sagemaker-runtime":
         client = wrapped(*args, **kwargs)
         client.invoke_endpoint = _instrumented_endpoint_invoke(
-            client.invoke_endpoint, tracer
+            client.invoke_endpoint, tracer, to_wrap
         )
         client.invoke_endpoint_with_response_stream = (
             _instrumented_endpoint_invoke_with_response_stream(
-                client.invoke_endpoint_with_response_stream, tracer
+                client.invoke_endpoint_with_response_stream, tracer, to_wrap
             )
         )
 
@@ -89,14 +89,16 @@ def _wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
     return wrapped(*args, **kwargs)
 
 
-def _instrumented_endpoint_invoke(fn, tracer):
+def _instrumented_endpoint_invoke(fn, tracer, to_wrap):
    @wraps(fn)
    def with_instrumentation(*args, **kwargs):
        if context_api.get_value(SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY):
            return fn(*args, **kwargs)
 
        with tracer.start_as_current_span(
-            "sagemaker.completion", kind=SpanKind.CLIENT
+            "sagemaker.completion", kind=SpanKind.CLIENT, attributes={
+                SpanAttributes.PEER_SERVICE: to_wrap.get("object"),
+                SpanAttributes.LLM_SYSTEM: "SageMaker"
+            }
        ) as span:
            response = fn(*args, **kwargs)
 
@@ -108,13 +111,16 @@ def with_instrumentation(*args, **kwargs):
     return with_instrumentation
 
 
-def _instrumented_endpoint_invoke_with_response_stream(fn, tracer):
+def _instrumented_endpoint_invoke_with_response_stream(fn, tracer, to_wrap):
     @wraps(fn)
     def with_instrumentation(*args, **kwargs):
         if context_api.get_value(SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY):
             return fn(*args, **kwargs)
 
-        span = tracer.start_span("sagemaker.completion", kind=SpanKind.CLIENT)
+        span = tracer.start_span("sagemaker.completion", kind=SpanKind.CLIENT, attributes={
+            SpanAttributes.PEER_SERVICE: to_wrap.get("object"),
+            SpanAttributes.LLM_SYSTEM: "SageMaker",
+        })
         response = fn(*args, **kwargs)
 
         if span.is_recording():
diff --git a/packages/opentelemetry-instrumentation-watsonx/opentelemetry/instrumentation/watsonx/__init__.py b/packages/opentelemetry-instrumentation-watsonx/opentelemetry/instrumentation/watsonx/__init__.py
index c902289dd..fb2ce90d2 100644
--- a/packages/opentelemetry-instrumentation-watsonx/opentelemetry/instrumentation/watsonx/__init__.py
+++ b/packages/opentelemetry-instrumentation-watsonx/opentelemetry/instrumentation/watsonx/__init__.py
@@ -446,6 +446,7 @@ def _wrap(
         return wrapped(*args, **kwargs)
 
     name = to_wrap.get("span_name")
+    peer_service = to_wrap.get("object")
 
     span = tracer.start_span(
         name,
@@ -453,6 +454,7 @@
         attributes={
             SpanAttributes.LLM_SYSTEM: "Watsonx",
             SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.COMPLETION.value,
+            SpanAttributes.PEER_SERVICE: peer_service
         },
     )
 
diff --git a/packages/opentelemetry-semantic-conventions-ai/opentelemetry/semconv_ai/__init__.py b/packages/opentelemetry-semantic-conventions-ai/opentelemetry/semconv_ai/__init__.py
index a16b550e9..2e7691fb2 100644
--- a/packages/opentelemetry-semantic-conventions-ai/opentelemetry/semconv_ai/__init__.py
+++ b/packages/opentelemetry-semantic-conventions-ai/opentelemetry/semconv_ai/__init__.py
@@ -223,6 +223,9 @@ class SpanAttributes:
     MCP_SESSION_INIT_OPTIONS = "mcp.session.init_options"
     MCP_RESPONSE_VALUE = "mcp.response.value"
 
+    # Service
+    PEER_SERVICE = "peer.service"
+
 
 class Events(Enum):
     DB_QUERY_EMBEDDINGS = "db.query.embeddings"
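
Reviewer note (not part of the patch): each instrumentation describes its wrapped methods in a registry entry whose "object" field names the wrapped class, and this change copies that value into the standard "peer.service" attribute on the client span, using the new SpanAttributes.PEER_SERVICE constant added above. The snippet below is a minimal, illustrative sketch of the resulting span; the registry entry and the console exporter setup are assumptions for demonstration, not code taken from any of the packages in this diff.

    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor
    from opentelemetry.semconv_ai import SpanAttributes
    from opentelemetry.trace import SpanKind

    # Console exporter so the emitted attributes are visible when run locally.
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
    trace.set_tracer_provider(provider)
    tracer = trace.get_tracer(__name__)

    # Illustrative registry entry; real entries live in each package's wrapped-methods table.
    to_wrap = {"object": "Completions", "method": "create", "span_name": "anthropic.completion"}

    span = tracer.start_span(
        to_wrap.get("span_name"),
        kind=SpanKind.CLIENT,
        attributes={
            SpanAttributes.LLM_SYSTEM: "Anthropic",
            SpanAttributes.PEER_SERVICE: to_wrap.get("object"),  # added by this change
        },
    )
    span.end()  # the exported span now carries peer.service="Completions"

The attribute key "peer.service" matches the existing OpenTelemetry general semantic convention, so backends that already group by peer.service pick these spans up without extra mapping.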