Skip to content

Commit c63f0cb

Browse files
committed
GenAI Utils | Allow passing external Span to the manual LLMInvocation
1 parent 6ae0615 commit c63f0cb

3 files changed

Lines changed: 70 additions & 8 deletions

File tree

util/opentelemetry-util-genai/src/opentelemetry/util/genai/handler.py

Lines changed: 16 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -137,16 +137,20 @@ def start_llm(
137137
) -> LLMInvocation:
138138
"""Start an LLM invocation and create a pending span entry."""
139139
# Create a span and attach it as current; keep the token to detach later
140-
span = self._tracer.start_span(
141-
name=f"{invocation.operation_name} {invocation.request_model}",
142-
kind=SpanKind.CLIENT,
143-
)
140+
# If the span is already set on the invocation - use it instead of creating a new one
141+
# This allows users to control the span outside of the handler if needed
142+
if invocation.span is None:
143+
span = self._tracer.start_span(
144+
name=f"{invocation.operation_name} {invocation.request_model}",
145+
kind=SpanKind.CLIENT,
146+
)
147+
invocation.span = span
148+
invocation.end_span_on_exit = True
144149
# Record a monotonic start timestamp (seconds) for duration
145150
# calculation using timeit.default_timer.
146151
invocation.monotonic_start_s = timeit.default_timer()
147-
invocation.span = span
148152
invocation.context_token = otel_context.attach(
149-
set_span_in_context(span)
153+
set_span_in_context(invocation.span)
150154
)
151155
return invocation
152156

@@ -162,7 +166,9 @@ def stop_llm(self, invocation: LLMInvocation) -> LLMInvocation: # pylint: disab
162166
_maybe_emit_llm_event(self._logger, span, invocation)
163167
# Detach context and end span
164168
otel_context.detach(invocation.context_token)
165-
span.end()
169+
# End the span only if it was created by the handler
170+
if invocation.end_span_on_exit:
171+
span.end()
166172
return invocation
167173

168174
def fail_llm( # pylint: disable=no-self-use
@@ -181,7 +187,9 @@ def fail_llm( # pylint: disable=no-self-use
181187
_maybe_emit_llm_event(self._logger, span, invocation, error)
182188
# Detach context and end span
183189
otel_context.detach(invocation.context_token)
184-
span.end()
190+
# End the span only if it was created by the handler
191+
if invocation.end_span_on_exit:
192+
span.end()
185193
return invocation
186194

187195
@contextmanager

util/opentelemetry-util-genai/src/opentelemetry/util/genai/types.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -201,6 +201,7 @@ class GenAIInvocation:
201201
context_token: ContextToken | None = None
202202
span: Span | None = None
203203
attributes: dict[str, Any] = field(default_factory=_new_str_any_dict)
204+
end_span_on_exit: bool = False
204205

205206

206207
@dataclass

util/opentelemetry-util-genai/tests/test_utils.py

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -861,6 +861,59 @@ def test_emits_llm_event_by_default_for_span_and_event(self):
861861
)
862862
self.assertIn(GenAI.GEN_AI_INPUT_MESSAGES, log_record.attributes)
863863

864+
@patch_env_vars(
    stability_mode="gen_ai_latest_experimental",
    content_capturing="SPAN_ONLY",
    emit_event="",
)
def test_llm_manual_start_and_stop_external_span(self):
    """A user-supplied span must be adopted by the handler and left open on stop.

    Covers the contract that when ``LLMInvocation.span`` is pre-set, the
    handler neither replaces the span nor ends it (``end_span_on_exit``
    stays False); the caller remains responsible for ending it.
    """
    input_msg = _create_input_message("hi")
    output_msg = _create_output_message("ok")

    # Span created outside the handler, deliberately INTERNAL (the handler
    # would have created a CLIENT span) so adoption is observable later.
    external_span = self.telemetry_handler._tracer.start_span(
        name="external operation", kind=trace.SpanKind.INTERNAL
    )

    invocation = LLMInvocation(
        span=external_span,
        request_model="manual-model",
        input_messages=[input_msg],
        provider="test-provider",
        attributes={"external": True},
    )

    self.telemetry_handler.start_llm(invocation)

    # The handler must keep the caller's span and must not take ownership.
    assert invocation.span is not None
    assert invocation.span == external_span
    assert invocation.end_span_on_exit is False

    invocation.output_messages = [output_msg]
    self.telemetry_handler.stop_llm(invocation)

    # stop_llm must not have ended the externally owned span.
    assert invocation.span.is_recording()
    # Caller ends it explicitly, as the contract requires.
    external_span.end()

    exported = _get_single_span(self.span_exporter)
    # INTERNAL kind proves the handler exported the external span rather
    # than silently creating its own CLIENT span.
    assert exported.kind == trace.SpanKind.INTERNAL
    _assert_span_time_order(exported)

    expected_attrs = {
        GenAI.GEN_AI_OPERATION_NAME: "chat",
        GenAI.GEN_AI_REQUEST_MODEL: "manual-model",
        GenAI.GEN_AI_PROVIDER_NAME: "test-provider",
        GenAI.GEN_AI_INPUT_MESSAGES: AnyNonNone(),
        GenAI.GEN_AI_OUTPUT_MESSAGES: AnyNonNone(),
        GenAI.GEN_AI_RESPONSE_FINISH_REASONS: ("stop",),
        "external": True,
    }
    _assert_span_attributes(_get_span_attributes(exported), expected_attrs)
916+
864917

865918
class AnyNonNone:
866919
def __eq__(self, other):

0 commit comments

Comments (0)