Commit 7e33320

Merge branch 'master' into feat/span-first-2
2 parents: f1393d9 + bf0a683
File tree: 9 files changed (+15, −135 lines)

sentry_sdk/ai/utils.py

Lines changed: 0 additions & 3 deletions
@@ -16,7 +16,6 @@
 from sentry_sdk.utils import logger
 from sentry_sdk.traces import StreamedSpan
 from sentry_sdk.tracing_utils import has_span_streaming_enabled
-from sentry_sdk.consts import SPANDATA

 MAX_GEN_AI_MESSAGE_BYTES = 20_000  # 20KB
 # Maximum characters when only a single message is left after bytes truncation

@@ -714,8 +713,6 @@ def truncate_and_annotate_messages(
     if len(messages) > 1:
         scope._gen_ai_original_message_count[span.span_id] = len(messages)

-        span.set_data(SPANDATA.META_GEN_AI_ORIGINAL_INPUT_MESSAGES_LENGTH, len(messages))
-
     return [truncated_message]
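
With the dedicated span attribute gone, the pre-truncation message count is kept only in scope._gen_ai_original_message_count (keyed by span ID) and surfaces through the event's _meta annotations, which is what the updated tests below assert against. A minimal sketch of reading it back from a captured transaction event; the helper name and the event shape are assumptions lifted from those test assertions, not SDK API:

# Hypothetical helper; assumes the _meta layout asserted in the tests below.
def original_message_count(event, span_index="0"):
    # After truncation, the original length of gen_ai.request.messages lives
    # under the event's _meta tree rather than on the span's data.
    meta_data = (
        event.get("_meta", {})
        .get("spans", {})
        .get(span_index, {})
        .get("data", {})
    )
    return meta_data.get("gen_ai.request.messages", {}).get("", {}).get("len")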

sentry_sdk/consts.py

Lines changed: 0 additions & 8 deletions
@@ -879,14 +879,6 @@ class SPANDATA:
     Example: "a1b2c3d4e5f6"
     """

-    META_GEN_AI_ORIGINAL_INPUT_MESSAGES_LENGTH = (
-        "sentry.sdk_meta.gen_ai.input.messages.original_length"
-    )
-    """
-    The original number of input non-system instruction messages, before SDK trimming.
-    Example: 4
-    """
-

 class SPANSTATUS:
     """

sentry_sdk/integrations/mcp.py

Lines changed: 15 additions & 112 deletions
@@ -352,19 +352,20 @@ def _prepare_handler_data(
     )


-async def _async_handler_wrapper(
+async def _handler_wrapper(
     handler_type: str,
     func: "Callable[..., Any]",
     original_args: "tuple[Any, ...]",
     original_kwargs: "Optional[dict[str, Any]]" = None,
     self: "Optional[Any]" = None,
+    force_await: bool = True,
 ) -> "Any":
     """
-    Async wrapper for MCP handlers.
+    Wrapper for MCP handlers.

     Args:
         handler_type: "tool", "prompt", or "resource"
-        func: The async handler function to wrap
+        func: The handler function to wrap
         original_args: Original arguments passed to the handler
         original_kwargs: Original keyword arguments passed to the handler
         self: Optional instance for bound methods
@@ -421,73 +422,11 @@ async def _async_handler_wrapper(
             # Execute the async handler
             if self is not None:
                 original_args = (self, *original_args)
-            result = await func(*original_args, **original_kwargs)
-        except Exception as e:
-            # Set error flag for tools
-            if handler_type == "tool":
-                span.set_data(SPANDATA.MCP_TOOL_RESULT_IS_ERROR, True)
-            sentry_sdk.capture_exception(e)
-            raise
-
-        _set_span_output_data(span, result, result_data_key, handler_type)
-        return result
-

-def _sync_handler_wrapper(
-    handler_type: str, func: "Callable[..., Any]", original_args: "tuple[Any, ...]"
-) -> "Any":
-    """
-    Sync wrapper for MCP handlers.
+            result = func(*original_args, **original_kwargs)
+            if force_await or inspect.isawaitable(result):
+                result = await result

-    Args:
-        handler_type: "tool", "prompt", or "resource"
-        func: The sync handler function to wrap
-        original_args: Original arguments passed to the handler
-    """
-    (
-        handler_name,
-        arguments,
-        span_data_key,
-        span_name,
-        mcp_method_name,
-        result_data_key,
-    ) = _prepare_handler_data(handler_type, original_args)
-
-    # Start span and execute
-    with get_start_span_function()(
-        op=OP.MCP_SERVER,
-        name=span_name,
-        origin=MCPIntegration.origin,
-    ) as span:
-        # Get request ID, session ID, and transport from context
-        request_id, session_id, mcp_transport = _get_request_context_data()
-
-        # Set input span data
-        _set_span_input_data(
-            span,
-            handler_name,
-            span_data_key,
-            mcp_method_name,
-            arguments,
-            request_id,
-            session_id,
-            mcp_transport,
-        )
-
-        # For resources, extract and set protocol
-        if handler_type == "resource":
-            uri = original_args[0]
-            protocol = None
-            if hasattr(uri, "scheme"):
-                protocol = uri.scheme
-            elif handler_name and "://" in handler_name:
-                protocol = handler_name.split("://")[0]
-            if protocol:
-                span.set_data(SPANDATA.MCP_RESOURCE_PROTOCOL, protocol)
-
-        try:
-            # Execute the sync handler
-            result = func(*original_args)
         except Exception as e:
             # Set error flag for tools
             if handler_type == "tool":
@@ -499,41 +438,6 @@ def _sync_handler_wrapper(
         return result


-def _create_instrumented_handler(
-    handler_type: str, func: "Callable[..., Any]"
-) -> "Callable[..., Any]":
-    """
-    Create an instrumented version of a handler function (async or sync).
-
-    This function wraps the user's handler with a runtime wrapper that will create
-    Sentry spans and capture metrics when the handler is actually called.
-
-    The wrapper preserves the async/sync nature of the original function, which is
-    critical for Python's async/await to work correctly.
-
-    Args:
-        handler_type: "tool", "prompt", or "resource" - determines span configuration
-        func: The handler function to instrument (async or sync)
-
-    Returns:
-        A wrapped version of func that creates Sentry spans on execution
-    """
-    if inspect.iscoroutinefunction(func):
-
-        @wraps(func)
-        async def async_wrapper(*args: "Any") -> "Any":
-            return await _async_handler_wrapper(handler_type, func, args)
-
-        return async_wrapper
-    else:
-
-        @wraps(func)
-        def sync_wrapper(*args: "Any") -> "Any":
-            return _sync_handler_wrapper(handler_type, func, args)
-
-        return sync_wrapper
-
-
 def _create_instrumented_decorator(
     original_decorator: "Callable[..., Any]",
     handler_type: str,
@@ -547,8 +451,7 @@ def _create_instrumented_decorator(
     Sentry instrumentation into the handler registration flow. The returned decorator
     will:
     1. Receive the user's handler function
-    2. Wrap it with instrumentation via _create_instrumented_handler
-    3. Pass the instrumented version to the original MCP decorator
+    2. Pass the instrumented version to the original MCP decorator

     This ensures that when the handler is called at runtime, it's already wrapped
     with Sentry spans and metrics collection.
@@ -564,12 +467,12 @@ def _create_instrumented_decorator(
     """

     def instrumented_decorator(func: "Callable[..., Any]") -> "Callable[..., Any]":
-        # First wrap the handler with instrumentation
-        instrumented_func = _create_instrumented_handler(handler_type, func)
+        @wraps(func)
+        async def wrapper(*args: "Any") -> "Any":
+            return await _handler_wrapper(handler_type, func, args, force_await=False)
+
         # Then register it with the original MCP decorator
-        return original_decorator(*decorator_args, **decorator_kwargs)(
-            instrumented_func
-        )
+        return original_decorator(*decorator_args, **decorator_kwargs)(wrapper)

     return instrumented_decorator

@@ -634,7 +537,7 @@ def _patch_fastmcp() -> None:
     async def patched_get_prompt_mcp(
         self: "Any", *args: "Any", **kwargs: "Any"
     ) -> "Any":
-        return await _async_handler_wrapper(
+        return await _handler_wrapper(
             "prompt",
             original_get_prompt_mcp,
             args,

@@ -651,7 +554,7 @@ async def patched_get_prompt_mcp(
     async def patched_read_resource_mcp(
         self: "Any", *args: "Any", **kwargs: "Any"
     ) -> "Any":
-        return await _async_handler_wrapper(
+        return await _handler_wrapper(
             "resource",
             original_read_resource_mcp,
             args,
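
Since _create_instrumented_decorator now always registers an async wrapper (delegating to _handler_wrapper with force_await=False), the separate sync/async wrapper factory removed above is no longer needed; the registration path presumably accepts coroutine handlers either way. A rough sketch of that flow with a stand-in decorator (register and _registry are hypothetical names, not MCP APIs):

import asyncio
import inspect
from functools import wraps

_registry = {}


def register(name):
    # Stand-in for an MCP handler-registration decorator.
    def decorator(func):
        _registry[name] = func
        return func

    return decorator


def instrumented(original_decorator, *decorator_args, **decorator_kwargs):
    def instrumented_decorator(func):
        @wraps(func)
        async def wrapper(*args):
            # Mirrors _handler_wrapper(..., force_await=False): await only
            # when the user's handler is itself async.
            result = func(*args)
            if inspect.isawaitable(result):
                result = await result
            return result

        # Register the (always async) wrapper in place of the raw handler.
        return original_decorator(*decorator_args, **decorator_kwargs)(wrapper)

    return instrumented_decorator


@instrumented(register, "add")
def add(a, b):
    return a + b


print(asyncio.run(_registry["add"](1, 2)))  # prints 3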

tests/integrations/anthropic/test_anthropic.py

Lines changed: 0 additions & 2 deletions
@@ -1012,7 +1012,6 @@ def test_anthropic_message_truncation(sentry_init, capture_events):
     assert len(parsed_messages) == 1
     assert "small message 5" in str(parsed_messages[0])

-    assert chat_span["data"][SPANDATA.META_GEN_AI_ORIGINAL_INPUT_MESSAGES_LENGTH] == 5
     assert tx["_meta"]["spans"]["0"]["data"]["gen_ai.request.messages"][""]["len"] == 5


@@ -1064,7 +1063,6 @@ async def test_anthropic_message_truncation_async(sentry_init, capture_events):
     assert len(parsed_messages) == 1
     assert "small message 5" in str(parsed_messages[0])

-    assert chat_span["data"][SPANDATA.META_GEN_AI_ORIGINAL_INPUT_MESSAGES_LENGTH] == 5
     assert tx["_meta"]["spans"]["0"]["data"]["gen_ai.request.messages"][""]["len"] == 5


tests/integrations/google_genai/test_google_genai.py

Lines changed: 0 additions & 1 deletion
@@ -983,7 +983,6 @@ def test_google_genai_message_truncation(
     assert parsed_messages[0]["role"] == "user"
     assert small_content in parsed_messages[0]["content"]

-    assert invoke_span["data"][SPANDATA.META_GEN_AI_ORIGINAL_INPUT_MESSAGES_LENGTH] == 2
     assert (
         event["_meta"]["spans"]["0"]["data"]["gen_ai.request.messages"][""]["len"] == 2
     )

tests/integrations/langchain/test_langchain.py

Lines changed: 0 additions & 2 deletions
@@ -1070,8 +1070,6 @@ def test_langchain_message_truncation(sentry_init, capture_events):
     assert isinstance(parsed_messages, list)
     assert len(parsed_messages) == 1
     assert "small message 5" in str(parsed_messages[0])
-
-    assert llm_span["data"][SPANDATA.META_GEN_AI_ORIGINAL_INPUT_MESSAGES_LENGTH] == 5
     assert tx["_meta"]["spans"]["0"]["data"]["gen_ai.request.messages"][""]["len"] == 5


tests/integrations/langgraph/test_langgraph.py

Lines changed: 0 additions & 2 deletions
@@ -1384,6 +1384,4 @@ def original_invoke(self, *args, **kwargs):
     assert isinstance(parsed_messages, list)
     assert len(parsed_messages) == 1
     assert "small message 5" in str(parsed_messages[0])
-
-    assert invoke_span["data"][SPANDATA.META_GEN_AI_ORIGINAL_INPUT_MESSAGES_LENGTH] == 5
     assert tx["_meta"]["spans"]["0"]["data"]["gen_ai.request.messages"][""]["len"] == 5

tests/integrations/litellm/test_litellm.py

Lines changed: 0 additions & 2 deletions
@@ -754,8 +754,6 @@ def test_litellm_message_truncation(sentry_init, capture_events):
     assert isinstance(parsed_messages, list)
     assert len(parsed_messages) == 1
     assert "small message 5" in str(parsed_messages[0])
-
-    assert chat_span["data"][SPANDATA.META_GEN_AI_ORIGINAL_INPUT_MESSAGES_LENGTH] == 5
     assert tx["_meta"]["spans"]["0"]["data"]["gen_ai.request.messages"][""]["len"] == 5


tests/integrations/openai/test_openai.py

Lines changed: 0 additions & 3 deletions
@@ -1607,7 +1607,6 @@ def test_ai_client_span_responses_api(
         "gen_ai.usage.total_tokens": 30,
         "gen_ai.request.model": "gpt-4o",
         "gen_ai.response.text": "the model response",
-        "sentry.sdk_meta.gen_ai.input.messages.original_length": 1,
         "thread.id": mock.ANY,
         "thread.name": mock.ANY,
     }

@@ -1911,7 +1910,6 @@ async def test_ai_client_span_responses_async_api(
         "gen_ai.usage.output_tokens.reasoning": 8,
         "gen_ai.usage.total_tokens": 30,
         "gen_ai.response.text": "the model response",
-        "sentry.sdk_meta.gen_ai.input.messages.original_length": 1,
         "thread.id": mock.ANY,
         "thread.name": mock.ANY,
     }

@@ -2179,7 +2177,6 @@ async def test_ai_client_span_streaming_responses_async_api(
         "gen_ai.usage.total_tokens": 30,
         "gen_ai.request.model": "gpt-4o",
         "gen_ai.response.text": "the model response",
-        "sentry.sdk_meta.gen_ai.input.messages.original_length": 1,
         "thread.id": mock.ANY,
         "thread.name": mock.ANY,
     }
