From 4a472bdf37c4f89ab0befd1126377386b42f8b79 Mon Sep 17 00:00:00 2001
From: Alex Hall
Date: Fri, 21 Feb 2025 12:57:27 +0200
Subject: [PATCH] Use raw OTel and actual event loggers in `InstrumentedModel`
 (#945)

---
 .../pydantic_ai/models/instrumented.py |  59 ++-
 .../pydantic_ai/models/wrapper.py      |   7 +-
 tests/models/test_instrumented.py      | 454 ++++++++----------
 uv.lock                                |  93 ++--
 4 files changed, 291 insertions(+), 322 deletions(-)

diff --git a/pydantic_ai_slim/pydantic_ai/models/instrumented.py b/pydantic_ai_slim/pydantic_ai/models/instrumented.py
index 52ecf28df..16b5f0328 100644
--- a/pydantic_ai_slim/pydantic_ai/models/instrumented.py
+++ b/pydantic_ai_slim/pydantic_ai/models/instrumented.py
@@ -1,12 +1,14 @@
 from __future__ import annotations

-from collections.abc import AsyncIterator
+from collections.abc import AsyncIterator, Iterator
 from contextlib import asynccontextmanager, contextmanager
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from functools import partial
-from typing import Any, Literal
+from typing import Any, Callable, Literal

 import logfire_api
+from opentelemetry._events import Event, EventLogger, EventLoggerProvider, get_event_logger_provider
+from opentelemetry.trace import Tracer, TracerProvider, get_tracer_provider

 from ..messages import (
     ModelMessage,
@@ -22,7 +24,7 @@
 )
 from ..settings import ModelSettings
 from ..usage import Usage
-from . import ModelRequestParameters, StreamedResponse
+from . import KnownModelName, Model, ModelRequestParameters, StreamedResponse
 from .wrapper import WrapperModel

 MODEL_SETTING_ATTRIBUTES: tuple[
@@ -51,10 +53,33 @@ class InstrumentedModel(WrapperModel):
     """Model which is instrumented with logfire."""

-    logfire_instance: logfire_api.Logfire = logfire_api.DEFAULT_LOGFIRE_INSTANCE
+    tracer: Tracer = field(repr=False)
+    event_logger: EventLogger = field(repr=False)

-    def __post_init__(self):
-        self.logfire_instance = self.logfire_instance.with_settings(custom_scope_suffix='pydantic_ai')
+    def __init__(
+        self,
+        wrapped: Model | KnownModelName,
+        tracer_provider: TracerProvider | None = None,
+        event_logger_provider: EventLoggerProvider | None = None,
+    ):
+        super().__init__(wrapped)
+        tracer_provider = tracer_provider or get_tracer_provider()
+        event_logger_provider = event_logger_provider or get_event_logger_provider()
+        self.tracer = tracer_provider.get_tracer('pydantic-ai')
+        self.event_logger = event_logger_provider.get_event_logger('pydantic-ai')
+
+    @classmethod
+    def from_logfire(
+        cls,
+        wrapped: Model | KnownModelName,
+        logfire_instance: logfire_api.Logfire = logfire_api.DEFAULT_LOGFIRE_INSTANCE,
+    ) -> InstrumentedModel:
+        if hasattr(logfire_instance.config, 'get_event_logger_provider'):
+            event_provider = logfire_instance.config.get_event_logger_provider()
+        else:
+            event_provider = None
+        tracer_provider = logfire_instance.config.get_tracer_provider()
+        return cls(wrapped, tracer_provider, event_provider)

     async def request(
         self,
@@ -90,7 +115,7 @@ def _instrument(
         self,
         messages: list[ModelMessage],
         model_settings: ModelSettings | None,
-    ):
+    ) -> Iterator[Callable[[ModelResponse, Usage], None]]:
         operation = 'chat'
         model_name = self.model_name
         span_name = f'{operation} {model_name}'
@@ -114,7 +139,7 @@

         emit_event = partial(self._emit_event, system)

-        with self.logfire_instance.span(span_name, **attributes) as span:
+        with self.tracer.start_as_current_span(span_name, attributes=attributes) as span:
             if span.is_recording():
                 for message in messages:
                     if isinstance(message, ModelRequest):
@@ -157,27 +182,27 @@ def finish(response: ModelResponse, usage: Usage):
         yield finish

     def _emit_event(self, system: str, event_name: str, body: dict[str, Any]) -> None:
-        self.logfire_instance.info(event_name, **{'gen_ai.system': system}, **body)
+        self.event_logger.emit(Event(event_name, body=body, attributes={'gen_ai.system': system}))


 def _request_part_body(part: ModelRequestPart) -> tuple[str, dict[str, Any]]:
     if isinstance(part, SystemPromptPart):
-        return 'gen_ai.system.message', {'content': part.content}
+        return 'gen_ai.system.message', {'content': part.content, 'role': 'system'}
     elif isinstance(part, UserPromptPart):
-        return 'gen_ai.user.message', {'content': part.content}
+        return 'gen_ai.user.message', {'content': part.content, 'role': 'user'}
     elif isinstance(part, ToolReturnPart):
-        return 'gen_ai.tool.message', {'content': part.content, 'id': part.tool_call_id}
+        return 'gen_ai.tool.message', {'content': part.content, 'role': 'tool', 'id': part.tool_call_id}
     elif isinstance(part, RetryPromptPart):
         if part.tool_name is None:
-            return 'gen_ai.user.message', {'content': part.model_response()}
+            return 'gen_ai.user.message', {'content': part.model_response(), 'role': 'user'}
         else:
-            return 'gen_ai.tool.message', {'content': part.model_response(), 'id': part.tool_call_id}
+            return 'gen_ai.tool.message', {'content': part.model_response(), 'role': 'tool', 'id': part.tool_call_id}
     else:
         return '', {}


 def _response_bodies(message: ModelResponse) -> list[dict[str, Any]]:
-    body: dict[str, Any] = {}
+    body: dict[str, Any] = {'role': 'assistant'}
     result = [body]
     for part in message.parts:
         if isinstance(part, ToolCallPart):
@@ -193,7 +218,7 @@ def _response_bodies(message: ModelResponse) -> list[dict[str, Any]]:
             )
         elif isinstance(part, TextPart):
             if body.get('content'):
-                body = {}
+                body = {'role': 'assistant'}
                 result.append(body)
             body['content'] = part.content
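Not part of the patch: a minimal usage sketch of the new constructor and the `from_logfire` classmethod added above. It assumes `TestModel` (from `pydantic_ai.models.test`) as a stand-in for any wrapped model; the tests further down exercise the same three construction paths.

# Sketch only - illustrates the API introduced in instrumented.py above.
from opentelemetry._events import NoOpEventLoggerProvider
from opentelemetry.trace import NoOpTracerProvider

from pydantic_ai.models.instrumented import InstrumentedModel
from pydantic_ai.models.test import TestModel

# 1. Defaults: pick up the globally configured OTel tracer and event logger providers.
model = InstrumentedModel(TestModel())

# 2. Derive both providers from a logfire instance; on old logfire versions without
#    config.get_event_logger_provider() the event provider falls back to the global one.
model = InstrumentedModel.from_logfire(TestModel())

# 3. Explicit providers, e.g. no-op providers so nothing is recorded.
model = InstrumentedModel(TestModel(), NoOpTracerProvider(), NoOpEventLoggerProvider())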
diff --git a/pydantic_ai_slim/pydantic_ai/models/wrapper.py b/pydantic_ai_slim/pydantic_ai/models/wrapper.py
index 5b6bf1cc7..9e1cdc3e7 100644
--- a/pydantic_ai_slim/pydantic_ai/models/wrapper.py
+++ b/pydantic_ai_slim/pydantic_ai/models/wrapper.py
@@ -8,15 +8,18 @@
 from ..messages import ModelMessage, ModelResponse
 from ..settings import ModelSettings
 from ..usage import Usage
-from . import Model, ModelRequestParameters, StreamedResponse
+from . import KnownModelName, Model, ModelRequestParameters, StreamedResponse, infer_model


-@dataclass
+@dataclass(init=False)
 class WrapperModel(Model):
     """Model which wraps another model."""

     wrapped: Model

+    def __init__(self, wrapped: Model | KnownModelName):
+        self.wrapped = infer_model(wrapped)
+
     async def request(self, *args: Any, **kwargs: Any) -> tuple[ModelResponse, Usage]:
         return await self.wrapped.request(*args, **kwargs)
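A second sketch, also not part of the patch: with `WrapperModel.__init__` now routing through `infer_model`, wrappers accept either a `Model` instance or a `KnownModelName` string. The `'test'` name below is an assumption (taken as one of the known names `infer_model` resolves); any known model name or model instance works.

# Sketch only - illustrates the widened WrapperModel constructor above.
from pydantic_ai.models.instrumented import InstrumentedModel
from pydantic_ai.models.test import TestModel

wrapped_instance = InstrumentedModel(TestModel())  # wrap an existing Model instance
wrapped_by_name = InstrumentedModel('test')        # or pass a KnownModelName string
# wrapped_by_name.wrapped is the Model that infer_model built from the name.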
diff --git a/tests/models/test_instrumented.py b/tests/models/test_instrumented.py
index 5d5e87b70..cc28cbd85 100644
--- a/tests/models/test_instrumented.py
+++ b/tests/models/test_instrumented.py
@@ -5,8 +5,8 @@
 from datetime import datetime

 import pytest
-from dirty_equals import IsJson
 from inline_snapshot import snapshot
+from logfire_api import DEFAULT_LOGFIRE_INSTANCE

 from pydantic_ai.messages import (
     ModelMessage,
@@ -24,15 +24,17 @@
     UserPromptPart,
 )
 from pydantic_ai.models import Model, ModelRequestParameters, StreamedResponse
-from pydantic_ai.models.instrumented import InstrumentedModel
 from pydantic_ai.settings import ModelSettings
 from pydantic_ai.usage import Usage

 from ..conftest import try_import

 with try_import() as imports_successful:
-    import logfire_api
     from logfire.testing import CaptureLogfire
+    from opentelemetry._events import NoOpEventLoggerProvider
+    from opentelemetry.trace import NoOpTracerProvider
+
+    from pydantic_ai.models.instrumented import InstrumentedModel


 pytestmark = [
@@ -40,6 +42,11 @@
     pytest.mark.anyio,
 ]

+requires_logfire_events = pytest.mark.skipif(
+    not hasattr(DEFAULT_LOGFIRE_INSTANCE.config, 'get_event_logger_provider'),
+    reason='old logfire without events/logs support',
+)
+

 class MyModel(Model):
     @property
@@ -96,8 +103,9 @@ def timestamp(self) -> datetime:


 @pytest.mark.anyio
+@requires_logfire_events
 async def test_instrumented_model(capfire: CaptureLogfire):
-    model = InstrumentedModel(MyModel())
+    model = InstrumentedModel.from_logfire(MyModel())
     assert model.system == 'my_system'
     assert model.model_name == 'my_model'

@@ -131,216 +139,174 @@ async def test_instrumented_model(capfire: CaptureLogfire):
     assert capfire.exporter.exported_spans_as_dict() == snapshot(
         [
             {
-                'name': 'gen_ai.system.message',
-                'context': {'trace_id': 1, 'span_id': 3, 'is_remote': False},
-                'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False},
-                'start_time': 2000000000,
-                'end_time': 2000000000,
+                'name': 'chat my_model',
+                'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False},
+                'parent': None,
+                'start_time': 1000000000,
+                'end_time': 18000000000,
                 'attributes': {
-                    'logfire.span_type': 'log',
-                    'logfire.level_num': 9,
-                    'logfire.msg_template': 'gen_ai.system.message',
-                    'logfire.msg': 'gen_ai.system.message',
-                    'code.filepath': 'test_instrumented.py',
-                    'code.function': 'test_instrumented_model',
-                    'code.lineno': 123,
+                    'gen_ai.operation.name': 'chat',
                     'gen_ai.system': 'my_system',
-                    'content': 'system_prompt',
-                    'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"content":{}}}',
+                    'gen_ai.request.model': 'my_model',
+                    'gen_ai.request.temperature': 1,
+                    'logfire.msg': 'chat my_model',
+                    'logfire.span_type': 'span',
+                    'gen_ai.response.model': 'my_model_123',
+                    'gen_ai.usage.input_tokens': 100,
+                    'gen_ai.usage.output_tokens': 200,
                 },
             },
+        ]
+    )
+
+    assert capfire.log_exporter.exported_logs_as_dicts() == snapshot(
+        [
             {
-                'name': 'gen_ai.user.message',
-                'context': {'trace_id': 1, 'span_id': 4, 'is_remote': False},
-                'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False},
-                'start_time': 3000000000,
-
'end_time': 3000000000, + 'body': {'content': 'system_prompt', 'role': 'system'}, + 'severity_number': 9, + 'severity_text': None, 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.user.message', - 'logfire.msg': 'gen_ai.user.message', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model', - 'code.lineno': 123, 'gen_ai.system': 'my_system', - 'content': 'user_prompt', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"content":{}}}', + 'event.name': 'gen_ai.system.message', }, + 'timestamp': 2000000000, + 'observed_timestamp': 3000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, }, { - 'name': 'gen_ai.tool.message', - 'context': {'trace_id': 1, 'span_id': 5, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 4000000000, - 'end_time': 4000000000, + 'body': {'content': 'user_prompt', 'role': 'user'}, + 'severity_number': 9, + 'severity_text': None, 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.tool.message', - 'logfire.msg': 'gen_ai.tool.message', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model', - 'code.lineno': 123, 'gen_ai.system': 'my_system', - 'content': 'tool_return_content', - 'id': 'tool_call_3', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"content":{},"id":{}}}', + 'event.name': 'gen_ai.user.message', }, + 'timestamp': 4000000000, + 'observed_timestamp': 5000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, }, { - 'name': 'gen_ai.tool.message', - 'context': {'trace_id': 1, 'span_id': 6, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 5000000000, - 'end_time': 5000000000, + 'body': {'content': 'tool_return_content', 'role': 'tool', 'id': 'tool_call_3'}, + 'severity_number': 9, + 'severity_text': None, 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.tool.message', - 'logfire.msg': 'gen_ai.tool.message', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model', - 'code.lineno': 123, 'gen_ai.system': 'my_system', + 'event.name': 'gen_ai.tool.message', + }, + 'timestamp': 6000000000, + 'observed_timestamp': 7000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, + }, + { + 'body': { 'content': """\ retry_prompt1 Fix the errors and try again.\ """, + 'role': 'tool', 'id': 'tool_call_4', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"content":{},"id":{}}}', }, - }, - { - 'name': 'gen_ai.user.message', - 'context': {'trace_id': 1, 'span_id': 7, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 6000000000, - 'end_time': 6000000000, + 'severity_number': 9, + 'severity_text': None, 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.user.message', - 'logfire.msg': 'gen_ai.user.message', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model', - 'code.lineno': 123, 'gen_ai.system': 'my_system', + 'event.name': 'gen_ai.tool.message', + }, + 'timestamp': 8000000000, + 'observed_timestamp': 9000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, + }, + { + 'body': { 'content': """\ retry_prompt2 Fix the errors and try again.\ """, - 'logfire.json_schema': 
'{"type":"object","properties":{"gen_ai.system":{},"content":{}}}', + 'role': 'user', }, - }, - { - 'name': 'gen_ai.assistant.message', - 'context': {'trace_id': 1, 'span_id': 8, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 7000000000, - 'end_time': 7000000000, + 'severity_number': 9, + 'severity_text': None, 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.assistant.message', - 'logfire.msg': 'gen_ai.assistant.message', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model', - 'code.lineno': 123, 'gen_ai.system': 'my_system', - 'content': 'text3', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"content":{}}}', + 'event.name': 'gen_ai.user.message', }, + 'timestamp': 10000000000, + 'observed_timestamp': 11000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, }, { - 'name': 'gen_ai.choice', - 'context': {'trace_id': 1, 'span_id': 9, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 8000000000, - 'end_time': 8000000000, + 'body': {'role': 'assistant', 'content': 'text3'}, + 'severity_number': 9, + 'severity_text': None, 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.choice', - 'logfire.msg': 'gen_ai.choice', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model', - 'code.lineno': 123, 'gen_ai.system': 'my_system', - 'index': 0, - 'message': IsJson( - { - 'content': 'text1', - 'tool_calls': [ - { - 'id': 'tool_call_1', - 'type': 'function', - 'function': {'name': 'tool1', 'arguments': 'args1'}, - }, - { - 'id': 'tool_call_2', - 'type': 'function', - 'function': {'name': 'tool2', 'arguments': {'args2': 3}}, - }, - ], - } - ), - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"index":{},"message":{"type":"object"}}}', + 'event.name': 'gen_ai.assistant.message', }, + 'timestamp': 12000000000, + 'observed_timestamp': 13000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, }, { - 'name': 'gen_ai.choice', - 'context': {'trace_id': 1, 'span_id': 10, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 9000000000, - 'end_time': 9000000000, - 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.choice', - 'logfire.msg': 'gen_ai.choice', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model', - 'code.lineno': 123, - 'gen_ai.system': 'my_system', + 'body': { 'index': 0, - 'message': '{"content":"text2"}', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"index":{},"message":{"type":"object"}}}', + 'message': { + 'role': 'assistant', + 'content': 'text1', + 'tool_calls': [ + { + 'id': 'tool_call_1', + 'type': 'function', + 'function': {'name': 'tool1', 'arguments': 'args1'}, + }, + { + 'id': 'tool_call_2', + 'type': 'function', + 'function': {'name': 'tool2', 'arguments': {'args2': 3}}, + }, + ], + }, }, + 'severity_number': 9, + 'severity_text': None, + 'attributes': {'gen_ai.system': 'my_system', 'event.name': 'gen_ai.choice'}, + 'timestamp': 14000000000, + 'observed_timestamp': 15000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, }, { - 'name': 'chat my_model', - 'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'parent': None, - 'start_time': 1000000000, - 
'end_time': 10000000000, - 'attributes': { - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model', - 'code.lineno': 123, - 'gen_ai.operation.name': 'chat', - 'gen_ai.system': 'my_system', - 'gen_ai.request.model': 'my_model', - 'gen_ai.request.temperature': 1, - 'logfire.msg_template': 'chat my_model', - 'logfire.msg': 'chat my_model', - 'logfire.span_type': 'span', - 'gen_ai.response.model': 'my_model_123', - 'gen_ai.usage.input_tokens': 100, - 'gen_ai.usage.output_tokens': 200, - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.operation.name":{},"gen_ai.system":{},"gen_ai.request.model":{},"gen_ai.request.temperature":{},"gen_ai.response.model":{},"gen_ai.usage.input_tokens":{},"gen_ai.usage.output_tokens":{}}}', - }, + 'body': {'index': 0, 'message': {'role': 'assistant', 'content': 'text2'}}, + 'severity_number': 9, + 'severity_text': None, + 'attributes': {'gen_ai.system': 'my_system', 'event.name': 'gen_ai.choice'}, + 'timestamp': 16000000000, + 'observed_timestamp': 17000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, }, ] ) @pytest.mark.anyio -async def test_instrumented_model_not_recording(capfire: CaptureLogfire): - logfire_instance = logfire_api.DEFAULT_LOGFIRE_INSTANCE.with_trace_sample_rate(0) - model = InstrumentedModel(MyModel(), logfire_instance) +async def test_instrumented_model_not_recording(): + model = InstrumentedModel(MyModel(), NoOpTracerProvider(), NoOpEventLoggerProvider()) messages: list[ModelMessage] = [ModelRequest(parts=[SystemPromptPart('system_prompt')])] await model.request( @@ -353,12 +319,11 @@ async def test_instrumented_model_not_recording(capfire: CaptureLogfire): ), ) - assert capfire.exporter.exported_spans_as_dict() == snapshot([]) - @pytest.mark.anyio +@requires_logfire_events async def test_instrumented_model_stream(capfire: CaptureLogfire): - model = InstrumentedModel(MyModel()) + model = InstrumentedModel.from_logfire(MyModel()) messages: list[ModelMessage] = [ ModelRequest( @@ -385,75 +350,62 @@ async def test_instrumented_model_stream(capfire: CaptureLogfire): assert capfire.exporter.exported_spans_as_dict() == snapshot( [ - { - 'name': 'gen_ai.user.message', - 'context': {'trace_id': 1, 'span_id': 3, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 2000000000, - 'end_time': 2000000000, - 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.user.message', - 'logfire.msg': 'gen_ai.user.message', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model_stream', - 'code.lineno': 123, - 'gen_ai.system': 'my_system', - 'content': 'user_prompt', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"content":{}}}', - }, - }, - { - 'name': 'gen_ai.choice', - 'context': {'trace_id': 1, 'span_id': 4, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 3000000000, - 'end_time': 3000000000, - 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.choice', - 'logfire.msg': 'gen_ai.choice', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model_stream', - 'code.lineno': 123, - 'gen_ai.system': 'my_system', - 'index': 0, - 'message': '{"content":"text1text2"}', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"index":{},"message":{"type":"object"}}}', - }, - }, { 'name': 'chat my_model', 
'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, 'parent': None, 'start_time': 1000000000, - 'end_time': 4000000000, + 'end_time': 6000000000, 'attributes': { - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model_stream', - 'code.lineno': 123, 'gen_ai.operation.name': 'chat', 'gen_ai.system': 'my_system', 'gen_ai.request.model': 'my_model', 'gen_ai.request.temperature': 1, - 'logfire.msg_template': 'chat my_model', 'logfire.msg': 'chat my_model', 'logfire.span_type': 'span', 'gen_ai.response.model': 'my_model_123', 'gen_ai.usage.input_tokens': 300, 'gen_ai.usage.output_tokens': 400, - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.operation.name":{},"gen_ai.system":{},"gen_ai.request.model":{},"gen_ai.request.temperature":{},"gen_ai.response.model":{},"gen_ai.usage.input_tokens":{},"gen_ai.usage.output_tokens":{}}}', }, }, ] ) + assert capfire.log_exporter.exported_logs_as_dicts() == snapshot( + [ + { + 'body': {'content': 'user_prompt', 'role': 'user'}, + 'severity_number': 9, + 'severity_text': None, + 'attributes': { + 'gen_ai.system': 'my_system', + 'event.name': 'gen_ai.user.message', + }, + 'timestamp': 2000000000, + 'observed_timestamp': 3000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, + }, + { + 'body': {'index': 0, 'message': {'role': 'assistant', 'content': 'text1text2'}}, + 'severity_number': 9, + 'severity_text': None, + 'attributes': {'gen_ai.system': 'my_system', 'event.name': 'gen_ai.choice'}, + 'timestamp': 4000000000, + 'observed_timestamp': 5000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, + }, + ] + ) + @pytest.mark.anyio +@requires_logfire_events async def test_instrumented_model_stream_break(capfire: CaptureLogfire): - model = InstrumentedModel(MyModel()) + model = InstrumentedModel.from_logfire(MyModel()) messages: list[ModelMessage] = [ ModelRequest( @@ -479,80 +431,66 @@ async def test_instrumented_model_stream_break(capfire: CaptureLogfire): assert capfire.exporter.exported_spans_as_dict() == snapshot( [ - { - 'name': 'gen_ai.user.message', - 'context': {'trace_id': 1, 'span_id': 3, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 2000000000, - 'end_time': 2000000000, - 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.user.message', - 'logfire.msg': 'gen_ai.user.message', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model_stream_break', - 'code.lineno': 123, - 'gen_ai.system': 'my_system', - 'content': 'user_prompt', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"content":{}}}', - }, - }, - { - 'name': 'gen_ai.choice', - 'context': {'trace_id': 1, 'span_id': 4, 'is_remote': False}, - 'parent': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, - 'start_time': 3000000000, - 'end_time': 3000000000, - 'attributes': { - 'logfire.span_type': 'log', - 'logfire.level_num': 9, - 'logfire.msg_template': 'gen_ai.choice', - 'logfire.msg': 'gen_ai.choice', - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model_stream_break', - 'code.lineno': 123, - 'gen_ai.system': 'my_system', - 'index': 0, - 'message': '{"content":"text1"}', - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.system":{},"index":{},"message":{"type":"object"}}}', - }, - }, { 'name': 'chat my_model', 'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False}, 'parent': None, 'start_time': 1000000000, - 
'end_time': 5000000000, + 'end_time': 7000000000, 'attributes': { - 'code.filepath': 'test_instrumented.py', - 'code.function': 'test_instrumented_model_stream_break', - 'code.lineno': 123, 'gen_ai.operation.name': 'chat', 'gen_ai.system': 'my_system', 'gen_ai.request.model': 'my_model', 'gen_ai.request.temperature': 1, - 'logfire.msg_template': 'chat my_model', 'logfire.msg': 'chat my_model', 'logfire.span_type': 'span', 'gen_ai.response.model': 'my_model_123', 'gen_ai.usage.input_tokens': 300, 'gen_ai.usage.output_tokens': 400, 'logfire.level_num': 17, - 'logfire.json_schema': '{"type":"object","properties":{"gen_ai.operation.name":{},"gen_ai.system":{},"gen_ai.request.model":{},"gen_ai.request.temperature":{},"gen_ai.response.model":{},"gen_ai.usage.input_tokens":{},"gen_ai.usage.output_tokens":{}}}', }, 'events': [ { 'name': 'exception', - 'timestamp': 4000000000, + 'timestamp': 6000000000, 'attributes': { 'exception.type': 'RuntimeError', 'exception.message': '', 'exception.stacktrace': 'RuntimeError', - 'exception.escaped': 'True', + 'exception.escaped': 'False', }, } ], }, ] ) + + assert capfire.log_exporter.exported_logs_as_dicts() == snapshot( + [ + { + 'body': {'content': 'user_prompt', 'role': 'user'}, + 'severity_number': 9, + 'severity_text': None, + 'attributes': { + 'gen_ai.system': 'my_system', + 'event.name': 'gen_ai.user.message', + }, + 'timestamp': 2000000000, + 'observed_timestamp': 3000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, + }, + { + 'body': {'index': 0, 'message': {'role': 'assistant', 'content': 'text1'}}, + 'severity_number': 9, + 'severity_text': None, + 'attributes': {'gen_ai.system': 'my_system', 'event.name': 'gen_ai.choice'}, + 'timestamp': 4000000000, + 'observed_timestamp': 5000000000, + 'trace_id': 1, + 'span_id': 1, + 'trace_flags': 1, + }, + ] + ) diff --git a/uv.lock b/uv.lock index 1caa7ebae..aeee55b2f 100644 --- a/uv.lock +++ b/uv.lock @@ -1,4 +1,5 @@ version = 1 +revision = 1 requires-python = ">=3.9" resolution-markers = [ "python_full_version == '3.10.*'", @@ -1344,7 +1345,7 @@ wheels = [ [[package]] name = "logfire" -version = "2.6.2" +version = "3.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "executing" }, @@ -1356,9 +1357,9 @@ dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/14/05/14c4f8a928be439fabef6174edce3697950de6cb17588063c2d6b160c660/logfire-2.6.2.tar.gz", hash = "sha256:23507de8fb64eb5ab0556f694b498a5455adc0c0636bf234aefc2aa220933b68", size = 253211 } +sdist = { url = "https://files.pythonhosted.org/packages/30/68/50cecae912663b3ff606f24ee4090794bc5a9bb55fc68f25c1a3df8e3b77/logfire-3.6.1.tar.gz", hash = "sha256:93b11d94ad1a19a8bfc1fd97b40ecc3c919a3edc6a5e51cb2a153e64cd04aa9e", size = 268682 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/1b/86975d00d1ef022505cd1f9d0e9eeade6023b8d48fde8376fcefc0f5515a/logfire-2.6.2-py3-none-any.whl", hash = "sha256:67ff148db4760ef2e79a903364302de913f3946ccdc6544d8396330320f64803", size = 172653 }, + { url = "https://files.pythonhosted.org/packages/76/48/58089aba35ececf61a0dd78fd527d6c80af281c80df56ceb60c29afbc04b/logfire-3.6.1-py3-none-any.whl", hash = "sha256:de8d7d9e21b292b0fb5cc653a2d6b93be33828ba9e5b2645d09135a7ffbdbf1e", size = 180970 }, ] [package.optional-dependencies] @@ -1374,11 +1375,11 @@ sqlite3 = [ [[package]] name = "logfire-api" -version = "2.4.1" +version = "3.6.0" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9f/51/5d1fd2bce3620b981bdfeafaf4b01ee6e51698f287edf6b715f8764dd9c7/logfire_api-2.4.1.tar.gz", hash = "sha256:cf7aa63065d7cc2b0e77292d1c4ac5fd1a5b507767b8559f91df9c42e84c8250", size = 41706 } +sdist = { url = "https://files.pythonhosted.org/packages/1b/ca/c18452d6c5550427e1d88e2c69d0aade343fb44c89cc10b35474f9fc7ceb/logfire_api-3.6.0.tar.gz", hash = "sha256:35123d788625f41555fa78d516d3bc45ffaa3fd57181b6188708a3c77311a1f4", size = 45239 } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/29/0b6c68c58b91a98be287db8f47ca26a3ea6c2f1147035583df3d1ad41486/logfire_api-2.4.1-py3-none-any.whl", hash = "sha256:3243f287a546c64f364dbce90bdf46af585b1b60d7d5cb109859928a61865046", size = 69530 }, + { url = "https://files.pythonhosted.org/packages/fc/87/84a4aa9ed8e6de5fcc08dc500f81303ed13bb6f727e48a0eacd100e6591f/logfire_api-3.6.0-py3-none-any.whl", hash = "sha256:71d90148655c03f7616810cc58198261d0cb7c9185ac8d300f77561e39019056", size = 74998 }, ] [[package]] @@ -1943,32 +1944,32 @@ wheels = [ [[package]] name = "opentelemetry-api" -version = "1.28.2" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated" }, { name = "importlib-metadata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/51/34/e4e9245c868c6490a46ffedf6bd5b0f512bbc0a848b19e3a51f6bbad648c/opentelemetry_api-1.28.2.tar.gz", hash = "sha256:ecdc70c7139f17f9b0cf3742d57d7020e3e8315d6cffcdf1a12a905d45b19cc0", size = 62796 } +sdist = { url = "https://files.pythonhosted.org/packages/2b/6d/bbbf879826b7f3c89a45252010b5796fb1f1a0d45d9dc4709db0ef9a06c8/opentelemetry_api-1.30.0.tar.gz", hash = "sha256:375893400c1435bf623f7dfb3bcd44825fe6b56c34d0667c542ea8257b1a1240", size = 63703 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/58/b17393cdfc149e14ee84c662abf921993dcce8058628359ef1f49e2abb97/opentelemetry_api-1.28.2-py3-none-any.whl", hash = "sha256:6fcec89e265beb258fe6b1acaaa3c8c705a934bd977b9f534a2b7c0d2d4275a6", size = 64302 }, + { url = "https://files.pythonhosted.org/packages/36/0a/eea862fae6413d8181b23acf8e13489c90a45f17986ee9cf4eab8a0b9ad9/opentelemetry_api-1.30.0-py3-none-any.whl", hash = "sha256:d5f5284890d73fdf47f843dda3210edf37a38d66f44f2b5aedc1e89ed455dc09", size = 64955 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.28.2" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/cd/cd990f891b64e7698b8a6b6ab90dfac7f957db5a3d06788acd52f73ad4c0/opentelemetry_exporter_otlp_proto_common-1.28.2.tar.gz", hash = "sha256:7aebaa5fc9ff6029374546df1f3a62616fda07fccd9c6a8b7892ec130dd8baca", size = 19136 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/d7/44098bf1ef89fc5810cdbda05faa2ae9322a0dbda4921cdc965dc68a9856/opentelemetry_exporter_otlp_proto_common-1.30.0.tar.gz", hash = "sha256:ddbfbf797e518411857d0ca062c957080279320d6235a279f7b64ced73c13897", size = 19640 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/4d/769f3b1b1c6af5e603da50349ba31af757897540a75d666de22d39461055/opentelemetry_exporter_otlp_proto_common-1.28.2-py3-none-any.whl", hash = "sha256:545b1943b574f666c35b3d6cc67cb0b111060727e93a1e2866e346b33bff2a12", size = 18460 }, + { url = 
"https://files.pythonhosted.org/packages/ee/54/f4b3de49f8d7d3a78fd6e6e1a6fd27dd342eb4d82c088b9078c6a32c3808/opentelemetry_exporter_otlp_proto_common-1.30.0-py3-none-any.whl", hash = "sha256:5468007c81aa9c44dc961ab2cf368a29d3475977df83b4e30aeed42aa7bc3b38", size = 18747 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-http" -version = "1.28.2" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated" }, @@ -1979,14 +1980,14 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/91/4e32e52d13dbdf9560bc095dfe66a2c09e0034a886f7725fcda8fe10a052/opentelemetry_exporter_otlp_proto_http-1.28.2.tar.gz", hash = "sha256:d9b353d67217f091aaf4cfe8693c170973bb3e90a558992570d97020618fda79", size = 15043 } +sdist = { url = "https://files.pythonhosted.org/packages/04/f9/abb9191d536e6a2e2b7903f8053bf859a76bf784e3ca19a5749550ef19e4/opentelemetry_exporter_otlp_proto_http-1.30.0.tar.gz", hash = "sha256:c3ae75d4181b1e34a60662a6814d0b94dd33b628bee5588a878bed92cee6abdc", size = 15073 } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/23/802b889cf8bf3e235f30fbcbaa2b3fd484fe8c76b5b4db00f00c0e9af20f/opentelemetry_exporter_otlp_proto_http-1.28.2-py3-none-any.whl", hash = "sha256:af921c18212a56ef4be68458ba475791c0517ebfd8a2ff04669c9cd477d90ff2", size = 17218 }, + { url = "https://files.pythonhosted.org/packages/e1/3c/cdf34bc459613f2275aff9b258f35acdc4c4938dad161d17437de5d4c034/opentelemetry_exporter_otlp_proto_http-1.30.0-py3-none-any.whl", hash = "sha256:9578e790e579931c5ffd50f1e6975cbdefb6a0a0a5dea127a6ae87df10e0a589", size = 17245 }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.49b2" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -1994,14 +1995,14 @@ dependencies = [ { name = "packaging" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6f/1f/9fa51f6f64f4d179f4e3370eb042176ff7717682428552f5e1f4c5efcc09/opentelemetry_instrumentation-0.49b2.tar.gz", hash = "sha256:8cf00cc8d9d479e4b72adb9bd267ec544308c602b7188598db5a687e77b298e2", size = 26480 } +sdist = { url = "https://files.pythonhosted.org/packages/ec/5a/4c7f02235ac1269b48f3855f6be1afc641f31d4888d28b90b732fbce7141/opentelemetry_instrumentation-0.51b0.tar.gz", hash = "sha256:4ca266875e02f3988536982467f7ef8c32a38b8895490ddce9ad9604649424fa", size = 27760 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/e3/ad23372525653b0221212d5e2a71bd97aae64cc35f90cbf0c70de57dfa4e/opentelemetry_instrumentation-0.49b2-py3-none-any.whl", hash = "sha256:f6d782b0ef9fef4a4c745298651c65f5c532c34cd4c40d230ab5b9f3b3b4d151", size = 30693 }, + { url = "https://files.pythonhosted.org/packages/40/2c/48fa93f1acca9f79a06da0df7bfe916632ecc7fce1971067b3e46bcae55b/opentelemetry_instrumentation-0.51b0-py3-none-any.whl", hash = "sha256:c6de8bd26b75ec8b0e54dff59e198946e29de6a10ec65488c357d4b34aa5bdcf", size = 30923 }, ] [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.49b2" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asgiref" }, @@ -2010,28 +2011,28 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/84/42/079079bd7c0423bfab987a6457e34468b6ddccf501d3c91d2795c200d65d/opentelemetry_instrumentation_asgi-0.49b2.tar.gz", 
hash = "sha256:2af5faf062878330714efe700127b837038c4d9d3b70b451ab2424d5076d6c1c", size = 24106 } +sdist = { url = "https://files.pythonhosted.org/packages/9e/67/8aa6e1129f641f0f3f8786e6c5d18c1f2bbe490bd4b0e91a6879e85154d2/opentelemetry_instrumentation_asgi-0.51b0.tar.gz", hash = "sha256:b3fe97c00f0bfa934371a69674981d76591c68d937b6422a5716ca21081b4148", size = 24201 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/82/06a56e786de3ea0ef4703ed313d9d8395fb4bc9ae740cc71415178ae8bff/opentelemetry_instrumentation_asgi-0.49b2-py3-none-any.whl", hash = "sha256:c8ede13ed781402458a800411cb7ec16a25386dc21de8e5b9a568b386a1dc5f4", size = 16305 }, + { url = "https://files.pythonhosted.org/packages/54/7e/0a95ab37302729543631a789ba8e71dea75c520495739dbbbdfdc580b401/opentelemetry_instrumentation_asgi-0.51b0-py3-none-any.whl", hash = "sha256:e8072993db47303b633c6ec1bc74726ba4d32bd0c46c28dfadf99f79521a324c", size = 16340 }, ] [[package]] name = "opentelemetry-instrumentation-asyncpg" -version = "0.49b2" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/b9/8b913ef967c7be7a3b909d6fa99933f2dd877cc3d3509c974fdd423ad587/opentelemetry_instrumentation_asyncpg-0.49b2.tar.gz", hash = "sha256:fa0aa65408f0fd128b8902a7f1b6e800c21335e88a00c39377f0114cd2b1f037", size = 8563 } +sdist = { url = "https://files.pythonhosted.org/packages/1f/fe/95eb7747a37d980787440db8001ab991f54ba4f47ea8635b43644eb2df5f/opentelemetry_instrumentation_asyncpg-0.51b0.tar.gz", hash = "sha256:366fb7f7e2c3a66de28b3770e7e795fd2612eace346dd842b77bbe61a97b7ff1", size = 8656 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/56/0fb233e5c294aec87a749ab4ff4ea2b5997b3945f2095564b05cebdcbb47/opentelemetry_instrumentation_asyncpg-0.49b2-py3-none-any.whl", hash = "sha256:649dc8523ddc0a67c6a8bb74ac20250a1b7075fde9d095b9b364182afa792722", size = 9955 }, + { url = "https://files.pythonhosted.org/packages/02/35/ec8638338a1b4623172f86fa7c01a58f30fd5f39c053bbb3fabc9514d7fd/opentelemetry_instrumentation_asyncpg-0.51b0-py3-none-any.whl", hash = "sha256:6180c57c497cee1c787aeb5b090f92b1bb9ee90cb606932adfaf6bf3fdb494a5", size = 9992 }, ] [[package]] name = "opentelemetry-instrumentation-dbapi" -version = "0.49b2" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2039,14 +2040,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/73/a5/a2bc4f57105133c21a16fbfb93cafb6a2efd0d0c4c2fba644f89b7693100/opentelemetry_instrumentation_dbapi-0.49b2.tar.gz", hash = "sha256:702fd576df514c47e81cb670c4f1b8884ea66f92e43d978ac787aeea852988a7", size = 12197 } +sdist = { url = "https://files.pythonhosted.org/packages/84/b7/fdc107617b9f626632f5fbe444a6a91efa4a9d1e38447500802b8a12010c/opentelemetry_instrumentation_dbapi-0.51b0.tar.gz", hash = "sha256:740b5e17eef02a91a8d3966f06e5605817a7d875ae4d9dec8318ef652ccfc1fe", size = 13860 } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/4a/3253fdafdd31631ba26d211983d50f2562268ee31c7c0ebb1415538b1bac/opentelemetry_instrumentation_dbapi-0.49b2-py3-none-any.whl", hash = "sha256:0ac831ba3e740a15c4e79565e212c004696169229d8078d61099f5dde5af340d", size = 11498 }, + { url = 
"https://files.pythonhosted.org/packages/34/13/d3cd0292680ebd54ed6d55d7a81434bc2c6f7327d971c6690c98114d6abc/opentelemetry_instrumentation_dbapi-0.51b0-py3-none-any.whl", hash = "sha256:1b4dfb4f25b4ef509b70fb24c637436a40fe5fc8204933b956f1d0ccaa61735f", size = 12373 }, ] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.49b2" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2055,71 +2056,71 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/87/ed/a1275d5aac63edfad0afb012d2d5917412f09ac5f773c86b465b2b0d2549/opentelemetry_instrumentation_fastapi-0.49b2.tar.gz", hash = "sha256:3aa81ed7acf6aa5236d96e90a1218c5e84a9c0dce8fa63bf34ceee6218354b63", size = 19217 } +sdist = { url = "https://files.pythonhosted.org/packages/2d/dc/8db4422b5084177d1ef6c7855c69bf2e9e689f595a4a9b59e60588e0d427/opentelemetry_instrumentation_fastapi-0.51b0.tar.gz", hash = "sha256:1624e70f2f4d12ceb792d8a0c331244cd6723190ccee01336273b4559bc13abc", size = 19249 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/a9/ef2678c16caf5dc2f84628bfafdbc90139e3c78d9017afd07fbd51b1eeef/opentelemetry_instrumentation_fastapi-0.49b2-py3-none-any.whl", hash = "sha256:c66331d05bf806d7ca4f9579c1db7383aad31a9f6665dbaa2b7c9a4c1e830892", size = 12082 }, + { url = "https://files.pythonhosted.org/packages/55/1c/ec2d816b78edf2404d7b3df6d09eefb690b70bfd191b7da06f76634f1bdc/opentelemetry_instrumentation_fastapi-0.51b0-py3-none-any.whl", hash = "sha256:10513bbc11a1188adb9c1d2c520695f7a8f2b5f4de14e8162098035901cd6493", size = 12117 }, ] [[package]] name = "opentelemetry-instrumentation-sqlite3" -version = "0.49b2" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-dbapi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/94/28873232ca6330c1c1fefe1799d2f32fb4dce6457fd0ad4a3546a8f30d85/opentelemetry_instrumentation_sqlite3-0.49b2.tar.gz", hash = "sha256:91a479f84d8fb384d93ef2ee563ab584babf1e1ebe0a47e98a30037d845e96ca", size = 7513 } +sdist = { url = "https://files.pythonhosted.org/packages/e8/2a/1755f34fd1d58858272970ce9f8386a488ce2aa16c2673373ed31cc60d33/opentelemetry_instrumentation_sqlite3-0.51b0.tar.gz", hash = "sha256:3bd5dbe2292a68b27b79c44a13a03b1443341404e02351d3886ee6526792ead1", size = 7930 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/d5/dfb022aa06312c8ee870676e220e8438c1cec1b845956c02646b3959c0f5/opentelemetry_instrumentation_sqlite3-0.49b2-py3-none-any.whl", hash = "sha256:c4fd17133ea239fbcf7c03228c2a7fdfe9a891f85ff8197f8ed9ea04c5d08af0", size = 8700 }, + { url = "https://files.pythonhosted.org/packages/d8/d0/6288eb2b6065b7766eee545729e6e68ac241ce82ec60a8452742414536c7/opentelemetry_instrumentation_sqlite3-0.51b0-py3-none-any.whl", hash = "sha256:77418bfec1b45f4d44a9a316c355aab33d36eb7cc1cd5d871f40acae36ae5c96", size = 9339 }, ] [[package]] name = "opentelemetry-proto" -version = "1.28.2" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/45/96c4f34c79fd87dc8a1c0c432f23a5a202729f21e4e63c8b36fc8e57767a/opentelemetry_proto-1.28.2.tar.gz", hash = "sha256:7c0d125a6b71af88bfeeda16bfdd0ff63dc2cf0039baf6f49fa133b203e3f566", 
size = 34316 } +sdist = { url = "https://files.pythonhosted.org/packages/31/6e/c1ff2e3b0cd3a189a6be03fd4d63441d73d7addd9117ab5454e667b9b6c7/opentelemetry_proto-1.30.0.tar.gz", hash = "sha256:afe5c9c15e8b68d7c469596e5b32e8fc085eb9febdd6fb4e20924a93a0389179", size = 34362 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/12/646f48d6d698a6df0437a22b591387440dc4888c8752d1a1300f730da710/opentelemetry_proto-1.28.2-py3-none-any.whl", hash = "sha256:0837498f59db55086462915e5898d0b1a18c1392f6db4d7e937143072a72370c", size = 55818 }, + { url = "https://files.pythonhosted.org/packages/56/d7/85de6501f7216995295f7ec11e470142e6a6e080baacec1753bbf272e007/opentelemetry_proto-1.30.0-py3-none-any.whl", hash = "sha256:c6290958ff3ddacc826ca5abbeb377a31c2334387352a259ba0df37c243adc11", size = 55854 }, ] [[package]] name = "opentelemetry-sdk" -version = "1.28.2" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/f4/840a5af4efe48d7fb4c456ad60fd624673e871a60d6494f7ff8a934755d4/opentelemetry_sdk-1.28.2.tar.gz", hash = "sha256:5fed24c5497e10df30282456fe2910f83377797511de07d14cec0d3e0a1a3110", size = 157272 } +sdist = { url = "https://files.pythonhosted.org/packages/93/ee/d710062e8a862433d1be0b85920d0c653abe318878fef2d14dfe2c62ff7b/opentelemetry_sdk-1.30.0.tar.gz", hash = "sha256:c9287a9e4a7614b9946e933a67168450b9ab35f08797eb9bc77d998fa480fa18", size = 158633 } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/8b/4f2b418496c08016d4384f9b1c4725a8af7faafa248d624be4bb95993ce1/opentelemetry_sdk-1.28.2-py3-none-any.whl", hash = "sha256:93336c129556f1e3ccd21442b94d3521759541521861b2214c499571b85cb71b", size = 118757 }, + { url = "https://files.pythonhosted.org/packages/97/28/64d781d6adc6bda2260067ce2902bd030cf45aec657e02e28c5b4480b976/opentelemetry_sdk-1.30.0-py3-none-any.whl", hash = "sha256:14fe7afc090caad881addb6926cec967129bd9260c4d33ae6a217359f6b61091", size = 118717 }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.49b2" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated" }, { name = "opentelemetry-api" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/0a/e3b93f94aa3223c6fd8e743502a1fefd4fb3a753d8f501ce2a418f7c0bd4/opentelemetry_semantic_conventions-0.49b2.tar.gz", hash = "sha256:44e32ce6a5bb8d7c0c617f84b9dc1c8deda1045a07dc16a688cc7cbeab679997", size = 95213 } +sdist = { url = "https://files.pythonhosted.org/packages/1e/c0/0f9ef4605fea7f2b83d55dd0b0d7aebe8feead247cd6facd232b30907b4f/opentelemetry_semantic_conventions-0.51b0.tar.gz", hash = "sha256:3fabf47f35d1fd9aebcdca7e6802d86bd5ebc3bc3408b7e3248dde6e87a18c47", size = 107191 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/be/6661c8f76708bb3ba38c90be8fa8d7ffe17ccbc5cbbc229334f5535f6448/opentelemetry_semantic_conventions-0.49b2-py3-none-any.whl", hash = "sha256:51e7e1d0daa958782b6c2a8ed05e5f0e7dd0716fc327ac058777b8659649ee54", size = 159199 }, + { url = "https://files.pythonhosted.org/packages/2e/75/d7bdbb6fd8630b4cafb883482b75c4fc276b6426619539d266e32ac53266/opentelemetry_semantic_conventions-0.51b0-py3-none-any.whl", hash = "sha256:fdc777359418e8d06c86012c3dc92c88a6453ba662e941593adb062e48c2eeae", size = 177416 }, ] [[package]] name = "opentelemetry-util-http" -version = "0.49b2" +version = "0.51b0" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/28/ac5b1a0fd210ecb6c86c5e04256ba09c8308eb41e116097b9e2714d4b8dd/opentelemetry_util_http-0.49b2.tar.gz", hash = "sha256:5958c7009f79146bbe98b0fdb23d9d7bf1ea9cd154a1c199029b1a89e0557199", size = 7861 } +sdist = { url = "https://files.pythonhosted.org/packages/58/64/32510c0a803465eb6ef1f5bd514d0f5627f8abc9444ed94f7240faf6fcaa/opentelemetry_util_http-0.51b0.tar.gz", hash = "sha256:05edd19ca1cc3be3968b1e502fd94816901a365adbeaab6b6ddb974384d3a0b9", size = 8043 } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/22/9128f10d1c2868ee42df7e10937d00f154a69bee87c416ca9b20a6af6c54/opentelemetry_util_http-0.49b2-py3-none-any.whl", hash = "sha256:e325d6511c6bee7b43170eb0c93261a210ec57e20ab1d7a99838515ef6d2bf58", size = 6941 }, + { url = "https://files.pythonhosted.org/packages/48/dd/c371eeb9cc78abbdad231a27ce1a196a37ef96328d876ccbb381dea4c8ee/opentelemetry_util_http-0.51b0-py3-none-any.whl", hash = "sha256:0561d7a6e9c422b9ef9ae6e77eafcfcd32a2ab689f5e801475cbb67f189efa20", size = 7304 }, ] [[package]] @@ -2559,6 +2560,7 @@ requires-dist = [ { name = "pydantic-ai-examples", marker = "extra == 'examples'", editable = "examples" }, { name = "pydantic-ai-slim", extras = ["openai", "vertexai", "groq", "anthropic", "mistral", "cohere"], editable = "pydantic_ai_slim" }, ] +provides-extras = ["examples", "logfire"] [package.metadata.requires-dev] docs = [ @@ -2679,6 +2681,7 @@ requires-dist = [ { name = "pydantic-graph", editable = "pydantic_graph" }, { name = "requests", marker = "extra == 'vertexai'", specifier = ">=2.32.3" }, ] +provides-extras = ["logfire", "openai", "cohere", "vertexai", "anthropic", "groq", "mistral"] [package.metadata.requires-dev] dev = [