Skip to content

Commit f1a6988

Browse files
alizenhom authored and xrmx committed
Add Support for Async openai instrumentation (open-telemetry#2984)
1 parent 2286b86 commit f1a6988

20 files changed

+3081
-20
lines changed

Diff for: .github/component_owners.yml

+1
Original file line numberDiff line numberDiff line change
@@ -73,4 +73,5 @@ components:
7373
- lzchen
7474
- gyliu513
7575
- nirga
76+
- alizenhom
7677
- codefromthecrypt

Diff for: instrumentation-genai/opentelemetry-instrumentation-openai-v2/CHANGELOG.md

+2
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
77

88
## Unreleased
99

10+
- Support for `AsyncOpenAI/AsyncCompletions` ([#2984](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/2984))
11+
1012
## Version 2.0b0 (2024-11-08)
1113

1214
- Use generic `OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT` environment variable

Diff for: instrumentation-genai/opentelemetry-instrumentation-openai-v2/src/opentelemetry/instrumentation/openai_v2/__init__.py

+10-1
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@
5252
from opentelemetry.semconv.schemas import Schemas
5353
from opentelemetry.trace import get_tracer
5454

55-
from .patch import chat_completions_create
55+
from .patch import async_chat_completions_create, chat_completions_create
5656

5757

5858
class OpenAIInstrumentor(BaseInstrumentor):
@@ -84,7 +84,16 @@ def _instrument(self, **kwargs):
8484
),
8585
)
8686

87+
wrap_function_wrapper(
88+
module="openai.resources.chat.completions",
89+
name="AsyncCompletions.create",
90+
wrapper=async_chat_completions_create(
91+
tracer, event_logger, is_content_enabled()
92+
),
93+
)
94+
8795
def _uninstrument(self, **kwargs):
8896
import openai # pylint: disable=import-outside-toplevel
8997

9098
unwrap(openai.resources.chat.completions.Completions, "create")
99+
unwrap(openai.resources.chat.completions.AsyncCompletions, "create")

Diff for: instrumentation-genai/opentelemetry-instrumentation-openai-v2/src/opentelemetry/instrumentation/openai_v2/patch.py

+71-15
Original file line numberDiff line numberDiff line change
@@ -21,15 +21,12 @@
2121
from opentelemetry.semconv._incubating.attributes import (
2222
gen_ai_attributes as GenAIAttributes,
2323
)
24-
from opentelemetry.semconv.attributes import (
25-
error_attributes as ErrorAttributes,
26-
)
2724
from opentelemetry.trace import Span, SpanKind, Tracer
28-
from opentelemetry.trace.status import Status, StatusCode
2925

3026
from .utils import (
3127
choice_to_event,
3228
get_llm_request_attributes,
29+
handle_span_exception,
3330
is_streaming,
3431
message_to_event,
3532
set_span_attribute,
@@ -72,12 +69,49 @@ def traced_method(wrapped, instance, args, kwargs):
7269
return result
7370

7471
except Exception as error:
75-
span.set_status(Status(StatusCode.ERROR, str(error)))
72+
handle_span_exception(span, error)
73+
raise
74+
75+
return traced_method
76+
77+
78+
def async_chat_completions_create(
79+
tracer: Tracer, event_logger: EventLogger, capture_content: bool
80+
):
81+
"""Wrap the `create` method of the `AsyncChatCompletion` class to trace it."""
82+
83+
async def traced_method(wrapped, instance, args, kwargs):
84+
span_attributes = {**get_llm_request_attributes(kwargs, instance)}
85+
86+
span_name = f"{span_attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]} {span_attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]}"
87+
with tracer.start_as_current_span(
88+
name=span_name,
89+
kind=SpanKind.CLIENT,
90+
attributes=span_attributes,
91+
end_on_exit=False,
92+
) as span:
93+
if span.is_recording():
94+
for message in kwargs.get("messages", []):
95+
event_logger.emit(
96+
message_to_event(message, capture_content)
97+
)
98+
99+
try:
100+
result = await wrapped(*args, **kwargs)
101+
if is_streaming(kwargs):
102+
return StreamWrapper(
103+
result, span, event_logger, capture_content
104+
)
105+
76106
if span.is_recording():
77-
span.set_attribute(
78-
ErrorAttributes.ERROR_TYPE, type(error).__qualname__
107+
_set_response_attributes(
108+
span, result, event_logger, capture_content
79109
)
80110
span.end()
111+
return result
112+
113+
except Exception as error:
114+
handle_span_exception(span, error)
81115
raise
82116

83117
return traced_method
@@ -286,10 +320,19 @@ def __enter__(self):
286320
def __exit__(self, exc_type, exc_val, exc_tb):
287321
try:
288322
if exc_type is not None:
289-
self.span.set_status(Status(StatusCode.ERROR, str(exc_val)))
290-
self.span.set_attribute(
291-
ErrorAttributes.ERROR_TYPE, exc_type.__qualname__
292-
)
323+
handle_span_exception(self.span, exc_val)
324+
finally:
325+
self.cleanup()
326+
return False # Propagate the exception
327+
328+
async def __aenter__(self):
329+
self.setup()
330+
return self
331+
332+
async def __aexit__(self, exc_type, exc_val, exc_tb):
333+
try:
334+
if exc_type is not None:
335+
handle_span_exception(self.span, exc_val)
293336
finally:
294337
self.cleanup()
295338
return False # Propagate the exception
@@ -301,6 +344,9 @@ def close(self):
301344
def __iter__(self):
302345
return self
303346

347+
def __aiter__(self):
348+
return self
349+
304350
def __next__(self):
305351
try:
306352
chunk = next(self.stream)
@@ -310,10 +356,20 @@ def __next__(self):
310356
self.cleanup()
311357
raise
312358
except Exception as error:
313-
self.span.set_status(Status(StatusCode.ERROR, str(error)))
314-
self.span.set_attribute(
315-
ErrorAttributes.ERROR_TYPE, type(error).__qualname__
316-
)
359+
handle_span_exception(self.span, error)
360+
self.cleanup()
361+
raise
362+
363+
async def __anext__(self):
364+
try:
365+
chunk = await self.stream.__anext__()
366+
self.process_chunk(chunk)
367+
return chunk
368+
except StopAsyncIteration:
369+
self.cleanup()
370+
raise
371+
except Exception as error:
372+
handle_span_exception(self.span, error)
317373
self.cleanup()
318374
raise
319375

Diff for: instrumentation-genai/opentelemetry-instrumentation-openai-v2/src/opentelemetry/instrumentation/openai_v2/utils.py

+18-3
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,10 @@
2626
from opentelemetry.semconv._incubating.attributes import (
2727
server_attributes as ServerAttributes,
2828
)
29+
from opentelemetry.semconv.attributes import (
30+
error_attributes as ErrorAttributes,
31+
)
32+
from opentelemetry.trace.status import Status, StatusCode
2933

3034
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT = (
3135
"OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"
@@ -138,9 +142,11 @@ def choice_to_event(choice, capture_content):
138142

139143
if choice.message:
140144
message = {
141-
"role": choice.message.role
142-
if choice.message and choice.message.role
143-
else None
145+
"role": (
146+
choice.message.role
147+
if choice.message and choice.message.role
148+
else None
149+
)
144150
}
145151
tool_calls = extract_tool_calls(choice.message, capture_content)
146152
if tool_calls:
@@ -210,3 +216,12 @@ def get_llm_request_attributes(
210216

211217
# filter out None values
212218
return {k: v for k, v in attributes.items() if v is not None}
219+
220+
221+
def handle_span_exception(span, error):
222+
span.set_status(Status(StatusCode.ERROR, str(error)))
223+
if span.is_recording():
224+
span.set_attribute(
225+
ErrorAttributes.ERROR_TYPE, type(error).__qualname__
226+
)
227+
span.end()

Diff for: instrumentation-genai/opentelemetry-instrumentation-openai-v2/test-requirements-0.txt

+1
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ importlib-metadata==6.11.0
55
packaging==24.0
66
pytest==7.4.4
77
pytest-vcr==1.0.2
8+
pytest-asyncio==0.21.0
89
wrapt==1.16.0
910
opentelemetry-api==1.28 # when updating, also update in pyproject.toml
1011
opentelemetry-sdk==1.28 # when updating, also update in pyproject.toml

Diff for: instrumentation-genai/opentelemetry-instrumentation-openai-v2/test-requirements-1.txt

+1
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ importlib-metadata==6.11.0
55
packaging==24.0
66
pytest==7.4.4
77
pytest-vcr==1.0.2
8+
pytest-asyncio==0.21.0
89
wrapt==1.16.0
910
# test with the latest version of opentelemetry-api, sdk, and semantic conventions
1011

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
interactions:
2+
- request:
3+
body: |-
4+
{
5+
"messages": [
6+
{
7+
"role": "user",
8+
"content": "Say this is a test"
9+
}
10+
],
11+
"model": "this-model-does-not-exist"
12+
}
13+
headers:
14+
accept:
15+
- application/json
16+
accept-encoding:
17+
- gzip, deflate
18+
authorization:
19+
- Bearer test_openai_api_key
20+
connection:
21+
- keep-alive
22+
content-length:
23+
- '103'
24+
content-type:
25+
- application/json
26+
host:
27+
- api.openai.com
28+
user-agent:
29+
- AsyncOpenAI/Python 1.26.0
30+
x-stainless-arch:
31+
- arm64
32+
x-stainless-async:
33+
- async:asyncio
34+
x-stainless-lang:
35+
- python
36+
x-stainless-os:
37+
- MacOS
38+
x-stainless-package-version:
39+
- 1.26.0
40+
x-stainless-runtime:
41+
- CPython
42+
x-stainless-runtime-version:
43+
- 3.12.5
44+
method: POST
45+
uri: https://api.openai.com/v1/chat/completions
46+
response:
47+
body:
48+
string: |-
49+
{
50+
"error": {
51+
"message": "The model `this-model-does-not-exist` does not exist or you do not have access to it.",
52+
"type": "invalid_request_error",
53+
"param": null,
54+
"code": "model_not_found"
55+
}
56+
}
57+
headers:
58+
CF-Cache-Status:
59+
- DYNAMIC
60+
CF-RAY:
61+
- 8e1a80827a861852-MRS
62+
Connection:
63+
- keep-alive
64+
Content-Type:
65+
- application/json; charset=utf-8
66+
Date:
67+
- Wed, 13 Nov 2024 00:04:01 GMT
68+
Server:
69+
- cloudflare
70+
Set-Cookie: test_set_cookie
71+
Transfer-Encoding:
72+
- chunked
73+
X-Content-Type-Options:
74+
- nosniff
75+
alt-svc:
76+
- h3=":443"; ma=86400
77+
content-length:
78+
- '231'
79+
openai-organization: test_openai_org_id
80+
strict-transport-security:
81+
- max-age=31536000; includeSubDomains; preload
82+
vary:
83+
- Origin
84+
x-request-id:
85+
- req_5cf06a7fabd45ebe21ee38c14c5b2f76
86+
status:
87+
code: 404
88+
message: Not Found
89+
version: 1

0 commit comments

Comments (0)