File tree: 7 files changed (+29 / -10 lines)

  instrumentation-genai/opentelemetry-instrumentation-openai-v2/examples
  opentelemetry-distro/src/opentelemetry/distro
First example: .env

@@ -6,9 +6,9 @@ OPENAI_API_KEY=sk-YOUR_API_KEY
 # OPENAI_API_KEY=unused
 # CHAT_MODEL=qwen2.5:0.5b

- # Uncomment and change to your OTLP endpoint
- # OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318
- # OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
+ # Uncomment and change to your OTLP endpoint and/or
+ # OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
+ # OTEL_EXPORTER_OTLP_PROTOCOL=grpc

 OTEL_SERVICE_NAME=opentelemetry-python-openai

 # Change to 'false' to hide prompt and completion content
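Not part of the diff: a minimal sketch of the manual setup this .env now points at, assuming the opentelemetry-exporter-otlp-proto-grpc package from the example's requirements.txt. With no arguments, OTLPSpanExporter reads OTEL_EXPORTER_OTLP_ENDPOINT from the environment and otherwise falls back to localhost:4317 over gRPC.

    # Illustrative sketch, not from the PR: wire tracing to the gRPC OTLP endpoint.
    from opentelemetry import trace
    from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import BatchSpanProcessor

    provider = TracerProvider()
    # OTLPSpanExporter() picks up OTEL_EXPORTER_OTLP_ENDPOINT (default localhost:4317).
    provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
    trace.set_tracer_provider(provider)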
First example: README (RST)

@@ -9,12 +9,18 @@ duration of the chat request. Logs capture the chat request and the generated
 response, providing a comprehensive view of the performance and behavior of
 your OpenAI requests.

+ Note: `.env <.env>`_ file configures additional environment variables:
+
+ - `OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true` configures
+   OpenAI instrumentation to capture prompt and completion contents on
+   events.
+
 Setup
 -----

 Minimally, update the `.env <.env>`_ file with your "OPENAI_API_KEY". An
 OTLP compatible endpoint should be listening for traces and logs on
- http://localhost:4318. If not, update "OTEL_EXPORTER_OTLP_ENDPOINT" as well.
+ http://localhost:4317. If not, update "OTEL_EXPORTER_OTLP_ENDPOINT" as well.

 Next, set up a virtual environment like this:
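As a hedged illustration of the capture flag described in the new README note (not part of the diff): the instrumentation package listed in requirements.txt exposes an instrumentor that, once applied, emits the prompt/completion events the flag controls. The class name below reflects the opentelemetry-instrumentation-openai-v2 package as I understand it; treat it as an assumption.

    # Sketch only (not in the PR): enable the OpenAI instrumentation by hand.
    # The capture flag is read from the environment by the instrumentation;
    # setting it here just mirrors the .env entry.
    import os

    os.environ.setdefault("OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT", "true")

    from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor

    # Patches the openai client so chat completions produce spans and events.
    OpenAIInstrumentor().instrument()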
First example: requirements.txt

 openai~=1.54.4

 opentelemetry-sdk~=1.28.2
- opentelemetry-exporter-otlp-proto-http~=1.28.2
+ opentelemetry-exporter-otlp-proto-grpc~=1.28.2
 opentelemetry-instrumentation-openai-v2~=2.0b0
Second example: .env

@@ -6,13 +6,15 @@ OPENAI_API_KEY=sk-YOUR_API_KEY
 # OPENAI_API_KEY=unused
 # CHAT_MODEL=qwen2.5:0.5b

- OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318
- OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
+ # Uncomment and change to your OTLP endpoint and/or
+ # OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
+ # OTEL_EXPORTER_OTLP_PROTOCOL=grpc
+
 OTEL_SERVICE_NAME=opentelemetry-python-openai

 # Change to 'false' to disable logging
 OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED=true
 # Change to 'console' if your OTLP endpoint doesn't support logs
- OTEL_LOGS_EXPORTER=otlp_proto_http
+ OTEL_LOGS_EXPORTER=otlp
 # Change to 'false' to hide prompt and completion content
 OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true
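For context (not part of the diff), OTEL_LOGS_EXPORTER=otlp combined with OTEL_EXPORTER_OTLP_PROTOCOL=grpc roughly resolves to the wiring below. In the example itself the SDK/distro auto-configuration handles this, so the sketch is only illustrative; the _log_exporter module path is an assumption about the gRPC exporter package layout.

    # Sketch (assumed module paths, SDK 1.28-era logs API): programmatic
    # equivalent of OTEL_LOGS_EXPORTER=otlp over gRPC.
    from opentelemetry._logs import set_logger_provider
    from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter
    from opentelemetry.sdk._logs import LoggerProvider
    from opentelemetry.sdk._logs.export import BatchLogRecordProcessor

    logger_provider = LoggerProvider()
    # OTLPLogExporter() defaults to the collector's gRPC port, localhost:4317.
    logger_provider.add_log_record_processor(BatchLogRecordProcessor(OTLPLogExporter()))
    set_logger_provider(logger_provider)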
Second example: README (RST)

@@ -10,12 +10,21 @@ duration of the chat request. Logs capture the chat request and the generated
 response, providing a comprehensive view of the performance and behavior of
 your OpenAI requests.

+ Note: `.env <.env>`_ file configures additional environment variables:
+
+ - `OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED=true` configures
+   OpenTelemetry SDK to export logs and events.
+ - `OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true` configures
+   OpenAI instrumentation to capture prompt and completion contents on
+   events.
+ - `OTEL_LOGS_EXPORTER=otlp` to specify exporter type.
+
 Setup
 -----

 Minimally, update the `.env <.env>`_ file with your "OPENAI_API_KEY". An
 OTLP compatible endpoint should be listening for traces and logs on
- http://localhost:4318. If not, update "OTEL_EXPORTER_OTLP_ENDPOINT" as well.
+ http://localhost:4317. If not, update "OTEL_EXPORTER_OTLP_ENDPOINT" as well.

 Next, set up a virtual environment like this:
Second example: requirements.txt

 openai~=1.54.4

 opentelemetry-sdk~=1.28.2
- opentelemetry-exporter-otlp-proto-http~=1.28.2
+ opentelemetry-exporter-otlp-proto-grpc~=1.28.2
 opentelemetry-distro~=0.49b2
 opentelemetry-instrumentation-openai-v2~=2.0b0
Distro module (opentelemetry-distro/src/opentelemetry/distro)

@@ -17,6 +17,7 @@
 from opentelemetry.environment_variables import (
     OTEL_METRICS_EXPORTER,
     OTEL_TRACES_EXPORTER,
+    OTEL_LOGS_EXPORTER
 )
 from opentelemetry.instrumentation.distro import BaseDistro
 from opentelemetry.sdk._configuration import _OTelSDKConfigurator

@@ -37,4 +38,5 @@ class OpenTelemetryDistro(BaseDistro):
     def _configure(self, **kwargs):
         os.environ.setdefault(OTEL_TRACES_EXPORTER, "otlp")
         os.environ.setdefault(OTEL_METRICS_EXPORTER, "otlp")
+        os.environ.setdefault(OTEL_LOGS_EXPORTER, "otlp")
         os.environ.setdefault(OTEL_EXPORTER_OTLP_PROTOCOL, "grpc")
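A small sketch (not from the PR) of why the new default is non-intrusive: _configure uses os.environ.setdefault, so a log exporter the user has already chosen, for example OTEL_LOGS_EXPORTER=console, is left untouched.

    # Demonstrates the setdefault semantics behind the added line.
    import os

    from opentelemetry.environment_variables import OTEL_LOGS_EXPORTER

    os.environ[OTEL_LOGS_EXPORTER] = "console"          # user's explicit choice
    os.environ.setdefault(OTEL_LOGS_EXPORTER, "otlp")   # what _configure() does
    assert os.environ[OTEL_LOGS_EXPORTER] == "console"  # the default does not override it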