@@ -2,18 +2,20 @@
 
 from openai import OpenAI
 
-from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
-
 # NOTE: OpenTelemetry Python Logs and Events APIs are in beta
-from opentelemetry import trace, _logs, _events
-from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk._logs import LoggerProvider
+from opentelemetry import _events, _logs, trace
+from opentelemetry.exporter.otlp.proto.http._log_exporter import (
+    OTLPLogExporter,
+)
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
+    OTLPSpanExporter,
+)
+from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
 from opentelemetry.sdk._events import EventLoggerProvider
-
-from opentelemetry.sdk.trace.export import BatchSpanProcessor
+from opentelemetry.sdk._logs import LoggerProvider
 from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
-from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
 
 # configure tracing
 trace.set_tracer_provider(TracerProvider())
@@ -23,14 +25,16 @@
 
 # configure logging and events
 _logs.set_logger_provider(LoggerProvider())
-_logs.get_logger_provider().add_log_record_processor(BatchLogRecordProcessor(OTLPLogExporter()))
+_logs.get_logger_provider().add_log_record_processor(
+    BatchLogRecordProcessor(OTLPLogExporter())
+)
 _events.set_event_logger_provider(EventLoggerProvider())
 
 # instrument OpenAI
 OpenAIInstrumentor().instrument()
 
-def main():
 
+def main():
     client = OpenAI()
     chat_completion = client.chat.completions.create(
         model=os.getenv("CHAT_MODEL", "gpt-4o-mini"),
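The unchanged context between the two hunks (old lines 20-22) is collapsed in this view. Given the BatchSpanProcessor and OTLPSpanExporter imports, it presumably attaches the span exporter to the tracer provider, mirroring the log pipeline shown in the second hunk. A minimal sketch of that wiring, assuming the standard OpenTelemetry SDK API (this is a reconstruction, not part of the diff):

# hypothetical contents of the collapsed context: wire an OTLP/HTTP span
# exporter into the tracer provider, just like the log pipeline below it
trace.get_tracer_provider().add_span_processor(
    BatchSpanProcessor(OTLPSpanExporter())
)

To run the resulting example you would typically set OPENAI_API_KEY for the OpenAI client, optionally CHAT_MODEL to override the gpt-4o-mini default, and point OTEL_EXPORTER_OTLP_ENDPOINT at an OTLP/HTTP collector endpoint so the span and log exporters have somewhere to send data.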