
Commit 659d68b

Fix linting errors
1 parent 3e7d78f commit 659d68b

File tree

2 files changed: +15 -11 lines changed


sentry_sdk/integrations/openai.py

+14-10
@@ -3,24 +3,26 @@
 from sentry_sdk._types import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Iterator, Any, TypeVar, Callable
-
-    F = TypeVar("F", bound=Callable[..., Any])
+    from typing import Iterator, Any, Iterable, List, Optional, Callable
+    from sentry_sdk.tracing import Span
 
 from sentry_sdk._functools import wraps
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.utils import logger, capture_internal_exceptions
 
 try:
-    from openai.types.chat import ChatCompletionChunk
-    from openai.resources.chat.completions import Completions
-    from openai.resources import Embeddings
+    from openai.types.chat import ChatCompletionChunk  # type: ignore
+    from openai.resources.chat.completions import Completions  # type: ignore
+    from openai.resources import Embeddings  # type: ignore
+
+    if TYPE_CHECKING:
+        from openai.types.chat import ChatCompletionMessageParam
 except ImportError:
     raise DidNotEnable("OpenAI not installed")
 
 try:
-    import tiktoken
+    import tiktoken  # type: ignore
 
     enc = tiktoken.get_encoding("cl100k_base")
 
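
The import changes follow a standard mypy pattern: names needed only in annotations (the typing helpers, Span, ChatCompletionMessageParam) move under if TYPE_CHECKING: so they are never imported at runtime, and third-party imports that ship no type stubs get # type: ignore. A minimal sketch of the pattern, with an illustrative helper function that is not part of the diff:

    from sentry_sdk._types import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only evaluated by the type checker; no runtime import cost.
        from sentry_sdk.tracing import Span

    try:
        import tiktoken  # type: ignore  # no stubs published, so silence mypy

        enc = tiktoken.get_encoding("cl100k_base")
    except ImportError:
        enc = None


    def record_token_count(span, text):
        # type: (Span, str) -> None
        # The comment annotation lets mypy resolve Span even though the
        # guarded import above never runs outside of type checking.
        if enc is not None:
            span.set_data("token_count", len(enc.encode(text)))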

@@ -51,14 +53,15 @@ class OpenAIIntegration(Integration):
 
     @staticmethod
     def setup_once():
-        # TODO minimum version
+        # type: () -> None
         Completions.create = _wrap_chat_completion_create(Completions.create)
         Embeddings.create = _wrap_enbeddings_create(Embeddings.create)
 
 
 def _calculate_chat_completion_usage(
     messages, response, span, streaming_message_responses=None
 ):
+    # type: (Iterable[ChatCompletionMessageParam], Any, Span, Optional[List[str]]) -> None
     completion_tokens = 0
     prompt_tokens = 0
     total_tokens = 0
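
The new annotations use mypy's comment syntax rather than inline annotations, matching the rest of this module: the # type: (...) -> ... comment sits on the first line of the function body and lists one type per parameter, left to right. A small sketch in the same style (this helper is hypothetical, not part of the integration):

    from typing import Any, Iterable, List, Optional


    def _count_messages(messages, streaming_message_responses=None):
        # type: (Iterable[Any], Optional[List[str]]) -> int
        # mypy pairs each entry in the comment with a parameter, so the
        # signature stays checkable without Python 3-only syntax.
        count = len(list(messages))
        if streaming_message_responses is not None:
            count += len(streaming_message_responses)
        return count
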
@@ -104,7 +107,7 @@ def _calculate_chat_completion_usage(
 
 
 def _wrap_chat_completion_create(f):
-    # type: (F) -> F
+    # type: (Callable[..., Any]) -> Callable[..., Any]
     @wraps(f)
     def new_chat_completion(*args, **kwargs):
         # type: (*Any, **Any) -> Any
@@ -180,10 +183,11 @@ def new_iterator() -> Iterator[ChatCompletionChunk]:
 
 
 def _wrap_enbeddings_create(f):
-    # type: (F) -> F
+    # type: (Callable[..., Any]) -> Callable[..., Any]
 
     @wraps(f)
     def new_embeddings_create(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
         hub = Hub.current
         integration = hub.get_integration(OpenAIIntegration)
         if integration is None:
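
Both wrappers are ordinary decorators that setup_once() assigns over the OpenAI client methods (Completions.create = _wrap_chat_completion_create(Completions.create)). Re-typing them as Callable[..., Any] -> Callable[..., Any] removes the need for the F TypeVar dropped from the TYPE_CHECKING block; the trade-off is that mypy no longer knows the wrapper preserves the wrapped function's exact signature. A reduced sketch of the monkey-patch pattern, using the stdlib functools.wraps in place of the SDK's vendored sentry_sdk._functools.wraps and a hypothetical _wrap_create name:

    from functools import wraps
    from typing import Any, Callable


    def _wrap_create(f):
        # type: (Callable[..., Any]) -> Callable[..., Any]
        @wraps(f)  # keep the original name and docstring on the patched method
        def new_create(*args, **kwargs):
            # type: (*Any, **Any) -> Any
            # Instrumentation (spans, token counts) would go here,
            # before and after calling through to the real method.
            return f(*args, **kwargs)

        return new_create


    # Applied once at integration setup, e.g.:
    # Completions.create = _wrap_create(Completions.create)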

tox.ini

+1-1
@@ -443,8 +443,8 @@ deps =
     loguru-latest: loguru
 
     # OpenAI
+    openai: tiktoken
     openai-v1: openai~=1.0.0
-    openai-v1: tiktoken
     openai-latest: openai
 
     # OpenTelemetry (OTel)
