Commit 56d2679
Add "exclude prompts" flag as optional
1 parent 4b33a4e

1 file changed: sentry_sdk/integrations/openai.py (+30 −4)

--- a/sentry_sdk/integrations/openai.py
+++ b/sentry_sdk/integrations/openai.py
@@ -50,6 +50,10 @@ def count_tokens(s):
 class OpenAIIntegration(Integration):
     identifier = "openai"
 
+    def __init__(self, exclude_prompts=False):
+        # type: (OpenAIIntegration, bool) -> None
+        self.exclude_prompts = exclude_prompts
+
     @staticmethod
     def setup_once():
         # type: () -> None
@@ -122,6 +126,14 @@ def _wrap_chat_completion_create(f):
     @wraps(f)
     def new_chat_completion(*args, **kwargs):
         # type: (*Any, **Any) -> Any
+        hub = Hub.current
+        if not hub:
+            return f(*args, **kwargs)
+
+        integration = hub.get_integration(OpenAIIntegration)  # type: OpenAIIntegration
+        if not integration:
+            return f(*args, **kwargs)
+
         if "messages" not in kwargs:
             # invalid call (in all versions of openai), let it return error
             return f(*args, **kwargs)
@@ -149,13 +161,13 @@ def new_chat_completion(*args, **kwargs):
             raise e from None
 
         with capture_internal_exceptions():
-            if _should_send_default_pii():
+            if _should_send_default_pii() or not integration.exclude_prompts:
                 span.set_data("ai.input_messages", messages)
             span.set_data("ai.model_id", model)
             span.set_data("ai.streaming", streaming)
 
             if hasattr(res, "choices"):
-                if _should_send_default_pii():
+                if _should_send_default_pii() or not integration.exclude_prompts:
                     span.set_data(
                         "ai.responses", list(map(lambda x: x.message, res.choices))
                     )
@@ -186,7 +198,10 @@ def new_iterator():
                             all_responses = list(
                                 map(lambda chunk: "".join(chunk), data_buf)
                             )
-                            if _should_send_default_pii():
+                            if (
+                                _should_send_default_pii()
+                                or not integration.exclude_prompts
+                            ):
                                 span.set_data("ai.responses", all_responses)
                             _calculate_chat_completion_usage(
                                 messages, res, span, all_responses
@@ -208,11 +223,22 @@ def _wrap_embeddings_create(f):
     @wraps(f)
     def new_embeddings_create(*args, **kwargs):
         # type: (*Any, **Any) -> Any
+
+        hub = Hub.current
+        if not hub:
+            return f(*args, **kwargs)
+
+        integration = hub.get_integration(OpenAIIntegration)  # type: OpenAIIntegration
+        if not integration:
+            return f(*args, **kwargs)
+
         with sentry_sdk.start_span(
             op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
             description="OpenAI Embedding Creation",
         ) as span:
-            if "input" in kwargs:
+            if "input" in kwargs and (
+                _should_send_default_pii() or not integration.exclude_prompts
+            ):
                 if isinstance(kwargs["input"], str):
                     span.set_data("ai.input_messages", [kwargs["input"]])
                 elif (

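For reference, a minimal usage sketch of the new flag when registering the integration. The DSN and the send_default_pii setting are placeholder values for illustration; only OpenAIIntegration(exclude_prompts=...) comes from this commit:

    import sentry_sdk
    from sentry_sdk.integrations.openai import OpenAIIntegration

    sentry_sdk.init(
        dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
        send_default_pii=False,
        # With exclude_prompts=True and send_default_pii disabled, the integration
        # skips recording "ai.input_messages" and "ai.responses" on spans; with the
        # default exclude_prompts=False, that data is still recorded.
        integrations=[OpenAIIntegration(exclude_prompts=True)],
    )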