Commit 93e1c71

format fixes
1 parent ea0df4e commit 93e1c71

File tree

3 files changed: +27 -21 lines


instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock.py

+24 -18
@@ -334,7 +334,7 @@ def _extract_llama_attributes(self, attributes, request_body):
             attributes, GEN_AI_REQUEST_TOP_P, request_body.get("top_p")
         )
         # request for meta llama models does not contain stop_sequences field
-
+
     def _extract_mistral_attributes(self, attributes, request_body):
         prompt = request_body.get("prompt")
         if prompt:
@@ -382,24 +382,30 @@ def _get_request_messages(self):
         if not messages:
             model_id = self._call_context.params.get(_MODEL_ID_KEY)
             if "amazon.titan" in model_id:
-                if input_text := decoded_body.get("inputText"):
-                    messages = [
-                        {"role": "user", "content": [{"text": input_text}]}
-                    ]
+                messages = self._get_messages_from_input_text(
+                    decoded_body, "inputText"
+                )
             elif "cohere.command-r" in model_id:
                 # chat_history can be converted to messages; for now, just use message
-                if input_text := decoded_body.get("message"):
-                    messages = [
-                        {"role": "user", "content": [{"text": input_text}]}
-                    ]
+                messages = self._get_messages_from_input_text(
+                    decoded_body, "message"
+                )
             elif "cohere.command" in model_id or "meta.llama" in model_id or "mistral.mistral" in model_id:
-                if input_text := decoded_body.get("prompt"):
-                    messages = [
-                        {"role": "user", "content": [{"text": input_text}]}
-                    ]
+                messages = self._get_messages_from_input_text(
+                    decoded_body, "prompt"
+                )

         return system_messages + messages

+    def _get_messages_from_input_text(
+        self, decoded_body: dict[str, Any], input_name: str
+    ):
+        if input_text := decoded_body.get(input_name):
+            return [
+                {"role": "user", "content": [{"text": input_text}]}
+            ]
+        return []
+
     def before_service_call(
         self, span: Span, instrumentor_context: _BotocoreInstrumentorContext
     ):
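The hunk above replaces three near-identical prompt-extraction branches with a single helper. Below is a minimal standalone sketch of that pattern; the helper body mirrors the added lines, while the surrounding class is dropped and the request bodies and prompt text are made up for illustration.

from typing import Any


def get_messages_from_input_text(decoded_body: dict[str, Any], input_name: str):
    # Wrap a single prompt-like field into the normalized message shape,
    # or return an empty list when the field is absent or empty.
    if input_text := decoded_body.get(input_name):
        return [{"role": "user", "content": [{"text": input_text}]}]
    return []


# Each Bedrock model family stores the user prompt under a different key.
titan_body = {"inputText": "What is OpenTelemetry?"}
cohere_r_body = {"message": "What is OpenTelemetry?"}
llama_body = {"prompt": "What is OpenTelemetry?"}

expected = [{"role": "user", "content": [{"text": "What is OpenTelemetry?"}]}]
assert get_messages_from_input_text(titan_body, "inputText") == expected
assert get_messages_from_input_text(cohere_r_body, "message") == expected
assert get_messages_from_input_text(llama_body, "prompt") == expected
assert get_messages_from_input_text({}, "prompt") == []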
@@ -827,7 +833,7 @@ def _handle_anthropic_claude_response(
                 token_usage_histogram.record(
                     output_tokens, output_attributes
                 )
-
+
     def _handle_cohere_command_r_response(
         self,
         span: Span,
@@ -843,13 +849,13 @@ def _handle_cohere_command_r_response(
             span.set_attribute(
                 GEN_AI_RESPONSE_FINISH_REASONS, [response_body["finish_reason"]]
             )
-
+
         event_logger = instrumentor_context.event_logger
         choice = _Choice.from_invoke_cohere_command_r(
             response_body, capture_content
         )
         event_logger.emit(choice.to_choice_event())
-
+
     def _handle_cohere_command_response(
         self,
         span: Span,
@@ -867,7 +873,7 @@ def _handle_cohere_command_response(
                 span.set_attribute(
                     GEN_AI_RESPONSE_FINISH_REASONS, [generations["finish_reason"]]
                 )
-
+
         event_logger = instrumentor_context.event_logger
         choice = _Choice.from_invoke_cohere_command(
             response_body, capture_content
@@ -913,7 +919,7 @@ def _handle_mistral_ai_response(
                 span.set_attribute(GEN_AI_USAGE_OUTPUT_TOKENS, estimate_token_count(outputs["text"]))
             if "stop_reason" in outputs:
                 span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [outputs["stop_reason"]])
-
+
         event_logger = instrumentor_context.event_logger
         choice = _Choice.from_invoke_mistral_mistral(
             response_body, capture_content
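The remaining hunks in this file are whitespace-only, but their context shows how the response handlers record usage and finish reasons on the span. A reduced sketch of the Mistral case follows, assuming a plain dict in place of the real Span and a stand-in for the estimate_token_count helper (its real heuristic is not shown in this diff); the attribute name strings follow the gen-ai semantic conventions behind the constants used in the code.

from typing import Any

GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"
GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons"


def estimate_token_count(text: str) -> int:
    # Stand-in for the helper imported by the instrumentation; a crude
    # character-based heuristic is enough for this sketch.
    return max(1, len(text) // 4)


def record_mistral_response(span_attributes: dict[str, Any], response_body: dict[str, Any]) -> None:
    # Mirrors the shape handled in _handle_mistral_ai_response: the body
    # carries a list of outputs, each with generated text and a stop reason.
    if "outputs" in response_body:
        outputs = response_body["outputs"][0]
        if "text" in outputs:
            span_attributes[GEN_AI_USAGE_OUTPUT_TOKENS] = estimate_token_count(outputs["text"])
        if "stop_reason" in outputs:
            span_attributes[GEN_AI_RESPONSE_FINISH_REASONS] = [outputs["stop_reason"]]


attrs: dict[str, Any] = {}
record_mistral_response(attrs, {"outputs": [{"text": "Hello there", "stop_reason": "stop"}]})
print(attrs)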

instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock_utils.py

+2 -2
@@ -536,7 +536,7 @@ def from_invoke_cohere_command_r(
         else:
             message = {}
         return cls(message, response["finish_reason"], index=0)
-
+
     @classmethod
     def from_invoke_cohere_command(
         cls, response: dict[str, Any], capture_content: bool
@@ -557,7 +557,7 @@ def from_invoke_meta_llama(
         else:
             message = {}
         return cls(message, response["stop_reason"], index=0)
-
+
     @classmethod
     def from_invoke_mistral_mistral(
         cls, response: dict[str, Any], capture_content: bool
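The two hunks above only strip trailing blanks, but the context lines show the _Choice pattern: each from_invoke_* classmethod maps a model-specific response dict onto a (message, finish_reason, index) triple. A reduced, self-contained sketch of that pattern follows; the finish_reason and stop_reason keys come from the diff, while the generated-text field names ("text", "generation") and the capture_content handling are assumptions for illustration only.

from dataclasses import dataclass
from typing import Any


@dataclass
class Choice:
    message: dict[str, Any]
    finish_reason: str
    index: int

    @classmethod
    def from_invoke_cohere_command_r(cls, response: dict[str, Any], capture_content: bool):
        # Assumed: Cohere Command R invoke responses carry the text under "text".
        message = {"content": response.get("text")} if capture_content else {}
        return cls(message, response["finish_reason"], index=0)

    @classmethod
    def from_invoke_meta_llama(cls, response: dict[str, Any], capture_content: bool):
        # Assumed: Meta Llama invoke responses carry the text under "generation".
        message = {"content": response.get("generation")} if capture_content else {}
        return cls(message, response["stop_reason"], index=0)


choice = Choice.from_invoke_meta_llama({"generation": "Hi!", "stop_reason": "stop"}, capture_content=True)
print(choice)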

instrumentation/opentelemetry-instrumentation-botocore/tests/bedrock_utils.py

+1 -1
@@ -41,7 +41,7 @@
 )


-# pylint: disable=too-many-branches, too-many-locals
+# pylint: disable=too-many-branches, too-many-locals, too-many-statements
 def assert_completion_attributes_from_streaming_body(
     span: ReadableSpan,
     request_model: str,
