diff --git a/ddtrace/llmobs/_integrations/bedrock.py b/ddtrace/llmobs/_integrations/bedrock.py index 98441687288..70dd56ec649 100644 --- a/ddtrace/llmobs/_integrations/bedrock.py +++ b/ddtrace/llmobs/_integrations/bedrock.py @@ -186,18 +186,18 @@ def _extract_output_message_for_converse_stream( if "messageStart" in chunk: message_data = chunk["messageStart"] - current_message = {"role": message_data.get("role", "assistant"), "context_block_indices": []} + current_message = {"role": message_data.get("role", "assistant"), "content_block_indicies": []} # always make sure we have a current message if current_message is None: - current_message = {"role": "assistant", "context_block_indices": []} + current_message = {"role": "assistant", "content_block_indicies": []} if "contentBlockStart" in chunk: block_start = chunk["contentBlockStart"] index = block_start.get("contentBlockIndex") if index is not None: - current_message["context_block_indices"].append(index) + current_message["content_block_indicies"].append(index) if "start" in block_start and "toolUse" in block_start["start"]: tool_content_blocks[index] = block_start["start"]["toolUse"] @@ -206,8 +206,8 @@ def _extract_output_message_for_converse_stream( index = content_block_delta.get("contentBlockIndex") if index is not None and "delta" in content_block_delta: - if index not in current_message.get("context_block_indices", []): - current_message["context_block_indices"].append(index) + if index not in current_message.get("content_block_indicies", []): + current_message["content_block_indicies"].append(index) delta_content = content_block_delta["delta"] text_content_blocks[index] = text_content_blocks.get(index, "") + delta_content.get("text", "") @@ -225,7 +225,7 @@ def _extract_output_message_for_converse_stream( current_message = None # Handle the case where we didn't receive an explicit message stop event - if current_message is not None: + if current_message is not None and current_message.get("content_block_indicies"): messages.append( get_final_message_converse_stream_message(current_message, text_content_blocks, tool_content_blocks) ) diff --git a/ddtrace/llmobs/_integrations/utils.py b/ddtrace/llmobs/_integrations/utils.py index ef7cc62ff7e..cc286fa28c5 100644 --- a/ddtrace/llmobs/_integrations/utils.py +++ b/ddtrace/llmobs/_integrations/utils.py @@ -908,7 +908,7 @@ def get_final_message_converse_stream_message( Returns: Dict containing the processed message with content and optional tool calls """ - indices = sorted(message.get("context_block_indices", [])) + indices = sorted(message.get("content_block_indicies", [])) message_output = {"role": message["role"]} text_contents = [text_blocks[idx] for idx in indices if idx in text_blocks] @@ -920,7 +920,7 @@ def get_final_message_converse_stream_message( if not tool_block: continue tool_call = { - "name": tool_block.get("toolName", ""), + "name": tool_block.get("name", ""), "tool_id": tool_block.get("toolUseId", ""), } tool_input = tool_block.get("input") diff --git a/releasenotes/notes/converse-parsing-cd9ebb9506f8bbe1.yaml b/releasenotes/notes/converse-parsing-cd9ebb9506f8bbe1.yaml new file mode 100644 index 00000000000..060659bd28e --- /dev/null +++ b/releasenotes/notes/converse-parsing-cd9ebb9506f8bbe1.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + LLM Observability: This fix resolves an issue where tool call names were not being captured for bedrock `converse_stream` calls. + - | + LLM Observability: This fix resolves an issue where bedrock `converse_stream` calls contained an extra empty output message. 
diff --git a/tests/contrib/botocore/test_bedrock_llmobs.py b/tests/contrib/botocore/test_bedrock_llmobs.py index 0fa4c68fbc8..a86ff5ded16 100644 --- a/tests/contrib/botocore/test_bedrock_llmobs.py +++ b/tests/contrib/botocore/test_bedrock_llmobs.py @@ -735,12 +735,12 @@ def test_llmobs_converse(cls, bedrock_client, request_vcr, mock_tracer, llmobs_e span = mock_tracer.pop_traces()[0][0] assert len(llmobs_events) == 1 - llmobs_events[0] == _expected_llmobs_llm_span_event( + assert llmobs_events[0] == _expected_llmobs_llm_span_event( span, model_name="claude-3-sonnet-20240229-v1:0", model_provider="anthropic", input_messages=[ - {"role": "system", "content": request_params.get("system")}, + {"role": "system", "content": request_params.get("system")[0]["text"]}, {"role": "user", "content": request_params.get("messages")[0].get("content")[0].get("text")}, ], output_messages=[ @@ -813,12 +813,12 @@ def test_llmobs_converse_stream(cls, bedrock_client, request_vcr, mock_tracer, l span = mock_tracer.pop_traces()[0][0] assert len(llmobs_events) == 1 - llmobs_events[0] == _expected_llmobs_llm_span_event( + assert llmobs_events[0] == _expected_llmobs_llm_span_event( span, model_name="claude-3-sonnet-20240229-v1:0", model_provider="anthropic", input_messages=[ - {"role": "system", "content": request_params.get("system")}, + {"role": "system", "content": request_params.get("system")[0]["text"]}, {"role": "user", "content": request_params.get("messages")[0].get("content")[0].get("text")}, ], output_messages=[ @@ -835,7 +835,6 @@ def test_llmobs_converse_stream(cls, bedrock_client, request_vcr, mock_tracer, l } ], metadata={ - "stop_reason": "tool_use", "temperature": request_params.get("inferenceConfig", {}).get("temperature"), "max_tokens": request_params.get("inferenceConfig", {}).get("maxTokens"), },