Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -565,29 +565,142 @@ def _output_messages(
yield LLM_OUTPUT_MESSAGES, parsed_messages


@stop_on_exception
def _extract_message_role(message_data: Optional[Mapping[str, Any]]) -> Iterator[Tuple[str, Any]]:
if not message_data:
return
assert hasattr(message_data, "get"), f"expected Mapping, found {type(message_data)}"
id_ = message_data.get("id")
assert isinstance(id_, List), f"expected list, found {type(id_)}"
message_class_name = id_[-1]
def _infer_role_from_context(message_data: Mapping[str, Any]) -> Optional[str]:
"""
Infer message role from context when the id field is unavailable.

This is a fallback strategy used when LangGraph streaming produces messages
without the standard id field (e.g., when message["id"] is None).

Args:
message_data: The message data mapping

Returns:
The inferred role string, or None if role cannot be determined
"""
# Check for tool_call_id in kwargs - indicates a tool message
if kwargs := message_data.get("kwargs"):
if isinstance(kwargs, Mapping):
if kwargs.get("tool_call_id"):
return "tool"

# Check for tool_calls - indicates an assistant message
if kwargs.get("tool_calls"):
return "assistant"

# Check for explicit role in kwargs (e.g., ChatMessage)
if role := kwargs.get("role"):
if isinstance(role, str):
return role

# Check for tool_calls at the top level (LangGraph style)
if message_data.get("tool_calls"):
return "assistant"

# Unable to infer role from context
return None


def _map_class_name_to_role(message_class_name: str, message_data: Mapping[str, Any]) -> str:
"""
Map a LangChain message class name to its corresponding role.

Args:
message_class_name: The class name from the message id
message_data: The full message data (needed for ChatMessage role lookup)

Returns:
The role string

Raises:
ValueError: If the message class name is not recognized
"""
if message_class_name.startswith("HumanMessage"):
role = "user"
return "user"
elif message_class_name.startswith("AIMessage"):
role = "assistant"
return "assistant"
elif message_class_name.startswith("SystemMessage"):
role = "system"
return "system"
elif message_class_name.startswith("FunctionMessage"):
role = "function"
return "function"
elif message_class_name.startswith("ToolMessage"):
role = "tool"
return "tool"
elif message_class_name.startswith("ChatMessage"):
role = message_data["kwargs"]["role"]
role: str = cast(str, message_data["kwargs"]["role"])
return role
else:
raise ValueError(f"Cannot parse message of type: {message_class_name}")
yield MESSAGE_ROLE, role


@stop_on_exception
def _extract_message_role(message_data: Optional[Mapping[str, Any]]) -> Iterator[Tuple[str, Any]]:
    """
    Extract the message role from message data with multiple fallback strategies.

    Handles cases where the standard ``id`` field may be missing or None, which
    can occur in LangGraph streaming scenarios.

    Fallback strategies, in order:
    1. The ``id`` field (standard LangChain serialization)
    2. The ``type`` field (alternative serialization format)
    3. A direct ``role`` field (raw dict messages)
    4. Contextual inference (tool_calls, tool_call_id, etc.)
    5. Log a warning and yield nothing (span continues without role attribute)
    """
    if not message_data:
        return
    assert hasattr(message_data, "get"), f"expected Mapping, found {type(message_data)}"

    role: Optional[str] = None

    # Strategy 1: standard serialization puts the message class name last in "id".
    id_ = message_data.get("id")
    if isinstance(id_, List) and id_:
        try:
            role = _map_class_name_to_role(id_[-1], message_data)
            logger.debug("Extracted message role from id field: %s", role)
        except (IndexError, KeyError, ValueError) as e:
            logger.debug("Failed to extract role from id field: %s", e)

    # Strategy 2: alternative serialization exposes a short "type" tag.
    if role is None:
        type_field = message_data.get("type")
        if isinstance(type_field, str):
            role = {
                "human": "user",
                "ai": "assistant",
                "system": "system",
                "function": "function",
                "tool": "tool",
            }.get(type_field.lower())
            if role:
                logger.debug("Extracted message role from type field: %s", role)

    # Strategy 3: raw dict messages may carry the role directly.
    if role is None:
        direct_role = message_data.get("role")
        if isinstance(direct_role, str):
            logger.debug("Extracted message role from direct role field: %s", direct_role)
            role = direct_role

    # Strategy 4: last resort, infer from message structure.
    if role is None:
        role = _infer_role_from_context(message_data)
        if role:
            logger.debug("Inferred message role from context: %s", role)

    if role:
        yield MESSAGE_ROLE, role
    else:
        # Best effort: warn but do not fail — span continues without a role.
        logger.warning(
            "Unable to determine message role. Message data keys: %s. "
            "Span will continue without role attribute.",
            list(message_data.keys()),
        )


@stop_on_exception
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
interactions:
- request:
body: '{"messages":[{"content":"You are a helpful assistant.","role":"system"},{"content":"What''s
the weather in London?","role":"user"}],"model":"gpt-4o-mini","n":1,"stream":true,"temperature":0.0,"tools":[{"type":"function","function":{"name":"get_weather","description":"Get
the weather for a location.","parameters":{"properties":{"location":{"type":"string"}},"required":["location"],"type":"object"}}}]}'
headers: {}
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-CdKU5xm8z6nhRMJs7Slt7m0OhOdgz","object":"chat.completion.chunk","created":1763489569,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_7p1vRWhZc9Pm0j6JVvrHK4ZE","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"obfuscation":"f"}


data: {"id":"chatcmpl-CdKU5xm8z6nhRMJs7Slt7m0OhOdgz","object":"chat.completion.chunk","created":1763489569,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"obfuscation":"IlSJqtP4c7n"}


data: {"id":"chatcmpl-CdKU5xm8z6nhRMJs7Slt7m0OhOdgz","object":"chat.completion.chunk","created":1763489569,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"obfuscation":"KIpgSp"}


data: {"id":"chatcmpl-CdKU5xm8z6nhRMJs7Slt7m0OhOdgz","object":"chat.completion.chunk","created":1763489569,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"obfuscation":"vcSXjae37"}


data: {"id":"chatcmpl-CdKU5xm8z6nhRMJs7Slt7m0OhOdgz","object":"chat.completion.chunk","created":1763489569,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"London"}}]},"logprobs":null,"finish_reason":null}],"obfuscation":"rgXcAPCs"}


data: {"id":"chatcmpl-CdKU5xm8z6nhRMJs7Slt7m0OhOdgz","object":"chat.completion.chunk","created":1763489569,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"obfuscation":"tBHkThTc2Ev"}


data: {"id":"chatcmpl-CdKU5xm8z6nhRMJs7Slt7m0OhOdgz","object":"chat.completion.chunk","created":1763489569,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"obfuscation":"TiZm2BODRQiA"}


data: [DONE]


'
headers: {}
status:
code: 200
message: OK
- request:
body: '{"messages":[{"content":"You are a helpful assistant.","role":"system"},{"content":"What''s
the weather in London?","role":"user"},{"content":null,"role":"assistant","tool_calls":[{"type":"function","id":"call_7p1vRWhZc9Pm0j6JVvrHK4ZE","function":{"name":"get_weather","arguments":"{\"location\":
\"London\"}"}}]},{"content":"The weather in London is sunny.","role":"tool","tool_call_id":"call_7p1vRWhZc9Pm0j6JVvrHK4ZE"}],"model":"gpt-4o-mini","n":1,"stream":true,"temperature":0.0,"tools":[{"type":"function","function":{"name":"get_weather","description":"Get
the weather for a location.","parameters":{"properties":{"location":{"type":"string"}},"required":["location"],"type":"object"}}}]}'
headers: {}
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"obfuscation":"4Msqp0"}


data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}],"obfuscation":"TvXs2"}


data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"obfuscation":""}


data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"content":"
in"},"logprobs":null,"finish_reason":null}],"obfuscation":"8mNQy"}


data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"content":"
London"},"logprobs":null,"finish_reason":null}],"obfuscation":"z"}


data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"content":"
is"},"logprobs":null,"finish_reason":null}],"obfuscation":"LcZAV"}


data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"content":"
sunny"},"logprobs":null,"finish_reason":null}],"obfuscation":"qe"}


data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"obfuscation":"sv3vFIw"}


data: {"id":"chatcmpl-CdKU7jAO8mYCVjDyDpc0C7u8brkU7","object":"chat.completion.chunk","created":1763489571,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_560af6e559","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"obfuscation":"I0"}


data: [DONE]


'
headers: {}
status:
code: 200
message: OK
version: 1
Loading
Loading