import inspect
import json

from langchain_core.messages import (
    AIMessage,
    BaseMessage,
    HumanMessage,
    ToolMessage,
)
from langchain_core.messages import (
    ChatMessage as LangchainChatMessage,
)
from pydantic import BaseModel, HttpUrl, TypeAdapter

from langgraph_agent_toolkit.schema import ChatMessage


def check_str_is_http(x: str) -> str:
    """Validate that `x` is an HTTP(S) URL and return it as a string."""
    http_url_adapter = TypeAdapter(HttpUrl)
    return str(http_url_adapter.validate_python(x))


def convert_message_content_to_string(content: str | list[str | dict]) -> str:
    """Flatten LangChain message content into a plain string.

    String content is returned unchanged. For list content, plain strings and
    ``{"type": "text"}`` blocks are concatenated; other block types are dropped.
    """
    if isinstance(content, str):
        return content
    text: list[str] = []
    for content_item in content:
        if isinstance(content_item, str):
            text.append(content_item)
            continue
        if content_item["type"] == "text":
            text.append(content_item["text"])
    return "".join(text)


def langchain_to_chat_message(message: BaseMessage | dict | BaseModel) -> ChatMessage:
    """Create a ChatMessage from a LangChain message."""
    # Normalize non-message inputs before matching: a pydantic model is dumped
    # to a dict, and a dict is expected to carry a raw LangChain message under
    # its "raw" key, whose content is used instead.
    if not isinstance(message, (BaseMessage, AIMessage, HumanMessage, ToolMessage, LangchainChatMessage)):
        if isinstance(message, BaseModel):
            message = message.model_dump()
        elif isinstance(message, dict):
            message = message["raw"].content

    match message:
        case HumanMessage():
            human_message = ChatMessage(
                type="human",
                content=convert_message_content_to_string(message.content),
            )
            return human_message
        case AIMessage():
            ai_message = ChatMessage(
                type="ai",
                content=convert_message_content_to_string(message.content),
            )
            if message.tool_calls:
                ai_message.tool_calls = message.tool_calls
            if message.response_metadata:
                ai_message.response_metadata = message.response_metadata
            return ai_message
        case ToolMessage():
            tool_message = ChatMessage(
                type="tool",
                content=convert_message_content_to_string(message.content),
                tool_call_id=message.tool_call_id,
            )
            return tool_message
        case LangchainChatMessage():
            if message.role == "custom":
                custom_message = ChatMessage(
                    type="custom",
                    content="",
                    custom_data=message.content[0],
                )
                return custom_message
            else:
                raise ValueError(f"Unsupported chat message role: {message.role}")
        case str() | dict():
            # A bare string, or a dict produced by dumping a pydantic model,
            # is passed through as AI content.
            return ChatMessage(
                type="ai",
                content=message,
            )
        case _:
            raise ValueError(f"Unsupported message type: {message.__class__.__name__}")


def create_ai_message(parts: dict) -> AIMessage:
    """Build an AIMessage from `parts`, keeping only keys AIMessage accepts."""
    sig = inspect.signature(AIMessage)
    valid_keys = set(sig.parameters)
    filtered = {k: v for k, v in parts.items() if k in valid_keys}
    return AIMessage(**filtered)
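

# Usage sketch (illustrative, not part of the original module). It assumes the
# ChatMessage schema exposes `type` and `content` attributes, as the constructor
# calls above imply.
if __name__ == "__main__":
    raw = AIMessage(
        content=[{"type": "text", "text": "The answer is 4."}],
        response_metadata={"model_name": "example-model"},
    )
    chat = langchain_to_chat_message(raw)
    print(chat.type, chat.content)  # expected: ai The answer is 4.

    rebuilt = create_ai_message({"content": "Hello", "unknown_key": "dropped"})
    print(rebuilt.content)  # expected: Hello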