44"""
55
66from typing import Any , List
7- from typing_extensions import Literal
8- from langchain_openai import ChatOpenAI
9- from langchain_core .messages import SystemMessage , BaseMessage
10- from langchain_core .runnables import RunnableConfig
7+
118from langchain .tools import tool
12- from langgraph .graph import StateGraph , END
13- from langgraph .types import Command
14- from langgraph .graph import MessagesState
9+ from langchain_core .messages import BaseMessage , SystemMessage
10+ from langchain_core .runnables import RunnableConfig
11+ from langchain_openai import ChatOpenAI
12+ from langgraph .graph import END , MessagesState , StateGraph
1513from langgraph .prebuilt import ToolNode
14+ from langgraph .types import Command
15+
1616
1717class AgentState (MessagesState ):
1818 """
@@ -22,17 +22,20 @@ class AgentState(MessagesState):
2222 the CopilotKitState fields. We're also adding a custom field, `language`,
2323 which will be used to set the language of the agent.
2424 """
25- proverbs : List [str ] = []
25+
26+ proverbs : List [str ]
2627 tools : List [Any ]
2728 # your_custom_agent_state: str = ""
2829
30+
@tool
def get_weather(location: str):
    """Report the weather at *location*.

    Stub implementation: answers a fixed 70 degrees no matter which
    location is supplied.
    """
    report = f"The weather for {location} is 70 degrees."
    return report
3537
38+
3639# @tool
3740# def your_tool_here(your_arg: str):
3841# """Your tool description here."""
@@ -48,7 +51,7 @@ def get_weather(location: str):
# Names of all backend-defined tools, used later to decide whether a tool
# call should be routed to the backend tool node or handled by the frontend.
backend_tool_names = [backend_tool.name for backend_tool in backend_tools]
4952
5053
51- async def chat_node (state : AgentState , config : RunnableConfig ) -> Command [Literal [ "tool_node" , "__end__" ] ]:
54+ async def chat_node (state : AgentState , config : RunnableConfig ) -> Command [str ]:
5255 """
5356 Standard chat node based on the ReAct design pattern. It handles:
5457 - The model to use (and binds in CopilotKit actions and the tools defined above)
@@ -61,16 +64,15 @@ async def chat_node(state: AgentState, config: RunnableConfig) -> Command[Litera
6164 """
6265
6366 # 1. Define the model
64- model = ChatOpenAI (model = "gpt-4o " )
67+ model = ChatOpenAI (model = "gpt-5-mini " )
6568
6669 # 2. Bind the tools to the model
6770 model_with_tools = model .bind_tools (
6871 [
69- * state .get ("tools" , []), # bind tools defined by ag-ui
72+ * state .get ("tools" , []), # bind tools defined by ag-ui
7073 * backend_tools ,
7174 # your_tool_here
7275 ],
73-
7476 # 2.1 Disable parallel tool calls to avoid race conditions,
7577 # enable this for faster performance if you want to manage
7678 # the complexity of running tool calls in parallel.
@@ -83,10 +85,13 @@ async def chat_node(state: AgentState, config: RunnableConfig) -> Command[Litera
8385 )
8486
8587 # 4. Run the model to generate a response
86- response = await model_with_tools .ainvoke ([
87- system_message ,
88- * state ["messages" ],
89- ], config )
88+ response = await model_with_tools .ainvoke (
89+ [
90+ system_message ,
91+ * state ["messages" ],
92+ ],
93+ config ,
94+ )
9095
9196 # only route to tool node if tool is not in the tools list
9297 if route_to_tool_node (response ):
@@ -95,17 +100,18 @@ async def chat_node(state: AgentState, config: RunnableConfig) -> Command[Litera
95100 goto = "tool_node" ,
96101 update = {
97102 "messages" : [response ],
98- }
103+ },
99104 )
100105
101106 # 5. We've handled all tool calls, so we can end the graph.
102107 return Command (
103108 goto = END ,
104109 update = {
105110 "messages" : [response ],
106- }
111+ },
107112 )
108113
114+
109115def route_to_tool_node (response : BaseMessage ):
110116 """
111117 Route to tool node if any tool call in the response matches a backend tool name.
@@ -119,6 +125,7 @@ def route_to_tool_node(response: BaseMessage):
119125 return True
120126 return False
121127
128+
# Assemble the agent graph: register the chat node as the ReAct loop entry.
workflow = StateGraph(AgentState)
workflow.add_node("chat_node", chat_node)
0 commit comments