Open jhachirag7 opened 6 days ago
@jhachirag7 can you please provide a smaller reproducible code example that demonstrates the issue?
Actually, I found what the issue is: `node = functools.partial(self.agent_node, agent=agent.create_workflow(), name=name)` — here `agent.create_workflow()` is a CompiledGraph object.
If I create `node = agent.create_workflow()`, then the internal graph state messages are stored to the persistence DB; but if we convert the node to a function, then the internal graph state values are None: `workflow.add_node(name, node)`.
For a smaller reproducible case, you can use the code below.
from langgraph.graph import StateGraph, END, START, MessagesState
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI
from langchain_aws import ChatBedrock
import os
@tool
def get_weather(city: str) -> str:
    """Get the weather for a specific city"""
    # Stub: returns a canned response; no real weather lookup is performed.
    # NOTE: the docstring above doubles as the tool description shown to the
    # LLM, so it must not be changed casually.
    return f"It's sunny in {city}!"
# Base Bedrock chat model used by both the subgraph and the router.
# NOTE(review): streaming is enabled while the Converse API is explicitly
# disabled — confirm this combination is intentional.
raw_model = ChatBedrock(
    model_id="anthropic.claude-3-sonnet-20240229-v1:0",
    streaming= True,
    beta_use_converse_api=False,
)
# Bind the get_weather tool schema so invocations return structured arguments
# (a dict with a "city" key, per model_node's usage below).
model = raw_model.with_structured_output(get_weather)
class SubGraphState(MessagesState):
    """State for the weather subgraph.

    Inherits the ``messages`` channel from ``MessagesState`` and adds the
    city name extracted by ``model_node``.
    """

    city: str
def model_node(state: SubGraphState):
    """Extract the target city from the conversation via structured output."""
    extraction = model.invoke(state["messages"])
    return {"city": extraction["city"]}
def weather_node(state: SubGraphState):
    """Fetch the weather for the extracted city and append it as a reply."""
    weather_report = get_weather.invoke({"city": state["city"]})
    reply = {"role": "assistant", "content": weather_report}
    return {"messages": [reply]}
# Wire the subgraph as a straight line: START -> model_node -> weather_node -> END.
subgraph = StateGraph(SubGraphState)
subgraph.add_node(model_node)
subgraph.add_node(weather_node)
subgraph.add_edge(START, "model_node")
subgraph.add_edge("model_node", "weather_node")
subgraph.add_edge("weather_node", END)
# Interrupt before weather_node so the paused run's state can be inspected
# (this is the interrupt whose subgraph state the issue is about).
subgraph = subgraph.compile(interrupt_before=["weather_node"])
from typing import Literal
from typing_extensions import TypedDict
class RouterState(MessagesState):
    """Top-level graph state: the conversation plus the routing decision."""

    route: Literal["weather", "other"]
class Router(TypedDict):
    """Structured-output schema the classifier model must fill in."""

    route: Literal["weather", "other"]

# Router model returns a dict matching the Router schema.
router_model = raw_model.with_structured_output(Router)
def router_node(state: RouterState):
    """Classify the incoming query as weather-related or not."""
    system_message = "Classify the incoming query as either about weather or not."
    prompt = [{"role": "system", "content": system_message}]
    prompt += state["messages"]
    decision = router_model.invoke(prompt)
    return {"route": decision["route"]}
def normal_llm_node(state: RouterState):
    """Answer non-weather queries with the plain (unstructured) model."""
    reply = raw_model.invoke(state["messages"])
    return {"messages": [reply]}
def route_after_prediction(
    state: RouterState,
) -> Literal["weather_graph", "normal_llm_node"]:
    """Send weather queries to the subgraph, everything else to the plain LLM."""
    is_weather = state["route"] == "weather"
    return "weather_graph" if is_weather else "normal_llm_node"
# Parent graph: classify first, then branch to either the weather subgraph or
# the plain LLM node.
graph = StateGraph(RouterState)
graph.add_node(router_node)
graph.add_node(normal_llm_node)
# The compiled subgraph is added directly as a node (not wrapped in a function).
# NOTE(review): per the discussion above, wrapping it in a function is what
# loses the subgraph's internal state in the checkpointer — confirm.
graph.add_node("weather_graph", subgraph)
graph.add_edge(START, "router_node")
graph.add_conditional_edges("router_node", route_after_prediction)
graph.add_edge("normal_llm_node", END)
graph.add_edge("weather_graph", END)
from psycopg_pool import ConnectionPool
from langgraph.checkpoint.postgres import PostgresSaver
import os
# Postgres connection string; must be provided via the environment.
DB_URI = os.getenv('DB_URI')
# Connection settings passed through to every pooled connection — presumably
# the settings the Postgres checkpointer requires; verify against the
# langgraph-checkpoint-postgres docs.
connection_kwargs = {
    "autocommit": True,
    "prepare_threshold": 0,
}
# Thread id selects the checkpoint thread to write/resume.
config = {"configurable": {"thread_id": "3"}}
state = None
# Run the graph with Postgres checkpointing, then inspect the state of the
# interrupted subgraph task.
with ConnectionPool(
    conninfo=DB_URI,
    kwargs=connection_kwargs,
) as pool:
    checkpointer = PostgresSaver(pool)
    graph = graph.compile(checkpointer=checkpointer)
    inputs = {"messages": [{"role": "user", "content": "what's the weather in sf"}]}
    # subgraphs=True streams subgraph events too; we only drive the run here,
    # so each update is discarded.
    for update in graph.stream(inputs, config=config, stream_mode="values", subgraphs=True):
        # print(update)
        pass
    # The run pauses at the interrupt before weather_node; per the report,
    # tasks[0].state is None here instead of the subgraph's persisted state.
    state = graph.get_state(config, subgraphs=True)
    print(state.tasks[0])
Checked other resources
Example Code
Multi agent Class:
but once I print the tasks for the subgraph, the state value is None:
Description
The state value should be inserted into the persistence DB.
System Info
requests langchain langchain-openai langchain-aws langgraph langchain-community firebase_admin supabase rollbar tavily-python google-search-results pandas apify-client mailchimp-marketing hubspot-api-client wikipedia langchain-google-community langchain-anthropic psycopg psycopg-binary psycopg-pool langgraph-checkpoint-postgres