langchain-ai / langgraphjs

⚡ Build language agents as graphs ⚡
https://langchain-ai.github.io/langgraphjs/
MIT License
584 stars 85 forks source link

I got TypeError: value.initialize is not a function by simply following the example #622

Closed Patrick-Shih closed 2 hours ago

Patrick-Shih commented 3 hours ago

I simply copy the entire example within a fresh start project, run it with node V18.18.2, and get the error.

I think the only difference are:

  1. I do not use typeScript so I removed all type definitions
  2. I use common-js style

I wonder is using typeScript a must to use langgraph? or it is something eles?

Following is the package.json

{
  "dependencies": {
    "@langchain/anthropic": "^0.3.5",
    "@langchain/core": "^0.3.13",
    "@langchain/langgraph": "^0.2.17"
  }
}

Following is the error

> node langGraphPoc.js
TypeError: value.initialize is not a function
    at /workspace/langchain/node_modules/@langchain/langgraph/dist/pregel/index.cjs:667:48
    at async CompiledStateGraph.prepareSpecs (/workspace/langchain/node_modules/@langchain/langgraph/dist/pregel/index.cjs:656:59)
    at async CompiledStateGraph._streamIterator (/workspace/langchain/node_modules/@langchain/langgraph/dist/pregel/index.cjs:696:43)

Following is the code langGraphPoc.js

const { HumanMessage } = require("@langchain/core/messages");
const { tool } = require("@langchain/core/tools");
const { z } = require("zod");
const { ChatAnthropic } = require("@langchain/anthropic");
const { StateGraph } = require("@langchain/langgraph");
const { MemorySaver, Annotation } = require("@langchain/langgraph");
const { ToolNode } = require("@langchain/langgraph/prebuilt");

// Define the graph state
// See here for more info: https://langchain-ai.github.io/langgraphjs/how-tos/define-state/
const StateAnnotation = Annotation.Root({
  messages: {
    reducer: (x, y) => x.concat(y),
  },
});

// Define the tools for the agent to use
const weatherTool = tool(
  async ({ query }) => {
    // This is a placeholder for the actual implementation
    if (
      query.toLowerCase().includes("sf") ||
      query.toLowerCase().includes("san francisco")
    ) {
      return "It's 60 degrees and foggy.";
    }
    return "It's 90 degrees and sunny.";
  },
  {
    name: "weather",
    description: "Call to get the current weather for a location.",
    schema: z.object({
      query: z.string().describe("The query to use in your search."),
    }),
  }
);

const tools = [weatherTool];
const toolNode = new ToolNode(tools);

const model = new ChatAnthropic({
  model: "claude-3-5-sonnet-20240620",
  temperature: 0,
}).bindTools(tools);

// Define the function that determines whether to continue or not
// We can extract the state typing via `StateAnnotation.State`
function shouldContinue(state) {
  const messages = state.messages;
  const lastMessage = messages[messages.length - 1];

  // If the LLM makes a tool call, then we route to the "tools" node
  if (lastMessage.tool_calls?.length) {
    return "tools";
  }
  // Otherwise, we stop (reply to the user)
  return "__end__";
}

// Define the function that calls the model
async function callModel(state) {
  const messages = state.messages;
  const response = await model.invoke(messages);

  // We return a list, because this will get added to the existing list
  return { messages: [response] };
}

// Define a new graph
const workflow = new StateGraph(StateAnnotation)
  .addNode("agent", callModel)
  .addNode("tools", toolNode)
  .addEdge("__start__", "agent")
  .addConditionalEdges("agent", shouldContinue)
  .addEdge("tools", "agent");

// Initialize memory to persist state between graph runs
const checkpointer = new MemorySaver();

// Finally, we compile it!
// This compiles it into a LangChain Runnable.
// Note that we're (optionally) passing the memory when compiling the graph
const app = workflow.compile({ checkpointer });

async function main() {
  const finalState = await app.invoke(
    { messages: [new HumanMessage("what is the weather in sf")] },
    { configurable: { thread_id: "42" } }
  );
  console.log(finalState.messages[finalState.messages.length - 1].content);
}

main().catch(console.error);
jacoblee93 commented 2 hours ago

Hey @Patrick-Shih,

You need to wrap properties in your annotation like this:

// Define the graph state
// See here for more info: https://langchain-ai.github.io/langgraphjs/how-tos/define-state/
const StateAnnotation = Annotation.Root({
  // Annotation here too!
  messages: Annotation({
    reducer: (x, y) => x.concat(y),
  }),
});

TypeScript is not required (it never is) but we generally recommend it since you avoid pitfalls like this!