langchain-ai / langchain

🦜🔗 Build context-aware reasoning applications
https://python.langchain.com
MIT License
91.85k stars 14.59k forks source link

JsonOutputParser throws KeyError for missing variables #17929

Closed droggta closed 11 hours ago

droggta commented 6 months ago

Checked other resources

Example Code

This code is just an example; I can't post the full code due to restrictions.

class LLMOutputJSON(OldBaseModel):
    msg: str = OldField(description="The message to be sent to the user")
    finished_validating: bool = OldField(description="Whether the agent has finished validating the users request")
    metadata: dict = OldField(description="Metadata")

def main():
    llm = AzureChatOpenAI()
    prompt="You are funny and tell the user Jokes"
    parser=JsonOutputParser(pydantic_object=LLMOutputJSON)
    tools = []
    memory = ConversationBufferMemory()
    agent = (
        {
            "input": lambda x: x["input"],
            "chat_history": lambda x: x["chat_history"],
            "agent_scratchpad": lambda x: format_log_to_str(
                x["intermediate_steps"]
            ),
        }
        | prompt
        | llm
        | parser
    )

agent = AgentExecutor.from_agent_and_tools(
            agent=agent,
            tools=tools,
            verbose=True,
            memory=memory,
            handle_parsing_errors=True,
        )

await agent.ainvoke({"input": "Tell me a Joke."})

Error Message and Stack Trace (if applicable)

res = await agent.ainvoke( app-1 | ^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain/chains/base.py", line 217, in ainvoke app-1 | raise e app-1 | File "/usr/local/lib/python3.11/site-packages/langchain/chains/base.py", line 208, in ainvoke app-1 | await self._acall(inputs, run_manager=run_manager) app-1 | File "/usr/local/lib/python3.11/site-packages/langchain/agents/agent.py", line 1440, in _acall app-1 | next_step_output = await self._atake_next_step( app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain/agents/agent.py", line 1234, in _atake_next_step app-1 | [ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain/agents/agent.py", line 1234, in app-1 | [ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain/agents/agent.py", line 1262, in _aiter_next_step app-1 | output = await self.agent.aplan( app-1 | ^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain/agents/agent.py", line 422, in aplan app-1 | async for chunk in self.runnable.astream( app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2452, in astream app-1 | async for chunk in self.atransform(input_aiter(), config, kwargs): app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2435, in atransform app-1 | async for chunk in self._atransform_stream_with_config( app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1592, in _atransform_stream_with_config app-1 | chunk: Output = await asyncio.create_task( # type: ignore[call-arg] app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2405, in _atransform app-1 | async for output in final_pipeline: app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/output_parsers/transform.py", line 60, 
in atransform app-1 | async for chunk in self._atransform_stream_with_config( app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1557, in _atransform_stream_with_config app-1 | final_input: Optional[Input] = await py_anext(input_for_tracing, None) app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/utils/aiter.py", line 62, in anext_impl app-1 | return await anext(iterator) app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/utils/aiter.py", line 97, in tee_peer app-1 | item = await iterator.anext() app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 4176, in atransform app-1 | async for item in self.bound.atransform( app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1058, in atransform app-1 | async for chunk in input: app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1068, in atransform app-1 | async for output in self.astream(final, config, kwargs): app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 589, in astream app-1 | yield await self.ainvoke(input, config, kwargs) app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 495, in ainvoke app-1 | return await run_in_executor(config, self.invoke, input, config, kwargs) app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/config.py", line 493, in run_in_executor app-1 | return await asyncio.get_running_loop().run_in_executor( app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/concurrent/futures/thread.py", line 58, in 
run app-1 | result = self.fn(*self.args, self.kwargs) app-1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/prompts/base.py", line 113, in invoke app-1 | return self._call_with_config( app-1 | ^^^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1243, in _call_with_config app-1 | context.run( app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/runnables/config.py", line 326, in call_func_with_variable_args app-1 | return func(input, kwargs) # type: ignore[call-arg] app-1 | ^^^^^^^^^^^^^^^^^^^^^ app-1 | File "/usr/local/lib/python3.11/site-packages/langchain_core/prompts/base.py", line 98, in _format_prompt_with_error_handling app-1 | raise KeyError( app-1 | KeyError: 'Input to PromptTemplate is missing variables {\'"properties"\', \'"foo"\'}. Expected: [\'"foo"\', \'"properties"\', \'agent_scratchpad\', \'chat_history\', \'input\'] Received: [\'input\', \'chat_history\', \'agent_scratchpad\']'

Description

The problem seems to come from within the JsonOutputParser.

JSON_FORMAT_INSTRUCTIONS = """
The output should be formatted as a JSON instance that conforms to the JSON schema below.

As an example, for the schema {{"properties": {{"foo": {{"title": "Foo", "description": "a list of strings", "type": "array", "items": {{"type": "string"}}}}}}, "required": ["foo"]}}
the object {{"foo": ["bar", "baz"]}} is a well-formatted instance of the schema. The object {{"properties": {{"foo": ["bar", "baz"]}}}} is not well-formatted.

Here is the output schema:

{schema}
"""

The `foo` and `properties` keys from the format-instructions template are registered as input_variables, which causes the error. Not exactly sure why that happens.

System Info

pip freeze | grep langchain langchain==0.1.8 langchain-community==0.0.21 langchain-core==0.1.25 langchain-openai==0.0.6 langchainhub==0.1.14

rjurney commented 3 months ago

@droggta This is because unlike ChatOpenAI, AzureChatOpenAI doesn't have the bind_functions method to enable output parsers. I've tried to copy it over and add it and couldn't get it to work. It is a more recent interface and AzureChatOpenAI hasn't been updated yet.

Check out the Output Parsers / OpenAI Functions page:

This means they are only usable with models that support function calling.

The relevant code is: ChatOpenAI.bind_functions and AzureChatOpenAI.