langchain-ai / langchain-databricks

MIT License
5 stars 6 forks source link

HTTPError: 400 Client Error: Bad request: json: unknown field "id" for url: https://<some_id>.azuredatabricks.net/serving-endpoints/databricks-meta-llama-3-1-70b-instruct/invocations. Response text: {"error_code": "BAD_REQUEST", "message": "Bad request: json: unknown field \"id\"\n"} #6

Closed yashasreddy closed 2 months ago

yashasreddy commented 2 months ago

I am experimenting with ChatDatabricks. I am trying to build a simple LLM runnable that is aware of chat history using the RunnableWithMessageHistory class. For the first question it runs fine, but for the second follow-up question it throws an error. I have attached both the error message and the code snippet below.

from langchain.prompts import (
    HumanMessagePromptTemplate,
    SystemMessagePromptTemplate,
    ChatPromptTemplate,
    MessagesPlaceholder,
)
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_community.chat_message_histories.postgres import PostgresChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_databricks import ChatDatabricks

# System prompt injected at the start of every conversation.
cloud_prompt = """You are an AI Assistant. 
"""

# In-memory registry of per-session chat histories, keyed by session_id.
store = {}

# Databricks-hosted Llama 3.1 70B endpoint; deterministic output (temperature=0),
# responses capped at 500 tokens.
llm = ChatDatabricks(
    endpoint="databricks-meta-llama-3-1-70b-instruct",
    temperature=0,
    max_tokens=500,
)

# Conversation template: system prompt, then accumulated history, then the
# current user query.
prompt = ChatPromptTemplate.from_messages(
    [
        SystemMessagePromptTemplate.from_template(cloud_prompt),
        MessagesPlaceholder(variable_name="chat_history"),
        HumanMessagePromptTemplate.from_template("{query}"),
    ]
)

# Base chain: render the prompt, then call the model.
runnable = prompt | llm

def get_session_history(session_id: str) -> BaseChatMessageHistory:
    """Return the chat history for *session_id*, creating one on first use.

    Histories live in the module-level ``store`` dict, so they persist for
    the lifetime of the process only.
    """
    try:
        return store[session_id]
    except KeyError:
        history = ChatMessageHistory()
        store[session_id] = history
        return history

# Wrap the base chain so that, per session_id, prior messages are loaded into
# the "chat_history" placeholder before each call and the new exchange is
# appended afterwards.
# NOTE: RunnableWithMessageHistory was used here without ever being imported,
# which raises NameError at module load; the import
# `from langchain_core.runnables.history import RunnableWithMessageHistory`
# is added to the file's import block.
with_message_history = RunnableWithMessageHistory(
    runnable,
    get_session_history,
    input_messages_key="query",       # where the user's text enters the prompt
    history_messages_key="chat_history",  # where past messages are injected
)
def query_bot(query):
    """Stream the assistant's reply to *query* under a fixed session.

    Returns the chunk iterator produced by ``with_message_history.stream``;
    the hard-coded session_id "1234" means all calls share one history.
    """
    session_config = {"configurable": {"session_id": "1234"}}
    return with_message_history.stream({"query": query}, config=session_config)

HTTPError: 400 Client Error: Bad request: json: unknown field "id" for url: https://.azuredatabricks.net/serving-endpoints/databricks-meta-llama-3-1-70b-instruct/invocations. Response text: {"error_code": "BAD_REQUEST", "message": "Bad request: json: unknown field \"id\"\n"}

Traceback: File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\streamlit\runtime\scriptrunner\exec_code.py", line 88, in exec_func_with_error_handling result = func() ^^^^^^ File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 590, in code_to_exec exec(code, module.dict) File "D:\01.Projects\llm-bot\code\main.py", line 25, in st.write_stream(query_bot(prompt)) File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\streamlit\runtime\metrics_util.py", line 410, in wrapped_func result = non_optional_func(*args, kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\streamlit\elements\write.py", line 174, in write_stream for chunk in stream: # type: ignore File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 5263, in stream yield from self.bound.stream( File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 5263, in stream yield from self.bound.stream( File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 3261, in stream yield from self.transform(iter([input]), config, kwargs) File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 3248, in transform yield from self._transform_stream_with_config( File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 2054, in _transform_stream_with_config chunk: Output = context.run(next, iterator) # type: ignore ^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 3211, in _transform yield from final_pipeline File 
"C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 5299, in transform yield from self.bound.transform( File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 4579, in transform yield from self._transform_stream_with_config( File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 2054, in _transform_stream_with_config chunk: Output = context.run(next, iterator) # type: ignore ^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 4559, in _transform for chunk in output.stream( File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 5263, in stream yield from self.bound.stream( File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 3261, in stream yield from self.transform(iter([input]), config, kwargs) File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 3248, in transform yield from self._transform_stream_with_config( File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 2054, in _transform_stream_with_config chunk: Output = context.run(next, iterator) # type: ignore ^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 3211, in _transform yield from final_pipeline File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\runnables\base.py", line 1290, in transform yield from self.stream(final, config, kwargs) File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\language_models\chat_models.py", line 411, in 
stream raise e File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_core\language_models\chat_models.py", line 391, in stream for chunk in self._stream(messages, stop=stop, **kwargs): File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\langchain_databricks\chat_models.py", line 302, in _stream for chunk in self._client.predict_stream(endpoint=self.endpoint, inputs=data): File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\mlflow\deployments\databricks__init.py", line 298, in predict_stream chunk_line_iter = self._call_endpoint_stream( ^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\mlflow\deployments\databricks\init__.py", line 176, in _call_endpoint_stream augmented_raise_for_status(response) File "C:\Users\yreddy\AppData\Local\miniconda3\envs\chat\Lib\site-packages\mlflow\utils\request_utils.py", line 66, in augmented_raise_for_status raise HTTPError(

B-Step62 commented 2 months ago

@yashasreddy Thank you for reporting the issue! It appears that the endpoint "databricks-meta-llama-3-1-70b-instruct" does not support `id` fields in input messages. That field is not included in your input message, but it is present in one of the output messages from the assistant, which is passed back to the chat endpoint when using RunnableWithMessageHistory. This is why it works fine for the first invocation, since you don't have any history messages yet.

The new ChatDatabricks class passes this `id` field as-is to the chat endpoint (code), which it should not. We will file a fix soon!