Chainlit / chainlit

Build Conversational AI in minutes ⚡️
https://docs.chainlit.io
Apache License 2.0

Error in AsyncLangchainCallbackHandler.on_llm_start callback: 'NoneType' object is not a mapping #351

Closed: VishwasKukreti closed this issue 11 months ago

VishwasKukreti commented 1 year ago

I am trying to follow the document QA example on a local dataset, except that I am using Llama 2 instead of OpenAI. I am stumped by the error in AsyncLangchainCallbackHandler; everything I have tried has been unsuccessful. How can I overcome this error?

My code is:


import transformers
from transformers import AutoModelForCausalLM, AutoTokenizer

from langchain.chains import RetrievalQAWithSourcesChain
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.llms import HuggingFacePipeline
from langchain.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    SystemMessagePromptTemplate,
)
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

import chainlit as cl
text_splitter = RecursiveCharacterTextSplitter()
system_template = """some template"""
messages = [
    SystemMessagePromptTemplate.from_template(system_template),
    HumanMessagePromptTemplate.from_template("{question}"),
]
prompt = ChatPromptTemplate.from_messages(messages)
chain_type_kwargs = {"prompt": prompt}
name = "meta-llama/Llama-2-7b-chat-hf"
auth_token = "****"
@cl.on_chat_start
async def init():
    print('start')
    with open('file.txt', 'r') as f:
        text = f.read()
    tokenizer = AutoTokenizer.from_pretrained(name)
    model = AutoModelForCausalLM.from_pretrained(name)
    # Split the text into chunks
    texts = text_splitter.split_text(text)
    # Create a metadata for each chunk
    metadatas = [{"source": f"{i}-pl"} for i in range(len(texts))]
    # Create a Chroma vector store
    embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
    print('embeddings created')
    docsearch = await cl.make_async(Chroma.from_texts)(texts, embeddings, metadatas=metadatas, persist_directory="db")
    docsearch.persist()    
    print('docsearch done')
    generate_text = transformers.pipeline(
        model=model,
        tokenizer=tokenizer,
        return_full_text=True, 
        task='text-generation',
        # we pass model parameters here too
        temperature=0.1,  
        max_new_tokens=512,  
        repetition_penalty=1.1  
    )
    llm = HuggingFacePipeline(pipeline=generate_text)
    # Create a chain that uses the Chroma vector store
    chain = RetrievalQAWithSourcesChain.from_chain_type(
        llm,
        chain_type="stuff",
        chain_type_kwargs=chain_type_kwargs,  # wire in the custom prompt defined above
        retriever=docsearch.as_retriever(),
    )
    # Save the metadata and texts in the user session
    cl.user_session.set("metadatas", metadatas)
    cl.user_session.set("texts", texts)
    cl.user_session.set("chain", chain)
@cl.on_message
async def main(message):
    chain = cl.user_session.get("chain")  
    cb = cl.AsyncLangchainCallbackHandler(stream_final_answer=True, answer_prefix_tokens=["FINAL", "ANSWER"])
    cb.answer_reached = True
    res = await chain.acall(message, callbacks=[cb])
    print('res updated')
    answer = res["answer"].strip()
    sources = res["sources"].strip()
    source_elements = []
    # Get the metadata and texts from the user session
    metadatas = cl.user_session.get("metadatas")
    all_sources = [m["source"] for m in metadatas]
    texts = cl.user_session.get("texts")
    if sources:
        found_sources = []
        # Add the sources to the message
        for source in sources.split(","):
            source_name = source.strip().replace(".", "")
            # Get the index of the source
            try:
                index = all_sources.index(source_name)
            except ValueError:
                continue
            text = texts[index]
            found_sources.append(source_name)
            # Create the text element referenced in the message
            source_elements.append(cl.Text(content=text, name=source_name))
        if found_sources:
            answer += f"\nSources: {', '.join(found_sources)}"
        else:
            answer += "\nNo sources found"
    if cb.has_streamed_final_answer:
        cb.final_stream.elements = source_elements
        await cb.final_stream.update()
    else:
        await cl.Message(content=answer, elements=source_elements).send()
willydouhard commented 1 year ago

What version of chainlit are you using? Can you provide the full error stack trace?

VishwasKukreti commented 1 year ago

The version is 0.6.402. Two errors pop up during the run: first 'Exception in ASGI application', and then the Error in AsyncLangchainCallbackHandler. The trace for the AsyncLangchainCallbackHandler error is:

2023-09-03 19:35:35 - Error in AsyncLangchainCallbackHandler.on_llm_start callback: 'NoneType' object is not a mapping
2023-09-03 19:35:35 - 'NoneType' object is not a mapping
Traceback (most recent call last):
  File "/usr/local/lib/python3.10/dist-packages/chainlit/utils.py", line 40, in wrapper
    return await user_function(**params_values)
  File "/content/cl_f.py", line 170, in main
    res = await chain.acall(message, callbacks=[cb])
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 361, in acall
    raise e
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 355, in acall
    await self._acall(inputs, run_manager=run_manager)
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/qa_with_sources/base.py", line 189, in _acall
    answer = await self.combine_documents_chain.arun(
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 568, in arun
    await self.acall(
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 361, in acall
    raise e
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 355, in acall
    await self._acall(inputs, run_manager=run_manager)
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/combine_documents/base.py", line 121, in _acall
    output, extra_return_dict = await self.acombine_docs(
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/combine_documents/stuff.py", line 189, in acombine_docs
    return await self.llm_chain.apredict(callbacks=callbacks, **inputs), {}
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/llm.py", line 272, in apredict
    return (await self.acall(kwargs, callbacks=callbacks))[self.output_key]
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 361, in acall
    raise e
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 355, in acall
    await self._acall(inputs, run_manager=run_manager)
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/llm.py", line 237, in _acall
    response = await self.agenerate([inputs], run_manager=run_manager)
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/llm.py", line 115, in agenerate
    return await self.llm.agenerate_prompt(
  File "/usr/local/lib/python3.10/dist-packages/langchain/llms/base.py", line 496, in agenerate_prompt
    return await self.agenerate(
  File "/usr/local/lib/python3.10/dist-packages/langchain/llms/base.py", line 764, in agenerate
    run_managers = await asyncio.gather(
  File "/usr/lib/python3.10/asyncio/tasks.py", line 304, in __wakeup
    future.result()
  File "/usr/lib/python3.10/asyncio/tasks.py", line 234, in __step
    result = coro.throw(exc)
  File "/usr/local/lib/python3.10/dist-packages/langchain/callbacks/manager.py", line 1407, in on_llm_start
    await asyncio.gather(*tasks)
  File "/usr/lib/python3.10/asyncio/tasks.py", line 304, in __wakeup
    future.result()
  File "/usr/lib/python3.10/asyncio/tasks.py", line 234, in __step
    result = coro.throw(exc)
  File "/usr/local/lib/python3.10/dist-packages/langchain/callbacks/manager.py", line 412, in _ahandle_event
    await asyncio.gather(
  File "/usr/lib/python3.10/asyncio/tasks.py", line 304, in __wakeup
    future.result()
  File "/usr/lib/python3.10/asyncio/tasks.py", line 232, in __step
    result = coro.send(None)
  File "/usr/local/lib/python3.10/dist-packages/langchain/callbacks/manager.py", line 397, in _ahandle_event_for_handler
    raise e
  File "/usr/local/lib/python3.10/dist-packages/langchain/callbacks/manager.py", line 367, in _ahandle_event_for_handler
    await event(*args, **kwargs)
  File "/usr/local/lib/python3.10/dist-packages/chainlit/langchain/callbacks.py", line 528, in on_llm_start
    _on_llm_start(self, serialized, prompts, **kwargs)
  File "/usr/local/lib/python3.10/dist-packages/chainlit/langchain/callbacks.py", line 355, in _on_llm_start
    provider, settings = get_llm_settings(invocation_params, serialized)
  File "/usr/local/lib/python3.10/dist-packages/chainlit/langchain/callbacks.py", line 29, in get_llm_settings
    merged = {
TypeError: 'NoneType' object is not a mapping
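
For context: the failing statement in get_llm_settings builds a dict by unpacking mappings with **, and unpacking None raises exactly this error. A minimal sketch of the failure mode, assuming one of the handler's two arguments arrives as None (the variable name here is illustrative):

serialized = None  # hypothetical: what the callback received in this case

try:
    merged = {**serialized}  # dict unpacking requires a mapping
except TypeError as e:
    print(e)  # 'NoneType' object is not a mapping

# A defensive fix coerces None to an empty dict before unpacking:
merged = {**(serialized or {})}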
willydouhard commented 1 year ago

Thank you. What is your version of langchain? Can you try updating it?

VishwasKukreti commented 1 year ago

I am on the latest one, 0.6.402.

willydouhard commented 1 year ago

I think this is your chainlit version. I was asking about langchain :)

VishwasKukreti commented 1 year ago

Apologies. Langchain is the latest one too: 0.0.279.

willydouhard commented 1 year ago

Can you open langchain/callbacks.py in your local chainlit installation and replace get_llm_settings with this implementation:

def get_llm_settings(invocation_params: Union[Dict, None], serialized: Dict[str, Any]):
    if invocation_params is None:
        return None, None

    provider = invocation_params.pop("_type")  # type: str

    if provider.startswith("openai"):
        model_name = invocation_params.pop("model_name")
        invocation_params["model"] = model_name

    serialized = serialized or {}
    model_kwargs = invocation_params.pop("model_kwargs", {})

    merged = {
        **invocation_params,
        **model_kwargs,
        **serialized.get("kwargs", {}),
    }

    # make sure there is no api key specification
    settings = {k: v for k, v in merged.items() if not k.endswith("_api_key")}

    return provider, settings

Then, save the file and restart your app. I would like to know if that fixes your issue.

You can find the path of your chainlit installation with pip show chainlit.
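
If pip show is not handy, the install directory can also be printed from Python (a small convenience snippet, not part of Chainlit's API; the file to edit is langchain/callbacks.py inside that directory):

import os

import chainlit

# Prints the directory of the installed chainlit package
print(os.path.dirname(chainlit.__file__))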

VishwasKukreti commented 1 year ago

Hey @willydouhard. Thanks for the help. Unfortunately, the error persists.

willydouhard commented 1 year ago

Unfortunate indeed.

What if you replace it with:

def get_llm_settings(invocation_params: Union[Dict, None], serialized: Dict[str, Any]):
    return None, None

I want to make sure I scope the problem correctly.

VishwasKukreti commented 1 year ago

Now it throws a 'cannot unpack non-iterable NoneType object' error!

The trace is:

2023-09-04 15:51:44 - Error in AsyncLangchainCallbackHandler.on_llm_start callback: cannot unpack non-iterable NoneType object
2023-09-04 15:51:44 - cannot unpack non-iterable NoneType object
Traceback (most recent call last):
  File "/usr/local/lib/python3.10/dist-packages/chainlit/utils.py", line 40, in wrapper
    return await user_function(**params_values)
  File "/cl_f.py", line 170, in main
    res = await chain.acall(message, callbacks=[cb])
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 361, in acall
    raise e
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 355, in acall
    await self._acall(inputs, run_manager=run_manager)
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/qa_with_sources/base.py", line 189, in _acall
    answer = await self.combine_documents_chain.arun(
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 568, in arun
    await self.acall(
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 361, in acall
    raise e
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 355, in acall
    await self._acall(inputs, run_manager=run_manager)
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/combine_documents/base.py", line 121, in _acall
    output, extra_return_dict = await self.acombine_docs(
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/combine_documents/stuff.py", line 189, in acombine_docs
    return await self.llm_chain.apredict(callbacks=callbacks, **inputs), {}
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/llm.py", line 272, in apredict
    return (await self.acall(kwargs, callbacks=callbacks))[self.output_key]
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 361, in acall
    raise e
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/base.py", line 355, in acall
    await self._acall(inputs, run_manager=run_manager)
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/llm.py", line 237, in _acall
    response = await self.agenerate([inputs], run_manager=run_manager)
  File "/usr/local/lib/python3.10/dist-packages/langchain/chains/llm.py", line 115, in agenerate
    return await self.llm.agenerate_prompt(
  File "/usr/local/lib/python3.10/dist-packages/langchain/llms/base.py", line 496, in agenerate_prompt
    return await self.agenerate(
  File "/usr/local/lib/python3.10/dist-packages/langchain/llms/base.py", line 764, in agenerate
    run_managers = await asyncio.gather(
  File "/usr/lib/python3.10/asyncio/tasks.py", line 304, in __wakeup
    future.result()
  File "/usr/lib/python3.10/asyncio/tasks.py", line 234, in __step
    result = coro.throw(exc)
  File "/usr/local/lib/python3.10/dist-packages/langchain/callbacks/manager.py", line 1407, in on_llm_start
    await asyncio.gather(*tasks)
  File "/usr/lib/python3.10/asyncio/tasks.py", line 304, in __wakeup
    future.result()
  File "/usr/lib/python3.10/asyncio/tasks.py", line 234, in __step
    result = coro.throw(exc)
  File "/usr/local/lib/python3.10/dist-packages/langchain/callbacks/manager.py", line 412, in _ahandle_event
    await asyncio.gather(
  File "/usr/lib/python3.10/asyncio/tasks.py", line 304, in __wakeup
    future.result()
  File "/usr/lib/python3.10/asyncio/tasks.py", line 232, in __step
    result = coro.send(None)
  File "/usr/local/lib/python3.10/dist-packages/langchain/callbacks/manager.py", line 397, in _ahandle_event_for_handler
    raise e
  File "/usr/local/lib/python3.10/dist-packages/langchain/callbacks/manager.py", line 367, in _ahandle_event_for_handler
    await event(*args, **kwargs)
  File "/usr/local/lib/python3.10/dist-packages/chainlit/langchain/callbacks.py", line 509, in on_llm_start
    _on_llm_start(self, serialized, prompts, **kwargs)
  File "/usr/local/lib/python3.10/dist-packages/chainlit/langchain/callbacks.py", line 336, in _on_llm_start
    provider, settings = get_llm_settings(invocation_params, serialized)
TypeError: cannot unpack non-iterable NoneType object
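
For what it's worth, the unpack error at provider, settings = get_llm_settings(...) means the function returned a bare None rather than the (None, None) tuple, which happens if the edited body never reaches the return statement (for example, due to an indentation mistake). A tiny illustration:

def returns_tuple():
    return None, None  # a 2-tuple; unpacking into two names works


def returns_none():
    pass  # falls off the end and implicitly returns None


provider, settings = returns_tuple()  # fine

try:
    provider, settings = returns_none()
except TypeError as e:
    print(e)  # cannot unpack non-iterable NoneType object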
willydouhard commented 1 year ago

Are you sure the indentation is correct? I cannot reproduce at all, even the last error.

VishwasKukreti commented 1 year ago

Hey @willydouhard, by 'cannot reproduce' do you mean the app works, or that it does not work at all? I find the indentation to be correct; I run through every instruction twice before getting back to you. Would you be more comfortable if I shared the code with you?

willydouhard commented 1 year ago

I mean my app works. The code would be helpful! Information about your setup (OS, Python version) would also help. Thank you for taking the time!

rachaelsingleton commented 12 months ago

I'm getting a similar error...

2023-09-06 18:27:08 - Error in AsyncLangchainCallbackHandler.on_chat_model_start callback: '_type'
2023-09-06 18:27:08 - Error in AsyncLangchainCallbackHandler.on_chat_model_start callback: '_type'
2023-09-06 18:27:08 - Error in AsyncLangchainCallbackHandler.on_chat_model_start callback: '_type'
2023-09-06 18:27:08 - '_type'
Traceback (most recent call last):
  File "/home/vscode/.local/lib/python3.9/site-packages/chainlit/utils.py", line 40, in wrapper
    return await user_function(**params_values)
  File "app.py", line 94, in main
    res = await chain.acall(inputs={"question": message}, callbacks=[cb])
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/base.py", line 349, in acall
    raise e
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/base.py", line 343, in acall
    await self._acall(inputs, run_manager=run_manager)
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/conversational_retrieval/base.py", line 190, in _acall
    answer = await self.combine_docs_chain.arun(
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/base.py", line 556, in arun
    await self.acall(
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/base.py", line 349, in acall
    raise e
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/base.py", line 343, in acall
    await self._acall(inputs, run_manager=run_manager)
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/combine_documents/base.py", line 122, in _acall
    output, extra_return_dict = await self.acombine_docs(
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/combine_documents/map_reduce.py", line 241, in acombine_docs
    map_results = await self.llm_chain.aapply(
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/llm.py", line 206, in aapply
    raise e
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/llm.py", line 203, in aapply
    response = await self.agenerate(input_list, run_manager=run_manager)
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chains/llm.py", line 116, in agenerate
    return await self.llm.agenerate_prompt(
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chat_models/base.py", line 423, in agenerate_prompt
    return await self.agenerate(
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/chat_models/base.py", line 348, in agenerate
    run_managers = await callback_manager.on_chat_model_start(
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/callbacks/manager.py", line 1426, in on_chat_model_start
    await asyncio.gather(*tasks)
  File "/usr/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup
    future.result()
  File "/usr/lib/python3.9/asyncio/tasks.py", line 258, in __step
    result = coro.throw(exc)
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/callbacks/manager.py", line 383, in _ahandle_event
    await asyncio.gather(
  File "/usr/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup
    future.result()
  File "/usr/lib/python3.9/asyncio/tasks.py", line 256, in __step
    result = coro.send(None)
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/callbacks/manager.py", line 368, in _ahandle_event_for_handler
    raise e
  File "/home/vscode/.local/lib/python3.9/site-packages/langchain/callbacks/manager.py", line 338, in _ahandle_event_for_handler
    await event(*args, **kwargs)
  File "/home/vscode/.local/lib/python3.9/site-packages/chainlit/langchain/callbacks.py", line 536, in on_chat_model_start
    _on_chat_model_start(self, serialized, messages, **kwargs)
  File "/home/vscode/.local/lib/python3.9/site-packages/chainlit/langchain/callbacks.py", line 284, in _on_chat_model_start
    provider, settings = get_llm_settings(invocation_params, serialized)
  File "/home/vscode/.local/lib/python3.9/site-packages/chainlit/langchain/callbacks.py", line 21, in get_llm_settings
    provider = invocation_params.pop("_type")  # type: str
KeyError: '_type'
willydouhard commented 12 months ago

This one I understand: the easy fix is to add a default argument of None to the pop call. I will release a fix: https://github.com/Chainlit/chainlit/pull/368/files
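
For reference, a sketch of the described fix, combining the patch from earlier in this thread with the defaulted pop (the authoritative change is in the linked PR):

from typing import Any, Dict, Union


def get_llm_settings(invocation_params: Union[Dict, None], serialized: Dict[str, Any]):
    if invocation_params is None:
        return None, None

    # The fix: pop with a None default, so invocation params that lack
    # "_type" (as with some chat models) no longer raise KeyError.
    provider = invocation_params.pop("_type", None)

    if provider is not None and provider.startswith("openai"):
        invocation_params["model"] = invocation_params.pop("model_name")

    serialized = serialized or {}
    model_kwargs = invocation_params.pop("model_kwargs", {}) or {}

    merged = {
        **invocation_params,
        **model_kwargs,
        **serialized.get("kwargs", {}),
    }

    # Make sure no API key specification leaks into the exposed settings
    settings = {k: v for k, v in merged.items() if not k.endswith("_api_key")}

    return provider, settings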