Traceback:
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\streamlit\runtime\scriptrunner\exec_code.py", line 88, in exec_func_with_error_handling
result = func()
^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 579, in code_to_exec
exec(code, module.__dict__)
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\app.py", line 108, in <module>
main()
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\app.py", line 99, in main
st.write_stream(response)
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\streamlit\runtime\metrics_util.py", line 410, in wrapped_func
result = non_optional_func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\streamlit\elements\write.py", line 174, in write_stream
for chunk in stream: # type: ignore
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 3407, in stream
yield from self.transform(iter([input]), config, **kwargs)
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 3394, in transform
yield from self._transform_stream_with_config(
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 2197, in _transform_stream_with_config
chunk: Output = context.run(next, iterator) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 3357, in _transform
yield from final_pipeline
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\output_parsers\transform.py", line 64, in transform
yield from self._transform_stream_with_config(
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 2161, in _transform_stream_with_config
final_input: Optional[Input] = next(input_for_tracing, None)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 1413, in transform
for ichunk in input:
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 1413, in transform
for ichunk in input:
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 3847, in transform
yield from self._transform_stream_with_config(
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 2197, in _transform_stream_with_config
chunk: Output = context.run(next, iterator) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 3832, in _transform
chunk = AddableDict({step_name: future.result()})
^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\AppData\Local\Programs\Python\Python311\Lib\concurrent\futures\_base.py", line 449, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\AppData\Local\Programs\Python\Python311\Lib\concurrent\futures\_base.py", line 401, in __get_result
raise self._exception
File "C:\Users\GV631HJ\AppData\Local\Programs\Python\Python311\Lib\concurrent\futures\thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 1431, in transform
yield from self.stream(final, config, **kwargs)
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\runnables\base.py", line 998, in stream
yield self.invoke(input, config, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\retrievers.py", line 254, in invoke
raise e
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\retrievers.py", line 247, in invoke
result = self._get_relevant_documents(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_core\vectorstores\base.py", line 1080, in _get_relevant_documents
docs = self.vectorstore.similarity_search(query, **self.search_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_chroma\vectorstores.py", line 582, in similarity_search
docs_and_scores = self.similarity_search_with_score(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_chroma\vectorstores.py", line 679, in similarity_search_with_score
query_embedding = self._embedding_function.embed_query(query)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_openai\embeddings\base.py", line 629, in embed_query
return self.embed_documents([text])[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_openai\embeddings\base.py", line 588, in embed_documents
return self._get_len_safe_embeddings(texts, engine=engine)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_openai\embeddings\base.py", line 480, in _get_len_safe_embeddings
_iter, tokens, indices = self._tokenize(texts, _chunk_size)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\langchain_openai\embeddings\base.py", line 441, in _tokenize
token = encoding.encode_ordinary(text)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\GV631HJ\OneDrive - EY\Desktop\Streamlit Chat App\chatenv\Lib\site-packages\tiktoken\core.py", line 69, in encode_ordinary
return self._core_bpe.encode_ordinary(text)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Description
I encounter the error mentioned in the title when attempting to append to `chat_history` during the `invoke` method call. Am I composing the pipeline incorrectly?
System Info
System Information
OS: Windows
OS Version: 10.0.19045
Python Version: 3.11.7 (tags/v3.11.7:fa7a6f2, Dec 4 2023, 19:24:49) [MSC v.1937 64 bit (AMD64)]
Checked other resources
Example Code
Here is my code for constructing the chain:
Error Message and Stack Trace (if applicable)
Description
I encounter the error mentioned in the title when attempting to append to `chat_history` during the `invoke` method call. Am I composing the pipeline incorrectly?
System Info
System Information
Package Information
Optional packages not installed
Other Dependencies