This is the full error log:
File "/usr/local/lib/python3.8/dist-packages/streamlit/runtime/scriptrunner/script_runner.py", line 589, in _run_script exec(code, module.__dict__) File "/opt/jetson_copilot/app/app.py", line 153, in <module> message = st.write_stream(model_res_generator(prompt)) File "/usr/local/lib/python3.8/dist-packages/streamlit/runtime/metrics_util.py", line 408, in wrapped_func result = non_optional_func(*args, **kwargs) File "/usr/local/lib/python3.8/dist-packages/streamlit/elements/write.py", line 167, in write_stream for chunk in stream: # type: ignore File "/opt/jetson_copilot/app/app.py", line 123, in model_res_generator response_stream = st.session_state.chat_engine.stream_chat(prompt) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/instrumentation/dispatcher.py", line 230, in wrapper result = func(*args, **kwargs) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/callbacks/utils.py", line 41, in wrapper return func(self, *args, **kwargs) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/chat_engine/context.py", line 198, in stream_chat context_str_template, nodes = self._generate_context(message) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/chat_engine/context.py", line 110, in _generate_context nodes = self._retriever.retrieve(message) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/instrumentation/dispatcher.py", line 230, in wrapper result = func(*args, **kwargs) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/base/base_retriever.py", line 243, in retrieve nodes = self._retrieve(query_bundle) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/instrumentation/dispatcher.py", line 230, in wrapper result = func(*args, **kwargs) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/indices/vector_store/retrievers/retriever.py", line 101, in _retrieve return self._get_nodes_with_embeddings(query_bundle) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/indices/vector_store/retrievers/retriever.py", line 177, in _get_nodes_with_embeddings query_result = self._vector_store.query(query, **self._kwargs) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/vector_stores/simple.py", line 369, in query top_similarities, top_ids = get_top_k_embeddings( File "/usr/local/lib/python3.8/dist-packages/llama_index/core/indices/query/embedding_utils.py", line 30, in get_top_k_embeddings similarity = similarity_fn(query_embedding_np, emb) File "/usr/local/lib/python3.8/dist-packages/llama_index/core/base/embeddings/base.py", line 58, in similarity product = np.dot(embedding1, embedding2) File "<__array_function__ internals>", line 200, in dot
Thanks