I've tried on multiple PCs with different models but can't get it to work.
  File "/home/kamii/localGPT/run_localGPT.py", line 258, in <module>
    main()
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/click/core.py", line 1157, in __call__
    return self.main(*args, **kwargs)
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/click/core.py", line 1078, in main
    rv = self.invoke(ctx)
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/click/core.py", line 1434, in invoke
    return ctx.invoke(self.callback, **ctx.params)
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/click/core.py", line 783, in invoke
    return __callback(*args, **kwargs)
  File "/home/kamii/localGPT/run_localGPT.py", line 229, in main
    qa = retrieval_qa_pipline(device_type, use_history, promptTemplate_type="llama")
  File "/home/kamii/localGPT/run_localGPT.py", line 144, in retrieval_qa_pipline
    qa = RetrievalQA.from_chain_type(
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/langchain/chains/retrieval_qa/base.py", line 100, in from_chain_type
    combine_documents_chain = load_qa_chain(
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/langchain/chains/question_answering/__init__.py", line 249, in load_qa_chain
    return loader_mapping[chain_type](
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/langchain/chains/question_answering/__init__.py", line 73, in _load_stuff_chain
    llm_chain = LLMChain(
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/langchain/load/serializable.py", line 74, in __init__
    super().__init__(**kwargs)
  File "pydantic/main.py", line 341, in pydantic.main.BaseModel.__init__
pydantic.error_wrappers.ValidationError: 1 validation error for LLMChain
llm
I've tried on multiple PCs with different models but can't get it to work.
  File "/home/kamii/localGPT/run_localGPT.py", line 258, in <module>
    main()
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/click/core.py", line 1157, in __call__
    return self.main(*args, **kwargs)
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/click/core.py", line 1078, in main
    rv = self.invoke(ctx)
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/click/core.py", line 1434, in invoke
    return ctx.invoke(self.callback, **ctx.params)
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/click/core.py", line 783, in invoke
    return __callback(*args, **kwargs)
  File "/home/kamii/localGPT/run_localGPT.py", line 229, in main
    qa = retrieval_qa_pipline(device_type, use_history, promptTemplate_type="llama")
  File "/home/kamii/localGPT/run_localGPT.py", line 144, in retrieval_qa_pipline
    qa = RetrievalQA.from_chain_type(
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/langchain/chains/retrieval_qa/base.py", line 100, in from_chain_type
    combine_documents_chain = load_qa_chain(
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/langchain/chains/question_answering/__init__.py", line 249, in load_qa_chain
    return loader_mapping[chain_type](
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/langchain/chains/question_answering/__init__.py", line 73, in _load_stuff_chain
    llm_chain = LLMChain(
  File "/home/kamii/miniconda3/envs/localGPT/lib/python3.10/site-packages/langchain/load/serializable.py", line 74, in __init__
    super().__init__(**kwargs)
  File "pydantic/main.py", line 341, in pydantic.main.BaseModel.__init__
pydantic.error_wrappers.ValidationError: 1 validation error for LLMChain
llm