18811449050 opened this issue 1 month ago
The error `TypeError: AsyncClient.chat() got an unexpected keyword argument 'tools'` occurs because the `AsyncClient.chat()` method does not accept a keyword argument named `tools`. To resolve this error, remove the `tools` argument from the `AsyncClient.chat()` method call.

Here is an example of how you might modify your code:
```python
# Incorrect usage
response = await self.async_client.chat(
    model=self.model,
    messages=ollama_messages,
    stream=True,
    format="json" if self.json_mode else "",
    tools=tools,  # Remove this line
    options=self._model_kwargs,
)
```

```python
# Correct usage
response = await self.async_client.chat(
    model=self.model,
    messages=ollama_messages,
    stream=True,
    format="json" if self.json_mode else "",
    options=self._model_kwargs,
)
```
Make sure to check the documentation or the method definition for `AsyncClient.chat()` to see which arguments are accepted and adjust your code accordingly [1].
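One way to check this directly is to inspect the signature of the client you actually have installed. A minimal sketch using the standard-library `inspect` module (this assumes the `ollama` package is importable in your environment):

```python
import inspect

from ollama import AsyncClient

# List the keyword arguments accepted by the installed AsyncClient.chat()
params = inspect.signature(AsyncClient.chat).parameters
print(sorted(params))

# Recent ollama releases accept a `tools` keyword; older ones do not
print("tools supported:", "tools" in params)
```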
I couldn't replicate this, actually. Maybe try `pip install -U llama-index-llms-ollama ollama`?

`tools` is definitely a kwarg in the client; maybe your Ollama client is out of date.
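If you want to confirm what is installed before upgrading, a minimal sketch using `importlib.metadata` (standard library; the two package names are the ones from the `pip install` command above):

```python
from importlib.metadata import version

# Show which versions are currently installed
print("ollama:", version("ollama"))
print("llama-index-llms-ollama:", version("llama-index-llms-ollama"))
```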
Bug Description
```python
from typing import Literal

from llama_index.llms.ollama import Ollama
from llama_index.core.indices.property_graph import SchemaLLMPathExtractor
from llama_index.vector_stores.neo4jvector import Neo4jVectorStore
from llama_index.graph_stores.neo4j import Neo4jPropertyGraphStore

from li_llm import li_llm
from li_embed_model import HuggingFaceEmbeddingModel

import nest_asyncio

nest_asyncio.apply()

# best practice to use upper-case
entities = Literal["PERSON", "PLACE", "ORGANIZATION"]
relations = Literal["HAS", "PART_OF", "WORKED_ON", "WORKED_WITH", "WORKED_AT"]

# define which entities can have which relations
validation_schema = {
    "PERSON": ["HAS", "PART_OF", "WORKED_ON", "WORKED_WITH", "WORKED_AT"],
    "PLACE": ["HAS", "PART_OF", "WORKED_AT"],
    "ORGANIZATION": ["HAS", "PART_OF", "WORKED_WITH"],
}
validation_schema = [
    ("ORGANIZATION", "HAS", "PERSON"),
    ("PERSON", "WORKED_AT", "ORGANIZATION"),
    ("PERSON", "WORKED_WITH", "PERSON"),
    ("PERSON", "WORKED_ON", "ORGANIZATION"),
    ("PERSON", "PART_OF", "ORGANIZATION"),
    ("ORGANIZATION", "PART_OF", "ORGANIZATION"),
    ("PERSON", "WORKED_AT", "PLACE"),
]

kg_extractor = SchemaLLMPathExtractor(
    llm=Ollama(model="llama3:70b", json_mode=True, request_timeout=3600),
    possible_entities=entities,
    possible_relations=relations,
    kg_validation_schema=validation_schema,
    # if false, allows for values outside of the schema
    strict=True,
)


class LiNeo4jPropertyGraphStore(Neo4jPropertyGraphStore):
    def __enter__(self):
        return self


graph_store = LiNeo4jPropertyGraphStore(
    username="neo4j",
    password="123456789",
    url="bolt://0.0.0.0:7687",
)

vec_store = None

graph_store._driver.close()

from llama_index.core import SimpleDirectoryReader
from llama_index.core import PropertyGraphIndex
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

documents = SimpleDirectoryReader("./data_all/").load_data()
print(len(documents))

index = PropertyGraphIndex.from_documents(
    documents,
    kg_extractors=[kg_extractor],
    embed_model=HuggingFaceEmbedding(
        model_name="/data1/mgl/model/bge-large-zh-v1.5-model/"
    ),
    property_graph_store=graph_store,
    vector_store=vec_store,
    show_progress=True,
)

from llama_index.core.indices.property_graph import (
    LLMSynonymRetriever,
    VectorContextRetriever,
)

llm_synonym = LLMSynonymRetriever(
    index.property_graph_store,
    llm=Ollama(
        model="llama3:70b",
        request_timeout=120.0,
        context_window=3900,
        json_mode=True,
        temperature=0.1,
        num_output=256,
    ),
    include_text=False,
)
vector_context = VectorContextRetriever(
    index.property_graph_store,
    embed_model=HuggingFaceEmbedding(
        model_name="/data1/mgl/model/bge-large-zh-v1.5-model/"
    ),
    include_text=False,
)
retriever = index.as_retriever(
    sub_retrievers=[
        llm_synonym,
        vector_context,
    ]
)
nodes = retriever.retrieve("What happened at Interleaf?")

for node in nodes:
    print(node.text)
```
Version
stable
Steps to Reproduce
What is the reason for this error?
Relevant Logs/Tracebacks