Open abc-w opened 23 hours ago
The error occurs because the `NebulaGraphStore` is trying to insert vertices and edges into the NebulaGraph database, but the schema for the `entity` vertex type is not found. This results in a `SemanticError`.
To fix this, you need to ensure that the schema for the `entity` vertex type and the `relationship` edge type is defined in your NebulaGraph database before running your code. Here is how you can define the schema:
Connect to NebulaGraph: Use the NebulaGraph console or a client to connect to your NebulaGraph instance.
Create the Schema:
Define the `entity` vertex type and the `relationship` edge type. For example:
CREATE SPACE IF NOT EXISTS test(partition_num=10, replica_factor=1, vid_type=FIXED_STRING(30));
USE test;
CREATE TAG IF NOT EXISTS entity(name string);
CREATE EDGE IF NOT EXISTS relationship(relationship string);
Run Your Code: After defining the schema, you can run your code. Here is the modified version of your code:
space_name = "test"
graph_store = NebulaGraphStore(
space_name=space_name,
edge_types=["relationship"],
rel_prop_names=["relationship"],
tags=["entity"],
tag_prop_names=["name"],
session_pool_kwargs={
"user": os.getenv("NEBULA_USER"),
"password": os.getenv("NEBULA_PASSWORD"),
"address": os.getenv("NEBULA_ADDRESS"),
}
)
storage_context = StorageContext.from_defaults(graph_store=graph_store)
# Create the KnowledgeGraphIndex
kg_index = KnowledgeGraphIndex.from_documents(
documents=docs,
storage_context=storage_context,
max_triplets_per_chunk=10,
space_name=space_name,
max_knowledge_sequence=15,
llm=llm,
)
Ensure that the environment variables `NEBULA_USER`, `NEBULA_PASSWORD`, and `NEBULA_ADDRESS` are set correctly.
This should resolve the `ValueError` and allow the `KnowledgeGraphIndex` to be created successfully [1][2].
To continue talking to Dosu, mention @dosu.
Question Validation
Question
space_name = "test" graph_store = NebulaGraphStore( space_name=space_name) storage_context = StorageContext.from_defaults(graph_store=graph_store)
Create the KnowledgeGraphIndex
kg_index = KnowledgeGraphIndex.from_documents( documents=docs, storage_context=storage_context, max_triplets_per_chunk=10, space_name=space_name, max_knowledge_sequence=15, llm=llm, ) my code above, Why does this error occur? Please help me with how to modify it
ValueError Traceback (most recent call last) File /data/pingchuan/miniconda3/lib/python3.12/site-packages/tenacity/__init__.py:478, in Retrying.__call__(self, fn, *args, **kwargs) 477 try: --> 478 result = fn(*args, **kwargs) 479 except BaseException: # noqa: B902
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/llama_index/graph_stores/nebula/nebula_graph_store.py:266, in NebulaGraphStore.execute(self, query, param_map) 265 if not result.is_succeeded(): --> 266 raise ValueError( 267 f"Query failed. Query: {query}, Param: {param_map}" 268 f"Error message: {result.error_msg()}" 269 ) 270 return result
ValueError: Query failed. Query: INSERT VERTEX `entity` (name) VALUES "Raptor":("Raptor");INSERT VERTEX `entity` (name) VALUES "Novel approach":("Novel approach");INSERT EDGE `relationship` (relationship,) VALUES "Raptor"->"Novel approach"@-1434174299530822493:("Is");, Param: {} Error message: SemanticError: No schema found for `entity'

The above exception was the direct cause of the following exception:
RetryError Traceback (most recent call last) Cell In[4], line 23 19 llm = LangChainLLM(llm=llm) 22 # Create the KnowledgeGraphIndex ---> 23 kg_index = KnowledgeGraphIndex.from_documents( 24 documents=docs, 25 storage_context=storage_context, 26 max_triplets_per_chunk=10, 27 space_name=space_name, 28 max_knowledge_sequence=15, 29 llm=llm, 30 )
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/llama_index/core/indices/base.py:119, in BaseIndex.from_documents(cls, documents, storage_context, show_progress, callback_manager, transformations, kwargs) 110 docstore.set_document_hash(doc.get_doc_id(), doc.hash) 112 nodes = run_transformations( 113 documents, # type: ignore 114 transformations, 115 show_progress=show_progress, 116 kwargs, 117 ) --> 119 return cls( 120 nodes=nodes, 121 storage_context=storage_context, 122 callback_manager=callback_manager, 123 show_progress=show_progress, 124 transformations=transformations, 125 **kwargs, 126 )
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/llama_index/core/indices/knowledge_graph/base.py:99, in KnowledgeGraphIndex.__init__(self, nodes, objects, index_struct, llm, embed_model, storage_context, kg_triplet_extract_template, max_triplets_per_chunk, include_embeddings, show_progress, max_object_length, kg_triplet_extract_fn, **kwargs) 96 self._llm = llm or Settings.llm 97 self._embed_model = embed_model or Settings.embed_model ---> 99 super().__init__( 100 nodes=nodes, 101 index_struct=index_struct, 102 storage_context=storage_context, 103 show_progress=show_progress, 104 objects=objects, 105 **kwargs, 106 ) 108 # TODO: legacy conversion - remove in next release 109 if ( 110 len(self.index_struct.table) > 0 111 and isinstance(self.graph_store, SimpleGraphStore) 112 and len(self.graph_store._data.graph_dict) == 0 113 ):
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/llama_index/core/indices/base.py:77, in BaseIndex.__init__(self, nodes, objects, index_struct, storage_context, callback_manager, transformations, show_progress, **kwargs) 75 if index_struct is None: 76 nodes = nodes or [] ---> 77 index_struct = self.build_index_from_nodes( 78 nodes + objects, # type: ignore 79 **kwargs, # type: ignore 80 ) 81 self._index_struct = index_struct 82 self._storage_context.index_store.add_index_struct(self._index_struct)
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/llama_index/core/indices/base.py:185, in BaseIndex.build_index_from_nodes(self, nodes, build_kwargs) 183 """Build the index from nodes.""" 184 self._docstore.add_documents(nodes, allow_update=True) --> 185 return self._build_index_from_nodes(nodes, build_kwargs)
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/llama_index/core/indices/knowledge_graph/base.py:218, in KnowledgeGraphIndex._build_index_from_nodes(self, nodes, **build_kwargs) 216 for triplet in triplets: 217 subj, _, obj = triplet --> 218 self.upsert_triplet(triplet) 219 index_struct.add_node([subj, obj], n) 221 if self.include_embeddings:
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/llama_index/core/indices/knowledge_graph/base.py:266, in KnowledgeGraphIndex.upsert_triplet(self, triplet, include_embeddings) 254 def upsert_triplet( 255 self, triplet: Tuple[str, str, str], include_embeddings: bool = False 256 ) -> None: 257 """Insert triplets and optionally embeddings. 258 259 Used for manual insertion of KG triplets (in the form (...) 264 embedding (Any, optional): Embedding option for the triplet. Defaults to None. 265 """ --> 266 self._graph_store.upsert_triplet(*triplet) 267 triplet_str = str(triplet) 268 if include_embeddings:
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/llama_index/graph_stores/nebula/nebula_graph_store.py:521, in NebulaGraphStore.upsert_triplet(self, subj, rel, obj) 510 dml_query = ( 511 f"INSERT VERTEX `{entity_type}` (name) " 512 f" VALUES {subj_field}:({QUOTE}{subj}{QUOTE});" (...) 518 f"@{rel_hash}:({QUOTE}{rel}{QUOTE});" 519 ) 520 logger.debug(f"upsert_triplet()\nDML query: {dml_query}") --> 521 result = self.execute(dml_query) 522 assert ( 523 result and result.is_succeeded() 524 ), f"Failed to upsert triplet: {subj} {rel} {obj}, query: {dml_query}"

File /data/pingchuan/miniconda3/lib/python3.12/site-packages/tenacity/__init__.py:336, in BaseRetrying.wraps.<locals>.wrapped_f(*args, **kw)
334 copy = self.copy()
335 wrapped_f.statistics = copy.statistics # type: ignore[attr-defined]
--> 336 return copy(f, args, **kw)
File /data/pingchuan/miniconda3/lib/python3.12/site-packages/tenacity/__init__.py:475, in Retrying.__call__(self, fn, *args, **kwargs) 473 retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs) 474 while True: --> 475 do = self.iter(retry_state=retry_state) 476 if isinstance(do, DoAttempt): 477 try:

File /data/pingchuan/miniconda3/lib/python3.12/site-packages/tenacity/__init__.py:376, in BaseRetrying.iter(self, retry_state) 374 result = None 375 for action in self.iter_state.actions: --> 376 result = action(retry_state) 377 return result

File /data/pingchuan/miniconda3/lib/python3.12/site-packages/tenacity/__init__.py:419, in BaseRetrying._post_stop_check_actions.<locals>.exc_check(rs) 417 if self.reraise: 418 raise retry_exc.reraise() --> 419 raise retry_exc from fut.exception()
RetryError: RetryError[<Future at 0x7f3b563c6f00 state=finished raised ValueError>]