Closed stanleyshly closed 2 years ago
Passing in opts seems to solve my issue. Here is my working code:
from parlai.core.agents import create_agent_from_model_file

# Option overrides handed to the SeeKeR agent at load time. The key prefixes
# appear to select options for SeeKeR's sub-modules — TODO confirm against the
# projects/seeker agent code:
#   drm_* : dialogue response model
#   krm_* : knowledge response model
#   sdm_* : search decision model
#   sqm_* : search query model
opt = {
"beam_disregard_knowledge_for_context_blocking": False,
"datatype": "valid",
# --- dialogue response model (beam search settings) ---
"drm_beam_block_full_context": True,
"drm_beam_block_ngram": 3,
"drm_beam_context_block_ngram": 3,
"drm_beam_min_length": 20,
"drm_beam_size": 10,
"drm_inference": "beam",
"drm_message_mutators": None,
"drm_model": "projects.seeker.agents.seeker:ComboFidSearchQueryAgent",
"exclude_context_in_krm_context_blocking": False,
"include_knowledge_in_krm_context_blocking": True,
"inject_query_string": None,
"knowledge_response_control_token": None,
# --- knowledge response model (retrieval + beam search settings) ---
"krm_beam_block_ngram": 3,
"krm_beam_context_block_ngram": 3,
"krm_beam_min_length": 1,
"krm_beam_size": 3,
"krm_doc_chunks_ranker": "woi_chunk_retrieved_docs",
"krm_inference": "beam",
"krm_message_mutators": None,
"krm_model": "projects.seeker.agents.seeker:ComboFidSearchQueryAgent",
"krm_n_ranked_doc_chunks": 1,
"krm_rag_retriever_type": "search_engine",
# NOTE(review): value is the literal two-character string "''", not empty —
# looks like shell quoting leaked into the config; verify this is intended.
"krm_search_query_generator_model_file": "''",
"krm_search_server": "",
"loglevel": "debug",
"min_knowledge_length_when_search": 10,
"model": "projects.seeker.agents.seeker:SeekerAgent",
"model_file": "zoo:seeker/seeker_dialogue_400M/model",
# --- search decision model (greedy, single-step) ---
"sdm_beam_block_ngram": -1,
"sdm_beam_min_length": 1,
"sdm_beam_size": 1,
"sdm_history_size": 1,
"sdm_inference": "greedy",
"sdm_model": "projects.seeker.agents.seeker:ComboFidSearchQueryAgent",
"search_decision": "always",
"search_decision_control_token": "__is-search-required__",
"search_decision_do_search_reply": "__do-search__",
"search_decision_dont_search_reply": "__do-not-search__",
"search_query_control_token": "__generate-query__",
# Address of the locally running search server the agent queries.
"search_server": "0.0.0.0:8080",
# --- search query model ---
"sqm_beam_block_ngram": -1,
"sqm_beam_min_length": 2,
"sqm_beam_size": 1,
"sqm_inference": "beam",
"sqm_model": "projects.seeker.agents.seeker:ComboFidSearchQueryAgent"
}
# Build the agent from the zoo checkpoint, applying the overrides above.
seeker_agent = create_agent_from_model_file("zoo:seeker/seeker_dialogue_400M/model", opt)
# Wipe any accumulated dialogue state — important when this script is
# executed more than once in the same session.
seeker_agent.reset()

# First human turn, shaped as the observation dict ParlAI agents expect.
opening_message = {'text': "hi", 'episode_done': False}
print(opening_message["episode_done"])
# The model actually witnesses the human's message here.
seeker_agent.observe(opening_message)
print(f"You said: {opening_message}")
# The model produces its reply.
first_reply = seeker_agent.act()
print(f"Seeker replied: {first_reply['text']}")
print()

# Second turn of the conversation.
follow_up = "do you like cheese?"
print(f"You said: {follow_up}")
seeker_agent.observe({'text': follow_up, "episode_done": False})
second_reply = seeker_agent.act()
print(f"Seeker replied: {second_reply['text']}")
print()
print('-' * 40)
print()
# Dump the agent's own record of the dialogue so far.
print("Seeker's history view:")
print(seeker_agent.history.get_history_str())
I'm trying to adapt the code from #2872 to work with Seeker. Here is the adapted code so far:
However, I get a KeyError:
How do I adapt this code, intended for BlenderBot, to work with Seeker?