Closed santosh-gkg closed 6 months ago
Hi, I'm not seeing this problem. If I have auth like:
{
"jon": {
"password": "jon",
"userid": "c78b94bb-05d1-4124-a253-fc870ca71d28",
"selection_docs_state": {
"langchain_modes": [
"UserData",
"MyData",
"LLM",
"Disabled",
"dudedata",
"dogdata",
"dogdata1",
"aaaaa",
"feefef",
"xxx"
],
"langchain_mode_paths": {
"UserData": null,
"dudedata": null,
"dogdata": null,
"dogdata1": null,
"aaaaa": null,
"feefef": null,
"xxx": null
},
"langchain_mode_types": {
"UserData": "shared",
"github h2oGPT": "shared",
"DriverlessAI docs": "shared",
"wiki": "shared",
"wiki_full": "",
"MyData": "personal",
"LLM": "personal",
"Disabled": "personal",
"dudedata": "personal",
"dogdata": "personal",
"dogdata1": "personal",
"aaaaa": "personal",
"feefef": "personal",
"xxx": "personal"
}
},
"model_options_state": [
[
"[]",
"01-ai/Yi-34B-Chat",
"01-ai/Yi-34b-Chat",
"Arc53/docsgpt-7b-mistral",
"BAAI/AquilaChat2-34B",
"BAAI/AquilaChat2-34B-16K",
"BAAI/bge-large-en-v1.5",
"BAAI/bge-reranker-large",
"CohereForAI/c4ai-command-r-v01",
"EleutherAI/gpt-neo-1.3B",
"HuggingFaceH4/zephyr-7b-alpha",
"HuggingFaceH4/zephyr-7b-beta",
"Nexusflow/NexusRaven-V2-13B",
"NousResearch/Nous-Capybara-34B",
"OpenAssistant/reward-model-deberta-v3-large-v2",
"Qwen/Qwen1.5-72B-Chat",
"SeaLLMs/SeaLLM-7B-v2",
"THUDM/chatglm3-6b",
"TheBloke/Llama-2-13B-AWQ",
"TheBloke/Llama-2-13B-chat-AWQ",
"TheBloke/Llama-2-13B-chat-GPTQ",
"TheBloke/Llama-2-70B-chat-GPTQ",
"TheBloke/Llama-2-7B-Chat-GGUF",
"TheBloke/Llama-2-7B-Chat-GPTQ",
"TheBloke/Llama-2-7B-chat-GPTQ",
"TheBloke/Llama-2-7b-Chat-GPTQ",
"TheBloke/Mistral-7B-Instruct-v0.2-AWQ",
"TheBloke/Mixtral-8x7B-Instruct-v0.1-AWQ",
"TheBloke/Nous-Hermes-13B-GPTQ",
"TheBloke/Nous-Hermes-2-Mixtral-8x7B-DPO-AWQ",
"TheBloke/Nous-Hermes-Llama2-GPTQ",
"TheBloke/Xwin-LM-13B-V0.1-GPTQ",
"TheBloke/Xwin-LM-13B-v0.2-GPTQ",
"TheBloke/dolphin-2.7-mixtral-8x7b-AWQ",
"TheBloke/em_german_leo_mistral-GPTQ",
"TheBloke/openchat-3.5-1210-AWQ",
"TheBloke/openchat_3.5-16k-AWQ",
"TheBloke/zephyr-7B-beta-AWQ",
"TheBloke/zephyr-7B-beta-GGUF",
"ToolBench/ToolLLaMA-2-7b",
"Xwin-LM/Xwin-Math-7B-V1.0",
"Yukang/LongAlpaca-70B",
"amazon/MistralLite",
"berkeley-nest/Starling-LM-7B-alpha",
"bert-base-multilingual-cased",
"bert-base-uncased",
"cerebras/Cerebras-GPT-111M",
"core42/jais-13b-chat",
"deepseek-ai/deepseek-coder-33b-instruct",
"deepseek-ai/deepseek-coder-6.7b-instruct",
"distilgpt2",
"facebook/wav2vec2-base-960h",
"gemini-pro",
"google/gemma-7b-it",
"gpt-3.5-turbo",
"gpt2",
"h2oai/h2o-danube-1.8b-chat",
"h2oai/h2o-danube-1.8b-sft",
"h2oai/h2ogpt-16k-aquilachat2-34b",
"h2oai/h2ogpt-16k-codellama-34b-instruct",
"h2oai/h2ogpt-32k-codellama-34b-instruct",
"h2oai/h2ogpt-4096-llama2-13b-chat",
"h2oai/h2ogpt-4096-llama2-70b-chat",
"h2oai/h2ogpt-4096-llama2-70b-chat-4bit",
"h2oai/h2ogpt-4096-llama2-7b",
"h2oai/h2ogpt-4096-llama2-7b-chat",
"h2oai/h2ogpt-gm-10b-solar-rag",
"h2oai/h2ogpt-gm-7b-mistral-chat-sft-dpo-rag-v1",
"h2oai/h2ogpt-gm-oasst1-en-2048-falcon-40b-v1",
"h2oai/h2ogpt-gm-oasst1-en-2048-falcon-40b-v2",
"h2oai/h2ogpt-gm-oasst1-en-2048-falcon-7b-v3",
"h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-7b-preview-300bt",
"h2oai/h2ogpt-oasst1-512-12b",
"h2oai/h2ogpt-oasst1-falcon-40b",
"h2oai/h2ogpt-oig-oasst1-256-20b",
"h2oai/h2ogpt-oig-oasst1-256-6.9b",
"h2oai/h2ogpt-oig-oasst1-256-6_9b",
"h2oai/h2ogpt-oig-oasst1-512-6_9b",
"h2oai/h2ogpt-research-oasst1-llama-65b",
"h2oai/qa-v0",
"junelee/wizard-vicuna-13b",
"liuhaotian/llava-v1.6-34b",
"liuhaotian/llava-v1.6-vicuna-13b",
"llamacpp_path/llama-2-7b-chat.Q6_K.gguf",
"llamacpp_path/mistral-7b-instruct-v0.2.Q4_K_M.gguf",
"llamacpp_path/mistral-7b-instruct-v0.2.Q5_K_M.gguf",
"llamacpp_path/mistral-7b-v0.1.Q5_K_M.gguf",
"llamacpp_path/zephyr-7b-beta.Q5_K_M.gguf",
"llmware/dragon-mistral-7b-v0",
"lmsys/fastchat-t5-3b-v1.0",
"lmsys/vicuna-13b-v1.5",
"lmsys/vicuna-13b-v1.5-16k",
"lmsys/vicuna-33b-v1.3",
"meta-llama/Llama-2-13b-chat-hf",
"meta-llama/Llama-2-70b-chat-hf",
"meta-llama/Llama-2-7b-chat-hf",
"mistralai/Mistral-7B-Instruct-v0.1",
"mistralai/Mistral-7B-Instruct-v0.2",
"mistralai/Mistral-7B-v0.1",
"mistralai/Mixtral-8x7B-Instruct-v0.1",
"mosaicml/mpt-30b-chat",
"mosaicml/mpt-30b-instruct",
"mosaicml/mpt-7b",
"namespace-Pt/activation-beacon-llama2-7b-chat",
"nomic-ai/nomic-embed-text-v1",
"openchat/openchat-3.5-1210",
"roberta-large",
"tiiuae/falcon-180B-chat",
"tiiuae/falcon-40b-instruct",
"togethercomputer/RedPajama-INCITE-Chat-3B-v1",
"ybelkada/Mixtral-8x7B-Instruct-v0.1-AWQ"
]
],
"lora_options_state": [
[
"[]"
]
],
"server_options_state": [
[
"[]",
"google",
"openai",
"openai_chat"
]
],
"langchain_mode": "xxx",
"text_output": [
[
"Summarize Collection: xxx, Subset: Relevant, Documents: ['All']",
"- H2O has had a strong community presence in India since its inception.\n- Bangalore has consistently been a major source of downloads for H2O.\n- H2O has a significant customer base in India that has supported the company for many years.\n\n<details><summary><font size=\"2\">Sources</font></summary><font size=\"2\"><font size=\"2\">Sources [Score | Link]:<ul></font><font size=\"2\"><li>0.71 | <font size=\"2\"><a href=\"file//tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav\" target=\"_blank\" rel=\"noopener noreferrer\">/tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav</a></font></li><details><summary><font size=\"2\"> Thank you for gathering today. H2O has </font></summary><font size=\"2\"> Thank you for gathering today. H2O has been a community movement for almost since the day we started. So it's a privilege to welcome our H2O India community and our customers who have been supporting us for better part of the company's existence. For years, our largest downloads would be coming from Bangalore.</font></details></font>Total Time: 14 [s]<p>Total document chunks used: 1<p><font size=\"2\"></ul></p>End Sources<p></font></font></details>"
],
[
"Extract Collection: xxx, Subset: Relevant, Documents: ['All']",
"['- H2O has had a strong community presence in India since its inception.\\n- Bangalore has consistently been a major source of downloads for H2O.\\n- H2O has a significant customer base in India that has supported the company for many years.']"
]
],
"text_output2": [
[
"Summarize Collection: xxx, Subset: Relevant, Documents: ['All']",
null
],
[
"Extract Collection: xxx, Subset: Relevant, Documents: ['All']",
null
]
],
"text_outputs": [
[],
[
[
"Who are you?",
"Hello! Based on the context provided, I can infer that you are the H2O India community and customers who have been supporting the company for a significant portion of its existence. The document mentions that the largest downloads used to come from Bangalore, indicating that you are located in Bangalore or have a strong presence there. Additionally, the use of the phrase \"our community\" suggests that you are a group of individuals who share a common interest or goal, possibly related to the use of H2O software or technology."
],
[
null,
"<details><summary><font size=\"2\">Sources</font></summary><font size=\"2\"><font size=\"2\">Sources [Score | Link]:<ul></font><font size=\"2\"><li>0.71 | <font size=\"2\"><a href=\"file//tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav\" target=\"_blank\" rel=\"noopener noreferrer\">/tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav</a></font></li><details><summary><font size=\"2\">Thank you for gathering today. H2O has b</font></summary><font size=\"2\">Thank you for gathering today. H2O has been a community movement for almost since the day we started. So it's a privilege to welcome our H2O India community and our customers who have been supporting us for better part of the company's existence. For years, our largest downloads would be coming from Bangalore.</font></details></font>Total Time: 2 [s]<p>Total document chunks used: 1<p><font size=\"2\"></ul></p>End Sources<p></font></font></details>"
]
],
[],
[],
[],
[],
[],
[],
[
[
"Who are you?",
"Based on the information provided in the document context, I am an assistant associated with the H2O community, which is a community movement that has been active since the early days of the organization. The context suggests that H2O has a significant presence in India, particularly in Bangalore, as it has been a major source of the company's downloads. However, the text does not provide specific details about my role or identity within the H2O community."
],
[
null,
"<details><summary><font size=\"2\">Sources</font></summary><font size=\"2\"><font size=\"2\">Sources [Score | Link]:<ul></font><font size=\"2\"><li>0.71 | <font size=\"2\"><a href=\"file//tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav\" target=\"_blank\" rel=\"noopener noreferrer\">/tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav</a></font></li><details><summary><font size=\"2\">Thank you for gathering today. H2O has b</font></summary><font size=\"2\">Thank you for gathering today. H2O has been a community movement for almost since the day we started. So it's a privilege to welcome our H2O India community and our customers who have been supporting us for better part of the company's existence. For years, our largest downloads would be coming from Bangalore.</font></details></font>Total Time: 4 [s]<p>Input Tokens: 226 | Output Tokens: 97<p>Total document chunks used: 1<p><font size=\"2\"></ul></p>End Sources<p></font></font></details>"
]
],
[
[
"Who are you?",
"I am an expert document question-answer language model named GPT-4 Turbo, created by OpenAI. I have been provided with the context of a gathering where H2O, a community movement, is being discussed. The document mentions that H2O India community and customers have been supporting the company for a significant portion of its existence, and that the largest downloads of their product have been coming from Bangalore for years. However, the context does not provide any information about my identity beyond my designation as a language model."
],
[
null,
"<details><summary><font size=\"2\">Sources</font></summary><font size=\"2\"><font size=\"2\">Sources [Score | Link]:<ul></font><font size=\"2\"><li>0.71 | <font size=\"2\"><a href=\"file//tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav\" target=\"_blank\" rel=\"noopener noreferrer\">/tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav</a></font></li><details><summary><font size=\"2\">Thank you for gathering today. H2O has b</font></summary><font size=\"2\">Thank you for gathering today. H2O has been a community movement for almost since the day we started. So it's a privilege to welcome our H2O India community and our customers who have been supporting us for better part of the company's existence. For years, our largest downloads would be coming from Bangalore.</font></details></font>Total Time: 3 [s]<p>Total document chunks used: 1<p><font size=\"2\"></ul></p>End Sources<p></font></font></details>"
]
],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[]
],
"roles_state": {
"Female AI Assistant": "models/female.wav",
"Male AI Assistant": "models/male.wav",
"AI Beard The Pirate": "models/pirate_by_coqui.wav",
"None": "",
"sri": "/tmp/gradio/d41ef01ec64f0717abd366c65cc40feee7980d01/srivoice.wav.new.wav.new.wav",
"sri2": "/tmp/gradio/cf560ac6c85a153dfe8584fd1e0d0dedfa49a8c8/sri2.wav.new.wav.new.wav",
"sri3": "/tmp/gradio/79cae1b535ec5ae9440d817e87712807b4ed94f8/sri3.wav.new.wav.new.wav"
},
"visible_models": [
"h2oai/h2ogpt-4096-llama2-13b-chat",
"mistralai/Mixtral-8x7B-Instruct-v0.1",
"mistral-medium"
],
"h2ogpt_key": "62224bfb-c832-4452-81e7-8a4bdabbe164"
},
"45f87d15-4ce5-476b-9e16-bde448bf04b7": {
"password": "c2e3cc6b-887b-459a-b01c-e4f738a2ffce",
"userid": "c0ace93a-262a-4a8d-bc7b-de7d21e792e0",
"selection_docs_state": {
"langchain_modes": [
"UserData",
"MyData",
"LLM",
"Disabled"
],
"langchain_mode_paths": {
"UserData": null
},
"langchain_mode_types": {
"UserData": "shared",
"github h2oGPT": "shared",
"DriverlessAI docs": "shared",
"wiki": "shared",
"wiki_full": "",
"MyData": "personal",
"LLM": "personal",
"Disabled": "personal"
}
}
},
"admin": {
"password": "Delta###9999FFF@@",
"userid": "5d4c5774-8c05-4e22-927e-0cc2f064ef66",
"selection_docs_state": {
"langchain_modes": [
"UserData",
"MyData",
"LLM",
"Disabled"
],
"langchain_mode_paths": {
"UserData": null
},
"langchain_mode_types": {
"UserData": "shared",
"github h2oGPT": "shared",
"DriverlessAI docs": "shared",
"wiki": "shared",
"wiki_full": "",
"MyData": "personal",
"LLM": "personal",
"Disabled": "personal"
}
}
},
"bob": {
"password": "bob",
"userid": "c03f6da5-9d9c-409f-b2e2-55068a599fc5",
"selection_docs_state": {
"langchain_modes": [
"UserData",
"MyData",
"LLM",
"Disabled"
],
"langchain_mode_paths": {
"UserData": null
},
"langchain_mode_types": {
"UserData": "shared",
"github h2oGPT": "shared",
"DriverlessAI docs": "shared",
"wiki": "shared",
"wiki_full": "",
"MyData": "personal",
"LLM": "personal",
"Disabled": "personal"
}
},
"model_options_state": [
[
"[]",
"HuggingFaceH4/zephyr-7b-beta",
"TheBloke/Llama-2-7B-Chat-GGUF",
"TheBloke/Xwin-LM-13B-V0.1-GPTQ",
"TheBloke/zephyr-7B-beta-AWQ",
"TheBloke/zephyr-7B-beta-GGUF",
"Yukang/LongAlpaca-70B",
"gemini-pro",
"gpt-3.5-turbo",
"h2oai/h2ogpt-32k-codellama-34b-instruct",
"lmsys/vicuna-13b-v1.5-16k"
]
],
"lora_options_state": [
[
"[]"
]
],
"server_options_state": [
[
"[]",
"google",
"openai",
"openai_chat"
]
],
"langchain_mode": "MyData",
"text_output": [
[
"Summarize Collection: MyData, Subset: Relevant, Documents: ['All']",
""
]
],
"text_output2": [
[
"Summarize Collection: MyData, Subset: Relevant, Documents: ['All']",
null
]
]
},
"dog": {
"password": "dog",
"userid": "f18c6b3b-8f89-42e3-9e26-197a743d6b65",
"selection_docs_state": {
"langchain_modes": [
"UserData",
"MyData",
"LLM",
"Disabled"
],
"langchain_mode_paths": {
"UserData": null
},
"langchain_mode_types": {
"UserData": "shared",
"github h2oGPT": "shared",
"DriverlessAI docs": "shared",
"wiki": "shared",
"wiki_full": "",
"MyData": "personal",
"LLM": "personal",
"Disabled": "personal"
}
},
"model_options_state": [
[
"[]",
"HuggingFaceH4/zephyr-7b-beta",
"TheBloke/Llama-2-7B-Chat-GGUF",
"TheBloke/Xwin-LM-13B-V0.1-GPTQ",
"TheBloke/zephyr-7B-beta-AWQ",
"TheBloke/zephyr-7B-beta-GGUF",
"Yukang/LongAlpaca-70B",
"gemini-pro",
"gpt-3.5-turbo",
"h2oai/h2ogpt-32k-codellama-34b-instruct",
"lmsys/vicuna-13b-v1.5-16k"
]
],
"lora_options_state": [
[
"[]"
]
],
"server_options_state": [
[
"[]",
"google",
"openai",
"openai_chat"
]
],
"langchain_mode": "MyData",
"text_output": [
[
"Summarize Collection: MyData, Subset: Relevant, Documents: ['All']",
"- Zulu is hot.\n\n<details><summary><font size=\"2\">Sources</font></summary><font size=\"2\"><font size=\"2\">Sources [Score | Link]:<ul></font><font size=\"2\"><li>0.72 | <font size=\"2\"><a href=\"file/user_paste/_3021cd8c-b.txt\" target=\"_blank\" rel=\"noopener noreferrer\">user_paste/_3021cd8c-b.txt</a></font></li><details><summary><font size=\"2\">Zulu is hot.</font></summary><font size=\"2\">Zulu is hot.</font></details></font>Total Time: 50 [s]<p>Total document chunks used: 1<p><font size=\"2\"></ul></p>End Sources<p></font></font></details>"
]
],
"text_output2": [
[
"Summarize Collection: MyData, Subset: Relevant, Documents: ['All']",
null
]
]
},
"paddy": {
"password": "paddy",
"userid": "40226033-352a-479a-a0b9-995093b29083",
"selection_docs_state": {
"langchain_modes": [
"UserData",
"MyData",
"LLM",
"Disabled"
],
"langchain_mode_paths": {
"UserData": null
},
"langchain_mode_types": {
"UserData": "shared",
"github h2oGPT": "shared",
"DriverlessAI docs": "shared",
"wiki": "shared",
"wiki_full": "",
"MyData": "personal",
"LLM": "personal",
"Disabled": "personal"
}
},
"model_options_state": [
[
"[]",
"HuggingFaceH4/zephyr-7b-beta",
"TheBloke/Llama-2-7B-Chat-GGUF",
"TheBloke/Xwin-LM-13B-V0.1-GPTQ",
"TheBloke/zephyr-7B-beta-AWQ",
"TheBloke/zephyr-7B-beta-GGUF",
"Yukang/LongAlpaca-70B",
"gpt-3.5-turbo",
"h2oai/h2ogpt-32k-codellama-34b-instruct",
"lmsys/vicuna-13b-v1.5-16k"
]
],
"lora_options_state": [
[
"[]"
]
],
"server_options_state": [
[
"[]",
"openai",
"openai_chat"
]
],
"text_outputs": [
[
[
"Who are you?",
"Hello! I'm just an AI, my purpose is to assist and provide helpful information to the best of my abilities. I am programmed to follow ethical guidelines and promote respectful and positive interactions. I am not capable of providing harmful or offensive responses, and I will always strive to provide accurate and helpful answers to your questions. If a question does not make sense or is not factually coherent, I will do my best to explain why and provide clarification. If I don't know the answer to a question, I will not provide false information and will instead suggest ways for you to find the answer or clarify the question. Please feel free to ask me anything, and I will do my best to assist you."
]
],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[],
[]
],
"langchain_mode": "LLM",
"h2ogpt_key": "62224bfb-c832-4452-81e7-8a4bdabbe164",
"visible_models": [
"h2oai/h2ogpt-4096-llama2-70b-chat"
]
}
}
then I start the server
python generate.py \
--verbose=True \
--score_model=None \
--pre_load_embedding_model=False \
--base_model=gemini-pro \
--inference_server=google \
--guest_name=guest \
--auth=auth.json \
--add_disk_models_to_ui=False \
--visible_ratings=True \
--append_sources_to_answer=True \
--append_sources_to_chat=False
I see normal results when logged in as user jon (password jon):
This is because, in the auth case, the auth file is loaded when you log in.
I see normal results too if I disable auth and just log in on the login page.
python generate.py \
--verbose=True \
--score_model=None \
--pre_load_embedding_model=False \
--base_model=gemini-pro \
--inference_server=google \
--guest_name=guest \
--auth_filename=auth.json \
--add_disk_models_to_ui=False \
--visible_ratings=True \
--append_sources_to_answer=True \
--append_sources_to_chat=False
If you changed auth while UI is up, you need to click on this button:
Thank you very much for your elaborate reply. It was not syncing because I wasn't persisting my state after logging in via the login tab. Now it's working.
But is there a way to persist the state for the guest user as well?
And how can we set parameters like --visible_expert_tab=False for specific users in auth.json?
Sure, you can do that now.
You can set in CLI some defaults, and then set for each user something else. If you keep "system tab" available, they can make things less visible but not more visible.
"side_bar_text": "on",
"doc_count_text": "on",
"submit_buttons_text": "on",
"visible_models_text": "on",
"chat_tab_text": "on",
"doc_selection_tab_text": "off",
"doc_view_tab_text": "off",
"chat_history_tab_text": "off",
"expert_tab_text": "off",
"models_tab_text": "off",
"system_tab_text": "on",
"tos_tab_text": "off",
"login_tab_text": "off",
"hosts_tab_text": "off"
Thank you very much @pseudotensor, I really appreciate your help for all the users.
For example, user u2 has access to the employees collection as well as the userdata collection, but the UI only shows userdata and mydata.