Closed sdecoder closed 1 month ago
Also, I believe this error is not from OpenWebUI: I copied the OpenAI API sample code and ran it directly, and the same error occurred.
response = client.completions.create( model="default", prompt="The capital of France is", temperature=0, max_tokens=32, ) print(response)
response = client.chat.completions.create( model="default", messages=[ {"role": "system", "content": "You are a helpful AI assistant"}, {"role": "user", "content": "List 3 countries and their capitals."}, ], temperature=0, max_tokens=64, ) print(response)
response = client.embeddings.create( model="default", input="How are you today", ) print(response)
By the way, I also tried the latest release, v0.3.0, and the same error persists.
ERROR: Exception in ASGI application Traceback (most recent call last): File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/uvicorn/protocols/http/httptools_impl.py", line 401, in run_asgi result = await app( # type: ignore[func-returns-value] File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 70, in call return await self.app(scope, receive, send) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in call await super().call(scope, receive, send) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/applications.py", line 113, in call await self.middleware_stack(scope, receive, send) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/middleware/errors.py", line 187, in call raise exc File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/middleware/errors.py", line 165, in call await self.app(scope, receive, _send) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 62, in call await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/_exception_handler.py", line 62, in wrapped_app raise exc File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/_exception_handler.py", line 51, in wrapped_app await app(scope, receive, sender) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/routing.py", line 715, in call await self.middleware_stack(scope, receive, send) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/routing.py", line 735, in app await route.handle(scope, receive, send) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/routing.py", line 288, in handle await self.app(scope, receive, send) File 
"/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/routing.py", line 76, in app await wrap_app_handling_exceptions(app, request)(scope, receive, send) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/_exception_handler.py", line 62, in wrapped_app raise exc File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/_exception_handler.py", line 51, in wrapped_app await app(scope, receive, sender) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/starlette/routing.py", line 73, in app response = await f(request) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/fastapi/routing.py", line 297, in app raw_response = await run_endpoint_function( File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/fastapi/routing.py", line 210, in run_endpoint_function return await dependant.call(*values) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/sglang/srt/server.py", line 213, in openai_v1_chat_completions return await v1_chat_completions(tokenizer_manager, raw_request) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/sglang/srt/openai_api/adapter.py", line 1046, in v1_chat_completions adapted_request, request = v1_chat_generate_request(all_requests, tokenizer_manager) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/sglang/srt/openai_api/adapter.py", line 860, in v1_chat_generate_request prompt_ids = tokenizer_manager.tokenizer.apply_chat_template( File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 1844, in apply_chat_template rendered_chat = compiled_template.render( File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/jinja2/environment.py", line 1304, in render self.environment.handle_exception() File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/jinja2/environment.py", line 939, in handle_exception raise rewrite_traceback_stack(source=source) File "", line 1, in 
top-level template code File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/jinja2/sandbox.py", line 394, in call return context.call(obj, args, **kwargs) File "/root/anaconda3/envs/sglang/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 1925, in raise_exception raise TemplateError(message) jinja2.exceptions.TemplateError: System role not supported
3.2 If I use the "normal" chat mode, that is, talking to Gemma2 without uploading a document, everything works well.
Can anyone give me some hint? Thank you so much for any help.