[X] 1. I have searched related issues but cannot get the expected help.
[X] 2. The bug has not been fixed in the latest version.
[X] 3. Please note that if the bug-related issue you submitted lacks corresponding environment info and a minimal reproducible demo, it will be challenging for us to reproduce and resolve the issue, reducing the likelihood of receiving feedback.
Describe the bug
INFO: 192.168.137.9:50376 - "POST /v1/chat/completions HTTP/1.1" 500 Internal Server Error
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/uvicorn/protocols/http/httptools_impl.py", line 399, in run_asgi
result = await app( # type: ignore[func-returns-value]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/uvicorn/middleware/proxy_headers.py", line 70, in __call__
return await self.app(scope, receive, send)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/fastapi/applications.py", line 1054, in __call__
await super().__call__(scope, receive, send)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/applications.py", line 123, in __call__
await self.middleware_stack(scope, receive, send)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/middleware/errors.py", line 186, in call
raise exc
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/middleware/errors.py", line 164, in call
await self.app(scope, receive, _send)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/middleware/exceptions.py", line 62, in call
await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
raise exc
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
await app(scope, receive, sender)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 758, in __call__
await self.middleware_stack(scope, receive, send)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 778, in app
await route.handle(scope, receive, send)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 299, in handle
await self.app(scope, receive, send)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 79, in app
await wrap_app_handling_exceptions(app, request)(scope, receive, send)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
raise exc
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
await app(scope, receive, sender)
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 74, in app
response = await func(request)
^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/fastapi/routing.py", line 278, in app
raw_response = await run_endpoint_function(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/fastapi/routing.py", line 191, in run_endpoint_function
return await dependant.call(*values)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/sglang/srt/server.py", line 157, in openai_v1_chat_completions
return await v1_chat_completions(tokenizer_manager, raw_request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/sglang/srt/openai_api/adapter.py", line 720, in v1_chat_completions
adapted_request, request = v1_chat_generate_request(all_requests, tokenizer_manager)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/sglang/srt/openai_api/adapter.py", line 602, in v1_chat_generate_request
prompt = tokenizer_manager.tokenizer.apply_chat_template(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/transformers/tokenization_utils_base.py", line 1833, in apply_chat_template
rendered_chat = compiled_template.render(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/jinja2/environment.py", line 1301, in render
self.environment.handle_exception()
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/jinja2/environment.py", line 936, in handle_exception
raise rewrite_traceback_stack(source=source)
File "<template>", line 1, in top-level template code
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/jinja2/sandbox.py", line 393, in __call__
return context.call(obj, args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/transformers/tokenization_utils_base.py", line 1914, in raise_exception
raise TemplateError(message)
jinja2.exceptions.TemplateError: System role not supported
Checklist
Describe the bug
INFO: 192.168.137.9:50376 - "POST /v1/chat/completions HTTP/1.1" 500 Internal Server Error ERROR: Exception in ASGI application Traceback (most recent call last): File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/uvicorn/protocols/http/httptools_impl.py", line 399, in run_asgi result = await app( # type: ignore[func-returns-value] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/uvicorn/middleware/proxy_headers.py", line 70, in call return await self.app(scope, receive, send) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/fastapi/applications.py", line 1054, in call await super().call(scope, receive, send) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/applications.py", line 123, in call await self.middleware_stack(scope, receive, send) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/middleware/errors.py", line 186, in call raise exc File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/middleware/errors.py", line 164, in call await self.app(scope, receive, _send) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/middleware/exceptions.py", line 62, in call await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app raise exc File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app await app(scope, receive, sender) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 758, in call await self.middleware_stack(scope, receive, send) File 
"/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 778, in app await route.handle(scope, receive, send) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 299, in handle await self.app(scope, receive, send) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 79, in app await wrap_app_handling_exceptions(app, request)(scope, receive, send) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app raise exc File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app await app(scope, receive, sender) File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/starlette/routing.py", line 74, in app response = await func(request) ^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/fastapi/routing.py", line 278, in app raw_response = await run_endpoint_function( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/fastapi/routing.py", line 191, in run_endpoint_function return await dependant.call(*values) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/sglang/srt/server.py", line 157, in openai_v1_chat_completions return await v1_chat_completions(tokenizer_manager, raw_request) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/sglang/srt/openai_api/adapter.py", line 720, in v1_chat_completions adapted_request, request = v1_chat_generate_request(all_requests, tokenizer_manager) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File 
"/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/sglang/srt/openai_api/adapter.py", line 602, in v1_chat_generate_request prompt = tokenizer_manager.tokenizer.apply_chat_template( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/transformers/tokenization_utils_base.py", line 1833, in apply_chat_template rendered_chat = compiled_template.render( ^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/jinja2/environment.py", line 1301, in render self.environment.handle_exception() File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/jinja2/environment.py", line 936, in handle_exception raise rewrite_traceback_stack(source=source) File "", line 1, in top-level template code File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/jinja2/sandbox.py", line 393, in call return context.call(obj, args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gtlm/anaconda3/envs/llama_factory/lib/python3.11/site-packages/transformers/tokenization_utils_base.py", line 1914, in raise_exception raise TemplateError(message) jinja2.exceptions.TemplateError: System role not supported
Reproduction
python -m sglang.launch_server --model-path gemma-2-2b-it --port 8081 --mem-fraction-static 0.7 --host 0.0.0.0
Environment