What happened?
When I try to describe a picture using the bakllava model (served by Ollama), a lot of errors appear in the console log. When I call Ollama directly, there are no errors; the failures only occur when the image goes through LiteLLM. chat/completions works perfectly with PDF documents and plain text, but not with pictures. Can you help me, please?
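For reference, this is roughly the request that triggers the errors — a minimal sketch, where the host, port, and image path are placeholders and the message shape is the standard OpenAI vision format that LiteLLM accepts:

```python
import base64
import requests

# Read a local image and embed it as a data URL (file name is a placeholder).
with open("photo.jpg", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode("utf-8")

payload = {
    "model": "bakllava",
    "stream": True,
    "messages": [
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "Describe this picture."},
                {
                    "type": "image_url",
                    "image_url": {"url": f"data:image/jpeg;base64,{image_b64}"},
                },
            ],
        }
    ],
}

# Host and port of the LiteLLM proxy are placeholders.
resp = requests.post("http://localhost:8000/chat/completions", json=payload, stream=True)
for line in resp.iter_lines():
    if line:
        print(line.decode("utf-8"))
```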
Relevant log output
Traceback (most recent call last):
File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/llms/ollama.py", line 270, in ollama_async_streaming
raise OllamaError(
litellm.llms.ollama.OllamaError: b'{"error":"illegal base64 data at input byte 4"}'
Traceback (most recent call last):
File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/proxy/proxy_server.py", line 2596, in async_data_generator
async for chunk in response:
File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/llms/ollama.py", line 284, in ollama_async_streaming
raise e
File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/llms/ollama.py", line 270, in ollama_async_streaming
raise OllamaError(
litellm.llms.ollama.OllamaError: b'{"error":"illegal base64 data at input byte 4"}'
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/responses.py", line 264, in __call__
await wrap(partial(self.listen_for_disconnect, receive))
File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/responses.py", line 260, in wrap
await func()
File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/responses.py", line 237, in listen_for_disconnect
message = await receive()
File "/appli/litellm/venv/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 564, in receive
await self.message_event.wait()
File "/usr/lib/python3.10/asyncio/locks.py", line 214, in wait
await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 780981cb1fc0
During handling of the above exception, another exception occurred:
+ Exception Group Traceback (most recent call last):
| File "/appli/litellm/venv/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 428, in run_asgi
| result = await app( # type: ignore[func-returns-value]
| File "/appli/litellm/venv/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 78, in __call__
| return await self.app(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
| await super().__call__(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/applications.py", line 123, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 186, in __call__
| raise exc
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 164, in __call__
| await self.app(scope, receive, _send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 83, in __call__
| await self.app(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 62, in __call__
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/routing.py", line 758, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/routing.py", line 778, in app
| await route.handle(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/routing.py", line 299, in handle
| await self.app(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/routing.py", line 79, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/routing.py", line 77, in app
| await response(scope, receive, send)
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/responses.py", line 257, in __call__
| async with anyio.create_task_group() as task_group:
| File "/appli/litellm/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 678, in __aexit__
| raise BaseExceptionGroup(
| exceptiongroup.ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/proxy/proxy_server.py", line 2596, in async_data_generator
| async for chunk in response:
| File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/llms/ollama.py", line 284, in ollama_async_streaming
| raise e
| File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/llms/ollama.py", line 270, in ollama_async_streaming
| raise OllamaError(
| litellm.llms.ollama.OllamaError: b'{"error":"illegal base64 data at input byte 4"}'
|
| During handling of the above exception, another exception occurred:
|
| Traceback (most recent call last):
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/responses.py", line 260, in wrap
| await func()
| File "/appli/litellm/venv/lib/python3.10/site-packages/starlette/responses.py", line 249, in stream_response
| async for chunk in self.body_iterator:
| File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/proxy/proxy_server.py", line 2628, in async_data_generator
| proxy_exception = ProxyException(
| File "/appli/litellm/venv/lib/python3.10/site-packages/litellm/proxy/proxy_server.py", line 202, in __init__
| "No healthy deployment available" in self.message
| TypeError: a bytes-like object is required, not 'str'
+------------------------------------
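A note on the first error: "illegal base64 data at input byte 4" comes from Ollama's Go-side base64 decoder. In a data URL such as data:image/jpeg;base64,..., bytes 0 through 3 ('d', 'a', 't', 'a') are all valid base64 characters, and byte 4 is the ':' — so it looks as if the proxy is forwarding the whole data URL to Ollama instead of just the raw base64 payload. A minimal sketch of the stripping I would expect (a hypothetical helper, not LiteLLM's actual code):

```python
def to_raw_base64(image_url: str) -> str:
    """Strip a data-URL wrapper so only raw base64 reaches Ollama."""
    # "data:image/jpeg;base64,<payload>" -> "<payload>"
    if image_url.startswith("data:") and "," in image_url:
        return image_url.split(",", 1)[1]
    return image_url  # already raw base64 (or a plain URL)
```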
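The final TypeError looks like a separate str-vs-bytes issue: the OllamaError message is the raw response body (bytes), but ProxyException.__init__ runs a str substring check ("No healthy deployment available" in self.message) against it, which masks the real error. A defensive decode — again only a sketch, not the actual proxy_server.py code — would avoid that:

```python
def safe_message(message) -> str:
    """Normalize an exception message to str before substring checks."""
    if isinstance(message, bytes):
        return message.decode("utf-8", errors="replace")
    return str(message)

# e.g. inside the exception handler:
# if "No healthy deployment available" in safe_message(self.message): ...
```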