This issue occurs when using litellm (litellm==1.43.18) with Gemini hosted by Google in Danswer.
Based on my analysis, it is related to the following code snippets.
In the LLM configuration, api_base is "", an empty string rather than None. Because of this, execution enters the branch below and a wrong value is applied.
if (
api_base is not None
): # for cloudflare ai gateway - https://github.com/BerriAI/litellm/issues/4317
if custom_llm_provider == "gemini":
url = "{}/{}".format(api_base, endpoint)
auth_header = (
gemini_api_key # cloudflare expects api key as bearer token
)
else:
url = "{}:{}".format(api_base, endpoint)
if stream is True:
url = url + "?alt=sse"
Because in many places an empty string is accepted as the default parameter value, I suggest using
if api_base:
which checks for both None and the empty string.
Relevant log output
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 60, in map_httpcore_exceptions
yield
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 218, in handle_request
resp = self._pool.handle_request(req)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py", line 208, in handle_request
raise UnsupportedProtocol(
httpcore.UnsupportedProtocol: Request URL is missing an 'http://' or 'https://' protocol.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10372, in __next__
self.fetch_sync_stream()
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10477, in fetch_sync_stream
self.completion_stream = self.make_call(client=litellm.module_level_client)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/llms/vertex_httpx.py", line 555, in make_sync_call
response = client.post(api_base, headers=headers, data=data, stream=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/llms/custom_httpx/http_handler.py", line 275, in post
raise e
File "/usr/local/lib/python3.11/site-packages/litellm/llms/custom_httpx/http_handler.py", line 266, in post
response = self.client.send(req, stream=stream)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 908, in send
response = self._send_handling_auth(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 936, in _send_handling_auth
response = self._send_handling_redirects(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 973, in _send_handling_redirects
response = self._send_single_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 1009, in _send_single_request
response = transport.handle_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 217, in handle_request
with map_httpcore_exceptions():
File "/usr/local/lib/python3.11/contextlib.py", line 158, in __exit__
self.gen.throw(typ, value, traceback)
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 77, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.UnsupportedProtocol: Request URL is missing an 'http://' or 'https://' protocol.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/app/danswer/chat/process_message.py", line 666, in stream_chat_message_objects
for packet in answer.processed_streamed_output:
File "/app/danswer/llm/answering/answer.py", line 577, in processed_streamed_output
for processed_packet in _process_stream(output_generator):
File "/app/danswer/llm/answering/answer.py", line 509, in _process_stream
for message in stream:
File "/app/danswer/llm/answering/answer.py", line 423, in _raw_output_for_non_explicit_tool_calling_llms
yield from self._process_llm_stream(
File "/app/danswer/llm/answering/answer.py", line 329, in _process_llm_stream
for message in self.llm.stream(
File "/app/danswer/llm/chat_llm.py", line 364, in _stream_implementation
for part in response:
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10468, in __next__
raise exception_type(
^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 8334, in exception_type
raise e
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 8298, in exception_type
raise APIConnectionError(
litellm.exceptions.APIConnectionError: litellm.APIConnectionError: Request URL is missing an 'http://' or 'https://' protocol.
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 60, in map_httpcore_exceptions
yield
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 218, in handle_request
resp = self._pool.handle_request(req)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py", line 208, in handle_request
raise UnsupportedProtocol(
httpcore.UnsupportedProtocol: Request URL is missing an 'http://' or 'https://' protocol.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10372, in __next__
self.fetch_sync_stream()
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10477, in fetch_sync_stream
self.completion_stream = self.make_call(client=litellm.module_level_client)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/llms/vertex_httpx.py", line 555, in make_sync_call
response = client.post(api_base, headers=headers, data=data, stream=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/llms/custom_httpx/http_handler.py", line 275, in post
raise e
File "/usr/local/lib/python3.11/site-packages/litellm/llms/custom_httpx/http_handler.py", line 266, in post
response = self.client.send(req, stream=stream)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 908, in send
response = self._send_handling_auth(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 936, in _send_handling_auth
response = self._send_handling_redirects(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 973, in _send_handling_redirects
response = self._send_single_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 1009, in _send_single_request
response = transport.handle_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 217, in handle_request
with map_httpcore_exceptions():
File "/usr/local/lib/python3.11/contextlib.py", line 158, in __exit__
self.gen.throw(typ, value, traceback)
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 77, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.UnsupportedProtocol: Request URL is missing an 'http://' or 'https://' protocol.
url // this is the url specified by _get_token_and_url
https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-pro-latest:streamGenerateContent?key=AImyFakedKey4&alt=sse
True // api_base is not None
// api_base is an empty string but not None, so the code takes the wrong branch and composes an invalid URL
/streamGenerateContent?alt=sse
What happened?
This issue occurs when using litellm (litellm==1.43.18) with Gemini hosted by Google in Danswer. Based on my analysis, it is related to these snippets. In the LLM configuration, api_base is "", an empty string rather than None, so execution enters this branch and a wrong value is applied.
Because in many places an empty string is accepted as the default parameter value, I suggest using `if api_base:` instead,
which checks for both None and the empty string.
Relevant log output
Twitter / LinkedIn details
No response