promplate / demo

A simple Python server with @promplate. Best practice for prompt engineering applications.
https://demo.promplate.dev/redoc
MIT License
1 star, 0 forks source link

UnicodeEncodeError: 'ascii' codec can't encode characters in position 4-5: ordinal not in range(128) #193

Open CNSeniorious000 opened 2 hours ago

CNSeniorious000 commented 2 hours ago

Client receives:

image

Server logs:

Traceback (most recent call last):
  File "/app/src/utils/response.py", line 12, in make_response
    first_chunk = await anext(it)
                  ^^^^^^^^^^^^^^^
  File "/app/src/routes/run.py", line 94, in make_stream
    async for c in node.astream(data.context, find_llm(data.model).generate, **config):
  File "/app/.venv/lib/python3.12/site-packages/promplate/chain/node.py", line 275, in astream
    async for _ in self._astream(ChainContext(context, self.context), generate, callbacks, **config):
  File "/app/.venv/lib/python3.12/site-packages/promplate/chain/node.py", line 361, in _astream
    async for result in accumulate_any(generate(prompt, **(self.run_config | config))):
  File "/app/.venv/lib/python3.12/site-packages/promplate/chain/utils.py", line 34, in async_accumulate
    async for delta in async_iterable:
  File "/app/src/utils/prefill.py", line 42, in wrapper
    async for chunk in func(messages, **config):
  File "/app/.venv/lib/python3.12/site-packages/promplate_trace/langfuse.py", line 137, in _
    async for delta in gen:
  File "/app/.venv/lib/python3.12/site-packages/promplate_trace/langsmith.py", line 257, in wrapper
    async for delta in f(messages, **config):
  File "/app/src/utils/llm/groq.py", line 33, in generate
    async for token in generate(prompt, **config):
  File "/app/.venv/lib/python3.12/site-packages/promplate/llm/openai/v1.py", line 138, in __call__
    stream = await self._aclient.chat.completions.create(**config)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/openai/resources/chat/completions.py", line 1490, in create
    return await self._post(
           ^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/openai/_base_client.py", line 1831, in post
    return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/openai/_base_client.py", line 1525, in request
    return await self._request(
           ^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/logfire/_internal/integrations/llm_providers/llm_provider.py", line 136, in instrumented_llm_request_async
    return await original_request_method(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/openai/_base_client.py", line 1556, in _request
    request = self._build_request(options, retries_taken=retries_taken)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/openai/_base_client.py", line 459, in _build_request
    headers = self._build_headers(options, retries_taken=retries_taken)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/openai/_base_client.py", line 410, in _build_headers
    headers = httpx.Headers(headers_dict)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/httpx/_models.py", line 78, in __init__
    normalize_header_value(v, encoding),
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.12/site-packages/httpx/_utils.py", line 53, in normalize_header_value
    return value.encode(encoding or "ascii")
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
UnicodeEncodeError: 'ascii' codec can't encode characters in position 4-5: ordinal not in range(128)
linear[bot] commented 2 hours ago

PROM-33 UnicodeEncodeError: 'ascii' codec can't encode characters in position 4-5: ordinal not in range(128)