poe-platform / fastapi_poe

A helper library for writing Poe API bots using FastAPI
Apache License 2.0

API call error #36

Closed: pangzheng closed this issue 7 months ago

pangzheng commented 8 months ago
(poe) pangzheng@iZt4n86o3ror4rdm8mmpiyZ:~/fastapi_poe$ pip list

Package           Version
----------------- ------------
annotated-types   0.6.0
anyio             3.7.1
certifi           2023.7.22
click             8.1.7
exceptiongroup    1.1.3
fastapi           0.104.1
fastapi-poe       0.0.23
h11               0.14.0
httpcore          1.0.2
httpx             0.25.1
httpx-sse         0.3.1
idna              3.4
pip               20.0.2
pkg-resources     0.0.0
pydantic          2.5.0
pydantic-core     2.14.1
setuptools        44.0.0
sniffio           1.3.0
sse-starlette     1.6.5
starlette         0.27.0
typing-extensions 4.8.0
uvicorn           0.24.0.post1
wheel             0.34.2
import asyncio
from fastapi_poe.types import ProtocolMessage
from fastapi_poe.client import get_bot_response

# Replace <api_key> with your actual API key, ensuring it is a string.
api_key = "xxxx"

# Create an asynchronous function to encapsulate the async for loop
async def get_responses(api_key):
    message = ProtocolMessage(role="user", content="Hello world")
    async for partial in get_bot_response(messages=[message], bot_name="GPT-3.5-Turbo", api_key=api_key):
        print(partial)

# Run the event loop
# For Python 3.7 and newer
asyncio.run(get_responses(api_key))

# For Python 3.6 and older, you would typically do the following:
# loop = asyncio.get_event_loop()
# loop.run_until_complete(get_responses(api_key))
# loop.close()
$ python poe-demo.py

text='' raw_response={'type': 'text', 'text': '{"text": ""}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text='Hello' raw_response={'type': 'text', 'text': '{"text": "Hello"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text='!' raw_response={'type': 'text', 'text': '{"text": "!"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text=' How' raw_response={'type': 'text', 'text': '{"text": " How"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text=' can' raw_response={'type': 'text', 'text': '{"text": " can"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text=' I' raw_response={'type': 'text', 'text': '{"text": " I"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text=' assist' raw_response={'type': 'text', 'text': '{"text": " assist"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text=' you' raw_response={'type': 'text', 'text': '{"text": " you"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text=' today' raw_response={'type': 'text', 'text': '{"text": " today"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text='?' raw_response={'type': 'text', 'text': '{"text": "?"}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
text='' raw_response={'type': 'text', 'text': '{"text": ""}'} full_prompt="QueryRequest(version='1.0', type='query', query=[ProtocolMessage(role='user', content='Hello world', content_type='text/markdown', timestamp=0, message_id='', feedback=[], attachments=[])], user_id='', conversation_id='', message_id='', metadata='', api_key='<missing>', access_key='<missing>', temperature=0.7, skip_system_prompt=False, logit_bias={}, stop_sequences=[])" request_id=None is_suggested_reply=False is_replace_response=False
pangzheng commented 8 months ago

How should I deal with this error? Thank you.

krisyang1125 commented 7 months ago

Can you elaborate on what the error is? The output above seems to show that the streamed response is coming back properly.
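
If the concern is that each printed item is only a small chunk of text, the streamed partials can be concatenated into a single reply. Here is a minimal sketch, assuming the same placeholder API key and bot name as in the script above; each partial is a fastapi_poe PartialResponse whose text attribute holds the newly streamed piece:

import asyncio
from fastapi_poe.types import ProtocolMessage
from fastapi_poe.client import get_bot_response

api_key = "xxxx"  # placeholder; use your actual Poe API key

async def get_full_response(api_key):
    message = ProtocolMessage(role="user", content="Hello world")
    full_text = ""
    # Accumulate each streamed chunk instead of printing the raw objects
    async for partial in get_bot_response(messages=[message], bot_name="GPT-3.5-Turbo", api_key=api_key):
        full_text += partial.text
    print(full_text)

asyncio.run(get_full_response(api_key))

With the output shown above, this would print the assembled reply ("Hello! How can I assist you today?") rather than one object per chunk.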