Hey @loneystar1983! I'm here to help you with any bugs, questions, or contributions. Let's squash those bugs together!
To resolve the ImportError for ChatMessage in SimpleLLMHandler, you need to update the import statement to reflect the new location of ChatMessage. The correct import statement should be:

```python
from llama_index.core.base.llms.types import ChatMessage
```
Here's the updated SimpleLLMHandler class with the correct import:
```python
from typing import Any, Dict, List, Optional, cast

from llama_index.legacy.callbacks.base_handler import BaseCallbackHandler
from llama_index.legacy.callbacks.schema import CBEventType, EventPayload


class SimpleLLMHandler(BaseCallbackHandler):
    """Callback handler for printing llms inputs/outputs."""

    def __init__(self) -> None:
        super().__init__(event_starts_to_ignore=[], event_ends_to_ignore=[])

    def start_trace(self, trace_id: Optional[str] = None) -> None:
        return

    def end_trace(
        self,
        trace_id: Optional[str] = None,
        trace_map: Optional[Dict[str, List[str]]] = None,
    ) -> None:
        return

    def _print_llm_event(self, payload: dict) -> None:
        from llama_index.core.base.llms.types import ChatMessage

        if EventPayload.PROMPT in payload:
            prompt = str(payload.get(EventPayload.PROMPT))
            completion = str(payload.get(EventPayload.COMPLETION))
            print(f"** Prompt: **\n{prompt}")
            print("*" * 50)
            print(f"** Completion: **\n{completion}")
            print("*" * 50)
            print("\n")
        elif EventPayload.MESSAGES in payload:
            messages = cast(List[ChatMessage], payload.get(EventPayload.MESSAGES, []))
            messages_str = "\n".join([str(x) for x in messages])
            response = str(payload.get(EventPayload.RESPONSE))
            print(f"** Messages: **\n{messages_str}")
            print("*" * 50)
            print(f"** Response: **\n{response}")
            print("*" * 50)
            print("\n")

    def on_event_start(
        self,
        event_type: CBEventType,
        payload: Optional[Dict[str, Any]] = None,
        event_id: str = "",
        parent_id: str = "",
        **kwargs: Any,
    ) -> str:
        return event_id

    def on_event_end(
        self,
        event_type: CBEventType,
        payload: Optional[Dict[str, Any]] = None,
        event_id: str = "",
        **kwargs: Any,
    ) -> None:
        """Count the LLM or Embedding tokens as needed."""
        if event_type == CBEventType.LLM and payload is not None:
            self._print_llm_event(payload)
```
This should resolve the import error you are encountering [1][2].
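If you need a workaround before the library itself is patched, one option is to skip `set_global_handler(eval_mode="simple")` (which installs the built-in handler containing the old import) and attach a handler to the callback manager yourself. This is a minimal sketch, assuming the patched `SimpleLLMHandler` class above is available in your session and that you substitute your own model name and credentials:

```python
from llama_index.core import Settings
from llama_index.core.callbacks import CallbackManager
from llama_index.llms.openai import OpenAI

# Use the patched SimpleLLMHandler defined above instead of the built-in one
# installed by set_global_handler(eval_mode="simple").
Settings.callback_manager = CallbackManager([SimpleLLMHandler()])

# Pass the callback manager explicitly so this LLM instance definitely uses it.
llm = OpenAI(
    model="gpt-3.5-turbo",  # placeholder model name
    api_key="xxx",          # placeholder credentials
    callback_manager=Settings.callback_manager,
)
llm.complete("how is the weather?")
```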
To continue talking to Dosu, mention @dosu.
Bug Description
SimpleLLMHandler._print_llm_event contains the following code: `from llama_index.core.llms import ChatMessage`, but ChatMessage has been moved to `llama_index.core.base.llms.types`,
so an error occurs: ImportError: cannot import name 'ChatMessage' from 'llama_index.core.llms' (unknown location)
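As a quick sanity check, the new import path can be verified directly; a minimal sketch (assuming llama-index-core 0.11.x is installed) might look like this:

```python
# ChatMessage now lives under llama_index.core.base.llms.types.
from llama_index.core.base.llms.types import ChatMessage

msg = ChatMessage(role="user", content="how is the weather?")
print(msg)  # prints something like: user: how is the weather?
```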
Version
0.11.14
Steps to Reproduce
```python
from llama_index.core.callbacks.global_handlers import set_global_handler

# Set the global handler
set_global_handler(eval_mode="simple")

from llama_index.llms.openai import OpenAI

llm = OpenAI(model='gpt-35-turbo-1106', api_key='xxx', api_base='xxxx')
llm.complete("how is the weather?")
```
Relevant Logs/Tracebacks