import openai
import json
from pydantic import BaseModel
import instructor
client = openai.OpenAI(
api_key="sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", # can be anything
base_url="http://127.0.0.1:8000/v1", # NOTE: Replace with IP address and port of your llama-cpp-python server
)
# Enables `response_model`
client = instructor.patch(client=client)
class UserDetail(BaseModel):
name: str
age: int
user = client.chat.completions.create(
model="gpt-3.5-turbo",
response_model=UserDetail,
messages=[
{"role": "user", "content": "Extract Jason is 25 years old"},
],
)
assert isinstance(user, UserDetail)
assert user.name == "Jason"
assert user.age == 25
print(user)
but when I using Text Classification example with code
import openai
import json
from pydantic import BaseModel
import enum
client = openai.OpenAI(
api_key="sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", # can be anything
base_url="http://127.0.0.1:8000/v1", # NOTE: Replace with IP address and port of your llama-cpp-python server
)
class Labels(str, enum.Enum):
"""Enumeration for single-label text classification."""
SPAM = "spam"
NOT_SPAM = "not_spam"
class SinglePrediction(BaseModel):
"""
Class for a single class label prediction.
"""
class_label: Labels
def classify(data: str) -> SinglePrediction:
"""Perform single-label classification on the input text."""
return client.chat.completions.create(
model="gpt-3.5-turbo-0613",
response_model=SinglePrediction,
messages=[
{
"role": "user",
"content": f"Classify the following text: {data}",
},
],
) # type: ignore
prediction = classify("Hello there I'm a Nigerian prince and I want to give you money")
assert prediction.class_label == Labels.SPAM
print(prediction)
with bug
root@ubuntu:/app/inside_container/llama_python_demo# python3 client.py
Traceback (most recent call last):
File "/app/inside_container/llama_python_demo/client.py", line 41, in <module>
prediction = classify("Hello there I'm a Nigerian prince and I want to give you money")
File "/app/inside_container/llama_python_demo/client.py", line 29, in classify
return client.chat.completions.create(
File "/usr/local/lib/python3.10/dist-packages/openai/_utils/_utils.py", line 274, in wrapper
return func(*args, **kwargs)
TypeError: Completions.create() got an unexpected keyword argument 'response_model'
I am following this tutorial to use function call with qwen2:0.5b https://github.com/abetlen/llama-cpp-python/blob/main/examples/notebooks/Functions.ipynb
I used this command to start a serve inside container
then I can successfully using following code
with output
but when I using Text Classification example with code
with bug
Can anyone help me look at it?