model ,err := openai.New(openai.WithToken(openapikey.OpenApikey), openai.WithModel("gpt-3.5-turbo-instruct"))
if err != nil {
log.Fatal(err)
}
completion, err := llms.GenerateFromSinglePrompt(ctx,
model,
"What is the capital of Japan?",
llms.WithTemperature(0.5),
llms.WithStopWords([]string{"\n"}))
2024/08/19 17:07:58 API returned unexpected status code: 404: This is not a chat model and thus not supported in the v1/chat/completions endpoint. Did you mean to use v1/completions?
exit status 1
The chat/completions endpoint is hit as shown below.
The chat/completions endpoint is hit as shown below.