token-js / token.js

Integrate 200+ LLMs with one TypeScript SDK using OpenAI's format.
https://docs.tokenjs.ai/
MIT License
111 stars 11 forks source link

fix: handle Gemini content optional field #47

Status: Closed — opened by sam-goldman; closed 3 months ago.

sam-goldman commented 3 months ago

Here is the script that caused this error:

// Loads provider API keys (e.g. GEMINI_API_KEY) from a local .env file.
import * as dotenv from 'dotenv'
// OpenAI SDK is imported only for its chat message parameter types.
import { OpenAI } from 'openai'

// Project-local TokenJS client under test (unified multi-provider LLM SDK).
import { TokenJS } from '../src'
dotenv.config()

// Single-turn conversation: ask the model for a small JSON object so the
// json_object response format below has something to produce.
const messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[] = [
  {
    role: 'user',
    content:
      'Generate a JSON that represents a person, with name and age. Give a concise answer.',
  },
]

/**
 * Reproduction for token-js issue #47: a Gemini chat completion requested
 * with `response_format: { type: 'json_object' }` crashed because the
 * response `content` field can be absent.
 *
 * @returns the completion choices, so a caller or test can inspect them
 *          (previously the result was only logged and discarded).
 */
const callLLM = async () => {
  const tokenjs = new TokenJS()
  const result = await tokenjs.chat.completions.create({
    provider: 'gemini',
    model: 'gemini-1.5-pro',
    messages,
    response_format: { type: 'json_object' },
  })

  console.log(result.choices)
  return result.choices
}

// Handle rejection explicitly: the original bare `callLLM()` left a floating
// promise, so the very error this script reproduces surfaced only as an
// unhandled promise rejection.
callLLM().catch((err: unknown) => {
  console.error(err)
})