parakeet-nest / parakeet

🦜🪺 Parakeet is a GoLang library, made to simplify the development of small generative AI applications with Ollama 🦙.
https://parakeet-nest.github.io/parakeet/

Use OpenAI with Parakeet #20

Closed: k33g closed this issue 2 months ago

codefromthecrypt commented 2 months ago

Cool. I was able to convert my example so that it works with either llama-server or Ollama, using OpenAI's API!

package main

import (
    "fmt"
    "github.com/parakeet-nest/parakeet/completion"
    "github.com/parakeet-nest/parakeet/llm"
    "log"
)

// llama-server --log-disable --hf-repo Qwen/Qwen2-7B-Instruct-GGUF --hf-file qwen2-7b-instruct-q4_k_m.gguf
var llamaServer = struct {
    url, model string
}{"http://localhost:8080/v1", "ignored"}

// ollama serve; ollama pull qwen2:7b-instruct
var ollama = struct {
    url, model string
}{"http://localhost:11434/v1", "qwen2:7b-instruct"}

func main() {
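    // Pick the backend here: swap these two lines for ollama.url / ollama.model
    // to target the Ollama endpoint instead of llama-server.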
    url := llamaServer.url
    model := llamaServer.model
    question := llm.OpenAIQuery{
        Model: model,
        Messages: []llm.Message{
            {Role: "user", Content: "Which ocean contains the falkland islands?"},
        },
    }

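    // First turn: send the chat request to the OpenAI-compatible endpoint.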
    answer, err := completion.ChatWithOpenAI(url, question)
    if err != nil {
        log.Fatal("😡:", err)
    }
    response := answer.Choices[0].Message
    fmt.Println(response.Content)

    fmt.Println()

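    // Second turn: replay the history plus the assistant's answer so the model
    // knows that "the capital" refers to the Falkland Islands.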
    secondQuestion := llm.OpenAIQuery{
        Model: model,
        Messages: append(question.Messages,
            llm.Message{Role: response.Role, Content: response.Content},
            llm.Message{Role: "user", Content: "What’s the capital?"},
        ),
    }

    answer, err = completion.ChatWithOpenAI(url, secondQuestion)
    if err != nil {
        log.Fatal("😡:", err)
    }
    response = answer.Choices[0].Message
    fmt.Println(response.Content)

    fmt.Println()

}
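
Pointing the same program at Ollama instead of llama-server only needs the two assignments at the top of main to change (a minimal sketch; it assumes ollama serve is running locally and the qwen2:7b-instruct model has been pulled):

    url := ollama.url     // http://localhost:11434/v1
    model := ollama.model // qwen2:7b-instruct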