openai / openai-go

The official Go library for the OpenAI API
Apache License 2.0
99 stars 7 forks source link

NewStreaming Assistants not working (no events) #26

Closed bartek5186 closed 2 weeks ago

bartek5186 commented 3 weeks ago

My implementation of the RunStream method:

// @TODO no working stream yet
func (as *OpenAIAutoService) RunStream(assistantName string, threadID string, message string) (*ssestream.Stream[openai.AssistantStreamEvent], error) {

    client := openai.NewClient(
        option.WithAPIKey(Apikey),
        option.WithHeader("OpenAI-Organization", Organization),
        option.WithHeader("Content-Type", "application/json"),
        option.WithHeader("OpenAI-Beta", "assistants=v2"),
        option.WithJSONSet("response_format", map[string]string{"type": "json_object"}),
    )

    stream := client.Beta.Threads.Runs.NewStreaming(
        context.TODO(),
        threadID,
        openai.BetaThreadRunNewParams{
            AssistantID: openai.F(AssistantID),
            //MaxCompletionTokens: openai.F(int64(256)),
            //MaxPromptTokens:     openai.F(int64(256)),
            Model:             openai.F(openai.ChatModelGPT4oMini),
            ParallelToolCalls: openai.F(true),
            Temperature:       openai.F(0.500000),
            TopP:              openai.F(1.000000)
        },
    )

    if stream.Err() != nil {
        fmt.Printf("error: %v", stream.Err().Error())
        return nil, stream.Err()
    }

    as.StreamEventHandler(stream)

    return stream, nil

}

And the stream handler:

// StreamEventHandler drains the assistant event stream, logging every event
// it receives, and closes the stream when done. Errors encountered during
// streaming are inspected after the loop; if the error is an *openai.Error,
// the serialized HTTP request/response are dumped for debugging.
func (as *OpenAIAutoService) StreamEventHandler(stream *ssestream.Stream[openai.AssistantStreamEvent]) {
    defer func() {
        log.Println("Closing stream...")
        if err := stream.Close(); err != nil {
            log.Printf("Error closing stream: %v", err)
        }
    }()

    log.Println("Open stream...")

    // Next must be called before Current: before the first Next the stream
    // holds only a zero-value event, so we do not touch Current here.
    for stream.Next() {

        event := stream.Current()

        log.Printf("Event received: %s", event.Event)

        switch event.Event {
        case openai.AssistantStreamEventEventError:
            fmt.Printf("Error: %v\n", event.Data)
        case openai.AssistantStreamEventEventThreadMessageCreated:
            fmt.Printf("Created: %v\n", event.Data)
        default:
            log.Printf("unhandled event: %v", event.Event)
        }
    }

    // Check stream errors after the loop ends; an API error carries the
    // raw HTTP exchange, which is the most useful thing to log.
    if err := stream.Err(); err != nil {
        var apierr *openai.Error
        if errors.As(err, &apierr) {
            println(string(apierr.DumpRequest(true)))  // Prints the serialized HTTP request
            println(string(apierr.DumpResponse(true))) // Prints the serialized HTTP response
        }
        log.Printf("Stream error: %v", err)
    } else {
        log.Println("Stream ended without errors")
    }
}
yjp20 commented 3 weeks ago

Hey! We can reproduce this and will ship a fix soon.

Just FYI, we expect Chat Completions to be relatively stable during this alpha period, but the endpoints under the Beta resource are likely to go through more API changes.

bartek5186 commented 3 weeks ago

Hey! We can reproduce this and will ship a fix soon.

Just FYI, we expect Chat Completions to be relatively stable during this alpha period, but the endpoints under the Beta resource are likely to go through more API changes.

Can I help with something?

yjp20 commented 2 weeks ago

Hey! This should now be resolved with the new release! let us know if that works for you

bartek5186 commented 2 weeks ago

Yes, Stream on Assistant receive events now.