Open coolbaluk opened 1 month ago
Attention: Patch coverage is 80.00000%,
with 12 lines
in your changes missing coverage. Please review.
Project coverage is 97.75%. Comparing base (
774fc9d
) to head (322bd92
). Report is 15 commits behind head on master.
Files | Patch % | Lines |
---|---|---|
run.go | 80.00% | 6 Missing and 6 partials :warning: |
:umbrella: View full report in Codecov by Sentry.
:loudspeaker: Have feedback on the report? Share it here.
May I ask a question?
How do I continue the conversation in the next turn using the last thread_id?
// Continue an existing conversation: start a streaming run on a
// previously created thread (identified by its thread_id), so the
// assistant answers with the thread's full prior context.
cc := openai.NewClientWithConfig(config)
stream, err := cc.CreateRunStreaming(context.Background(), "thread_VEPTeIWj1umjdFyUd0Aj4lb2", openai.RunRequest{
	AssistantID: "asst_IuAlZbLkuIgcky26QPB2TQy0",
})
// Never discard this error: on failure stream is nil and the
// deferred Close below (and Recv in the loop) would panic.
if err != nil {
	t.Fatalf("CreateRunStreaming: %v", err)
}
//stream, _ := cc.CreateThreadAndStream(context.Background(), openai.CreateThreadAndRunRequest{
//	RunRequest: openai.RunRequest{
//		AssistantID: "asst_IuAlZbLkuIgcky26QPB2TQy0",
//	},
//	Thread: openai.ThreadRequest{
//		Messages: []openai.ThreadMessage{
//			{
//				Role:    openai.ThreadMessageRoleUser,
//				Content: "我刚问了什么?",
//			},
//		},
//	},
//})
defer stream.Close()
for {
	resp, err := stream.Recv()
	if errors.Is(err, io.EOF) {
		break
	}
	// Any non-EOF error must terminate the loop: resp is not valid
	// after a failed Recv, and retrying would loop forever.
	if err != nil {
		t.Fatalf("Recv: %v", err)
	}
	t.Log("thread_id", resp.ID)
	for _, content := range resp.Delta.Content {
		t.Log(content.Text.Value)
	}
}
May I ask a question? How do I continue the conversation in the next turn using the last thread_id?
cc := openai.NewClientWithConfig(config) stream, _ := cc.CreateRunStreaming(context.Background(), "thread_VEPTeIWj1umjdFyUd0Aj4lb2", openai.RunRequest{ AssistantID: "asst_IuAlZbLkuIgcky26QPB2TQy0", }) //stream, _ := cc.CreateThreadAndStream(context.Background(), openai.CreateThreadAndRunRequest{ // RunRequest: openai.RunRequest{ // AssistantID: "asst_IuAlZbLkuIgcky26QPB2TQy0", // }, // Thread: openai.ThreadRequest{ // Messages: []openai.ThreadMessage{ // { // Role: openai.ThreadMessageRoleUser, // Content: "我刚问了什么?", // }, // }, // }, //}) defer stream.Close() for { resp, err := stream.Recv() if errors.Is(err, io.EOF) { break } t.Log("thread_id", resp.ID) for _, content := range resp.Delta.Content { t.Log(content.Text.Value) } }
There's an API for inserting message in a thread. Example below:
// Append the user's message to the existing thread, then start a new
// streaming run on that same thread so the assistant replies with the
// conversation's prior context.
_, err = a.client.CreateMessage(ctx, threadId, openai.MessageRequest{
Role: openai.ChatMessageRoleUser,
Content: messageText,
})
if err != nil {
logger.Error("failed to create message", zap.Error(err))
return err
}
// Reusing threadId (rather than creating a new thread) is what makes
// the run see the earlier messages.
outStream, err = a.client.CreateRunStreaming(ctx, threadId, openai.RunRequest{
AssistantID: assistantId,
})
// NOTE(review): the err from CreateRunStreaming is assigned but not
// checked within this snippet — confirm the caller handles it.
May I ask a question? How do I continue the conversation in the next turn using the last thread_id?
cc := openai.NewClientWithConfig(config) stream, _ := cc.CreateRunStreaming(context.Background(), "thread_VEPTeIWj1umjdFyUd0Aj4lb2", openai.RunRequest{ AssistantID: "asst_IuAlZbLkuIgcky26QPB2TQy0", }) //stream, _ := cc.CreateThreadAndStream(context.Background(), openai.CreateThreadAndRunRequest{ // RunRequest: openai.RunRequest{ // AssistantID: "asst_IuAlZbLkuIgcky26QPB2TQy0", // }, // Thread: openai.ThreadRequest{ // Messages: []openai.ThreadMessage{ // { // Role: openai.ThreadMessageRoleUser, // Content: "我刚问了什么?", // }, // }, // }, //}) defer stream.Close() for { resp, err := stream.Recv() if errors.Is(err, io.EOF) { break } t.Log("thread_id", resp.ID) for _, content := range resp.Delta.Content { t.Log(content.Text.Value) } }
There's an API for inserting message in a thread. Example below:
_, err = a.client.CreateMessage(ctx, threadId, openai.MessageRequest{ Role: openai.ChatMessageRoleUser, Content: messageText, }) if err != nil { logger.Error("failed to create message", zap.Error(err)) return err } outStream, err = a.client.CreateRunStreaming(ctx, threadId, openai.RunRequest{ AssistantID: assistantId, })
thanks very much
合并进去了吗?现在很需要这个
Supersedes #731
Based on the original fork of @tanzyy96 just brought up to date.
We've been running this for a couple of weeks and has served us well.
Example usage: