tmc / langchaingo

LangChain for Go, the easiest way to write LLM-based programs in Go
https://tmc.github.io/langchaingo/
MIT License

Using a proxy to get an OpenAI response returns "malformed HTTP response", how to fix? #760

Open Eumenides1 opened 6 months ago

Eumenides1 commented 6 months ago

This code returns "https://ai-yyds.com/v1/chat/completions": malformed HTTP response, but I can get a response with Postman. Why?

package main

import (
    "context"
    "flag"
    "fmt"
    "log"
    "net/http"
    "net/http/httputil"
    "net/url"

    "github.com/tmc/langchaingo/llms"
    "github.com/tmc/langchaingo/llms/openai"
)

var flagDebugHTTP = flag.Bool("debug-http", true, "enable debugging of HTTP requests and responses")

// DebugTransport wraps the HTTP proxy setup and request/response debugging.
type DebugTransport struct {
    ProxyURL *url.URL
}

// RoundTrip executes a single HTTP transaction and prints the request/response details.
func (d *DebugTransport) RoundTrip(req *http.Request) (*http.Response, error) {
    // Dump the outgoing request to the console.
    if *flagDebugHTTP {
        dump, err := httputil.DumpRequestOut(req, true)
        if err != nil {
            log.Fatalf("Error dumping request: %v", err)
        }
        fmt.Printf("%s\n\n", dump)
    }

    // Configure the proxy and send the request.
    proxy := http.ProxyURL(d.ProxyURL)
    transport := &http.Transport{Proxy: proxy}
    return transport.RoundTrip(req)
}

func main() {
    // Set up the proxy.
    proxyUrl, err := url.Parse("https://ai-yyds.com")
    if err != nil {
        log.Fatalf("proxy url error %v\n", err)
    }

    // Use the DebugTransport as the client's transport.
    httpClient := &http.Client{
        Transport: &DebugTransport{
            ProxyURL: proxyUrl,
        },
    }

    flag.Parse()
    var opts []openai.Option
    if *flagDebugHTTP {
        // Use the custom HTTP client.
        opts = append(opts, openai.WithHTTPClient(httpClient))
        // opts = append(opts, openai.WithModel("gpt-4-1106-preview"))
    }

    ctx := context.Background()
    llm, err := openai.New(opts...)
    if err != nil {
        panic(err)
    }
    prompt := "What would be a good company name for a company that makes colorful socks?"
    completion, err := llms.GenerateFromSinglePrompt(ctx,
        llm,
        prompt,
        llms.WithTemperature(0.8),
    )
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(completion)
}
CrazyWr commented 5 months ago

Maybe you should check your proxy service; it works when I try this with a local proxy service.
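
For reference, a minimal sketch of that suggestion. In Go, a "malformed HTTP response" error often shows up when the URL configured via http.ProxyURL is not actually an HTTP proxy, so the sketch below points the transport at a hypothetical local proxy on 127.0.0.1:7890 (replace with whatever proxy you really run). The commented-out openai.WithBaseURL line reflects an assumption: if https://ai-yyds.com is an OpenAI-compatible relay endpoint rather than a proxy, it should be set as the client's base URL instead of as a proxy.

package main

import (
    "context"
    "fmt"
    "log"
    "net/http"
    "net/url"

    "github.com/tmc/langchaingo/llms"
    "github.com/tmc/langchaingo/llms/openai"
)

func main() {
    // Hypothetical local HTTP proxy address; substitute your own proxy.
    proxyURL, err := url.Parse("http://127.0.0.1:7890")
    if err != nil {
        log.Fatalf("proxy url error: %v", err)
    }

    // Route all client traffic through the local proxy.
    httpClient := &http.Client{
        Transport: &http.Transport{Proxy: http.ProxyURL(proxyURL)},
    }

    llm, err := openai.New(
        openai.WithHTTPClient(httpClient),
        // Assumption: if https://ai-yyds.com is an OpenAI-compatible relay,
        // configure it as the base URL instead of as a proxy:
        // openai.WithBaseURL("https://ai-yyds.com/v1"),
    )
    if err != nil {
        log.Fatal(err)
    }

    completion, err := llms.GenerateFromSinglePrompt(context.Background(), llm,
        "What would be a good company name for a company that makes colorful socks?",
        llms.WithTemperature(0.8),
    )
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(completion)
}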