run-llama / LlamaIndexTS

LlamaIndex in TypeScript
https://ts.llamaindex.ai

Groq class incorrectly uses OPENAI_API_KEY and requests are sent to OpenAI's API #943

Closed: EmreErdogan closed this issue 2 months ago

EmreErdogan commented 2 months ago

llamaindex version: 0.3.17
node version: v18.12.1

It looks like the Groq class extends OpenAI and depends on the OPENAI_API_KEY environment variable; a sketch of what I would expect it to configure instead follows the error output below.

When running the following code, I get the error below:

import fs from "node:fs/promises";

import { Document, Groq, Settings, VectorStoreIndex } from "llamaindex";

// load environment variables
import dotenv from "dotenv";
dotenv.config();

// Update llm to use Groq
Settings.llm = new Groq({
  apiKey: process.env.GROQ_API_KEY,
  model: "llama3-8b-8192",
});

async function main() {
  // Load essay from abramov.txt in Node
  const path = "node_modules/llamaindex/examples/abramov.txt";
  const essay = await fs.readFile(path, "utf-8");
  const document = new Document({ text: essay, id_: "essay" });

  // Load and index documents
  const index = await VectorStoreIndex.fromDocuments([document]);

  // get retriever
  const retriever = index.asRetriever();

  // Create a query engine
  const queryEngine = index.asQueryEngine({
    retriever,
  });

  const query = "What is the meaning of life?";

  // Query
  const response = await queryEngine.query({
    query,
  });

  // Log the response
  console.log(response.response);
}

main();

Error:

/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/llm/openai.js:438
            throw new Error("Set OpenAI Key in OPENAI_API_KEY env variable"); // Overriding OpenAI package's error message
                  ^

Error: Set OpenAI Key in OPENAI_API_KEY env variable

If I set the OPENAI_API_KEY environment variable, I get the following error instead:

/Users/emre/dev/ai/node-llamaindex-test1/node_modules/openai/src/error.ts:74
      return new AuthenticationError(status, error, message, headers);
             ^

AuthenticationError: 401 Incorrect API key provided: gsk_dWsK********************************************AD96. You can find your API key at https://platform.openai.com/account/api-keys.
    at Function.generate (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/openai/src/error.ts:74:14)
    at OpenAI.makeStatusError (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/openai/src/core.ts:397:21)
    at OpenAI.makeRequest (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/openai/src/core.ts:460:24)
    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
    at async OpenAIEmbedding.getOpenAIEmbedding (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/OpenAIEmbedding.js:100:26)
    at async OpenAIEmbedding.getTextEmbeddings (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/OpenAIEmbedding.js:111:16)
    at async batchEmbeddings (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/types.js:69:32)
    at async OpenAIEmbedding.getTextEmbeddingsBatch (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/types.js:51:16)
    at async OpenAIEmbedding.transform (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/types.js:55:28)
    at async VectorStoreIndex.getNodeEmbeddingResults (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:476:17)
    at async VectorStoreIndex.insertNodes (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:573:17)
    at async VectorStoreIndex.buildIndexFromNodes (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:488:9)
    at async VectorStoreIndex.init (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:438:13)
    at async VectorStoreIndex.fromDocuments (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:516:16)
    at main (/Users/emre/dev/ai/node-llamaindex-test1/groq.ts:22:17) {
  status: 401,
  headers: {
    'alt-svc': 'h3=":443"; ma=86400',
    'cf-cache-status': 'DYNAMIC',
    'cf-ray': '896301fc5d69051b-OTP',
    connection: 'keep-alive',
    'content-length': '306',
    'content-type': 'application/json; charset=utf-8',
    date: 'Wed, 19 Jun 2024 10:59:06 GMT',
    server: 'cloudflare',
    'set-cookie': '__cf_bm=HeRz9AxRsq72HqKjw4EqZbyk5dciWmKa6VT09877S.U-1718734646-1.0.1.1-0fKwNZXEjo7vSD_rNdIt3pWsNAgHV_VuvdC2I25EpJyAYbnGMhgxwmr3ljxCVpw.cgPOP1AXE7Tapj86PsFGkQ; path=/; expires=Wed, 19-Jun-24 11:29:06 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None, _cfuvid=QDcHb002EK2X3lXvd18lN6pBS6IHqfiMzmWdYn18no4-1718794746508-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None',
    'strict-transport-security': 'max-age=15724800; includeSubDomains',
    vary: 'Origin',
    'x-request-id': 'req_0601ed69f9a5f926f971f8466d09'
  },
  request_id: 'req_0601ed69f9a5f926f971f8466d09',
  error: {
    message: 'Incorrect API key provided: gsk_dWsK********************************************AD96. You can find your API key at https://platform.openai.com/account/api-keys.',
    type: 'invalid_request_error',
    param: null,
    code: 'invalid_api_key'
  },
  code: 'invalid_api_key',
  param: null,
  type: 'invalid_request_error'
}

It appears that the request was sent to OpenAI's API endpoints instead of Groq's.
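
For context, Groq exposes an OpenAI-compatible API, so the underlying openai client only needs Groq's base URL and a GROQ_API_KEY. Below is a minimal sketch using the raw openai package (not llamaindex) of roughly what I would expect the Groq class to configure internally; the base URL is Groq's OpenAI-compatible endpoint.

import OpenAI from "openai";

// Point the OpenAI SDK at Groq's OpenAI-compatible endpoint and use
// GROQ_API_KEY, instead of falling back to OPENAI_API_KEY / api.openai.com.
const groq = new OpenAI({
  apiKey: process.env.GROQ_API_KEY,
  baseURL: "https://api.groq.com/openai/v1",
});

async function demo() {
  const completion = await groq.chat.completions.create({
    model: "llama3-8b-8192",
    messages: [{ role: "user", content: "Say hello from Groq" }],
  });
  console.log(completion.choices[0].message.content);
}

demo();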

EmreErdogan commented 2 months ago

@himself65 Thanks for the patch, but I am still getting the following error:

/Users/emre/dev/ai/node-llamaindex-test1/node_modules/openai/src/error.ts:74
      return new AuthenticationError(status, error, message, headers);
             ^

AuthenticationError: 401 Incorrect API key provided: gsk_dWsK********************************************AD96. You can find your API key at https://platform.openai.com/account/api-keys.
    at Function.generate (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/openai/src/error.ts:74:14)
    at OpenAI.makeStatusError (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/openai/src/core.ts:397:21)
    at OpenAI.makeRequest (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/openai/src/core.ts:460:24)
    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
    at async OpenAIEmbedding.getOpenAIEmbedding (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/OpenAIEmbedding.js:106:26)
    at async OpenAIEmbedding.getTextEmbeddings (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/OpenAIEmbedding.js:117:16)
    at async batchEmbeddings (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/types.js:78:32)
    at async OpenAIEmbedding.getTextEmbeddingsBatch (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/types.js:53:16)
    at async OpenAIEmbedding.transform (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/embeddings/types.js:57:28)
    at async VectorStoreIndex.getNodeEmbeddingResults (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:476:17)
    at async VectorStoreIndex.insertNodes (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:573:17)
    at async VectorStoreIndex.buildIndexFromNodes (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:488:9)
    at async VectorStoreIndex.init (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:438:13)
    at async VectorStoreIndex.fromDocuments (/Users/emre/dev/ai/node-llamaindex-test1/node_modules/llamaindex/dist/cjs/indices/vectorStore/index.js:516:16)
    at main (/Users/emre/dev/ai/node-llamaindex-test1/groq.ts:22:17) {
  status: 401,
  headers: {
    'alt-svc': 'h3=":443"; ma=86400',
    'cf-cache-status': 'DYNAMIC',
    'cf-ray': '896301fc5d69051b-OTP',
    connection: 'keep-alive',
    'content-length': '306',
    'content-type': 'application/json; charset=utf-8',
    date: 'Wed, 19 Jun 2024 10:59:06 GMT',
    server: 'cloudflare',
    'set-cookie': '__cf_bm=HeRz9AxRsq72HqKjw4EqZbyk5dciWmKa6VT09877S.U-1718734646-1.0.1.1-0fKwNZXEjo7vSD_rNdIt3pWsNAgHV_VuvdC2I25EpJyAYbnGMhgxwmr3ljxCVpw.cgPOP1AXE7Tapj86PsFGkQ; path=/; expires=Wed, 19-Jun-24 11:29:06 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None, _cfuvid=QDcHb002EK2X3lXvd18lN6pBS6IHqfiMzmWdYn18no4-1718794746508-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None',
    'strict-transport-security': 'max-age=15724800; includeSubDomains',
    vary: 'Origin',
    'x-request-id': 'req_0601ed69f9a5f926f971f8466d09'
  },
  request_id: 'req_0601ed69f9a5f926f971f8466d09',
  error: {
    message: 'Incorrect API key provided: gsk_dWsK********************************************AD96. You can find your API key at https://platform.openai.com/account/api-keys.',
    type: 'invalid_request_error',
    param: null,
    code: 'invalid_api_key'
  },
  code: 'invalid_api_key',
  param: null,
  type: 'invalid_request_error'
}

Are we still hitting OpenAI's API endpoints instead of Groq's?
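
Looking at the stack trace, the 401 is raised from OpenAIEmbedding inside VectorStoreIndex.fromDocuments, i.e. the embedding step, which still defaults to OpenAI even when Settings.llm is Groq. Here is a minimal sketch of also overriding the embedding model, assuming HuggingFaceEmbedding is exported by this llamaindex version (any non-OpenAI embedding would do):

import { Groq, HuggingFaceEmbedding, Settings } from "llamaindex";

// LLM calls go to Groq, as before.
Settings.llm = new Groq({
  apiKey: process.env.GROQ_API_KEY,
  model: "llama3-8b-8192",
});

// Embeddings otherwise default to OpenAIEmbedding, which is where the 401
// in the stack trace above originates. HuggingFaceEmbedding is an assumption
// about the available exports; swap in whichever non-OpenAI embedding you use.
Settings.embedModel = new HuggingFaceEmbedding();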