joschan21 / quill

Quill - A Modern SaaS-Platform Built With Next.js 13

PineconeError: PineconeClient: Error calling query: PineconeError: PineconeClient: #6

Open · MrArabAi opened 11 months ago

MrArabAi commented 11 months ago

Has anyone had this error before? I get it when I try to send a message to the bot:

⨯ [PineconeError: PineconeClient: Error calling query: PineconeError: PineconeClient: Error calling queryRaw: FetchError: The request failed and the interceptors did not return an alternative response]

MrArabAi commented 11 months ago

Here is my core.ts:

import { db } from '@/db'
import { getKindeServerSession } from '@kinde-oss/kinde-auth-nextjs/server'
import {
  createUploadthing,
  type FileRouter,
} from 'uploadthing/next'

import { PDFLoader } from 'langchain/document_loaders/fs/pdf'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { PineconeStore } from 'langchain/vectorstores/pinecone'
import { getPineconeClient } from '@/lib/pinecone'
import { getUserSubscriptionPlan } from '@/lib/stripe'
import { PLANS } from '@/config/stripe'

const f = createUploadthing()

const middleware = async () => {
  const { getUser } = getKindeServerSession()
  const user = getUser()

  if (!user || !user.id) throw new Error('Unauthorized')

  const subscriptionPlan = await getUserSubscriptionPlan()

  return { subscriptionPlan, userId: user.id }
}

const onUploadComplete = async ({
  metadata,
  file,
}: {
  metadata: Awaited<ReturnType<typeof middleware>>
  file: { key: string; name: string; url: string }
}) => {
  const isFileExist = await db.file.findFirst({
    where: {
      key: file.key,
    },
  })

  if (isFileExist) return

  const createdFile = await db.file.create({
    data: {
      key: file.key,
      name: file.name,
      userId: metadata.userId,
      url: `https://uploadthing-prod.s3.us-west-2.amazonaws.com/${file.key}`,
      uploadStatus: 'PROCESSING',
    },
  })

  try {
    const response = await fetch(
      `https://uploadthing-prod.s3.us-west-2.amazonaws.com/${file.key}`
    )

    const blob = await response.blob()

    const loader = new PDFLoader(blob)

    const pageLevelDocs = await loader.load()

    const pagesAmt = pageLevelDocs.length

    const { subscriptionPlan } = metadata
    const { isSubscribed } = subscriptionPlan

    const isProExceeded =
      pagesAmt >
      PLANS.find((plan) => plan.name === 'Pro')!.pagesPerPdf
    const isFreeExceeded =
      pagesAmt >
      PLANS.find((plan) => plan.name === 'Free')!.pagesPerPdf

    if (
      (isSubscribed && isProExceeded) ||
      (!isSubscribed && isFreeExceeded)
    ) {
      await db.file.update({
        data: {
          uploadStatus: 'FAILED',
        },
        where: {
          id: createdFile.id,
        },
      })
    }

    // vectorize and index entire document
    const pinecone = await getPineconeClient()
    const pineconeIndex = pinecone.Index('chatpdf')

    const embeddings = new OpenAIEmbeddings({
      openAIApiKey: process.env.OPENAI_API_KEY,
    })

    await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
      pineconeIndex,
      namespace: createdFile.id,
    })

    await db.file.update({
      data: {
        uploadStatus: 'SUCCESS',
      },
      where: {
        id: createdFile.id,
      },
    })
  } catch (err) {
    await db.file.update({
      data: {
        uploadStatus: 'FAILED',
      },
      where: {
        id: createdFile.id,
      },
    })
  }
}

export const ourFileRouter = {
  freePlanUploader: f({ pdf: { maxFileSize: '4MB' } })
    .middleware(middleware)
    .onUploadComplete(onUploadComplete),
  proPlanUploader: f({ pdf: { maxFileSize: '16MB' } })
    .middleware(middleware)
    .onUploadComplete(onUploadComplete),
} satisfies FileRouter

export type OurFileRouter = typeof ourFileRouter

joschan21 commented 11 months ago

Found a related error that was caused by not defining the index; it looks to me like you did that, though. Might help anyway: https://github.com/mayooear/gpt4-pdf-chatbot-langchain/issues/40
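
For reference, a minimal sketch (not from the repo) of what "defining the index" means with the client setup used in this project; 'chatpdf' is the index name used in the core.ts above, so replace it with the index you created in Pinecone:

```ts
// the index has to be obtained from the client before it is handed to PineconeStore
const pinecone = await getPineconeClient()
const pineconeIndex = pinecone.Index('chatpdf') // replace with your own index name

await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
  pineconeIndex,
  namespace: createdFile.id,
})
```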

anand-mukul commented 11 months ago

PATH::\src\app\api\message\route.ts

import { db } from '@/db'
import { openai } from '@/lib/openai'
import { getPineconeClient } from '@/lib/pinecone'
import { SendMessageValidator } from '@/lib/validators/SendMessageValidator'
import { getKindeServerSession } from '@kinde-oss/kinde-auth-nextjs/server'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { PineconeStore } from 'langchain/vectorstores/pinecone'
import { NextRequest } from 'next/server'

import { OpenAIStream, StreamingTextResponse } from 'ai'

export const POST = async (req: NextRequest) => {
  // endpoint for asking a question to a pdf file

  const body = await req.json()

  const { getUser } = getKindeServerSession()
  const user = getUser()

  const { id: userId } = user

  if (!userId)
    return new Response('Unauthorized', { status: 401 })

  const { fileId, message } =
    SendMessageValidator.parse(body)

  const file = await db.file.findFirst({
    where: {
      id: fileId,
      userId,
    },
  })

  if (!file)
    return new Response('Not found', { status: 404 })

  await db.message.create({
    data: {
      text: message,
      isUserMessage: true,
      userId,
      fileId,
    },
  })

  // 1: vectorize message
  const embeddings = new OpenAIEmbeddings({
    openAIApiKey: process.env.OPENAI_API_KEY,
  })

  const pinecone = await getPineconeClient() // Must add this line; also check this in ...\src\app\api\uploadthing\core.ts
  const pineconeIndex = pinecone.Index('YourIndexName') // Replace this with your Pinecone index name

  const vectorStore = await PineconeStore.fromExistingIndex(
    embeddings,
    {
       //@ts-ignore
      pineconeIndex,
      namespace: file.id,
    }
  )

  const results = await vectorStore.similaritySearch(
    message,
    4
  )

  const prevMessages = await db.message.findMany({
    where: {
      fileId,
    },
    orderBy: {
      createdAt: 'asc',
    },
    take: 6,
  })

  const formattedPrevMessages = prevMessages.map((msg) => ({
    role: msg.isUserMessage
      ? ('user' as const)
      : ('assistant' as const),
    content: msg.text,
  }))

  const response = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    temperature: 0,
    stream: true,
    messages: [
      {
        role: 'system',
        content:
          'Use the following pieces of context (or previous conversaton if needed) to answer the users question in markdown format.',
      },
      {
        role: 'user',
        content: `Use the following pieces of context (or previous conversaton if needed) to answer the users question in markdown format. \nIf you don't know the answer, just say that you don't know, don't try to make up an answer.

  \n----------------\n

  PREVIOUS CONVERSATION:
  ${formattedPrevMessages.map((message) => {
    if (message.role === 'user')
      return `User: ${message.content}\n`
    return `Assistant: ${message.content}\n`
  })}

  \n----------------\n

  CONTEXT:
  ${results.map((r) => r.pageContent).join('\n\n')}

  USER INPUT: ${message}`,
      },
    ],
  })

  const stream = OpenAIStream(response, {
    async onCompletion(completion) {
      await db.message.create({
        data: {
          text: completion,
          isUserMessage: false,
          fileId,
          userId,
        },
      })
    },
  })

  return new StreamingTextResponse(stream)
}

PATH::\src\lib\pinecone.ts

import { PineconeClient } from '@pinecone-database/pinecone'

export const getPineconeClient = async () => {
  const client = new PineconeClient()

  await client.init({
    apiKey: process.env.PINECONE_API_KEY!,
    environment: 'asia-southeast1-gcp-free', // Replace this with your Pinecone environment; check the Pinecone website for it.
  })

  return client
}
susanta96 commented 10 months ago

Maybe because the Pinecone "gcp-starter" environment doesn't support namespaces.

KLGH24 commented 10 months ago

Maybe because the Pinecone "gcp-starter" environment doesn't support namespaces.

Has this been confirmed, @susanta96?

susanta96 commented 10 months ago

Yes.

varunagarwal007 commented 10 months ago

Then what is the fix if the environment is "gcp-starter"?

willbrandin commented 10 months ago

@varunagarwal007 To work around the namespace limitation with gcp-starter, you need to add a metadata field for the file id and search by that instead of using a namespace.

// uploadthing/core.ts
let pageLevelDocs = await loader.load();

pageLevelDocs = pageLevelDocs.map((doc) => {
  doc.metadata = {
    ...doc.metadata,
    fileId: createdFile.id, // map over the docs and add the file id
  };
  return doc;
});

const pageAmt = pageLevelDocs.length;

const pinecone = await getPineconeClient();
const pineconeIndex = pinecone.Index("quill-chat");
// message/route.ts
const pineconeIndex = pinecone.Index("quill-chat");

const vectorStore = await PineconeStore.fromExistingIndex(embeddings, {
  pineconeIndex,
  filter: { fileId }, // filter by the file id
});

const results = await vectorStore.similaritySearch(message, 4);
koushikyemula commented 10 months ago

Where should I paste this message/route code in the actual file? @willbrandin

willbrandin commented 10 months ago

@BlitZSenpai In /uploadthing/core.ts you only need to map over the page-level docs. I have mine on line 66, but that could vary for you. In /message/route.ts the code should all be the same as in @joschan21's video; the only difference is that you replace namespace with filter: { fileId }.
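
In concrete terms, assuming the route.ts posted earlier in this thread, the change amounts to something like the sketch below; note that the fileId key in the filter has to match the metadata key added to the docs in core.ts:

```ts
// before: namespace, which the gcp-starter environment does not support
const vectorStore = await PineconeStore.fromExistingIndex(embeddings, {
  pineconeIndex,
  namespace: file.id,
})

// after: filter on the fileId metadata added to each doc at upload time
const vectorStore = await PineconeStore.fromExistingIndex(embeddings, {
  pineconeIndex,
  filter: { fileId },
})
```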

koushikyemula commented 10 months ago

@willbrandin Yes, but I'm getting this error:


```
pinecone.d.ts(8, 5): The expected type comes from property 'pineconeIndex' which is declared here on type 'PineconeLibArgs'
(property) PineconeLibArgs.pineconeIndex: Index<RecordMetadata>
```

at /core.ts on the line

```
await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
  pineconeIndex,
  namespace: createdFile.id,
});
```

willbrandin commented 10 months ago

Yes, you are using namespace, which is not supported on the free plan. You need to use a metadata filter instead.
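
Concretely, a sketch of the upload-side replacement following the approach posted above: drop the namespace option entirely and rely on the fileId metadata added to each page-level doc before indexing.

```ts
// add the file id as metadata on every page-level doc (as in the earlier snippet)
pageLevelDocs.forEach((doc) => {
  doc.metadata = { ...doc.metadata, fileId: createdFile.id }
})

// index without a namespace; filtering happens later at query time
// via filter: { fileId } in message/route.ts
await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
  pineconeIndex,
})
```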

koushikyemula commented 10 months ago

@willbrandin Can you give me a replacement for those three lines? I replaced namespace with filters and the pineconeIndex line still has the same error.

koushikyemula commented 10 months ago

import { db } from "@/db";
import { getKindeServerSession } from "@kinde-oss/kinde-auth-nextjs/server";
import { createUploadthing, type FileRouter } from "uploadthing/next";
import { PDFLoader } from "langchain/document_loaders/fs/pdf";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { PineconeStore } from "langchain/vectorstores/pinecone";
import { getPineconeClient } from "@/lib/pinecone";

const f = createUploadthing();

export const ourFileRouter = {
  pdfUploader: f({ pdf: { maxFileSize: "4MB" } })
    .middleware(async ({ req }) => {
      const { getUser } = getKindeServerSession();
      const user = await getUser();

      if (!user || !user.id) throw new Error("Unauthorized");

      return { userId: user.id };
    })
    .onUploadComplete(async ({ metadata, file }) => {
      const createdFile = await db.file.create({
        data: {
          key: file.key,
          name: file.name,
          userId: metadata.userId,
          url: `https://uploadthing-prod.s3.us-west-2.amazonaws.com/${file.key}`,
          uploadStatus: "PROCESSING",
        },
      });
      try {
        const response = await fetch(
          `https://uploadthing-prod.s3.us-west-2.amazonaws.com/${file.key}`
        );
        const blob = await response.blob();

        const loader = new PDFLoader(blob);

        const pageLevelDocs = await loader.load();
        const pageAmount = pageLevelDocs.length;

        const pinecone = await getPineconeClient();
        const pineconeIndex = pinecone.Index("otto");

        const embeddings = new OpenAIEmbeddings({
          openAIApiKey: process.env.OPENAI_API_KEY,
        });

        await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
          pineconeIndex, // error at this line
          filters: createdFile.id,
        });

        await db.file.update({
          data: {
            uploadStatus: "SUCCESS",
          },
          where: {
            id: createdFile.id,
          },
        });
      } catch (err) {
        await db.file.update({
          data: {
            uploadStatus: "FAILED",
          },
          where: {
            id: createdFile.id,
          },
        });
      }
    }),
} satisfies FileRouter;

export type OurFileRouter = typeof ourFileRouter;

This is my core.ts and I'm getting the error "pinecone.d.ts(8, 5): The expected type comes from property 'pineconeIndex' which is declared here on type 'PineconeLibArgs' (property) PineconeLibArgs.pineconeIndex: Index<RecordMetadata>" at the pineconeIndex line I marked above with a comment.
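
If the only remaining complaint is on pineconeIndex, it is likely a type-level mismatch between the installed @pinecone-database/pinecone package and the Index type this langchain release expects, rather than a problem with the options object; the route.ts posted earlier in this thread suppresses the same mismatch with //@ts-ignore. A hedged sketch:

```ts
await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
  // @ts-ignore - the Index type exported by the installed Pinecone SDK and the one
  // expected by PineconeLibArgs can disagree across versions; this mirrors the
  // //@ts-ignore used in the route.ts posted earlier in this thread
  pineconeIndex,
})
```

Also note that the workaround described above does not pass a filters option to fromDocuments at all; the file id goes into each doc's metadata, and the filter is only applied at query time in message/route.ts.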

Omsoni06 commented 7 months ago

@willbrandin Yes, but I'm getting this error:

```
pinecone.d.ts(8, 5): The expected type comes from property 'pineconeIndex' which is declared here on type 'PineconeLibArgs'
(property) PineconeLibArgs.pineconeIndex: Index<RecordMetadata>
```

at /core.ts on the line

```
await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
  pineconeIndex,
  namespace: createdFile.id,
});
```

I am also getting this error. Can someone please help me solve it?

Sujan1714 commented 4 months ago

[screenshot of the error attached]

Why does this error occur?