Chainlit / literalai-typescript

https://docs.literalai.com
Apache License 2.0

feat(wrappers): add decoration wrapper #63

Closed Dam-Buty closed 1 week ago

Dam-Buty commented 3 weeks ago

To test this PR:

Changes
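Generations can now be created with an explicit step ID by passing it to the API client: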

await literalClient.api.createGeneration(generation, stepId)
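Instrumented OpenAI calls accept a Literal AI step ID, as well as tags and metadata, at the call level: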
import OpenAI from 'openai';
import { v4 as uuidv4 } from 'uuid';
import { LiteralClient } from '@literalai/client';

const client = new LiteralClient();

// Generate the Step ID ahead of time so it can be attached to the logged generation
const literalaiStepId = uuidv4();

const openai_ = new OpenAI();

const openai = client.instrumentation.openai({
  // the initial client needs to be passed when instrumenting
  client: openai_
});

await openai.chat.completions.create(
  {
    model: 'gpt-3.5-turbo',
    messages: [
      { role: 'system', content: 'You are a helpful assistant.' },
      { role: 'user', content: 'What is the capital of Canada?' }
    ]
  },
  {
    literalaiTags: ['tag3', 'tag4'],
    literalaiMetadata: { otherKey: 'otherValue' },
    // You can also specify a Step ID at the call level
    // When this generation is logged, it will be created with the given Step ID
    literalaiStepId
  }
);
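The LangChain integration picks up the same options from the invocation config, where cb is the Literal AI LangChain callback and model is any LangChain chat model: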

await model.invoke('Hello, how are you?', {
  callbacks: [cb],
  metadata: {
    key: 'value',
    // use literalaiStepId in the metadata to specify a Step ID
    literalaiStepId,
  },
  tags: ['tag1', 'tag2'],
});
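Through the Vercel AI SDK integration, generateText accepts the same options directly (here openai is the AI SDK provider and question is any prompt string):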
const { text } = await generateText({
  model: openai('gpt-3.5-turbo'),
  prompt: question,
  literalaiStepId,
  literalaiTags: ['tag1', 'tag2'],
  literalaiMetadata: { otherKey: 'otherValue' }
});
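Finally, the new decoration wrapper applies a step ID, metadata and tags to everything logged inside the wrapped callback. Because step IDs are unique, only the first generation logged in the wrapper is created with the provided step ID; later generations fall back to a random UUID: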
// Illustrative values for the decoration
const stepId = uuidv4();
const metadata = { otherKey: 'otherValue' };
const tags = ['tag1', 'tag2'];

await client.decorate({ metadata, tags, stepId }).wrap(async () => {
  // This generation will be logged with the provided stepId, metadata and tags
  const completion = await openai.chat.completions.create({
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say hello !' }]
  });

  // Because step IDs are unique, this second call will revert to the default behaviour of
  // assigning a random UUID to the generation when it is logged.
  const secondCompletion = await openai.chat.completions.create({
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say hello !' }]
  });
});