Skip to main content

Installation

npm install @uselumina/sdk ai

Basic Usage

import { initLumina } from '@uselumina/sdk';
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Initialize the Lumina tracer once at module scope.
// NOTE(review): the endpoint path (/v1/traces) suggests an OTLP-style HTTP
// collector on localhost — confirm the expected protocol against the SDK docs.
const lumina = initLumina({
  endpoint: 'http://localhost:9411/v1/traces',
  service_name: 'vercel-ai-app',
});

// Wrap the Vercel AI SDK call so the LLM request is recorded under the
// given span name/system. NOTE(review): presumably traceLLM resolves with
// the wrapped callback's own result (the streamText return value) — verify.
const result = await lumina.traceLLM(
  () => streamText({ model: openai('gpt-4'), prompt: 'Hello!' }),
  { name: 'vercel-ai-stream', system: 'openai' }
);

Next.js Route Handler

// app/api/chat/route.ts
import { streamText } from 'ai';
import { initLumina } from '@uselumina/sdk';

const lumina = initLumina({
  endpoint: process.env.LUMINA_ENDPOINT,
  service_name: 'nextjs-chat',
});

export async function POST(req: Request) {
  const { messages } = await req.json();

  const result = await lumina.traceLLM(
    () => streamText({ model: openai('gpt-4'), messages }),
    { name: 'chat-api', system: 'openai' }
  );

  return result.toDataStreamResponse();
}