// Basic usage patterns for the Lumina SDK
// Example: trace a single OpenAI chat completion through Lumina.
// traceLLM invokes the callback itself so latency and failures are captured.
const response = await lumina.traceLLM(
  () =>
    openai.chat.completions.create({
      model: 'gpt-4',
      messages: [{ role: 'user', content: 'Hello!' }],
    }),
  {
    name: 'chat-completion', // span name shown in traces
    system: 'openai',        // provider identifier
    prompt: 'Hello!',        // prompt text recorded alongside the span
  }
);
// Example: attach custom metadata to a traced LLM call.
// NOTE(review): presumably userId/sessionId let the backend group spans
// per user/session — confirm against the Lumina docs.
await lumina.traceLLM(() => llm.generate(prompt), {
  name: 'chat',
  metadata: {
    userId: 'user-123',
    sessionId: 'session-456',
  },
});
// Example: error handling around a traced call. An exception thrown by the
// callback is recorded on the span by traceLLM before it propagates here;
// we log and rethrow so callers still observe the failure.
try {
  await lumina.traceLLM(() => llm.generate(prompt), { name: 'chat' });
} catch (error) {
  console.error('LLM call failed:', error);
  throw error; // preserve the original error (and its stack) for callers
}