Skip to main content
Integrate Lumina with LangChain applications.

Installation

npm install @uselumina/sdk langchain

Basic Usage

import { initLumina } from '@uselumina/sdk';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { HumanMessage } from 'langchain/schema';

// Initialize the Lumina tracer once at application startup.
const lumina = initLumina({
  service_name: 'langchain-app',
  endpoint: 'http://localhost:9411/v1/traces',
});

const chat = new ChatOpenAI();

// Wrap the model call in traceLLM so Lumina records it as an LLM span.
const reply = await lumina.traceLLM(
  () => chat.call([new HumanMessage('Hello!')]),
  { system: 'openai', name: 'langchain-chat' }
);

Chain Tracing

import { LLMChain } from 'langchain/chains';
import { PromptTemplate } from 'langchain/prompts';

// Build a single-variable prompt and wire it to the existing chat model.
const topicPrompt = PromptTemplate.fromTemplate('Tell me about {topic}');
const topicChain = new LLMChain({ prompt: topicPrompt, llm: chat });

// Trace the whole chain invocation as one LLM span.
await lumina.traceLLM(
  () => topicChain.call({ topic: 'AI' }),
  { system: 'openai', name: 'langchain-chain' }
);

Agent Tracing

import { initializeAgentExecutorWithOptions } from 'langchain/agents';

// `tools` (an array of LangChain Tool instances) must be defined beforehand.
const executor = await initializeAgentExecutorWithOptions(
  tools,
  chat,
  { agentType: 'chat-conversational-react-description' }
);

// Outer span groups the full agent run; the inner traceLLM records the
// model-backed executor call nested inside it.
await lumina.trace('langchain-agent', async () => {
  return await lumina.traceLLM(
    // FIX: the input contains an apostrophe, so the original single-quoted
    // string ('What's ...') was a syntax error — use double quotes instead.
    () => executor.call({ input: "What's the weather?" }),
    { name: 'agent-execution', system: 'openai' }
  );
});