Using OpenTelemetry Directly
For providers that are not officially supported, instrument your calls with the OpenTelemetry API directly:
import { trace, SpanStatusCode } from '@opentelemetry/api';

const tracer = trace.getTracer('my-custom-provider');

const span = tracer.startSpan('llm_call');
try {
  const response = await customLLM.generate(prompt);
  // Record model and usage so the backend can aggregate token counts and spend.
  span.setAttribute('model', 'custom-model');
  span.setAttribute('prompt_tokens', response.usage.prompt_tokens);
  span.setAttribute('completion_tokens', response.usage.completion_tokens);
  span.setAttribute('cost_usd', calculateCost(response.usage));
  // Span status must use the SpanStatusCode enum — bare strings like 'OK'
  // are not valid status codes and are ignored by the SDK.
  span.setStatus({ code: SpanStatusCode.OK });
} catch (error) {
  span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
  throw error;
} finally {
  // Always end the span, even when the call fails.
  span.end();
}
Custom Cost Calculation
Define a helper that converts token usage into an estimated USD cost:
/**
 * Estimate the USD cost of an LLM call from its token usage.
 *
 * @param {{prompt_tokens: number, completion_tokens: number}} usage - token counts
 * @param {{promptPerToken?: number, completionPerToken?: number}} [rates]
 *   Optional per-token USD rates; the defaults match the original hard-coded
 *   pricing of $0.003 per 1K prompt tokens and $0.015 per 1K completion tokens.
 * @returns {number} total estimated cost in USD
 */
function calculateCost(usage, rates = {}) {
  const { promptPerToken = 0.000003, completionPerToken = 0.000015 } = rates;
  const promptCost = usage.prompt_tokens * promptPerToken;
  const completionCost = usage.completion_tokens * completionPerToken;
  return promptCost + completionCost;
}
Wrapper Function
Create a reusable wrapper so every call is traced the same way:
/**
 * Run an LLM call inside an OpenTelemetry span, recording the model name,
 * token usage, and estimated cost, then re-throwing any error after marking
 * the span as failed. Requires `SpanStatusCode` from '@opentelemetry/api'
 * and a module-level `tracer` to be in scope.
 *
 * @param {() => Promise<{model: string, usage: object}>} fn - the LLM call to trace
 * @param {{name?: string}} [options] - optional span options; `name` overrides
 *   the default span name 'llm_call'
 * @returns {Promise<object>} the response resolved by `fn`
 * @throws re-throws whatever `fn` rejects with, after recording it on the span
 */
async function traceCustomLLM(fn, options = {}) {
  const span = tracer.startSpan(options.name || 'llm_call');
  try {
    const response = await fn();
    span.setAttribute('model', response.model);
    span.setAttribute('prompt_tokens', response.usage.prompt_tokens);
    span.setAttribute('completion_tokens', response.usage.completion_tokens);
    span.setAttribute('cost_usd', calculateCost(response.usage));
    // Use the SpanStatusCode enum — string codes like 'OK' are not valid
    // and are silently dropped by the OpenTelemetry SDK.
    span.setStatus({ code: SpanStatusCode.OK });
    return response;
  } catch (error) {
    span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
    throw error;
  } finally {
    span.end();
  }
}
// Usage example: wrap a provider call so it is traced and costed.
const traceOptions = { name: 'custom-call' };
await traceCustomLLM(() => customLLM.generate(prompt), traceOptions);