# app.py
import os
from flask import Flask, request, jsonify
from lumina import init_lumina
import openai
app = Flask(__name__)
# OpenAI client, created at import time; raises KeyError immediately if
# OPENAI_API_KEY is unset (fail-fast on misconfiguration).
client = openai.OpenAI(api_key=os.environ["OPENAI_API_KEY"])
# Lumina tracing client; endpoint falls back to a local collector when
# LUMINA_ENDPOINT is not set. NOTE(review): the config keys ("endpoint",
# "service_name") are assumed from this call site — confirm against the
# lumina package's init_lumina documentation.
lumina = init_lumina({
"endpoint": os.environ.get("LUMINA_ENDPOINT", "http://localhost:9411/v1/traces"),
"service_name": "flask-app",
})
@app.route("/api/chat", methods=["POST"])
def chat():
message = request.json["message"]
response = lumina.trace_llm(
lambda: client.chat.completions.create(
model="gpt-4",
messages=[{"role": "user", "content": message}],
),
name="chat-completion",
system="openai",
prompt=message,
)
return jsonify({"content": response.choices[0].message.content})