from langfuse import observe
# drop-in wrapper adds OpenTelemetry tracing to OpenAI
# many other llm/agent integrations are available
from langfuse.openai import openai
@observe()  # decorate any function; all nested calls are auto-linked
def handle_request(text: str) -> str:
    """Summarize *text* in one sentence using the OpenAI chat API.

    The ``langfuse.openai`` drop-in client plus the ``@observe`` decorator
    attach OpenTelemetry tracing to this call automatically.

    Args:
        text: The user-supplied text to summarize.

    Returns:
        The model's one-sentence summary (content of the first choice).
    """
    res = openai.chat.completions.create(
        model="gpt-5",
        messages=[
            {"role": "system", "content": "Summarize in one sentence."},
            {"role": "user", "content": text},
        ],
    )
    # The API returns a list of choices; only the first is used here.
    return res.choices[0].message.content
from langfuse import observe
# drop-in wrapper adds OpenTelemetry tracing to OpenAI
# many other llm/agent integrations are available
from langfuse.openai import openai
# NOTE(review): this is a verbatim duplicate of the earlier handle_request
# definition and silently shadows it at import time — confirm whether one
# copy should be removed.
@observe()  # decorate any function; all nested calls are auto-linked
def handle_request(text: str) -> str:
    """Summarize *text* in one sentence using the OpenAI chat API.

    The ``langfuse.openai`` drop-in client plus the ``@observe`` decorator
    attach OpenTelemetry tracing to this call automatically.

    Args:
        text: The user-supplied text to summarize.

    Returns:
        The model's one-sentence summary (content of the first choice).
    """
    res = openai.chat.completions.create(
        model="gpt-5",
        messages=[
            {"role": "system", "content": "Summarize in one sentence."},
            {"role": "user", "content": text},
        ],
    )
    # The API returns a list of choices; only the first is used here.
    return res.choices[0].message.content