# Integrate Flowlines SDK (Python)
Integrates Flowlines observability SDK into Python LLM applications. Use when adding Flowlines telemetry, instrumenting LLM providers, or setting up OpenTelemetry-based LLM monitoring.
npx skill4agent add flowlines-ai/skills integrate-flowlines-sdk-python

## Installation

pip install flowlines

# Single provider
pip install flowlines[openai]
# Multiple providers
pip install flowlines[openai,anthropic]
# All supported providers
pip install flowlines[all]

Supported provider extras: openai, anthropic, bedrock, cohere, google-generativeai, vertexai, together, pinecone, chromadb, qdrant, langchain, llamaindex, mcp.

## Using an existing TracerProvider

from flowlines import Flowlines
flowlines = Flowlines(api_key="<FLOWLINES_API_KEY>")

If your application already has its own TracerProvider, pass has_external_otel=True and attach the Flowlines span processor to that TracerProvider yourself:

from flowlines import Flowlines
from opentelemetry.sdk.trace import TracerProvider
flowlines = Flowlines(api_key="<FLOWLINES_API_KEY>", has_external_otel=True)
provider = TracerProvider()
# Add the Flowlines span processor to the existing provider
processor = flowlines.create_span_processor()
provider.add_span_processor(processor)
# Instrument providers using the Flowlines instrumentor registry
for instrumentor in flowlines.get_instrumentors():
    instrumentor.instrument(tracer_provider=provider)

Use create_span_processor() and get_instrumentors() whenever you manage your own provider. If Traceloop is already initialized in the application, pass has_traceloop=True instead:

from flowlines import Flowlines
flowlines = Flowlines(api_key="<FLOWLINES_API_KEY>", has_traceloop=True)

Notes: when no external TracerProvider is involved, create Flowlines() before provider clients such as OpenAI() or Anthropic(). Calling Flowlines() a second time raises RuntimeError, and setting both has_external_otel and has_traceloop to True raises ValueError.

## Tagging spans with user_id via context()

The context() manager accepts user_id, session_id, and agent_id:

with flowlines.context(user_id="user-42", session_id="sess-abc", agent_id="agent-1"):
    client.chat.completions.create(...)  # this span gets user_id, session_id, and agent_id

session_id and agent_id are optional:

with flowlines.context(user_id="user-42"):
    client.chat.completions.create(...)

For imperative (non-context-manager) use:

token = Flowlines.set_context(user_id="user-42", session_id="sess-abc", agent_id="agent-1")
try:
client.chat.completions.create(...)
finally:
    Flowlines.clear_context(token)

set_context() and clear_context() are static methods on Flowlines; prefer the flowlines.context() manager when possible. Populate user_id, session_id, and agent_id from real application values — for example request.user.id and a conversation thread_id:

with flowlines.context(user_id=request.user.id, session_id=thread_id):
    ...

If real values for user_id, session_id, or agent_id are not yet available, use explicit placeholders and mark them for replacement:

with flowlines.context(
user_id="anonymous", # TODO: replace with actual user identifier
session_id=f"sess-{uuid.uuid4().hex[:8]}", # TODO: replace with actual session/conversation ID
agent_id="my-agent", # TODO: replace with actual agent identifier
):
    ...

## Constructor reference

Note that session_id and agent_id are tagged through context(), not the constructor.

Flowlines(
api_key: str, # Required. The Flowlines API key.
endpoint: str = "https://ingest.flowlines.ai", # Backend URL.
has_external_otel: bool = False, # True if project has its own TracerProvider.
has_traceloop: bool = False, # True if Traceloop is already initialized.
verbose: bool = False, # True to enable debug logging to stderr.
)

| Method / attribute | Description |
|---|---|
| `Flowlines(...)` | Constructor. Initializes the SDK (singleton). |
| `context(...)` | Context manager to tag spans with user/session/agent. |
| `set_context(...)` | Static. Imperative context setting; returns a token. |
| `clear_context(token)` | Static. Restores previous context using the token. |
| `create_span_processor()` | Returns a span processor for an external TracerProvider. |
| `get_instrumentors()` | Returns list of available instrumentor instances. |
| `shutdown()` | Flush and shut down. Called automatically via atexit. |
from flowlines import Flowlines
from flowlines import FlowlinesExporter  # only needed for advanced use

Enable debug logging with verbose=True:

flowlines = Flowlines(api_key="...", verbose=True)

flowlines.shutdown() flushes and shuts down the SDK and is registered automatically via atexit. Remember: calling Flowlines() a second time raises RuntimeError; has_external_otel=True and has_traceloop=True are mutually exclusive; install provider support through extras such as flowlines[openai].