# otel-relay-query
Query the otel-relay span store directly over HTTP to inspect traces from local render runs without consuming the full SSE stream.
## Install

```bash
npx skill4agent add editframe/skills otel-relay-query
```

The relay listens on port 4319, configurable via the `WORKTREE_TRACING_SSE_PORT` environment variable.
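If the relay runs on a non-default port, the same variable can drive every query below. A minimal sketch, assuming a Bourne-style shell:

```bash
# Fall back to the default port when WORKTREE_TRACING_SSE_PORT is unset
curl "http://localhost:${WORKTREE_TRACING_SSE_PORT:-4319}/api/summary"
```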
## Workflow

```bash
# 1. Clear state before a test run
curl -X DELETE http://localhost:4319/api/buffer
# 2. Start a render (via the app or a test script)
# 3. Query what arrived
curl http://localhost:4319/api/summary
```

## API

- `GET /api/summary`
- `GET /api/traces` with filters `?service=`, `?name=`, `?from=`, `?to=`, `?limit=`
- `GET /api/traces/{traceId}` returns `{ traceId, spans: [...] }`
- `GET /api/spans` with filters `?name=`, `?namePrefix=` (e.g. `?namePrefix=SegmentEncoder`), `?traceId=`, `?attr.{key}={value}` (e.g. `?attr.renderId=abc&attr.jobId=xyz`), `?from=`, `?to=`, `?limit=`
- `DELETE /api/buffer`
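Since `GET /api/traces/{traceId}` returns `{ traceId, spans: [...] }`, responses compose well with `jq`. A sketch, assuming `jq` is installed and that each span object carries a `name` field (the `?name=` filter suggests it does):

```bash
# List the distinct span names in one trace, with counts
TRACE_ID="replace-with-a-real-traceId"
curl -s "http://localhost:4319/api/traces/${TRACE_ID}" | jq -r '.spans[].name' | sort | uniq -c
```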
curl "http://localhost:4319/api/spans?namePrefix=SegmentEncoder&limit=5"
curl "http://localhost:4319/api/spans?namePrefix=ElectronEngine&limit=5"# rpcReady span is present on cold instances, absent when Electron is reused
curl "http://localhost:4319/api/spans?name=rpcReady"# traceId comes from Cloud Logging: jsonPayload.workflowId → find matching "Claimed job" log entry
curl "http://localhost:4319/api/traces/{traceId}"curl "http://localhost:4319/api/spans?attr.renderId=6b529000-4b6d-4103-a4e5-3d803dfa64c8"# List all traces, note the traceIds of slow vs fast jobs
curl "http://localhost:4319/api/traces?limit=20"
# Then pull each trace for comparison
curl "http://localhost:4319/api/traces/{slow-traceId}"
curl "http://localhost:4319/api/traces/{fast-traceId}"SpanStoreQuerySpansSpanStoreDELETE /api/buffertelecine/services/otel-relay/store.goapi.goSegmentEncoder.renderFrameElectronEngine.captureFramerpcReady