Loading...
Loading...
Use this skill whenever working with QuestDB — a high-performance time-series database. Trigger on any mention of QuestDB, time-series SQL with SAMPLE BY, LATEST ON, ASOF JOIN, ILP ingestion, or the questdb Python/Go/Java/Rust/.NET client libraries. Also trigger when writing Grafana queries against QuestDB, creating materialized views for time-series rollups, working with order book or financial market data in QuestDB, or any SQL that involves designated timestamps or time-partitioned tables. QuestDB extends SQL with unique time-series keywords — standard PostgreSQL or MySQL patterns will fail. Always read this skill before writing QuestDB SQL to avoid hallucinating incorrect syntax.
npx skill4agent add questdb/questdb-agent-skill questdb02_ingest.py03_dashboard.pysleep03_dashboard.pyfieldConfig.overrides"5s""250ms"?refresh=250msRuntimeError: no current event loopSSL_CERT_FILEdocker run -d --name questdb -p 9000:9000 -p 9009:9009 -p 8812:8812 questdb/questdb:latestdocker run -d --name grafana -p 3000:3000 -e GF_INSTALL_PLUGINS=questdb-questdb-datasource -e GF_SECURITY_ADMIN_PASSWORD=admin -e GF_DASHBOARDS_MIN_REFRESH_INTERVAL=250ms grafana/grafana:latestpython3 -m venv .venv.venv/bin/pip install -q cryptofeed questdb 'psycopg[binary]' requests numpy certifi.venv/bin/pip uninstall uvloop -y 2>/dev/nullcurl -sf --retry 30 --retry-delay 1 --retry-all-errors -o /dev/null "http://localhost:9000/exec?query=SELECT+1"curl -sf --retry 30 --retry-delay 1 --retry-all-errors -o /dev/null http://localhost:3000/api/healthcurl -s -X POST http://localhost:3000/api/datasources -u admin:admin -H "Content-Type: application/json" -d '{"name":"QuestDB","type":"questdb-questdb-datasource","access":"proxy","jsonData":{"server":"host.docker.internal","port":8812,"username":"admin","tlsMode":"disable","timeout":"120","queryTimeout":"60"},"secureJsonData":{"password":"quest"}}'serverhost.docker.internalport8812tlsMode"disable"usernamepasswordadminquestpython3 -m venv .venv.venv/bin/pip install -q cryptofeed questdb 'psycopg[binary]' requests numpy certifi.venv/bin/pip uninstall uvloop -y 2>/dev/nullpythonreferences/common-mistakes.mdgrafana-advanced.mdindicators.mdcookbook.mdsql-grammar.mdvwap()twap()spread()mid()wmid()l2price()enterprise.mdtime_bucket()DISTINCT ONHAVINGgenerate_series().mdcurl# Any doc page — just append .md to the URL
curl -sH "Accept: text/markdown" "https://questdb.com/docs/query/sql/horizon-join.md"
curl -sH "Accept: text/markdown" "https://questdb.com/docs/cookbook/sql/finance.md"
# Full documentation index for LLMs — lists every page with its .md URL
curl -s "https://questdb.com/docs/llms.txt"llms.txtCREATE TABLE IF NOT EXISTS trades (
ts TIMESTAMP,
symbol SYMBOL,
side SYMBOL,
price DOUBLE,
amount DOUBLE
) TIMESTAMP(ts) PARTITION BY DAY WAL
DEDUP UPSERT KEYS(ts, symbol);TIMESTAMP(col)SYMBOLPARTITION BY DAY|MONTH|YEAR|HOURWALDEDUP UPSERT KEYS(ts, symbol)DOUBLE[][]ALTER TABLE trades ADD COLUMN exchange SYMBOL;
ALTER TABLE trades DROP COLUMN exchange;
ALTER TABLE trades RENAME COLUMN amount TO qty;
ALTER TABLE trades ALTER COLUMN exchange SET TYPE SYMBOL;GROUP BY time_bucket()SELECT ts, symbol,
first(price) AS open, max(price) AS high,
min(price) AS low, last(price) AS close,
sum(amount) AS volume
FROM trades
WHERE ts > '2025-01-01'
SAMPLE BY 1h;1s5s1m15m1h1d1MALIGN TO CALENDARFILL(PREV | NULL | LINEAR | value)SAMPLE BY 5sfirst()last()count_distinct(col)COUNT(DISTINCT col)HAVINGSELECT * FROM (... SAMPLE BY ...) WHERE volume > 1000DISTINCT ONROW_NUMBER()SELECT * FROM trades
WHERE ts > dateadd('h', -1, now())
LATEST ON ts PARTITION BY symbol;LATEST ON tsPARTITION BYWHERE| Pattern | Use |
|---|---|
| Point-in-time match: "value at trade time" | ASOF JOIN (or HORIZON JOIN with `LIST (0)`) |
| Match strictly before trade time | LT JOIN |
| Interleave two time series chronologically | SPLICE JOIN |
| Price at trade time AND at offsets ±N (markout, IS decomposition, adverse selection) | HORIZON JOIN |
| Aggregate quotes within a ±N window around each trade (rolling stats, quote density, pre-/post-trade averages) | WINDOW JOIN |
| Per-outer-row subquery: top-N per group, dynamic filters, per-row SAMPLE BY / LATEST ON / ASOF | LATERAL JOIN |
-- ASOF JOIN: nearest right row with ts ≤ left.ts
SELECT * FROM trades ASOF JOIN quotes ON (symbol);
-- LT JOIN: strictly ts < left.ts
SELECT * FROM trades LT JOIN quotes ON (symbol);
-- SPLICE JOIN: merge two time series interleaved
SELECT * FROM trades SPLICE JOIN quotes ON (symbol);
-- HORIZON JOIN: ASOF at a grid of time offsets, one parallel pass.
-- For each trade, computes metrics at -1m, -30s, 0, 30s, 1m, ...
-- Supports multiple right-hand tables (only the last carries RANGE/LIST/AS).
SELECT h.offset / 1_000_000_000 AS horizon_sec, t.symbol,
avg((m.best_bid + m.best_ask) / 2) AS avg_mid
FROM fx_trades t
HORIZON JOIN market_data m ON (symbol)
RANGE FROM -1m TO 5m STEP 30s AS h
WHERE t.timestamp IN '$yesterday'
ORDER BY t.symbol, horizon_sec;
-- Or with explicit offsets for implementation shortfall:
-- LIST (0, 1800s) AS h → at_fill and at_30m columns (via PIVOT)
-- WINDOW JOIN: aggregate right-table rows within a ±time window around each left row.
-- EXCLUDE PREVAILING omits the last-known value before the window start.
SELECT t.symbol, t.timestamp,
avg(c.bid_price) AS avg_bid_pm5s,
count() AS num_quotes
FROM fx_trades t
WINDOW JOIN core_price c ON (t.symbol = c.symbol)
RANGE BETWEEN 5 seconds PRECEDING AND 5 seconds FOLLOWING
EXCLUDE PREVAILING
WHERE t.symbol = 'EURUSD' AND t.timestamp IN '$now-1h..$now';
-- LATERAL JOIN: subquery evaluated per outer row, can reference outer columns.
-- Classic top-N per group — two largest fills for each order:
SELECT o.id, o.desk, t.qty
FROM orders o
JOIN LATERAL (
SELECT qty FROM fills
WHERE order_id = o.id
ORDER BY qty DESC LIMIT 2
) t
ORDER BY o.id, t.qty DESC;ON (symbol)ON (t.sym = q.sym)curl -sH "Accept: text/markdown" "https://questdb.com/docs/query/sql/horizon-join.md"curl -sH "Accept: text/markdown" "https://questdb.com/docs/query/sql/window-join.md"curl -sH "Accept: text/markdown" "https://questdb.com/docs/query/sql/lateral-join.md"references/cookbook.mdUNNESTFROM-- Native array: expand the sizes at level 2 of each order book row
SELECT t.symbol, u.vol
FROM market_data t, UNNEST(t.asks[2]) u(vol)
WHERE t.timestamp IN '$now-1m..$now' AND t.symbol = 'EURUSD';
-- WITH ORDINALITY gives a 1-based level index (resets per input row)
SELECT m.symbol, u.vol, u.level
FROM market_data m, UNNEST(m.asks[2]) WITH ORDINALITY u(vol, level)
WHERE m.timestamp IN '$now-1m..$now';
-- JSON array (VARCHAR) with typed columns — great for ingesting API payloads
SELECT u.trade_id, u.price, u.size, u.side, u.time
FROM UNNEST(
payload_varchar::VARCHAR
COLUMNS(trade_id LONG, price DOUBLE, size DOUBLE, side VARCHAR, time TIMESTAMP)
) u;UNNESTFROMCROSS JOINSELECTDOUBLE[]UNNESTDOUBLE[][]DOUBLE[]UNNESTCOLUMNS(name TYPE, ...)ordinalityu(val, pos)curl -sH "Accept: text/markdown" "https://questdb.com/docs/query/sql/unnest.md"-- ROWS frame (count-based)
AVG(close) OVER (ORDER BY ts ROWS BETWEEN 19 PRECEDING AND CURRENT ROW) AS sma20
-- CUMULATIVE shorthand (QuestDB extension, equivalent to ROWS UNBOUNDED PRECEDING)
SUM(volume) OVER (ORDER BY ts CUMULATIVE) AS running_total
-- PARTITION BY
LAG(close) OVER (PARTITION BY symbol ORDER BY ts) AS prev_close
-- EMA (exponential moving average — QuestDB extension)
avg(price, 'period', 14) OVER (ORDER BY ts) AS ema14ROW_NUMBER()RANK()DENSE_RANK()LAG()LEAD()FIRST_VALUE()AVG()SUM()MIN()MAX()COUNT()stddev_samp()sqrt(avg(x*x) - avg(x)^2)CREATE MATERIALIZED VIEW IF NOT EXISTS candles_5s AS (
SELECT ts, symbol,
first(price) AS open, max(price) AS high,
min(price) AS low, last(price) AS close,
sum(amount) AS volume
FROM trades SAMPLE BY 5s
) PARTITION BY DAY;IF NOT EXISTStrades → 5s → 1m → 1hALTER MATERIALIZED VIEW candles_5s INVALIDATECREATE VIEW IF NOT EXISTS latest_prices AS (
SELECT * FROM trades
LATEST ON ts PARTITION BY symbol
);
-- Query it like a table
SELECT * FROM latest_prices WHERE symbol = 'BTC-USDT';
-- Modify or drop
ALTER VIEW latest_prices AS (SELECT ...); -- redefine
COMPILE VIEW latest_prices; -- recompile after schema changes
DROP VIEW latest_prices;dateadd()BETWEENdate [T time] @timezone #dayFilter ;duration-- Date variables: resolve at query time
WHERE ts IN '$today' -- full day (midnight to midnight)
WHERE ts IN '$yesterday' -- previous full day
WHERE ts IN '$tomorrow' -- next full day
-- Ranges with $now (point-in-time, microsecond precision)
WHERE ts IN '$now - 1h..$now' -- last hour
WHERE ts IN '$now - 30m..$now' -- last 30 minutes
WHERE ts IN '$now - 5bd..$now' -- last 5 business days (skips weekends)
-- Duration suffix: extends forward from a point
WHERE ts IN '$now;1h' -- 1 hour starting now (forward)
WHERE ts IN '2025-01-15T09:30;6h30m' -- NYSE trading session
-- Imprecise dates: month-level and year-level (expand to all days)
WHERE ts IN '[2025-01]' -- all of January (no need for [01..31])
WHERE ts IN '[2025]T09:30' -- all of 2025 at 09:30
WHERE ts IN '2025-02T09:30' -- bare imprecise (brackets optional alone)
-- Bracket expansion: generates multiple intervals
WHERE ts IN '2025-01-[10..15]' -- days 10 through 15
WHERE ts IN '2025-01-[5,10..12,20]' -- specific days + ranges
WHERE ts IN '2025-[01,06]-[10,15]' -- Cartesian: Jan+Jun x 10th+15th
WHERE ts IN '2025-[01..03]T09:30' -- month-level range: all days Jan-Mar
-- Day-of-week filters
WHERE ts IN '[2025-01]#workday' -- weekdays in January
WHERE ts IN '[2025-01]#weekend' -- weekends in January
WHERE ts IN '[2025-01]#Mon,Wed,Fri' -- specific days in January
-- Timezone-aware (handles DST)
WHERE ts IN '2025-01-15T09:30@America/New_York;6h30m'
-- Combined: workdays at 09:30 New York time for all of January
WHERE ts IN '[2025-01]T09:30@America/New_York#workday;6h30m'
-- Mixed-precision date lists
WHERE ts IN '[2025-01-15, 2025-02]T09:30' -- one day + full month
-- Time lists: multiple intraday windows
WHERE ts IN '2025-01-15T[09:00,14:30];1h' -- two 1h windows on the same day
-- Date lists (brackets required for lists)
WHERE ts IN '[$today, $yesterday, 2025-01-15]'
-- ISO week dates
WHERE ts IN '2025-W01-[1..5]T09:00;8h' -- Mon-Fri of week 1$TODAY$today$today$yesterday$tomorrow$nowyMwdbdhmsTunbdMmTYYYY-MMYYYY'[2025-01]T09:30#workday;6h30m''2025-01-[01..31]T09:30#workday;6h30m''2025-02T09:30''[2025-02]T09:30'[$today, $yesterday][$now - 2h..$now]@America/New_York$now - 2h..$now2025-01T09:30#XNYS#XLON#workdayWHERE ts IN '[2025-01]#XNYS'curl -sH "Accept: text/markdown" "https://questdb.com/docs/query/operators/exchange-calendars.md"curl -sH "Accept: text/markdown" "https://questdb.com/docs/query/operators/tick.md"DECLARE @start := '2025-01-01T00:00:00Z', @end := now();
SELECT * FROM trades WHERE ts BETWEEN @start AND @end;FROM → WHERE → SAMPLE BY → SELECT → LIMITnow()systimestamp()dateadd('unit', n, ts)datediff('unit', ts1, ts2)to_timestamp('str', 'fmt')timestamp_floor('unit', ts)timestamp_ceil('unit', ts)hour(ts)day_of_week(ts)week_of_year(ts)year(ts)month(ts)concat('a', 'b')left(s, n)right(s, n)length(s)starts_with(s, prefix)lcase(s)ucase(s)replace(s, old, new)split_part(s, delim, idx)regexp_replace(s, pattern, replacement)first(x)last(x)count_distinct(x)sum(x)avg(x)min(x)max(x)haversine_dist_deg(lat1, lon1, lat2, lon2)array_cum_sum(arr)from questdb.ingress import Sender, TimestampNanos
import numpy as np
# Open Source (no auth). For Enterprise: read references/enterprise.md Quick Start
# (admin creates service account + token via REST → ingestion script uses token)
conf = "tcp::addr=localhost:9009;protocol_version=2;"
with Sender.from_conf(conf) as sender:
sender.row(
'trades',
symbols={'symbol': 'BTC-USDT', 'side': 'buy'},
columns={'price': 42000.50, 'amount': 1.5},
at=TimestampNanos.now()
)
sender.flush()Sender.from_conf()withsender.establish()symbols={}columns={}np.float64protocol_version=2http::addr=localhost:9000;tcp::addr=localhost:9009;sender.row(
'order_book',
symbols={'symbol': 'BTC-USDT'},
columns={
'bids': [
np.array([41999.0, 41998.5, 41998.0], dtype=np.float64), # prices [1]
np.array([2.1, 5.3, 10.0], dtype=np.float64), # sizes [2]
],
'asks': [
np.array([42001.0, 42001.5, 42002.0], dtype=np.float64),
np.array([1.8, 4.2, 8.5], dtype=np.float64),
],
},
at=TimestampNanos.now()
)ingestion/clients/{language}INSERT INTO trades (ts, symbol, side, price, amount)
VALUES ('2025-02-09T10:00:00.000000Z', 'BTC-USDT', 'buy', 42000.50, 1.5);import psycopg as pg
conn = pg.connect("user=admin password=quest host=localhost port=8812 dbname=qdb")GET http://localhost:9000/exec?query=URL_ENCODED_SQL(%28)%29()count_distinct%28symbol%29count_distinct(symbol)references/enterprise.md{ "columns": [...], "dataset": [...] }CREATE TABLE IF NOT EXISTS trades (
ts TIMESTAMP,
symbol SYMBOL,
side SYMBOL,
price DOUBLE,
amount DOUBLE
) TIMESTAMP(ts) PARTITION BY DAY WAL
DEDUP UPSERT KEYS(ts, symbol);
-- L2 book snapshot table: per-level prices/sizes stored as native DOUBLE[]
-- arrays (index 1 = best bid/ask). Designated timestamp ts, daily partitions,
-- WAL writes, and DEDUP so a replayed snapshot for the same (ts, symbol)
-- upserts instead of duplicating rows.
CREATE TABLE IF NOT EXISTS orderbook (
ts TIMESTAMP,
symbol SYMBOL,
bid_prices DOUBLE[],
bid_sizes DOUBLE[],
ask_prices DOUBLE[],
ask_sizes DOUBLE[]
) TIMESTAMP(ts) PARTITION BY DAY WAL
DEDUP UPSERT KEYS(ts, symbol);
-- Cascading materialized views: trades → 5s → 1m → 1h
-- 5-second OHLCV candles aggregated directly from raw trades.
CREATE MATERIALIZED VIEW IF NOT EXISTS candles_5s AS (
SELECT ts, symbol,
first(price) AS open, max(price) AS high,
min(price) AS low, last(price) AS close,
sum(amount) AS volume
FROM trades SAMPLE BY 5s
) PARTITION BY DAY;
-- 1-minute candles rolled up from the 5-second view. Note the re-aggregation
-- idiom: first(open)/last(close) over candle columns, not first(price) over
-- raw trades.
CREATE MATERIALIZED VIEW IF NOT EXISTS candles_1m AS (
SELECT ts, symbol,
first(open) AS open, max(high) AS high,
min(low) AS low, last(close) AS close,
sum(volume) AS volume
FROM candles_5s SAMPLE BY 1m
) PARTITION BY DAY;
CREATE MATERIALIZED VIEW IF NOT EXISTS candles_1h AS (
SELECT ts, symbol,
first(open) AS open, max(high) AS high,
min(low) AS low, last(close) AS close,
sum(volume) AS volume
FROM candles_1m SAMPLE BY 1h
) PARTITION BY MONTH;import os
import certifi
os.environ['SSL_CERT_FILE'] = certifi.where() # Required: macOS lacks system CA certs for HTTPS/WSS
import numpy as np
from cryptofeed import FeedHandler
from cryptofeed.exchanges import OKX
from cryptofeed.defines import TRADES, L2_BOOK
from questdb.ingress import Sender, TimestampNanos
conf = "tcp::addr=localhost:9009;protocol_version=2;" # Enterprise: see references/enterprise.md
async def trade_cb(t, receipt_timestamp):
    """Persist one cryptofeed trade event into the QuestDB `trades` table.

    The Decimal price/amount fields from cryptofeed are converted to float,
    and the exchange timestamp (float seconds) becomes integer nanoseconds
    for the designated-timestamp column.
    """
    tag_cols = {'symbol': t.symbol, 'side': t.side}
    num_cols = {'price': float(t.price), 'amount': float(t.amount)}
    event_ts = TimestampNanos(int(t.timestamp * 1e9))
    # Short-lived sender per event; the context manager closes the connection.
    with Sender.from_conf(conf) as sender:
        sender.row('trades', symbols=tag_cols, columns=num_cols, at=event_ts)
        sender.flush()
async def book_cb(book, receipt_timestamp):
    """Snapshot the top 25 levels of an L2 book into the `orderbook` table.

    cryptofeed exposes each side as a {Decimal(price): Decimal(size)} map;
    the best 25 bid levels (highest price first) and ask levels (lowest
    first) are written as parallel DOUBLE[] arrays of prices and sizes.
    """
    bid_side = book.book['bid']
    ask_side = book.book['ask']
    # Best bids are the highest prices; best asks are the lowest.
    top_bids = sorted(bid_side.keys(), reverse=True)[:25]
    top_asks = sorted(ask_side.keys())[:25]

    def as_f64(values):
        # Decimal -> float64 array for QuestDB's native array columns.
        return np.array([float(v) for v in values], dtype=np.float64)

    with Sender.from_conf(conf) as sender:
        sender.row(
            'orderbook',
            symbols={'symbol': book.symbol},
            columns={
                'bid_prices': as_f64(top_bids),
                'bid_sizes': as_f64(bid_side[p] for p in top_bids),
                'ask_prices': as_f64(top_asks),
                'ask_sizes': as_f64(ask_side[p] for p in top_asks),
            },
            at=TimestampNanos(int(book.timestamp * 1e9)),
        )
        sender.flush()
# Exchange feed wiring: subscribe to OKX trades + L2 book for each symbol and
# route events to the QuestDB ingestion callbacks defined above.
SYMBOLS = ['BTC-USDT', 'ETH-USDT', 'SOL-USDT'] # add more as needed
f = FeedHandler()
f.add_feed(OKX(
symbols=SYMBOLS,
channels=[TRADES, L2_BOOK],
callbacks={TRADES: trade_cb, L2_BOOK: book_cb}
))
f.run()t.symbolt.side'buy''sell't.pricet.amountt.timestampt.pricet.amountDecimalfloat()book.book['bid']book.book['ask']{Decimal(price): Decimal(size), ...}book.symbolbook.timestampOKXCoinbaseBinanceKrakenBybitTRADESL2_BOOKL3_BOOKTICKERCANDLESOPEN_INTERESTFUNDINGLIQUIDATIONS'BTC-USDT''BTC-USD'X | NoneOptional[X]pip install cryptofeed questdb 'psycopg[binary]' requests numpy certifipip uninstall uvloop -ySSL_CERT_FILEos.environ['SSL_CERT_FILE'] = certifi.where()SSL_CERT_FILE=$(...)sender = Sender.from_conf(conf)
sender.establish()
async def trade_cb(t, receipt_timestamp):
sender.row('trades', symbols={...}, columns={...}, at=...)
# flush periodically or use auto_flush_interval in conf string01_schema.py03_dashboard.py02_ingest.pyopenxdg-open?refresh=250msurl = f"{GRAFANA_URL}{resp.json()['url']}?refresh=250ms&from=now-5m&to=now"
subprocess.run(["open" if sys.platform == "darwin" else "xdg-open", url])demo.questdb.iocurl -sH "Accept: text/markdown" "https://questdb.com/docs/cookbook/demo-data-schema/"questdb-questdb-datasourcejsonData.serverhost.docker.internaljsonData.port8812jsonData.tlsMode"disable"jsonData.usernamesecureJsonData.passwordadminquesturl$__timeFilter(ts)5sSELECT DISTINCT symbol FROM tradesreferences/grafana-advanced.mdtimets AS timets AS timetstsSELECT ts AS time,
first(price) AS open, max(price) AS high,
min(price) AS low, last(price) AS close,
sum(amount) AS volume
FROM trades
WHERE $__timeFilter(ts) AND symbol = '$symbol'
SAMPLE BY 5s;SELECT ts AS time,
first(open) AS open, max(high) AS high,
min(low) AS low, last(close) AS close,
sum(volume) AS volume
FROM candles_1m
WHERE $__timeFilter(ts) AND symbol = '$symbol'
SAMPLE BY 5m;WITH ohlc AS (
SELECT ts, symbol,
first(price) AS open, max(price) AS high,
min(price) AS low, last(price) AS close,
sum(amount) AS volume
FROM trades
WHERE $__timeFilter(ts) AND symbol = '$symbol'
SAMPLE BY 5s
),
vwap AS (
SELECT ts, close,
sum((high + low + close) / 3 * volume) OVER (ORDER BY ts CUMULATIVE)
/ sum(volume) OVER (ORDER BY ts CUMULATIVE) AS vwap
FROM ohlc
)
SELECT ts AS time, close, vwap FROM vwap;stddev_sampWITH ohlc AS (
SELECT ts, symbol,
first(price) AS open, max(price) AS high,
min(price) AS low, last(price) AS close,
sum(amount) AS volume
FROM trades
WHERE $__timeFilter(ts) AND symbol = '$symbol'
SAMPLE BY 5s
),
stats AS (
SELECT ts, close,
AVG(close) OVER (
ORDER BY ts ROWS BETWEEN 19 PRECEDING AND CURRENT ROW
) AS sma20,
AVG(close * close) OVER (
ORDER BY ts ROWS BETWEEN 19 PRECEDING AND CURRENT ROW
) AS avg_close_sq
FROM ohlc
)
SELECT ts AS time, close, sma20,
sma20 + 2 * sqrt(avg_close_sq - (sma20 * sma20)) AS upper_band,
sma20 - 2 * sqrt(avg_close_sq - (sma20 * sma20)) AS lower_band
FROM stats;references/indicators.mdWITH ohlc AS (
SELECT ts, symbol,
first(price) AS open, max(price) AS high,
min(price) AS low, last(price) AS close,
sum(amount) AS volume
FROM trades
WHERE $__timeFilter(ts) AND symbol = '$symbol'
SAMPLE BY 5s
),
changes AS (
SELECT ts, close,
close - LAG(close) OVER (ORDER BY ts) AS change
FROM ohlc
),
gains_losses AS (
SELECT ts, close,
CASE WHEN change > 0 THEN change ELSE 0 END AS gain,
CASE WHEN change < 0 THEN ABS(change) ELSE 0 END AS loss
FROM changes
),
avg_gl AS (
SELECT ts, close,
AVG(gain) OVER (ORDER BY ts
ROWS BETWEEN 13 PRECEDING AND CURRENT ROW) AS avg_gain,
AVG(loss) OVER (ORDER BY ts
ROWS BETWEEN 13 PRECEDING AND CURRENT ROW) AS avg_loss
FROM gains_losses
)
SELECT ts AS time,
CASE WHEN avg_loss = 0 THEN 100
ELSE 100 - (100 / (1 + avg_gain / NULLIF(avg_loss, 0)))
END AS rsi
FROM avg_gl;avg(gain, 'period', 14) OVER (ORDER BY ts)avg(gain, 'period', 27)WITH ohlc AS (
SELECT ts, symbol,
first(price) AS open, max(price) AS high,
min(price) AS low, last(price) AS close,
sum(amount) AS volume
FROM trades
WHERE $__timeFilter(ts) AND symbol = '$symbol'
SAMPLE BY 5s
),
indicators AS (
SELECT ts, close,
AVG(close) OVER (
ORDER BY ts ROWS BETWEEN 19 PRECEDING AND CURRENT ROW
) AS sma20,
AVG(close * close) OVER (
ORDER BY ts ROWS BETWEEN 19 PRECEDING AND CURRENT ROW
) AS avg_close_sq,
sum((high + low + close) / 3 * volume) OVER (ORDER BY ts CUMULATIVE)
/ sum(volume) OVER (ORDER BY ts CUMULATIVE) AS vwap
FROM ohlc
)
SELECT ts AS time, close, sma20,
sma20 + 2 * sqrt(avg_close_sq - (sma20 * sma20)) AS upper_band,
sma20 - 2 * sqrt(avg_close_sq - (sma20 * sma20)) AS lower_band,
vwap
FROM indicators;includeAllFields: truecandlesticktimeseriests AS timefieldConfig.overrides"refresh": "250ms""5s""liveNow": false"time": {"from": "now-5m", "to": "now"}"timepicker": {"refresh_intervals": ["250ms", "500ms", "1s", "5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"]}?refresh=250ms&from=now-5m&to=nowmin_refresh_interval5sGF_DASHBOARDS_MIN_REFRESH_INTERVAL=250ms"refresh": "250ms"?refresh=250ms{
"refId": "A",
"datasource": {"uid": "QUESTDB_UID", "type": "questdb-questdb-datasource"},
"format": 1,
"rawSql": "SELECT ts AS time, ... WHERE $__timeFilter(ts) ..."
}"format"1"table"sqlutil.FormatQueryOptionjson: cannot unmarshal string into Go struct field Query.format"table"1import json, subprocess, sys, os, requests, time
QUESTDB_URL = "http://localhost:9000"
GRAFANA_URL = "http://localhost:3000"
GRAFANA_AUTH = ("admin", "admin")
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# --- Start ingestion if not already running ---
# Probe QuestDB: any trades rows in the last 10 seconds means an ingester is
# already live (possibly started by an earlier run of this script).
ingest_proc = None  # set only when WE launch the ingester below
already_running = False
try:
    resp = requests.get(f"{QUESTDB_URL}/exec",
        params={"query": "SELECT count() FROM trades WHERE ts > dateadd('s', -10, now())"})
    already_running = resp.json()["dataset"][0][0] > 0
except Exception:
    # QuestDB unreachable or `trades` missing — treat as "not running".
    pass
if already_running:
    print("Ingestion already running — skipping launch")
else:
    ingest_log = open(os.path.join(SCRIPT_DIR, "ingest.log"), "w")
    ingest_proc = subprocess.Popen(
        [sys.executable, "-u", os.path.join(SCRIPT_DIR, "02_ingest.py")],
        stdout=ingest_log, stderr=ingest_log,
        start_new_session=True,  # detach from parent — survives Ctrl+C and script exit
    )
    print(f"Ingestion started: PID {ingest_proc.pid}")
# --- Gate: wait for data before deploying dashboard ---
for i in range(15):  # up to 15s
    time.sleep(1)
    # Fail fast only for a process we launched ourselves. When reusing an
    # external ingester (already_running) there is no handle to poll — the
    # previous version raised NameError on `ingest_proc` in that path.
    if ingest_proc is not None and ingest_proc.poll() is not None:
        ingest_log.close()
        print("ERROR: Ingestion process died. Log:")
        print(open(os.path.join(SCRIPT_DIR, "ingest.log")).read())
        sys.exit(1)
    try:
        resp = requests.get(f"{QUESTDB_URL}/exec", params={"query": "SELECT count() FROM trades"})
        count = resp.json()["dataset"][0][0]
        if count > 0:
            print(f"Data gate passed: {count} rows in trades (waited {i+1}s)")
            break
    except Exception:
        pass
else:
    print("ERROR: No data after 15s. Check ingest.log")
    sys.exit(1)
# --- Create or find QuestDB datasource ---
# The QuestDB plugin is configured via jsonData fields, NOT the standard url
# field: server = hostname only (no port, no protocol), port = integer,
# tlsMode = "disable".
ds_type = "questdb-questdb-datasource"
all_sources = requests.get(f"{GRAFANA_URL}/api/datasources", auth=GRAFANA_AUTH).json()
match = next((d for d in all_sources if d["type"] == ds_type), None)
if match is not None:
    questdb_uid = match["uid"]
else:
    payload = {
        "name": "QuestDB", "type": ds_type, "access": "proxy",
        "jsonData": {
            "server": "host.docker.internal",
            "port": 8812,
            "username": "admin",
            "tlsMode": "disable",
            "timeout": "120",
            "queryTimeout": "60",
        },
        "secureJsonData": {"password": "quest"},
    }
    resp = requests.post(f"{GRAFANA_URL}/api/datasources", auth=GRAFANA_AUTH, json=payload)
    questdb_uid = resp.json()["datasource"]["uid"]
DS_REF = {"uid": questdb_uid, "type": "questdb-questdb-datasource"}
dashboard = {
"dashboard": {
"title": "Crypto Real-Time Market Data",
"uid": "crypto-realtime",
"timezone": "browser",
"refresh": "250ms", # Default refresh rate — do NOT change to 5s
"liveNow": False,
"schemaVersion": 38,
"time": {"from": "now-5m", "to": "now"},
"timepicker": {
"refresh_intervals": ["250ms", "500ms", "1s", "5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"],
},
"tags": ["crypto", "questdb", "realtime"],
"templating": {"list": [{
"name": "symbol", "type": "query", "label": "Symbol",
"query": "SELECT DISTINCT symbol FROM trades ORDER BY symbol;",
"datasource": DS_REF,
"refresh": 1, "sort": 1,
"current": {"text": "BTC-USDT", "value": "BTC-USDT"},
}]},
"panels": [
{
"id": 1, "type": "candlestick",
"title": "OHLC - $symbol",
"gridPos": {"h": 10, "w": 24, "x": 0, "y": 0},
"datasource": DS_REF,
"fieldConfig": {
"defaults": {"custom": {"axisBorderShow": False, "axisPlacement": "auto"}},
"overrides": [{"matcher": {"id": "byName", "options": "volume"},
"properties": [{"id": "custom.axisPlacement", "value": "hidden"}]}],
},
"options": {
"mode": "candles+volume", "includeAllFields": False,
"candleStyle": "candles", "colorStrategy": "open-close",
"colors": {"up": "green", "down": "red"},
"fields": {"open": "open", "high": "high", "low": "low",
"close": "close", "volume": "volume"},
},
"targets": [{
"refId": "A", "datasource": DS_REF, "format": 1,
"rawSql": "SELECT ts AS time, first(price) AS open, max(price) AS high, min(price) AS low, last(price) AS close, sum(amount) AS volume FROM trades WHERE $__timeFilter(ts) AND symbol = '$symbol' SAMPLE BY 5s;",
}],
},
{
"id": 2, "type": "candlestick",
"title": "OHLC + Indicators - $symbol",
"gridPos": {"h": 10, "w": 24, "x": 0, "y": 10},
"datasource": DS_REF,
"fieldConfig": {
"defaults": {"custom": {"axisBorderShow": False, "axisPlacement": "auto"}},
# CRITICAL: these overrides are NOT cosmetic — without them:
# - RSI (0-100) shares Y-axis with price (~97000) → candlesticks appear flat
# - Volume distorts the price axis
# - Bollinger bands have no visual distinction from price lines
# Copy ALL overrides below exactly. NEVER use "overrides": []
"overrides": [
{"matcher": {"id": "byName", "options": "volume"},
"properties": [{"id": "custom.axisPlacement", "value": "hidden"}]},
{"matcher": {"id": "byName", "options": "vwap"},
"properties": [{"id": "color", "value": {"fixedColor": "orange", "mode": "fixed"}},
{"id": "custom.lineWidth", "value": 2}]},
{"matcher": {"id": "byName", "options": "sma20"},
"properties": [{"id": "color", "value": {"fixedColor": "yellow", "mode": "fixed"}},
{"id": "custom.lineWidth", "value": 2}]},
{"matcher": {"id": "byName", "options": "upper_band"},
"properties": [{"id": "color", "value": {"fixedColor": "light-blue", "mode": "fixed"}},
{"id": "custom.lineStyle", "value": {"fill": "dash", "dash": [10, 10]}},
{"id": "custom.fillBelowTo", "value": "lower_band"},
{"id": "custom.fillOpacity", "value": 8}]},
{"matcher": {"id": "byName", "options": "lower_band"},
"properties": [{"id": "color", "value": {"fixedColor": "light-blue", "mode": "fixed"}},
{"id": "custom.lineStyle", "value": {"fill": "dash", "dash": [10, 10]}}]},
{"matcher": {"id": "byFrameRefID", "options": "D"},
"properties": [{"id": "color", "value": {"fixedColor": "purple", "mode": "fixed"}},
{"id": "custom.axisPlacement", "value": "right"},
{"id": "min", "value": 0}, {"id": "max", "value": 100},
{"id": "unit", "value": "percent"}]},
],
},
"options": {
"mode": "candles+volume", "includeAllFields": True,
"candleStyle": "candles", "colorStrategy": "open-close",
"colors": {"up": "green", "down": "red"},
"fields": {"open": "open", "high": "high", "low": "low",
"close": "close", "volume": "volume"},
},
# --- Overlay pattern: refId A = OHLC columns, B/C/D = one indicator each ---
# A MUST return: open, high, low, close, volume (for the candlestick)
# B/C/D MUST return: ts AS time + indicator columns ONLY (not OHLC)
# Grafana overlays B/C/D on A using the shared time column
"targets": [
{ # A: OHLC + volume — the candlestick data
"refId": "A", "datasource": DS_REF, "format": 1,
"rawSql": "SELECT ts AS time, first(price) AS open, max(price) AS high, min(price) AS low, last(price) AS close, sum(amount) AS volume FROM trades WHERE $__timeFilter(ts) AND symbol = '$symbol' SAMPLE BY 5s;",
},
{ # B: VWAP only — overlays on candlestick
"refId": "B", "datasource": DS_REF, "format": 1,
"rawSql": "WITH ohlc AS (SELECT ts, symbol, first(price) AS open, max(price) AS high, min(price) AS low, last(price) AS close, sum(amount) AS volume FROM trades WHERE $__timeFilter(ts) AND symbol = '$symbol' SAMPLE BY 5s), vwap AS (SELECT ts, sum((high + low + close) / 3 * volume) OVER (ORDER BY ts CUMULATIVE) / sum(volume) OVER (ORDER BY ts CUMULATIVE) AS vwap FROM ohlc) SELECT ts AS time, vwap FROM vwap;",
},
{ # C: Bollinger Bands only (sma20, upper_band, lower_band)
"refId": "C", "datasource": DS_REF, "format": 1,
"rawSql": "WITH ohlc AS (SELECT ts, symbol, first(price) AS open, max(price) AS high, min(price) AS low, last(price) AS close, sum(amount) AS volume FROM trades WHERE $__timeFilter(ts) AND symbol = '$symbol' SAMPLE BY 5s), stats AS (SELECT ts, close, AVG(close) OVER (ORDER BY ts ROWS BETWEEN 19 PRECEDING AND CURRENT ROW) AS sma20, AVG(close * close) OVER (ORDER BY ts ROWS BETWEEN 19 PRECEDING AND CURRENT ROW) AS avg_close_sq FROM ohlc) SELECT ts AS time, sma20, sma20 + 2 * sqrt(avg_close_sq - (sma20 * sma20)) AS upper_band, sma20 - 2 * sqrt(avg_close_sq - (sma20 * sma20)) AS lower_band FROM stats;",
},
{ # D: RSI only — right Y-axis 0-100%
"refId": "D", "datasource": DS_REF, "format": 1,
"rawSql": "WITH ohlc AS (SELECT ts, symbol, first(price) AS open, max(price) AS high, min(price) AS low, last(price) AS close, sum(amount) AS volume FROM trades WHERE $__timeFilter(ts) AND symbol = '$symbol' SAMPLE BY 5s), changes AS (SELECT ts, close, close - LAG(close) OVER (ORDER BY ts) AS change FROM ohlc), gains_losses AS (SELECT ts, close, CASE WHEN change > 0 THEN change ELSE 0 END AS gain, CASE WHEN change < 0 THEN ABS(change) ELSE 0 END AS loss FROM changes), avg_gl AS (SELECT ts, close, AVG(gain) OVER (ORDER BY ts ROWS BETWEEN 13 PRECEDING AND CURRENT ROW) AS avg_gain, AVG(loss) OVER (ORDER BY ts ROWS BETWEEN 13 PRECEDING AND CURRENT ROW) AS avg_loss FROM gains_losses) SELECT ts AS time, CASE WHEN avg_loss = 0 THEN 100 ELSE 100 - (100 / (1 + avg_gain / NULLIF(avg_loss, 0))) END AS rsi FROM avg_gl;",
},
],
},
{
"id": 3, "type": "timeseries",
"title": "Bid-Ask Spread - $symbol",
"gridPos": {"h": 6, "w": 24, "x": 0, "y": 20},
"datasource": DS_REF,
# CRITICAL: overrides REQUIRED — without them spread (~0.01) is invisible
# next to bid/ask (~97000). Spread MUST use right Y-axis.
"fieldConfig": {
"defaults": {"custom": {"lineWidth": 1, "fillOpacity": 15, "spanNulls": True, "pointSize": 1}},
"overrides": [
{"matcher": {"id": "byName", "options": "spread"},
"properties": [{"id": "color", "value": {"fixedColor": "red", "mode": "fixed"}},
{"id": "custom.axisPlacement", "value": "right"}]},
{"matcher": {"id": "byName", "options": "best_bid"},
"properties": [{"id": "color", "value": {"fixedColor": "green", "mode": "fixed"}}]},
{"matcher": {"id": "byName", "options": "best_ask"},
"properties": [{"id": "color", "value": {"fixedColor": "orange", "mode": "fixed"}}]},
],
},
"targets": [{
"refId": "A", "datasource": DS_REF, "format": 1,
"rawSql": "SELECT ts AS time, avg(ask_prices[1] - bid_prices[1]) AS spread, avg(bid_prices[1]) AS best_bid, avg(ask_prices[1]) AS best_ask FROM orderbook WHERE $__timeFilter(ts) AND symbol = '$symbol' SAMPLE BY 5s;",
}],
},
],
},
"overwrite": True,
}
resp = requests.post(f"{GRAFANA_URL}/api/dashboards/db", auth=GRAFANA_AUTH,
headers={"Content-Type": "application/json"}, json=dashboard)
url = f"{GRAFANA_URL}{resp.json().get('url', '')}?refresh=250ms&from=now-5m&to=now"
print(f"Dashboard: {resp.status_code} - {url}")
# Open in browser — data gate already confirmed rows are flowing
subprocess.run(["open" if sys.platform == "darwin" else "xdg-open", url])sleep