Skill by
ara.so — Daily 2026 Skills collection.
Quip Node Manager is a cross-platform desktop application (macOS, Linux, Windows) built with Tauri v2 + Rust that lets you run, configure, and monitor
Quip Network nodes. It supports two execution modes — Docker (default on Windows/Linux) and Native binary (default on macOS) — with a GUI front-end and an optional terminal UI (enabled with a command-line flag; the exact flag name was lost in extraction — see the project README).
Data directory (a platform-specific application data path; the exact path was lost in extraction) — stores settings, TOML config, secrets, downloaded binaries, and trust database.
The frontend calls Rust commands via
window.__TAURI__.core.invoke
. Key commands:
js
const { invoke } = window.__TAURI__.core;
// Get current node status
const status = await invoke('get_node_status');
// Returns: { running: bool, mode: 'docker'|'native', uptime_secs: number }
// Start the node
await invoke('start_node');
// Stop the node
await invoke('stop_node');
// Get configuration
const config = await invoke('get_config');
// Save configuration
await invoke('save_config', { config: { /* see config schema below */ } });
// Run pre-flight checks
const checks = await invoke('run_preflight');
// Returns: { docker: bool, secret: bool, public_ip: string, port_open: bool, firewall: bool }
// Get GPU devices
const gpus = await invoke('get_gpu_devices');
// Returns: [{ id: string, name: string, type: 'cuda'|'metal', enabled: bool, utilization: number }]
// Check for updates
const update = await invoke('check_for_updates');
// Returns: { app: string|null, node: string|null, docker_image: string|null }
The app generates a TOML config, written to a file inside the data directory (exact filename lost in extraction), matching the quip-protocol format:
rust
// src-tauri/src/commands.rs
use tauri::State;
use crate::node::NodeManager;
#[tauri::command]
pub async fn get_node_status(
manager: State<'_, NodeManager>,
) -> Result<NodeStatus, String> {
manager.status().await.map_err(|e| e.to_string())
}
#[tauri::command]
pub async fn start_node(
manager: State<'_, NodeManager>,
) -> Result<(), String> {
manager.start().await.map_err(|e| e.to_string())
}
rust
// src-tauri/src/main.rs
fn main() {
tauri::Builder::default()
.manage(NodeManager::new())
.invoke_handler(tauri::generate_handler![
commands::get_node_status,
commands::start_node,
commands::stop_node,
commands::get_config,
commands::save_config,
commands::run_preflight,
commands::get_gpu_devices,
commands::check_for_updates,
])
.run(tauri::generate_context!())
.expect("error while running tauri application");
}
rust
// src-tauri/src/node.rs
use tauri::{AppHandle, Emitter};
pub async fn stream_logs(app: AppHandle, mut reader: impl AsyncBufRead + Unpin) {
let mut line = String::new();
loop {
line.clear();
match reader.read_line(&mut line).await {
Ok(0) => break, // EOF
Ok(_) => {
app.emit("node-log-line", line.trim().to_string())
.unwrap_or_default();
}
Err(e) => {
app.emit("node-log-error", e.to_string()).unwrap_or_default();
break;
}
}
}
}
rust
// src-tauri/src/docker.rs
use std::process::Command;
pub fn pull_image(image: &str) -> Result<(), String> {
let status = Command::new("docker")
.args(["pull", image])
.status()
.map_err(|e| format!("docker not found: {e}"))?;
if status.success() { Ok(()) } else { Err("docker pull failed".into()) }
}
pub fn run_node_container(image: &str, data_dir: &str, port: u16) -> Result<String, String> {
let output = Command::new("docker")
.args([
"run", "-d",
"--name", "quip-node",
"-p", &format!("{port}:{port}"),
"-v", &format!("{data_dir}:/quip-data"),
image,
])
.output()
.map_err(|e| e.to_string())?;
if output.status.success() {
Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
} else {
Err(String::from_utf8_lossy(&output.stderr).to_string())
}
}
rust
// src-tauri/src/gpu.rs
#[derive(serde::Serialize)]
pub struct GpuDevice {
pub id: String,
pub name: String,
pub device_type: String, // "cuda" or "metal"
pub enabled: bool,
pub utilization: u8,
}
pub fn detect_gpus() -> Vec<GpuDevice> {
let mut devices = Vec::new();
// CUDA detection via nvidia-smi
if let Ok(output) = std::process::Command::new("nvidia-smi")
.args(["--query-gpu=index,name", "--format=csv,noheader"])
.output()
{
for line in String::from_utf8_lossy(&output.stdout).lines() {
let parts: Vec<&str> = line.splitn(2, ',').collect();
if parts.len() == 2 {
devices.push(GpuDevice {
id: parts[0].trim().to_string(),
name: parts[1].trim().to_string(),
device_type: "cuda".into(),
enabled: true,
utilization: 80,
});
}
}
}
// macOS Metal: presence of Metal framework implies GPU
#[cfg(target_os = "macos")]
devices.push(GpuDevice {
id: "0".into(),
name: "Apple Metal GPU".into(),
device_type: "metal".into(),
enabled: true,
utilization: 80,
});
devices
}
A command-line flag (name lost in extraction — see the project README) launches a terminal UI (TUI) instead of the desktop window — useful for headless servers.
The GUI includes a built-in walkthrough. For scripted setup: