Loading...
Loading...
OpenLess open-source voice input for macOS & Windows — press a hotkey, speak, get AI-polished text inserted at your cursor in any app.
npx skill4agent add aradotso/trending-skills openless-voice-input
Skill by ara.so — Daily 2026 Skills collection.
OpenLess_<version>_aarch64.dmg
OpenLess.app → /Applications
OpenLess_<version>_x64-setup.exe
rustup
cargo install tauri-cli --version "^2"
openless-all/README.md
git clone https://github.com/appergb/openless.git
cd openless/openless-all/app
npm ci
# Development (Vite at :1420 + Tauri shell with hot reload)
npm run tauri dev
# Production build — macOS (signs, installs, resets TCC)
./scripts/build-mac.sh
# Build only, skip install step
INSTALL=0 ./scripts/build-mac.sh
# Rust type-check without full compile
cargo check --manifest-path src-tauri/Cargo.toml
# Frontend TypeScript type-check
npm run build
~/Library/Logs/OpenLess/openless.log
%LOCALAPPDATA%\OpenLess\Logs\openless.log
com.openless.app
~/.openless/credentials.json (mode 0600)
| Key | Where to get it |
|---|---|
| Volcengine ASR APP ID | Volcengine console → Speech Recognition |
| Volcengine ASR Access Token | Same console |
| Volcengine ASR Resource ID | Same console |
| Ark/LLM API Key | Volcengine Ark console or any OpenAI-compatible provider |
| Ark Model ID | e.g. |
| Ark Endpoint | Default: |
// src-tauri/src/persistence.rs pattern
use crate::types::Credentials;
// Headless/server-style configuration: build Credentials from environment
// variables instead of the Settings UI.
// NOTE(review): unwrap() panics (with a terse message) when a required
// variable is missing — acceptable for a startup-time config read, but a
// .expect("OPENLESS_... not set") would give a clearer error.
let creds = Credentials {
    asr_app_id: std::env::var("OPENLESS_ASR_APP_ID").unwrap(),
    asr_token: std::env::var("OPENLESS_ASR_TOKEN").unwrap(),
    asr_resource_id: std::env::var("OPENLESS_ASR_RESOURCE_ID").unwrap(),
    ark_api_key: std::env::var("OPENLESS_ARK_API_KEY").unwrap(),
    ark_model_id: std::env::var("OPENLESS_ARK_MODEL_ID").unwrap(),
    // Endpoint is the only optional variable; falls back to the default
    // Volcengine Ark chat-completions URL.
    ark_endpoint: std::env::var("OPENLESS_ARK_ENDPOINT")
        .unwrap_or_else(|_| "https://ark.cn-beijing.volces.com/api/v3/chat/completions".to_string()),
};
// Pass to coordinator via Tauri state
openless-all/app/
├── src/ # React/TypeScript frontend
│ ├── pages/
│ │ ├── _atoms.tsx # Recoil global state atoms
│ │ ├── Home.tsx
│ │ ├── History.tsx
│ │ ├── Dictionary.tsx
│ │ └── Settings.tsx
│ └── lib/
│ └── ipc.ts # All Tauri invoke() calls (IPC surface)
└── src-tauri/src/ # Rust backend
├── types.rs # Value types: DictationSession, PolishMode, errors
├── hotkey.rs # CGEventTap (macOS) / WH_KEYBOARD_LL (Windows)
├── recorder.rs # Mic → 16 kHz mono Int16 PCM + RMS callback
├── asr/ # Volcengine WebSocket ASR + Whisper HTTP
├── polish.rs # OpenAI-compatible chat-completions
├── insertion.rs # AX focused-element → clipboard+paste → copy fallback
├── persistence.rs # History / prefs / vocab JSON + Keychain
├── permissions.rs # TCC checks
├── coordinator.rs # State machine: Idle→Starting→Listening→Processing
└── commands.rs # Tauri #[tauri::command] IPC surface
hotkey DOWN
→ coordinator: Idle → Starting → Listening
→ recorder.start() + asr.open_session()
→ [audio frames streamed to ASR via WebSocket]
hotkey UP
→ recorder.stop() + asr.send_last_frame()
→ coordinator: Listening → Processing
→ polish(transcript, mode) → LLM API call
→ insertion.insert_at_cursor(polished_text)
├─ AX focused element write (macOS Accessibility API)
├─ clipboard + Cmd+V / Ctrl+V paste
└─ copy-only fallback (text in clipboard, user pastes manually)
→ history.save(session)
→ coordinator: Processing → Idle
Esc
| Mode | Tauri enum | Behaviour |
|---|---|---|
| Raw | | Transcript verbatim, no LLM call |
| Light | | Remove filler words, fix punctuation |
| Structured | | AI-prompt mode — reshapes speech into a structured, context-rich prompt |
| Formal | | Formal prose, fixes grammar, organises paragraphs |
Please write a SQL query that:
- Pulls orders from last month from the `orders` table.
- Groups by customer.
- Sorts by total amount, descending.
- Returns the top 10 rows only.
What features are missing?
src/lib/ipc.ts
invoke()
// src/lib/ipc.ts — representative subset
import { invoke } from "@tauri-apps/api/core";
import type { DictationSession, PolishMode, HotkeyBinding } from "./types";
/** Persist ASR/LLM credentials (written to the OS Keychain on the Rust side). */
export async function saveCredentials(creds: {
  asrAppId: string;
  asrToken: string;
  asrResourceId: string;
  arkApiKey: string;
  arkModelId: string;
  arkEndpoint: string;
}): Promise<void> {
  await invoke("save_credentials", { creds });
}
/**
 * Load saved credentials (for Settings UI pre-fill).
 * Resolves to null when nothing has been saved yet.
 *
 * Fix: the original return type `Promise<typeof creds | null>` referenced
 * `creds`, a value that does not exist at module scope, so the file did
 * not type-check. The credential shape is spelled out explicitly instead
 * (mirrors the parameter of saveCredentials above).
 */
export async function loadCredentials(): Promise<{
  asrAppId: string;
  asrToken: string;
  asrResourceId: string;
  arkApiKey: string;
  arkModelId: string;
  arkEndpoint: string;
} | null> {
  return invoke("load_credentials");
}
/** Fetch all persisted dictation sessions for the History page. */
export async function getHistory(): Promise<DictationSession[]> {
  const sessions = await invoke<DictationSession[]>("get_history");
  return sessions;
}
/** Switch the polish mode applied to subsequent dictations. */
export async function setPolishMode(mode: PolishMode): Promise<void> {
  await invoke("set_polish_mode", { mode });
}
/** Replace the global hotkey binding used to start/stop dictation. */
export async function setHotkey(binding: HotkeyBinding): Promise<void> {
  await invoke("set_hotkey", { binding });
}
/** Query platform permission state (microphone + accessibility). */
export async function checkPermissions(): Promise<{
  microphone: boolean;
  accessibility: boolean;
}> {
  const status = await invoke<{ microphone: boolean; accessibility: boolean }>(
    "check_permissions",
  );
  return status;
}
/** Add a word to the custom vocabulary/dictionary (fed to ASR as hotwords). */
export async function addVocabEntry(entry: {
  word: string;
  category: string;
  notes: string;
}): Promise<void> {
  await invoke("add_vocab_entry", { entry });
}
// src/pages/_atoms.tsx — key atoms
import { atom, selector } from "recoil";
import type { DictationSession, PolishMode } from "../lib/types";
// Active polish mode; "Structured" is the default.
export const polishModeAtom = atom<PolishMode>({
key: "polishMode",
default: "Structured",
});
// Dictation sessions shown on the History page.
export const historyAtom = atom<DictationSession[]>({
key: "history",
default: [],
});
// Mirrors the Rust coordinator's RecordingState (lower-cased), updated
// from the "recording-state-changed" Tauri event.
export const recordingStateAtom = atom<
"idle" | "starting" | "listening" | "processing"
>({
key: "recordingState",
default: "idle",
});
// Live microphone RMS level (0 = silence) — fed by the recorder's RMS callback.
export const rmsLevelAtom = atom<number>({
key: "rmsLevel",
default: 0,
});
// src-tauri/src/types.rs
// Polish modes selectable in the UI; the serde derives let the enum cross
// the Tauri IPC boundary as JSON.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub enum PolishMode {
    Raw,        // transcript verbatim, no LLM call
    Light,      // remove filler words, fix punctuation
    Structured, // reshape speech into a structured, context-rich prompt
    Formal,     // formal prose, grammar fixes, paragraph organisation
    // Add your mode:
    Technical,
}
// src-tauri/src/polish.rs
// Maps the active polish mode to its LLM system prompt. Raw maps to an
// empty prompt because Raw mode makes no LLM call at all.
fn build_system_prompt(mode: &PolishMode) -> &'static str {
    match mode {
        PolishMode::Raw => "",
        PolishMode::Light => LIGHT_PROMPT,
        PolishMode::Structured => STRUCTURED_PROMPT,
        PolishMode::Formal => FORMAL_PROMPT,
        // Custom mode example: inline prompt instead of a named constant.
        PolishMode::Technical => {
            "You are a technical writer. Convert the spoken transcript into \
precise technical documentation prose. Use correct terminology. \
Do not answer questions — output the cleaned text only, no preamble."
        }
    }
}
context.hotwords
import { addVocabEntry } from "../lib/ipc";
// Example: bias recognition toward the product name via a dictionary entry.
await addVocabEntry({
word: "OpenLess",
category: "product",
notes: "Open-source voice input app, not 'open list' or 'open less'",
});
// src-tauri/src/asr/volcengine.rs (simplified)
// Opens a Volcengine streaming-ASR WebSocket session, forwarding the
// user's enabled vocabulary entries as recognition hotwords.
async fn open_session(
    &self,
    vocab: &[VocabEntry],
    config: &AsrConfig,
) -> Result<AsrSession> {
    // Only entries the user has enabled are sent as hotwords.
    let hotwords: Vec<String> = vocab
        .iter()
        .filter(|e| e.enabled)
        .map(|e| e.word.clone())
        .collect();
    // Handshake payload: 16 kHz mono 16-bit PCM (the recorder's output
    // format), with hotwords nested under request.context.
    let payload = serde_json::json!({
        "app": { "appid": config.app_id, "token": config.token },
        "audio": { "format": "pcm", "sample_rate": 16000, "bits": 16, "channel": 1 },
        "request": {
            "model_name": config.resource_id,
            "context": { "hotwords": hotwords }
        }
    });
    // open WebSocket, send payload ...
}
import { setHotkey } from "../lib/ipc";
import type { HotkeyBinding } from "./types";
// Example: Right Option key, push-to-talk mode
// (push-to-talk = the hotkey DOWN/UP flow shown in the pipeline above).
const binding: HotkeyBinding = {
key: "AltRight",
modifiers: [],
mode: "PushToTalk",
};
await setHotkey(binding);
// src-tauri/src/hotkey.rs (macOS path — CGEventTap)
// Registers the OS-global hotkey listener (doc stub — real code lives in
// hotkey.rs). Callbacks fire on key press/release of the bound key.
#[cfg(target_os = "macos")]
pub fn register_global_hotkey(
    binding: HotkeyBinding,
    on_press: impl Fn() + Send + 'static,
    on_release: impl Fn() + Send + 'static,
) -> Result<HotkeyHandle> {
    // Uses CGEventTap — requires Accessibility permission
    // Spawns a dedicated CFRunLoop thread
    // Sends Tauri events: "hotkey-press" / "hotkey-release"
    todo!("see hotkey.rs for full implementation")
}
// Windows variant of the same registration entry point.
#[cfg(target_os = "windows")]
pub fn register_global_hotkey(/* ... */) -> Result<HotkeyHandle> {
    // Uses SetWindowsHookExA(WH_KEYBOARD_LL, ...)
    todo!("see hotkey.rs for full implementation")
}
// src-tauri/src/coordinator.rs
#[derive(Debug, Clone, PartialEq)]
pub enum RecordingState {
Idle,
Starting,
Listening,
Processing,
}
// Transitions:
// Idle --[hotkey press]--> Starting
// Starting --[ASR ready]----> Listening
// Listening --[hotkey release]-> Processing
// Listening --[Esc]-----------> Idle (cancel)
// Processing--[done / Esc]----> Idleimport { listen } from "@tauri-apps/api/event";
import { useSetRecoilState } from "recoil";
import { recordingStateAtom } from "./_atoms";
// Mirror backend state-machine transitions into the Recoil atom so the
// UI can react to Idle/Starting/Listening/Processing changes.
const setRecordingState = useSetRecoilState(recordingStateAtom);
useEffect(() => {
  // Fix: type the event payload via the listen<T> generic instead of the
  // unsound `event.payload as any` cast.
  const unlisten = listen<"idle" | "starting" | "listening" | "processing">(
    "recording-state-changed",
    (event) => {
      setRecordingState(event.payload);
    },
  );
  // listen() resolves to an unlisten fn; invoke it on unmount.
  return () => { void unlisten.then((fn) => fn()); };
}, [setRecordingState]); // setter is stable; listed to satisfy exhaustive-deps
openless.log
curl -s $OPENLESS_ARK_ENDPOINT
codesign
# Skip signing for local dev build
CODESIGN_IDENTITY="" INSTALL=0 ./scripts/build-mac.sh
cd openless-all/app
cargo check --manifest-path src-tauri/Cargo.toml
# Look for changed feature flags in Cargo.toml
# Run `cargo update` if lock file is stale
| File | Purpose |
|---|---|
| Master state machine — start here to understand the flow |
| All |
| LLM prompt construction and API calls |
| Volcengine WebSocket ASR + Whisper HTTP |
| Cursor insertion + clipboard fallback |
| Frontend IPC surface — all |
| Recoil global state |
| Module-wiring invariants for AI coding agents |
| Polish example corpus design |
| Full end-user walkthrough |