diff --git a/web/app.js b/web/app.js
deleted file mode 100644
index e8bd0d7..0000000
--- a/web/app.js
+++ /dev/null
@@ -1,342 +0,0 @@
-import "./style.css";
-import audioProcessorUrl from "./audio-processor.js?worker&url";
-/**
- * VoicePaste — Main application logic.
- *
- * Modules:
- * 1. WebSocket client (token auth, reconnect)
- * 2. Audio pipeline (getUserMedia → AudioWorklet → resample → WS binary)
- * 3. Recording controls (touch/mouse, state machine)
- * 4. History (localStorage, tap to re-send)
- * 5. UI state management
- */
-;(function () {
- "use strict";
-
- // ── Constants ──
- const TARGET_SAMPLE_RATE = 16000;
- const WS_RECONNECT_BASE = 1000;
- const WS_RECONNECT_MAX = 16000;
- const HISTORY_KEY = "voicepaste_history";
- const HISTORY_MAX = 50;
-
- // ── DOM refs ──
- const $ = (sel) => document.querySelector(sel);
- const statusEl = $("#status");
- const statusText = $("#status-text");
- const previewText = $("#preview-text");
- const previewBox = $("#preview");
- const micBtn = $("#mic-btn");
- const historyList = $("#history-list");
- const historyEmpty = $("#history-empty");
- const clearHistoryBtn = $("#clear-history");
-
- // ── State ──
- const state = {
- ws: null,
- connected: false,
- recording: false,
- pendingStart: false,
- startCancelled: false,
- audioCtx: null,
- workletNode: null,
- stream: null,
- reconnectDelay: WS_RECONNECT_BASE,
- reconnectTimer: null,
- };
-
- // ── Utility ──
- function getToken() {
- const params = new URLSearchParams(location.search);
- return params.get("token") || "";
- }
-
- function formatTime(ts) {
- const d = new Date(ts);
- const hh = String(d.getHours()).padStart(2, "0");
- const mm = String(d.getMinutes()).padStart(2, "0");
- return `${hh}:${mm}`;
- }
-
- // ── Resampler (linear interpolation, native rate → 16kHz 16-bit mono) ──
- function resampleTo16kInt16(float32, srcRate) {
- const ratio = srcRate / TARGET_SAMPLE_RATE;
- const outLen = Math.floor(float32.length / ratio);
- const out = new Int16Array(outLen);
- for (let i = 0; i < outLen; i++) {
- const srcIdx = i * ratio;
- const lo = Math.floor(srcIdx);
- const hi = Math.min(lo + 1, float32.length - 1);
- const frac = srcIdx - lo;
- const sample = float32[lo] + frac * (float32[hi] - float32[lo]);
- // Clamp to [-1, 1] then scale to Int16
- out[i] = Math.max(-32768, Math.min(32767, Math.round(sample * 32767)));
- }
- return out;
- }
- // ── WebSocket ──
- function wsUrl() {
- const proto = location.protocol === "https:" ? "wss:" : "ws:";
- const token = getToken();
- const q = token ? `?token=${encodeURIComponent(token)}` : "";
- return `${proto}//${location.host}/ws${q}`;
- }
- function setStatus(cls, text) {
- statusEl.className = `status ${cls}`;
- statusText.textContent = text;
- }
- function connectWS() {
- if (state.ws) return;
- setStatus("connecting", "连接中…");
- const ws = new WebSocket(wsUrl());
- ws.binaryType = "arraybuffer";
- ws.onopen = () => {
- state.connected = true;
- state.reconnectDelay = WS_RECONNECT_BASE;
- setStatus("connected", "已连接");
- micBtn.disabled = false;
- };
- ws.onmessage = (e) => handleServerMsg(e.data);
- ws.onclose = () => {
- state.connected = false;
- state.ws = null;
- micBtn.disabled = true;
- if (state.recording) stopRecording();
- setStatus("disconnected", "已断开");
- scheduleReconnect();
- };
- ws.onerror = () => ws.close();
- state.ws = ws;
- }
- function scheduleReconnect() {
- clearTimeout(state.reconnectTimer);
- state.reconnectTimer = setTimeout(() => {
- connectWS();
- }, state.reconnectDelay);
- state.reconnectDelay = Math.min(state.reconnectDelay * 2, WS_RECONNECT_MAX);
- }
- function sendJSON(obj) {
- if (state.ws && state.ws.readyState === WebSocket.OPEN) {
- state.ws.send(JSON.stringify(obj));
- }
- }
- function sendBinary(int16arr) {
- if (state.ws && state.ws.readyState === WebSocket.OPEN) {
- state.ws.send(int16arr.buffer);
- }
- }
- // ── Server message handler ──
- function handleServerMsg(data) {
- if (typeof data !== "string") return;
- let msg;
- try { msg = JSON.parse(data); } catch { return; }
- switch (msg.type) {
- case "partial":
- setPreview(msg.text, false);
- break;
- case "final":
- setPreview(msg.text, true);
- if (msg.text) addHistory(msg.text);
- break;
- case "pasted":
- showToast("✅ 已粘贴");
- break;
- case "error":
- showToast(`❌ ${msg.message || "错误"}`);
- break;
- }
- }
- function setPreview(text, isFinal) {
- if (!text) {
- previewText.textContent = "按住说话…";
- previewText.classList.add("placeholder");
- previewBox.classList.remove("active");
- return;
- }
- previewText.textContent = text;
- previewText.classList.remove("placeholder");
- previewBox.classList.toggle("active", !isFinal);
- }
- function showToast(msg) {
- // Lightweight toast — reuse or create
- let toast = $("#toast");
- if (!toast) {
- toast = document.createElement("div");
- toast.id = "toast";
- toast.style.cssText =
- "position:fixed;bottom:calc(100px + var(--safe-bottom,0px));left:50%;" +
- "transform:translateX(-50%);background:#222;color:#eee;padding:8px 18px;" +
- "border-radius:20px;font-size:14px;z-index:999;opacity:0;transition:opacity .3s;";
- document.body.appendChild(toast);
- }
- toast.textContent = msg;
- toast.style.opacity = "1";
- clearTimeout(toast._timer);
- toast._timer = setTimeout(() => { toast.style.opacity = "0"; }, 2000);
- }
- // ── Audio pipeline ──
- async function initAudio() {
- if (state.audioCtx) return;
- // Use device native sample rate — we resample to 16kHz in software
- const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
- // Chrome requires resume() after user gesture
- if (audioCtx.state === "suspended") {
- await audioCtx.resume();
- }
- await audioCtx.audioWorklet.addModule(audioProcessorUrl);
- state.audioCtx = audioCtx;
- }
- async function startRecording() {
- if (state.recording || state.pendingStart) return;
- state.pendingStart = true;
- state.startCancelled = false;
- try {
- await initAudio();
- if (state.startCancelled) { state.pendingStart = false; return; }
- // Ensure AudioContext is running (may suspend between recordings)
- if (state.audioCtx.state === "suspended") {
- await state.audioCtx.resume();
- }
- if (state.startCancelled) { state.pendingStart = false; return; }
- const stream = await navigator.mediaDevices.getUserMedia({
- audio: { echoCancellation: true, noiseSuppression: true, channelCount: 1 },
- });
- if (state.startCancelled) {
- stream.getTracks().forEach((t) => t.stop());
- state.pendingStart = false;
- return;
- }
- state.stream = stream;
- const source = state.audioCtx.createMediaStreamSource(stream);
- const worklet = new AudioWorkletNode(state.audioCtx, "audio-processor");
- worklet.port.onmessage = (e) => {
- if (e.data.type === "audio") {
- const int16 = resampleTo16kInt16(e.data.samples, e.data.sampleRate);
- sendBinary(int16);
- }
- };
- source.connect(worklet);
- worklet.port.postMessage({ command: "start" });
- // Don't connect worklet to destination (no playback)
- state.workletNode = worklet;
- state.pendingStart = false;
- state.recording = true;
- sendJSON({ type: "start" });
- micBtn.classList.add("recording");
- setPreview("", false);
- } catch (err) {
- state.pendingStart = false;
- showToast(`麦克风错误: ${err.message}`);
- }
- }
- function stopRecording() {
- // Cancel pending async start if still initializing
- if (state.pendingStart) {
- state.startCancelled = true;
- micBtn.classList.remove("recording");
- return;
- }
- if (!state.recording) return;
- state.recording = false;
- // Stop worklet
- if (state.workletNode) {
- state.workletNode.port.postMessage({ command: "stop" });
- state.workletNode.disconnect();
- state.workletNode = null;
- }
- // Stop mic stream
- if (state.stream) {
- state.stream.getTracks().forEach((t) => t.stop());
- state.stream = null;
- }
- sendJSON({ type: "stop" });
- micBtn.classList.remove("recording");
- }
- // ── History (localStorage) ──
- function loadHistory() {
- try {
- return JSON.parse(localStorage.getItem(HISTORY_KEY)) || [];
- } catch {
- return [];
- }
- }
- function saveHistory(items) {
- localStorage.setItem(HISTORY_KEY, JSON.stringify(items));
- }
- function addHistory(text) {
- const items = loadHistory();
- items.unshift({ text, ts: Date.now() });
- if (items.length > HISTORY_MAX) items.length = HISTORY_MAX;
- saveHistory(items);
- renderHistory();
- }
- function clearHistory() {
- localStorage.removeItem(HISTORY_KEY);
- renderHistory();
- }
- function renderHistory() {
- const items = loadHistory();
- historyList.innerHTML = "";
- if (!items.length) {
- historyEmpty.style.display = "";
- return;
- }
- historyEmpty.style.display = "none";
- for (const item of items) {
- const li = document.createElement("li");
- li.innerHTML =
- `${escapeHtml(item.text)}` +
- `${formatTime(item.ts)}`;
- li.addEventListener("click", () => {
- sendJSON({ type: "paste", text: item.text });
- showToast("发送粘贴…");
- });
- historyList.appendChild(li);
- }
- }
- function escapeHtml(s) {
- const d = document.createElement("div");
- d.textContent = s;
- return d.innerHTML;
- }
- // ── Event bindings ──
- function bindMicButton() {
- // Touch events (mobile primary)
- micBtn.addEventListener("touchstart", (e) => {
- e.preventDefault();
- startRecording();
- }, { passive: false });
- micBtn.addEventListener("touchend", (e) => {
- e.preventDefault();
- stopRecording();
- }, { passive: false });
- micBtn.addEventListener("touchcancel", (e) => {
- e.preventDefault();
- stopRecording();
- }, { passive: false });
- // Mouse fallback (desktop testing)
- micBtn.addEventListener("mousedown", (e) => {
- if (e.button !== 0) return;
- startRecording();
- });
- micBtn.addEventListener("mouseup", () => stopRecording());
- micBtn.addEventListener("mouseleave", () => {
- if (state.recording) stopRecording();
- });
- }
- // ── Init ──
- function init() {
- micBtn.disabled = true;
- bindMicButton();
- if (clearHistoryBtn) {
- clearHistoryBtn.addEventListener("click", clearHistory);
- }
- renderHistory();
- connectWS();
- }
- if (document.readyState === "loading") {
- document.addEventListener("DOMContentLoaded", init);
- } else {
- init();
- }
-})();
\ No newline at end of file
diff --git a/web/app.ts b/web/app.ts
new file mode 100644
index 0000000..c6e4607
--- /dev/null
+++ b/web/app.ts
@@ -0,0 +1,409 @@
+import "./style.css";
+import audioProcessorUrl from "./audio-processor.ts?worker&url";
+
+/**
+ * VoicePaste — Main application logic.
+ *
+ * Modules:
+ * 1. WebSocket client (token auth, reconnect)
+ * 2. Audio pipeline (getUserMedia → AudioWorklet → resample → WS binary)
+ * 3. Recording controls (touch/mouse, state machine)
+ * 4. History (localStorage, tap to re-send)
+ * 5. UI state management
+ */
+
+// ── Types ──
+interface HistoryItem {
+ text: string;
+ ts: number;
+}
+
+interface ServerMessage {
+ type: "partial" | "final" | "pasted" | "error";
+ text?: string;
+ message?: string;
+}
+
+interface AppState {
+ ws: WebSocket | null;
+ connected: boolean;
+ recording: boolean;
+ pendingStart: boolean;
+ startCancelled: boolean;
+ audioCtx: AudioContext | null;
+ workletNode: AudioWorkletNode | null;
+ stream: MediaStream | null;
+ reconnectDelay: number;
+	reconnectTimer: ReturnType<typeof setTimeout> | null;
+}
+
+// ── Constants ──
+const TARGET_SAMPLE_RATE = 16000;
+const WS_RECONNECT_BASE = 1000;
+const WS_RECONNECT_MAX = 16000;
+const HISTORY_KEY = "voicepaste_history";
+const HISTORY_MAX = 50;
+
+// ── DOM refs ──
+function q(sel: string): HTMLElement {
+ const el = document.querySelector(sel);
+ if (!el) throw new Error(`Element not found: ${sel}`);
+ return el;
+}
+const statusEl = q("#status");
+const statusText = q("#status-text");
+const previewText = q("#preview-text");
+const previewBox = q("#preview");
+const micBtn = q("#mic-btn") as HTMLButtonElement;
+const historyList = q("#history-list");
+const historyEmpty = q("#history-empty");
+const clearHistoryBtn = document.querySelector("#clear-history");
+
+// ── State ──
+const state: AppState = {
+ ws: null,
+ connected: false,
+ recording: false,
+ pendingStart: false,
+ startCancelled: false,
+ audioCtx: null,
+ workletNode: null,
+ stream: null,
+ reconnectDelay: WS_RECONNECT_BASE,
+ reconnectTimer: null,
+};
+
+// ── Utility ──
+function getToken(): string {
+ const params = new URLSearchParams(location.search);
+ return params.get("token") || "";
+}
+
+function formatTime(ts: number): string {
+ const d = new Date(ts);
+ const hh = String(d.getHours()).padStart(2, "0");
+ const mm = String(d.getMinutes()).padStart(2, "0");
+ return `${hh}:${mm}`;
+}
+
+// ── Resampler (linear interpolation, native rate → 16kHz 16-bit mono) ──
+function resampleTo16kInt16(
+ float32: Float32Array,
+ srcRate: number,
+): Int16Array {
+ const ratio = srcRate / TARGET_SAMPLE_RATE;
+ const outLen = Math.floor(float32.length / ratio);
+ const out = new Int16Array(outLen);
+ for (let i = 0; i < outLen; i++) {
+ const srcIdx = i * ratio;
+ const lo = Math.floor(srcIdx);
+ const hi = Math.min(lo + 1, float32.length - 1);
+ const frac = srcIdx - lo;
+ const sample = float32[lo] + frac * (float32[hi] - float32[lo]);
+ // Clamp to [-1, 1] then scale to Int16
+ out[i] = Math.max(-32768, Math.min(32767, Math.round(sample * 32767)));
+ }
+ return out;
+}
+// ── WebSocket ──
+function wsUrl(): string {
+ const proto = location.protocol === "https:" ? "wss:" : "ws:";
+ const token = getToken();
+ const q = token ? `?token=${encodeURIComponent(token)}` : "";
+ return `${proto}//${location.host}/ws${q}`;
+}
+function setStatus(cls: string, text: string): void {
+ statusEl.className = `status ${cls}`;
+ statusText.textContent = text;
+}
+function connectWS(): void {
+ if (state.ws) return;
+ setStatus("connecting", "连接中…");
+ const ws = new WebSocket(wsUrl());
+ ws.binaryType = "arraybuffer";
+ ws.onopen = () => {
+ state.connected = true;
+ state.reconnectDelay = WS_RECONNECT_BASE;
+ setStatus("connected", "已连接");
+ micBtn.disabled = false;
+ };
+ ws.onmessage = (e: MessageEvent) => handleServerMsg(e.data);
+ ws.onclose = () => {
+ state.connected = false;
+ state.ws = null;
+ micBtn.disabled = true;
+ if (state.recording) stopRecording();
+ setStatus("disconnected", "已断开");
+ scheduleReconnect();
+ };
+ ws.onerror = () => ws.close();
+ state.ws = ws;
+}
+function scheduleReconnect(): void {
+ if (state.reconnectTimer !== null) clearTimeout(state.reconnectTimer);
+ state.reconnectTimer = setTimeout(() => {
+ connectWS();
+ }, state.reconnectDelay);
+ state.reconnectDelay = Math.min(state.reconnectDelay * 2, WS_RECONNECT_MAX);
+}
+function sendJSON(obj: Record<string, unknown>): void {
+ if (state.ws && state.ws.readyState === WebSocket.OPEN) {
+ state.ws.send(JSON.stringify(obj));
+ }
+}
+function sendBinary(int16arr: Int16Array): void {
+ if (state.ws && state.ws.readyState === WebSocket.OPEN) {
+ state.ws.send(int16arr.buffer);
+ }
+}
+// ── Server message handler ──
+function handleServerMsg(data: unknown): void {
+ if (typeof data !== "string") return;
+ let msg: ServerMessage;
+ try {
+ msg = JSON.parse(data) as ServerMessage;
+ } catch {
+ return;
+ }
+ switch (msg.type) {
+ case "partial":
+ setPreview(msg.text || "", false);
+ break;
+ case "final":
+ setPreview(msg.text || "", true);
+ if (msg.text) addHistory(msg.text);
+ break;
+ case "pasted":
+ showToast("✅ 已粘贴");
+ break;
+ case "error":
+ showToast(`❌ ${msg.message || "错误"}`);
+ break;
+ }
+}
+function setPreview(text: string, isFinal: boolean): void {
+ if (!text) {
+ previewText.textContent = "按住说话…";
+ previewText.classList.add("placeholder");
+ previewBox.classList.remove("active");
+ return;
+ }
+ previewText.textContent = text;
+ previewText.classList.remove("placeholder");
+ previewBox.classList.toggle("active", !isFinal);
+}
+function showToast(msg: string): void {
+ let toast = document.getElementById("toast");
+ if (!toast) {
+ toast = document.createElement("div");
+ toast.id = "toast";
+ toast.style.cssText =
+ "position:fixed;bottom:calc(100px + var(--safe-bottom,0px));left:50%;" +
+ "transform:translateX(-50%);background:#222;color:#eee;padding:8px 18px;" +
+ "border-radius:20px;font-size:14px;z-index:999;opacity:0;transition:opacity .3s;";
+ document.body.appendChild(toast);
+ }
+ toast.textContent = msg;
+ toast.style.opacity = "1";
+ clearTimeout(
+		(toast as HTMLElement & { _timer?: ReturnType<typeof setTimeout> })._timer,
+ );
+	(toast as HTMLElement & { _timer?: ReturnType<typeof setTimeout> })._timer =
+ setTimeout(() => {
+ toast.style.opacity = "0";
+ }, 2000);
+}
+// ── Audio pipeline ──
+async function initAudio(): Promise<void> {
+ if (state.audioCtx) return;
+ // Use device native sample rate — we resample to 16kHz in software
+ const audioCtx = new AudioContext();
+ // Chrome requires resume() after user gesture
+ if (audioCtx.state === "suspended") {
+ await audioCtx.resume();
+ }
+ await audioCtx.audioWorklet.addModule(audioProcessorUrl);
+ state.audioCtx = audioCtx;
+}
+async function startRecording(): Promise<void> {
+ if (state.recording || state.pendingStart) return;
+ state.pendingStart = true;
+ state.startCancelled = false;
+ try {
+ await initAudio();
+ if (state.startCancelled) {
+ state.pendingStart = false;
+ return;
+ }
+ const audioCtx = state.audioCtx as AudioContext;
+ // Ensure AudioContext is running (may suspend between recordings)
+ if (audioCtx.state === "suspended") {
+ await audioCtx.resume();
+ }
+ if (state.startCancelled) {
+ state.pendingStart = false;
+ return;
+ }
+ const stream = await navigator.mediaDevices.getUserMedia({
+ audio: {
+ echoCancellation: true,
+ noiseSuppression: true,
+ channelCount: 1,
+ },
+ });
+ if (state.startCancelled) {
+ stream.getTracks().forEach((t) => {
+ t.stop();
+ });
+ state.pendingStart = false;
+ return;
+ }
+ state.stream = stream;
+ const source = audioCtx.createMediaStreamSource(stream);
+ const worklet = new AudioWorkletNode(audioCtx, "audio-processor");
+ worklet.port.onmessage = (e: MessageEvent) => {
+ if (e.data.type === "audio") {
+ const int16 = resampleTo16kInt16(e.data.samples, e.data.sampleRate);
+ sendBinary(int16);
+ }
+ };
+ source.connect(worklet);
+ worklet.port.postMessage({ command: "start" });
+ // Don't connect worklet to destination (no playback)
+ state.workletNode = worklet;
+ state.pendingStart = false;
+ state.recording = true;
+ sendJSON({ type: "start" });
+ micBtn.classList.add("recording");
+ setPreview("", false);
+ } catch (err) {
+ state.pendingStart = false;
+ showToast(`麦克风错误: ${(err as Error).message}`);
+ }
+}
+function stopRecording(): void {
+ // Cancel pending async start if still initializing
+ if (state.pendingStart) {
+ state.startCancelled = true;
+ micBtn.classList.remove("recording");
+ return;
+ }
+ if (!state.recording) return;
+ state.recording = false;
+ // Stop worklet
+ if (state.workletNode) {
+ state.workletNode.port.postMessage({ command: "stop" });
+ state.workletNode.disconnect();
+ state.workletNode = null;
+ }
+ // Stop mic stream
+ if (state.stream) {
+ state.stream.getTracks().forEach((t) => {
+ t.stop();
+ });
+ state.stream = null;
+ }
+ sendJSON({ type: "stop" });
+ micBtn.classList.remove("recording");
+}
+// ── History (localStorage) ──
+function loadHistory(): HistoryItem[] {
+ try {
+ return JSON.parse(
+ localStorage.getItem(HISTORY_KEY) || "[]",
+ ) as HistoryItem[];
+ } catch {
+ return [];
+ }
+}
+function saveHistory(items: HistoryItem[]): void {
+ localStorage.setItem(HISTORY_KEY, JSON.stringify(items));
+}
+function addHistory(text: string): void {
+ const items = loadHistory();
+ items.unshift({ text, ts: Date.now() });
+ if (items.length > HISTORY_MAX) items.length = HISTORY_MAX;
+ saveHistory(items);
+ renderHistory();
+}
+function clearHistory(): void {
+ localStorage.removeItem(HISTORY_KEY);
+ renderHistory();
+}
+function renderHistory(): void {
+ const items = loadHistory();
+ historyList.innerHTML = "";
+ if (!items.length) {
+ (historyEmpty as HTMLElement).style.display = "";
+ return;
+ }
+ (historyEmpty as HTMLElement).style.display = "none";
+ for (const item of items) {
+ const li = document.createElement("li");
+ li.innerHTML =
+ `${escapeHtml(item.text)}` +
+ `${formatTime(item.ts)}`;
+ li.addEventListener("click", () => {
+ sendJSON({ type: "paste", text: item.text });
+ showToast("发送粘贴…");
+ });
+ historyList.appendChild(li);
+ }
+}
+function escapeHtml(s: string): string {
+ const d = document.createElement("div");
+ d.textContent = s;
+ return d.innerHTML;
+}
+// ── Event bindings ──
+function bindMicButton(): void {
+ // Touch events (mobile primary)
+ micBtn.addEventListener(
+ "touchstart",
+ (e: TouchEvent) => {
+ e.preventDefault();
+ startRecording();
+ },
+ { passive: false },
+ );
+ micBtn.addEventListener(
+ "touchend",
+ (e: TouchEvent) => {
+ e.preventDefault();
+ stopRecording();
+ },
+ { passive: false },
+ );
+ micBtn.addEventListener(
+ "touchcancel",
+ (e: TouchEvent) => {
+ e.preventDefault();
+ stopRecording();
+ },
+ { passive: false },
+ );
+ // Mouse fallback (desktop testing)
+ micBtn.addEventListener("mousedown", (e: MouseEvent) => {
+ if (e.button !== 0) return;
+ startRecording();
+ });
+ micBtn.addEventListener("mouseup", () => stopRecording());
+ micBtn.addEventListener("mouseleave", () => {
+ if (state.recording) stopRecording();
+ });
+}
+// ── Init ──
+function init(): void {
+ micBtn.disabled = true;
+ bindMicButton();
+ if (clearHistoryBtn) {
+ clearHistoryBtn.addEventListener("click", clearHistory);
+ }
+ renderHistory();
+ connectWS();
+}
+if (document.readyState === "loading") {
+ document.addEventListener("DOMContentLoaded", init);
+} else {
+ init();
+}
diff --git a/web/audio-processor.js b/web/audio-processor.js
deleted file mode 100644
index edf45f0..0000000
--- a/web/audio-processor.js
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * AudioWorklet processor for VoicePaste.
- *
- * Captures raw Float32 PCM from the microphone, accumulates samples into
- * ~200ms frames, and posts them to the main thread for resampling + WS send.
- *
- * Communication:
- * Main → Processor: { command: "start" | "stop" }
- * Processor → Main: { type: "audio", samples: Float32Array, sampleRate: number }
- */
-class AudioProcessor extends AudioWorkletProcessor {
- constructor() {
- super();
- this.recording = false;
- this.buffer = [];
- this.bufferLen = 0;
- // ~200ms worth of samples at current sample rate
- // sampleRate is a global in AudioWorkletGlobalScope
- this.frameSize = Math.floor(sampleRate * 0.2);
-
- this.port.onmessage = (e) => {
- if (e.data.command === "start") {
- this.recording = true;
- this.buffer = [];
- this.bufferLen = 0;
- } else if (e.data.command === "stop") {
- // Flush remaining samples
- if (this.bufferLen > 0) {
- this._flush();
- }
- this.recording = false;
- }
- };
- }
-
- process(inputs) {
- if (!this.recording) return true;
-
- const input = inputs[0];
- if (!input || !input[0]) return true;
-
- // Mono channel 0
- const channelData = input[0];
- this.buffer.push(new Float32Array(channelData));
- this.bufferLen += channelData.length;
-
- if (this.bufferLen >= this.frameSize) {
- this._flush();
- }
-
- return true;
- }
-
- _flush() {
- // Merge buffer chunks into a single Float32Array
- const merged = new Float32Array(this.bufferLen);
- let offset = 0;
- for (const chunk of this.buffer) {
- merged.set(chunk, offset);
- offset += chunk.length;
- }
-
- this.port.postMessage(
- { type: "audio", samples: merged, sampleRate: sampleRate },
- [merged.buffer] // Transfer ownership for zero-copy
- );
-
- this.buffer = [];
- this.bufferLen = 0;
- }
-}
-
-registerProcessor("audio-processor", AudioProcessor);
diff --git a/web/audio-processor.ts b/web/audio-processor.ts
new file mode 100644
index 0000000..3c8f3f4
--- /dev/null
+++ b/web/audio-processor.ts
@@ -0,0 +1,88 @@
+/**
+ * AudioWorklet processor for VoicePaste.
+ *
+ * Captures raw Float32 PCM from the microphone, accumulates samples into
+ * ~200ms frames, and posts them to the main thread for resampling + WS send.
+ *
+ * Communication:
+ * Main → Processor: { command: "start" | "stop" }
+ * Processor → Main: { type: "audio", samples: Float32Array, sampleRate: number }
+ */
+
+// AudioWorkletGlobalScope globals (not in standard lib)
+declare const sampleRate: number;
+declare class AudioWorkletProcessor {
+ readonly port: MessagePort;
+ constructor();
+ process(
+ inputs: Float32Array[][],
+ outputs: Float32Array[][],
+		parameters: Record<string, Float32Array>,
+ ): boolean;
+}
+declare function registerProcessor(
+ name: string,
+ ctor: new () => AudioWorkletProcessor,
+): void;
+
+class VoicePasteProcessor extends AudioWorkletProcessor {
+ private recording = false;
+ private buffer: Float32Array[] = [];
+ private bufferLen = 0;
+ private readonly frameSize: number;
+
+ constructor() {
+ super();
+ // ~200ms worth of samples at current sample rate
+ this.frameSize = Math.floor(sampleRate * 0.2);
+
+ this.port.onmessage = (e: MessageEvent) => {
+ if (e.data.command === "start") {
+ this.recording = true;
+ this.buffer = [];
+ this.bufferLen = 0;
+ } else if (e.data.command === "stop") {
+ if (this.bufferLen > 0) {
+ this.flush();
+ }
+ this.recording = false;
+ }
+ };
+ }
+
+ process(inputs: Float32Array[][]): boolean {
+ if (!this.recording) return true;
+
+ const input = inputs[0];
+ if (!input || !input[0]) return true;
+
+ const channelData = input[0];
+ this.buffer.push(new Float32Array(channelData));
+ this.bufferLen += channelData.length;
+
+ if (this.bufferLen >= this.frameSize) {
+ this.flush();
+ }
+
+ return true;
+ }
+
+ private flush(): void {
+ const merged = new Float32Array(this.bufferLen);
+ let offset = 0;
+ for (const chunk of this.buffer) {
+ merged.set(chunk, offset);
+ offset += chunk.length;
+ }
+
+ this.port.postMessage(
+ { type: "audio", samples: merged, sampleRate: sampleRate },
+ [merged.buffer],
+ );
+
+ this.buffer = [];
+ this.bufferLen = 0;
+ }
+}
+
+registerProcessor("audio-processor", VoicePasteProcessor);
diff --git a/web/biome.json b/web/biome.json
new file mode 100644
index 0000000..b9b52b1
--- /dev/null
+++ b/web/biome.json
@@ -0,0 +1,34 @@
+{
+ "$schema": "https://biomejs.dev/schemas/2.4.4/schema.json",
+ "vcs": {
+ "enabled": true,
+ "clientKind": "git",
+ "useIgnoreFile": true
+ },
+ "files": {
+ "includes": ["**", "!!**/dist"]
+ },
+ "formatter": {
+ "enabled": true,
+ "indentStyle": "tab"
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": true
+ }
+ },
+ "javascript": {
+ "formatter": {
+ "quoteStyle": "double"
+ }
+ },
+ "assist": {
+ "enabled": true,
+ "actions": {
+ "source": {
+ "organizeImports": "on"
+ }
+ }
+ }
+}
diff --git a/web/bun.lock b/web/bun.lock
index 66f95b7..fd1e39c 100644
--- a/web/bun.lock
+++ b/web/bun.lock
@@ -5,11 +5,31 @@
"": {
"name": "web",
"devDependencies": {
+ "@biomejs/biome": "^2.4.4",
+ "typescript": "^5.9.3",
"vite": "^7.3.1",
},
},
},
"packages": {
+ "@biomejs/biome": ["@biomejs/biome@2.4.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.4.4", "@biomejs/cli-darwin-x64": "2.4.4", "@biomejs/cli-linux-arm64": "2.4.4", "@biomejs/cli-linux-arm64-musl": "2.4.4", "@biomejs/cli-linux-x64": "2.4.4", "@biomejs/cli-linux-x64-musl": "2.4.4", "@biomejs/cli-win32-arm64": "2.4.4", "@biomejs/cli-win32-x64": "2.4.4" }, "bin": { "biome": "bin/biome" } }, "sha512-tigwWS5KfJf0cABVd52NVaXyAVv4qpUXOWJ1rxFL8xF1RVoeS2q/LK+FHgYoKMclJCuRoCWAPy1IXaN9/mS61Q=="],
+
+ "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.4.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-jZ+Xc6qvD6tTH5jM6eKX44dcbyNqJHssfl2nnwT6vma6B1sj7ZLTGIk6N5QwVBs5xGN52r3trk5fgd3sQ9We9A=="],
+
+ "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.4.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-Dh1a/+W+SUCXhEdL7TiX3ArPTFCQKJTI1mGncZNWfO+6suk+gYA4lNyJcBB+pwvF49uw0pEbUS49BgYOY4hzUg=="],
+
+ "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.4.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-V/NFfbWhsUU6w+m5WYbBenlEAz8eYnSqRMDMAW3K+3v0tYVkNyZn8VU0XPxk/lOqNXLSCCrV7FmV/u3SjCBShg=="],
+
+ "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.4.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-+sPAXq3bxmFwhVFJnSwkSF5Rw2ZAJMH3MF6C9IveAEOdSpgajPhoQhbbAK12SehN9j2QrHpk4J/cHsa/HqWaYQ=="],
+
+ "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.4.4", "", { "os": "linux", "cpu": "x64" }, "sha512-R4+ZCDtG9kHArasyBO+UBD6jr/FcFCTH8QkNTOCu0pRJzCWyWC4EtZa2AmUZB5h3e0jD7bRV2KvrENcf8rndBg=="],
+
+ "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.4.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gGvFTGpOIQDb5CQ2VC0n9Z2UEqlP46c4aNgHmAMytYieTGEcfqhfCFnhs6xjt0S3igE6q5GLuIXtdQt3Izok+g=="],
+
+ "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.4.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-trzCqM7x+Gn832zZHgr28JoYagQNX4CZkUZhMUac2YxvvyDRLJDrb5m9IA7CaZLlX6lTQmADVfLEKP1et1Ma4Q=="],
+
+ "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.4.4", "", { "os": "win32", "cpu": "x64" }, "sha512-gnOHKVPFAAPrpoPt2t+Q6FZ7RPry/FDV3GcpU53P3PtLNnQjBmKyN2Vh/JtqXet+H4pme8CC76rScwdjDcT1/A=="],
+
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.3", "", { "os": "aix", "cpu": "ppc64" }, "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg=="],
"@esbuild/android-arm": ["@esbuild/android-arm@0.27.3", "", { "os": "android", "cpu": "arm" }, "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA=="],
@@ -136,6 +156,8 @@
"tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="],
+ "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
+
"undici-types": ["undici-types@7.18.2", "", {}, "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w=="],
"vite": ["vite@7.3.1", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA=="],
diff --git a/web/index.html b/web/index.html
index a58ad24..07caf99 100644
--- a/web/index.html
+++ b/web/index.html
@@ -24,8 +24,9 @@
-