import "./style.css";
import audioProcessorUrl from "./audio-processor.js?worker&url";

/**
 * VoicePaste — Main application logic.
 *
 * Modules:
 *   1. WebSocket client (token auth, reconnect)
 *   2. Audio pipeline (getUserMedia → AudioWorklet → resample → WS binary)
 *   3. Recording controls (touch/mouse, state machine)
 *   4. History (localStorage, tap to re-send)
 *   5. UI state management
 */
;(function () {
  "use strict";

  // ── Constants ──
  const TARGET_SAMPLE_RATE = 16000;
  const WS_RECONNECT_BASE = 1000;
  const WS_RECONNECT_MAX = 16000;
  const HISTORY_KEY = "voicepaste_history";
  const HISTORY_MAX = 50;

  // ── DOM refs ──
  const $ = (sel) => document.querySelector(sel);
  const statusEl = $("#status");
  const statusText = $("#status-text");
  const previewText = $("#preview-text");
  const previewBox = $("#preview");
  const micBtn = $("#mic-btn");
  const historyList = $("#history-list");
  const historyEmpty = $("#history-empty");
  const clearHistoryBtn = $("#clear-history");

  // ── State ──
  const state = {
    ws: null,
    connected: false,
    recording: false,
    // Monotonic token identifying the latest start attempt. A stopRecording()
    // call bumps it, which invalidates any startRecording() still awaiting
    // initAudio()/getUserMedia() — otherwise a quick tap (release before the
    // mic permission resolves) would leave the mic recording forever.
    startToken: 0,
    audioCtx: null,
    workletNode: null,
    stream: null,
    reconnectDelay: WS_RECONNECT_BASE,
    reconnectTimer: null,
  };

  // ── Utility ──

  /** Read the auth token from the page's query string ("" if absent). */
  function getToken() {
    const params = new URLSearchParams(location.search);
    return params.get("token") || "";
  }

  /** Format a ms timestamp as local "HH:MM". */
  function formatTime(ts) {
    const d = new Date(ts);
    const hh = String(d.getHours()).padStart(2, "0");
    const mm = String(d.getMinutes()).padStart(2, "0");
    return `${hh}:${mm}`;
  }

  // ── Resampler (linear interpolation, native rate → 16kHz 16-bit mono) ──

  /**
   * Downsample a Float32 PCM buffer from srcRate to 16kHz signed 16-bit.
   * @param {Float32Array} float32 - mono samples in [-1, 1]
   * @param {number} srcRate - source sample rate in Hz
   * @returns {Int16Array}
   */
  function resampleTo16kInt16(float32, srcRate) {
    const ratio = srcRate / TARGET_SAMPLE_RATE;
    const outLen = Math.floor(float32.length / ratio);
    const out = new Int16Array(outLen);
    for (let i = 0; i < outLen; i++) {
      const srcIdx = i * ratio;
      const lo = Math.floor(srcIdx);
      const hi = Math.min(lo + 1, float32.length - 1);
      const frac = srcIdx - lo;
      const sample = float32[lo] + frac * (float32[hi] - float32[lo]);
      // Scale to the Int16 range, then clamp to guard against |sample| > 1
      out[i] = Math.max(-32768, Math.min(32767, Math.round(sample * 32767)));
    }
    return out;
  }

  // ── WebSocket ──

  /** Build the ws(s)://host/ws URL, appending the auth token if present. */
  function wsUrl() {
    const proto = location.protocol === "https:" ? "wss:" : "ws:";
    const token = getToken();
    const q = token ? `?token=${encodeURIComponent(token)}` : "";
    return `${proto}//${location.host}/ws${q}`;
  }

  function setStatus(cls, text) {
    statusEl.className = `status ${cls}`;
    statusText.textContent = text;
  }

  /** Open the WebSocket; no-op if a connection already exists. */
  function connectWS() {
    if (state.ws) return;
    setStatus("connecting", "连接中…");
    const ws = new WebSocket(wsUrl());
    ws.binaryType = "arraybuffer";
    ws.onopen = () => {
      state.connected = true;
      state.reconnectDelay = WS_RECONNECT_BASE; // reset backoff on success
      setStatus("connected", "已连接");
      micBtn.disabled = false;
    };
    ws.onmessage = (e) => handleServerMsg(e.data);
    ws.onclose = () => {
      state.connected = false;
      state.ws = null;
      micBtn.disabled = true;
      if (state.recording) stopRecording();
      setStatus("disconnected", "已断开");
      scheduleReconnect();
    };
    // onclose fires after close(), so reconnect is scheduled there
    ws.onerror = () => ws.close();
    state.ws = ws;
  }

  /** Schedule a reconnect with exponential backoff (capped). */
  function scheduleReconnect() {
    clearTimeout(state.reconnectTimer);
    state.reconnectTimer = setTimeout(() => {
      connectWS();
    }, state.reconnectDelay);
    state.reconnectDelay = Math.min(state.reconnectDelay * 2, WS_RECONNECT_MAX);
  }

  /** Send a JSON control message if the socket is open (drop otherwise). */
  function sendJSON(obj) {
    if (state.ws && state.ws.readyState === WebSocket.OPEN) {
      state.ws.send(JSON.stringify(obj));
    }
  }

  /** Send raw Int16 PCM if the socket is open (drop otherwise). */
  function sendBinary(int16arr) {
    if (state.ws && state.ws.readyState === WebSocket.OPEN) {
      state.ws.send(int16arr.buffer);
    }
  }

  // ── Server message handler ──

  /**
   * Dispatch a text frame from the server. Binary frames and malformed
   * JSON are ignored.
   */
  function handleServerMsg(data) {
    if (typeof data !== "string") return;
    let msg;
    try {
      msg = JSON.parse(data);
    } catch {
      return; // tolerate garbage frames
    }
    switch (msg.type) {
      case "partial":
        setPreview(msg.text, false);
        break;
      case "final":
        setPreview(msg.text, true);
        if (msg.text) addHistory(msg.text);
        break;
      case "pasted":
        showToast("✅ 已粘贴");
        break;
      case "error":
        showToast(`❌ ${msg.message || "错误"}`);
        break;
    }
  }

  /**
   * Show transcription text in the preview box. Empty text restores the
   * placeholder; non-final text gets the "active" (in-progress) style.
   */
  function setPreview(text, isFinal) {
    if (!text) {
      previewText.textContent = "按住说话…";
      previewText.classList.add("placeholder");
      previewBox.classList.remove("active");
      return;
    }
    previewText.textContent = text;
    previewText.classList.remove("placeholder");
    previewBox.classList.toggle("active", !isFinal);
  }

  /** Show a transient toast message (auto-hides after 2s). */
  function showToast(msg) {
    // Lightweight toast — reuse or create
    let toast = $("#toast");
    if (!toast) {
      toast = document.createElement("div");
      toast.id = "toast";
      toast.style.cssText =
        "position:fixed;bottom:calc(100px + var(--safe-bottom,0px));left:50%;" +
        "transform:translateX(-50%);background:#222;color:#eee;padding:8px 18px;" +
        "border-radius:20px;font-size:14px;z-index:999;opacity:0;transition:opacity .3s;";
      document.body.appendChild(toast);
    }
    toast.textContent = msg;
    toast.style.opacity = "1";
    clearTimeout(toast._timer);
    toast._timer = setTimeout(() => {
      toast.style.opacity = "0";
    }, 2000);
  }

  // ── Audio pipeline ──

  /** Lazily create the AudioContext and load the worklet module (once). */
  async function initAudio() {
    if (state.audioCtx) return;
    // Use device native sample rate — we resample to 16kHz in software
    const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    // Chrome requires resume() after user gesture
    if (audioCtx.state === "suspended") {
      await audioCtx.resume();
    }
    await audioCtx.audioWorklet.addModule(audioProcessorUrl);
    state.audioCtx = audioCtx;
  }

  /**
   * Begin capturing mic audio and streaming it to the server.
   *
   * Holds a start token across every await; if stopRecording() runs while we
   * are still waiting (user released the button before getUserMedia resolved),
   * the token no longer matches and we abort, releasing anything acquired.
   */
  async function startRecording() {
    if (state.recording) return;
    const token = ++state.startToken;
    let stream = null;
    try {
      await initAudio();
      if (token !== state.startToken) return; // stop requested while waiting

      // Ensure AudioContext is running (may suspend between recordings)
      if (state.audioCtx.state === "suspended") {
        await state.audioCtx.resume();
        if (token !== state.startToken) return;
      }

      stream = await navigator.mediaDevices.getUserMedia({
        audio: { echoCancellation: true, noiseSuppression: true, channelCount: 1 },
      });
      if (token !== state.startToken) {
        // Released before permission resolved — don't leave the mic open
        stream.getTracks().forEach((t) => t.stop());
        return;
      }
      state.stream = stream;

      const source = state.audioCtx.createMediaStreamSource(stream);
      const worklet = new AudioWorkletNode(state.audioCtx, "audio-processor");
      worklet.port.onmessage = (e) => {
        if (e.data.type === "audio") {
          const int16 = resampleTo16kInt16(e.data.samples, e.data.sampleRate);
          sendBinary(int16);
        }
      };
      source.connect(worklet);
      worklet.port.postMessage({ command: "start" });
      // Don't connect worklet to destination (no playback)
      state.workletNode = worklet;
      state.recording = true;
      sendJSON({ type: "start" });
      micBtn.classList.add("recording");
      setPreview("", false);
    } catch (err) {
      if (stream) stream.getTracks().forEach((t) => t.stop());
      showToast(`麦克风错误: ${err.message}`);
    }
  }

  /** Stop capturing: tear down worklet + mic tracks, notify the server. */
  function stopRecording() {
    // Invalidate any startRecording() still awaiting permission/worklet load
    state.startToken++;
    if (!state.recording) return;
    state.recording = false;
    // Stop worklet
    if (state.workletNode) {
      state.workletNode.port.postMessage({ command: "stop" });
      state.workletNode.disconnect();
      state.workletNode = null;
    }
    // Stop mic stream
    if (state.stream) {
      state.stream.getTracks().forEach((t) => t.stop());
      state.stream = null;
    }
    sendJSON({ type: "stop" });
    micBtn.classList.remove("recording");
  }

  // ── History (localStorage) ──

  /** Load history entries; any parse/storage failure yields []. */
  function loadHistory() {
    try {
      return JSON.parse(localStorage.getItem(HISTORY_KEY)) || [];
    } catch {
      return [];
    }
  }

  function saveHistory(items) {
    localStorage.setItem(HISTORY_KEY, JSON.stringify(items));
  }

  /** Prepend a transcript to history (capped at HISTORY_MAX) and re-render. */
  function addHistory(text) {
    const items = loadHistory();
    items.unshift({ text, ts: Date.now() });
    if (items.length > HISTORY_MAX) items.length = HISTORY_MAX;
    saveHistory(items);
    renderHistory();
  }

  function clearHistory() {
    localStorage.removeItem(HISTORY_KEY);
    renderHistory();
  }

  /** Rebuild the history list; tapping an entry re-sends it as a paste. */
  function renderHistory() {
    const items = loadHistory();
    historyList.innerHTML = "";
    if (!items.length) {
      historyEmpty.style.display = "";
      return;
    }
    historyEmpty.style.display = "none";
    for (const item of items) {
      const li = document.createElement("li");
      // textContent renders untrusted text safely — no HTML escaping needed
      li.textContent = `${item.text}${formatTime(item.ts)}`;
      li.addEventListener("click", () => {
        sendJSON({ type: "paste", text: item.text });
        showToast("发送粘贴…");
      });
      historyList.appendChild(li);
    }
  }

  // ── Event bindings ──

  /** Hold-to-talk: touch events for mobile, mouse fallback for desktop. */
  function bindMicButton() {
    // Touch events (mobile primary)
    micBtn.addEventListener(
      "touchstart",
      (e) => {
        e.preventDefault();
        startRecording();
      },
      { passive: false }
    );
    micBtn.addEventListener(
      "touchend",
      (e) => {
        e.preventDefault();
        stopRecording();
      },
      { passive: false }
    );
    micBtn.addEventListener(
      "touchcancel",
      (e) => {
        e.preventDefault();
        stopRecording();
      },
      { passive: false }
    );
    // Mouse fallback (desktop testing)
    micBtn.addEventListener("mousedown", (e) => {
      if (e.button !== 0) return; // left button only
      startRecording();
    });
    micBtn.addEventListener("mouseup", () => stopRecording());
    micBtn.addEventListener("mouseleave", () => {
      if (state.recording) stopRecording();
    });
  }

  // ── Init ──
  function init() {
    micBtn.disabled = true; // enabled once the WebSocket connects
    bindMicButton();
    if (clearHistoryBtn) {
      clearHistoryBtn.addEventListener("click", clearHistory);
    }
    renderHistory();
    connectWS();
  }

  if (document.readyState === "loading") {
    document.addEventListener("DOMContentLoaded", init);
  } else {
    init();
  }
})();