Compare commits
113 Commits
2048f73155
...
ux
| Author | SHA1 | Date | |
|---|---|---|---|
| 250eba6927 | |||
| 0f344b5847 | |||
| 403eec3e12 | |||
| 84c935d4bd | |||
| e5fed81db5 | |||
| e3e3caed6a | |||
| b5490085bd | |||
| 713ee5b79f | |||
| d7d6b06e35 | |||
| 1997655875 | |||
| 9a2bd5c43a | |||
| 42bc8605b4 | |||
| 04ff718f47 | |||
| da82403f7f | |||
| 4a5dd437fa | |||
| 1945417f28 | |||
| 8be32bf15b | |||
| 1110edc974 | |||
| a5fd9c1833 | |||
| 3d27f8ccfa | |||
| 4d64cfb93d | |||
| 2651ec0835 | |||
| 122dead202 | |||
| ec41a4cfc7 | |||
| 86754f73c1 | |||
| 9296ab31e4 | |||
| 72d1727eb6 | |||
| aabd60e619 | |||
| cdb3298f6d | |||
| 060ddd8e12 | |||
| b50d2eaf10 | |||
| 46e2c94faf | |||
| b1062a5aed | |||
| b193759e90 | |||
| eb941c06c0 | |||
| eb2f6554b2 | |||
| 58d57fa148 | |||
| 509860bba8 | |||
| 4e7c4e1aa5 | |||
| 8261409d7d | |||
| d2eb98d612 | |||
| 9d8a38a4c4 | |||
| cd7448c3b3 | |||
| 58d7a453b6 | |||
| afc3b66efa | |||
| 3c97e9c3eb | |||
| 58620b4d4b | |||
| 04b8dedb3e | |||
| 02bdfffe79 | |||
| 0cd8b57d24 | |||
| 0438b52c93 | |||
| fd9478d64e | |||
| 73614204f7 | |||
| 61e7a1b621 | |||
| 5ccde0a121 | |||
| 0553347bfe | |||
| 52af81b079 | |||
| 527c1d1020 | |||
| 4ed961760a | |||
| c54b7d27a6 | |||
| d478b94c13 | |||
| 908b369732 | |||
| 51724a7936 | |||
| 93a2519012 | |||
| 5edab0ba1d | |||
| a451e08209 | |||
| e76a03d0f4 | |||
| aa1e2c81c6 | |||
| 7e2621ae37 | |||
| 94a9122f34 | |||
| 275c8e4795 | |||
| 8245abe217 | |||
| 627e6f9dd3 | |||
| e59e085217 | |||
| cd9826ded3 | |||
| 2efc57d9ee | |||
| 1f5940438a | |||
| 0bab6372ac | |||
| 5f0c9d33cb | |||
| 73982939a8 | |||
| 10c2d61523 | |||
| 18ce05854a | |||
| 7eccef5d8f | |||
| 41667cb33b | |||
| 00c944e1b5 | |||
| f9edfd0058 | |||
| 9aea89e16d | |||
| 26b74b25f2 | |||
| ccf220fc29 | |||
| a585069cdc | |||
| b149cc5dc0 | |||
| 9f38636d76 | |||
| 63906ec09b | |||
| 8c4e4ad150 | |||
| e171db8196 | |||
| dac6bb1643 | |||
| 8c0ea632d7 | |||
| db23ee42fc | |||
| 0784546e50 | |||
| 2fe3e15659 | |||
| ed02993350 | |||
| e4e5ff2211 | |||
| d69a573a33 | |||
| 6cc1bc6834 | |||
| 894fd17d1a | |||
| 888f20fdab | |||
| 7318600e20 | |||
| e8e473b357 | |||
| 41d97ca312 | |||
| cfe7de2a70 | |||
| b87de26e17 | |||
| b8d38872ad | |||
| 7450c685d3 |
5
.gitignore
vendored
5
.gitignore
vendored
@@ -9,6 +9,11 @@
|
|||||||
# Bun build
|
# Bun build
|
||||||
*.bun-build
|
*.bun-build
|
||||||
|
|
||||||
|
# SQLite database files
|
||||||
|
*.db
|
||||||
|
*.db-wal
|
||||||
|
*.db-shm
|
||||||
|
|
||||||
# Turborepo
|
# Turborepo
|
||||||
.turbo/
|
.turbo/
|
||||||
|
|
||||||
|
|||||||
@@ -1,798 +0,0 @@
|
|||||||
# Electrobun Desktop: Production Mode via Child Process Architecture
|
|
||||||
|
|
||||||
## TL;DR
|
|
||||||
|
|
||||||
> **Quick Summary**: Redesign the Electrobun desktop app to support production mode by spawning the TanStack Start server as a child process. Currently only dev mode works (hardcoded `localhost:3000`). The desktop will detect dev/prod mode, spawn the server with `PORT=0` in prod, parse the actual port from stdout, and open the BrowserWindow.
|
|
||||||
>
|
|
||||||
> **Deliverables**:
|
|
||||||
> - Rewritten `apps/desktop/src/bun/index.ts` with dev/prod mode support
|
|
||||||
> - Updated `apps/desktop/electrobun.config.ts` with `build.copy` and platform configs
|
|
||||||
> - Cross-workspace build dependency in turbo pipeline
|
|
||||||
> - Updated `apps/desktop/AGENTS.md` reflecting new architecture
|
|
||||||
>
|
|
||||||
> **Estimated Effort**: Medium
|
|
||||||
> **Parallel Execution**: YES - 2 waves
|
|
||||||
> **Critical Path**: Tasks 1,2,3 (parallel) → Task 4 → Task 5
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Context
|
|
||||||
|
|
||||||
### Original Request
|
|
||||||
Redesign the Electrobun desktop app to support production mode. The desktop app should spawn the TanStack Start server as a child process, detect dev vs prod mode at runtime, use system-assigned ports for security, and handle server lifecycle (crash, quit).
|
|
||||||
|
|
||||||
### Confirmed Decisions
|
|
||||||
- **Architecture**: Desktop spawns server as child process via `Bun.spawn`
|
|
||||||
- **Server artifact**: `.output/server/index.mjs` (not compiled binary) — Electrobun already bundles Bun
|
|
||||||
- **Port strategy**: `PORT=0` (system-assigned), `HOST=127.0.0.1`
|
|
||||||
- **Dev/Prod detection**: `process.env.ELECTROBUN_BUILD_ENV` (see Defaults Applied below)
|
|
||||||
- **Target platforms**: All (Linux, macOS, Windows)
|
|
||||||
- **DATABASE_URL**: Pass-through via env var, no special handling
|
|
||||||
- **Crash handling**: MVP — log error to stderr, exit process
|
|
||||||
|
|
||||||
### Research Findings
|
|
||||||
|
|
||||||
**Electrobun APIs (verified from source code):**
|
|
||||||
- `build.copy` supports paths outside the project directory (e.g., `../server/.output`). Source paths are resolved relative to the project root. Destinations map into `Resources/app/` in the bundle.
|
|
||||||
- `PATHS` exported from `electrobun/bun` provides `RESOURCES_FOLDER` (absolute path to `Resources/`) and `VIEWS_FOLDER` (`Resources/app/views/`).
|
|
||||||
- `process.execPath` in a bundled Electrobun app points to the bundled Bun binary.
|
|
||||||
- `Electrobun.events.on('before-quit', callback)` fires before app quit. Callback receives an event with `response({ allow: false })` to cancel quit.
|
|
||||||
- `ELECTROBUN_BUILD_ENV` is set by the Electrobun CLI: `"dev"` for `electrobun dev`, `"stable"` for `electrobun build --env=stable`.
|
|
||||||
|
|
||||||
**Server startup behavior (verified from built output):**
|
|
||||||
- `.output/server/index.mjs` uses `Bun.serve` via the `h3+srvx` adapter (Nitro bun preset).
|
|
||||||
- Startup log format: `➜ Listening on: http://<address>:<port>/`
|
|
||||||
- The log uses the actual assigned address/port, not the requested one. So `PORT=0` will log the real port.
|
|
||||||
- `DATABASE_URL` is validated at startup via Zod (`@t3-oss/env-core`). Missing = immediate crash.
|
|
||||||
- The `.output/server/` directory contains `index.mjs` plus `_libs/` with bundled dependencies.
|
|
||||||
|
|
||||||
**Turbo pipeline:**
|
|
||||||
- Root `turbo.json` has `build.dependsOn: ["^build"]` which only builds workspace *dependencies*.
|
|
||||||
- Desktop does NOT depend on server in `package.json`, so `^build` won't trigger server build.
|
|
||||||
- Need explicit cross-workspace dependency via desktop's `turbo.json`.
|
|
||||||
|
|
||||||
### Metis Review
|
|
||||||
**Identified Gaps** (addressed):
|
|
||||||
- Dev/prod detection mechanism: Switched from custom `ELECTROBUN_DEV` to built-in `ELECTROBUN_BUILD_ENV`
|
|
||||||
- Server startup timeout: Added explicit timeout with error reporting
|
|
||||||
- Port parsing failure: Plan includes fallback and error handling
|
|
||||||
- Server crash during runtime: Watching `subprocess.exited` promise
|
|
||||||
- `cwd` for spawned server: Must set to server directory for relative import resolution
|
|
||||||
- Cross-platform considerations: `ELECTROBUN_BUILD_ENV` works everywhere (no `cross-env` needed)
|
|
||||||
|
|
||||||
### Unknowns Resolved
|
|
||||||
|
|
||||||
| Unknown | Resolution |
|
|
||||||
|---------|------------|
|
|
||||||
| Does `build.copy` support paths outside project? | **YES** — uses `cpSync` with source resolved from project root. `../server/.output` works. |
|
|
||||||
| Runtime API for resolving bundled resource paths? | **`PATHS.RESOURCES_FOLDER`** from `electrobun/bun`. Copied files land in `Resources/app/{dest}/`. |
|
|
||||||
| Does Nitro log actual port with PORT=0? | **YES** — format: `➜ Listening on: http://<addr>:<port>/` via h3+srvx adapter. |
|
|
||||||
| How does Electrobun detect dev mode? | **`ELECTROBUN_BUILD_ENV`** env var set by CLI. Values: `dev`, `canary`, `stable`. |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Work Objectives
|
|
||||||
|
|
||||||
### Core Objective
|
|
||||||
Enable the Electrobun desktop app to run in production mode by spawning the TanStack Start server as a managed child process, while preserving existing dev mode behavior.
|
|
||||||
|
|
||||||
### Concrete Deliverables
|
|
||||||
- `apps/desktop/src/bun/index.ts` — Complete rewrite with dual-mode support
|
|
||||||
- `apps/desktop/electrobun.config.ts` — `build.copy` + all-platform configs
|
|
||||||
- `apps/desktop/turbo.json` — Cross-workspace build dependency
|
|
||||||
- `apps/desktop/AGENTS.md` — Accurate documentation of new architecture
|
|
||||||
|
|
||||||
### Definition of Done
|
|
||||||
- [ ] `bun typecheck` passes from monorepo root (zero errors)
|
|
||||||
- [ ] `bun build` from root succeeds: server builds first, then desktop bundles server output
|
|
||||||
- [ ] `bun dev` from root still starts both apps (dev mode preserved)
|
|
||||||
- [ ] Desktop `index.ts` has zero hardcoded URLs (all dynamic)
|
|
||||||
|
|
||||||
### Must Have
|
|
||||||
- Dev mode: poll external `localhost:3000`, open window when ready (existing behavior, refactored)
|
|
||||||
- Prod mode: spawn server via `Bun.spawn`, parse port from stdout, open window
|
|
||||||
- Server bound to `127.0.0.1` only (no network exposure)
|
|
||||||
- `PORT=0` for system-assigned port (no conflicts)
|
|
||||||
- Server process killed on app quit (via `before-quit` event)
|
|
||||||
- Server crash detection (watch `exited` promise, log error, exit app)
|
|
||||||
- Startup timeout with clear error message
|
|
||||||
- Server `cwd` set to its own directory (for relative import resolution)
|
|
||||||
- `DATABASE_URL` passed through from parent environment
|
|
||||||
|
|
||||||
### Must NOT Have (Guardrails)
|
|
||||||
- No hardcoded port numbers (not even 3000 — use a named constant `DEV_SERVER_URL`)
|
|
||||||
- No `as any`, `@ts-ignore`, or `@ts-expect-error`
|
|
||||||
- No empty catch blocks — always handle or re-throw
|
|
||||||
- No `npm`, `npx`, `node` — Bun only
|
|
||||||
- No manual `useMemo`/`useCallback` (not relevant here, but per project rules)
|
|
||||||
- No suppressed type errors — fix them properly
|
|
||||||
- No custom env var for dev detection — use built-in `ELECTROBUN_BUILD_ENV`
|
|
||||||
- No compiled server binary — use `.output/server/index.mjs` with bundled Bun
|
|
||||||
- Do NOT edit `apps/server/` — only `apps/desktop/` files change
|
|
||||||
- Do NOT add `@furtherverse/server` as a package dependency of desktop (use turbo cross-workspace dependency instead)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Verification Strategy
|
|
||||||
|
|
||||||
> **UNIVERSAL RULE: ZERO HUMAN INTERVENTION**
|
|
||||||
>
|
|
||||||
> ALL tasks in this plan MUST be verifiable WITHOUT any human action.
|
|
||||||
> Every criterion is verified by running a command or using a tool.
|
|
||||||
|
|
||||||
### Test Decision
|
|
||||||
- **Infrastructure exists**: NO (no test framework in this project)
|
|
||||||
- **Automated tests**: NO (per project state — `AGENTS.md` says "No test framework configured yet")
|
|
||||||
- **Framework**: None
|
|
||||||
- **Agent-Executed QA**: ALWAYS (mandatory for all tasks)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Execution Strategy
|
|
||||||
|
|
||||||
### Parallel Execution Waves
|
|
||||||
|
|
||||||
```
|
|
||||||
Wave 1 (Start Immediately — all independent, different files):
|
|
||||||
├── Task 1: Update electrobun.config.ts (build.copy + platform configs)
|
|
||||||
├── Task 2: Update turbo.json (cross-workspace build dependency)
|
|
||||||
└── Task 3: Rewrite index.ts (complete dev/prod mode implementation)
|
|
||||||
|
|
||||||
Wave 2 (After Wave 1 — needs final state of all files; note: Task 5 depends on Task 4 per the Dependency Matrix, so run Task 4 first, then Task 5):
|
|
||||||
├── Task 4: Rewrite AGENTS.md (documentation reflecting new architecture)
|
|
||||||
└── Task 5: End-to-end verification (typecheck + build pipeline)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Dependency Matrix
|
|
||||||
|
|
||||||
| Task | Depends On | Blocks | Can Parallelize With |
|
|
||||||
|------|------------|--------|---------------------|
|
|
||||||
| 1 | None | 4, 5 | 2, 3 |
|
|
||||||
| 2 | None | 4, 5 | 1, 3 |
|
|
||||||
| 3 | None | 4, 5 | 1, 2 |
|
|
||||||
| 4 | 1, 2, 3 | 5 | None |
|
|
||||||
| 5 | 1, 2, 3, 4 | None | None (final) |
|
|
||||||
|
|
||||||
### Agent Dispatch Summary
|
|
||||||
|
|
||||||
| Wave | Tasks | Recommended Agents |
|
|
||||||
|------|-------|-------------------|
|
|
||||||
| 1 | 1, 2, 3 | Tasks 1,2: `delegate_task(category="quick")`. Task 3: `delegate_task(category="unspecified-high")` |
|
|
||||||
| 2 | 4, 5 | Task 4: `delegate_task(category="writing")`. Task 5: `delegate_task(category="quick")` |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## TODOs
|
|
||||||
|
|
||||||
- [ ] 1. Update `apps/desktop/electrobun.config.ts` — Add build.copy and platform configs
|
|
||||||
|
|
||||||
**What to do**:
|
|
||||||
- Add `build.copy` to include the server's Nitro build output in the desktop bundle:
|
|
||||||
```typescript
|
|
||||||
copy: {
|
|
||||||
'../server/.output': 'server-output',
|
|
||||||
}
|
|
||||||
```
|
|
||||||
This copies `apps/server/.output/` (the entire Nitro build output) into `Resources/app/server-output/` in the Electrobun bundle. The full server entry point will be at `Resources/app/server-output/server/index.mjs`.
|
|
||||||
- Add `macOS` platform config block (currently only `linux` exists):
|
|
||||||
```typescript
|
|
||||||
macOS: {
|
|
||||||
bundleCEF: true,
|
|
||||||
}
|
|
||||||
```
|
|
||||||
- Add `windows` platform config block:
|
|
||||||
```typescript
|
|
||||||
windows: {
|
|
||||||
bundleCEF: true,
|
|
||||||
}
|
|
||||||
```
|
|
||||||
- Verify the exact property names by checking Electrobun's `ElectrobunConfig` type definition. The `linux` block already uses `bundleCEF: true`, so follow the same pattern for other platforms. If the type doesn't support `macOS`/`windows` yet, skip those and leave a `// TODO:` comment explaining what's needed.
|
|
||||||
- Preserve existing config values exactly (app name, identifier, version, bun entrypoint, linux config).
|
|
||||||
|
|
||||||
**Must NOT do**:
|
|
||||||
- Do not change the app name, identifier, or version
|
|
||||||
- Do not change the bun entrypoint path
|
|
||||||
- Do not remove the existing `linux` config
|
|
||||||
- Do not add dependencies or scripts
|
|
||||||
|
|
||||||
**Recommended Agent Profile**:
|
|
||||||
- **Category**: `quick`
|
|
||||||
- Reason: Single file, small config change, clear specification
|
|
||||||
- **Skills**: `[]`
|
|
||||||
- No specialized skills needed — straightforward TypeScript config edit
|
|
||||||
- **Skills Evaluated but Omitted**:
|
|
||||||
- `frontend-ui-ux`: No UI work involved
|
|
||||||
|
|
||||||
**Parallelization**:
|
|
||||||
- **Can Run In Parallel**: YES
|
|
||||||
- **Parallel Group**: Wave 1 (with Tasks 2, 3)
|
|
||||||
- **Blocks**: Tasks 4, 5
|
|
||||||
- **Blocked By**: None (can start immediately)
|
|
||||||
|
|
||||||
**References**:
|
|
||||||
|
|
||||||
**Pattern References** (existing code to follow):
|
|
||||||
- `apps/desktop/electrobun.config.ts` — Current config structure. The `linux.bundleCEF: true` pattern should be replicated for other platforms. The `build.bun.entrypoint` key shows where build config lives.
|
|
||||||
|
|
||||||
**API/Type References** (contracts to implement against):
|
|
||||||
- The `ElectrobunConfig` type from `electrobun` — imported via `import type { ElectrobunConfig } from 'electrobun'`. Check its definition (likely in `node_modules/electrobun/`) to verify exact property names for `copy`, `macOS`, `windows`.
|
|
||||||
|
|
||||||
**External References**:
|
|
||||||
- Electrobun `build.copy` syntax: copies source (relative to project root) into `Resources/app/{dest}/` in the bundle. Uses `cpSync` with `dereference: true`.
|
|
||||||
|
|
||||||
**WHY Each Reference Matters**:
|
|
||||||
- `electrobun.config.ts`: You're editing this file — need to know its current shape to preserve existing values
|
|
||||||
- `ElectrobunConfig` type: Must match the type definition exactly — don't guess property names
|
|
||||||
|
|
||||||
**Acceptance Criteria**:
|
|
||||||
- [ ] `build.copy` key exists with `'../server/.output': 'server-output'` mapping
|
|
||||||
- [ ] Platform configs added for all three platforms (or TODO comments if types don't support them)
|
|
||||||
- [ ] Existing config values unchanged (app.name = 'Desktop', etc.)
|
|
||||||
- [ ] File passes `bun typecheck` (no type errors)
|
|
||||||
|
|
||||||
**Agent-Executed QA Scenarios:**
|
|
||||||
|
|
||||||
```
|
|
||||||
Scenario: Config file is valid TypeScript with correct types
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: None
|
|
||||||
Steps:
|
|
||||||
1. Run: bun typecheck (from apps/desktop/)
|
|
||||||
2. Assert: Exit code 0
|
|
||||||
3. Assert: No errors mentioning electrobun.config.ts
|
|
||||||
Expected Result: TypeScript compilation succeeds
|
|
||||||
Evidence: Terminal output captured
|
|
||||||
|
|
||||||
Scenario: build.copy key has correct structure
|
|
||||||
Tool: Bash (grep)
|
|
||||||
Preconditions: File has been edited
|
|
||||||
Steps:
|
|
||||||
1. Read apps/desktop/electrobun.config.ts
|
|
||||||
2. Assert: Contains '../server/.output'
|
|
||||||
3. Assert: Contains 'server-output'
|
|
||||||
4. Assert: File still contains 'satisfies ElectrobunConfig'
|
|
||||||
Expected Result: Config has copy mapping and type annotation
|
|
||||||
Evidence: File contents
|
|
||||||
```
|
|
||||||
|
|
||||||
**Commit**: YES (groups with 2)
|
|
||||||
- Message: `feat(desktop): add build.copy for server bundle and platform configs`
|
|
||||||
- Files: `apps/desktop/electrobun.config.ts`
|
|
||||||
- Pre-commit: `bun typecheck` (from `apps/desktop/`)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
- [ ] 2. Update `apps/desktop/turbo.json` — Add cross-workspace build dependency
|
|
||||||
|
|
||||||
**What to do**:
|
|
||||||
- Add `dependsOn` to the existing `build` task to ensure the server builds before the desktop:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tasks": {
|
|
||||||
"build": {
|
|
||||||
"dependsOn": ["@furtherverse/server#build"],
|
|
||||||
"outputs": ["build/**", "artifacts/**"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
- This tells Turbo: "before running `build` for `@furtherverse/desktop`, first run `build` for `@furtherverse/server`."
|
|
||||||
- This ensures `apps/server/.output/` exists when `electrobun build` runs and tries to `build.copy` from `../server/.output`.
|
|
||||||
- Preserve the existing `outputs` array exactly.
|
|
||||||
|
|
||||||
**Must NOT do**:
|
|
||||||
- Do not modify the root `turbo.json` — only `apps/desktop/turbo.json`
|
|
||||||
- Do not remove existing `outputs`
|
|
||||||
- Do not add other tasks or change other config
|
|
||||||
|
|
||||||
**Recommended Agent Profile**:
|
|
||||||
- **Category**: `quick`
|
|
||||||
- Reason: Single file, one-line JSON change
|
|
||||||
- **Skills**: `[]`
|
|
||||||
- No specialized skills needed
|
|
||||||
- **Skills Evaluated but Omitted**:
|
|
||||||
- `git-master`: Commit will be grouped with Task 1
|
|
||||||
|
|
||||||
**Parallelization**:
|
|
||||||
- **Can Run In Parallel**: YES
|
|
||||||
- **Parallel Group**: Wave 1 (with Tasks 1, 3)
|
|
||||||
- **Blocks**: Tasks 4, 5
|
|
||||||
- **Blocked By**: None (can start immediately)
|
|
||||||
|
|
||||||
**References**:
|
|
||||||
|
|
||||||
**Pattern References** (existing code to follow):
|
|
||||||
- `apps/desktop/turbo.json` — Current file with `build.outputs` already defined. You're adding `dependsOn` alongside it.
|
|
||||||
- `turbo.json` (root) — Shows existing turbo patterns like `build.dependsOn: ["^build"]`. The root already uses `^build` for workspace dependencies, but since desktop doesn't list server as a package dependency, we need an explicit cross-workspace reference.
|
|
||||||
|
|
||||||
**API/Type References**:
|
|
||||||
- Turbo `dependsOn` syntax: `"@furtherverse/server#build"` means "run the `build` task in the `@furtherverse/server` workspace".
|
|
||||||
|
|
||||||
**Documentation References**:
|
|
||||||
- `apps/server/package.json` — The package name is `@furtherverse/server` (verify this is the exact name used in the `dependsOn` reference).
|
|
||||||
|
|
||||||
**WHY Each Reference Matters**:
|
|
||||||
- `apps/desktop/turbo.json`: You're editing this file — preserve existing outputs
|
|
||||||
- `apps/server/package.json`: Need exact package name for the cross-workspace reference
|
|
||||||
- Root `turbo.json`: Context for existing turbo patterns in this project
|
|
||||||
|
|
||||||
**Acceptance Criteria**:
|
|
||||||
- [ ] `apps/desktop/turbo.json` has `dependsOn: ["@furtherverse/server#build"]` in the build task
|
|
||||||
- [ ] Existing `outputs` array is preserved
|
|
||||||
- [ ] Valid JSON (no syntax errors)
|
|
||||||
|
|
||||||
**Agent-Executed QA Scenarios:**
|
|
||||||
|
|
||||||
```
|
|
||||||
Scenario: turbo.json is valid JSON with correct structure
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: File has been edited
|
|
||||||
Steps:
|
|
||||||
1. Run: bun -e "JSON.parse(require('fs').readFileSync('apps/desktop/turbo.json', 'utf8'))"
|
|
||||||
2. Assert: Exit code 0 (valid JSON)
|
|
||||||
3. Read the file and verify structure contains both dependsOn and outputs
|
|
||||||
Expected Result: Valid JSON with both keys present
|
|
||||||
Evidence: Terminal output captured
|
|
||||||
|
|
||||||
Scenario: Turbo resolves the cross-workspace dependency
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: turbo.json updated
|
|
||||||
Steps:
|
|
||||||
1. Run: bunx turbo build --dry-run --filter=@furtherverse/desktop (from monorepo root)
|
|
||||||
2. Assert: Output shows @furtherverse/server#build runs BEFORE @furtherverse/desktop#build
|
|
||||||
Expected Result: Server build is listed as a dependency in the dry-run output
|
|
||||||
Evidence: Terminal output showing task execution order
|
|
||||||
```
|
|
||||||
|
|
||||||
**Commit**: YES (groups with 1)
|
|
||||||
- Message: `feat(desktop): add build.copy for server bundle and platform configs`
|
|
||||||
- Files: `apps/desktop/turbo.json`
|
|
||||||
- Pre-commit: Valid JSON check
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
- [ ] 3. Rewrite `apps/desktop/src/bun/index.ts` — Complete dev/prod mode implementation
|
|
||||||
|
|
||||||
**What to do**:
|
|
||||||
|
|
||||||
This is the core task. Completely rewrite `index.ts` to support both dev and prod modes. The new file should have this structure:
|
|
||||||
|
|
||||||
**A. Imports and Constants**:
|
|
||||||
```typescript
|
|
||||||
import Electrobun, { BrowserWindow } from 'electrobun/bun'
|
|
||||||
// Import PATHS — verify exact import syntax from electrobun/bun type definitions
|
|
||||||
// It may be: import { PATHS } from 'electrobun/bun'
|
|
||||||
// Or it may be on the Electrobun default export: Electrobun.PATHS
|
|
||||||
// CHECK the type definitions in node_modules/electrobun/ before writing
|
|
||||||
import { join, dirname } from 'path'
|
|
||||||
|
|
||||||
const DEV_SERVER_URL = 'http://localhost:3000'
|
|
||||||
const SERVER_READY_TIMEOUT_MS = 30_000
|
|
||||||
const PORT_PATTERN = /Listening on:?\s*https?:\/\/[^\s:]+:(\d+)/
|
|
||||||
```
|
|
||||||
|
|
||||||
**B. `isDev()` function**:
|
|
||||||
- Check `process.env.ELECTROBUN_BUILD_ENV === 'dev'`
|
|
||||||
- If `ELECTROBUN_BUILD_ENV` is not set, default to `true` (dev mode) — safe fallback
|
|
||||||
- Return a boolean
|
|
||||||
|
|
||||||
**C. `getServerEntryPath()` function**:
|
|
||||||
- Use `PATHS.RESOURCES_FOLDER` (or equivalent) to resolve the bundled server entry
|
|
||||||
- Path: `join(PATHS.RESOURCES_FOLDER, 'app', 'server-output', 'server', 'index.mjs')`
|
|
||||||
- **IMPORTANT**: Verify `PATHS.RESOURCES_FOLDER` points to `Resources/` and that `build.copy` destinations land in `Resources/app/`. If the pathing is different, adjust accordingly. The executor MUST verify by checking Electrobun's source or type definitions.
|
|
||||||
|
|
||||||
**D. `waitForServer(url, timeoutMs)` function** (preserved from current code):
|
|
||||||
- Polls a URL with `fetch` HEAD requests
|
|
||||||
- Returns `true` when server responds with `response.ok`
|
|
||||||
- Returns `false` on timeout
|
|
||||||
- Uses `Bun.sleep(100)` between attempts
|
|
||||||
- Catches fetch errors silently (server not up yet)
|
|
||||||
|
|
||||||
**E. `spawnServer()` function** (NEW — the critical piece):
|
|
||||||
- Returns a `Promise<{ process: Subprocess; url: string }>`
|
|
||||||
- Implementation:
|
|
||||||
1. Resolve the server entry path via `getServerEntryPath()`
|
|
||||||
2. Resolve the server directory via `dirname(serverEntryPath)` — used as `cwd`
|
|
||||||
3. Spawn with `Bun.spawn`:
|
|
||||||
```typescript
|
|
||||||
const serverProc = Bun.spawn([process.execPath, serverEntryPath], {
|
|
||||||
cwd: serverDir,
|
|
||||||
env: {
|
|
||||||
...process.env,
|
|
||||||
PORT: '0',
|
|
||||||
HOST: '127.0.0.1',
|
|
||||||
},
|
|
||||||
stdout: 'pipe',
|
|
||||||
stderr: 'pipe',
|
|
||||||
})
|
|
||||||
```
|
|
||||||
4. Read stdout as a stream to find the port:
|
|
||||||
- Use `serverProc.stdout` (a `ReadableStream<Uint8Array>`)
|
|
||||||
- Create a reader, accumulate chunks into a text buffer
|
|
||||||
- Test buffer against `PORT_PATTERN` regex after each chunk
|
|
||||||
- When match found: extract port, resolve promise with `` { process: serverProc, url: `http://127.0.0.1:${port}` } `` (note: the URL must be a template literal in backticks — with single quotes, `${port}` would not be interpolated)
|
|
||||||
5. Implement a timeout:
|
|
||||||
- Use `setTimeout` to reject the promise after `SERVER_READY_TIMEOUT_MS`
|
|
||||||
- On timeout, kill the server process before rejecting
|
|
||||||
6. Handle early exit:
|
|
||||||
- If stdout ends (stream done) before port is found, reject with error
|
|
||||||
- Include any stderr output in the error message for debugging
|
|
||||||
|
|
||||||
**F. `main()` async function**:
|
|
||||||
- Log startup message
|
|
||||||
- Branch on `isDev()`:
|
|
||||||
- **Dev mode**:
|
|
||||||
1. Log: "Dev mode: waiting for external server..."
|
|
||||||
2. Call `waitForServer(DEV_SERVER_URL, SERVER_READY_TIMEOUT_MS)` — pass the timeout explicitly to match the `waitForServer(url, timeoutMs)` signature defined above
|
|
||||||
3. If timeout: log error with instructions (`"Run: bun dev in apps/server"`), `process.exit(1)`
|
|
||||||
4. Set `serverUrl = DEV_SERVER_URL`
|
|
||||||
- **Prod mode**:
|
|
||||||
1. Log: "Production mode: starting embedded server..."
|
|
||||||
2. Call `spawnServer()`
|
|
||||||
3. If error: log error, `process.exit(1)`
|
|
||||||
4. Store returned `process` and `url`
|
|
||||||
- Create `BrowserWindow` with the resolved `serverUrl`:
|
|
||||||
```typescript
|
|
||||||
new BrowserWindow({
|
|
||||||
title: 'Furtherverse',
|
|
||||||
url: serverUrl,
|
|
||||||
frame: { x: 100, y: 100, width: 1200, height: 800 },
|
|
||||||
renderer: 'cef',
|
|
||||||
})
|
|
||||||
```
|
|
||||||
- Register lifecycle handlers:
|
|
||||||
- `Electrobun.events.on('before-quit', ...)`: Kill server process if it exists
|
|
||||||
- Watch `serverProcess.exited` (if in prod mode): When server exits unexpectedly, log the exit code and stderr, then `process.exit(1)`
|
|
||||||
|
|
||||||
**G. Top-level execution**:
|
|
||||||
```typescript
|
|
||||||
main().catch((error) => {
|
|
||||||
console.error('Failed to start:', error)
|
|
||||||
process.exit(1)
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
**Critical implementation details**:
|
|
||||||
- The `PORT_PATTERN` regex must handle multiple log formats:
|
|
||||||
- `➜ Listening on: http://localhost:54321/` (srvx format)
|
|
||||||
- `Listening on http://127.0.0.1:54321` (node-server format)
|
|
||||||
- `Listening on http://[::]:54321` (IPv6 format)
|
|
||||||
- The regex `/Listening on:?\s*https?:\/\/\S+:(\d+)/` captures the port from all these formats. (A host class like `[^\s:]+` would fail on the IPv6 form, since `[::]` contains colons; the greedy `\S+` backtracks past them to the final `:port`.)
|
|
||||||
- `cwd` MUST be set to the server directory (`dirname(serverEntryPath)`), not the app root. Nitro resolves internal `_libs/` imports relative to its directory.
|
|
||||||
- `process.execPath` in an Electrobun bundle points to the bundled Bun binary — this is what runs the server.
|
|
||||||
- `stderr: 'pipe'` — capture stderr for crash diagnostics but don't block on it during startup.
|
|
||||||
|
|
||||||
**Must NOT do**:
|
|
||||||
- Do not hardcode port numbers anywhere (use `PORT=0` and parse result)
|
|
||||||
- Do not use `as any` or type assertions to work around issues
|
|
||||||
- Do not use `child_process` module — use `Bun.spawn` (native Bun API)
|
|
||||||
- Do not bind server to `0.0.0.0` — always use `127.0.0.1`
|
|
||||||
- Do not leave the `waitForServer` function unused in dev mode
|
|
||||||
- Do not use synchronous I/O for stdout reading
|
|
||||||
|
|
||||||
**Recommended Agent Profile**:
|
|
||||||
- **Category**: `unspecified-high`
|
|
||||||
- Reason: Complex async logic (stream parsing, subprocess lifecycle, timeout management), multiple code paths (dev/prod), error handling across process boundaries. This is the architectural centerpiece.
|
|
||||||
- **Skills**: `[]`
|
|
||||||
- No specialized skills needed — pure Bun/TypeScript with Electrobun APIs
|
|
||||||
- **Skills Evaluated but Omitted**:
|
|
||||||
- `frontend-ui-ux`: No UI work — this is backend/process management code
|
|
||||||
- `playwright`: No browser testing needed for this task
|
|
||||||
|
|
||||||
**Parallelization**:
|
|
||||||
- **Can Run In Parallel**: YES
|
|
||||||
- **Parallel Group**: Wave 1 (with Tasks 1, 2)
|
|
||||||
- **Blocks**: Tasks 4, 5
|
|
||||||
- **Blocked By**: None (can start immediately — edits a different file from Tasks 1, 2)
|
|
||||||
|
|
||||||
**References**:
|
|
||||||
|
|
||||||
**Pattern References** (existing code to follow):
|
|
||||||
- `apps/desktop/src/bun/index.ts` — Current implementation. Preserve the `waitForServer` polling pattern (slightly refactored). Keep the `BrowserWindow` config (title, frame dimensions, renderer). Keep the top-level `main().catch(...)` pattern.
|
|
||||||
- `apps/desktop/src/bun/index.ts:1` — Current import: `import Electrobun, { BrowserWindow } from 'electrobun/bun'`. Extend this to also import `PATHS` (verify exact export name from type definitions).
|
|
||||||
|
|
||||||
**API/Type References** (contracts to implement against):
|
|
||||||
- `electrobun/bun` module — Exports `Electrobun` (default), `BrowserWindow` (named), and `PATHS` (named — verify). Check `node_modules/electrobun/` for exact type definitions.
|
|
||||||
- `Bun.spawn` API — Returns `Subprocess` with `.stdout` (ReadableStream when piped), `.stderr`, `.exited` (Promise<number>), `.kill()`.
|
|
||||||
- `PATHS.RESOURCES_FOLDER` — Absolute path to `Resources/` directory in the bundle. Verify by reading the Paths.ts source in electrobun package.
|
|
||||||
|
|
||||||
**Documentation References**:
|
|
||||||
- `apps/desktop/AGENTS.md` — Mentions production mode architecture (aspirational, but gives intent)
|
|
||||||
|
|
||||||
**External References**:
|
|
||||||
- Electrobun lifecycle events: `Electrobun.events.on('before-quit', callback)` — callback can call `event.response({ allow: false })` to cancel. Source: `electrobun/src/bun/core/Utils.ts`.
|
|
||||||
- Electrobun `PATHS`: Source at `electrobun/src/bun/core/Paths.ts`. Contains `RESOURCES_FOLDER` and `VIEWS_FOLDER`.
|
|
||||||
- Bun `Subprocess` docs: `stdout` is `ReadableStream<Uint8Array>` when `stdout: 'pipe'`.
|
|
||||||
|
|
||||||
**WHY Each Reference Matters**:
|
|
||||||
- Current `index.ts`: Preserving the `waitForServer` pattern, `BrowserWindow` config, and error handling style. You're rewriting this file, so understand what to keep vs. replace.
|
|
||||||
- `electrobun/bun` types: MUST verify `PATHS` export name and shape before using it. Don't assume — check.
|
|
||||||
- `Bun.spawn` API: Core to the entire prod mode implementation. Understand `stdout` stream reading.
|
|
||||||
- Lifecycle events: `before-quit` is where server cleanup happens. Understand the event contract.
|
|
||||||
|
|
||||||
**Acceptance Criteria**:
|
|
||||||
- [ ] File compiles: `bun typecheck` passes (from `apps/desktop/`)
|
|
||||||
- [ ] No hardcoded port numbers (grep for `:3000` — should only appear in `DEV_SERVER_URL` constant)
|
|
||||||
- [ ] `isDev()` function uses `process.env.ELECTROBUN_BUILD_ENV`
|
|
||||||
- [ ] `spawnServer()` uses `PORT=0`, `HOST=127.0.0.1`, `process.execPath`
|
|
||||||
- [ ] `spawnServer()` sets `cwd` to `dirname(serverEntryPath)`
|
|
||||||
- [ ] `before-quit` handler kills server process
|
|
||||||
- [ ] Server crash watcher exists (watches `subprocess.exited`)
|
|
||||||
- [ ] Timeout handling exists in both dev and prod paths
|
|
||||||
- [ ] All Biome rules pass: `bun fix` produces no changes
|
|
||||||
|
|
||||||
**Agent-Executed QA Scenarios:**
|
|
||||||
|
|
||||||
```
|
|
||||||
Scenario: File compiles with zero type errors
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: File has been rewritten
|
|
||||||
Steps:
|
|
||||||
1. Run: bun typecheck (from apps/desktop/)
|
|
||||||
2. Assert: Exit code 0
|
|
||||||
3. Assert: No errors in output
|
|
||||||
Expected Result: Clean TypeScript compilation
|
|
||||||
Evidence: Terminal output captured
|
|
||||||
|
|
||||||
Scenario: No hardcoded ports outside DEV_SERVER_URL
|
|
||||||
Tool: Bash (grep)
|
|
||||||
Preconditions: File has been rewritten
|
|
||||||
Steps:
|
|
||||||
1. Search apps/desktop/src/bun/index.ts for literal ':3000'
|
|
||||||
2. Assert: Only occurrence is in the DEV_SERVER_URL constant definition
|
|
||||||
3. Search for literal '3000' — should only appear once
|
|
||||||
Expected Result: Port 3000 only in constant, nowhere else
|
|
||||||
Evidence: Grep output
|
|
||||||
|
|
||||||
Scenario: Code passes Biome lint/format
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: File has been rewritten
|
|
||||||
Steps:
|
|
||||||
1. Run: bun fix (from apps/desktop/)
|
|
||||||
2. Run: git diff apps/desktop/src/bun/index.ts
|
|
||||||
3. Assert: No diff (bun fix made no changes)
|
|
||||||
Expected Result: Code already conforms to Biome rules
|
|
||||||
Evidence: Empty git diff
|
|
||||||
|
|
||||||
Scenario: Required patterns present in source
|
|
||||||
Tool: Bash (grep)
|
|
||||||
Preconditions: File has been rewritten
|
|
||||||
Steps:
|
|
||||||
1. Grep for 'ELECTROBUN_BUILD_ENV' — Assert: found
|
|
||||||
2. Grep for 'Bun.spawn' — Assert: found
|
|
||||||
3. Grep for 'process.execPath' — Assert: found
|
|
||||||
4. Grep for 'PORT.*0' — Assert: found
|
|
||||||
5. Grep for '127.0.0.1' — Assert: found
|
|
||||||
6. Grep for 'before-quit' — Assert: found
|
|
||||||
7. Grep for '.exited' — Assert: found (crash watcher)
|
|
||||||
8. Grep for 'dirname' — Assert: found (cwd for server)
|
|
||||||
Expected Result: All required patterns present
|
|
||||||
Evidence: Grep results for each pattern
|
|
||||||
```
|
|
||||||
|
|
||||||
**Commit**: YES
|
|
||||||
- Message: `feat(desktop): implement production mode with child process server`
|
|
||||||
- Files: `apps/desktop/src/bun/index.ts`
|
|
||||||
- Pre-commit: `bun typecheck && bun fix`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
- [ ] 4. Rewrite `apps/desktop/AGENTS.md` — Document new architecture
|
|
||||||
|
|
||||||
**What to do**:
|
|
||||||
- Completely rewrite `AGENTS.md` to reflect the actual implemented architecture
|
|
||||||
- Document:
|
|
||||||
- **Architecture overview**: Desktop spawns server as child process in prod, connects to external server in dev
|
|
||||||
- **Dev mode**: How it works (polls localhost:3000, requires server running separately)
|
|
||||||
- **Prod mode**: How it works (spawns server from bundle, PORT=0, parses port from stdout)
|
|
||||||
- **Environment detection**: `ELECTROBUN_BUILD_ENV` values (`dev`, `canary`, `stable`)
|
|
||||||
- **Build pipeline**: Server must build before desktop (turbo dependency), `build.copy` bundles output
|
|
||||||
- **Key files**: `src/bun/index.ts` (main process), `electrobun.config.ts` (build config)
|
|
||||||
- **Environment variables**: `DATABASE_URL` (required, passed to server), `ELECTROBUN_BUILD_ENV` (auto-set by CLI)
|
|
||||||
- **Server lifecycle**: Spawned on start, killed on quit, crash = exit
|
|
||||||
- **Commands**: `bun dev`, `bun build`, `bun typecheck`, `bun fix`
|
|
||||||
- Follow the style and conventions of the root `AGENTS.md` and `apps/server/AGENTS.md`
|
|
||||||
- Be factual — only document what actually exists, not aspirational features
|
|
||||||
|
|
||||||
**Must NOT do**:
|
|
||||||
- Do not document features that don't exist
|
|
||||||
- Do not copy content from the server's AGENTS.md verbatim
|
|
||||||
- Do not include implementation details that belong in code comments
|
|
||||||
|
|
||||||
**Recommended Agent Profile**:
|
|
||||||
- **Category**: `writing`
|
|
||||||
- Reason: Documentation task requiring clear technical writing
|
|
||||||
- **Skills**: `[]`
|
|
||||||
- No specialized skills needed
|
|
||||||
- **Skills Evaluated but Omitted**:
|
|
||||||
- `frontend-ui-ux`: Not a UI task
|
|
||||||
|
|
||||||
**Parallelization**:
|
|
||||||
- **Can Run In Parallel**: NO
|
|
||||||
- **Parallel Group**: Wave 2
|
|
||||||
- **Blocks**: Task 5
|
|
||||||
- **Blocked By**: Tasks 1, 2, 3 (needs to know final state of all files)
|
|
||||||
|
|
||||||
**References**:
|
|
||||||
|
|
||||||
**Pattern References** (existing code to follow):
|
|
||||||
- `AGENTS.md` (root) — Follow same structure: Overview, Build Commands, Code Style, Directory Structure sections
|
|
||||||
- `apps/server/AGENTS.md` — Follow same style for app-specific documentation. Use this as a template for tone and detail level.
|
|
||||||
|
|
||||||
**Content References** (what to document):
|
|
||||||
- `apps/desktop/src/bun/index.ts` — The rewritten file (Task 3 output). Document its behavior, not its code.
|
|
||||||
- `apps/desktop/electrobun.config.ts` — The updated config (Task 1 output). Document build.copy and platform configs.
|
|
||||||
- `apps/desktop/turbo.json` — The updated turbo config (Task 2 output). Document the build dependency.
|
|
||||||
|
|
||||||
**WHY Each Reference Matters**:
|
|
||||||
- Root `AGENTS.md`: Template for documentation style
|
|
||||||
- Server `AGENTS.md`: Template for app-specific docs
|
|
||||||
- All Task 1-3 outputs: The actual implemented behavior that must be accurately documented
|
|
||||||
|
|
||||||
**Acceptance Criteria**:
|
|
||||||
- [ ] File exists and is valid Markdown
|
|
||||||
- [ ] Documents dev mode behavior accurately
|
|
||||||
- [ ] Documents prod mode behavior accurately
|
|
||||||
- [ ] Documents `ELECTROBUN_BUILD_ENV` mechanism
|
|
||||||
- [ ] Documents build pipeline (server → desktop dependency)
|
|
||||||
- [ ] Documents `DATABASE_URL` requirement
|
|
||||||
- [ ] Does NOT mention features that don't exist
|
|
||||||
- [ ] Follows conventions from root `AGENTS.md`
|
|
||||||
|
|
||||||
**Agent-Executed QA Scenarios:**
|
|
||||||
|
|
||||||
```
|
|
||||||
Scenario: AGENTS.md contains all required sections
|
|
||||||
Tool: Bash (grep)
|
|
||||||
Preconditions: File has been rewritten
|
|
||||||
Steps:
|
|
||||||
1. Grep for 'dev' or 'Dev' — Assert: found (dev mode documented)
|
|
||||||
2. Grep for 'prod' or 'Prod' or 'production' — Assert: found (prod mode documented)
|
|
||||||
3. Grep for 'ELECTROBUN_BUILD_ENV' — Assert: found
|
|
||||||
4. Grep for 'DATABASE_URL' — Assert: found
|
|
||||||
5. Grep for 'child process' or 'spawn' — Assert: found (architecture documented)
|
|
||||||
6. Grep for 'bun dev' — Assert: found (commands documented)
|
|
||||||
7. Grep for 'bun build' — Assert: found (commands documented)
|
|
||||||
Expected Result: All key topics are covered
|
|
||||||
Evidence: Grep results
|
|
||||||
|
|
||||||
Scenario: No aspirational/unimplemented features documented
|
|
||||||
Tool: Bash (grep)
|
|
||||||
Preconditions: File has been rewritten
|
|
||||||
Steps:
|
|
||||||
1. Grep for 'TODO' or 'planned' or 'future' or 'coming soon' — Assert: not found (or minimal)
|
|
||||||
2. Grep for 'auto-update' — Assert: not found (not implemented)
|
|
||||||
3. Grep for 'tray' — Assert: not found (not implemented)
|
|
||||||
Expected Result: Only implemented features documented
|
|
||||||
Evidence: Grep results showing no aspirational content
|
|
||||||
```
|
|
||||||
|
|
||||||
**Commit**: YES
|
|
||||||
- Message: `docs(desktop): rewrite AGENTS.md to reflect production mode architecture`
|
|
||||||
- Files: `apps/desktop/AGENTS.md`
|
|
||||||
- Pre-commit: None (Markdown file)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
- [ ] 5. End-to-end verification — Typecheck and build pipeline
|
|
||||||
|
|
||||||
**What to do**:
|
|
||||||
- Run full monorepo typecheck to ensure no type errors were introduced
|
|
||||||
- Run full monorepo build to verify:
|
|
||||||
1. Server builds first (produces `.output/`)
|
|
||||||
2. Desktop builds second (copies server output into bundle)
|
|
||||||
3. No build errors
|
|
||||||
- Run Biome formatting/linting check on all changed files
|
|
||||||
- Verify dev mode still works conceptually (no runtime test — just verify the code path exists)
|
|
||||||
|
|
||||||
**Must NOT do**:
|
|
||||||
- Do not fix issues in server code — only desktop code
|
|
||||||
- Do not modify any files unless fixing issues found during verification
|
|
||||||
- Do not skip any verification step
|
|
||||||
|
|
||||||
**Recommended Agent Profile**:
|
|
||||||
- **Category**: `quick`
|
|
||||||
- Reason: Running commands and checking output, no creative work
|
|
||||||
- **Skills**: `[]`
|
|
||||||
- No specialized skills needed
|
|
||||||
- **Skills Evaluated but Omitted**:
|
|
||||||
- `playwright`: No browser testing in this verification
|
|
||||||
|
|
||||||
**Parallelization**:
|
|
||||||
- **Can Run In Parallel**: NO
|
|
||||||
- **Parallel Group**: Wave 2 (sequential after Task 4)
|
|
||||||
- **Blocks**: None (final task)
|
|
||||||
- **Blocked By**: Tasks 1, 2, 3, 4
|
|
||||||
|
|
||||||
**References**:
|
|
||||||
|
|
||||||
**Documentation References**:
|
|
||||||
- `AGENTS.md` (root) — Build/Lint/Test commands: `bun typecheck`, `bun fix`, `bun build`
|
|
||||||
- `apps/desktop/package.json` — Desktop-specific scripts
|
|
||||||
|
|
||||||
**WHY Each Reference Matters**:
|
|
||||||
- Root `AGENTS.md`: Canonical list of verification commands
|
|
||||||
- Desktop `package.json`: Desktop-specific build/typecheck commands
|
|
||||||
|
|
||||||
**Acceptance Criteria**:
|
|
||||||
- [ ] `bun typecheck` (monorepo root) exits with code 0
|
|
||||||
- [ ] `bun build` (monorepo root) exits with code 0
|
|
||||||
- [ ] `bun fix` (monorepo root) produces no changes (all code formatted)
|
|
||||||
- [ ] Build output shows server building before desktop
|
|
||||||
- [ ] Desktop build output includes server bundle (verify in build artifacts)
|
|
||||||
|
|
||||||
**Agent-Executed QA Scenarios:**
|
|
||||||
|
|
||||||
```
|
|
||||||
Scenario: Monorepo typecheck passes
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: All tasks 1-4 completed
|
|
||||||
Steps:
|
|
||||||
1. Run: bun typecheck (from monorepo root)
|
|
||||||
2. Assert: Exit code 0
|
|
||||||
3. Assert: No error output
|
|
||||||
Expected Result: Zero type errors across entire monorepo
|
|
||||||
Evidence: Terminal output captured
|
|
||||||
|
|
||||||
Scenario: Monorepo build succeeds with correct order
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: All tasks 1-4 completed
|
|
||||||
Steps:
|
|
||||||
1. Run: bun build (from monorepo root)
|
|
||||||
2. Assert: Exit code 0
|
|
||||||
3. Assert: Output shows @furtherverse/server build task runs
|
|
||||||
4. Assert: Output shows @furtherverse/desktop build task runs AFTER server
|
|
||||||
Expected Result: Build pipeline executes in correct order
|
|
||||||
Evidence: Terminal output showing task order
|
|
||||||
|
|
||||||
Scenario: Biome finds no issues
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: All tasks 1-4 completed
|
|
||||||
Steps:
|
|
||||||
1. Run: bun fix (from monorepo root)
|
|
||||||
2. Run: git diff
|
|
||||||
3. Assert: No changes (all code already formatted)
|
|
||||||
Expected Result: All code passes Biome rules
|
|
||||||
Evidence: Empty git diff
|
|
||||||
|
|
||||||
Scenario: Desktop build artifacts include server bundle
|
|
||||||
Tool: Bash
|
|
||||||
Preconditions: Build succeeded
|
|
||||||
Steps:
|
|
||||||
1. Search desktop build output directory for server-output/ or index.mjs
|
|
||||||
2. Assert: Server files are present in the desktop bundle
|
|
||||||
Expected Result: Server bundle is included in desktop build output
|
|
||||||
Evidence: File listing of build artifacts
|
|
||||||
```
|
|
||||||
|
|
||||||
**Commit**: NO (verification only — no file changes unless fixing issues)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Commit Strategy
|
|
||||||
|
|
||||||
| After Task(s) | Message | Files | Verification |
|
|
||||||
|---------------|---------|-------|--------------|
|
|
||||||
| 1, 2 | `feat(desktop): add build.copy for server bundle and cross-workspace build dependency` | `electrobun.config.ts`, `turbo.json` | `bun typecheck` |
|
|
||||||
| 3 | `feat(desktop): implement production mode with child process server` | `src/bun/index.ts` | `bun typecheck && bun fix` |
|
|
||||||
| 4 | `docs(desktop): rewrite AGENTS.md to reflect production mode architecture` | `AGENTS.md` | None |
|
|
||||||
| 5 | (no commit — verification only) | — | — |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Success Criteria
|
|
||||||
|
|
||||||
### Verification Commands
|
|
||||||
```bash
|
|
||||||
bun typecheck # Expected: exit 0, no errors
|
|
||||||
bun build # Expected: exit 0, server builds before desktop
|
|
||||||
bun fix # Expected: no changes (already formatted)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Final Checklist
|
|
||||||
- [ ] All "Must Have" features present in `index.ts`
|
|
||||||
- [ ] All "Must NOT Have" exclusions verified absent
|
|
||||||
- [ ] All 3 verification commands pass
|
|
||||||
- [ ] `AGENTS.md` accurately reflects implemented architecture
|
|
||||||
- [ ] Server output is bundled into desktop build via `build.copy`
|
|
||||||
- [ ] Turbo builds server before desktop
|
|
||||||
5
.vscode/extensions.json
vendored
5
.vscode/extensions.json
vendored
@@ -2,11 +2,8 @@
|
|||||||
"recommendations": [
|
"recommendations": [
|
||||||
"biomejs.biome",
|
"biomejs.biome",
|
||||||
"hverlin.mise-vscode",
|
"hverlin.mise-vscode",
|
||||||
"mikestead.dotenv",
|
|
||||||
"oven.bun-vscode",
|
"oven.bun-vscode",
|
||||||
"redhat.vscode-yaml",
|
"redhat.vscode-yaml",
|
||||||
"rust-lang.rust-analyzer",
|
"tamasfe.even-better-toml"
|
||||||
"tamasfe.even-better-toml",
|
|
||||||
"tauri-apps.tauri-vscode"
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
43
.vscode/settings.json
vendored
43
.vscode/settings.json
vendored
@@ -1,49 +1,42 @@
|
|||||||
{
|
{
|
||||||
// Disable the default formatter & linter, use biome instead
|
|
||||||
"prettier.enable": false,
|
|
||||||
"eslint.enable": false,
|
|
||||||
|
|
||||||
// Auto fix
|
|
||||||
"editor.codeActionsOnSave": {
|
|
||||||
"source.fixAll.biome": "explicit",
|
|
||||||
"source.organizeImports.biome": "explicit"
|
|
||||||
},
|
|
||||||
"editor.defaultFormatter": "biomejs.biome",
|
|
||||||
"editor.formatOnSave": true,
|
|
||||||
"[javascript]": {
|
"[javascript]": {
|
||||||
"editor.defaultFormatter": "biomejs.biome"
|
"editor.defaultFormatter": "biomejs.biome"
|
||||||
},
|
},
|
||||||
"[javascriptreact]": {
|
"[javascriptreact]": {
|
||||||
"editor.defaultFormatter": "biomejs.biome"
|
"editor.defaultFormatter": "biomejs.biome"
|
||||||
},
|
},
|
||||||
"[typescript]": {
|
|
||||||
"editor.defaultFormatter": "biomejs.biome"
|
|
||||||
},
|
|
||||||
"[typescriptreact]": {
|
|
||||||
"editor.defaultFormatter": "biomejs.biome"
|
|
||||||
},
|
|
||||||
"[json]": {
|
"[json]": {
|
||||||
"editor.defaultFormatter": "biomejs.biome"
|
"editor.defaultFormatter": "biomejs.biome"
|
||||||
},
|
},
|
||||||
"[jsonc]": {
|
"[jsonc]": {
|
||||||
"editor.defaultFormatter": "biomejs.biome"
|
"editor.defaultFormatter": "biomejs.biome"
|
||||||
},
|
},
|
||||||
"[yaml]": {
|
|
||||||
"editor.defaultFormatter": "redhat.vscode-yaml"
|
|
||||||
},
|
|
||||||
"[toml]": {
|
"[toml]": {
|
||||||
"editor.defaultFormatter": "tamasfe.even-better-toml"
|
"editor.defaultFormatter": "tamasfe.even-better-toml"
|
||||||
},
|
},
|
||||||
|
"[typescript]": {
|
||||||
|
"editor.defaultFormatter": "biomejs.biome"
|
||||||
|
},
|
||||||
|
"[typescriptreact]": {
|
||||||
|
"editor.defaultFormatter": "biomejs.biome"
|
||||||
|
},
|
||||||
|
"[yaml]": {
|
||||||
|
"editor.defaultFormatter": "redhat.vscode-yaml"
|
||||||
|
},
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.fixAll.biome": "explicit",
|
||||||
|
"source.organizeImports.biome": "explicit"
|
||||||
|
},
|
||||||
|
"editor.defaultFormatter": "biomejs.biome",
|
||||||
|
"editor.formatOnSave": true,
|
||||||
"files.associations": {
|
"files.associations": {
|
||||||
".env": "dotenv",
|
".env": "dotenv",
|
||||||
".env.*": "dotenv",
|
".env.*": "dotenv",
|
||||||
"**/tsconfig.json": "jsonc",
|
|
||||||
"**/tsconfig.*.json": "jsonc",
|
|
||||||
"**/biome.json": "jsonc",
|
"**/biome.json": "jsonc",
|
||||||
"**/opencode.json": "jsonc"
|
"**/opencode.json": "jsonc",
|
||||||
|
"**/tsconfig.*.json": "jsonc",
|
||||||
|
"**/tsconfig.json": "jsonc"
|
||||||
},
|
},
|
||||||
|
|
||||||
// TanStack Router
|
|
||||||
"files.readonlyInclude": {
|
"files.readonlyInclude": {
|
||||||
"**/routeTree.gen.ts": true
|
"**/routeTree.gen.ts": true
|
||||||
},
|
},
|
||||||
|
|||||||
109
AGENTS.md
109
AGENTS.md
@@ -4,48 +4,71 @@ Guidelines for AI agents working in this Bun monorepo.
|
|||||||
|
|
||||||
## Project Overview
|
## Project Overview
|
||||||
|
|
||||||
> **This project uses [Bun](https://bun.sh) exclusively as both the JavaScript runtime and package manager. Do NOT use Node.js / npm / yarn / pnpm. All commands start with `bun` — use `bun install` for dependencies and `bun run` / `bun <script>` for scripts. Never use `npm`, `npx`, or `node`.**
|
> **This project uses [Bun](https://bun.sh) exclusively as both the JavaScript runtime and package manager. Do NOT use Node.js / npm / yarn / pnpm. All commands start with `bun` — use `bun install` for dependencies and `bun run <script>` for scripts. Always prefer `bun run <script>` over `bun <script>` to avoid conflicts with Bun built-in subcommands (e.g. `bun build` invokes Bun's bundler, NOT your package.json script). Never use `npm`, `npx`, or `node`.**
|
||||||
|
|
||||||
- **Monorepo**: Bun workspaces + Turborepo orchestration
|
- **Monorepo**: Bun workspaces + Turborepo orchestration
|
||||||
- **Runtime**: Bun (see `mise.toml` for version) — **NOT Node.js**
|
- **Runtime**: Bun (see `mise.toml` for version) — **NOT Node.js**
|
||||||
- **Package Manager**: Bun — **NOT npm / yarn / pnpm**
|
- **Package Manager**: Bun — **NOT npm / yarn / pnpm**
|
||||||
- **Apps**:
|
- **Apps**:
|
||||||
- `apps/server` - TanStack Start fullstack web app (see `apps/server/AGENTS.md`)
|
- `apps/server` - TanStack Start fullstack web app (see `apps/server/AGENTS.md`)
|
||||||
- `apps/desktop` - Electrobun desktop shell, loads server in native window (see `apps/desktop/AGENTS.md`)
|
- `apps/desktop` - Electron desktop shell, sidecar server pattern (see `apps/desktop/AGENTS.md`)
|
||||||
- **Packages**: `packages/utils`, `packages/tsconfig` (shared configs)
|
- **Packages**: `packages/tsconfig` (shared TS configs)
|
||||||
|
|
||||||
## Build / Lint / Test Commands
|
## Build / Lint / Test Commands
|
||||||
|
|
||||||
### Root Commands (via Turbo)
|
### Root Commands (via Turbo)
|
||||||
```bash
|
```bash
|
||||||
bun dev # Start all apps in dev mode
|
bun run dev # Start all apps in dev mode
|
||||||
bun build # Build all apps
|
bun run build # Build all apps
|
||||||
bun fix # Lint + format (Biome auto-fix)
|
bun run compile # Compile server to standalone binary (current platform)
|
||||||
bun typecheck # TypeScript check across monorepo
|
bun run compile:darwin # Compile server for macOS (arm64 + x64)
|
||||||
|
bun run compile:linux # Compile server for Linux (x64 + arm64)
|
||||||
|
bun run compile:windows # Compile server for Windows x64
|
||||||
|
bun run dist # Package desktop distributable (current platform)
|
||||||
|
bun run dist:linux # Package desktop for Linux (x64 + arm64)
|
||||||
|
bun run dist:mac # Package desktop for macOS (arm64 + x64)
|
||||||
|
bun run dist:win # Package desktop for Windows x64
|
||||||
|
bun run fix # Lint + format (Biome auto-fix)
|
||||||
|
bun run typecheck # TypeScript check across monorepo
|
||||||
```
|
```
|
||||||
|
|
||||||
### Server App (`apps/server`)
|
### Server App (`apps/server`)
|
||||||
```bash
|
```bash
|
||||||
bun dev # Vite dev server (localhost:3000)
|
bun run dev # Vite dev server (localhost:3000)
|
||||||
bun build # Production build -> .output/
|
bun run build # Production build -> .output/
|
||||||
bun compile # Compile to standalone binary
|
bun run compile # Compile to standalone binary (current platform)
|
||||||
bun fix # Biome auto-fix
|
bun run compile:darwin # Compile for macOS (arm64 + x64)
|
||||||
bun typecheck # TypeScript check
|
bun run compile:darwin:arm64 # Compile for macOS arm64
|
||||||
|
bun run compile:darwin:x64 # Compile for macOS x64
|
||||||
|
bun run compile:linux # Compile for Linux (x64 + arm64)
|
||||||
|
bun run compile:linux:arm64 # Compile for Linux arm64
|
||||||
|
bun run compile:linux:x64 # Compile for Linux x64
|
||||||
|
bun run compile:windows # Compile for Windows (default: x64)
|
||||||
|
bun run compile:windows:x64 # Compile for Windows x64
|
||||||
|
bun run fix # Biome auto-fix
|
||||||
|
bun run typecheck # TypeScript check
|
||||||
|
|
||||||
# Database (Drizzle)
|
# Database (Drizzle)
|
||||||
bun db:generate # Generate migrations from schema
|
bun run db:generate # Generate migrations from schema
|
||||||
bun db:migrate # Run migrations
|
bun run db:migrate # Run migrations
|
||||||
bun db:push # Push schema (dev only)
|
bun run db:push # Push schema (dev only)
|
||||||
bun db:studio # Open Drizzle Studio
|
bun run db:studio # Open Drizzle Studio
|
||||||
```
|
```
|
||||||
|
|
||||||
### Desktop App (`apps/desktop`)
|
### Desktop App (`apps/desktop`)
|
||||||
```bash
|
```bash
|
||||||
bun dev # Start Electrobun dev mode (requires server dev running)
|
bun run dev # electron-vite dev mode (requires server dev running)
|
||||||
bun build # Build canary release
|
bun run build # electron-vite build (main + preload)
|
||||||
bun build:stable # Build stable release
|
bun run dist # Build + package for current platform
|
||||||
bun fix # Biome auto-fix
|
bun run dist:linux # Build + package for Linux (x64 + arm64)
|
||||||
bun typecheck # TypeScript check
|
bun run dist:linux:x64 # Build + package for Linux x64
|
||||||
|
bun run dist:linux:arm64 # Build + package for Linux arm64
|
||||||
|
bun run dist:mac # Build + package for macOS (arm64 + x64)
|
||||||
|
bun run dist:mac:arm64 # Build + package for macOS arm64
|
||||||
|
bun run dist:mac:x64 # Build + package for macOS x64
|
||||||
|
bun run dist:win # Build + package for Windows x64
|
||||||
|
bun run fix # Biome auto-fix
|
||||||
|
bun run typecheck # TypeScript check
|
||||||
```
|
```
|
||||||
|
|
||||||
### Testing
|
### Testing
|
||||||
@@ -97,7 +120,13 @@ import type { ReactNode } from 'react'
|
|||||||
- ORPC: Use `ORPCError` with proper codes (`NOT_FOUND`, `INPUT_VALIDATION_FAILED`)
|
- ORPC: Use `ORPCError` with proper codes (`NOT_FOUND`, `INPUT_VALIDATION_FAILED`)
|
||||||
- Never use empty catch blocks
|
- Never use empty catch blocks
|
||||||
|
|
||||||
## Database (Drizzle ORM)
|
## Database (Drizzle ORM v1 beta + postgres-js)
|
||||||
|
|
||||||
|
- **ORM**: Drizzle ORM `1.0.0-beta` (RQBv2)
|
||||||
|
- **Driver**: `drizzle-orm/postgres-js` (NOT `bun-sql`)
|
||||||
|
- **Validation**: `drizzle-orm/zod` (built-in, NOT separate `drizzle-zod` package)
|
||||||
|
- **Relations**: Defined via `defineRelations()` in `src/server/db/relations.ts` (contains schema info, so `drizzle()` only needs `{ relations }`)
|
||||||
|
- **Query style**: RQBv2 object syntax (`orderBy: { createdAt: 'desc' }`, `where: { id: 1 }`)
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
export const myTable = pgTable('my_table', {
|
export const myTable = pgTable('my_table', {
|
||||||
@@ -120,13 +149,22 @@ export const myTable = pgTable('my_table', {
|
|||||||
- Workspace packages use `"catalog:"` — never hardcode versions
|
- Workspace packages use `"catalog:"` — never hardcode versions
|
||||||
- Internal packages use `"workspace:*"` references
|
- Internal packages use `"workspace:*"` references
|
||||||
|
|
||||||
|
## Development Principles
|
||||||
|
|
||||||
|
> **These principles apply to ALL code changes. Agents MUST follow them on every task.**
|
||||||
|
|
||||||
|
1. **No backward compatibility** — This project is in rapid iteration. Always use the latest API and patterns. Never keep deprecated code paths or old API fallbacks "just in case".
|
||||||
|
2. **Always sync documentation** — When code changes, immediately update all related documentation (`AGENTS.md`, `README.md`, inline code examples). Code and docs must never drift apart. This includes updating code snippets in docs when imports, APIs, or patterns change.
|
||||||
|
3. **Forward-only migration** — When upgrading dependencies, fully adopt the new API. Don't mix old and new patterns in the same codebase.
|
||||||
|
|
||||||
## Critical Rules
|
## Critical Rules
|
||||||
|
|
||||||
**DO:**
|
**DO:**
|
||||||
- Run `bun fix` before committing
|
- Run `bun run fix` before committing
|
||||||
- Use `@/*` path aliases (not relative imports)
|
- Use `@/*` path aliases (not relative imports)
|
||||||
- Include `createdAt`/`updatedAt` on all tables
|
- Include `createdAt`/`updatedAt` on all tables
|
||||||
- Use `catalog:` for dependency versions
|
- Use `catalog:` for dependency versions
|
||||||
|
- Update `AGENTS.md` and other docs whenever code patterns change
|
||||||
|
|
||||||
**DON'T:**
|
**DON'T:**
|
||||||
- Use `npm`, `npx`, `node`, `yarn`, `pnpm` — always use `bun` / `bunx`
|
- Use `npm`, `npx`, `node`, `yarn`, `pnpm` — always use `bun` / `bunx`
|
||||||
@@ -135,13 +173,14 @@ export const myTable = pgTable('my_table', {
|
|||||||
- Commit `.env` files
|
- Commit `.env` files
|
||||||
- Use empty catch blocks `catch(e) {}`
|
- Use empty catch blocks `catch(e) {}`
|
||||||
- Hardcode dependency versions in workspace packages
|
- Hardcode dependency versions in workspace packages
|
||||||
|
- Leave docs out of sync with code changes
|
||||||
|
|
||||||
## Git Workflow
|
## Git Workflow
|
||||||
|
|
||||||
1. Make changes following style guide
|
1. Make changes following style guide
|
||||||
2. `bun fix` - auto-format and lint
|
2. `bun run fix` - auto-format and lint
|
||||||
3. `bun typecheck` - verify types
|
3. `bun run typecheck` - verify types
|
||||||
4. `bun dev` - test locally
|
4. `bun run dev` - test locally
|
||||||
5. Commit with descriptive message
|
5. Commit with descriptive message
|
||||||
|
|
||||||
## Directory Structure
|
## Directory Structure
|
||||||
@@ -151,22 +190,24 @@ export const myTable = pgTable('my_table', {
|
|||||||
├── apps/
|
├── apps/
|
||||||
│ ├── server/ # TanStack Start fullstack app
|
│ ├── server/ # TanStack Start fullstack app
|
||||||
│ │ ├── src/
|
│ │ ├── src/
|
||||||
│ │ │ ├── client/ # ORPC client, Query client
|
│ │ │ ├── client/ # ORPC client + TanStack Query utils
|
||||||
│ │ │ ├── components/
|
│ │ │ ├── components/
|
||||||
│ │ │ ├── routes/ # File-based routing
|
│ │ │ ├── routes/ # File-based routing
|
||||||
│ │ │ └── server/ # API layer + database
|
│ │ │ └── server/ # API layer + database
|
||||||
│ │ │ ├── api/ # ORPC contracts, routers, middlewares
|
│ │ │ ├── api/ # ORPC contracts, routers, middlewares
|
||||||
│ │ │ └── db/ # Drizzle schema
|
│ │ │ └── db/ # Drizzle schema
|
||||||
│ │ └── AGENTS.md
|
│ │ └── AGENTS.md
|
||||||
│ └── desktop/ # Electrobun desktop shell
|
│ └── desktop/ # Electron desktop shell
|
||||||
│ ├── src/
|
│ ├── src/
|
||||||
│ │ └── bun/
|
│ │ ├── main/
|
||||||
│ │ └── index.ts # Main process entry
|
│ │ │ └── index.ts # Main process entry
|
||||||
│ ├── electrobun.config.ts # Electrobun configuration
|
│ │ └── preload/
|
||||||
|
│ │ └── index.ts # Preload script
|
||||||
|
│ ├── electron.vite.config.ts
|
||||||
|
│ ├── electron-builder.yml # Packaging config
|
||||||
│ └── AGENTS.md
|
│ └── AGENTS.md
|
||||||
├── packages/
|
├── packages/
|
||||||
│ ├── tsconfig/ # Shared TS configs
|
│ └── tsconfig/ # Shared TS configs
|
||||||
│ └── utils/ # Shared utilities
|
|
||||||
├── biome.json # Linting/formatting config
|
├── biome.json # Linting/formatting config
|
||||||
├── turbo.json # Turbo task orchestration
|
├── turbo.json # Turbo task orchestration
|
||||||
└── package.json # Workspace root + dependency catalog
|
└── package.json # Workspace root + dependency catalog
|
||||||
@@ -175,4 +216,4 @@ export const myTable = pgTable('my_table', {
|
|||||||
## See Also
|
## See Also
|
||||||
|
|
||||||
- `apps/server/AGENTS.md` - Detailed TanStack Start / ORPC patterns
|
- `apps/server/AGENTS.md` - Detailed TanStack Start / ORPC patterns
|
||||||
- `apps/desktop/AGENTS.md` - Electrobun desktop development guide
|
- `apps/desktop/AGENTS.md` - Electron desktop development guide
|
||||||
|
|||||||
6
apps/desktop/.gitignore
vendored
6
apps/desktop/.gitignore
vendored
@@ -1,3 +1,3 @@
|
|||||||
# Electrobun build output
|
# electron-vite build output
|
||||||
build/
|
out/
|
||||||
artifacts/
|
dist/
|
||||||
|
|||||||
@@ -1,36 +1,39 @@
|
|||||||
# AGENTS.md - Desktop App Guidelines
|
# AGENTS.md - Desktop App Guidelines
|
||||||
|
|
||||||
Thin Electrobun shell hosting the fullstack server app.
|
Thin Electron shell hosting the fullstack server app.
|
||||||
|
|
||||||
## Tech Stack
|
## Tech Stack
|
||||||
|
|
||||||
> **⚠️ This project uses Bun — NOT Node.js / npm. All commands use `bun`. Never use `npm`, `npx`, or `node`.**
|
> **⚠️ This project uses Bun as the package manager. Runtime is Electron (Node.js). Always use `bun run <script>` (not `bun <script>`) to avoid conflicts with Bun built-in subcommands. Never use `npm`, `npx`, `yarn`, or `pnpm`.**
|
||||||
|
|
||||||
- **Type**: Electrobun desktop application
|
- **Type**: Electron desktop shell
|
||||||
- **Design**: Server-driven desktop (thin native shell hosting web app)
|
- **Design**: Server-driven desktop (thin native window hosting web app)
|
||||||
- **Runtime**: Bun (Main process) + CEF (Chromium Embedded Framework)
|
- **Runtime**: Electron (Main/Renderer) + Sidecar server binary (Bun-compiled)
|
||||||
- **Framework**: Electrobun
|
- **Build Tool**: electron-vite (Vite-based, handles main + preload builds)
|
||||||
- **Build**: Electrobun CLI + Turborepo
|
- **Packager**: electron-builder (installers, signing, auto-update)
|
||||||
|
- **Orchestration**: Turborepo
|
||||||
|
|
||||||
## Architecture
|
## Architecture
|
||||||
|
|
||||||
- **Server-driven design**: The desktop app is a "thin" native shell. It does not contain UI or business logic; it merely hosts the `apps/server` TanStack Start application in a native window.
|
- **Server-driven design**: The desktop app is a "thin" native shell. It does not contain UI or business logic; it opens a BrowserWindow pointing to the `apps/server` TanStack Start application.
|
||||||
- **Dev mode**: Connects to an external Vite dev server at `localhost:3000`. Requires `apps/server` to be running separately.
|
- **Dev mode**: Opens a BrowserWindow pointing to `localhost:3000`. Requires `apps/server` to be running separately (Turbo handles this).
|
||||||
- **Prod mode**: Spawns an embedded TanStack Start server (Nitro) as a child process and loads the dynamically assigned local URL.
|
- **Production mode**: Spawns a compiled server binary (from `resources/`) as a sidecar process, waits for readiness, then loads its URL.
|
||||||
- **Config**: `electrobun.config.ts` manages app metadata (identifier, name), build entries, and asset bundling.
|
|
||||||
|
|
||||||
## Commands
|
## Commands
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Development
|
bun run dev # electron-vite dev (requires server dev running)
|
||||||
bun dev # Start Electrobun dev mode (requires server dev running)
|
bun run build # electron-vite build (main + preload)
|
||||||
|
bun run dist # Build + package for current platform
|
||||||
# Build
|
bun run dist:linux # Build + package for Linux (x64 + arm64)
|
||||||
bun build # Build stable release (all platforms)
|
bun run dist:linux:x64 # Build + package for Linux x64
|
||||||
|
bun run dist:linux:arm64 # Build + package for Linux arm64
|
||||||
# Code Quality
|
bun run dist:mac # Build + package for macOS (arm64 + x64)
|
||||||
bun fix # Biome auto-fix
|
bun run dist:mac:arm64 # Build + package for macOS arm64
|
||||||
bun typecheck # TypeScript check
|
bun run dist:mac:x64 # Build + package for macOS x64
|
||||||
|
bun run dist:win # Build + package for Windows x64
|
||||||
|
bun run fix # Biome auto-fix
|
||||||
|
bun run typecheck # TypeScript check
|
||||||
```
|
```
|
||||||
|
|
||||||
## Directory Structure
|
## Directory Structure
|
||||||
@@ -38,94 +41,55 @@ bun typecheck # TypeScript check
|
|||||||
```
|
```
|
||||||
.
|
.
|
||||||
├── src/
|
├── src/
|
||||||
│ └── bun/
|
│ ├── main/
|
||||||
│ └── index.ts # Main process entry (Window management + server lifecycle)
|
│ │ └── index.ts # Main process (server lifecycle + BrowserWindow)
|
||||||
├── electrobun.config.ts # App metadata and build/copy configuration
|
│ └── preload/
|
||||||
├── package.json # Scripts and dependencies
|
│ └── index.ts # Preload script (security isolation)
|
||||||
├── turbo.json # Build pipeline dependencies (depends on server build)
|
├── resources/ # Sidecar binaries (gitignored, copied from server build)
|
||||||
└── AGENTS.md # Desktop guidelines (this file)
|
├── out/ # electron-vite build output (gitignored)
|
||||||
|
├── electron.vite.config.ts
|
||||||
|
├── electron-builder.yml # Packaging configuration
|
||||||
|
├── package.json
|
||||||
|
├── turbo.json
|
||||||
|
└── AGENTS.md
|
||||||
```
|
```
|
||||||
|
|
||||||
## Development Workflow
|
## Development Workflow
|
||||||
|
|
||||||
1. **Start server**: In `apps/server`, run `bun dev`.
|
1. **Start server**: `bun run dev` in `apps/server` (or use root `bun run dev` via Turbo).
|
||||||
2. **Start desktop**: In `apps/desktop`, run `bun dev`.
|
2. **Start desktop**: `bun run dev` in `apps/desktop`.
|
||||||
3. **Connection**: The desktop app polls `localhost:3000` until responsive, then opens the native window.
|
3. **Connection**: Main process polls `localhost:3000` until responsive, then opens BrowserWindow.
|
||||||
|
|
||||||
## Production Architecture
|
## Production Build Workflow
|
||||||
|
|
||||||
### Build Pipeline
|
From monorepo root, run `bun run dist` to execute the full pipeline automatically (via Turbo task dependencies):
|
||||||
The desktop build is orchestrated by Turbo. It depends on the server's production build:
|
|
||||||
- `turbo.json`: `@furtherverse/desktop#build` depends on `@furtherverse/server#build`.
|
|
||||||
- `electrobun.config.ts`: Copies `../server/.output` to `server-output` folder within the app bundle.
|
|
||||||
|
|
||||||
### Server Lifecycle
|
1. **Build server**: `apps/server` → `vite build` → `.output/`
|
||||||
In production, the main process manages the embedded server:
|
2. **Compile server**: `apps/server` → `bun compile.ts --target ...` → `out/server-{os}-{arch}`
|
||||||
- **Spawn**: Spawns server from `server-output/server/index.mjs` using `Bun.spawn`.
|
3. **Package desktop**: `apps/desktop` → `electron-vite build` + `electron-builder` → distributable
|
||||||
- **Port Allocation**: A free port is pre-allocated via `node:net` (`createServer` on `127.0.0.1:0`), then passed to the server as the `PORT` environment variable.
|
|
||||||
- **Readiness Check**: The main process polls the server URL with `fetch` until it responds, rather than parsing stdout.
|
|
||||||
- **Retry**: If the server fails to become ready (timeout or early exit), the process is killed and a new attempt is made with a fresh port (up to 3 retries).
|
|
||||||
- **Lifecycle**: The server process is tied to the app; it is killed on `SIGTERM`, `SIGINT`, or app exit. If the server process crashes, the app exits with an error.
|
|
||||||
|
|
||||||
## Environment Detection
|
The `electron-builder.yml` `extraResources` config reads binaries directly from `../server/out/`, no manual copy needed.
|
||||||
|
|
||||||
The application determines its environment via the `ELECTROBUN_BUILD_ENV` variable, automatically set by the Electrobun CLI:
|
To build for a specific platform explicitly, use `bun run dist:linux` / `bun run dist:mac` / `bun run dist:win` in `apps/desktop`.
|
||||||
|
For single-arch output, use `bun run dist:linux:x64`, `bun run dist:linux:arm64`, `bun run dist:mac:x64`, or `bun run dist:mac:arm64`.
|
||||||
|
|
||||||
```typescript
|
## Development Principles
|
||||||
const isDev = () => {
|
|
||||||
const env = process.env.ELECTROBUN_BUILD_ENV
|
|
||||||
return !env || env === 'dev'
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- **dev**: Development mode (connects to external host).
|
> **These principles apply to ALL code changes. Agents MUST follow them on every task.**
|
||||||
- **canary / stable**: Production mode (starts embedded server).
|
|
||||||
|
|
||||||
## Environment Variables
|
1. **No backward compatibility** — This project is in rapid iteration. Always use the latest API and patterns. Never keep deprecated code paths or old API fallbacks.
|
||||||
|
2. **Always sync documentation** — When code changes, immediately update all related documentation (`AGENTS.md`, `README.md`, inline code examples). Code and docs must never drift apart.
|
||||||
- `ELECTROBUN_BUILD_ENV`: Auto-set by CLI. Determines whether to use local dev server or embedded server.
|
3. **Forward-only migration** — When upgrading dependencies, fully adopt the new API. Don't mix old and new patterns.
|
||||||
- `DATABASE_URL`: Required by the server process. Must be passed through from the parent environment to the spawned child process.
|
|
||||||
|
|
||||||
## Electrobun Patterns
|
|
||||||
|
|
||||||
### BrowserWindow Configuration
|
|
||||||
The main window uses the CEF renderer for consistency across platforms.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
new BrowserWindow({
|
|
||||||
title: 'Furtherverse',
|
|
||||||
url: serverUrl,
|
|
||||||
frame: {
|
|
||||||
x: 100,
|
|
||||||
y: 100,
|
|
||||||
width: 1200,
|
|
||||||
height: 800,
|
|
||||||
},
|
|
||||||
renderer: 'cef',
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
### Path Aliases
|
|
||||||
The main process uses `electrobun/bun` for native APIs and `PATHS` for locating bundled assets.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { BrowserWindow, PATHS } from 'electrobun/bun'
|
|
||||||
|
|
||||||
// Locate the embedded server bundle
|
|
||||||
const serverEntryPath = join(PATHS.VIEWS_FOLDER, '..', 'server-output', 'server', 'index.mjs')
|
|
||||||
```
|
|
||||||
|
|
||||||
## Critical Rules
|
## Critical Rules
|
||||||
|
|
||||||
**DO:**
|
**DO:**
|
||||||
- Use arrow functions for all components and utility functions.
|
- Use arrow functions for all utility functions.
|
||||||
- Ensure `apps/server` is built before building `apps/desktop` (handled by Turbo).
|
- Keep the desktop app as a thin shell — no UI or business logic.
|
||||||
- Pre-allocate a free port and pass it via `PORT` env var instead of parsing stdout.
|
|
||||||
- Handle server process termination to avoid orphan processes.
|
|
||||||
- Use `catalog:` for all dependency versions in `package.json`.
|
- Use `catalog:` for all dependency versions in `package.json`.
|
||||||
|
|
||||||
**DON'T:**
|
**DON'T:**
|
||||||
- Use `npm`, `npx`, `node`, `yarn`, or `pnpm`. Always use `bun`.
|
- Use `npm`, `npx`, `yarn`, or `pnpm`. Use `bun` for package management.
|
||||||
- Hardcode `localhost:3000` for production builds.
|
- Include UI components or business logic in the desktop app.
|
||||||
- Include UI components or business logic in the desktop app (keep it thin).
|
|
||||||
- Use `as any` or `@ts-ignore`.
|
- Use `as any` or `@ts-ignore`.
|
||||||
|
- Leave docs out of sync with code changes.
|
||||||
|
|||||||
9
apps/desktop/biome.json
Normal file
9
apps/desktop/biome.json
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"$schema": "../../node_modules/@biomejs/biome/configuration_schema.json",
|
||||||
|
"extends": "//",
|
||||||
|
"css": {
|
||||||
|
"parser": {
|
||||||
|
"tailwindDirectives": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
BIN
apps/desktop/build/icon.png
Normal file
BIN
apps/desktop/build/icon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 83 KiB |
@@ -1,27 +0,0 @@
|
|||||||
import type { ElectrobunConfig } from 'electrobun'
|
|
||||||
|
|
||||||
export default {
|
|
||||||
app: {
|
|
||||||
name: 'Desktop',
|
|
||||||
identifier: 'com.furtherverse.desktop',
|
|
||||||
version: '1.0.0',
|
|
||||||
},
|
|
||||||
build: {
|
|
||||||
bun: {
|
|
||||||
entrypoint: 'src/bun/index.ts',
|
|
||||||
},
|
|
||||||
copy: {
|
|
||||||
'../server/.output': 'server-output',
|
|
||||||
},
|
|
||||||
targets: 'win-x64',
|
|
||||||
linux: {
|
|
||||||
bundleCEF: true,
|
|
||||||
},
|
|
||||||
mac: {
|
|
||||||
bundleCEF: false,
|
|
||||||
},
|
|
||||||
win: {
|
|
||||||
bundleCEF: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
} satisfies ElectrobunConfig
|
|
||||||
48
apps/desktop/electron-builder.yml
Normal file
48
apps/desktop/electron-builder.yml
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
# yaml-language-server: $schema=https://raw.githubusercontent.com/electron-userland/electron-builder/refs/heads/master/packages/app-builder-lib/scheme.json
|
||||||
|
appId: com.furtherverse.desktop
|
||||||
|
productName: Furtherverse
|
||||||
|
executableName: furtherverse
|
||||||
|
|
||||||
|
npmRebuild: false
|
||||||
|
asarUnpack:
|
||||||
|
- resources/**
|
||||||
|
|
||||||
|
files:
|
||||||
|
- "!**/.vscode/*"
|
||||||
|
- "!src/*"
|
||||||
|
- "!electron.vite.config.{js,ts,mjs,cjs}"
|
||||||
|
- "!{.env,.env.*,bun.lock}"
|
||||||
|
- "!{tsconfig.json,tsconfig.node.json}"
|
||||||
|
- "!{AGENTS.md,README.md,CHANGELOG.md}"
|
||||||
|
|
||||||
|
# macOS
|
||||||
|
mac:
|
||||||
|
target:
|
||||||
|
- dmg
|
||||||
|
category: public.app-category.productivity
|
||||||
|
extraResources:
|
||||||
|
- from: ../server/out/server-darwin-${arch}
|
||||||
|
to: server
|
||||||
|
dmg:
|
||||||
|
artifactName: ${productName}-${version}-${os}-${arch}.${ext}
|
||||||
|
|
||||||
|
# Windows
|
||||||
|
win:
|
||||||
|
target:
|
||||||
|
- portable
|
||||||
|
extraResources:
|
||||||
|
- from: ../server/out/server-windows-${arch}.exe
|
||||||
|
to: server.exe
|
||||||
|
portable:
|
||||||
|
artifactName: ${productName}-${version}-${os}-${arch}-Portable.${ext}
|
||||||
|
|
||||||
|
# Linux
|
||||||
|
linux:
|
||||||
|
target:
|
||||||
|
- AppImage
|
||||||
|
category: Utility
|
||||||
|
extraResources:
|
||||||
|
- from: ../server/out/server-linux-${arch}
|
||||||
|
to: server
|
||||||
|
appImage:
|
||||||
|
artifactName: ${productName}-${version}-${os}-${arch}.${ext}
|
||||||
11
apps/desktop/electron.vite.config.ts
Normal file
11
apps/desktop/electron.vite.config.ts
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
import tailwindcss from '@tailwindcss/vite'
|
||||||
|
import react from '@vitejs/plugin-react'
|
||||||
|
import { defineConfig } from 'electron-vite'
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
main: {},
|
||||||
|
preload: {},
|
||||||
|
renderer: {
|
||||||
|
plugins: [react(), tailwindcss()],
|
||||||
|
},
|
||||||
|
})
|
||||||
@@ -2,18 +2,36 @@
|
|||||||
"name": "@furtherverse/desktop",
|
"name": "@furtherverse/desktop",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"main": "out/main/index.js",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "electrobun build --env=stable",
|
"build": "electron-vite build",
|
||||||
"dev": "electrobun build && electrobun dev",
|
"dev": "electron-vite dev --watch",
|
||||||
|
"dist": "electron-builder",
|
||||||
|
"dist:linux": "bun run dist:linux:x64 && bun run dist:linux:arm64",
|
||||||
|
"dist:linux:arm64": "electron-builder --linux --arm64",
|
||||||
|
"dist:linux:x64": "electron-builder --linux --x64",
|
||||||
|
"dist:mac": "bun run dist:mac:arm64 && bun run dist:mac:x64",
|
||||||
|
"dist:mac:arm64": "electron-builder --mac --arm64",
|
||||||
|
"dist:mac:x64": "electron-builder --mac --x64",
|
||||||
|
"dist:win": "electron-builder --win --x64",
|
||||||
"fix": "biome check --write",
|
"fix": "biome check --write",
|
||||||
"typecheck": "tsc --noEmit"
|
"typecheck": "tsc -b"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"electrobun": "catalog:"
|
"motion": "catalog:",
|
||||||
|
"react": "catalog:",
|
||||||
|
"react-dom": "catalog:",
|
||||||
|
"tree-kill": "catalog:"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@furtherverse/tsconfig": "workspace:*",
|
"@furtherverse/tsconfig": "workspace:*",
|
||||||
"@types/bun": "catalog:"
|
"@tailwindcss/vite": "catalog:",
|
||||||
|
"@types/node": "catalog:",
|
||||||
|
"@vitejs/plugin-react": "catalog:",
|
||||||
|
"electron": "catalog:",
|
||||||
|
"electron-builder": "catalog:",
|
||||||
|
"electron-vite": "catalog:",
|
||||||
|
"tailwindcss": "catalog:",
|
||||||
|
"vite": "catalog:"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
0
apps/desktop/resources/.gitkeep
Normal file
0
apps/desktop/resources/.gitkeep
Normal file
@@ -1,166 +0,0 @@
|
|||||||
import { createServer } from 'node:net'
|
|
||||||
import { dirname, join } from 'node:path'
|
|
||||||
import { BrowserWindow, PATHS } from 'electrobun/bun'
|
|
||||||
|
|
||||||
const DEV_SERVER_URL = 'http://localhost:3000'
|
|
||||||
const SERVER_READY_TIMEOUT_MS = 5_000
|
|
||||||
const MAX_SPAWN_RETRIES = 3
|
|
||||||
|
|
||||||
const isDev = (): boolean => {
|
|
||||||
const env = process.env.ELECTROBUN_BUILD_ENV
|
|
||||||
return !env || env === 'dev'
|
|
||||||
}
|
|
||||||
|
|
||||||
const getServerEntryPath = (): string => {
|
|
||||||
return join(PATHS.VIEWS_FOLDER, '..', 'server-output', 'server', 'index.mjs')
|
|
||||||
}
|
|
||||||
|
|
||||||
const getFreePort = (): Promise<number> => {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const srv = createServer()
|
|
||||||
srv.unref()
|
|
||||||
srv.once('error', reject)
|
|
||||||
srv.listen({ port: 0, host: '127.0.0.1', exclusive: true }, () => {
|
|
||||||
const addr = srv.address()
|
|
||||||
if (addr && typeof addr === 'object') {
|
|
||||||
const port = addr.port
|
|
||||||
srv.close((err) => (err ? reject(err) : resolve(port)))
|
|
||||||
} else {
|
|
||||||
srv.close(() => reject(new Error('Unexpected address() result')))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const waitForServer = async (
|
|
||||||
url: string,
|
|
||||||
timeoutMs = SERVER_READY_TIMEOUT_MS,
|
|
||||||
): Promise<boolean> => {
|
|
||||||
const start = Date.now()
|
|
||||||
while (Date.now() - start < timeoutMs) {
|
|
||||||
try {
|
|
||||||
const response = await fetch(url, { method: 'HEAD' })
|
|
||||||
if (response.ok) return true
|
|
||||||
} catch (_) {
|
|
||||||
// Server not up yet, retry after sleep
|
|
||||||
}
|
|
||||||
await Bun.sleep(100)
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
const spawnServer = async (): Promise<{
|
|
||||||
process: ReturnType<typeof Bun.spawn>
|
|
||||||
url: string
|
|
||||||
}> => {
|
|
||||||
const serverEntryPath = getServerEntryPath()
|
|
||||||
const serverDir = dirname(serverEntryPath)
|
|
||||||
|
|
||||||
for (let attempt = 1; attempt <= MAX_SPAWN_RETRIES; attempt++) {
|
|
||||||
const port = await getFreePort()
|
|
||||||
const url = `http://127.0.0.1:${port}`
|
|
||||||
|
|
||||||
const serverProc = Bun.spawn([process.execPath, serverEntryPath], {
|
|
||||||
cwd: serverDir,
|
|
||||||
env: {
|
|
||||||
...process.env,
|
|
||||||
PORT: String(port),
|
|
||||||
HOST: '127.0.0.1',
|
|
||||||
},
|
|
||||||
stdio: ['ignore', 'inherit', 'inherit'],
|
|
||||||
})
|
|
||||||
|
|
||||||
const ready = await Promise.race([
|
|
||||||
waitForServer(url),
|
|
||||||
serverProc.exited.then((code) => {
|
|
||||||
throw new Error(`Server exited with code ${code} before becoming ready`)
|
|
||||||
}),
|
|
||||||
])
|
|
||||||
|
|
||||||
if (ready) {
|
|
||||||
return { process: serverProc, url }
|
|
||||||
}
|
|
||||||
|
|
||||||
serverProc.kill()
|
|
||||||
await serverProc.exited
|
|
||||||
console.warn(
|
|
||||||
`Server failed to become ready on port ${port} (attempt ${attempt}/${MAX_SPAWN_RETRIES})`,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new Error(`Server failed to start after ${MAX_SPAWN_RETRIES} attempts`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const main = async () => {
|
|
||||||
console.log('Starting Furtherverse Desktop...')
|
|
||||||
|
|
||||||
let serverUrl: string
|
|
||||||
let serverProcess: ReturnType<typeof Bun.spawn> | null = null
|
|
||||||
|
|
||||||
if (isDev()) {
|
|
||||||
console.log('Dev mode: waiting for external server at', DEV_SERVER_URL)
|
|
||||||
const ready = await waitForServer(DEV_SERVER_URL)
|
|
||||||
if (!ready) {
|
|
||||||
console.error(
|
|
||||||
'Dev server not responding. Make sure to run: bun dev in apps/server',
|
|
||||||
)
|
|
||||||
process.exit(1)
|
|
||||||
}
|
|
||||||
console.log('Dev server ready!')
|
|
||||||
serverUrl = DEV_SERVER_URL
|
|
||||||
} else {
|
|
||||||
console.log('Production mode: starting embedded server...')
|
|
||||||
try {
|
|
||||||
const server = await spawnServer()
|
|
||||||
serverProcess = server.process
|
|
||||||
serverUrl = server.url
|
|
||||||
console.log('Server ready at', serverUrl)
|
|
||||||
} catch (err) {
|
|
||||||
console.error('Failed to start embedded server:', err)
|
|
||||||
process.exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
new BrowserWindow({
|
|
||||||
title: 'Furtherverse',
|
|
||||||
url: serverUrl,
|
|
||||||
frame: {
|
|
||||||
x: 100,
|
|
||||||
y: 100,
|
|
||||||
width: 1200,
|
|
||||||
height: 800,
|
|
||||||
},
|
|
||||||
renderer: 'cef',
|
|
||||||
})
|
|
||||||
|
|
||||||
if (serverProcess) {
|
|
||||||
const cleanup = () => {
|
|
||||||
if (serverProcess) {
|
|
||||||
serverProcess.kill()
|
|
||||||
serverProcess = null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
process.on('exit', cleanup)
|
|
||||||
process.on('SIGTERM', () => {
|
|
||||||
cleanup()
|
|
||||||
process.exit(0)
|
|
||||||
})
|
|
||||||
process.on('SIGINT', () => {
|
|
||||||
cleanup()
|
|
||||||
process.exit(0)
|
|
||||||
})
|
|
||||||
|
|
||||||
serverProcess.exited.then((code) => {
|
|
||||||
if (serverProcess) {
|
|
||||||
console.error(`Server exited unexpectedly with code ${code}`)
|
|
||||||
process.exit(1)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
main().catch((error) => {
|
|
||||||
console.error('Failed to start:', error)
|
|
||||||
process.exit(1)
|
|
||||||
})
|
|
||||||
198
apps/desktop/src/main/index.ts
Normal file
198
apps/desktop/src/main/index.ts
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
import { join } from 'node:path'
|
||||||
|
import { app, BrowserWindow, dialog, session, shell } from 'electron'
|
||||||
|
import { createSidecarRuntime } from './sidecar'
|
||||||
|
|
||||||
|
const DEV_SERVER_URL = 'http://localhost:3000'
|
||||||
|
const SAFE_EXTERNAL_PROTOCOLS = new Set(['https:', 'http:', 'mailto:'])
|
||||||
|
|
||||||
|
let mainWindow: BrowserWindow | null = null
|
||||||
|
let windowCreationPromise: Promise<void> | null = null
|
||||||
|
let isQuitting = false
|
||||||
|
|
||||||
|
const showErrorAndQuit = (title: string, detail: string) => {
|
||||||
|
if (isQuitting) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
dialog.showErrorBox(title, detail)
|
||||||
|
app.quit()
|
||||||
|
}
|
||||||
|
|
||||||
|
const sidecar = createSidecarRuntime({
|
||||||
|
devServerUrl: DEV_SERVER_URL,
|
||||||
|
isPackaged: app.isPackaged,
|
||||||
|
resourcesPath: process.resourcesPath,
|
||||||
|
isQuitting: () => isQuitting,
|
||||||
|
onUnexpectedStop: (detail) => {
|
||||||
|
showErrorAndQuit('Service Stopped', detail)
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const toErrorMessage = (error: unknown): string => (error instanceof Error ? error.message : String(error))
|
||||||
|
|
||||||
|
const canOpenExternally = (url: string): boolean => {
|
||||||
|
try {
|
||||||
|
const parsed = new URL(url)
|
||||||
|
return SAFE_EXTERNAL_PROTOCOLS.has(parsed.protocol)
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const loadSplash = async (windowRef: BrowserWindow) => {
|
||||||
|
if (process.env.ELECTRON_RENDERER_URL) {
|
||||||
|
await windowRef.loadURL(process.env.ELECTRON_RENDERER_URL)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
await windowRef.loadFile(join(__dirname, '../renderer/index.html'))
|
||||||
|
}
|
||||||
|
|
||||||
|
const createWindow = async () => {
|
||||||
|
if (mainWindow && !mainWindow.isDestroyed()) {
|
||||||
|
mainWindow.focus()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const windowRef = new BrowserWindow({
|
||||||
|
width: 1200,
|
||||||
|
height: 800,
|
||||||
|
show: false,
|
||||||
|
webPreferences: {
|
||||||
|
preload: join(__dirname, '../preload/index.js'),
|
||||||
|
sandbox: true,
|
||||||
|
contextIsolation: true,
|
||||||
|
nodeIntegration: false,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
mainWindow = windowRef
|
||||||
|
|
||||||
|
windowRef.webContents.setWindowOpenHandler(({ url }) => {
|
||||||
|
if (!canOpenExternally(url)) {
|
||||||
|
if (!app.isPackaged) {
|
||||||
|
console.warn(`Blocked external URL: ${url}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return { action: 'deny' }
|
||||||
|
}
|
||||||
|
|
||||||
|
void shell.openExternal(url)
|
||||||
|
return { action: 'deny' }
|
||||||
|
})
|
||||||
|
|
||||||
|
windowRef.webContents.on('will-navigate', (event, url) => {
|
||||||
|
const allowed = [DEV_SERVER_URL, sidecar.lastResolvedUrl].filter((v): v is string => v != null)
|
||||||
|
const isAllowed = allowed.some((origin) => url.startsWith(origin))
|
||||||
|
|
||||||
|
if (!isAllowed) {
|
||||||
|
event.preventDefault()
|
||||||
|
|
||||||
|
if (canOpenExternally(url)) {
|
||||||
|
void shell.openExternal(url)
|
||||||
|
} else if (!app.isPackaged) {
|
||||||
|
console.warn(`Blocked navigation to: ${url}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
windowRef.on('closed', () => {
|
||||||
|
if (mainWindow === windowRef) {
|
||||||
|
mainWindow = null
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
try {
|
||||||
|
await loadSplash(windowRef)
|
||||||
|
} catch (error) {
|
||||||
|
if (mainWindow === windowRef) {
|
||||||
|
mainWindow = null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!windowRef.isDestroyed()) {
|
||||||
|
windowRef.destroy()
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!windowRef.isDestroyed()) {
|
||||||
|
windowRef.show()
|
||||||
|
}
|
||||||
|
|
||||||
|
const targetUrl = await sidecar.resolveUrl()
|
||||||
|
if (isQuitting || windowRef.isDestroyed()) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await windowRef.loadURL(targetUrl)
|
||||||
|
} catch (error) {
|
||||||
|
if (mainWindow === windowRef) {
|
||||||
|
mainWindow = null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!windowRef.isDestroyed()) {
|
||||||
|
windowRef.destroy()
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const ensureWindow = async () => {
|
||||||
|
if (windowCreationPromise) {
|
||||||
|
return windowCreationPromise
|
||||||
|
}
|
||||||
|
|
||||||
|
windowCreationPromise = createWindow().finally(() => {
|
||||||
|
windowCreationPromise = null
|
||||||
|
})
|
||||||
|
|
||||||
|
return windowCreationPromise
|
||||||
|
}
|
||||||
|
|
||||||
|
// Marks the app as quitting (so in-flight window/server startups can bail
// out early) and stops the sidecar server process.
const beginQuit = () => {
  isQuitting = true
  sidecar.stop()
}
|
||||||
|
|
||||||
|
const handleWindowCreationError = (error: unknown, context: string) => {
|
||||||
|
console.error(`${context}:`, error)
|
||||||
|
showErrorAndQuit(
|
||||||
|
"App Couldn't Start",
|
||||||
|
app.isPackaged
|
||||||
|
? 'A required component failed to start. Please reinstall the app.'
|
||||||
|
: `${context}: ${toErrorMessage(error)}`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Boot sequence: once Electron is ready, globally deny all renderer
// permission requests (camera, notifications, ...), then create the window.
app
  .whenReady()
  .then(() => {
    session.defaultSession.setPermissionRequestHandler((_webContents, _permission, callback) => {
      callback(false)
    })

    return ensureWindow()
  })
  .catch((error) => {
    handleWindowCreationError(error, 'Failed to create window')
  })

// Quit when the last window closes everywhere except macOS, where apps
// conventionally stay alive in the dock.
app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') {
    app.quit()
  }
})

// macOS dock activation: re-create the window only if none is open and the
// app is not already in the middle of quitting.
app.on('activate', () => {
  if (isQuitting || BrowserWindow.getAllWindows().length > 0) {
    return
  }

  ensureWindow().catch((error) => {
    handleWindowCreationError(error, 'Failed to re-create window')
  })
})

// Flag shutdown and stop the sidecar before the app quits.
app.on('before-quit', beginQuit)
|
||||||
256
apps/desktop/src/main/sidecar.ts
Normal file
256
apps/desktop/src/main/sidecar.ts
Normal file
@@ -0,0 +1,256 @@
|
|||||||
|
import { type ChildProcess, spawn } from 'node:child_process'
|
||||||
|
import { existsSync } from 'node:fs'
|
||||||
|
import { createServer } from 'node:net'
|
||||||
|
import { join } from 'node:path'
|
||||||
|
import killProcessTree from 'tree-kill'
|
||||||
|
|
||||||
|
const SERVER_HOST = '127.0.0.1'
|
||||||
|
const SERVER_READY_TIMEOUT_MS = 10_000
|
||||||
|
const SERVER_REQUEST_TIMEOUT_MS = 1_500
|
||||||
|
const SERVER_POLL_INTERVAL_MS = 250
|
||||||
|
const SERVER_PROBE_PATHS = ['/api/health', '/']
|
||||||
|
|
||||||
|
// Mutable bookkeeping for the sidecar server, owned by the runtime closure.
type SidecarState = {
  // Spawned server process; null when no process is being tracked.
  process: ChildProcess | null
  // In-flight startup promise, shared by concurrent resolveUrl() callers.
  startup: Promise<string> | null
  // Base URL once the server has been confirmed ready; null otherwise.
  url: string | null
}

// Host-environment hooks injected by the Electron main process.
type SidecarRuntimeOptions = {
  // Dev-server base URL used when not packaged.
  devServerUrl: string
  // Selects packaged-binary mode vs external dev-server mode.
  isPackaged: boolean
  // Directory expected to contain the bundled server binary.
  resourcesPath: string
  // Returns true once the app has begun quitting (suppresses error reporting).
  isQuitting: () => boolean
  // Invoked with a user-facing message when a ready server dies unexpectedly.
  onUnexpectedStop: (detail: string) => void
}

// Public surface returned by createSidecarRuntime().
type SidecarRuntime = {
  // Resolves the base URL to load, starting the server if necessary.
  resolveUrl: () => Promise<string>
  // Terminates the server process tree, if one is running.
  stop: () => void
  // Last confirmed server URL, or null before the first successful resolve.
  lastResolvedUrl: string | null
}
|
||||||
|
|
||||||
|
/** Promise-based delay of `ms` milliseconds. */
const sleep = (ms: number): Promise<void> =>
  new Promise((resolve) => {
    setTimeout(resolve, ms)
  })
|
||||||
|
|
||||||
|
const isProcessAlive = (processToCheck: ChildProcess | null): processToCheck is ChildProcess => {
|
||||||
|
if (!processToCheck || !processToCheck.pid) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return processToCheck.exitCode === null && !processToCheck.killed
|
||||||
|
}
|
||||||
|
|
||||||
|
const getAvailablePort = (): Promise<number> =>
|
||||||
|
new Promise((resolve, reject) => {
|
||||||
|
const server = createServer()
|
||||||
|
server.listen(0, () => {
|
||||||
|
const addr = server.address()
|
||||||
|
if (!addr || typeof addr === 'string') {
|
||||||
|
server.close()
|
||||||
|
reject(new Error('Failed to resolve port'))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
server.close(() => resolve(addr.port))
|
||||||
|
})
|
||||||
|
server.on('error', reject)
|
||||||
|
})
|
||||||
|
|
||||||
|
const isServerReady = async (url: string): Promise<boolean> => {
|
||||||
|
for (const probePath of SERVER_PROBE_PATHS) {
|
||||||
|
try {
|
||||||
|
const probeUrl = new URL(probePath, `${url}/`)
|
||||||
|
const response = await fetch(probeUrl, {
|
||||||
|
method: 'GET',
|
||||||
|
cache: 'no-store',
|
||||||
|
signal: AbortSignal.timeout(SERVER_REQUEST_TIMEOUT_MS),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (response.status < 500) {
|
||||||
|
if (probePath === '/api/health' && response.status === 404) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Expected: probe request fails while server is still starting up
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const waitForServer = async (url: string, isQuitting: () => boolean, processRef?: ChildProcess): Promise<boolean> => {
|
||||||
|
const start = Date.now()
|
||||||
|
while (Date.now() - start < SERVER_READY_TIMEOUT_MS && !isQuitting()) {
|
||||||
|
if (processRef && processRef.exitCode !== null) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await isServerReady(url)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
await sleep(SERVER_POLL_INTERVAL_MS)
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolveBinaryPath = (resourcesPath: string): string => {
|
||||||
|
const binaryName = process.platform === 'win32' ? 'server.exe' : 'server'
|
||||||
|
return join(resourcesPath, binaryName)
|
||||||
|
}
|
||||||
|
|
||||||
|
const formatUnexpectedStopMessage = (
|
||||||
|
isPackaged: boolean,
|
||||||
|
code: number | null,
|
||||||
|
signal: NodeJS.Signals | null,
|
||||||
|
): string => {
|
||||||
|
if (isPackaged) {
|
||||||
|
return 'The background service stopped unexpectedly. Please restart the app.'
|
||||||
|
}
|
||||||
|
|
||||||
|
return `Server process exited unexpectedly (code ${code ?? 'unknown'}, signal ${signal ?? 'none'}).`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Creates the runtime that owns the backend (sidecar) server process.
 *
 * Packaged builds spawn the bundled `server` binary on a free localhost
 * port and wait for it to answer HTTP probes; dev builds simply wait for
 * the externally-run dev server. `resolveUrl()` returns the base URL to
 * load, `stop()` tears the process tree down, and `lastResolvedUrl`
 * exposes the last confirmed URL (used by the caller's navigation
 * allow-list).
 */
export const createSidecarRuntime = (options: SidecarRuntimeOptions): SidecarRuntime => {
  const state: SidecarState = {
    process: null,
    startup: null,
    url: null,
  }

  // Clears process/url bookkeeping. When `processRef` is given, only resets
  // if it is still the tracked process — guards against stale event
  // handlers from an already-replaced child.
  const resetState = (processRef?: ChildProcess) => {
    if (processRef && state.process !== processRef) {
      return
    }

    state.process = null
    state.url = null
  }

  // Stops the tracked server (and its children) with SIGTERM, if running.
  const stop = () => {
    const runningServer = state.process
    resetState()

    if (!runningServer?.pid || runningServer.exitCode !== null) {
      return
    }

    // tree-kill: the binary may spawn children that a plain kill would orphan.
    killProcessTree(runningServer.pid, 'SIGTERM', (error?: Error) => {
      if (error) {
        console.error('Failed to stop server process:', error)
      }
    })
  }

  // Wires 'error'/'exit' handlers that surface unexpected stops to the UI —
  // but only for the currently-tracked process, only when the app is not
  // quitting, and only after the server had already been confirmed ready
  // (startup failures are reported via the startup promise instead).
  const attachLifecycleHandlers = (processRef: ChildProcess) => {
    processRef.on('error', (error) => {
      if (state.process !== processRef) {
        return
      }

      const hadReadyServer = state.url !== null
      resetState(processRef)

      if (!options.isQuitting() && hadReadyServer) {
        options.onUnexpectedStop('The background service crashed unexpectedly. Please restart the app.')
        return
      }

      console.error('Failed to start server process:', error)
    })

    processRef.on('exit', (code, signal) => {
      if (state.process !== processRef) {
        return
      }

      const hadReadyServer = state.url !== null
      resetState(processRef)

      if (!options.isQuitting() && hadReadyServer) {
        options.onUnexpectedStop(formatUnexpectedStopMessage(options.isPackaged, code, signal))
      }
    })
  }

  // Starts (or reuses) the packaged server binary and resolves its base
  // URL. Concurrent callers share the same in-flight startup promise.
  const startPackagedServer = async (): Promise<string> => {
    if (state.url && isProcessAlive(state.process)) {
      return state.url
    }

    if (state.startup) {
      return state.startup
    }

    state.startup = (async () => {
      const binaryPath = resolveBinaryPath(options.resourcesPath)
      if (!existsSync(binaryPath)) {
        throw new Error(`Sidecar server binary is missing: ${binaryPath}`)
      }

      if (options.isQuitting()) {
        throw new Error('Application is shutting down.')
      }

      const port = await getAvailablePort()
      const nextServerUrl = `http://${SERVER_HOST}:${port}`
      const processRef = spawn(binaryPath, [], {
        env: {
          ...process.env,
          HOST: SERVER_HOST,
          PORT: String(port),
        },
        stdio: 'ignore',
        windowsHide: true,
      })

      // unref so the child alone cannot keep the Electron process alive.
      processRef.unref()
      state.process = processRef
      attachLifecycleHandlers(processRef)

      const ready = await waitForServer(nextServerUrl, options.isQuitting, processRef)
      if (ready && isProcessAlive(processRef)) {
        state.url = nextServerUrl
        return nextServerUrl
      }

      const failureReason =
        processRef.exitCode !== null
          ? `The service exited early (code ${processRef.exitCode}).`
          : `The service did not respond at ${nextServerUrl} within 10 seconds.`

      // Kill the half-started process before failing the startup promise.
      stop()
      throw new Error(failureReason)
    })().finally(() => {
      state.startup = null
    })

    return state.startup
  }

  // Packaged: own the server binary. Dev: wait for the external dev server.
  const resolveUrl = async (): Promise<string> => {
    if (options.isPackaged) {
      return startPackagedServer()
    }

    const ready = await waitForServer(options.devServerUrl, options.isQuitting)
    if (!ready) {
      throw new Error('Dev server not responding. Run `bun dev` in apps/server first.')
    }

    state.url = options.devServerUrl
    return options.devServerUrl
  }

  return {
    resolveUrl,
    stop,
    // Live getter: always reflects the most recently confirmed server URL.
    get lastResolvedUrl() {
      return state.url
    },
  }
}
|
||||||
1
apps/desktop/src/preload/index.ts
Normal file
1
apps/desktop/src/preload/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export {}
|
||||||
BIN
apps/desktop/src/renderer/assets/logo.png
Normal file
BIN
apps/desktop/src/renderer/assets/logo.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 83 KiB |
33
apps/desktop/src/renderer/components/SplashApp.tsx
Normal file
33
apps/desktop/src/renderer/components/SplashApp.tsx
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import { motion } from 'motion/react'
|
||||||
|
import logoImage from '../assets/logo.png'
|
||||||
|
|
||||||
|
// Splash screen shown while the sidecar server starts: a centered logo
// above an indefinitely looping progress bar.
export const SplashApp = () => {
  return (
    <main className="m-0 flex h-screen w-screen cursor-default select-none items-center justify-center overflow-hidden bg-white font-sans antialiased">
      {/* Fade/slide the whole splash in once on mount. */}
      <motion.section
        animate={{ opacity: 1, y: 0 }}
        className="flex flex-col items-center gap-8"
        initial={{ opacity: 0, y: 4 }}
        transition={{
          duration: 1,
          ease: [0.16, 1, 0.3, 1],
        }}
      >
        <img alt="Logo" className="h-20 w-auto object-contain" draggable={false} src={logoImage} />

        {/* Indeterminate progress: a full-width bar sweeping left-to-right forever. */}
        <div className="relative h-[4px] w-36 overflow-hidden rounded-full bg-zinc-100">
          <motion.div
            animate={{ x: '100%' }}
            className="h-full w-full bg-zinc-800"
            initial={{ x: '-100%' }}
            transition={{
              duration: 2,
              ease: [0.4, 0, 0.2, 1],
              repeat: Infinity,
            }}
          />
        </div>
      </motion.section>
    </main>
  )
}
|
||||||
12
apps/desktop/src/renderer/index.html
Normal file
12
apps/desktop/src/renderer/index.html
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
|
<title>Furtherverse</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="root"></div>
|
||||||
|
<script type="module" src="./main.tsx"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
11
apps/desktop/src/renderer/main.tsx
Normal file
11
apps/desktop/src/renderer/main.tsx
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
import { StrictMode } from 'react'
|
||||||
|
import { createRoot } from 'react-dom/client'
|
||||||
|
import { SplashApp } from './components/SplashApp'
|
||||||
|
import './styles.css'
|
||||||
|
|
||||||
|
// Mount the splash UI. The #root element always exists in index.html.
// biome-ignore lint/style/noNonNullAssertion: #root is guaranteed to exist in index.html
createRoot(document.getElementById('root')!).render(
  <StrictMode>
    <SplashApp />
  </StrictMode>,
)
|
||||||
1
apps/desktop/src/renderer/styles.css
Normal file
1
apps/desktop/src/renderer/styles.css
Normal file
@@ -0,0 +1 @@
|
|||||||
|
@import "tailwindcss";
|
||||||
8
apps/desktop/tsconfig.app.json
Normal file
8
apps/desktop/tsconfig.app.json
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"extends": "@furtherverse/tsconfig/react.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"composite": true,
|
||||||
|
"types": ["vite/client"]
|
||||||
|
},
|
||||||
|
"include": ["src/renderer/**/*"]
|
||||||
|
}
|
||||||
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"extends": "@furtherverse/tsconfig/bun.json",
|
"files": [],
|
||||||
"compilerOptions": {
|
"references": [
|
||||||
"lib": ["ESNext", "DOM", "DOM.Iterable"]
|
{
|
||||||
|
"path": "./tsconfig.app.json"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": "./tsconfig.node.json"
|
||||||
}
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
8
apps/desktop/tsconfig.node.json
Normal file
8
apps/desktop/tsconfig.node.json
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"extends": "@furtherverse/tsconfig/base.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"composite": true,
|
||||||
|
"types": ["node"]
|
||||||
|
},
|
||||||
|
"include": ["src/main/**/*", "src/preload/**/*", "electron.vite.config.ts"]
|
||||||
|
}
|
||||||
@@ -3,8 +3,39 @@
|
|||||||
"extends": ["//"],
|
"extends": ["//"],
|
||||||
"tasks": {
|
"tasks": {
|
||||||
"build": {
|
"build": {
|
||||||
"dependsOn": ["@furtherverse/server#build"],
|
"outputs": ["out/**"]
|
||||||
"outputs": ["build/**", "artifacts/**"]
|
},
|
||||||
|
"dist": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:linux": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:linux:arm64", "@furtherverse/server#compile:linux:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:linux:arm64": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:linux:arm64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:linux:x64": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:linux:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:mac": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:darwin:arm64", "@furtherverse/server#compile:darwin:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:mac:arm64": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:darwin:arm64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:mac:x64": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:darwin:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:win": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:windows:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
DATABASE_URL=postgres://postgres:postgres@localhost:5432/postgres
|
DATABASE_PATH=data.db
|
||||||
|
|||||||
@@ -4,14 +4,14 @@ TanStack Start fullstack web app with ORPC (contract-first RPC).
|
|||||||
|
|
||||||
## Tech Stack
|
## Tech Stack
|
||||||
|
|
||||||
> **⚠️ This project uses Bun — NOT Node.js / npm. All commands use `bun`. Never use `npm`, `npx`, or `node`.**
|
> **⚠️ This project uses Bun — NOT Node.js / npm. All commands use `bun`. Always use `bun run <script>` (not `bun <script>`) to avoid conflicts with Bun built-in subcommands. Never use `npm`, `npx`, or `node`.**
|
||||||
|
|
||||||
- **Framework**: TanStack Start (React 19 SSR, file-based routing)
|
- **Framework**: TanStack Start (React 19 SSR, file-based routing)
|
||||||
- **Runtime**: Bun — **NOT Node.js**
|
- **Runtime**: Bun — **NOT Node.js**
|
||||||
- **Package Manager**: Bun — **NOT npm / yarn / pnpm**
|
- **Package Manager**: Bun — **NOT npm / yarn / pnpm**
|
||||||
- **Language**: TypeScript (strict mode)
|
- **Language**: TypeScript (strict mode)
|
||||||
- **Styling**: Tailwind CSS v4
|
- **Styling**: Tailwind CSS v4
|
||||||
- **Database**: PostgreSQL + Drizzle ORM
|
- **Database**: PostgreSQL + Drizzle ORM v1 beta (`drizzle-orm/postgres-js`, RQBv2)
|
||||||
- **State**: TanStack Query v5
|
- **State**: TanStack Query v5
|
||||||
- **RPC**: ORPC (contract-first, type-safe)
|
- **RPC**: ORPC (contract-first, type-safe)
|
||||||
- **Build**: Vite + Nitro
|
- **Build**: Vite + Nitro
|
||||||
@@ -20,21 +20,29 @@ TanStack Start fullstack web app with ORPC (contract-first RPC).
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Development
|
# Development
|
||||||
bun dev # Vite dev server (localhost:3000)
|
bun run dev # Vite dev server (localhost:3000)
|
||||||
bun db:studio # Drizzle Studio GUI
|
bun run db:studio # Drizzle Studio GUI
|
||||||
|
|
||||||
# Build
|
# Build
|
||||||
bun build # Production build → .output/
|
bun run build # Production build → .output/
|
||||||
bun compile # Compile to standalone binary
|
bun run compile # Compile to standalone binary (current platform, depends on build)
|
||||||
|
bun run compile:darwin # Compile for macOS (arm64 + x64)
|
||||||
|
bun run compile:darwin:arm64 # Compile for macOS arm64
|
||||||
|
bun run compile:darwin:x64 # Compile for macOS x64
|
||||||
|
bun run compile:linux # Compile for Linux (x64 + arm64)
|
||||||
|
bun run compile:linux:arm64 # Compile for Linux arm64
|
||||||
|
bun run compile:linux:x64 # Compile for Linux x64
|
||||||
|
bun run compile:windows # Compile for Windows (default: x64)
|
||||||
|
bun run compile:windows:x64 # Compile for Windows x64
|
||||||
|
|
||||||
# Code Quality
|
# Code Quality
|
||||||
bun fix # Biome auto-fix
|
bun run fix # Biome auto-fix
|
||||||
bun typecheck # TypeScript check
|
bun run typecheck # TypeScript check
|
||||||
|
|
||||||
# Database
|
# Database
|
||||||
bun db:generate # Generate migrations from schema
|
bun run db:generate # Generate migrations from schema
|
||||||
bun db:migrate # Run migrations
|
bun run db:migrate # Run migrations
|
||||||
bun db:push # Push schema directly (dev only)
|
bun run db:push # Push schema directly (dev only)
|
||||||
|
|
||||||
# Testing (not yet configured)
|
# Testing (not yet configured)
|
||||||
bun test path/to/test.ts # Run single test
|
bun test path/to/test.ts # Run single test
|
||||||
@@ -46,25 +54,29 @@ bun test -t "pattern" # Run tests matching pattern
|
|||||||
```
|
```
|
||||||
src/
|
src/
|
||||||
├── client/ # Client-side code
|
├── client/ # Client-side code
|
||||||
│ ├── orpc.client.ts # ORPC isomorphic client
|
│ └── orpc.ts # ORPC client + TanStack Query utils (single entry point)
|
||||||
│ └── query-client.ts # TanStack Query client
|
|
||||||
├── components/ # React components
|
├── components/ # React components
|
||||||
├── routes/ # TanStack Router file routes
|
├── routes/ # TanStack Router file routes
|
||||||
│ ├── __root.tsx # Root layout
|
│ ├── __root.tsx # Root layout
|
||||||
│ ├── index.tsx # Home page
|
│ ├── index.tsx # Home page
|
||||||
│ └── api/
|
│ └── api/
|
||||||
│ └── rpc.$.ts # ORPC HTTP endpoint
|
│ ├── $.ts # OpenAPI handler + Scalar docs
|
||||||
|
│ ├── health.ts # Health check endpoint
|
||||||
|
│ └── rpc.$.ts # ORPC RPC handler
|
||||||
├── server/ # Server-side code
|
├── server/ # Server-side code
|
||||||
│ ├── api/ # ORPC layer
|
│ ├── api/ # ORPC layer
|
||||||
│ │ ├── contracts/ # Input/output schemas (Zod)
|
│ │ ├── contracts/ # Input/output schemas (Zod)
|
||||||
│ │ ├── middlewares/ # Middleware (db provider, auth)
|
│ │ ├── middlewares/ # Middleware (db provider, auth)
|
||||||
│ │ ├── routers/ # Handler implementations
|
│ │ ├── routers/ # Handler implementations
|
||||||
|
│ │ ├── interceptors.ts # Shared error interceptors
|
||||||
│ │ ├── context.ts # Request context
|
│ │ ├── context.ts # Request context
|
||||||
│ │ ├── server.ts # ORPC server instance
|
│ │ ├── server.ts # ORPC server instance
|
||||||
│ │ └── types.ts # Type exports
|
│ │ └── types.ts # Type exports
|
||||||
│ └── db/
|
│ └── db/
|
||||||
│ ├── schema/ # Drizzle table definitions
|
│ ├── schema/ # Drizzle table definitions
|
||||||
│ └── index.ts # Database instance
|
│ ├── fields.ts # Shared field builders (id, createdAt, updatedAt)
|
||||||
|
│ ├── relations.ts # Drizzle relations (defineRelations, RQBv2)
|
||||||
|
│ └── index.ts # Database instance (postgres-js driver)
|
||||||
├── env.ts # Environment variable validation
|
├── env.ts # Environment variable validation
|
||||||
├── router.tsx # Router configuration
|
├── router.tsx # Router configuration
|
||||||
├── routeTree.gen.ts # Auto-generated (DO NOT EDIT)
|
├── routeTree.gen.ts # Auto-generated (DO NOT EDIT)
|
||||||
@@ -76,7 +88,7 @@ src/
|
|||||||
### 1. Define Contract (`src/server/api/contracts/feature.contract.ts`)
|
### 1. Define Contract (`src/server/api/contracts/feature.contract.ts`)
|
||||||
```typescript
|
```typescript
|
||||||
import { oc } from '@orpc/contract'
|
import { oc } from '@orpc/contract'
|
||||||
import { createSelectSchema } from 'drizzle-zod'
|
import { createSelectSchema } from 'drizzle-orm/zod'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { featureTable } from '@/server/db/schema'
|
import { featureTable } from '@/server/db/schema'
|
||||||
|
|
||||||
@@ -93,7 +105,9 @@ import { db } from '../middlewares'
|
|||||||
import { os } from '../server'
|
import { os } from '../server'
|
||||||
|
|
||||||
export const list = os.feature.list.use(db).handler(async ({ context }) => {
|
export const list = os.feature.list.use(db).handler(async ({ context }) => {
|
||||||
return await context.db.query.featureTable.findMany()
|
return await context.db.query.featureTable.findMany({
|
||||||
|
orderBy: { createdAt: 'desc' },
|
||||||
|
})
|
||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -111,14 +125,20 @@ export const router = os.router({ feature })
|
|||||||
### 4. Use in Components
|
### 4. Use in Components
|
||||||
```typescript
|
```typescript
|
||||||
import { useSuspenseQuery, useMutation } from '@tanstack/react-query'
|
import { useSuspenseQuery, useMutation } from '@tanstack/react-query'
|
||||||
import { orpc } from '@/client/orpc.client'
|
import { orpc } from '@/client/orpc'
|
||||||
|
|
||||||
const { data } = useSuspenseQuery(orpc.feature.list.queryOptions())
|
const { data } = useSuspenseQuery(orpc.feature.list.queryOptions())
|
||||||
const mutation = useMutation(orpc.feature.create.mutationOptions())
|
const mutation = useMutation(orpc.feature.create.mutationOptions())
|
||||||
```
|
```
|
||||||
|
|
||||||
## Database Schema (Drizzle)
|
## Database (Drizzle ORM v1 beta)
|
||||||
|
|
||||||
|
- **Driver**: `drizzle-orm/postgres-js` (NOT `bun-sql`)
|
||||||
|
- **Validation**: `drizzle-orm/zod` (built-in, NOT separate `drizzle-zod` package)
|
||||||
|
- **Relations**: Defined via `defineRelations()` in `src/server/db/relations.ts`
|
||||||
|
- **Query**: RQBv2 — use `db.query.tableName.findMany()` with object-style `orderBy` and `where`
|
||||||
|
|
||||||
|
### Schema Definition
|
||||||
```typescript
|
```typescript
|
||||||
import { pgTable, text, timestamp, uuid } from 'drizzle-orm/pg-core'
|
import { pgTable, text, timestamp, uuid } from 'drizzle-orm/pg-core'
|
||||||
import { sql } from 'drizzle-orm'
|
import { sql } from 'drizzle-orm'
|
||||||
@@ -131,6 +151,43 @@ export const myTable = pgTable('my_table', {
|
|||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Relations (RQBv2)
|
||||||
|
```typescript
|
||||||
|
// src/server/db/relations.ts
|
||||||
|
import { defineRelations } from 'drizzle-orm'
|
||||||
|
import * as schema from './schema'
|
||||||
|
|
||||||
|
export const relations = defineRelations(schema, (r) => ({
|
||||||
|
// Define relations here using r.one / r.many / r.through
|
||||||
|
}))
|
||||||
|
```
|
||||||
|
|
||||||
|
### DB Instance
|
||||||
|
```typescript
|
||||||
|
// src/server/db/index.ts
|
||||||
|
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||||
|
import { relations } from '@/server/db/relations'
|
||||||
|
// In RQBv2, relations already contain schema info — no separate schema import needed
|
||||||
|
|
||||||
|
const db = drizzle({
|
||||||
|
connection: env.DATABASE_URL,
|
||||||
|
relations,
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### RQBv2 Query Examples
|
||||||
|
```typescript
|
||||||
|
// Object-style orderBy (NOT callback style)
|
||||||
|
const todos = await db.query.todoTable.findMany({
|
||||||
|
orderBy: { createdAt: 'desc' },
|
||||||
|
})
|
||||||
|
|
||||||
|
// Object-style where
|
||||||
|
const todo = await db.query.todoTable.findFirst({
|
||||||
|
where: { id: someId },
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
## Code Style
|
## Code Style
|
||||||
|
|
||||||
### Formatting (Biome)
|
### Formatting (Biome)
|
||||||
@@ -189,13 +246,24 @@ export const env = createEnv({
|
|||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Development Principles
|
||||||
|
|
||||||
|
> **These principles apply to ALL code changes. Agents MUST follow them on every task.**
|
||||||
|
|
||||||
|
1. **No backward compatibility** — This project is in rapid iteration. Always use the latest API and patterns. Never keep deprecated code paths or old API fallbacks.
|
||||||
|
2. **Always sync documentation** — When code changes, immediately update all related documentation (`AGENTS.md`, `README.md`, inline code examples). Code and docs must never drift apart.
|
||||||
|
3. **Forward-only migration** — When upgrading dependencies, fully adopt the new API. Don't mix old and new patterns.
|
||||||
|
|
||||||
## Critical Rules
|
## Critical Rules
|
||||||
|
|
||||||
**DO:**
|
**DO:**
|
||||||
- Run `bun fix` before committing
|
- Run `bun run fix` before committing
|
||||||
- Use `@/*` path aliases
|
- Use `@/*` path aliases
|
||||||
- Include `createdAt`/`updatedAt` on all tables
|
- Include `createdAt`/`updatedAt` on all tables
|
||||||
- Use `ORPCError` with proper codes
|
- Use `ORPCError` with proper codes
|
||||||
|
- Use `drizzle-orm/zod` (NOT `drizzle-zod`) for schema validation
|
||||||
|
- Use RQBv2 object syntax for `orderBy` and `where`
|
||||||
|
- Update `AGENTS.md` and other docs whenever code patterns change
|
||||||
|
|
||||||
**DON'T:**
|
**DON'T:**
|
||||||
- Use `npm`, `npx`, `node`, `yarn`, `pnpm` — always use `bun` / `bunx`
|
- Use `npm`, `npx`, `node`, `yarn`, `pnpm` — always use `bun` / `bunx`
|
||||||
@@ -203,3 +271,9 @@ export const env = createEnv({
|
|||||||
- Use `as any`, `@ts-ignore`, `@ts-expect-error`
|
- Use `as any`, `@ts-ignore`, `@ts-expect-error`
|
||||||
- Commit `.env` files
|
- Commit `.env` files
|
||||||
- Use empty catch blocks
|
- Use empty catch blocks
|
||||||
|
- Import from `drizzle-zod` (use `drizzle-orm/zod` instead)
|
||||||
|
- Use RQBv1 callback-style `orderBy` / old `relations()` API
|
||||||
|
- Use `drizzle-orm/bun-sql` driver (use `drizzle-orm/postgres-js`)
|
||||||
|
- Pass `schema` to `drizzle()` constructor (only `relations` is needed in RQBv2)
|
||||||
|
- Import `os` from `@orpc/server` in middleware — use `@/server/api/server` (the local typed instance)
|
||||||
|
- Leave docs out of sync with code changes
|
||||||
|
|||||||
@@ -3,5 +3,10 @@
|
|||||||
"extends": "//",
|
"extends": "//",
|
||||||
"files": {
|
"files": {
|
||||||
"includes": ["**", "!**/routeTree.gen.ts"]
|
"includes": ["**", "!**/routeTree.gen.ts"]
|
||||||
|
},
|
||||||
|
"css": {
|
||||||
|
"parser": {
|
||||||
|
"tailwindDirectives": true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,111 +0,0 @@
|
|||||||
import { rm } from 'node:fs/promises'
|
|
||||||
import path from 'node:path'
|
|
||||||
import process from 'node:process'
|
|
||||||
|
|
||||||
const ALL_TARGETS = [
|
|
||||||
'bun-windows-x64',
|
|
||||||
'bun-darwin-arm64',
|
|
||||||
'bun-darwin-x64',
|
|
||||||
'bun-linux-x64',
|
|
||||||
'bun-linux-arm64',
|
|
||||||
] as const
|
|
||||||
|
|
||||||
type BunTarget = (typeof ALL_TARGETS)[number]
|
|
||||||
|
|
||||||
const ENTRYPOINT = '.output/server/index.mjs'
|
|
||||||
const OUTDIR = 'out'
|
|
||||||
const OUTFILE_BASE = 'server'
|
|
||||||
|
|
||||||
const DEFAULT_TARGETS: BunTarget[] = [
|
|
||||||
'bun-windows-x64',
|
|
||||||
'bun-darwin-arm64',
|
|
||||||
'bun-linux-x64',
|
|
||||||
]
|
|
||||||
|
|
||||||
const suffixFor = (target: BunTarget) => target.replace('bun-', '')
|
|
||||||
|
|
||||||
const isTarget = (value: string): value is BunTarget =>
|
|
||||||
(ALL_TARGETS as readonly string[]).includes(value)
|
|
||||||
|
|
||||||
const parseTargets = (): BunTarget[] => {
|
|
||||||
const args = process.argv.slice(2)
|
|
||||||
const targets: string[] = []
|
|
||||||
|
|
||||||
for (let i = 0; i < args.length; i++) {
|
|
||||||
const arg = args[i]
|
|
||||||
const next = args[i + 1]
|
|
||||||
if (arg === '--target' && next) {
|
|
||||||
targets.push(next)
|
|
||||||
i++
|
|
||||||
} else if (arg === '--targets' && next) {
|
|
||||||
targets.push(...next.split(','))
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (targets.length === 0) return DEFAULT_TARGETS
|
|
||||||
|
|
||||||
const invalid = targets.filter((t) => !isTarget(t))
|
|
||||||
if (invalid.length) {
|
|
||||||
throw new Error(
|
|
||||||
`Unknown target(s): ${invalid.join(', ')}\nAllowed: ${ALL_TARGETS.join(', ')}`,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return targets as BunTarget[]
|
|
||||||
}
|
|
||||||
|
|
||||||
const buildOne = async (target: BunTarget) => {
|
|
||||||
const suffix = suffixFor(target)
|
|
||||||
const outfile = `${OUTFILE_BASE}-${suffix}`
|
|
||||||
|
|
||||||
const result = await Bun.build({
|
|
||||||
entrypoints: [ENTRYPOINT],
|
|
||||||
outdir: OUTDIR,
|
|
||||||
compile: {
|
|
||||||
outfile,
|
|
||||||
target,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!result.success) {
|
|
||||||
throw new Error(
|
|
||||||
`Build failed for ${target}:\n${result.logs.map(String).join('\n')}`,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
target,
|
|
||||||
outputs: result.outputs.map((o) => path.relative('.', o.path)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const main = async () => {
|
|
||||||
const targets = parseTargets()
|
|
||||||
|
|
||||||
await rm(OUTDIR, { recursive: true, force: true })
|
|
||||||
console.log(`✓ 已清理输出目录: ${OUTDIR}`)
|
|
||||||
|
|
||||||
// Bun cross-compile 不支持真正并行,逐目标串行构建
|
|
||||||
const results: Awaited<ReturnType<typeof buildOne>>[] = []
|
|
||||||
for (const target of targets) {
|
|
||||||
const start = Date.now()
|
|
||||||
process.stdout.write(`🔨 构建 ${target}... `)
|
|
||||||
const result = await buildOne(target)
|
|
||||||
results.push(result)
|
|
||||||
console.log(`完成 (${Date.now() - start}ms)`)
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('\n📦 构建完成:')
|
|
||||||
for (const r of results) {
|
|
||||||
console.log(` ${r.target}:`)
|
|
||||||
for (const p of r.outputs) {
|
|
||||||
console.log(` - ${p}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
main().catch((err) => {
|
|
||||||
console.error('\n❌ 构建失败:')
|
|
||||||
console.error(err instanceof Error ? err.message : err)
|
|
||||||
process.exit(1)
|
|
||||||
})
|
|
||||||
64
apps/server/compile.ts
Normal file
64
apps/server/compile.ts
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
import { mkdir, rm } from 'node:fs/promises'
|
||||||
|
import { parseArgs } from 'node:util'
|
||||||
|
|
||||||
|
const ENTRYPOINT = '.output/server/index.mjs'
|
||||||
|
const OUTDIR = 'out'
|
||||||
|
|
||||||
|
const SUPPORTED_TARGETS: readonly Bun.Build.CompileTarget[] = [
|
||||||
|
'bun-windows-x64',
|
||||||
|
'bun-darwin-arm64',
|
||||||
|
'bun-darwin-x64',
|
||||||
|
'bun-linux-x64',
|
||||||
|
'bun-linux-arm64',
|
||||||
|
]
|
||||||
|
|
||||||
|
const isSupportedTarget = (value: string): value is Bun.Build.CompileTarget =>
|
||||||
|
(SUPPORTED_TARGETS as readonly string[]).includes(value)
|
||||||
|
|
||||||
|
const { values } = parseArgs({
|
||||||
|
options: { target: { type: 'string' } },
|
||||||
|
strict: true,
|
||||||
|
allowPositionals: false,
|
||||||
|
})
|
||||||
|
|
||||||
|
const resolveTarget = (): Bun.Build.CompileTarget => {
|
||||||
|
if (values.target !== undefined) {
|
||||||
|
if (!isSupportedTarget(values.target)) {
|
||||||
|
throw new Error(`Invalid target: ${values.target}\nAllowed: ${SUPPORTED_TARGETS.join(', ')}`)
|
||||||
|
}
|
||||||
|
return values.target
|
||||||
|
}
|
||||||
|
|
||||||
|
const os = process.platform === 'win32' ? 'windows' : process.platform
|
||||||
|
const candidate = `bun-${os}-${process.arch}`
|
||||||
|
if (!isSupportedTarget(candidate)) {
|
||||||
|
throw new Error(`Unsupported host: ${process.platform}-${process.arch}`)
|
||||||
|
}
|
||||||
|
return candidate
|
||||||
|
}
|
||||||
|
|
||||||
|
const main = async () => {
|
||||||
|
const target = resolveTarget()
|
||||||
|
const suffix = target.replace('bun-', '')
|
||||||
|
const outfile = `server-${suffix}`
|
||||||
|
|
||||||
|
await mkdir(OUTDIR, { recursive: true })
|
||||||
|
await Promise.all([rm(`${OUTDIR}/${outfile}`, { force: true }), rm(`${OUTDIR}/${outfile}.exe`, { force: true })])
|
||||||
|
|
||||||
|
const result = await Bun.build({
|
||||||
|
entrypoints: [ENTRYPOINT],
|
||||||
|
outdir: OUTDIR,
|
||||||
|
compile: { outfile, target },
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!result.success) {
|
||||||
|
throw new Error(result.logs.map(String).join('\n'))
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`✓ ${target} → ${OUTDIR}/${outfile}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error('❌', err instanceof Error ? err.message : err)
|
||||||
|
process.exit(1)
|
||||||
|
})
|
||||||
@@ -1,11 +1,12 @@
|
|||||||
import { defineConfig } from 'drizzle-kit'
|
import { defineConfig } from 'drizzle-kit'
|
||||||
import { env } from '@/env'
|
|
||||||
|
const databasePath = process.env.DATABASE_PATH ?? 'data.db'
|
||||||
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
out: './drizzle',
|
out: './drizzle',
|
||||||
schema: './src/server/db/schema/index.ts',
|
schema: './src/server/db/schema/index.ts',
|
||||||
dialect: 'postgresql',
|
dialect: 'sqlite',
|
||||||
dbCredentials: {
|
dbCredentials: {
|
||||||
url: env.DATABASE_URL,
|
url: databasePath,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -4,18 +4,26 @@
|
|||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "vite build",
|
"build": "bunx --bun vite build",
|
||||||
"compile": "bun build.ts",
|
"compile": "bun compile.ts",
|
||||||
"db:generate": "drizzle-kit generate",
|
"compile:darwin": "bun run compile:darwin:arm64 && bun run compile:darwin:x64",
|
||||||
"db:migrate": "drizzle-kit migrate",
|
"compile:darwin:arm64": "bun compile.ts --target bun-darwin-arm64",
|
||||||
"db:push": "drizzle-kit push",
|
"compile:darwin:x64": "bun compile.ts --target bun-darwin-x64",
|
||||||
"db:studio": "drizzle-kit studio",
|
"compile:linux": "bun run compile:linux:x64 && bun run compile:linux:arm64",
|
||||||
"dev": "vite dev",
|
"compile:linux:arm64": "bun compile.ts --target bun-linux-arm64",
|
||||||
|
"compile:linux:x64": "bun compile.ts --target bun-linux-x64",
|
||||||
|
"compile:windows": "bun run compile:windows:x64",
|
||||||
|
"compile:windows:x64": "bun compile.ts --target bun-windows-x64",
|
||||||
|
"db:generate": "bun --bun drizzle-kit generate",
|
||||||
|
"db:migrate": "bun --bun drizzle-kit migrate",
|
||||||
|
"db:push": "bun --bun drizzle-kit push",
|
||||||
|
"db:studio": "bun --bun drizzle-kit studio",
|
||||||
|
"dev": "bunx --bun vite dev",
|
||||||
"fix": "biome check --write",
|
"fix": "biome check --write",
|
||||||
"typecheck": "tsc --noEmit"
|
"typecheck": "tsc --noEmit"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@furtherverse/utils": "workspace:*",
|
"@furtherverse/crypto": "workspace:*",
|
||||||
"@orpc/client": "catalog:",
|
"@orpc/client": "catalog:",
|
||||||
"@orpc/contract": "catalog:",
|
"@orpc/contract": "catalog:",
|
||||||
"@orpc/openapi": "catalog:",
|
"@orpc/openapi": "catalog:",
|
||||||
@@ -28,10 +36,11 @@
|
|||||||
"@tanstack/react-router-ssr-query": "catalog:",
|
"@tanstack/react-router-ssr-query": "catalog:",
|
||||||
"@tanstack/react-start": "catalog:",
|
"@tanstack/react-start": "catalog:",
|
||||||
"drizzle-orm": "catalog:",
|
"drizzle-orm": "catalog:",
|
||||||
"drizzle-zod": "catalog:",
|
"jszip": "catalog:",
|
||||||
"postgres": "catalog:",
|
"lossless-json": "catalog:",
|
||||||
"react": "catalog:",
|
"react": "catalog:",
|
||||||
"react-dom": "catalog:",
|
"react-dom": "catalog:",
|
||||||
|
"systeminformation": "catalog:",
|
||||||
"uuid": "catalog:",
|
"uuid": "catalog:",
|
||||||
"zod": "catalog:"
|
"zod": "catalog:"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import { createORPCClient } from '@orpc/client'
|
import { createORPCClient } from '@orpc/client'
|
||||||
import { RPCLink } from '@orpc/client/fetch'
|
import { RPCLink } from '@orpc/client/fetch'
|
||||||
import { createRouterClient } from '@orpc/server'
|
import { createRouterClient } from '@orpc/server'
|
||||||
|
import { createTanstackQueryUtils } from '@orpc/tanstack-query'
|
||||||
import { createIsomorphicFn } from '@tanstack/react-start'
|
import { createIsomorphicFn } from '@tanstack/react-start'
|
||||||
import { getRequestHeaders } from '@tanstack/react-start/server'
|
import { getRequestHeaders } from '@tanstack/react-start/server'
|
||||||
import { router } from '@/server/api/routers'
|
import { router } from '@/server/api/routers'
|
||||||
@@ -21,4 +22,6 @@ const getORPCClient = createIsomorphicFn()
|
|||||||
return createORPCClient<RouterClient>(link)
|
return createORPCClient<RouterClient>(link)
|
||||||
})
|
})
|
||||||
|
|
||||||
export const orpc: RouterClient = getORPCClient()
|
const client: RouterClient = getORPCClient()
|
||||||
|
|
||||||
|
export const orpc = createTanstackQueryUtils(client)
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
import { createTanstackQueryUtils } from '@orpc/tanstack-query'
|
|
||||||
import { orpc as orpcClient } from './orpc.client'
|
|
||||||
|
|
||||||
export const orpc = createTanstackQueryUtils(orpcClient, {
|
|
||||||
experimental_defaults: {
|
|
||||||
todo: {
|
|
||||||
create: {
|
|
||||||
mutationOptions: {
|
|
||||||
onSuccess: (_, __, ___, ctx) => {
|
|
||||||
ctx.client.invalidateQueries({ queryKey: orpc.todo.list.key() })
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
update: {
|
|
||||||
mutationOptions: {
|
|
||||||
onSuccess: (_, __, ___, ctx) => {
|
|
||||||
ctx.client.invalidateQueries({ queryKey: orpc.todo.list.key() })
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
remove: {
|
|
||||||
mutationOptions: {
|
|
||||||
onSuccess: (_, __, ___, ctx) => {
|
|
||||||
ctx.client.invalidateQueries({ queryKey: orpc.todo.list.key() })
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
})
|
|
||||||
@@ -3,7 +3,7 @@ import { z } from 'zod'
|
|||||||
|
|
||||||
export const env = createEnv({
|
export const env = createEnv({
|
||||||
server: {
|
server: {
|
||||||
DATABASE_URL: z.url(),
|
DATABASE_PATH: z.string().min(1).default('data.db'),
|
||||||
},
|
},
|
||||||
clientPrefix: 'VITE_',
|
clientPrefix: 'VITE_',
|
||||||
client: {
|
client: {
|
||||||
|
|||||||
@@ -10,6 +10,7 @@
|
|||||||
|
|
||||||
import { Route as rootRouteImport } from './routes/__root'
|
import { Route as rootRouteImport } from './routes/__root'
|
||||||
import { Route as IndexRouteImport } from './routes/index'
|
import { Route as IndexRouteImport } from './routes/index'
|
||||||
|
import { Route as ApiHealthRouteImport } from './routes/api/health'
|
||||||
import { Route as ApiSplatRouteImport } from './routes/api/$'
|
import { Route as ApiSplatRouteImport } from './routes/api/$'
|
||||||
import { Route as ApiRpcSplatRouteImport } from './routes/api/rpc.$'
|
import { Route as ApiRpcSplatRouteImport } from './routes/api/rpc.$'
|
||||||
|
|
||||||
@@ -18,6 +19,11 @@ const IndexRoute = IndexRouteImport.update({
|
|||||||
path: '/',
|
path: '/',
|
||||||
getParentRoute: () => rootRouteImport,
|
getParentRoute: () => rootRouteImport,
|
||||||
} as any)
|
} as any)
|
||||||
|
const ApiHealthRoute = ApiHealthRouteImport.update({
|
||||||
|
id: '/api/health',
|
||||||
|
path: '/api/health',
|
||||||
|
getParentRoute: () => rootRouteImport,
|
||||||
|
} as any)
|
||||||
const ApiSplatRoute = ApiSplatRouteImport.update({
|
const ApiSplatRoute = ApiSplatRouteImport.update({
|
||||||
id: '/api/$',
|
id: '/api/$',
|
||||||
path: '/api/$',
|
path: '/api/$',
|
||||||
@@ -32,30 +38,34 @@ const ApiRpcSplatRoute = ApiRpcSplatRouteImport.update({
|
|||||||
export interface FileRoutesByFullPath {
|
export interface FileRoutesByFullPath {
|
||||||
'/': typeof IndexRoute
|
'/': typeof IndexRoute
|
||||||
'/api/$': typeof ApiSplatRoute
|
'/api/$': typeof ApiSplatRoute
|
||||||
|
'/api/health': typeof ApiHealthRoute
|
||||||
'/api/rpc/$': typeof ApiRpcSplatRoute
|
'/api/rpc/$': typeof ApiRpcSplatRoute
|
||||||
}
|
}
|
||||||
export interface FileRoutesByTo {
|
export interface FileRoutesByTo {
|
||||||
'/': typeof IndexRoute
|
'/': typeof IndexRoute
|
||||||
'/api/$': typeof ApiSplatRoute
|
'/api/$': typeof ApiSplatRoute
|
||||||
|
'/api/health': typeof ApiHealthRoute
|
||||||
'/api/rpc/$': typeof ApiRpcSplatRoute
|
'/api/rpc/$': typeof ApiRpcSplatRoute
|
||||||
}
|
}
|
||||||
export interface FileRoutesById {
|
export interface FileRoutesById {
|
||||||
__root__: typeof rootRouteImport
|
__root__: typeof rootRouteImport
|
||||||
'/': typeof IndexRoute
|
'/': typeof IndexRoute
|
||||||
'/api/$': typeof ApiSplatRoute
|
'/api/$': typeof ApiSplatRoute
|
||||||
|
'/api/health': typeof ApiHealthRoute
|
||||||
'/api/rpc/$': typeof ApiRpcSplatRoute
|
'/api/rpc/$': typeof ApiRpcSplatRoute
|
||||||
}
|
}
|
||||||
export interface FileRouteTypes {
|
export interface FileRouteTypes {
|
||||||
fileRoutesByFullPath: FileRoutesByFullPath
|
fileRoutesByFullPath: FileRoutesByFullPath
|
||||||
fullPaths: '/' | '/api/$' | '/api/rpc/$'
|
fullPaths: '/' | '/api/$' | '/api/health' | '/api/rpc/$'
|
||||||
fileRoutesByTo: FileRoutesByTo
|
fileRoutesByTo: FileRoutesByTo
|
||||||
to: '/' | '/api/$' | '/api/rpc/$'
|
to: '/' | '/api/$' | '/api/health' | '/api/rpc/$'
|
||||||
id: '__root__' | '/' | '/api/$' | '/api/rpc/$'
|
id: '__root__' | '/' | '/api/$' | '/api/health' | '/api/rpc/$'
|
||||||
fileRoutesById: FileRoutesById
|
fileRoutesById: FileRoutesById
|
||||||
}
|
}
|
||||||
export interface RootRouteChildren {
|
export interface RootRouteChildren {
|
||||||
IndexRoute: typeof IndexRoute
|
IndexRoute: typeof IndexRoute
|
||||||
ApiSplatRoute: typeof ApiSplatRoute
|
ApiSplatRoute: typeof ApiSplatRoute
|
||||||
|
ApiHealthRoute: typeof ApiHealthRoute
|
||||||
ApiRpcSplatRoute: typeof ApiRpcSplatRoute
|
ApiRpcSplatRoute: typeof ApiRpcSplatRoute
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -68,6 +78,13 @@ declare module '@tanstack/react-router' {
|
|||||||
preLoaderRoute: typeof IndexRouteImport
|
preLoaderRoute: typeof IndexRouteImport
|
||||||
parentRoute: typeof rootRouteImport
|
parentRoute: typeof rootRouteImport
|
||||||
}
|
}
|
||||||
|
'/api/health': {
|
||||||
|
id: '/api/health'
|
||||||
|
path: '/api/health'
|
||||||
|
fullPath: '/api/health'
|
||||||
|
preLoaderRoute: typeof ApiHealthRouteImport
|
||||||
|
parentRoute: typeof rootRouteImport
|
||||||
|
}
|
||||||
'/api/$': {
|
'/api/$': {
|
||||||
id: '/api/$'
|
id: '/api/$'
|
||||||
path: '/api/$'
|
path: '/api/$'
|
||||||
@@ -88,6 +105,7 @@ declare module '@tanstack/react-router' {
|
|||||||
const rootRouteChildren: RootRouteChildren = {
|
const rootRouteChildren: RootRouteChildren = {
|
||||||
IndexRoute: IndexRoute,
|
IndexRoute: IndexRoute,
|
||||||
ApiSplatRoute: ApiSplatRoute,
|
ApiSplatRoute: ApiSplatRoute,
|
||||||
|
ApiHealthRoute: ApiHealthRoute,
|
||||||
ApiRpcSplatRoute: ApiRpcSplatRoute,
|
ApiRpcSplatRoute: ApiRpcSplatRoute,
|
||||||
}
|
}
|
||||||
export const routeTree = rootRouteImport
|
export const routeTree = rootRouteImport
|
||||||
|
|||||||
@@ -5,7 +5,14 @@ import type { RouterContext } from './routes/__root'
|
|||||||
import { routeTree } from './routeTree.gen'
|
import { routeTree } from './routeTree.gen'
|
||||||
|
|
||||||
export const getRouter = () => {
|
export const getRouter = () => {
|
||||||
const queryClient = new QueryClient()
|
const queryClient = new QueryClient({
|
||||||
|
defaultOptions: {
|
||||||
|
queries: {
|
||||||
|
staleTime: 30 * 1000,
|
||||||
|
retry: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
const router = createRouter({
|
const router = createRouter({
|
||||||
routeTree,
|
routeTree,
|
||||||
|
|||||||
@@ -1,15 +1,11 @@
|
|||||||
import { TanStackDevtools } from '@tanstack/react-devtools'
|
import { TanStackDevtools } from '@tanstack/react-devtools'
|
||||||
import type { QueryClient } from '@tanstack/react-query'
|
import type { QueryClient } from '@tanstack/react-query'
|
||||||
import { ReactQueryDevtoolsPanel } from '@tanstack/react-query-devtools'
|
import { ReactQueryDevtoolsPanel } from '@tanstack/react-query-devtools'
|
||||||
import {
|
import { createRootRouteWithContext, HeadContent, Scripts } from '@tanstack/react-router'
|
||||||
createRootRouteWithContext,
|
|
||||||
HeadContent,
|
|
||||||
Scripts,
|
|
||||||
} from '@tanstack/react-router'
|
|
||||||
import { TanStackRouterDevtoolsPanel } from '@tanstack/react-router-devtools'
|
import { TanStackRouterDevtoolsPanel } from '@tanstack/react-router-devtools'
|
||||||
import type { ReactNode } from 'react'
|
import type { ReactNode } from 'react'
|
||||||
import { ErrorComponent } from '@/components/Error'
|
import { ErrorComponent } from '@/components/Error'
|
||||||
import { NotFoundComponent } from '@/components/NotFount'
|
import { NotFoundComponent } from '@/components/NotFound'
|
||||||
import appCss from '@/styles.css?url'
|
import appCss from '@/styles.css?url'
|
||||||
|
|
||||||
export interface RouterContext {
|
export interface RouterContext {
|
||||||
@@ -50,6 +46,7 @@ function RootDocument({ children }: Readonly<{ children: ReactNode }>) {
|
|||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
{children}
|
{children}
|
||||||
|
{import.meta.env.DEV && (
|
||||||
<TanStackDevtools
|
<TanStackDevtools
|
||||||
config={{
|
config={{
|
||||||
position: 'bottom-right',
|
position: 'bottom-right',
|
||||||
@@ -65,6 +62,7 @@ function RootDocument({ children }: Readonly<{ children: ReactNode }>) {
|
|||||||
},
|
},
|
||||||
]}
|
]}
|
||||||
/>
|
/>
|
||||||
|
)}
|
||||||
<Scripts />
|
<Scripts />
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
import { OpenAPIHandler } from '@orpc/openapi/fetch'
|
import { OpenAPIHandler } from '@orpc/openapi/fetch'
|
||||||
import { OpenAPIReferencePlugin } from '@orpc/openapi/plugins'
|
import { OpenAPIReferencePlugin } from '@orpc/openapi/plugins'
|
||||||
import { ORPCError, onError, ValidationError } from '@orpc/server'
|
import { onError } from '@orpc/server'
|
||||||
import { ZodToJsonSchemaConverter } from '@orpc/zod/zod4'
|
import { ZodToJsonSchemaConverter } from '@orpc/zod/zod4'
|
||||||
import { createFileRoute } from '@tanstack/react-router'
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
import { z } from 'zod'
|
|
||||||
import { name, version } from '@/../package.json'
|
import { name, version } from '@/../package.json'
|
||||||
|
import { handleValidationError, logError } from '@/server/api/interceptors'
|
||||||
import { router } from '@/server/api/routers'
|
import { router } from '@/server/api/routers'
|
||||||
|
|
||||||
const handler = new OpenAPIHandler(router, {
|
const handler = new OpenAPIHandler(router, {
|
||||||
@@ -16,56 +16,16 @@ const handler = new OpenAPIHandler(router, {
|
|||||||
info: {
|
info: {
|
||||||
title: name,
|
title: name,
|
||||||
version,
|
version,
|
||||||
|
description:
|
||||||
|
'UX 授权服务 OpenAPI 文档。该服务用于工具箱侧本地身份初始化与密码学能力调用,覆盖设备授权密文生成、任务二维码解密、摘要信息加密、报告签名打包等流程。\n\n推荐调用顺序:\n1) 写入平台公钥;\n2) 写入已签名 licence JSON;\n3) 写入 OpenPGP 私钥;\n4) 读取本机身份状态进行前置校验;\n5) 执行加密/解密与签名接口。\n\n说明:除文件下载接口外,返回体均为 JSON;字段示例已提供,便于联调和 Mock。',
|
||||||
},
|
},
|
||||||
// components: {
|
|
||||||
// securitySchemes: {
|
|
||||||
// bearerAuth: {
|
|
||||||
// type: 'http',
|
|
||||||
// scheme: 'bearer',
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
},
|
},
|
||||||
docsPath: '/docs',
|
docsPath: '/docs',
|
||||||
specPath: '/spec.json',
|
specPath: '/spec.json',
|
||||||
}),
|
}),
|
||||||
],
|
],
|
||||||
interceptors: [
|
interceptors: [onError(logError)],
|
||||||
onError((error) => {
|
clientInterceptors: [onError(handleValidationError)],
|
||||||
console.error(error)
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
clientInterceptors: [
|
|
||||||
onError((error) => {
|
|
||||||
if (
|
|
||||||
error instanceof ORPCError &&
|
|
||||||
error.code === 'BAD_REQUEST' &&
|
|
||||||
error.cause instanceof ValidationError
|
|
||||||
) {
|
|
||||||
// If you only use Zod you can safely cast to ZodIssue[]
|
|
||||||
const zodError = new z.ZodError(
|
|
||||||
error.cause.issues as z.core.$ZodIssue[],
|
|
||||||
)
|
|
||||||
|
|
||||||
throw new ORPCError('INPUT_VALIDATION_FAILED', {
|
|
||||||
status: 422,
|
|
||||||
message: z.prettifyError(zodError),
|
|
||||||
data: z.flattenError(zodError),
|
|
||||||
cause: error.cause,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
error instanceof ORPCError &&
|
|
||||||
error.code === 'INTERNAL_SERVER_ERROR' &&
|
|
||||||
error.cause instanceof ValidationError
|
|
||||||
) {
|
|
||||||
throw new ORPCError('OUTPUT_VALIDATION_FAILED', {
|
|
||||||
cause: error.cause,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
})
|
})
|
||||||
|
|
||||||
export const Route = createFileRoute('/api/$')({
|
export const Route = createFileRoute('/api/$')({
|
||||||
|
|||||||
27
apps/server/src/routes/api/health.ts
Normal file
27
apps/server/src/routes/api/health.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
|
import { name, version } from '@/../package.json'
|
||||||
|
|
||||||
|
const createHealthResponse = (): Response =>
|
||||||
|
Response.json(
|
||||||
|
{
|
||||||
|
status: 'ok',
|
||||||
|
service: name,
|
||||||
|
version,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
headers: {
|
||||||
|
'cache-control': 'no-store',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
export const Route = createFileRoute('/api/health')({
|
||||||
|
server: {
|
||||||
|
handlers: {
|
||||||
|
GET: async () => createHealthResponse(),
|
||||||
|
HEAD: async () => new Response(null, { status: 200 }),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
@@ -1,46 +1,12 @@
|
|||||||
import { ORPCError, onError, ValidationError } from '@orpc/server'
|
import { onError } from '@orpc/server'
|
||||||
import { RPCHandler } from '@orpc/server/fetch'
|
import { RPCHandler } from '@orpc/server/fetch'
|
||||||
import { createFileRoute } from '@tanstack/react-router'
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
import { z } from 'zod'
|
import { handleValidationError, logError } from '@/server/api/interceptors'
|
||||||
import { router } from '@/server/api/routers'
|
import { router } from '@/server/api/routers'
|
||||||
|
|
||||||
const handler = new RPCHandler(router, {
|
const handler = new RPCHandler(router, {
|
||||||
interceptors: [
|
interceptors: [onError(logError)],
|
||||||
onError((error) => {
|
clientInterceptors: [onError(handleValidationError)],
|
||||||
console.error(error)
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
clientInterceptors: [
|
|
||||||
onError((error) => {
|
|
||||||
if (
|
|
||||||
error instanceof ORPCError &&
|
|
||||||
error.code === 'BAD_REQUEST' &&
|
|
||||||
error.cause instanceof ValidationError
|
|
||||||
) {
|
|
||||||
// If you only use Zod you can safely cast to ZodIssue[]
|
|
||||||
const zodError = new z.ZodError(
|
|
||||||
error.cause.issues as z.core.$ZodIssue[],
|
|
||||||
)
|
|
||||||
|
|
||||||
throw new ORPCError('INPUT_VALIDATION_FAILED', {
|
|
||||||
status: 422,
|
|
||||||
message: z.prettifyError(zodError),
|
|
||||||
data: z.flattenError(zodError),
|
|
||||||
cause: error.cause,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
error instanceof ORPCError &&
|
|
||||||
error.code === 'INTERNAL_SERVER_ERROR' &&
|
|
||||||
error.cause instanceof ValidationError
|
|
||||||
) {
|
|
||||||
throw new ORPCError('OUTPUT_VALIDATION_FAILED', {
|
|
||||||
cause: error.cause,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
})
|
})
|
||||||
|
|
||||||
export const Route = createFileRoute('/api/rpc/$')({
|
export const Route = createFileRoute('/api/rpc/$')({
|
||||||
|
|||||||
@@ -1,208 +1,21 @@
|
|||||||
import { useMutation, useSuspenseQuery } from '@tanstack/react-query'
|
|
||||||
import { createFileRoute } from '@tanstack/react-router'
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
import type { ChangeEventHandler, FormEventHandler } from 'react'
|
|
||||||
import { useState } from 'react'
|
|
||||||
import { orpc } from '@/client/query-client'
|
|
||||||
|
|
||||||
export const Route = createFileRoute('/')({
|
export const Route = createFileRoute('/')({
|
||||||
component: Todos,
|
component: Home,
|
||||||
loader: async ({ context }) => {
|
|
||||||
await context.queryClient.ensureQueryData(orpc.todo.list.queryOptions())
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
|
|
||||||
function Todos() {
|
function Home() {
|
||||||
const [newTodoTitle, setNewTodoTitle] = useState('')
|
|
||||||
|
|
||||||
const listQuery = useSuspenseQuery(orpc.todo.list.queryOptions())
|
|
||||||
const createMutation = useMutation(orpc.todo.create.mutationOptions())
|
|
||||||
const updateMutation = useMutation(orpc.todo.update.mutationOptions())
|
|
||||||
const deleteMutation = useMutation(orpc.todo.remove.mutationOptions())
|
|
||||||
|
|
||||||
const handleCreateTodo: FormEventHandler<HTMLFormElement> = (e) => {
|
|
||||||
e.preventDefault()
|
|
||||||
if (newTodoTitle.trim()) {
|
|
||||||
createMutation.mutate({ title: newTodoTitle.trim() })
|
|
||||||
setNewTodoTitle('')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const handleInputChange: ChangeEventHandler<HTMLInputElement> = (e) => {
|
|
||||||
setNewTodoTitle(e.target.value)
|
|
||||||
}
|
|
||||||
|
|
||||||
const handleToggleTodo = (id: string, currentCompleted: boolean) => {
|
|
||||||
updateMutation.mutate({
|
|
||||||
id,
|
|
||||||
data: { completed: !currentCompleted },
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const handleDeleteTodo = (id: string) => {
|
|
||||||
deleteMutation.mutate({ id })
|
|
||||||
}
|
|
||||||
|
|
||||||
const todos = listQuery.data
|
|
||||||
const completedCount = todos.filter((todo) => todo.completed).length
|
|
||||||
const totalCount = todos.length
|
|
||||||
const progress = totalCount > 0 ? (completedCount / totalCount) * 100 : 0
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="min-h-screen bg-slate-50 py-12 px-4 sm:px-6 font-sans">
|
<div className="min-h-screen bg-slate-50 flex items-center justify-center font-sans">
|
||||||
<div className="max-w-2xl mx-auto space-y-8">
|
<div className="text-center space-y-4">
|
||||||
{/* Header */}
|
<h1 className="text-3xl font-bold text-slate-900 tracking-tight">UX Server</h1>
|
||||||
<div className="flex items-end justify-between">
|
<p className="text-slate-500">
|
||||||
<div>
|
API Docs:
|
||||||
<h1 className="text-3xl font-bold text-slate-900 tracking-tight">
|
<a href="/api/docs" className="text-indigo-600 hover:text-indigo-700 underline">
|
||||||
我的待办
|
/api/docs
|
||||||
</h1>
|
</a>
|
||||||
<p className="text-slate-500 mt-1">保持专注,逐个击破</p>
|
|
||||||
</div>
|
|
||||||
<div className="text-right">
|
|
||||||
<div className="text-2xl font-semibold text-slate-900">
|
|
||||||
{completedCount}
|
|
||||||
<span className="text-slate-400 text-lg">/{totalCount}</span>
|
|
||||||
</div>
|
|
||||||
<div className="text-xs font-medium text-slate-400 uppercase tracking-wider">
|
|
||||||
已完成
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Add Todo Form */}
|
|
||||||
<form onSubmit={handleCreateTodo} className="relative group z-10">
|
|
||||||
<div className="relative transform transition-all duration-200 focus-within:-translate-y-1">
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
value={newTodoTitle}
|
|
||||||
onChange={handleInputChange}
|
|
||||||
placeholder="添加新任务..."
|
|
||||||
className="w-full pl-6 pr-32 py-5 bg-white rounded-2xl shadow-[0_8px_30px_rgb(0,0,0,0.04)] border-0 ring-1 ring-slate-100 focus:ring-2 focus:ring-indigo-500/50 outline-none transition-all placeholder:text-slate-400 text-lg text-slate-700"
|
|
||||||
disabled={createMutation.isPending}
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
type="submit"
|
|
||||||
disabled={createMutation.isPending || !newTodoTitle.trim()}
|
|
||||||
className="absolute right-3 top-3 bottom-3 px-6 bg-indigo-600 hover:bg-indigo-700 text-white rounded-xl font-medium transition-all shadow-md shadow-indigo-200 disabled:opacity-50 disabled:shadow-none hover:shadow-lg hover:shadow-indigo-300 active:scale-95"
|
|
||||||
>
|
|
||||||
{createMutation.isPending ? '添加中' : '添加'}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
|
|
||||||
{/* Progress Bar (Only visible when there are tasks) */}
|
|
||||||
{totalCount > 0 && (
|
|
||||||
<div className="h-1.5 w-full bg-slate-200 rounded-full overflow-hidden">
|
|
||||||
<div
|
|
||||||
className="h-full bg-indigo-500 transition-all duration-500 ease-out rounded-full"
|
|
||||||
style={{ width: `${progress}%` }}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{/* Todo List */}
|
|
||||||
<div className="space-y-3">
|
|
||||||
{todos.length === 0 ? (
|
|
||||||
<div className="py-20 text-center">
|
|
||||||
<div className="inline-flex items-center justify-center w-16 h-16 rounded-full bg-slate-100 mb-4">
|
|
||||||
<svg
|
|
||||||
className="w-8 h-8 text-slate-400"
|
|
||||||
fill="none"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
stroke="currentColor"
|
|
||||||
aria-hidden="true"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
strokeWidth={1.5}
|
|
||||||
d="M12 6v6m0 0v6m0-6h6m-6 0H6"
|
|
||||||
/>
|
|
||||||
</svg>
|
|
||||||
</div>
|
|
||||||
<p className="text-slate-500 text-lg font-medium">没有待办事项</p>
|
|
||||||
<p className="text-slate-400 text-sm mt-1">
|
|
||||||
输入上方内容添加您的第一个任务
|
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
) : (
|
|
||||||
todos.map((todo) => (
|
|
||||||
<div
|
|
||||||
key={todo.id}
|
|
||||||
className={`group relative flex items-center p-4 bg-white rounded-xl border border-slate-100 shadow-sm transition-all duration-200 hover:shadow-md hover:border-slate-200 ${
|
|
||||||
todo.completed ? 'bg-slate-50/50' : ''
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => handleToggleTodo(todo.id, todo.completed)}
|
|
||||||
className={`flex-shrink-0 w-6 h-6 rounded-full border-2 transition-all duration-200 flex items-center justify-center mr-4 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 ${
|
|
||||||
todo.completed
|
|
||||||
? 'bg-indigo-500 border-indigo-500'
|
|
||||||
: 'border-slate-300 hover:border-indigo-500 bg-white'
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
{todo.completed && (
|
|
||||||
<svg
|
|
||||||
className="w-3.5 h-3.5 text-white"
|
|
||||||
fill="none"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
stroke="currentColor"
|
|
||||||
strokeWidth={3}
|
|
||||||
aria-hidden="true"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
d="M5 13l4 4L19 7"
|
|
||||||
/>
|
|
||||||
</svg>
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
|
|
||||||
<div className="flex-1 min-w-0">
|
|
||||||
<p
|
|
||||||
className={`text-lg transition-all duration-200 truncate ${
|
|
||||||
todo.completed
|
|
||||||
? 'text-slate-400 line-through decoration-slate-300 decoration-2'
|
|
||||||
: 'text-slate-700'
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
{todo.title}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="flex items-center opacity-0 group-hover:opacity-100 transition-opacity duration-200 absolute right-4 pl-4 bg-gradient-to-l from-white via-white to-transparent sm:static sm:bg-none">
|
|
||||||
<span className="text-xs text-slate-400 mr-3 hidden sm:inline-block">
|
|
||||||
{new Date(todo.createdAt).toLocaleDateString('zh-CN')}
|
|
||||||
</span>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => handleDeleteTodo(todo.id)}
|
|
||||||
className="p-2 text-slate-400 hover:text-red-500 hover:bg-red-50 rounded-lg transition-colors focus:outline-none"
|
|
||||||
title="删除"
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
className="w-5 h-5"
|
|
||||||
fill="none"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
stroke="currentColor"
|
|
||||||
strokeWidth={1.5}
|
|
||||||
aria-hidden="true"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"
|
|
||||||
/>
|
|
||||||
</svg>
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
))
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
127
apps/server/src/server/api/contracts/config.contract.ts
Normal file
127
apps/server/src/server/api/contracts/config.contract.ts
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
import { oc } from '@orpc/contract'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { licenceEnvelopeSchema } from '@/server/licence'
|
||||||
|
|
||||||
|
const licenceOutput = z
|
||||||
|
.object({
|
||||||
|
licenceId: z.string().describe('验签通过后的 licence 标识'),
|
||||||
|
expireTime: z.string().describe('授权到期日,格式为 YYYY-MM-DD'),
|
||||||
|
isExpired: z.boolean().describe('当前 licence 是否已过期(按 UTC 自然日计算)'),
|
||||||
|
})
|
||||||
|
.describe('当前已安装 licence 的验证后元数据')
|
||||||
|
|
||||||
|
const configOutput = z
|
||||||
|
.object({
|
||||||
|
licence: licenceOutput.nullable().describe('当前本地已验证 licence 的元数据,未设置时为 null'),
|
||||||
|
fingerprint: z.string().describe('UX 本机计算得到的设备特征码(SHA-256)'),
|
||||||
|
hasPlatformPublicKey: z.boolean().describe('是否已配置平台公钥'),
|
||||||
|
hasPgpPrivateKey: z.boolean().describe('是否已配置 OpenPGP 私钥'),
|
||||||
|
})
|
||||||
|
.describe('本地身份配置快照,用于判断设备授权初始化是否完成')
|
||||||
|
.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
licence: {
|
||||||
|
licenceId: 'LIC-20260319-0025',
|
||||||
|
expireTime: '2027-03-19',
|
||||||
|
isExpired: false,
|
||||||
|
},
|
||||||
|
fingerprint: '9a3b7c1d2e4f5a6b8c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1a2b',
|
||||||
|
hasPlatformPublicKey: true,
|
||||||
|
hasPgpPrivateKey: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
licence: null,
|
||||||
|
fingerprint: '9a3b7c1d2e4f5a6b8c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1a2b',
|
||||||
|
hasPlatformPublicKey: false,
|
||||||
|
hasPgpPrivateKey: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
|
||||||
|
export const get = oc
|
||||||
|
.route({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/config/get',
|
||||||
|
operationId: 'configGet',
|
||||||
|
summary: '读取本机身份配置',
|
||||||
|
description:
|
||||||
|
'查询 UX 当前本地身份配置状态。\n\n典型用途:页面初始化时检测授权状态、验签前检查平台公钥、签名前检查私钥是否就绪。\n\n返回内容:\n- licence:当前已验证 licence 的元数据,未设置时为 null;\n- fingerprint:设备特征码(本机自动计算);\n- hasPlatformPublicKey:是否已写入平台公钥;\n- hasPgpPrivateKey:是否已写入 OpenPGP 私钥。',
|
||||||
|
tags: ['Config'],
|
||||||
|
})
|
||||||
|
.input(z.object({}).describe('空请求体,仅触发读取当前配置'))
|
||||||
|
.output(configOutput)
|
||||||
|
|
||||||
|
export const setLicence = oc
|
||||||
|
.route({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/config/set-licence',
|
||||||
|
operationId: 'configSetLicence',
|
||||||
|
summary: '写入本地 licence',
|
||||||
|
description:
|
||||||
|
'写入或更新本机持久化 licence。\n\n调用时机:设备首次激活、授权码变更、授权修复。\n\n约束与行为:\n- 接收 `.lic` 文件内容对应的 JSON 信封,而不是文件上传;\n- 使用已配置的平台公钥对 payload 原始字符串做 SHA256withRSA 验签;\n- 仅在验签通过且 expire_time 未过期时持久化;\n- fingerprint 由本机自动计算,不允许外部覆盖;\n- 成功后返回最新配置快照,便于前端立即刷新授权状态。',
|
||||||
|
tags: ['Config'],
|
||||||
|
})
|
||||||
|
.input(
|
||||||
|
licenceEnvelopeSchema.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
payload: 'eyJsaWNlbmNlX2lkIjoiTElDLTIwMjYwMzE5LTAwMjUiLCJleHBpcmVfdGltZSI6IjIwMjctMDMtMTkifQ==',
|
||||||
|
signature:
|
||||||
|
'aLd+wwpz1W5AS0jgE/IstSNjCAQ5estQYIMqeLXRWMIsnKxjZpCvC8O5q/G5LEBBLJXnbTk8N6IMTUx295nf2HQYlXNtJkWiBeUXQ6/uzs0RbhCeRAWK2Hx4kSsmiEv4AHGLb4ozI2XekTc+40+ApJQYqaWbDu/NU99TmDm3/da1VkKpQxH60BhSQVwBtU67w9Vp3SpWm8y1faQ7ci5WDtJf1JZaS70kPXoGeA5018rPeMFlEzUp10yDlGW6RcrT7Dm+r7zFyrFznLK+evBEvTf9mMGWwZZP3q9vJtC/wFt1t5zNHdkb27cTwc9yyqGMWdelXQAQDnoisn2Jzi06KA==',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(configOutput)
|
||||||
|
|
||||||
|
export const setPgpPrivateKey = oc
|
||||||
|
.route({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/config/set-pgp-private-key',
|
||||||
|
operationId: 'configSetPgpPrivateKey',
|
||||||
|
summary: '写入本地 OpenPGP 私钥',
|
||||||
|
description:
|
||||||
|
'写入或更新本机持久化 OpenPGP 私钥(ASCII armored)。\n\n调用时机:首次导入签名私钥、私钥轮换。\n\n约束与行为:\n- 仅接收 ASCII armored 私钥文本;\n- 私钥保存在本地,后续报告签名接口会自动读取;\n- 成功后返回最新配置快照,可用于确认 hasPgpPrivateKey 状态。',
|
||||||
|
tags: ['Config'],
|
||||||
|
})
|
||||||
|
.input(
|
||||||
|
z
|
||||||
|
.object({
|
||||||
|
pgpPrivateKey: z.string().min(1).describe('OpenPGP 私钥(ASCII armored 格式)'),
|
||||||
|
})
|
||||||
|
.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
pgpPrivateKey: '-----BEGIN PGP PRIVATE KEY BLOCK-----\n\nxcMGBGd...\n-----END PGP PRIVATE KEY BLOCK-----',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(configOutput)
|
||||||
|
|
||||||
|
export const setPlatformPublicKey = oc
|
||||||
|
.route({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/config/set-platform-public-key',
|
||||||
|
operationId: 'configSetPlatformPublicKey',
|
||||||
|
summary: '写入本地平台公钥',
|
||||||
|
description:
|
||||||
|
'写入或更新本机持久化平台公钥(Base64 编码 SPKI DER)。\n\n调用时机:设备授权初始化、平台公钥轮换。\n\n约束与行为:\n- 仅接收可解析的平台 RSA 公钥文本;\n- 公钥保存在本地,设备授权密文接口和 licence 验签都会自动读取,无需每次传参;\n- 若平台公钥发生变化,已安装 licence 会被清空,需要重新安装已签名 licence;\n- 成功后返回最新配置快照,可用于确认 hasPlatformPublicKey 状态。',
|
||||||
|
tags: ['Config'],
|
||||||
|
})
|
||||||
|
.input(
|
||||||
|
z
|
||||||
|
.object({
|
||||||
|
platformPublicKey: z.string().min(1).describe('平台公钥(Base64 编码 SPKI DER)'),
|
||||||
|
})
|
||||||
|
.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
platformPublicKey:
|
||||||
|
'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzDlZvMDVaL+fjl05Hi182JOAUAaN4gh9rOF+1NhKfO4J6e0HLy8lBuylp3A4xoTiyUejNm22h0dqAgDSPnY/xZR76POFTD1soHr2LaFCN8JAbQ96P8gE7wC9qpoTssVvIVRH7QbVd260J6eD0Szwcx9cg591RSN69pMpe5IVRi8T99Hhql6/wnZHORPr18eESLOY93jRskLzc0q18r68RRoTJiQf+9YC8ub5iKp7rCjVnPi1UbIYmXmL08tk5mksYA0NqWQAa1ofKxx/9tQtB9uTjhTxuTu94XU9jlGU87qaHZs+kpqa8CAbYYJFbSP1xHwoZzpU2jpw2aF22HBYxwIDAQAB',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(configOutput)
|
||||||
163
apps/server/src/server/api/contracts/crypto.contract.ts
Normal file
163
apps/server/src/server/api/contracts/crypto.contract.ts
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
import { oc } from '@orpc/contract'
|
||||||
|
import { z } from 'zod'
|
||||||
|
|
||||||
|
export const encryptDeviceInfo = oc
|
||||||
|
.route({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/crypto/encrypt-device-info',
|
||||||
|
operationId: 'encryptDeviceInfo',
|
||||||
|
summary: '生成设备授权二维码密文',
|
||||||
|
description:
|
||||||
|
'生成设备授权流程所需的二维码密文。\n\n处理流程:\n- 读取本机已验证的 licenceId、fingerprint 与本地持久化的平台公钥;\n- 组装为授权载荷 JSON;\n- 使用平台公钥执行 RSA-OAEP(SHA-256) 加密;\n- 返回 Base64 密文供前端生成二维码。\n\n适用场景:设备授权申请、重新授权。\n\n前置条件:需先调用 config.setPlatformPublicKey 写入平台公钥,并通过 config.setLicence 安装已签名 licence。',
|
||||||
|
tags: ['Crypto'],
|
||||||
|
})
|
||||||
|
.input(z.object({}).describe('空请求体。平台公钥由本地配置自动读取'))
|
||||||
|
.output(
|
||||||
|
z
|
||||||
|
.object({
|
||||||
|
encrypted: z.string().describe('Base64 密文(可直接用于设备授权二维码内容)'),
|
||||||
|
})
|
||||||
|
.describe('设备授权密文生成结果')
|
||||||
|
.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
encrypted: 'dGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIFJTQS1PQUVQIGVuY3J5cHRlZCBkZXZpY2UgaW5mby4uLg==',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
export const decryptTask = oc
|
||||||
|
.route({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/crypto/decrypt-task',
|
||||||
|
operationId: 'decryptTask',
|
||||||
|
summary: '解密任务二维码数据',
|
||||||
|
description:
|
||||||
|
'解密 App 下发的任务二维码密文。\n\n处理流程:\n- 基于本机已验证的 licenceId + fingerprint 派生 AES-256-GCM 密钥;\n- 对二维码中的 Base64 密文进行解密;\n- 返回任务明文 JSON 字符串。\n\n适用场景:扫码接收任务后解析任务详情。',
|
||||||
|
tags: ['Crypto'],
|
||||||
|
})
|
||||||
|
.input(
|
||||||
|
z
|
||||||
|
.object({
|
||||||
|
encryptedData: z.string().min(1).describe('Base64 编码的 AES-256-GCM 密文(来自任务二维码扫描结果)'),
|
||||||
|
})
|
||||||
|
.describe('任务二维码解密请求')
|
||||||
|
.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
encryptedData: 'uWUcAmp6UQd0w3G3crdsd4613QCxGLoEgslgXJ4G2hQhpQdjtghtQjCBUZwB/JO+NRgH1vSTr8dqBJRq7Qh4nug==',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(
|
||||||
|
z
|
||||||
|
.object({
|
||||||
|
decrypted: z.string().describe('解密后的任务信息 JSON 字符串(可进一步反序列化)'),
|
||||||
|
})
|
||||||
|
.describe('任务二维码解密结果')
|
||||||
|
.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
decrypted:
|
||||||
|
'{"taskId":"TASK-20260115-4875","enterpriseId":"1173040813421105152","orgName":"超艺科技有限公司","inspectionId":"702286470691215417","inspectionPerson":"警务通","issuedAt":1734571234567}',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
export const encryptSummary = oc
|
||||||
|
.route({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/crypto/encrypt-summary',
|
||||||
|
operationId: 'encryptSummary',
|
||||||
|
summary: '加密摘要信息',
|
||||||
|
description:
|
||||||
|
'加密检查摘要信息并产出二维码密文。\n\n处理流程:\n- 使用已验证的 licenceId + fingerprint 结合 taskId(salt) 通过 HKDF-SHA256 派生密钥;\n- 使用 AES-256-GCM 加密摘要明文;\n- 返回 Base64 密文用于摘要二维码生成。\n\n适用场景:任务执行后提交摘要信息。',
|
||||||
|
tags: ['Crypto'],
|
||||||
|
})
|
||||||
|
.input(
|
||||||
|
z
|
||||||
|
.object({
|
||||||
|
salt: z.string().min(1).describe('HKDF salt(通常为 taskId,需与任务上下文一致)'),
|
||||||
|
plaintext: z.string().min(1).describe('待加密的摘要信息 JSON 明文字符串'),
|
||||||
|
})
|
||||||
|
.describe('摘要信息加密请求')
|
||||||
|
.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
salt: 'TASK-20260115-4875',
|
||||||
|
plaintext:
|
||||||
|
'{"enterpriseId":"1173040813421105152","inspectionId":"702286470691215417","summary":{"orgId":"1","orgName":"超艺科技有限公司","checkId":"1","vcheckId":"1","task":{"startTime":"2022-01-01 00:00:00","endTime":"2022-01-01 00:00:00"},"asset":{"count":183},"weakPwd":{"count":5},"vul":{"emergency":13,"high":34,"medium":45,"low":12,"info":3}},"timestamp":1734571234567}',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(
|
||||||
|
z
|
||||||
|
.object({
|
||||||
|
encrypted: z.string().describe('Base64 密文(用于摘要信息二维码内容)'),
|
||||||
|
})
|
||||||
|
.describe('摘要信息加密结果')
|
||||||
|
.meta({
|
||||||
|
examples: [
|
||||||
|
{
|
||||||
|
encrypted: 'uWUcAmp6UQd0w3G3crdsd4613QCxGLoEgslgXJ4G2hQhpQdjtghtQjCBUZwB/JO+NRgH1vSTr8dqBJRq7Qh4nug==',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
export const signAndPackReport = oc
|
||||||
|
.route({
|
||||||
|
method: 'POST',
|
||||||
|
path: '/crypto/sign-and-pack-report',
|
||||||
|
operationId: 'signAndPackReport',
|
||||||
|
summary: '签名并打包检查报告',
|
||||||
|
description:
|
||||||
|
'对原始报告执行设备签名与 OpenPGP 签名并重新打包。\n\n处理流程:\n- 解析上传 ZIP 并提取 summary.json;\n- 用已验证的 licenceId/fingerprint 计算 deviceSignature(HKDF + HMAC-SHA256) 并回写 summary.json;\n- 生成 META-INF/manifest.json;\n- 使用本地 OpenPGP 私钥生成 detached signature(`META-INF/signature.asc`);\n- 返回签名后 ZIP。\n\n适用场景:检查结果归档、可追溯签名分发。',
|
||||||
|
tags: ['Report'],
|
||||||
|
spec: (current) => {
|
||||||
|
const multipartContent =
|
||||||
|
current.requestBody && !('$ref' in current.requestBody)
|
||||||
|
? (current.requestBody.content?.['multipart/form-data'] ?? current.requestBody.content?.['application/json'])
|
||||||
|
: undefined
|
||||||
|
|
||||||
|
return {
|
||||||
|
...current,
|
||||||
|
requestBody:
|
||||||
|
multipartContent && current.requestBody && !('$ref' in current.requestBody)
|
||||||
|
? {
|
||||||
|
...current.requestBody,
|
||||||
|
content: {
|
||||||
|
'multipart/form-data': multipartContent,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
: current.requestBody,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.input(
|
||||||
|
z
|
||||||
|
.object({
|
||||||
|
rawZip: z
|
||||||
|
.file()
|
||||||
|
.mime(['application/zip', 'application/x-zip-compressed'])
|
||||||
|
.describe(
|
||||||
|
'原始报告 ZIP 文件(必须包含 summary.json,以及 assets.json、vulnerabilities.json、weakPasswords.json、漏洞评估报告.html 等报告文件)',
|
||||||
|
),
|
||||||
|
outputFileName: z
|
||||||
|
.string()
|
||||||
|
.min(1)
|
||||||
|
.optional()
|
||||||
|
.describe('返回 ZIP 文件名(可选,默认 signed-report.zip)')
|
||||||
|
.meta({ examples: ['signed-report.zip'] }),
|
||||||
|
})
|
||||||
|
.describe('报告签名与打包请求'),
|
||||||
|
)
|
||||||
|
.output(
|
||||||
|
z
|
||||||
|
.file()
|
||||||
|
.describe('签名后报告 ZIP 文件(二进制响应,包含 summary.json、META-INF/manifest.json、META-INF/signature.asc)'),
|
||||||
|
)
|
||||||
@@ -1,7 +1,9 @@
|
|||||||
import * as todo from './todo.contract'
|
import * as config from './config.contract'
|
||||||
|
import * as crypto from './crypto.contract'
|
||||||
|
|
||||||
export const contract = {
|
export const contract = {
|
||||||
todo,
|
config,
|
||||||
|
crypto,
|
||||||
}
|
}
|
||||||
|
|
||||||
export type Contract = typeof contract
|
export type Contract = typeof contract
|
||||||
|
|||||||
@@ -1,43 +0,0 @@
|
|||||||
import { oc } from '@orpc/contract'
|
|
||||||
import {
|
|
||||||
createInsertSchema,
|
|
||||||
createSelectSchema,
|
|
||||||
createUpdateSchema,
|
|
||||||
} from 'drizzle-zod'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import { todoTable } from '@/server/db/schema'
|
|
||||||
|
|
||||||
const selectSchema = createSelectSchema(todoTable)
|
|
||||||
|
|
||||||
const insertSchema = createInsertSchema(todoTable).omit({
|
|
||||||
id: true,
|
|
||||||
createdAt: true,
|
|
||||||
updatedAt: true,
|
|
||||||
})
|
|
||||||
|
|
||||||
const updateSchema = createUpdateSchema(todoTable).omit({
|
|
||||||
id: true,
|
|
||||||
createdAt: true,
|
|
||||||
updatedAt: true,
|
|
||||||
})
|
|
||||||
|
|
||||||
export const list = oc.input(z.void()).output(z.array(selectSchema))
|
|
||||||
|
|
||||||
export const create = oc.input(insertSchema).output(selectSchema)
|
|
||||||
|
|
||||||
export const update = oc
|
|
||||||
.input(
|
|
||||||
z.object({
|
|
||||||
id: z.uuid(),
|
|
||||||
data: updateSchema,
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.output(selectSchema)
|
|
||||||
|
|
||||||
export const remove = oc
|
|
||||||
.input(
|
|
||||||
z.object({
|
|
||||||
id: z.uuid(),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.output(z.void())
|
|
||||||
26
apps/server/src/server/api/interceptors.ts
Normal file
26
apps/server/src/server/api/interceptors.ts
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import { ORPCError, ValidationError } from '@orpc/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
|
||||||
|
export const logError = (error: unknown) => {
|
||||||
|
console.error(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const handleValidationError = (error: unknown) => {
|
||||||
|
if (error instanceof ORPCError && error.code === 'BAD_REQUEST' && error.cause instanceof ValidationError) {
|
||||||
|
// If you only use Zod you can safely cast to ZodIssue[] (per ORPC official docs)
|
||||||
|
const zodError = new z.ZodError(error.cause.issues as z.core.$ZodIssue[])
|
||||||
|
|
||||||
|
throw new ORPCError('INPUT_VALIDATION_FAILED', {
|
||||||
|
status: 422,
|
||||||
|
message: z.prettifyError(zodError),
|
||||||
|
data: z.flattenError(zodError),
|
||||||
|
cause: error.cause,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error instanceof ORPCError && error.code === 'INTERNAL_SERVER_ERROR' && error.cause instanceof ValidationError) {
|
||||||
|
throw new ORPCError('OUTPUT_VALIDATION_FAILED', {
|
||||||
|
cause: error.cause,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import { os } from '@orpc/server'
|
import { os } from '@/server/api/server'
|
||||||
import { getDB } from '@/server/db'
|
import { getDB } from '@/server/db'
|
||||||
|
|
||||||
export const db = os.middleware(async ({ context, next }) => {
|
export const db = os.middleware(async ({ context, next }) => {
|
||||||
|
|||||||
81
apps/server/src/server/api/routers/config.router.ts
Normal file
81
apps/server/src/server/api/routers/config.router.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import { validatePgpPrivateKey, validateRsaPublicKey } from '@furtherverse/crypto'
|
||||||
|
import { ORPCError } from '@orpc/server'
|
||||||
|
import { isLicenceExpired, verifyAndDecodeLicenceEnvelope } from '@/server/licence'
|
||||||
|
import { ensureUxConfig, setUxLicence, setUxPgpPrivateKey, setUxPlatformPublicKey } from '@/server/ux-config'
|
||||||
|
import { db } from '../middlewares'
|
||||||
|
import { os } from '../server'
|
||||||
|
|
||||||
|
const toConfigOutput = (config: {
|
||||||
|
licenceId: string | null
|
||||||
|
licenceExpireTime: string | null
|
||||||
|
fingerprint: string
|
||||||
|
platformPublicKey: string | null
|
||||||
|
pgpPrivateKey: string | null
|
||||||
|
}) => ({
|
||||||
|
licence:
|
||||||
|
config.licenceId && config.licenceExpireTime
|
||||||
|
? {
|
||||||
|
licenceId: config.licenceId,
|
||||||
|
expireTime: config.licenceExpireTime,
|
||||||
|
isExpired: isLicenceExpired(config.licenceExpireTime),
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
fingerprint: config.fingerprint,
|
||||||
|
hasPlatformPublicKey: config.platformPublicKey != null,
|
||||||
|
hasPgpPrivateKey: config.pgpPrivateKey != null,
|
||||||
|
})
|
||||||
|
|
||||||
|
export const get = os.config.get.use(db).handler(async ({ context }) => {
|
||||||
|
const config = await ensureUxConfig(context.db)
|
||||||
|
return toConfigOutput(config)
|
||||||
|
})
|
||||||
|
|
||||||
|
export const setLicence = os.config.setLicence.use(db).handler(async ({ context, input }) => {
|
||||||
|
const currentConfig = await ensureUxConfig(context.db)
|
||||||
|
|
||||||
|
if (!currentConfig.platformPublicKey) {
|
||||||
|
throw new ORPCError('PRECONDITION_FAILED', {
|
||||||
|
message: 'Platform public key is not configured. Call config.setPlatformPublicKey first.',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = verifyAndDecodeLicenceEnvelope(input, currentConfig.platformPublicKey)
|
||||||
|
if (isLicenceExpired(payload.expire_time)) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: 'licence has expired',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const config = await setUxLicence(context.db, {
|
||||||
|
payload: input.payload,
|
||||||
|
signature: input.signature,
|
||||||
|
licenceId: payload.licence_id,
|
||||||
|
expireTime: payload.expire_time,
|
||||||
|
})
|
||||||
|
|
||||||
|
return toConfigOutput(config)
|
||||||
|
})
|
||||||
|
|
||||||
|
export const setPgpPrivateKey = os.config.setPgpPrivateKey.use(db).handler(async ({ context, input }) => {
|
||||||
|
await validatePgpPrivateKey(input.pgpPrivateKey).catch((error) => {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: `Invalid PGP private key: ${error instanceof Error ? error.message : 'unable to parse'}`,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const config = await setUxPgpPrivateKey(context.db, input.pgpPrivateKey)
|
||||||
|
return toConfigOutput(config)
|
||||||
|
})
|
||||||
|
|
||||||
|
export const setPlatformPublicKey = os.config.setPlatformPublicKey.use(db).handler(async ({ context, input }) => {
|
||||||
|
try {
|
||||||
|
validateRsaPublicKey(input.platformPublicKey)
|
||||||
|
} catch (error) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: `Invalid platform public key: ${error instanceof Error ? error.message : 'unable to parse'}`,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const config = await setUxPlatformPublicKey(context.db, input.platformPublicKey)
|
||||||
|
return toConfigOutput(config)
|
||||||
|
})
|
||||||
219
apps/server/src/server/api/routers/crypto.router.ts
Normal file
219
apps/server/src/server/api/routers/crypto.router.ts
Normal file
@@ -0,0 +1,219 @@
|
|||||||
|
import {
|
||||||
|
aesGcmDecrypt,
|
||||||
|
aesGcmEncrypt,
|
||||||
|
hkdfSha256,
|
||||||
|
hmacSha256Base64,
|
||||||
|
pgpSignDetached,
|
||||||
|
rsaOaepEncrypt,
|
||||||
|
sha256,
|
||||||
|
sha256Hex,
|
||||||
|
} from '@furtherverse/crypto'
|
||||||
|
import { ORPCError } from '@orpc/server'
|
||||||
|
import JSZip from 'jszip'
|
||||||
|
import {
|
||||||
|
isInteger,
|
||||||
|
isSafeNumber,
|
||||||
|
LosslessNumber,
|
||||||
|
parse as losslessParse,
|
||||||
|
stringify as losslessStringify,
|
||||||
|
} from 'lossless-json'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { isLicenceExpired } from '@/server/licence'
|
||||||
|
import { extractSafeZipFiles, ZipValidationError } from '@/server/safe-zip'
|
||||||
|
import { getUxConfig } from '@/server/ux-config'
|
||||||
|
import { db } from '../middlewares'
|
||||||
|
import { os } from '../server'
|
||||||
|
|
||||||
|
const safeNumberParser = (value: string): number | string => {
|
||||||
|
if (isSafeNumber(value)) return Number(value)
|
||||||
|
if (isInteger(value)) return value
|
||||||
|
return Number(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
const toLosslessNumber = (value: string): LosslessNumber | string =>
|
||||||
|
value !== '' && /^-?\d+$/.test(value) ? new LosslessNumber(value) : value
|
||||||
|
|
||||||
|
const summaryPayloadSchema = z
|
||||||
|
.object({
|
||||||
|
taskId: z.string().min(1, 'summary.json must contain a non-empty taskId'),
|
||||||
|
checkId: z.union([z.string(), z.number()]).optional(),
|
||||||
|
inspectionId: z.union([z.string(), z.number()]).optional(),
|
||||||
|
orgId: z.union([z.string(), z.number()]).optional(),
|
||||||
|
enterpriseId: z.union([z.string(), z.number()]).optional(),
|
||||||
|
summary: z.string().optional(),
|
||||||
|
})
|
||||||
|
.loose()
|
||||||
|
|
||||||
|
const requireIdentity = async (dbInstance: Parameters<typeof getUxConfig>[0]) => {
|
||||||
|
const config = await getUxConfig(dbInstance)
|
||||||
|
if (!config || !config.licenceId || !config.licenceExpireTime) {
|
||||||
|
throw new ORPCError('PRECONDITION_FAILED', {
|
||||||
|
message: 'Local identity is not initialized. Call config.get and then config.setLicence first.',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isLicenceExpired(config.licenceExpireTime)) {
|
||||||
|
throw new ORPCError('PRECONDITION_FAILED', {
|
||||||
|
message: 'Local licence has expired. Install a new signed licence before calling crypto APIs.',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return config as typeof config & { licenceId: string; licenceExpireTime: string }
|
||||||
|
}
|
||||||
|
|
||||||
|
export const encryptDeviceInfo = os.crypto.encryptDeviceInfo.use(db).handler(async ({ context }) => {
|
||||||
|
const config = await requireIdentity(context.db)
|
||||||
|
|
||||||
|
if (!config.platformPublicKey) {
|
||||||
|
throw new ORPCError('PRECONDITION_FAILED', {
|
||||||
|
message: 'Platform public key is not configured. Call config.setPlatformPublicKey first.',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const deviceInfoJson = JSON.stringify({
|
||||||
|
licence: config.licenceId,
|
||||||
|
fingerprint: config.fingerprint,
|
||||||
|
})
|
||||||
|
|
||||||
|
const encrypted = rsaOaepEncrypt(deviceInfoJson, config.platformPublicKey)
|
||||||
|
return { encrypted }
|
||||||
|
})
|
||||||
|
|
||||||
|
export const decryptTask = os.crypto.decryptTask.use(db).handler(async ({ context, input }) => {
|
||||||
|
const config = await requireIdentity(context.db)
|
||||||
|
|
||||||
|
const key = sha256(config.licenceId + config.fingerprint)
|
||||||
|
const decrypted = aesGcmDecrypt(input.encryptedData, key)
|
||||||
|
return { decrypted }
|
||||||
|
})
|
||||||
|
|
||||||
|
export const encryptSummary = os.crypto.encryptSummary.use(db).handler(async ({ context, input }) => {
|
||||||
|
const config = await requireIdentity(context.db)
|
||||||
|
|
||||||
|
const ikm = config.licenceId + config.fingerprint
|
||||||
|
const aesKey = hkdfSha256(ikm, input.salt, 'inspection_report_encryption')
|
||||||
|
const encrypted = aesGcmEncrypt(input.plaintext, aesKey)
|
||||||
|
return { encrypted }
|
||||||
|
})
|
||||||
|
|
||||||
|
export const signAndPackReport = os.crypto.signAndPackReport.use(db).handler(async ({ context, input }) => {
|
||||||
|
const config = await requireIdentity(context.db)
|
||||||
|
|
||||||
|
if (!config.pgpPrivateKey) {
|
||||||
|
throw new ORPCError('PRECONDITION_FAILED', {
|
||||||
|
message: 'PGP private key is not configured. Call config.setPgpPrivateKey first.',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const rawZipBytes = Buffer.from(await input.rawZip.arrayBuffer())
|
||||||
|
|
||||||
|
const zipFiles = await extractSafeZipFiles(rawZipBytes).catch((error) => {
|
||||||
|
if (error instanceof ZipValidationError) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', { message: error.message })
|
||||||
|
}
|
||||||
|
throw error
|
||||||
|
})
|
||||||
|
|
||||||
|
// Extract and validate summary.json from the ZIP
|
||||||
|
const summaryFile = zipFiles.find((f) => f.name === 'summary.json')
|
||||||
|
if (!summaryFile) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: 'rawZip must contain a summary.json file',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
let rawJson: unknown
|
||||||
|
try {
|
||||||
|
rawJson = losslessParse(Buffer.from(summaryFile.bytes).toString('utf-8'), undefined, safeNumberParser)
|
||||||
|
} catch {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: 'summary.json in the ZIP is not valid JSON',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = summaryPayloadSchema.safeParse(rawJson)
|
||||||
|
if (!parsed.success) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: `Invalid summary.json: ${z.prettifyError(parsed.error)}`,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const summaryPayload = parsed.data
|
||||||
|
const checkId = String(summaryPayload.checkId ?? summaryPayload.inspectionId ?? '')
|
||||||
|
const orgId = summaryPayload.orgId ?? summaryPayload.enterpriseId ?? ''
|
||||||
|
|
||||||
|
// Helper: find file in ZIP and compute its SHA256 hash
|
||||||
|
const requireFileHash = (name: string): string => {
|
||||||
|
const file = zipFiles.find((f) => f.name === name)
|
||||||
|
if (!file) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', { message: `rawZip must contain ${name}` })
|
||||||
|
}
|
||||||
|
return sha256Hex(Buffer.from(file.bytes))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute SHA256 of each content file (fixed order, matching Kotlin reference)
|
||||||
|
const assetsSha256 = requireFileHash('assets.json')
|
||||||
|
const vulnerabilitiesSha256 = requireFileHash('vulnerabilities.json')
|
||||||
|
const weakPasswordsSha256 = requireFileHash('weakPasswords.json')
|
||||||
|
const reportHtmlSha256 = requireFileHash('漏洞评估报告.html')
|
||||||
|
|
||||||
|
// Compute device signature
|
||||||
|
// signPayload = taskId + inspectionId + assetsSha256 + vulnerabilitiesSha256 + weakPasswordsSha256 + reportHtmlSha256
|
||||||
|
// (plain concatenation, no separators, fixed order — matching Kotlin reference)
|
||||||
|
const ikm = config.licenceId + config.fingerprint
|
||||||
|
const signingKey = hkdfSha256(ikm, 'AUTH_V3_SALT', 'device_report_signature')
|
||||||
|
|
||||||
|
const signPayload = `${summaryPayload.taskId}${checkId}${assetsSha256}${vulnerabilitiesSha256}${weakPasswordsSha256}${reportHtmlSha256}`
|
||||||
|
const deviceSignature = hmacSha256Base64(signingKey, signPayload)
|
||||||
|
|
||||||
|
// Build final summary.json with flat structure (matching Kotlin reference)
|
||||||
|
const finalSummary = {
|
||||||
|
orgId: toLosslessNumber(String(orgId)),
|
||||||
|
checkId: toLosslessNumber(checkId),
|
||||||
|
taskId: summaryPayload.taskId,
|
||||||
|
licence: config.licenceId,
|
||||||
|
fingerprint: config.fingerprint,
|
||||||
|
deviceSignature,
|
||||||
|
summary: summaryPayload.summary ?? '',
|
||||||
|
}
|
||||||
|
const summaryJson = losslessStringify(finalSummary)
|
||||||
|
if (!summaryJson) {
|
||||||
|
throw new ORPCError('INTERNAL_SERVER_ERROR', {
|
||||||
|
message: 'Failed to serialize summary.json',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
const summaryBytes = Buffer.from(summaryJson, 'utf-8')
|
||||||
|
|
||||||
|
// Build manifest.json (fixed file list, matching Kotlin reference)
|
||||||
|
const manifestFiles: Record<string, string> = {
|
||||||
|
'summary.json': sha256Hex(summaryBytes),
|
||||||
|
'assets.json': assetsSha256,
|
||||||
|
'vulnerabilities.json': vulnerabilitiesSha256,
|
||||||
|
'weakPasswords.json': weakPasswordsSha256,
|
||||||
|
'漏洞评估报告.html': reportHtmlSha256,
|
||||||
|
}
|
||||||
|
|
||||||
|
const manifestBytes = Buffer.from(JSON.stringify({ files: manifestFiles }, null, 2), 'utf-8')
|
||||||
|
const signatureAsc = await pgpSignDetached(manifestBytes, config.pgpPrivateKey)
|
||||||
|
|
||||||
|
// Pack signed ZIP
|
||||||
|
const signedZip = new JSZip()
|
||||||
|
signedZip.file('summary.json', summaryBytes)
|
||||||
|
for (const item of zipFiles) {
|
||||||
|
if (item.name !== 'summary.json') {
|
||||||
|
signedZip.file(item.name, item.bytes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
signedZip.file('META-INF/manifest.json', manifestBytes)
|
||||||
|
signedZip.file('META-INF/signature.asc', signatureAsc)
|
||||||
|
|
||||||
|
const signedZipBytes = await signedZip.generateAsync({
|
||||||
|
type: 'uint8array',
|
||||||
|
compression: 'DEFLATE',
|
||||||
|
compressionOptions: { level: 9 },
|
||||||
|
})
|
||||||
|
|
||||||
|
return new File([Buffer.from(signedZipBytes)], input.outputFileName ?? 'signed-report.zip', {
|
||||||
|
type: 'application/zip',
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -1,6 +1,8 @@
|
|||||||
import { os } from '../server'
|
import { os } from '../server'
|
||||||
import * as todo from './todo.router'
|
import * as config from './config.router'
|
||||||
|
import * as crypto from './crypto.router'
|
||||||
|
|
||||||
export const router = os.router({
|
export const router = os.router({
|
||||||
todo,
|
config,
|
||||||
|
crypto,
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,49 +0,0 @@
|
|||||||
import { ORPCError } from '@orpc/server'
|
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import { todoTable } from '@/server/db/schema'
|
|
||||||
import { db } from '../middlewares'
|
|
||||||
import { os } from '../server'
|
|
||||||
|
|
||||||
export const list = os.todo.list.use(db).handler(async ({ context }) => {
|
|
||||||
const todos = await context.db.query.todoTable.findMany({
|
|
||||||
orderBy: (todos, { desc }) => [desc(todos.createdAt)],
|
|
||||||
})
|
|
||||||
return todos
|
|
||||||
})
|
|
||||||
|
|
||||||
export const create = os.todo.create
|
|
||||||
.use(db)
|
|
||||||
.handler(async ({ context, input }) => {
|
|
||||||
const [newTodo] = await context.db
|
|
||||||
.insert(todoTable)
|
|
||||||
.values(input)
|
|
||||||
.returning()
|
|
||||||
|
|
||||||
if (!newTodo) {
|
|
||||||
throw new ORPCError('NOT_FOUND')
|
|
||||||
}
|
|
||||||
|
|
||||||
return newTodo
|
|
||||||
})
|
|
||||||
|
|
||||||
export const update = os.todo.update
|
|
||||||
.use(db)
|
|
||||||
.handler(async ({ context, input }) => {
|
|
||||||
const [updatedTodo] = await context.db
|
|
||||||
.update(todoTable)
|
|
||||||
.set(input.data)
|
|
||||||
.where(eq(todoTable.id, input.id))
|
|
||||||
.returning()
|
|
||||||
|
|
||||||
if (!updatedTodo) {
|
|
||||||
throw new ORPCError('NOT_FOUND')
|
|
||||||
}
|
|
||||||
|
|
||||||
return updatedTodo
|
|
||||||
})
|
|
||||||
|
|
||||||
export const remove = os.todo.remove
|
|
||||||
.use(db)
|
|
||||||
.handler(async ({ context, input }) => {
|
|
||||||
await context.db.delete(todoTable).where(eq(todoTable.id, input.id))
|
|
||||||
})
|
|
||||||
@@ -1,8 +1,4 @@
|
|||||||
import type {
|
import type { ContractRouterClient, InferContractRouterInputs, InferContractRouterOutputs } from '@orpc/contract'
|
||||||
ContractRouterClient,
|
|
||||||
InferContractRouterInputs,
|
|
||||||
InferContractRouterOutputs,
|
|
||||||
} from '@orpc/contract'
|
|
||||||
import type { Contract } from './contracts'
|
import type { Contract } from './contracts'
|
||||||
|
|
||||||
export type RouterClient = ContractRouterClient<Contract>
|
export type RouterClient = ContractRouterClient<Contract>
|
||||||
|
|||||||
36
apps/server/src/server/db/fields.ts
Normal file
36
apps/server/src/server/db/fields.ts
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
import { integer, text } from 'drizzle-orm/sqlite-core'
|
||||||
|
import { v7 as uuidv7 } from 'uuid'
|
||||||
|
|
||||||
|
export const pk = (name = 'id') =>
|
||||||
|
text(name)
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => uuidv7())
|
||||||
|
|
||||||
|
export const createdAt = (name = 'created_at') =>
|
||||||
|
integer(name, { mode: 'timestamp_ms' })
|
||||||
|
.notNull()
|
||||||
|
.$defaultFn(() => new Date())
|
||||||
|
|
||||||
|
export const updatedAt = (name = 'updated_at') =>
|
||||||
|
integer(name, { mode: 'timestamp_ms' })
|
||||||
|
.notNull()
|
||||||
|
.$defaultFn(() => new Date())
|
||||||
|
.$onUpdateFn(() => new Date())
|
||||||
|
|
||||||
|
export const generatedFields = {
|
||||||
|
id: pk('id'),
|
||||||
|
createdAt: createdAt('created_at'),
|
||||||
|
updatedAt: updatedAt('updated_at'),
|
||||||
|
}
|
||||||
|
|
||||||
|
const createGeneratedFieldKeys = <T extends Record<string, unknown>>(fields: T): Record<keyof T, true> => {
|
||||||
|
return Object.keys(fields).reduce(
|
||||||
|
(acc, key) => {
|
||||||
|
acc[key as keyof T] = true
|
||||||
|
return acc
|
||||||
|
},
|
||||||
|
{} as Record<keyof T, true>,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const generatedFieldKeys = createGeneratedFieldKeys(generatedFields)
|
||||||
@@ -1,15 +1,14 @@
|
|||||||
import { drizzle } from 'drizzle-orm/postgres-js'
|
import { Database } from 'bun:sqlite'
|
||||||
|
import { drizzle } from 'drizzle-orm/bun-sqlite'
|
||||||
import { env } from '@/env'
|
import { env } from '@/env'
|
||||||
import * as schema from '@/server/db/schema'
|
import { relations } from '@/server/db/relations'
|
||||||
|
|
||||||
export const createDB = () =>
|
export const createDB = () => {
|
||||||
drizzle({
|
const sqlite = new Database(env.DATABASE_PATH)
|
||||||
connection: {
|
sqlite.exec('PRAGMA journal_mode = WAL')
|
||||||
url: env.DATABASE_URL,
|
sqlite.exec('PRAGMA foreign_keys = ON')
|
||||||
prepare: true,
|
return drizzle({ client: sqlite, relations })
|
||||||
},
|
}
|
||||||
schema,
|
|
||||||
})
|
|
||||||
|
|
||||||
export type DB = ReturnType<typeof createDB>
|
export type DB = ReturnType<typeof createDB>
|
||||||
|
|
||||||
|
|||||||
4
apps/server/src/server/db/relations.ts
Normal file
4
apps/server/src/server/db/relations.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
import { defineRelations } from 'drizzle-orm'
|
||||||
|
import * as schema from './schema'
|
||||||
|
|
||||||
|
export const relations = defineRelations(schema, () => ({}))
|
||||||
@@ -1 +1 @@
|
|||||||
export * from './todo'
|
export * from './ux-config'
|
||||||
|
|||||||
@@ -1,8 +0,0 @@
|
|||||||
import { boolean, pgTable, text } from 'drizzle-orm/pg-core'
|
|
||||||
import { generatedFields } from './utils/field'
|
|
||||||
|
|
||||||
export const todoTable = pgTable('todo', {
|
|
||||||
...generatedFields,
|
|
||||||
title: text('title').notNull(),
|
|
||||||
completed: boolean('completed').notNull().default(false),
|
|
||||||
})
|
|
||||||
@@ -1,58 +0,0 @@
|
|||||||
import { sql } from 'drizzle-orm'
|
|
||||||
import { timestamp, uuid } from 'drizzle-orm/pg-core'
|
|
||||||
import { v7 as uuidv7 } from 'uuid'
|
|
||||||
|
|
||||||
// id
|
|
||||||
|
|
||||||
export const id = (name: string) => uuid(name)
|
|
||||||
export const pk = (name: string, strategy?: 'native' | 'extension') => {
|
|
||||||
switch (strategy) {
|
|
||||||
// PG 18+
|
|
||||||
case 'native':
|
|
||||||
return id(name).primaryKey().default(sql`uuidv7()`)
|
|
||||||
|
|
||||||
// PG 13+ with extension
|
|
||||||
case 'extension':
|
|
||||||
return id(name).primaryKey().default(sql`uuid_generate_v7()`)
|
|
||||||
|
|
||||||
// Any PG version
|
|
||||||
default:
|
|
||||||
return id(name)
|
|
||||||
.primaryKey()
|
|
||||||
.$defaultFn(() => uuidv7())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// timestamp
|
|
||||||
|
|
||||||
export const createdAt = (name = 'created_at') =>
|
|
||||||
timestamp(name, { withTimezone: true }).notNull().defaultNow()
|
|
||||||
|
|
||||||
export const updatedAt = (name = 'updated_at') =>
|
|
||||||
timestamp(name, { withTimezone: true })
|
|
||||||
.notNull()
|
|
||||||
.defaultNow()
|
|
||||||
.$onUpdateFn(() => new Date())
|
|
||||||
|
|
||||||
// generated fields
|
|
||||||
|
|
||||||
export const generatedFields = {
|
|
||||||
id: pk('id'),
|
|
||||||
createdAt: createdAt('created_at'),
|
|
||||||
updatedAt: updatedAt('updated_at'),
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helper to create omit keys from generatedFields
|
|
||||||
const createGeneratedFieldKeys = <T extends Record<string, unknown>>(
|
|
||||||
fields: T,
|
|
||||||
): Record<keyof T, true> => {
|
|
||||||
return Object.keys(fields).reduce(
|
|
||||||
(acc, key) => {
|
|
||||||
acc[key as keyof T] = true
|
|
||||||
return acc
|
|
||||||
},
|
|
||||||
{} as Record<keyof T, true>,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const generatedFieldKeys = createGeneratedFieldKeys(generatedFields)
|
|
||||||
14
apps/server/src/server/db/schema/ux-config.ts
Normal file
14
apps/server/src/server/db/schema/ux-config.ts
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import { sqliteTable, text } from 'drizzle-orm/sqlite-core'
|
||||||
|
import { generatedFields } from '../fields'
|
||||||
|
|
||||||
|
export const uxConfigTable = sqliteTable('ux_config', {
|
||||||
|
...generatedFields,
|
||||||
|
singletonKey: text('singleton_key').notNull().unique().default('default'),
|
||||||
|
licencePayload: text('licence_payload'),
|
||||||
|
licenceSignature: text('licence_signature'),
|
||||||
|
licenceId: text('licence_id'),
|
||||||
|
licenceExpireTime: text('licence_expire_time'),
|
||||||
|
fingerprint: text('fingerprint').notNull(),
|
||||||
|
platformPublicKey: text('platform_public_key'),
|
||||||
|
pgpPrivateKey: text('pgp_private_key'),
|
||||||
|
})
|
||||||
10
apps/server/src/server/device-fingerprint.ts
Normal file
10
apps/server/src/server/device-fingerprint.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { sha256Hex } from '@furtherverse/crypto'
|
||||||
|
import { system } from 'systeminformation'
|
||||||
|
|
||||||
|
export const computeDeviceFingerprint = async (): Promise<string> => {
|
||||||
|
const { uuid, serial, model, manufacturer } = await system()
|
||||||
|
const source = [uuid, serial, model, manufacturer].join('|')
|
||||||
|
const hash = sha256Hex(source)
|
||||||
|
|
||||||
|
return hash
|
||||||
|
}
|
||||||
32
apps/server/src/server/licence.test.ts
Normal file
32
apps/server/src/server/licence.test.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import { describe, expect, it } from 'bun:test'
|
||||||
|
import { constants, createSign, generateKeyPairSync } from 'node:crypto'
|
||||||
|
import { decodeLicencePayload, isLicenceExpired, verifyAndDecodeLicenceEnvelope } from './licence'
|
||||||
|
|
||||||
|
describe('licence helpers', () => {
|
||||||
|
it('verifies payload signatures and decodes payload JSON', () => {
|
||||||
|
const { privateKey, publicKey } = generateKeyPairSync('rsa', { modulusLength: 2048 })
|
||||||
|
const payloadJson = JSON.stringify({ licence_id: 'LIC-20260319-0025', expire_time: '2027-03-19' })
|
||||||
|
const payload = Buffer.from(payloadJson, 'utf-8').toString('base64')
|
||||||
|
|
||||||
|
const signer = createSign('RSA-SHA256')
|
||||||
|
signer.update(Buffer.from(payload, 'utf-8'))
|
||||||
|
signer.end()
|
||||||
|
|
||||||
|
const signature = signer.sign({ key: privateKey, padding: constants.RSA_PKCS1_PADDING }).toString('base64')
|
||||||
|
const publicKeyBase64 = publicKey.export({ format: 'der', type: 'spki' }).toString('base64')
|
||||||
|
|
||||||
|
expect(verifyAndDecodeLicenceEnvelope({ payload, signature }, publicKeyBase64)).toEqual({
|
||||||
|
licence_id: 'LIC-20260319-0025',
|
||||||
|
expire_time: '2027-03-19',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('treats expire_time as valid through the end of the UTC day', () => {
|
||||||
|
expect(isLicenceExpired('2027-03-19', new Date('2027-03-19T23:59:59.999Z'))).toBe(false)
|
||||||
|
expect(isLicenceExpired('2027-03-19', new Date('2027-03-20T00:00:00.000Z'))).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('rejects malformed payloads', () => {
|
||||||
|
expect(() => decodeLicencePayload('not-base64')).toThrow('payload must be valid Base64')
|
||||||
|
})
|
||||||
|
})
|
||||||
94
apps/server/src/server/licence.ts
Normal file
94
apps/server/src/server/licence.ts
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
import { rsaVerifySignature } from '@furtherverse/crypto'
|
||||||
|
import { z } from 'zod'
|
||||||
|
|
||||||
|
const BASE64_PATTERN = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/
|
||||||
|
const DATE_PATTERN = /^(\d{4})-(\d{2})-(\d{2})$/
|
||||||
|
|
||||||
|
export const licenceEnvelopeSchema = z.object({
|
||||||
|
payload: z.string().min(1).max(8192).describe('Base64 编码的 licence payload 原文'),
|
||||||
|
signature: z.string().min(1).max(8192).describe('对 payload 字符串 UTF-8 字节做 SHA256withRSA 后得到的 Base64 签名'),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const licencePayloadSchema = z
|
||||||
|
.object({
|
||||||
|
licence_id: z.string().min(1).describe('验签通过后的 licence 标识'),
|
||||||
|
expire_time: z
|
||||||
|
.string()
|
||||||
|
.regex(DATE_PATTERN, 'expire_time must use YYYY-MM-DD')
|
||||||
|
.describe('授权到期日,格式为 YYYY-MM-DD(按 UTC 自然日末尾失效)'),
|
||||||
|
})
|
||||||
|
.loose()
|
||||||
|
|
||||||
|
export type LicenceEnvelope = z.infer<typeof licenceEnvelopeSchema>
|
||||||
|
export type LicencePayload = z.infer<typeof licencePayloadSchema>
|
||||||
|
|
||||||
|
const decodeBase64 = (value: string, fieldName: string): Buffer => {
|
||||||
|
if (!BASE64_PATTERN.test(value)) {
|
||||||
|
throw new Error(`${fieldName} must be valid Base64`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return Buffer.from(value, 'base64')
|
||||||
|
}
|
||||||
|
|
||||||
|
const parseUtcDate = (value: string): Date => {
|
||||||
|
const match = DATE_PATTERN.exec(value)
|
||||||
|
if (!match) {
|
||||||
|
throw new Error('expire_time must use YYYY-MM-DD')
|
||||||
|
}
|
||||||
|
|
||||||
|
const [, yearText, monthText, dayText] = match
|
||||||
|
const year = Number(yearText)
|
||||||
|
const month = Number(monthText)
|
||||||
|
const day = Number(dayText)
|
||||||
|
const parsed = new Date(Date.UTC(year, month - 1, day))
|
||||||
|
|
||||||
|
if (
|
||||||
|
Number.isNaN(parsed.getTime()) ||
|
||||||
|
parsed.getUTCFullYear() !== year ||
|
||||||
|
parsed.getUTCMonth() !== month - 1 ||
|
||||||
|
parsed.getUTCDate() !== day
|
||||||
|
) {
|
||||||
|
throw new Error('expire_time is not a valid calendar date')
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsed
|
||||||
|
}
|
||||||
|
|
||||||
|
export const isLicenceExpired = (expireTime: string, now = new Date()): boolean => {
|
||||||
|
const expireDate = parseUtcDate(expireTime)
|
||||||
|
const expiresAt = Date.UTC(expireDate.getUTCFullYear(), expireDate.getUTCMonth(), expireDate.getUTCDate() + 1)
|
||||||
|
|
||||||
|
return now.getTime() >= expiresAt
|
||||||
|
}
|
||||||
|
|
||||||
|
export const decodeLicencePayload = (payloadBase64: string): LicencePayload => {
|
||||||
|
const decodedJson = decodeBase64(payloadBase64, 'payload').toString('utf-8')
|
||||||
|
|
||||||
|
let rawPayload: unknown
|
||||||
|
try {
|
||||||
|
rawPayload = JSON.parse(decodedJson)
|
||||||
|
} catch {
|
||||||
|
throw new Error('payload must decode to valid JSON')
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsedPayload = licencePayloadSchema.safeParse(rawPayload)
|
||||||
|
if (!parsedPayload.success) {
|
||||||
|
throw new Error(z.prettifyError(parsedPayload.error))
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsedPayload.data
|
||||||
|
}
|
||||||
|
|
||||||
|
export const verifyLicenceEnvelopeSignature = (envelope: LicenceEnvelope, publicKeyBase64: string): void => {
|
||||||
|
const signatureBytes = decodeBase64(envelope.signature, 'signature')
|
||||||
|
const isValid = rsaVerifySignature(Buffer.from(envelope.payload, 'utf-8'), signatureBytes, publicKeyBase64)
|
||||||
|
|
||||||
|
if (!isValid) {
|
||||||
|
throw new Error('licence signature is invalid')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const verifyAndDecodeLicenceEnvelope = (envelope: LicenceEnvelope, publicKeyBase64: string): LicencePayload => {
|
||||||
|
verifyLicenceEnvelopeSignature(envelope, publicKeyBase64)
|
||||||
|
return decodeLicencePayload(envelope.payload)
|
||||||
|
}
|
||||||
96
apps/server/src/server/safe-zip.ts
Normal file
96
apps/server/src/server/safe-zip.ts
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
import type { JSZipObject } from 'jszip'
|
||||||
|
import JSZip from 'jszip'
|
||||||
|
|
||||||
|
export class ZipValidationError extends Error {
|
||||||
|
override name = 'ZipValidationError'
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ZipFileItem {
|
||||||
|
name: string
|
||||||
|
bytes: Uint8Array
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SafeZipOptions {
|
||||||
|
maxRawBytes?: number
|
||||||
|
maxEntries?: number
|
||||||
|
maxSingleFileBytes?: number
|
||||||
|
maxTotalUncompressedBytes?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEFAULTS = {
|
||||||
|
maxRawBytes: 50 * 1024 * 1024,
|
||||||
|
maxEntries: 64,
|
||||||
|
maxSingleFileBytes: 20 * 1024 * 1024,
|
||||||
|
maxTotalUncompressedBytes: 60 * 1024 * 1024,
|
||||||
|
} satisfies Required<SafeZipOptions>
|
||||||
|
|
||||||
|
const normalizePath = (name: string): string => name.replaceAll('\\', '/')
|
||||||
|
|
||||||
|
const isUnsafePath = (name: string): boolean => {
|
||||||
|
const normalized = normalizePath(name)
|
||||||
|
const segments = normalized.split('/')
|
||||||
|
|
||||||
|
return (
|
||||||
|
normalized.startsWith('/') ||
|
||||||
|
normalized.includes('\0') ||
|
||||||
|
segments.some((segment) => segment === '..' || segment.trim().length === 0)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const extractSafeZipFiles = async (
|
||||||
|
rawBytes: Uint8Array | Buffer,
|
||||||
|
options?: SafeZipOptions,
|
||||||
|
): Promise<ZipFileItem[]> => {
|
||||||
|
const opts = { ...DEFAULTS, ...options }
|
||||||
|
|
||||||
|
if (rawBytes.byteLength === 0 || rawBytes.byteLength > opts.maxRawBytes) {
|
||||||
|
throw new ZipValidationError('ZIP is empty or exceeds max size limit')
|
||||||
|
}
|
||||||
|
|
||||||
|
const zip = await JSZip.loadAsync(rawBytes, { checkCRC32: true }).catch(() => {
|
||||||
|
throw new ZipValidationError('Not a valid ZIP file')
|
||||||
|
})
|
||||||
|
|
||||||
|
const entries = Object.values(zip.files) as JSZipObject[]
|
||||||
|
if (entries.length > opts.maxEntries) {
|
||||||
|
throw new ZipValidationError(`ZIP contains too many entries: ${entries.length}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
let totalUncompressedBytes = 0
|
||||||
|
const files: ZipFileItem[] = []
|
||||||
|
const seen = new Set<string>()
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (entry.dir) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isUnsafePath(entry.name)) {
|
||||||
|
throw new ZipValidationError(`ZIP contains unsafe entry path: ${entry.name}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedName = normalizePath(entry.name)
|
||||||
|
if (seen.has(normalizedName)) {
|
||||||
|
throw new ZipValidationError(`ZIP contains duplicate entry: ${normalizedName}`)
|
||||||
|
}
|
||||||
|
seen.add(normalizedName)
|
||||||
|
|
||||||
|
const content = await entry.async('uint8array')
|
||||||
|
if (content.byteLength > opts.maxSingleFileBytes) {
|
||||||
|
throw new ZipValidationError(`ZIP entry too large: ${normalizedName}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
totalUncompressedBytes += content.byteLength
|
||||||
|
if (totalUncompressedBytes > opts.maxTotalUncompressedBytes) {
|
||||||
|
throw new ZipValidationError('ZIP total uncompressed content exceeds max size limit')
|
||||||
|
}
|
||||||
|
|
||||||
|
files.push({ name: normalizedName, bytes: content })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (files.length === 0) {
|
||||||
|
throw new ZipValidationError('ZIP has no file entries')
|
||||||
|
}
|
||||||
|
|
||||||
|
return files
|
||||||
|
}
|
||||||
99
apps/server/src/server/ux-config.ts
Normal file
99
apps/server/src/server/ux-config.ts
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import type { DB } from '@/server/db'
|
||||||
|
import { uxConfigTable } from '@/server/db/schema'
|
||||||
|
import { computeDeviceFingerprint } from './device-fingerprint'
|
||||||
|
|
||||||
|
const UX_CONFIG_KEY = 'default'
|
||||||
|
|
||||||
|
export const getUxConfig = async (db: DB) => {
|
||||||
|
return await db.query.uxConfigTable.findFirst({
|
||||||
|
where: { singletonKey: UX_CONFIG_KEY },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export const ensureUxConfig = async (db: DB) => {
|
||||||
|
const fingerprint = await computeDeviceFingerprint()
|
||||||
|
const existing = await getUxConfig(db)
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
if (existing.fingerprint !== fingerprint) {
|
||||||
|
const rows = await db
|
||||||
|
.update(uxConfigTable)
|
||||||
|
.set({ fingerprint })
|
||||||
|
.where(eq(uxConfigTable.id, existing.id))
|
||||||
|
.returning()
|
||||||
|
return rows[0] as (typeof rows)[number]
|
||||||
|
}
|
||||||
|
return existing
|
||||||
|
}
|
||||||
|
|
||||||
|
const rows = await db
|
||||||
|
.insert(uxConfigTable)
|
||||||
|
.values({
|
||||||
|
singletonKey: UX_CONFIG_KEY,
|
||||||
|
fingerprint,
|
||||||
|
licencePayload: null,
|
||||||
|
licenceSignature: null,
|
||||||
|
licenceId: null,
|
||||||
|
licenceExpireTime: null,
|
||||||
|
})
|
||||||
|
.returning()
|
||||||
|
|
||||||
|
return rows[0] as (typeof rows)[number]
|
||||||
|
}
|
||||||
|
|
||||||
|
export const setUxLicence = async (
|
||||||
|
db: DB,
|
||||||
|
licence: {
|
||||||
|
payload: string
|
||||||
|
signature: string
|
||||||
|
licenceId: string
|
||||||
|
expireTime: string
|
||||||
|
},
|
||||||
|
) => {
|
||||||
|
const config = await ensureUxConfig(db)
|
||||||
|
|
||||||
|
const rows = await db
|
||||||
|
.update(uxConfigTable)
|
||||||
|
.set({
|
||||||
|
licencePayload: licence.payload,
|
||||||
|
licenceSignature: licence.signature,
|
||||||
|
licenceId: licence.licenceId,
|
||||||
|
licenceExpireTime: licence.expireTime,
|
||||||
|
})
|
||||||
|
.where(eq(uxConfigTable.id, config.id))
|
||||||
|
.returning()
|
||||||
|
|
||||||
|
return rows[0] as (typeof rows)[number]
|
||||||
|
}
|
||||||
|
|
||||||
|
export const setUxPgpPrivateKey = async (db: DB, pgpPrivateKey: string) => {
|
||||||
|
const config = await ensureUxConfig(db)
|
||||||
|
|
||||||
|
const rows = await db.update(uxConfigTable).set({ pgpPrivateKey }).where(eq(uxConfigTable.id, config.id)).returning()
|
||||||
|
|
||||||
|
return rows[0] as (typeof rows)[number]
|
||||||
|
}
|
||||||
|
|
||||||
|
export const setUxPlatformPublicKey = async (db: DB, platformPublicKey: string) => {
|
||||||
|
const config = await ensureUxConfig(db)
|
||||||
|
const shouldClearLicence = config.platformPublicKey !== platformPublicKey
|
||||||
|
|
||||||
|
const rows = await db
|
||||||
|
.update(uxConfigTable)
|
||||||
|
.set({
|
||||||
|
platformPublicKey,
|
||||||
|
...(shouldClearLicence
|
||||||
|
? {
|
||||||
|
licencePayload: null,
|
||||||
|
licenceSignature: null,
|
||||||
|
licenceId: null,
|
||||||
|
licenceExpireTime: null,
|
||||||
|
}
|
||||||
|
: {}),
|
||||||
|
})
|
||||||
|
.where(eq(uxConfigTable.id, config.id))
|
||||||
|
.returning()
|
||||||
|
|
||||||
|
return rows[0] as (typeof rows)[number]
|
||||||
|
}
|
||||||
@@ -4,9 +4,43 @@
|
|||||||
"tasks": {
|
"tasks": {
|
||||||
"build": {
|
"build": {
|
||||||
"env": ["NODE_ENV", "VITE_*"],
|
"env": ["NODE_ENV", "VITE_*"],
|
||||||
|
"inputs": ["src/**", "public/**", "package.json", "tsconfig.json", "vite.config.ts"],
|
||||||
"outputs": [".output/**"]
|
"outputs": [".output/**"]
|
||||||
},
|
},
|
||||||
"compile": {
|
"compile": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:darwin": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:darwin:arm64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:darwin:x64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:linux": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:linux:arm64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:linux:x64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:windows": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:windows:x64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
"outputs": ["out/**"]
|
"outputs": ["out/**"]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,7 +11,8 @@
|
|||||||
"formatter": {
|
"formatter": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
"indentStyle": "space",
|
"indentStyle": "space",
|
||||||
"lineEnding": "lf"
|
"lineEnding": "lf",
|
||||||
|
"lineWidth": 120
|
||||||
},
|
},
|
||||||
"linter": {
|
"linter": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
|
|||||||
432
docs/摘要+zip/EncryptionTestController.kt
Normal file
432
docs/摘要+zip/EncryptionTestController.kt
Normal file
@@ -0,0 +1,432 @@
|
|||||||
|
package top.tangyh.lamp.filing.controller.compress
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper
|
||||||
|
import io.swagger.annotations.Api
|
||||||
|
import io.swagger.annotations.ApiOperation
|
||||||
|
import io.swagger.annotations.ApiParam
|
||||||
|
import org.springframework.validation.annotation.Validated
|
||||||
|
import org.springframework.web.bind.annotation.*
|
||||||
|
import top.tangyh.basic.annotation.log.WebLog
|
||||||
|
import top.tangyh.basic.base.R
|
||||||
|
import top.tangyh.lamp.filing.dto.management.UploadInspectionFileV2Request
|
||||||
|
import top.tangyh.lamp.filing.utils.AesGcmUtil
|
||||||
|
import top.tangyh.lamp.filing.utils.HkdfUtil
|
||||||
|
import top.tangyh.lamp.filing.utils.PgpSignatureUtil
|
||||||
|
import java.util.*
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 加密测试工具类
|
||||||
|
*
|
||||||
|
* 用于生成加密后的 encrypted 数据,测试 uploadInspectionFileV2Encrypted 接口
|
||||||
|
*
|
||||||
|
* 使用说明:
|
||||||
|
* 1. 调用 /compression/test/generateEncrypted 接口
|
||||||
|
* 2. 传入 licence、fingerprint、taskId 和明文数据
|
||||||
|
* 3. 获取加密后的 Base64 字符串
|
||||||
|
* 4. 使用返回的 encrypted 数据测试 uploadInspectionFileV2Encrypted 接口
|
||||||
|
*/
|
||||||
|
@Validated
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/compression/test")
|
||||||
|
@Api(value = "EncryptionTest", tags = ["加密测试工具"])
|
||||||
|
class EncryptionTestController {
|
||||||
|
|
||||||
|
private val objectMapper = ObjectMapper()
|
||||||
|
|
||||||
|
companion object {
|
||||||
|
private const val DEFAULT_PGP_PRIVATE_KEY = """-----BEGIN PGP PRIVATE KEY BLOCK-----
|
||||||
|
|
||||||
|
lFgEaSZqXBYJKwYBBAHaRw8BAQdARzZ5JXreuTeTgMFwYcw0Ju7aCWmXuUMmQyff
|
||||||
|
5vmN8RQAAP4nli0R/MTNtgx9+g5ZPyAj8XSAnjHaW9u2UJQxYhMIYw8XtBZpdHRj
|
||||||
|
PGl0dGNAaXR0Yy5zaC5jbj6IkwQTFgoAOxYhBG8IkI1kmkNpEu8iuqWu91t6SEzN
|
||||||
|
BQJpJmpcAhsDBQsJCAcCAiICBhUKCQgLAgQWAgMBAh4HAheAAAoJEKWu91t6SEzN
|
||||||
|
dSQBAPM5llVG0X6SBa4YM90Iqyb2jWvlNjstoF8jjPVny1CiAP4hIOUvb686oSA0
|
||||||
|
OrS3AuICi7X/r+JnSo1Z7pngUA3VC5xdBGkmalwSCisGAQQBl1UBBQEBB0BouQlG
|
||||||
|
hIL0bq7EbaB55s+ygLVFOfhjFA8E4fwFBFJGVAMBCAcAAP98ZXRGgzld1XUa5ZGx
|
||||||
|
cTE+1qGZY4E4BVIeqkVxdg5tqA64iHgEGBYKACAWIQRvCJCNZJpDaRLvIrqlrvdb
|
||||||
|
ekhMzQUCaSZqXAIbDAAKCRClrvdbekhMzcaSAQDB/4pvDuc7SploQg1fBYobFm5P
|
||||||
|
vxguByr8I+PrYWKKOQEAnaeXT4ipi1nICXFiigztsIl2xTth3D77XG6pZUU/Zw8=
|
||||||
|
=/k1H
|
||||||
|
-----END PGP PRIVATE KEY BLOCK-----"""
|
||||||
|
|
||||||
|
private const val DEFAULT_PGP_PASSPHRASE = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 生成加密数据请求 DTO
|
||||||
|
*/
|
||||||
|
data class GenerateEncryptedRequest(
|
||||||
|
@ApiParam(value = "授权码", required = true)
|
||||||
|
val licence: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "硬件指纹", required = true)
|
||||||
|
val fingerprint: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "任务ID", required = true)
|
||||||
|
val taskId: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "企业ID", required = true)
|
||||||
|
val enterpriseId: Long,
|
||||||
|
|
||||||
|
@ApiParam(value = "检查ID", required = true)
|
||||||
|
val inspectionId: Long,
|
||||||
|
|
||||||
|
@ApiParam(value = "摘要信息", required = true)
|
||||||
|
val summary: String
|
||||||
|
)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 生成加密数据响应 DTO
|
||||||
|
*/
|
||||||
|
data class GenerateEncryptedResponse(
|
||||||
|
val encrypted: String,
|
||||||
|
val requestBody: UploadInspectionFileV2Request,
|
||||||
|
val plaintext: String,
|
||||||
|
val keyDerivationInfo: KeyDerivationInfo
|
||||||
|
)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 密钥派生信息
|
||||||
|
*/
|
||||||
|
data class KeyDerivationInfo(
|
||||||
|
val ikm: String,
|
||||||
|
val salt: String,
|
||||||
|
val info: String,
|
||||||
|
val keyLength: Int,
|
||||||
|
val keyHex: String
|
||||||
|
)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 生成加密数据
|
||||||
|
*
|
||||||
|
* 模拟工具箱端的加密逻辑:
|
||||||
|
* 1. 使用 HKDF-SHA256 派生 AES 密钥
|
||||||
|
* - ikm = licence + fingerprint
|
||||||
|
* - salt = taskId
|
||||||
|
* - info = "inspection_report_encryption"
|
||||||
|
* - length = 32 bytes
|
||||||
|
*
|
||||||
|
* 2. 使用 AES-256-GCM 加密数据
|
||||||
|
* - 格式:IV (12字节) + Ciphertext + Tag (16字节)
|
||||||
|
* - Base64 编码返回
|
||||||
|
*
|
||||||
|
* @param request 生成加密数据请求
|
||||||
|
* @return 加密后的数据和完整的请求体
|
||||||
|
*/
|
||||||
|
@ApiOperation(value = "生成加密数据", notes = "生成加密后的 encrypted 数据,用于测试 uploadInspectionFileV2Encrypted 接口")
|
||||||
|
@PostMapping("/generateEncrypted")
|
||||||
|
@WebLog(value = "'生成加密数据:'", request = false)
|
||||||
|
fun generateEncrypted(
|
||||||
|
@RequestBody request: GenerateEncryptedRequest
|
||||||
|
): R<GenerateEncryptedResponse> {
|
||||||
|
return try {
|
||||||
|
// 1. 组装明文数据(JSON格式)
|
||||||
|
val timestamp = System.currentTimeMillis()
|
||||||
|
val plaintextMap = mapOf(
|
||||||
|
"enterpriseId" to request.enterpriseId.toString(),
|
||||||
|
"inspectionId" to request.inspectionId.toString(),
|
||||||
|
"summary" to request.summary,
|
||||||
|
"timestamp" to timestamp
|
||||||
|
)
|
||||||
|
val plaintext = objectMapper.writeValueAsString(plaintextMap)
|
||||||
|
|
||||||
|
// 2. 使用 HKDF-SHA256 派生 AES 密钥
|
||||||
|
// ikm = licence + fingerprint
|
||||||
|
// salt = taskId(工具箱从二维码获取,平台从请求获取)
|
||||||
|
// info = "inspection_report_encryption"(固定值)
|
||||||
|
// length = 32 bytes
|
||||||
|
val ikm = "${request.licence}${request.fingerprint}"
|
||||||
|
val salt = request.taskId.toString()
|
||||||
|
val info = "inspection_report_encryption"
|
||||||
|
val keyLength = 32
|
||||||
|
|
||||||
|
val aesKey = HkdfUtil.deriveKey(ikm, salt, info, keyLength)
|
||||||
|
|
||||||
|
// 3. 使用 AES-256-GCM 加密数据
|
||||||
|
val encrypted = AesGcmUtil.encrypt(plaintext, aesKey)
|
||||||
|
|
||||||
|
// 4. 组装完整的请求体(appid 需要前端自己赋值)
|
||||||
|
val requestBody = UploadInspectionFileV2Request().apply {
|
||||||
|
this.appid = "test-appid" // 测试用的 appid,实际使用时前端会赋值
|
||||||
|
this.taskId = request.taskId
|
||||||
|
this.encrypted = encrypted
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. 返回加密数据和密钥派生信息
|
||||||
|
val response = GenerateEncryptedResponse(
|
||||||
|
encrypted = encrypted,
|
||||||
|
requestBody = requestBody,
|
||||||
|
plaintext = plaintext,
|
||||||
|
keyDerivationInfo = KeyDerivationInfo(
|
||||||
|
ikm = ikm,
|
||||||
|
salt = salt,
|
||||||
|
info = info,
|
||||||
|
keyLength = keyLength,
|
||||||
|
keyHex = aesKey.joinToString("") { "%02x".format(it) }
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
R.success(response, "加密数据生成成功")
|
||||||
|
} catch (e: Exception) {
|
||||||
|
R.fail("生成加密数据失败: ${e.message}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 快速生成测试数据(使用默认值)
|
||||||
|
*
|
||||||
|
* @return 加密后的数据和完整的请求体
|
||||||
|
*/
|
||||||
|
@ApiOperation(value = "快速生成测试数据", notes = "使用默认值快速生成加密数据,用于快速测试")
|
||||||
|
@GetMapping("/generateTestData")
|
||||||
|
@WebLog(value = "'快速生成测试数据:'", request = false)
|
||||||
|
fun generateTestData(): R<GenerateEncryptedResponse> {
|
||||||
|
return try {
|
||||||
|
// 使用默认测试数据
|
||||||
|
val request = GenerateEncryptedRequest(
|
||||||
|
licence = "TEST-LICENCE-001",
|
||||||
|
fingerprint = "TEST-FINGERPRINT-001",
|
||||||
|
taskId = "TASK-20260115-4875",
|
||||||
|
enterpriseId = 1173040813421105152L,
|
||||||
|
inspectionId = 702286470691215417L,
|
||||||
|
summary = "测试摘要信息"
|
||||||
|
)
|
||||||
|
|
||||||
|
generateEncrypted(request).data?.let {
|
||||||
|
R.success(it, "测试数据生成成功")
|
||||||
|
} ?: R.fail("生成测试数据失败")
|
||||||
|
} catch (e: Exception) {
|
||||||
|
R.fail("生成测试数据失败: ${e.message}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 验证加密数据(解密测试)
|
||||||
|
*
|
||||||
|
* 用于验证生成的加密数据是否能正确解密
|
||||||
|
*
|
||||||
|
* @param encrypted 加密后的 Base64 字符串
|
||||||
|
* @param licence 授权码
|
||||||
|
* @param fingerprint 硬件指纹
|
||||||
|
* @param taskId 任务ID
|
||||||
|
* @return 解密后的明文数据
|
||||||
|
*/
|
||||||
|
@ApiOperation(value = "验证加密数据", notes = "解密加密数据,验证加密是否正确")
|
||||||
|
@PostMapping("/verifyEncrypted")
|
||||||
|
@WebLog(value = "'验证加密数据:'", request = false)
|
||||||
|
fun verifyEncrypted(
|
||||||
|
@ApiParam(value = "加密后的 Base64 字符串", required = true)
|
||||||
|
@RequestParam encrypted: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "授权码", required = true)
|
||||||
|
@RequestParam licence: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "硬件指纹", required = true)
|
||||||
|
@RequestParam fingerprint: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "任务ID", required = true)
|
||||||
|
@RequestParam taskId: String
|
||||||
|
): R<Map<String, Any>> {
|
||||||
|
return try {
|
||||||
|
// 1. 使用相同的密钥派生规则派生密钥
|
||||||
|
val ikm = "$licence$fingerprint"
|
||||||
|
val salt = taskId.toString()
|
||||||
|
val info = "inspection_report_encryption"
|
||||||
|
val aesKey = HkdfUtil.deriveKey(ikm, salt, info, 32)
|
||||||
|
|
||||||
|
// 2. 解密数据
|
||||||
|
val decrypted = AesGcmUtil.decrypt(encrypted, aesKey)
|
||||||
|
|
||||||
|
// 3. 解析 JSON
|
||||||
|
@Suppress("UNCHECKED_CAST")
|
||||||
|
val dataMap = objectMapper.readValue(decrypted, Map::class.java) as Map<String, Any>
|
||||||
|
|
||||||
|
R.success(dataMap, "解密成功")
|
||||||
|
} catch (e: Exception) {
|
||||||
|
R.fail("解密失败: ${e.message}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 生成加密报告 ZIP 文件请求 DTO
|
||||||
|
*/
|
||||||
|
data class GenerateEncryptedZipRequest(
|
||||||
|
@ApiParam(value = "授权码", required = true)
|
||||||
|
val licence: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "硬件指纹", required = true)
|
||||||
|
val fingerprint: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "任务ID", required = true)
|
||||||
|
val taskId: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "企业ID", required = true)
|
||||||
|
val enterpriseId: Long,
|
||||||
|
|
||||||
|
@ApiParam(value = "检查ID", required = true)
|
||||||
|
val inspectionId: Long,
|
||||||
|
|
||||||
|
@ApiParam(value = "摘要信息", required = true)
|
||||||
|
val summary: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "资产信息 JSON", required = true)
|
||||||
|
val assetsJson: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "漏洞信息 JSON", required = true)
|
||||||
|
val vulnerabilitiesJson: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "弱密码信息 JSON", required = true)
|
||||||
|
val weakPasswordsJson: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "漏洞评估报告 HTML", required = true)
|
||||||
|
val reportHtml: String,
|
||||||
|
|
||||||
|
@ApiParam(value = "PGP 私钥(可选,不提供则跳过 PGP 签名)", required = false)
|
||||||
|
val pgpPrivateKey: String? = null,
|
||||||
|
|
||||||
|
@ApiParam(value = "PGP 私钥密码(可选)", required = false)
|
||||||
|
val pgpPassphrase: String? = null
|
||||||
|
)
|
||||||
|
|
||||||
|
/**
 * Generate an encrypted report ZIP file.
 *
 * Builds the encrypted report ZIP according to the guide
 * 《工具箱端-报告加密与签名生成指南.md》 (toolbox-side "Report Encryption & Signature
 * Generation Guide"). The archive carries a device HMAC signature inside
 * summary.json plus a detached PGP signature over META-INF/manifest.json, and
 * is streamed to the HTTP response as a binary attachment. The notes on
 * @ApiOperation state the output can be decrypted by uploadInspectionFileV2.
 *
 * @param request  generation request (report JSON payloads, licence/fingerprint, optional PGP key)
 * @param response servlet response the ZIP bytes are written into
 */
@ApiOperation(value = "生成加密报告 ZIP", notes = "生成带设备签名的加密报告 ZIP 文件,可被 uploadInspectionFileV2 接口解密")
@PostMapping("/generateEncryptedZip")
@WebLog(value = "'生成加密报告 ZIP:'", request = false)
fun generateEncryptedZip(
    @RequestBody request: GenerateEncryptedZipRequest,
    response: javax.servlet.http.HttpServletResponse
) {
    try {
        // 1. Prepare file contents (every report payload is UTF-8 encoded)
        val assetsContent = request.assetsJson.toByteArray(Charsets.UTF_8)
        val vulnerabilitiesContent = request.vulnerabilitiesJson.toByteArray(Charsets.UTF_8)
        val weakPasswordsContent = request.weakPasswordsJson.toByteArray(Charsets.UTF_8)
        val reportHtmlContent = request.reportHtml.toByteArray(Charsets.UTF_8)

        // 2. Generate the device signature
        // 2.1 Key derivation: HKDF over (licence + fingerprint) -> 32-byte HMAC key
        val ikm = "${request.licence}${request.fingerprint}"
        val salt = "AUTH_V3_SALT"
        val info = "device_report_signature"
        val derivedKey = HkdfUtil.deriveKey(ikm, salt, info, 32)

        // 2.2 Per-file SHA-256, rendered as lowercase hex
        fun sha256Hex(content: ByteArray): String {
            val digest = java.security.MessageDigest.getInstance("SHA-256")
            return digest.digest(content).joinToString("") { "%02x".format(it) }
        }

        val assetsSha256 = sha256Hex(assetsContent)
        val vulnerabilitiesSha256 = sha256Hex(vulnerabilitiesContent)
        val weakPasswordsSha256 = sha256Hex(weakPasswordsContent)
        val reportHtmlSha256 = sha256Hex(reportHtmlContent)

        // 2.3 Assemble the signing payload — field order is strict and must
        //     match the verifying side byte for byte.
        val signPayload = buildString {
            append(request.taskId)
            append(request.inspectionId)
            append(assetsSha256)
            append(vulnerabilitiesSha256)
            append(weakPasswordsSha256)
            append(reportHtmlSha256)
        }

        // 2.4 HMAC-SHA256 over the payload, Base64-encoded
        val mac = javax.crypto.Mac.getInstance("HmacSHA256")
        val secretKey = javax.crypto.spec.SecretKeySpec(derivedKey, "HmacSHA256")
        mac.init(secretKey)
        val signatureBytes = mac.doFinal(signPayload.toByteArray(Charsets.UTF_8))
        val deviceSignature = Base64.getEncoder().encodeToString(signatureBytes)

        // 2.5 Build summary.json
        val summaryMap = mapOf(
            "orgId" to request.enterpriseId,
            "checkId" to request.inspectionId,
            "taskId" to request.taskId,
            "licence" to request.licence,
            "fingerprint" to request.fingerprint,
            "deviceSignature" to deviceSignature,
            "summary" to request.summary
        )
        val summaryContent = objectMapper.writeValueAsString(summaryMap).toByteArray(Charsets.UTF_8)

        // 3. Build manifest.json: entry name -> SHA-256 hex of the entry's bytes
        val filesHashes = mapOf(
            "summary.json" to sha256Hex(summaryContent),
            "assets.json" to assetsSha256,
            "vulnerabilities.json" to vulnerabilitiesSha256,
            "weakPasswords.json" to weakPasswordsSha256,
            "漏洞评估报告.html" to reportHtmlSha256
        )
        val manifest = mapOf("files" to filesHashes)
        val manifestContent = objectMapper.writeValueAsString(manifest).toByteArray(Charsets.UTF_8)

        // 4. Build signature.asc: detached PGP signature over manifest.json.
        //    Falls back to the built-in default key/passphrase when the request
        //    omits them (blank private key counts as omitted).
        val privateKey = request.pgpPrivateKey?.takeIf { it.isNotBlank() } ?: DEFAULT_PGP_PRIVATE_KEY
        val passphrase = request.pgpPassphrase ?: DEFAULT_PGP_PASSPHRASE

        val signatureAsc = try {
            PgpSignatureUtil.generateDetachedSignature(
                manifestContent,
                privateKey,
                passphrase
            )
        } catch (e: Exception) {
            throw RuntimeException("生成 PGP 签名失败: ${e.message}", e)
        }

        // 5. Assemble the ZIP entirely in memory
        val baos = java.io.ByteArrayOutputStream()
        java.util.zip.ZipOutputStream(baos).use { zipOut ->
            zipOut.putNextEntry(java.util.zip.ZipEntry("summary.json"))
            zipOut.write(summaryContent)
            zipOut.closeEntry()

            zipOut.putNextEntry(java.util.zip.ZipEntry("assets.json"))
            zipOut.write(assetsContent)
            zipOut.closeEntry()

            zipOut.putNextEntry(java.util.zip.ZipEntry("vulnerabilities.json"))
            zipOut.write(vulnerabilitiesContent)
            zipOut.closeEntry()

            zipOut.putNextEntry(java.util.zip.ZipEntry("weakPasswords.json"))
            zipOut.write(weakPasswordsContent)
            zipOut.closeEntry()

            zipOut.putNextEntry(java.util.zip.ZipEntry("漏洞评估报告.html"))
            zipOut.write(reportHtmlContent)
            zipOut.closeEntry()

            zipOut.putNextEntry(java.util.zip.ZipEntry("META-INF/manifest.json"))
            zipOut.write(manifestContent)
            zipOut.closeEntry()

            zipOut.putNextEntry(java.util.zip.ZipEntry("META-INF/signature.asc"))
            zipOut.write(signatureAsc)
            zipOut.closeEntry()
        }

        val zipBytes = baos.toByteArray()

        // 6. Set response headers and stream the archive
        response.contentType = "application/octet-stream"
        response.setHeader("Content-Disposition", "attachment; filename=\"report_${request.taskId}.zip\"")
        response.setHeader("Content-Length", zipBytes.size.toString())
        response.outputStream.write(zipBytes)
        response.outputStream.flush()
    } catch (e: Exception) {
        // NOTE(review): e.message is interpolated unescaped into a JSON literal —
        // a message containing quotes or newlines yields invalid JSON; consider
        // serializing the error body via objectMapper instead. Also, reset()
        // throws IllegalStateException if the response is already committed
        // (partial write above) — confirm intended behavior.
        response.reset()
        response.contentType = "application/json; charset=UTF-8"
        response.writer.write("{\"code\": 500, \"msg\": \"生成 ZIP 文件失败: ${e.message}\"}")
    }
}
|
||||||
|
}
|
||||||
|
|
||||||
74
package.json
74
package.json
@@ -10,53 +10,61 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "turbo run build",
|
"build": "turbo run build",
|
||||||
"compile": "turbo run compile",
|
"compile": "turbo run compile",
|
||||||
|
"compile:darwin": "turbo run compile:darwin",
|
||||||
|
"compile:linux": "turbo run compile:linux",
|
||||||
|
"compile:windows": "turbo run compile:windows",
|
||||||
"dev": "turbo run dev",
|
"dev": "turbo run dev",
|
||||||
|
"dist": "turbo run dist",
|
||||||
|
"dist:linux": "turbo run dist:linux",
|
||||||
|
"dist:mac": "turbo run dist:mac",
|
||||||
|
"dist:win": "turbo run dist:win",
|
||||||
"fix": "turbo run fix",
|
"fix": "turbo run fix",
|
||||||
"typecheck": "turbo run typecheck"
|
"typecheck": "turbo run typecheck"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@biomejs/biome": "^2.3.14",
|
"@biomejs/biome": "^2.4.7",
|
||||||
"turbo": "^2.8.3",
|
"turbo": "^2.8.17",
|
||||||
"typescript": "^5.9.3"
|
"typescript": "^5.9.3"
|
||||||
},
|
},
|
||||||
"catalog": {
|
"catalog": {
|
||||||
"@biomejs/biome": "^2.3.11",
|
"@orpc/client": "^1.13.7",
|
||||||
"@orpc/client": "^1.13.4",
|
"@orpc/contract": "^1.13.7",
|
||||||
"@orpc/contract": "^1.13.4",
|
"@orpc/openapi": "^1.13.7",
|
||||||
"@orpc/openapi": "^1.13.4",
|
"@orpc/server": "^1.13.7",
|
||||||
"@orpc/server": "^1.13.4",
|
"@orpc/tanstack-query": "^1.13.7",
|
||||||
"@orpc/tanstack-query": "^1.13.4",
|
"@orpc/zod": "^1.13.7",
|
||||||
"@orpc/zod": "^1.13.4",
|
|
||||||
"@t3-oss/env-core": "^0.13.10",
|
"@t3-oss/env-core": "^0.13.10",
|
||||||
"@tailwindcss/vite": "^4.1.18",
|
"@tailwindcss/vite": "^4.2.1",
|
||||||
"@tanstack/devtools-vite": "^0.5.0",
|
"@tanstack/devtools-vite": "^0.5.5",
|
||||||
"@tanstack/react-devtools": "^0.9.4",
|
"@tanstack/react-devtools": "^0.9.13",
|
||||||
"@tanstack/react-query": "^5.90.20",
|
"@tanstack/react-query": "^5.90.21",
|
||||||
"@tanstack/react-query-devtools": "^5.91.3",
|
"@tanstack/react-query-devtools": "^5.91.3",
|
||||||
"@tanstack/react-router": "^1.158.4",
|
"@tanstack/react-router": "^1.167.3",
|
||||||
"@tanstack/react-router-devtools": "^1.158.4",
|
"@tanstack/react-router-devtools": "^1.166.9",
|
||||||
"@tanstack/react-router-ssr-query": "^1.158.4",
|
"@tanstack/react-router-ssr-query": "^1.166.9",
|
||||||
"@tanstack/react-start": "^1.159.0",
|
"@tanstack/react-start": "^1.166.14",
|
||||||
"@types/bun": "^1.3.8",
|
"@types/bun": "^1.3.10",
|
||||||
"@types/node": "^24.3.0",
|
"@types/node": "^24.12.0",
|
||||||
"@vitejs/plugin-react": "^5.1.3",
|
"@vitejs/plugin-react": "^5.2.0",
|
||||||
"babel-plugin-react-compiler": "^1.0.0",
|
"babel-plugin-react-compiler": "^1.0.0",
|
||||||
"drizzle-kit": "^0.31.8",
|
"drizzle-kit": "1.0.0-beta.15-859cf75",
|
||||||
"drizzle-orm": "^0.45.1",
|
"drizzle-orm": "1.0.0-beta.15-859cf75",
|
||||||
"drizzle-zod": "^0.8.3",
|
"electron": "^34.0.0",
|
||||||
"electrobun": "^1.12.0-beta.1",
|
"electron-builder": "^26.8.1",
|
||||||
"nitro": "npm:nitro-nightly@3.0.1-20260206-171553-bc737c0c",
|
"electron-vite": "^5.0.0",
|
||||||
"ohash": "^2.0.11",
|
"jszip": "^3.10.1",
|
||||||
"postgres": "^3.4.8",
|
"lossless-json": "^4.3.0",
|
||||||
|
"motion": "^12.36.0",
|
||||||
|
"nitro": "npm:nitro-nightly@3.0.1-20260315-195328-c31268c6",
|
||||||
|
"openpgp": "^6.0.1",
|
||||||
"react": "^19.2.4",
|
"react": "^19.2.4",
|
||||||
"react-dom": "^19.2.4",
|
"react-dom": "^19.2.4",
|
||||||
"systeminformation": "^5.30.7",
|
"tailwindcss": "^4.2.1",
|
||||||
"tailwindcss": "^4.1.18",
|
"tree-kill": "^1.2.2",
|
||||||
"turbo": "^2.7.5",
|
|
||||||
"typescript": "^5.9.3",
|
|
||||||
"uuid": "^13.0.0",
|
"uuid": "^13.0.0",
|
||||||
"vite": "^8.0.0-beta.13",
|
"vite": "^8.0.0",
|
||||||
"vite-tsconfig-paths": "^6.0.5",
|
"vite-tsconfig-paths": "^6.1.1",
|
||||||
|
"systeminformation": "^5.31.4",
|
||||||
"zod": "^4.3.6"
|
"zod": "^4.3.6"
|
||||||
},
|
},
|
||||||
"overrides": {
|
"overrides": {
|
||||||
|
|||||||
18
packages/crypto/package.json
Normal file
18
packages/crypto/package.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"name": "@furtherverse/crypto",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"exports": {
|
||||||
|
".": "./src/index.ts"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"node-forge": "^1.3.3",
|
||||||
|
"openpgp": "catalog:"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@furtherverse/tsconfig": "workspace:*",
|
||||||
|
"@types/bun": "catalog:",
|
||||||
|
"@types/node-forge": "^1.3.14"
|
||||||
|
}
|
||||||
|
}
|
||||||
53
packages/crypto/src/aes-gcm.ts
Normal file
53
packages/crypto/src/aes-gcm.ts
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import { createCipheriv, createDecipheriv, randomBytes } from 'node:crypto'
|
||||||
|
|
||||||
|
const GCM_IV_LENGTH = 12 // 96 bits
|
||||||
|
const GCM_TAG_LENGTH = 16 // 128 bits
|
||||||
|
const ALGORITHM = 'aes-256-gcm'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AES-256-GCM encrypt.
|
||||||
|
*
|
||||||
|
* Output format (before Base64): [IV (12 bytes)] + [ciphertext] + [auth tag (16 bytes)]
|
||||||
|
*
|
||||||
|
* @param plaintext - UTF-8 string to encrypt
|
||||||
|
* @param key - 32-byte AES key
|
||||||
|
* @returns Base64-encoded encrypted data
|
||||||
|
*/
|
||||||
|
export const aesGcmEncrypt = (plaintext: string, key: Buffer): string => {
|
||||||
|
const iv = randomBytes(GCM_IV_LENGTH)
|
||||||
|
const cipher = createCipheriv(ALGORITHM, key, iv, { authTagLength: GCM_TAG_LENGTH })
|
||||||
|
|
||||||
|
const encrypted = Buffer.concat([cipher.update(plaintext, 'utf-8'), cipher.final()])
|
||||||
|
const tag = cipher.getAuthTag()
|
||||||
|
|
||||||
|
// Layout: IV + ciphertext + tag
|
||||||
|
const combined = Buffer.concat([iv, encrypted, tag])
|
||||||
|
return combined.toString('base64')
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AES-256-GCM decrypt.
|
||||||
|
*
|
||||||
|
* Input format (after Base64 decode): [IV (12 bytes)] + [ciphertext] + [auth tag (16 bytes)]
|
||||||
|
*
|
||||||
|
* @param encryptedBase64 - Base64-encoded encrypted data
|
||||||
|
* @param key - 32-byte AES key
|
||||||
|
* @returns Decrypted UTF-8 string
|
||||||
|
*/
|
||||||
|
export const aesGcmDecrypt = (encryptedBase64: string, key: Buffer): string => {
|
||||||
|
const data = Buffer.from(encryptedBase64, 'base64')
|
||||||
|
|
||||||
|
if (data.length < GCM_IV_LENGTH + GCM_TAG_LENGTH) {
|
||||||
|
throw new Error('Encrypted data too short: must contain IV + tag at minimum')
|
||||||
|
}
|
||||||
|
|
||||||
|
const iv = data.subarray(0, GCM_IV_LENGTH)
|
||||||
|
const tag = data.subarray(data.length - GCM_TAG_LENGTH)
|
||||||
|
const ciphertext = data.subarray(GCM_IV_LENGTH, data.length - GCM_TAG_LENGTH)
|
||||||
|
|
||||||
|
const decipher = createDecipheriv(ALGORITHM, key, iv, { authTagLength: GCM_TAG_LENGTH })
|
||||||
|
decipher.setAuthTag(tag)
|
||||||
|
|
||||||
|
const decrypted = Buffer.concat([decipher.update(ciphertext), decipher.final()])
|
||||||
|
return decrypted.toString('utf-8')
|
||||||
|
}
|
||||||
15
packages/crypto/src/hash.ts
Normal file
15
packages/crypto/src/hash.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { createHash } from 'node:crypto'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute SHA-256 hash and return raw Buffer.
|
||||||
|
*/
|
||||||
|
export const sha256 = (data: string | Buffer): Buffer => {
|
||||||
|
return createHash('sha256').update(data).digest()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute SHA-256 hash and return lowercase hex string.
|
||||||
|
*/
|
||||||
|
export const sha256Hex = (data: string | Buffer): string => {
|
||||||
|
return createHash('sha256').update(data).digest('hex')
|
||||||
|
}
|
||||||
15
packages/crypto/src/hkdf.ts
Normal file
15
packages/crypto/src/hkdf.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { hkdfSync } from 'node:crypto'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Derive a key using HKDF-SHA256.
|
||||||
|
*
|
||||||
|
* @param ikm - Input keying material (string, will be UTF-8 encoded)
|
||||||
|
* @param salt - Salt value (string, will be UTF-8 encoded)
|
||||||
|
* @param info - Info/context string (will be UTF-8 encoded)
|
||||||
|
* @param length - Output key length in bytes (default: 32 for AES-256)
|
||||||
|
* @returns Derived key as Buffer
|
||||||
|
*/
|
||||||
|
export const hkdfSha256 = (ikm: string, salt: string, info: string, length = 32): Buffer => {
|
||||||
|
const derived = hkdfSync('sha256', ikm, salt, info, length)
|
||||||
|
return Buffer.from(derived)
|
||||||
|
}
|
||||||
23
packages/crypto/src/hmac.ts
Normal file
23
packages/crypto/src/hmac.ts
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
import { createHmac } from 'node:crypto'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute HMAC-SHA256 and return Base64-encoded signature.
|
||||||
|
*
|
||||||
|
* @param key - HMAC key (Buffer)
|
||||||
|
* @param data - Data to sign (UTF-8 string)
|
||||||
|
* @returns Base64-encoded HMAC-SHA256 signature
|
||||||
|
*/
|
||||||
|
export const hmacSha256Base64 = (key: Buffer, data: string): string => {
|
||||||
|
return createHmac('sha256', key).update(data, 'utf-8').digest('base64')
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute HMAC-SHA256 and return raw Buffer.
|
||||||
|
*
|
||||||
|
* @param key - HMAC key (Buffer)
|
||||||
|
* @param data - Data to sign (UTF-8 string)
|
||||||
|
* @returns HMAC-SHA256 digest as Buffer
|
||||||
|
*/
|
||||||
|
export const hmacSha256 = (key: Buffer, data: string): Buffer => {
|
||||||
|
return createHmac('sha256', key).update(data, 'utf-8').digest()
|
||||||
|
}
|
||||||
7
packages/crypto/src/index.ts
Normal file
7
packages/crypto/src/index.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
// Public surface of @furtherverse/crypto: re-exports each primitive from its
// dedicated module (AES-GCM, SHA-256, HKDF, HMAC, OpenPGP, RSA-OAEP, RSA verify).
export { aesGcmDecrypt, aesGcmEncrypt } from './aes-gcm'
export { sha256, sha256Hex } from './hash'
export { hkdfSha256 } from './hkdf'
export { hmacSha256, hmacSha256Base64 } from './hmac'
export { generatePgpKeyPair, pgpSignDetached, pgpVerifyDetached, validatePgpPrivateKey } from './pgp'
export { rsaOaepEncrypt } from './rsa-oaep'
export { rsaVerifySignature, validateRsaPublicKey } from './rsa-signature'
|
||||||
79
packages/crypto/src/pgp.ts
Normal file
79
packages/crypto/src/pgp.ts
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
import * as openpgp from 'openpgp'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate an OpenPGP RSA key pair.
|
||||||
|
*
|
||||||
|
* @param name - User name for the key
|
||||||
|
* @param email - User email for the key
|
||||||
|
* @returns ASCII-armored private and public keys
|
||||||
|
*/
|
||||||
|
export const generatePgpKeyPair = async (
|
||||||
|
name: string,
|
||||||
|
email: string,
|
||||||
|
): Promise<{ privateKey: string; publicKey: string }> => {
|
||||||
|
const { privateKey, publicKey } = await openpgp.generateKey({
|
||||||
|
type: 'rsa',
|
||||||
|
rsaBits: 2048,
|
||||||
|
userIDs: [{ name, email }],
|
||||||
|
format: 'armored',
|
||||||
|
})
|
||||||
|
|
||||||
|
return { privateKey, publicKey }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a detached OpenPGP signature for the given data.
|
||||||
|
*
|
||||||
|
* @param data - Raw data to sign (Buffer or Uint8Array)
|
||||||
|
* @param armoredPrivateKey - ASCII-armored private key
|
||||||
|
* @returns ASCII-armored detached signature (signature.asc content)
|
||||||
|
*/
|
||||||
|
export const validatePgpPrivateKey = async (armoredKey: string): Promise<void> => {
|
||||||
|
await openpgp.readPrivateKey({ armoredKey })
|
||||||
|
}
|
||||||
|
|
||||||
|
export const pgpSignDetached = async (data: Uint8Array, armoredPrivateKey: string): Promise<string> => {
|
||||||
|
const privateKey = await openpgp.readPrivateKey({ armoredKey: armoredPrivateKey })
|
||||||
|
const message = await openpgp.createMessage({ binary: data })
|
||||||
|
|
||||||
|
const signature = await openpgp.sign({
|
||||||
|
message,
|
||||||
|
signingKeys: privateKey,
|
||||||
|
detached: true,
|
||||||
|
format: 'armored',
|
||||||
|
})
|
||||||
|
|
||||||
|
return signature as string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify a detached OpenPGP signature.
|
||||||
|
*
|
||||||
|
* @param data - Original data (Buffer or Uint8Array)
|
||||||
|
* @param armoredSignature - ASCII-armored detached signature
|
||||||
|
* @param armoredPublicKey - ASCII-armored public key
|
||||||
|
* @returns true if signature is valid
|
||||||
|
*/
|
||||||
|
export const pgpVerifyDetached = async (
|
||||||
|
data: Uint8Array,
|
||||||
|
armoredSignature: string,
|
||||||
|
armoredPublicKey: string,
|
||||||
|
): Promise<boolean> => {
|
||||||
|
const publicKey = await openpgp.readKey({ armoredKey: armoredPublicKey })
|
||||||
|
const signature = await openpgp.readSignature({ armoredSignature })
|
||||||
|
const message = await openpgp.createMessage({ binary: data })
|
||||||
|
|
||||||
|
const verificationResult = await openpgp.verify({
|
||||||
|
message,
|
||||||
|
signature,
|
||||||
|
verificationKeys: publicKey,
|
||||||
|
})
|
||||||
|
|
||||||
|
const { verified } = verificationResult.signatures[0]!
|
||||||
|
try {
|
||||||
|
await verified
|
||||||
|
return true
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
32
packages/crypto/src/rsa-oaep.ts
Normal file
32
packages/crypto/src/rsa-oaep.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import forge from 'node-forge'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* RSA-OAEP encrypt with platform public key.
|
||||||
|
*
|
||||||
|
* Matches Java's {@code Cipher.getInstance("RSA/ECB/OAEPWithSHA-256AndMGF1Padding")}
|
||||||
|
* with **default SunJCE parameters**:
|
||||||
|
*
|
||||||
|
* | Parameter | Value |
|
||||||
|
* |-----------|--------|
|
||||||
|
* | OAEP hash | SHA-256|
|
||||||
|
* | MGF1 hash | SHA-1 |
|
||||||
|
*
|
||||||
|
* Node.js `crypto.publicEncrypt({ oaepHash })` ties both hashes together,
|
||||||
|
* so we use `node-forge` which allows independent configuration.
|
||||||
|
*
|
||||||
|
* @param plaintext - UTF-8 string to encrypt
|
||||||
|
* @param publicKeyBase64 - Platform RSA public key (X.509 / SPKI DER, Base64)
|
||||||
|
* @returns Base64-encoded ciphertext
|
||||||
|
*/
|
||||||
|
export const rsaOaepEncrypt = (plaintext: string, publicKeyBase64: string): string => {
|
||||||
|
const derBytes = forge.util.decode64(publicKeyBase64)
|
||||||
|
const asn1 = forge.asn1.fromDer(derBytes)
|
||||||
|
const publicKey = forge.pki.publicKeyFromAsn1(asn1) as forge.pki.rsa.PublicKey
|
||||||
|
|
||||||
|
const encrypted = publicKey.encrypt(plaintext, 'RSA-OAEP', {
|
||||||
|
md: forge.md.sha256.create(),
|
||||||
|
mgf1: { md: forge.md.sha1.create() },
|
||||||
|
})
|
||||||
|
|
||||||
|
return forge.util.encode64(encrypted)
|
||||||
|
}
|
||||||
24
packages/crypto/src/rsa-signature.test.ts
Normal file
24
packages/crypto/src/rsa-signature.test.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
// Unit tests for rsa-signature.ts (bun:test runner).
import { describe, expect, it } from 'bun:test'
import { constants, createSign, generateKeyPairSync } from 'node:crypto'
import { rsaVerifySignature, validateRsaPublicKey } from './rsa-signature'

describe('rsaVerifySignature', () => {
  it('verifies SHA256withRSA signatures over raw payload bytes', () => {
    // Fresh ephemeral key pair per run; the payload mimics a Base64 licence blob.
    const { privateKey, publicKey } = generateKeyPairSync('rsa', { modulusLength: 2048 })
    const payload = Buffer.from('eyJsaWNlbmNlX2lkIjoiTElDLTAwMSIsImV4cGlyZV90aW1lIjoiMjAyNy0wMy0xOSJ9', 'utf-8')

    // Sign with PKCS#1 v1.5 padding — the scheme rsaVerifySignature expects.
    const signer = createSign('RSA-SHA256')
    signer.update(payload)
    signer.end()

    const signature = signer.sign({ key: privateKey, padding: constants.RSA_PKCS1_PADDING })
    const publicKeyBase64 = publicKey.export({ format: 'der', type: 'spki' }).toString('base64')

    expect(rsaVerifySignature(payload, signature, publicKeyBase64)).toBe(true)
    // Tampered payload: the template literal stringifies the Buffer and appends
    // 'x', yielding bytes that differ from the signed payload.
    expect(rsaVerifySignature(Buffer.from(`${payload}x`, 'utf-8'), signature, publicKeyBase64)).toBe(false)
  })

  it('rejects malformed SPKI public keys', () => {
    expect(() => validateRsaPublicKey('not-a-public-key')).toThrow()
  })
})
|
||||||
19
packages/crypto/src/rsa-signature.ts
Normal file
19
packages/crypto/src/rsa-signature.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import { constants, createPublicKey, verify } from 'node:crypto'
|
||||||
|
|
||||||
|
const createSpkiPublicKey = (publicKeyBase64: string) => {
|
||||||
|
return createPublicKey({
|
||||||
|
key: Buffer.from(publicKeyBase64, 'base64'),
|
||||||
|
format: 'der',
|
||||||
|
type: 'spki',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export const validateRsaPublicKey = (publicKeyBase64: string): void => {
|
||||||
|
createSpkiPublicKey(publicKeyBase64)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const rsaVerifySignature = (data: Uint8Array, signature: Uint8Array, publicKeyBase64: string): boolean => {
|
||||||
|
const publicKey = createSpkiPublicKey(publicKeyBase64)
|
||||||
|
|
||||||
|
return verify('RSA-SHA256', data, { key: publicKey, padding: constants.RSA_PKCS1_PADDING }, signature)
|
||||||
|
}
|
||||||
7
packages/crypto/tsconfig.json
Normal file
7
packages/crypto/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"extends": "@furtherverse/tsconfig/bun.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"rootDir": "src"
|
||||||
|
},
|
||||||
|
"include": ["src"]
|
||||||
|
}
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@furtherverse/utils",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"private": true,
|
|
||||||
"type": "module",
|
|
||||||
"imports": {
|
|
||||||
"#*": "./src/*"
|
|
||||||
},
|
|
||||||
"exports": {
|
|
||||||
".": "./src/index.ts",
|
|
||||||
"./*": "./src/*.ts"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"ohash": "catalog:",
|
|
||||||
"systeminformation": "catalog:"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@furtherverse/tsconfig": "workspace:*"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
import { hash } from 'ohash'
|
|
||||||
import si from 'systeminformation'
|
|
||||||
|
|
||||||
async function getSystemInfo() {
|
|
||||||
const [uuid, baseboard, bios, system, diskLayout, networkInterfaces] =
|
|
||||||
await Promise.all([
|
|
||||||
si.uuid(),
|
|
||||||
si.baseboard(),
|
|
||||||
si.bios(),
|
|
||||||
si.system(),
|
|
||||||
si.diskLayout(),
|
|
||||||
si.networkInterfaces(),
|
|
||||||
])
|
|
||||||
|
|
||||||
return {
|
|
||||||
uuid,
|
|
||||||
baseboard,
|
|
||||||
bios,
|
|
||||||
system,
|
|
||||||
diskLayout,
|
|
||||||
networkInterfaces,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getHardwareFingerprint() {
|
|
||||||
const systemInfo = await getSystemInfo()
|
|
||||||
|
|
||||||
return hash(systemInfo)
|
|
||||||
}
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
export * from './fingerprint'
|
|
||||||
10
turbo.json
10
turbo.json
@@ -6,9 +6,6 @@
|
|||||||
"build": {
|
"build": {
|
||||||
"dependsOn": ["^build"]
|
"dependsOn": ["^build"]
|
||||||
},
|
},
|
||||||
"compile": {
|
|
||||||
"dependsOn": ["build"]
|
|
||||||
},
|
|
||||||
"dev": {
|
"dev": {
|
||||||
"cache": false,
|
"cache": false,
|
||||||
"persistent": true
|
"persistent": true
|
||||||
@@ -17,12 +14,7 @@
|
|||||||
"cache": false
|
"cache": false
|
||||||
},
|
},
|
||||||
"typecheck": {
|
"typecheck": {
|
||||||
"inputs": [
|
"inputs": ["package.json", "tsconfig.json", "tsconfig.*.json", "**/*.{ts,tsx,d.ts}"],
|
||||||
"package.json",
|
|
||||||
"tsconfig.json",
|
|
||||||
"tsconfig.*.json",
|
|
||||||
"**/*.{ts,tsx,d.ts}"
|
|
||||||
],
|
|
||||||
"outputs": []
|
"outputs": []
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
Reference in New Issue
Block a user