Compare commits
4 Commits
main
..
509860bba8
| Author | SHA1 | Date | |
|---|---|---|---|
| 509860bba8 | |||
| 4e7c4e1aa5 | |||
| 8261409d7d | |||
| d2eb98d612 |
@@ -1,13 +0,0 @@
|
|||||||
node_modules/
|
|
||||||
.output/
|
|
||||||
.tanstack/
|
|
||||||
out/
|
|
||||||
.git/
|
|
||||||
.env
|
|
||||||
.env.*
|
|
||||||
|
|
||||||
*.md
|
|
||||||
*.tsbuildinfo
|
|
||||||
*.bun-build
|
|
||||||
|
|
||||||
.vscode/
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
DATABASE_URL=postgres://postgres:postgres@localhost:5432/postgres
|
|
||||||
|
|
||||||
# Optional logging knobs (defaults are usually fine):
|
|
||||||
# LOG_LEVEL=info # trace|debug|info|warning|error|fatal
|
|
||||||
# LOG_FORMAT=pretty # pretty|json — defaults to TTY ? pretty : json
|
|
||||||
# LOG_DB=false # true to log every Drizzle SQL query
|
|
||||||
+154
-15
@@ -1,23 +1,162 @@
|
|||||||
# Dependencies
|
### Custom ###
|
||||||
node_modules/
|
|
||||||
|
|
||||||
# Build output
|
# TanStack
|
||||||
.output/
|
|
||||||
.tanstack/
|
.tanstack/
|
||||||
.vite/
|
|
||||||
out/
|
|
||||||
*.bun-build
|
|
||||||
*.tsbuildinfo
|
|
||||||
vite.config.js.timestamp-*
|
|
||||||
vite.config.ts.timestamp-*
|
|
||||||
|
|
||||||
# Env
|
# Nitro
|
||||||
|
.output/
|
||||||
|
|
||||||
|
# Bun build
|
||||||
|
*.bun-build
|
||||||
|
|
||||||
|
# SQLite database files
|
||||||
|
*.db
|
||||||
|
*.db-wal
|
||||||
|
*.db-shm
|
||||||
|
|
||||||
|
# Turborepo
|
||||||
|
.turbo/
|
||||||
|
|
||||||
|
### Node ###
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
lerna-debug.log*
|
||||||
|
|
||||||
|
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||||
|
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||||
|
|
||||||
|
# Runtime data
|
||||||
|
pids
|
||||||
|
*.pid
|
||||||
|
*.seed
|
||||||
|
*.pid.lock
|
||||||
|
|
||||||
|
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||||
|
lib-cov
|
||||||
|
|
||||||
|
# Coverage directory used by tools like istanbul
|
||||||
|
coverage
|
||||||
|
*.lcov
|
||||||
|
|
||||||
|
# nyc test coverage
|
||||||
|
.nyc_output
|
||||||
|
|
||||||
|
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||||
|
.grunt
|
||||||
|
|
||||||
|
# Bower dependency directory (https://bower.io/)
|
||||||
|
bower_components
|
||||||
|
|
||||||
|
# node-waf configuration
|
||||||
|
.lock-wscript
|
||||||
|
|
||||||
|
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||||
|
build/Release
|
||||||
|
|
||||||
|
# Dependency directories
|
||||||
|
node_modules/
|
||||||
|
jspm_packages/
|
||||||
|
|
||||||
|
# Snowpack dependency directory (https://snowpack.dev/)
|
||||||
|
web_modules/
|
||||||
|
|
||||||
|
# TypeScript cache
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# Optional npm cache directory
|
||||||
|
.npm
|
||||||
|
|
||||||
|
# Optional eslint cache
|
||||||
|
.eslintcache
|
||||||
|
|
||||||
|
# Optional stylelint cache
|
||||||
|
.stylelintcache
|
||||||
|
|
||||||
|
# Optional REPL history
|
||||||
|
.node_repl_history
|
||||||
|
|
||||||
|
# Output of 'npm pack'
|
||||||
|
*.tgz
|
||||||
|
|
||||||
|
# Yarn Integrity file
|
||||||
|
.yarn-integrity
|
||||||
|
|
||||||
|
# dotenv environment variable files
|
||||||
.env
|
.env
|
||||||
.env.*
|
.env.*
|
||||||
!.env.example
|
!.env.example
|
||||||
|
|
||||||
# Logs
|
# parcel-bundler cache (https://parceljs.org/)
|
||||||
*.log
|
.cache
|
||||||
|
.parcel-cache
|
||||||
|
|
||||||
# OS
|
# Next.js build output
|
||||||
.DS_Store
|
.next
|
||||||
|
out
|
||||||
|
|
||||||
|
# Nuxt.js build / generate output
|
||||||
|
.nuxt
|
||||||
|
dist
|
||||||
|
.output
|
||||||
|
|
||||||
|
# Gatsby files
|
||||||
|
.cache/
|
||||||
|
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||||
|
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||||
|
# public
|
||||||
|
|
||||||
|
# vuepress build output
|
||||||
|
.vuepress/dist
|
||||||
|
|
||||||
|
# vuepress v2.x temp and cache directory
|
||||||
|
.temp
|
||||||
|
.cache
|
||||||
|
|
||||||
|
# Sveltekit cache directory
|
||||||
|
.svelte-kit/
|
||||||
|
|
||||||
|
# vitepress build output
|
||||||
|
**/.vitepress/dist
|
||||||
|
|
||||||
|
# vitepress cache directory
|
||||||
|
**/.vitepress/cache
|
||||||
|
|
||||||
|
# Docusaurus cache and generated files
|
||||||
|
.docusaurus
|
||||||
|
|
||||||
|
# Serverless directories
|
||||||
|
.serverless/
|
||||||
|
|
||||||
|
# FuseBox cache
|
||||||
|
.fusebox/
|
||||||
|
|
||||||
|
# DynamoDB Local files
|
||||||
|
.dynamodb/
|
||||||
|
|
||||||
|
# Firebase cache directory
|
||||||
|
.firebase/
|
||||||
|
|
||||||
|
# TernJS port file
|
||||||
|
.tern-port
|
||||||
|
|
||||||
|
# Stores VSCode versions used for testing VSCode extensions
|
||||||
|
.vscode-test
|
||||||
|
|
||||||
|
# yarn v3
|
||||||
|
.pnp.*
|
||||||
|
.yarn/*
|
||||||
|
!.yarn/patches
|
||||||
|
!.yarn/plugins
|
||||||
|
!.yarn/releases
|
||||||
|
!.yarn/sdks
|
||||||
|
!.yarn/versions
|
||||||
|
|
||||||
|
# Vite files
|
||||||
|
vite.config.js.timestamp-*
|
||||||
|
vite.config.ts.timestamp-*
|
||||||
|
.vite/
|
||||||
|
|||||||
Vendored
+1
-3
@@ -1,11 +1,9 @@
|
|||||||
{
|
{
|
||||||
"recommendations": [
|
"recommendations": [
|
||||||
"biomejs.biome",
|
"biomejs.biome",
|
||||||
"codezombiech.gitignore",
|
|
||||||
"hverlin.mise-vscode",
|
"hverlin.mise-vscode",
|
||||||
"oven.bun-vscode",
|
"oven.bun-vscode",
|
||||||
"redhat.vscode-yaml",
|
"redhat.vscode-yaml",
|
||||||
"tamasfe.even-better-toml",
|
"tamasfe.even-better-toml"
|
||||||
"unional.vscode-sort-package-json"
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
Vendored
-2
@@ -43,8 +43,6 @@
|
|||||||
"files.watcherExclude": {
|
"files.watcherExclude": {
|
||||||
"**/routeTree.gen.ts": true
|
"**/routeTree.gen.ts": true
|
||||||
},
|
},
|
||||||
"js/ts.tsdk.path": "node_modules/typescript/lib",
|
|
||||||
"js/ts.tsdk.promptToUseWorkspaceVersion": true,
|
|
||||||
"search.exclude": {
|
"search.exclude": {
|
||||||
"**/routeTree.gen.ts": true
|
"**/routeTree.gen.ts": true
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,232 +1,219 @@
|
|||||||
# AGENTS.md
|
# AGENTS.md - AI Coding Agent Guidelines
|
||||||
|
|
||||||
Compact, repo-specific notes for AI agents. Generic language/framework knowledge is omitted — only things that will bite you if you don't know. Pair this with `README.md` (user-facing quick-start + add-a-feature checklist + deploy flow).
|
Guidelines for AI agents working in this Bun monorepo.
|
||||||
|
|
||||||
## Stack & runtime
|
## Project Overview
|
||||||
|
|
||||||
- **Bun-only** (`mise.toml` pins `bun = 1.3.13`). Never invoke `npm`/`npx`/`node`/`yarn`/`pnpm`. Use `bun run <script>` (bare `bun <script>` can collide with Bun built-in subcommands).
|
> **This project uses [Bun](https://bun.sh) exclusively as both the JavaScript runtime and package manager. Do NOT use Node.js / npm / yarn / pnpm. All commands start with `bun` — use `bun install` for dependencies and `bun run <script>` for scripts. Always prefer `bun run <script>` over `bun <script>` to avoid conflicts with Bun built-in subcommands (e.g. `bun build` invokes Bun's bundler, NOT your package.json script). Never use `npm`, `npx`, or `node`.**
|
||||||
- **Prefer Bun-native APIs over external packages and `node:*` polyfills.** UUIDv7 in app code → `Bun.randomUUIDv7()` (not the `uuid` package); DB primary keys are a separate matter — those go through PG18's `uuidv7()`, see "Drizzle" section. SHA-256 → `Bun.CryptoHasher.hash('sha256', s, 'hex')` (not `node:crypto.createHash`); short sleeps → `Bun.sleep(ms)` (not raw `setTimeout` with promise wrapping); file I/O in build scripts → `Bun.file` / `Bun.write` are fine. The runtime is Bun, the deployment target is Bun, the test runner is Bun — there is no "portability" concern that would justify dragging in npm packages or Node compat shims for things Bun ships natively.
|
|
||||||
- TanStack Start (React 19 SSR, file-routed) + Vite 8 + Nitro (nightly, preset `bun`). Dev server defaults to Vite's port (3000); not pinned, override via `vite dev --port <n>` if you need to.
|
|
||||||
- **PostgreSQL 18+ only** (`compose.yaml` pins `postgres:18-alpine`). The starter relies on PG18's built-in `uuidv7()` function for primary-key generation — see "Drizzle" section. Do not soften this to support older PG; if you need PG <18 compatibility, fork and reintroduce app-side UUIDv7 (e.g. `Bun.randomUUIDv7()` or the `uuid` package) yourself.
|
|
||||||
- **Drizzle ORM `0.45.2` (0.x, NOT 1.0 beta)** — see "Drizzle" section, this matters a lot.
|
|
||||||
- ORPC (contract-first), TanStack Query v5, Tailwind v4.
|
|
||||||
- **Logging via [LogTape](https://logtape.org/)** (zero-dep, runtime-agnostic) — see "Logging" section. `console.*` is forbidden in business code.
|
|
||||||
|
|
||||||
## Scripts
|
- **Monorepo**: Bun workspaces + Turborepo orchestration
|
||||||
|
- **Runtime**: Bun (see `mise.toml` for version) — **NOT Node.js**
|
||||||
|
- **Package Manager**: Bun — **NOT npm / yarn / pnpm**
|
||||||
|
- **Apps**:
|
||||||
|
- `apps/server` - TanStack Start fullstack web app (see `apps/server/AGENTS.md`)
|
||||||
|
- `apps/desktop` - Electron desktop shell, sidecar server pattern (see `apps/desktop/AGENTS.md`)
|
||||||
|
- **Packages**: `packages/tsconfig` (shared TS configs)
|
||||||
|
|
||||||
|
## Build / Lint / Test Commands
|
||||||
|
|
||||||
|
### Root Commands (via Turbo)
|
||||||
```bash
|
```bash
|
||||||
bun run dev # bunx --bun vite dev (localhost:3000)
|
bun run dev # Start all apps in dev mode
|
||||||
bun run build # bunx --bun vite build → .output/
|
bun run build # Build all apps
|
||||||
bun run compile # bun scripts/compile.ts → out/server-<target> (standalone CLI binary)
|
bun run compile # Compile server to standalone binary (current platform)
|
||||||
bun run cli <cmd> # bun src/bin.ts <cmd> — run a CLI subcommand in source (dev)
|
bun run compile:darwin # Compile server for macOS (arm64 + x64)
|
||||||
bun run typecheck # tsc --noEmit
|
bun run compile:linux # Compile server for Linux (x64 + arm64)
|
||||||
bun run test # bun test — runs all *.test.ts files (colocated with source)
|
bun run compile:windows # Compile server for Windows x64
|
||||||
bun run fix # biome check --write (lint + format + organize imports)
|
bun run dist # Package desktop distributable (current platform)
|
||||||
bun run db:push # dev only — push schema to DB, no migration file
|
bun run dist:linux # Package desktop for Linux (x64 + arm64)
|
||||||
bun run db:generate # drizzle-kit generate && scripts/embed-migrations.ts (regenerates migrations.gen.ts)
|
bun run dist:mac # Package desktop for macOS (arm64 + x64)
|
||||||
bun run db:embed # scripts/embed-migrations.ts only — regenerate migrations.gen.ts from ./drizzle/
|
bun run dist:win # Package desktop for Windows x64
|
||||||
bun run db:migrate # apply migrations via drizzle-kit (local dev convenience; prod uses ./server migrate)
|
bun run fix # Lint + format (Biome auto-fix)
|
||||||
bun run db:studio # Drizzle Studio
|
bun run typecheck # TypeScript check across monorepo
|
||||||
```
|
```
|
||||||
|
|
||||||
Cross-compile targets live under `compile:{linux,darwin,windows}[:arch]`. `scripts/compile.ts` accepts `--target bun-<os>-<arch>`; default derives from host.
|
### Server App (`apps/server`)
|
||||||
|
```bash
|
||||||
|
bun run dev # Vite dev server (localhost:3000)
|
||||||
|
bun run build # Production build -> .output/
|
||||||
|
bun run compile # Compile to standalone binary (current platform)
|
||||||
|
bun run compile:darwin # Compile for macOS (arm64 + x64)
|
||||||
|
bun run compile:darwin:arm64 # Compile for macOS arm64
|
||||||
|
bun run compile:darwin:x64 # Compile for macOS x64
|
||||||
|
bun run compile:linux # Compile for Linux (x64 + arm64)
|
||||||
|
bun run compile:linux:arm64 # Compile for Linux arm64
|
||||||
|
bun run compile:linux:x64 # Compile for Linux x64
|
||||||
|
bun run compile:windows # Compile for Windows (default: x64)
|
||||||
|
bun run compile:windows:x64 # Compile for Windows x64
|
||||||
|
bun run fix # Biome auto-fix
|
||||||
|
bun run typecheck # TypeScript check
|
||||||
|
|
||||||
Before committing: `bun run fix && bun run typecheck && bun run test`. No CI, no pre-commit hooks, no lint-staged — so these are on you.
|
# Database (Drizzle)
|
||||||
|
bun run db:generate # Generate migrations from schema
|
||||||
## Drizzle (v0.x — critical)
|
bun run db:migrate # Run migrations
|
||||||
|
bun run db:push # Push schema (dev only)
|
||||||
**Why it matters:** the project was on 1.0 beta and was rolled back. Online docs default to 1.0 beta APIs that do NOT exist here. If typecheck complains, you are probably importing a 1.0 beta API.
|
bun run db:studio # Open Drizzle Studio
|
||||||
|
|
||||||
- Driver: `drizzle-orm/postgres-js`. Do NOT use `drizzle-orm/bun-sql`.
|
|
||||||
- `drizzle()` is called with `{ connection, schema }` where `schema = import * as schema from '@/server/db/schema'`. There is **no `relations.ts`** and **no `defineRelations`** in 0.x.
|
|
||||||
- Zod generators live in the separate `drizzle-zod` package (`^0.8.3`). Import from `drizzle-zod`, **not** `drizzle-orm/zod` (that subpath only exists in 1.0 beta).
|
|
||||||
- Relational queries use **RQB v1 callback syntax**:
|
|
||||||
```ts
|
|
||||||
db.query.todoTable.findMany({
|
|
||||||
orderBy: (t, { desc }) => desc(t.createdAt),
|
|
||||||
})
|
|
||||||
```
|
|
||||||
Do NOT use the v2 object form (`orderBy: { createdAt: 'desc' }`, `where: { id }`) — it won't type-check.
|
|
||||||
- To add relations later: declare per-table with `relations()` from `drizzle-orm` and export them from the same file as the table; they get picked up automatically because `index.ts` does `drizzle({ schema })` via `import *`.
|
|
||||||
- Every table must spread `...generatedFields` from `src/server/db/fields.ts` (`id uuid PRIMARY KEY DEFAULT uuidv7() NOT NULL` — **Postgres-side generation**, requires PG18+; `createdAt`, `updatedAt` with `$onUpdateFn`). The DB is the single source of UUIDv7 truth: monotonic per cluster, uses DB clock, no app-side round-trip. **Do not reintroduce `$defaultFn(() => Bun.randomUUIDv7())`** — the SQL default is what the migration emits and what `drizzle-zod` reads as "optional in insert schema". `generatedFieldKeys` is hand-written and uses `satisfies Record<keyof typeof generatedFields, true>` so any field-key drift fails typecheck; it feeds `createInsertSchema(...).omit(...)` / `createUpdateSchema(...).omit(...)`.
|
|
||||||
- `src/server/db/index.ts` exports a module-level `const db = drizzle(...)` — not a lazy singleton. On Bun this is a long-lived process, so top-level side effects are fine and requested. Don't reintroduce `getDB/closeDB` ceremony; the Nitro shutdown plugin calls `db.$client.end()` directly. (Cloudflare Workers would need per-request init — we don't support that deployment target.)
|
|
||||||
- `drizzle.config.ts` runs outside Vite — `@/*` path aliases do NOT resolve there. It currently does `import { env } from './src/env'` (relative). Preserve that.
|
|
||||||
- **Migrations are embedded in the binary, not read from disk.** `bun run db:generate` chains `drizzle-kit generate && bun scripts/embed-migrations.ts`, which regenerates `src/server/db/migrations.gen.ts` (committed, AUTO-GENERATED header) by `import sql_<idx> from '#drizzle/<tag>.sql' with { type: 'text' }`. `src/cli/migrate.ts` reads `embeddedMigrations`, **validates SHA-256 hash of every already-applied migration against the embedded SQL** (rejects schema drift if anyone edited an applied migration), then applies pending entries via `db.execute(sql\`...\`)` + `db.transaction(...)` against the `drizzle.__drizzle_migrations` book-keeping table — public APIs only, no `db.dialect`/`db.session` (those are `@internal`). Each migration is split on `--> statement-breakpoint`; empty fragments are trimmed and skipped. Dev helpers `db:push` / `drizzle-kit migrate` still read `./drizzle/`.
|
|
||||||
|
|
||||||
## CLI & single-binary deploy
|
|
||||||
|
|
||||||
`bun run compile` produces a single executable that dispatches subcommands via [citty](https://github.com/unjs/citty). Entry is `src/bin.ts`; subcommands live in `src/cli/`.
|
|
||||||
|
|
||||||
```
|
|
||||||
./server [serve] # default — start the HTTP server
|
|
||||||
./server migrate # apply embedded migrations
|
|
||||||
./server --help
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Nitro side-effect pitfall (important).** Under the `bun` preset, `.output/server/index.mjs` has a top-level `serve(...)` call — merely importing it starts the HTTP server. `src/bin.ts` therefore must not eager-import any subcommand module, and `src/cli/serve.ts` reaches `.output/server/index.mjs` through the `src/cli/_serve-nitro.mjs` bridge (with `_serve-nitro.d.mts` for types, since `.output/` doesn't exist at typecheck time). Citty's `subCommands: { x: () => import('...') }` lazy-loader is what keeps `--help` and `migrate` from booting the server.
|
### Desktop App (`apps/desktop`)
|
||||||
|
```bash
|
||||||
**Citty eager-loads subcommand modules for `--help`** to read each subcommand's `meta`. So every `src/cli/*.ts` module body must be side-effect-free: do NOT static-import `@/env`, `@/server/db/*`, or anything that reads env at module-load time. Use `await import('@/env')` inside `run()`. Otherwise `./server --help` (or any subcommand's help) will fail with env validation errors before printing.
|
bun run dev # electron-vite dev mode (requires server dev running)
|
||||||
|
bun run build # electron-vite build (main + preload)
|
||||||
Add a subcommand: drop a file in `src/cli/` that default-exports `defineCommand({...})`, then register it in `src/bin.ts`'s `subCommands` with a `() => import(...)` thunk. Keep top-level imports limited to `citty` + Node built-ins; pull env / db / etc. via `await import(...)` inside `run()`.
|
bun run dist # Build + package for current platform
|
||||||
|
bun run dist:linux # Build + package for Linux (x64 + arm64)
|
||||||
**Deploy flow is always migrate-then-serve.** Migrations are embedded in the binary (see "Drizzle" section), so the binary is the only artifact — no `./drizzle/` directory at runtime. Dockerfile copies just `./server`. `compose.yaml` models the pattern with a one-shot `migrate` service that `app` `depends_on: service_completed_successfully`. On k8s, run `./server migrate` as an initContainer or a Helm `pre-upgrade` Job; run `./server` (= `./server serve`) as the main container.
|
bun run dist:linux:x64 # Build + package for Linux x64
|
||||||
|
bun run dist:linux:arm64 # Build + package for Linux arm64
|
||||||
## Compile flags
|
bun run dist:mac # Build + package for macOS (arm64 + x64)
|
||||||
|
bun run dist:mac:arm64 # Build + package for macOS arm64
|
||||||
`scripts/compile.ts` builds with `--minify --bytecode --sourcemap=inline`:
|
bun run dist:mac:x64 # Build + package for macOS x64
|
||||||
|
bun run dist:win # Build + package for Windows x64
|
||||||
- **`bytecode`** — pre-compiles JS to bytecode and embeds it in the binary; ~2x startup on app-sized binaries (Bun docs benchmark). Requires `--compile`; top-level `await` must live inside `async` functions (it already does in this repo).
|
bun run fix # Biome auto-fix
|
||||||
- **`minify`** — shrinks the binary and the bytecode it derives from.
|
bun run typecheck # TypeScript check
|
||||||
- **`sourcemap: 'inline'`** — embeds the source map in the binary so error stack traces stay decodable. Bun also writes a residual `out/bin.js.map` next to the output; `scripts/compile.ts` removes it so the binary is the only artifact.
|
|
||||||
|
|
||||||
## ORPC
|
|
||||||
|
|
||||||
Contract → Router → Handler → Client, all type-safe from a single contract.
|
|
||||||
|
|
||||||
- `os` is built in `src/server/api/server.ts` via `implement(contract).$context<BaseContext>()`. **Always import `os` from `@/server/api/server`**, never from `@orpc/server` directly. `ORPCError`, `onError`, `ValidationError` come from `@orpc/server`.
|
|
||||||
- Contracts (`src/server/api/contracts/*.contract.ts`) generate Zod from Drizzle tables via `drizzle-zod`:
|
|
||||||
```ts
|
|
||||||
const insertSchema = createInsertSchema(todoTable).omit(generatedFieldKeys)
|
|
||||||
```
|
|
||||||
Barrel-aggregated in `contracts/index.ts` as `export const contract = { todo }`.
|
|
||||||
- Routers (`src/server/api/routers/*.router.ts`) import `db` directly from `@/server/db` in their handlers. There is **no `middlewares/` directory** by default — `db` doesn't need one (module-level const). When you actually need per-request context (auth, tenant, rate-limit), create `src/server/api/middlewares/<name>.middleware.ts` with `os.middleware(...)` and extend `BaseContext` in `context.ts`.
|
|
||||||
- **Interceptors are attached at the handler level, not in `server.ts` and not on `os`.** Both `src/routes/api/rpc.$.ts` (`RPCHandler`) and `src/routes/api/$.ts` (`OpenAPIHandler`) register `[onError(logError)]` (server) and `[onError(handleValidationError)]` (client). The validation interceptor rewrites `BAD_REQUEST + ValidationError` into `INPUT_VALIDATION_FAILED` (422) and output validation errors into `OUTPUT_VALIDATION_FAILED`. `logError` resolves a `getLogger(['api'])` LogTape category — never `console.*` directly. See "Logging" section.
|
|
||||||
- OpenAPI/Scalar: docs at `/api/docs`, spec at `/api/spec.json` (handler prefix `/api`, plugin paths `/docs` and `/spec.json`).
|
|
||||||
- **SSR isomorphism** (`src/client/orpc.ts`): `createIsomorphicFn().server(createRouterClient(...)).client(new RPCLink(...))`. Server branch reads `getRequestHeaders()` for context; client branch POSTs to `${origin}/api/rpc`.
|
|
||||||
- **Mutation invalidation is colocated at the call site** via `mutationOptions({ onSuccess })`, not in `src/client/orpc.ts`. `orpc` in `src/client/orpc.ts` is a plain `createTanstackQueryUtils(client)` — no `experimental_defaults`. Per-feature query helpers live in `src/client/queries/<feature>.ts` (e.g. `useInvalidateTodos`); routes/components compose those hooks rather than holding query keys inline. See `src/client/queries/todo.ts` + `src/routes/index.tsx` for the canonical shape.
|
|
||||||
- SSR prefetch in route loaders: `await context.queryClient.ensureQueryData(orpc.todo.list.queryOptions())`. Components use `useSuspenseQuery(orpc.feature.list.queryOptions())`.
|
|
||||||
|
|
||||||
## Code style (Biome)
|
|
||||||
|
|
||||||
- 2-space, LF, single quotes, **semicolons as-needed** (omitted unless required), 120-col, arrow parens always, `useArrowFunction: "error"` (covers function *expressions* only). Also `noReactPropAssignments: "error"`.
|
|
||||||
- Lint domains enabled (Biome 2.4): `types: "all"` (catches `noFloatingPromises`, `noMisusedPromises`, `useAwaitThenable`, `noUnnecessaryConditions` — TS-aware async/promise traps), `drizzle: "recommended"`, `react: "recommended"`. `noImportCycles` is on under `suspicious`.
|
|
||||||
- **Route components use `function Foo()` declarations**, placed below the `Route` config so the file reads top-down (route on top, component below). This is the official TanStack Router/Start pattern and relies on hoisting — `const Foo = () => {}` would TDZ-error when referenced from `createFileRoute({ component: Foo })` above it. Inline arrows are fine for trivial leaf components (e.g. plain redirect routes). Non-route components (UI primitives in `src/components/`) use `const Foo = () => {}`.
|
|
||||||
- Imports are auto-organized into two groups (external, then `@/*`), each alphabetical, with `import type` interleaved (NOT a separate group). `bun run fix` handles this; don't hand-sort.
|
|
||||||
- Files: utils `kebab-case.ts`, components `PascalCase.tsx`.
|
|
||||||
- `routeTree.gen.ts` is generated — ignored by Biome, never edit.
|
|
||||||
|
|
||||||
## Testing
|
|
||||||
|
|
||||||
`bun test` runs all `*.test.ts` files. Tests are colocated next to the source they exercise (e.g. `src/server/api/contracts/todo.contract.test.ts`). Use `import { describe, expect, test } from 'bun:test'`. The `@/*` alias resolves in tests via tsconfig paths. No Vitest, no Jest, no separate test config — keep it that way unless you need a browser/JSDOM environment.
|
|
||||||
|
|
||||||
## Endpoints
|
|
||||||
|
|
||||||
- `/` — Todos UI (file route).
|
|
||||||
- `/health` — bare `GET` returning `ok` (200, `text/plain`). Liveness only — no DB check, so it stays green even when Postgres is down. Add a separate `/ready` if you ever need readiness.
|
|
||||||
- `/api/rpc` — ORPC RPC handler (POST).
|
|
||||||
- `/api/*` — ORPC OpenAPI handler. Docs at `/api/docs`, spec at `/api/spec.json`.
|
|
||||||
|
|
||||||
## TypeScript
|
|
||||||
|
|
||||||
Strict mode, plus `noUncheckedIndexedAccess`, `verbatimModuleSyntax`, `erasableSyntaxOnly`, `noImplicitOverride`. No `as any` / `@ts-ignore` / `@ts-expect-error`.
|
|
||||||
|
|
||||||
Path alias: `@/* → src/*`. For files outside `src/` use `@/../<file>` (example in the codebase: `src/routes/api/$.ts` imports `name, version` from `@/../package.json`).
|
|
||||||
|
|
||||||
## Env
|
|
||||||
|
|
||||||
`src/env.ts` via `@t3-oss/env-core`. Server: `DATABASE_URL` (required, `z.url()`), `LOG_LEVEL` (`trace|debug|info|warning|error|fatal`, default `info`), `LOG_FORMAT` (`pretty|json`, default = TTY ? `pretty` : `json`), `LOG_DB` (`stringbool`, default `false` — flips on Drizzle SQL query logging). `client: {}` is empty by default — any client-side env must be `VITE_`-prefixed. Never commit `.env`.
|
|
||||||
|
|
||||||
## Logging
|
|
||||||
|
|
||||||
All server-side logging goes through `src/server/logger.ts`, a thin wrapper over [LogTape](https://logtape.org/). The module configures LogTape on import (via `configureSync`, no top-level await — works under `--bytecode`) and re-exports `getLogger`.
|
|
||||||
|
|
||||||
```ts
|
|
||||||
import { getLogger } from '@/server/logger'
|
|
||||||
const logger = getLogger(['feature', 'subsystem'])
|
|
||||||
logger.info('Created todo {id}', { id })
|
|
||||||
logger.error('DB write failed', { error })
|
|
||||||
```
|
```
|
||||||
|
|
||||||
- Categories are hierarchical arrays — they show up as dot-paths in JSON output (`"logger":"feature.subsystem"`) and let you filter by prefix when shipping logs.
|
### Testing
|
||||||
- The `{name}` placeholders are for **primitive** values you want rendered inline (numbers, short strings, IDs). For objects, errors, and anything multi-field, omit the placeholder and just pass the value in properties — `logger.error('Auth failed', { error, userId })` keeps the message clean while properties stay structured. Never string-concatenate or template-literal — that defeats structured logging.
|
No test framework configured yet. When adding tests:
|
||||||
- Format is `pretty` (icons + ANSI) on TTY, `json` (one-line JSON) when piped — perfect for Loki/Datadog/CloudWatch ingestion. Override with `LOG_FORMAT`.
|
```bash
|
||||||
- Drizzle SQL queries are logged at `info` under category `['db']` when `LOG_DB=true`, via `@logtape/drizzle-orm`'s `DrizzleLogger` adapter (constructed in `src/server/db/index.ts`). The `info` level is intentional: flipping `LOG_DB=true` alone is enough — no need to also lower `LOG_LEVEL`.
|
bun test path/to/test.ts # Run single test file
|
||||||
- `src/server/api/interceptors.ts` calls `getLogger(['api']).error(...)` from `logError`. CLI subcommands lazy-import the logger inside `run()` — they are still required to be side-effect-free at module top (citty eager-loads for `--help`).
|
bun test -t "pattern" # Run tests matching pattern
|
||||||
- Bun-specific: `process.env.NODE_ENV` is **inlined at build time** by `bun build --minify` — do NOT branch on it for logger config (use `process.stdout.isTTY` or `LOG_FORMAT` instead). pino is unusable here because its worker-thread transports crash inside the `/$bunfs/` virtual filesystem of compiled binaries; LogTape has zero workers and zero dynamic require, so it ships cleanly into the single binary.
|
|
||||||
|
|
||||||
## Docker / deploy
|
|
||||||
|
|
||||||
- Multi-stage: `oven/bun:1.3.13` builds and runs `bun scripts/compile.ts`, then `gcr.io/distroless/cc-debian13:nonroot` runs the single `./server` binary. The `cc` (glibc) distroless variant is required because Bun's compiled binary links glibc.
|
|
||||||
- `compose.yaml`: one-shot `migrate` service runs `./server migrate` with `restart: "no"`, then `app` starts (`depends_on: migrate: service_completed_successfully`). `DATABASE_URL=postgres://postgres:postgres@db:5432/postgres` for both.
|
|
||||||
- Distroless has no shell, so any init-then-serve pattern must use exec-form `command: [...]`, not `sh -c`.
|
|
||||||
|
|
||||||
## Layout (non-obvious parts only)
|
|
||||||
|
|
||||||
```
|
|
||||||
src/
|
|
||||||
├── bin.ts # citty entry — keep imports minimal (see "CLI" section)
|
|
||||||
├── client/
|
|
||||||
│ ├── orpc.ts # isomorphic ORPC client + TanStack Query utils (no global invalidation defaults)
|
|
||||||
│ └── queries/ # per-feature query hooks: keys, options, `useInvalidate<Feature>` helpers
|
|
||||||
├── cli/ # CLI subcommands (loaded lazily by src/bin.ts via citty)
|
|
||||||
│ ├── serve.ts # `./server serve` — imports the Nitro bridge on demand
|
|
||||||
│ ├── migrate.ts # `./server migrate` — applies embedded migrations via public `db.execute(sql)` + `db.transaction()`
|
|
||||||
│ ├── _serve-nitro.mjs # bridge: `import('#server')` (subpath import → .output/server/index.mjs)
|
|
||||||
│ └── _serve-nitro.d.mts # types for the bridge (build output has no .d.ts)
|
|
||||||
├── routes/
|
|
||||||
│ ├── __root.tsx # root route + RootDocument shell
|
|
||||||
│ ├── index.tsx # Todos UI
|
|
||||||
│ ├── health.ts # GET /health → "ok" (no DB)
|
|
||||||
│ └── api/
|
|
||||||
│ ├── $.ts # OpenAPI + Scalar; interceptors registered here
|
|
||||||
│ └── rpc.$.ts # RPC; interceptors registered here
|
|
||||||
├── server/
|
|
||||||
│ ├── logger.ts # LogTape `configureSync` + `getLogger` re-export — the only log entrypoint
|
|
||||||
│ ├── api/
|
|
||||||
│ │ ├── server.ts # the ONLY place to build `os`
|
|
||||||
│ │ ├── context.ts # BaseContext (add per-request fields when you add middlewares)
|
|
||||||
│ │ ├── interceptors.ts # logError (→ logger), handleValidationError
|
|
||||||
│ │ ├── types.ts # Router{Client,Inputs,Outputs} derived from Contract
|
|
||||||
│ │ ├── contracts/ # Zod schemas from Drizzle tables (barrel: contract); colocated *.test.ts
|
|
||||||
│ │ └── routers/ # os.* handlers (barrel: router) — import db directly
|
|
||||||
│ ├── db/
|
|
||||||
│ │ ├── index.ts # module-level `export const db = drizzle({...})`
|
|
||||||
│ │ ├── fields.ts # generatedFields (id/createdAt/updatedAt) + generatedFieldKeys
|
|
||||||
│ │ ├── migrations.gen.ts # AUTO-GENERATED by `bun run db:embed`; embeds ./drizzle/*.sql via `with { type: 'text' }`
|
|
||||||
│ │ ├── sql.d.ts # ambient `declare module '*.sql'` — load-bearing for `with { type: 'text' }` imports in migrations.gen.ts
|
|
||||||
│ │ └── schema/ # pgTable definitions; also put `relations()` here when adding
|
|
||||||
│ └── plugins/
|
|
||||||
│ └── shutdown.ts # SIGINT/SIGTERM → db.$client.end() with 500ms delay (prod only)
|
|
||||||
├── components/ # non-route UI primitives (PascalCase, arrow const)
|
|
||||||
├── env.ts # t3-oss env validation
|
|
||||||
├── router.tsx # QueryClient + setupRouterSsrQueryIntegration
|
|
||||||
├── styles.css # Tailwind v4 entry
|
|
||||||
└── routeTree.gen.ts # auto-generated, do not edit
|
|
||||||
scripts/
|
|
||||||
├── compile.ts # `bun build --compile` driver; resolves --target; sets minify/bytecode/sourcemap
|
|
||||||
└── embed-migrations.ts # codegen: scans ./drizzle/meta/_journal.json → src/server/db/migrations.gen.ts
|
|
||||||
drizzle/ # SQL migrations (source of truth for `db:generate`; not shipped in binary)
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Nitro plugins are wired in `vite.config.ts` (`nitro({ plugins: [...] })`), not via a Nitro config file.
|
## Code Style (TypeScript)
|
||||||
|
|
||||||
## Don'ts (specific, non-obvious)
|
### Formatting (Biome)
|
||||||
|
- **Indent**: 2 spaces | **Line endings**: LF
|
||||||
|
- **Quotes**: Single `'` | **Semicolons**: Omit (ASI)
|
||||||
|
- **Arrow parentheses**: Always `(x) => x`
|
||||||
|
|
||||||
- **Don't run `bun run db:generate` (or `drizzle-kit generate`) as an AI agent.** Migration generation is reserved for the human. Make schema changes in `src/server/db/schema/*` + `src/server/db/fields.ts`, push the code changes, and stop — the human will run `bun run db:generate` and commit the resulting `drizzle/*.sql` + `src/server/db/migrations.gen.ts` themselves. (`bun run db:embed` is also off-limits because it's the codegen tail of `db:generate`.)
|
### Imports
|
||||||
- Don't edit `routeTree.gen.ts` or `src/server/db/migrations.gen.ts`.
|
Biome auto-organizes. Order: 1) External packages → 2) Internal `@/*` aliases → 3) Type imports (`import type { ... }`)
|
||||||
- Don't eager-import anything from `.output/` in `src/bin.ts` or any module it statically imports — it starts the HTTP server as a side effect. Subcommands must be lazy via citty's `() => import(...)` thunks.
|
|
||||||
- Don't re-add an auto-migrate Nitro plugin. Migrations are an explicit deploy step via `./server migrate`.
|
|
||||||
- Don't reach into `db.dialect`/`db.session` from `migrate.ts` — they're `@internal`. The current implementation uses public `db.execute(sql)` + `db.transaction(...)` against the documented `drizzle.__drizzle_migrations` schema.
|
|
||||||
- Don't add `./drizzle/` back to the runtime image — migrations are embedded into the binary.
|
|
||||||
- Don't reintroduce `getDB/closeDB` or any "lazy DB init" pattern — that's a Cloudflare Workers shape; we deploy on Bun processes.
|
|
||||||
- Don't import `os` from `@orpc/server` in middleware/routers — always `@/server/api/server`.
|
|
||||||
- Don't import from `drizzle-orm/zod` (1.0 beta only). Use `drizzle-zod`.
|
|
||||||
- Don't use RQB v2 object syntax, `defineRelations`, or pass `relations` to `drizzle()`. All are 1.0 beta.
|
|
||||||
- Don't use `drizzle-orm/bun-sql`.
|
|
||||||
- Don't use `@/*` aliases in `drizzle.config.ts`.
|
|
||||||
- Don't commit `.env`.
|
|
||||||
|
|
||||||
## Room-to-grow rules (discipline for the first real feature)
|
```typescript
|
||||||
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { db } from '@/server/db'
|
||||||
|
import type { ReactNode } from 'react'
|
||||||
|
```
|
||||||
|
|
||||||
These keep the starter from setting bad precedents as it grows. Append, don't restructure.
|
### TypeScript Strictness
|
||||||
|
- `strict: true`, `noUncheckedIndexedAccess: true`, `noImplicitOverride: true`, `verbatimModuleSyntax: true`
|
||||||
|
- Use `@/*` path aliases (maps to `src/*`)
|
||||||
|
|
||||||
1. Per-feature client query code — keys, options, `useInvalidate<Feature>` helpers — lives in `src/client/queries/<feature>.ts`. Routes/components compose these hooks; they don't hold query keys inline.
|
### Naming Conventions
|
||||||
2. Mutation invalidation stays explicit at the mutation call site (`mutationOptions({ onSuccess })`) or in a feature query helper. Do not reintroduce global `experimental_defaults` or any implicit cache policy.
|
| Type | Convention | Example |
|
||||||
3. Client state defaults to route/component local state. Create a store (zustand or equivalent) only when state is shared across routes or needs persistence.
|
|------|------------|---------|
|
||||||
4. Middlewares (`src/server/api/middlewares/<name>.middleware.ts`) derive request-scoped context or gate access. They do NOT orchestrate business flow, hold side effects, or replace handlers/services.
|
| Files (utils) | kebab-case | `auth-utils.ts` |
|
||||||
5. Interceptors (`src/server/api/interceptors.ts`) do cross-cutting error logging, transport normalization, and validation rewrites. They do NOT read business data.
|
| Files (components) | PascalCase | `UserProfile.tsx` |
|
||||||
6. One file per Drizzle table. Relations live in the same file and are exported as `<entity>Relations`. No global `relations.ts`.
|
| Components | PascalCase arrow | `const Button = () => {}` |
|
||||||
7. One router file per feature (`routers/<feature>.router.ts`). Only introduce `routers/<domain>/index.ts` when a domain grows past ~5 router files or needs shared domain helpers.
|
| Functions | camelCase | `getUserById` |
|
||||||
8. All server-side logging goes through `getLogger([...])` from `@/server/logger`. Use a hierarchical category (`['api']`, `['db']`, `['cli', 'migrate']`, etc.) — these become dot-paths in JSON output and let you filter by prefix. Use the `{name}` placeholder + properties form, not string interpolation. `console.*` is forbidden in business code.
|
| Constants | UPPER_SNAKE | `MAX_RETRIES` |
|
||||||
9. CLI subcommand modules keep top-level imports to `citty` + Node built-ins. Env, db, and server code are `await import(...)`-ed inside `run()` (see `src/bin.ts` comment for why).
|
| Types/Interfaces | PascalCase | `UserProfile` |
|
||||||
10. Every new business feature ships with at least one `bun test` covering a contract schema, a pure helper, or a router behavior.
|
|
||||||
|
### React Patterns
|
||||||
|
- Components: arrow functions (enforced by Biome)
|
||||||
|
- Routes: TanStack Router file conventions (`export const Route = createFileRoute(...)`)
|
||||||
|
- Data fetching: `useSuspenseQuery(orpc.feature.list.queryOptions())`
|
||||||
|
- Let React Compiler handle memoization (no manual `useMemo`/`useCallback`)
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
- Use `try-catch` for async operations; throw descriptive errors
|
||||||
|
- ORPC: Use `ORPCError` with proper codes (`NOT_FOUND`, `INPUT_VALIDATION_FAILED`)
|
||||||
|
- Never use empty catch blocks
|
||||||
|
|
||||||
|
## Database (Drizzle ORM v1 beta + postgres-js)
|
||||||
|
|
||||||
|
- **ORM**: Drizzle ORM `1.0.0-beta` (RQBv2)
|
||||||
|
- **Driver**: `drizzle-orm/postgres-js` (NOT `bun-sql`)
|
||||||
|
- **Validation**: `drizzle-orm/zod` (built-in, NOT separate `drizzle-zod` package)
|
||||||
|
- **Relations**: Defined via `defineRelations()` in `src/server/db/relations.ts` (contains schema info, so `drizzle()` only needs `{ relations }`)
|
||||||
|
- **Query style**: RQBv2 object syntax (`orderBy: { createdAt: 'desc' }`, `where: { id: 1 }`)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export const myTable = pgTable('my_table', {
|
||||||
|
id: uuid().primaryKey().default(sql`uuidv7()`),
|
||||||
|
name: text().notNull(),
|
||||||
|
createdAt: timestamp({ withTimezone: true }).notNull().defaultNow(),
|
||||||
|
updatedAt: timestamp({ withTimezone: true }).notNull().defaultNow().$onUpdateFn(() => new Date()),
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Environment Variables
|
||||||
|
|
||||||
|
- Use `@t3-oss/env-core` with Zod validation in `src/env.ts`
|
||||||
|
- Server vars: no prefix | Client vars: `VITE_` prefix required
|
||||||
|
- Never commit `.env` files
|
||||||
|
|
||||||
|
## Dependency Management
|
||||||
|
|
||||||
|
- All versions centralized in root `package.json` `catalog` field
|
||||||
|
- Workspace packages use `"catalog:"` — never hardcode versions
|
||||||
|
- Internal packages use `"workspace:*"` references
|
||||||
|
|
||||||
|
## Development Principles
|
||||||
|
|
||||||
|
> **These principles apply to ALL code changes. Agents MUST follow them on every task.**
|
||||||
|
|
||||||
|
1. **No backward compatibility** — This project is in rapid iteration. Always use the latest API and patterns. Never keep deprecated code paths or old API fallbacks "just in case".
|
||||||
|
2. **Always sync documentation** — When code changes, immediately update all related documentation (`AGENTS.md`, `README.md`, inline code examples). Code and docs must never drift apart. This includes updating code snippets in docs when imports, APIs, or patterns change.
|
||||||
|
3. **Forward-only migration** — When upgrading dependencies, fully adopt the new API. Don't mix old and new patterns in the same codebase.
|
||||||
|
|
||||||
|
## Critical Rules
|
||||||
|
|
||||||
|
**DO:**
|
||||||
|
- Run `bun run fix` before committing
|
||||||
|
- Use `@/*` path aliases (not relative imports)
|
||||||
|
- Include `createdAt`/`updatedAt` on all tables
|
||||||
|
- Use `catalog:` for dependency versions
|
||||||
|
- Update `AGENTS.md` and other docs whenever code patterns change
|
||||||
|
|
||||||
|
**DON'T:**
|
||||||
|
- Use `npm`, `npx`, `node`, `yarn`, `pnpm` — always use `bun` / `bunx`
|
||||||
|
- Edit `src/routeTree.gen.ts` (auto-generated)
|
||||||
|
- Use `as any`, `@ts-ignore`, `@ts-expect-error`
|
||||||
|
- Commit `.env` files
|
||||||
|
- Use empty catch blocks `catch(e) {}`
|
||||||
|
- Hardcode dependency versions in workspace packages
|
||||||
|
- Leave docs out of sync with code changes
|
||||||
|
|
||||||
|
## Git Workflow
|
||||||
|
|
||||||
|
1. Make changes following style guide
|
||||||
|
2. `bun run fix` - auto-format and lint
|
||||||
|
3. `bun run typecheck` - verify types
|
||||||
|
4. `bun run dev` - test locally
|
||||||
|
5. Commit with descriptive message
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
.
|
||||||
|
├── apps/
|
||||||
|
│ ├── server/ # TanStack Start fullstack app
|
||||||
|
│ │ ├── src/
|
||||||
|
│ │ │ ├── client/ # ORPC client + TanStack Query utils
|
||||||
|
│ │ │ ├── components/
|
||||||
|
│ │ │ ├── routes/ # File-based routing
|
||||||
|
│ │ │ └── server/ # API layer + database
|
||||||
|
│ │ │ ├── api/ # ORPC contracts, routers, middlewares
|
||||||
|
│ │ │ └── db/ # Drizzle schema
|
||||||
|
│ │ └── AGENTS.md
|
||||||
|
│ └── desktop/ # Electron desktop shell
|
||||||
|
│ ├── src/
|
||||||
|
│ │ ├── main/
|
||||||
|
│ │ │ └── index.ts # Main process entry
|
||||||
|
│ │ └── preload/
|
||||||
|
│ │ └── index.ts # Preload script
|
||||||
|
│ ├── electron.vite.config.ts
|
||||||
|
│ ├── electron-builder.yml # Packaging config
|
||||||
|
│ └── AGENTS.md
|
||||||
|
├── packages/
|
||||||
|
│ └── tsconfig/ # Shared TS configs
|
||||||
|
├── biome.json # Linting/formatting config
|
||||||
|
├── turbo.json # Turbo task orchestration
|
||||||
|
└── package.json # Workspace root + dependency catalog
|
||||||
|
```
|
||||||
|
|
||||||
|
## See Also
|
||||||
|
|
||||||
|
- `apps/server/AGENTS.md` - Detailed TanStack Start / ORPC patterns
|
||||||
|
- `apps/desktop/AGENTS.md` - Electron desktop development guide
|
||||||
|
|||||||
-21
@@ -1,21 +0,0 @@
|
|||||||
FROM oven/bun:1.3.13 AS build
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
COPY package.json bun.lock ./
|
|
||||||
RUN bun install --frozen-lockfile
|
|
||||||
|
|
||||||
COPY . .
|
|
||||||
RUN bun run build \
|
|
||||||
&& bun run compile \
|
|
||||||
&& mv out/server-* out/server
|
|
||||||
|
|
||||||
FROM gcr.io/distroless/cc-debian13:nonroot
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
COPY --from=build --chown=nonroot:nonroot /app/out/server ./server
|
|
||||||
|
|
||||||
ENV HOST=0.0.0.0
|
|
||||||
EXPOSE 3000
|
|
||||||
|
|
||||||
CMD ["./server"]
|
|
||||||
@@ -1,112 +0,0 @@
|
|||||||
# fullstack-starter
|
|
||||||
|
|
||||||
一个**单二进制**的全栈应用 starter——`bun run compile` 出来的 `./server` 文件就是你要部署的全部产物,自带 HTTP 服务、SSR、API、嵌入式 SQL 迁移,运行时不依赖 Node、不依赖源码、不依赖外部 migration 目录。
|
|
||||||
|
|
||||||
技术栈:Bun · TanStack Start (React 19 SSR) · ORPC(契约优先 API)· Drizzle ORM · PostgreSQL 18+ · Tailwind v4 · Biome。
|
|
||||||
|
|
||||||
## 为什么用这个
|
|
||||||
|
|
||||||
- **部署最简**:发布只拷一个二进制文件。先 `./server migrate` 再 `./server`,完事。
|
|
||||||
- **契约优先**:在 `*.contract.ts` 用 Zod 定义一次,前端、后端、OpenAPI 文档自动同步。
|
|
||||||
- **类型严格**:TypeScript strict,杜绝 `any` / `@ts-ignore` / `as any` 等类型逃逸。
|
|
||||||
- **开箱可跑**:路径别名、文件路由、ORPC 接线、Tailwind、热重载、错误页全部预接好。
|
|
||||||
|
|
||||||
## 快速开始
|
|
||||||
|
|
||||||
> **需要 PostgreSQL 18+**——schema 用 PG 原生的 `uuidv7()` 生成主键(`compose.yaml` 已锁 `postgres:18-alpine`)。要兼容更老的 PG,把 `src/server/db/fields.ts` 里的 `default(sql\`uuidv7()\`)` 换成 `$defaultFn(() => Bun.randomUUIDv7())`,再跑 `bun run db:generate`。
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cp .env.example .env # 把里面的 DATABASE_URL 改成你的 Postgres
|
|
||||||
bun install
|
|
||||||
bun run db:push # 开发期:把 schema 直接同步到 DB(不写 migration 文件)
|
|
||||||
bun run dev # http://localhost:3000
|
|
||||||
```
|
|
||||||
|
|
||||||
打开浏览器:
|
|
||||||
|
|
||||||
- `http://localhost:3000/` — Todo 示例页
|
|
||||||
- `http://localhost:3000/api/docs` — Scalar 渲染的 API 文档
|
|
||||||
|
|
||||||
## 目录结构(你需要关心的部分)
|
|
||||||
|
|
||||||
```
|
|
||||||
src/
|
|
||||||
├── routes/ # 文件路由:页面 + API 端点
|
|
||||||
├── server/
|
|
||||||
│ ├── api/
|
|
||||||
│ │ ├── contracts/ # Zod 契约(client / server 共享)
|
|
||||||
│ │ └── routers/ # 业务实现
|
|
||||||
│ └── db/ # Drizzle schema + 嵌入式 migrations
|
|
||||||
├── client/ # 前端 hooks、ORPC 客户端
|
|
||||||
└── components/ # UI 组件
|
|
||||||
```
|
|
||||||
|
|
||||||
## 加一个功能(以 `post` 为例)
|
|
||||||
|
|
||||||
每一步都很短,按顺序填即可:
|
|
||||||
|
|
||||||
1. **建表**:`src/server/db/schema/post.ts` 定义 `postTable`,记得展开 `...generatedFields`(自动注入 `id` / `createdAt` / `updatedAt`)。
|
|
||||||
2. **导出表**:在 `src/server/db/schema/index.ts` 加 `export * from './post'`。
|
|
||||||
3. **写契约**:`src/server/api/contracts/post.contract.ts` 用 `drizzle-zod` 从表派生 Zod schema。
|
|
||||||
4. **挂契约**:在 `src/server/api/contracts/index.ts` 把 `post` 加进 `contract` 对象。
|
|
||||||
5. **写实现**:`src/server/api/routers/post.router.ts` 实现 `os.post.*.handler(...)`。
|
|
||||||
6. **挂路由**:在 `src/server/api/routers/index.ts` 把 `post` 加进 `router` 对象。
|
|
||||||
7. **写前端 hook**:`src/client/queries/post.ts` 导出 `useInvalidatePosts` 等失效辅助。
|
|
||||||
8. **写页面**:`src/routes/<page>.tsx` 用 `useSuspenseQuery` 读、`mutate` 写;mutation 的 `onSuccess` 调用第 7 步的 helper。
|
|
||||||
9. **生成 migration**:`bun run db:generate` 把 SQL 写到 `./drizzle/` 并嵌入二进制。
|
|
||||||
|
|
||||||
完工。`bun run dev` 已自动热重载。
|
|
||||||
|
|
||||||
## 部署
|
|
||||||
|
|
||||||
**永远先 migrate 再 serve**。Migration 已嵌入二进制;部署只发一个 `./server` 文件。
|
|
||||||
|
|
||||||
```bash
|
|
||||||
./server migrate # 应用嵌入式 migration(用 $DATABASE_URL)
|
|
||||||
./server # 启动 HTTP 服务(默认子命令)
|
|
||||||
./server --help # 列出所有子命令
|
|
||||||
```
|
|
||||||
|
|
||||||
仓库自带 `compose.yaml`(一次性 `migrate` 服务先跑完,再启动 `app`):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose up --build
|
|
||||||
```
|
|
||||||
|
|
||||||
Kubernetes 上:把 `./server migrate` 放进 initContainer 或 Helm `pre-upgrade` Job,主容器跑 `./server`。
|
|
||||||
|
|
||||||
## 脚本一览
|
|
||||||
|
|
||||||
| 命令 | 作用 |
|
|
||||||
| --- | --- |
|
|
||||||
| `bun run dev` | Vite 开发服务器(默认端口 3000) |
|
|
||||||
| `bun run build` | 构建到 `.output/`(`bun run compile` 会用到) |
|
|
||||||
| `bun run compile` | 生成单二进制 `out/server-<target>` |
|
|
||||||
| `bun run typecheck` | TypeScript 类型检查 |
|
|
||||||
| `bun run test` | 运行所有 `*.test.ts` |
|
|
||||||
| `bun run fix` | Biome 格式化 + lint + 整理 imports |
|
|
||||||
| `bun run db:push` | 开发期:直接同步 schema 到 DB(不写 migration 文件) |
|
|
||||||
| `bun run db:generate` | 写 SQL migration 到 `./drizzle/` 并嵌入二进制 |
|
|
||||||
| `bun run db:embed` | 仅重生 `migrations.gen.ts`(手改了 `./drizzle/*.sql` 后用) |
|
|
||||||
| `bun run db:migrate` | 通过 drizzle-kit 在本地应用 migration(开发便利) |
|
|
||||||
| `bun run db:studio` | Drizzle Studio(可视化 DB) |
|
|
||||||
|
|
||||||
跨平台编译:`bun run compile:{linux,darwin,windows}[:arch]`。
|
|
||||||
|
|
||||||
## 端点
|
|
||||||
|
|
||||||
| 路径 | 用途 |
|
|
||||||
| --- | --- |
|
|
||||||
| `/` | Todo 示例 UI |
|
|
||||||
| `/health` | 存活探针(不查 DB,纯文本 `ok`) |
|
|
||||||
| `/api/rpc` | ORPC RPC 端点(client 直连) |
|
|
||||||
| `/api/docs` | Scalar 渲染的 API 文档 |
|
|
||||||
| `/api/spec.json` | OpenAPI spec |
|
|
||||||
|
|
||||||
## 提交前
|
|
||||||
|
|
||||||
```bash
|
|
||||||
bun run fix && bun run typecheck && bun run test
|
|
||||||
```
|
|
||||||
|
|
||||||
没有 CI、没有 pre-commit hook——上面三条由你自觉跑。
|
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
# electron-vite build output
|
||||||
|
out/
|
||||||
|
dist/
|
||||||
@@ -0,0 +1,95 @@
|
|||||||
|
# AGENTS.md - Desktop App Guidelines
|
||||||
|
|
||||||
|
Thin Electron shell hosting the fullstack server app.
|
||||||
|
|
||||||
|
## Tech Stack
|
||||||
|
|
||||||
|
> **⚠️ This project uses Bun as the package manager. Runtime is Electron (Node.js). Always use `bun run <script>` (not `bun <script>`) to avoid conflicts with Bun built-in subcommands. Never use `npm`, `npx`, `yarn`, or `pnpm`.**
|
||||||
|
|
||||||
|
- **Type**: Electron desktop shell
|
||||||
|
- **Design**: Server-driven desktop (thin native window hosting web app)
|
||||||
|
- **Runtime**: Electron (Main/Renderer) + Sidecar server binary (Bun-compiled)
|
||||||
|
- **Build Tool**: electron-vite (Vite-based, handles main + preload builds)
|
||||||
|
- **Packager**: electron-builder (installers, signing, auto-update)
|
||||||
|
- **Orchestration**: Turborepo
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
- **Server-driven design**: The desktop app is a "thin" native shell. It does not contain UI or business logic; it opens a BrowserWindow pointing to the `apps/server` TanStack Start application.
|
||||||
|
- **Dev mode**: Opens a BrowserWindow pointing to `localhost:3000`. Requires `apps/server` to be running separately (Turbo handles this).
|
||||||
|
- **Production mode**: Spawns a compiled server binary (from `resources/`) as a sidecar process, waits for readiness, then loads its URL.
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bun run dev # electron-vite dev (requires server dev running)
|
||||||
|
bun run build # electron-vite build (main + preload)
|
||||||
|
bun run dist # Build + package for current platform
|
||||||
|
bun run dist:linux # Build + package for Linux (x64 + arm64)
|
||||||
|
bun run dist:linux:x64 # Build + package for Linux x64
|
||||||
|
bun run dist:linux:arm64 # Build + package for Linux arm64
|
||||||
|
bun run dist:mac # Build + package for macOS (arm64 + x64)
|
||||||
|
bun run dist:mac:arm64 # Build + package for macOS arm64
|
||||||
|
bun run dist:mac:x64 # Build + package for macOS x64
|
||||||
|
bun run dist:win # Build + package for Windows x64
|
||||||
|
bun run fix # Biome auto-fix
|
||||||
|
bun run typecheck # TypeScript check
|
||||||
|
```
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
.
|
||||||
|
├── src/
|
||||||
|
│ ├── main/
|
||||||
|
│ │ └── index.ts # Main process (server lifecycle + BrowserWindow)
|
||||||
|
│ └── preload/
|
||||||
|
│ └── index.ts # Preload script (security isolation)
|
||||||
|
├── resources/ # Sidecar binaries (gitignored, copied from server build)
|
||||||
|
├── out/ # electron-vite build output (gitignored)
|
||||||
|
├── electron.vite.config.ts
|
||||||
|
├── electron-builder.yml # Packaging configuration
|
||||||
|
├── package.json
|
||||||
|
├── turbo.json
|
||||||
|
└── AGENTS.md
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development Workflow
|
||||||
|
|
||||||
|
1. **Start server**: `bun run dev` in `apps/server` (or use root `bun run dev` via Turbo).
|
||||||
|
2. **Start desktop**: `bun run dev` in `apps/desktop`.
|
||||||
|
3. **Connection**: Main process polls `localhost:3000` until responsive, then opens BrowserWindow.
|
||||||
|
|
||||||
|
## Production Build Workflow
|
||||||
|
|
||||||
|
From monorepo root, run `bun run dist` to execute the full pipeline automatically (via Turbo task dependencies):
|
||||||
|
|
||||||
|
1. **Build server**: `apps/server` → `vite build` → `.output/`
|
||||||
|
2. **Compile server**: `apps/server` → `bun compile.ts --target ...` → `out/server-{os}-{arch}`
|
||||||
|
3. **Package desktop**: `apps/desktop` → `electron-vite build` + `electron-builder` → distributable
|
||||||
|
|
||||||
|
The `electron-builder.yml` `extraResources` config reads binaries directly from `../server/out/`, no manual copy needed.
|
||||||
|
|
||||||
|
To build for a specific platform explicitly, use `bun run dist:linux` / `bun run dist:mac` / `bun run dist:win` in `apps/desktop`.
|
||||||
|
For single-arch output, use `bun run dist:linux:x64`, `bun run dist:linux:arm64`, `bun run dist:mac:x64`, or `bun run dist:mac:arm64`.
|
||||||
|
|
||||||
|
## Development Principles
|
||||||
|
|
||||||
|
> **These principles apply to ALL code changes. Agents MUST follow them on every task.**
|
||||||
|
|
||||||
|
1. **No backward compatibility** — This project is in rapid iteration. Always use the latest API and patterns. Never keep deprecated code paths or old API fallbacks.
|
||||||
|
2. **Always sync documentation** — When code changes, immediately update all related documentation (`AGENTS.md`, `README.md`, inline code examples). Code and docs must never drift apart.
|
||||||
|
3. **Forward-only migration** — When upgrading dependencies, fully adopt the new API. Don't mix old and new patterns.
|
||||||
|
|
||||||
|
## Critical Rules
|
||||||
|
|
||||||
|
**DO:**
|
||||||
|
- Use arrow functions for all utility functions.
|
||||||
|
- Keep the desktop app as a thin shell — no UI or business logic.
|
||||||
|
- Use `catalog:` for all dependency versions in `package.json`.
|
||||||
|
|
||||||
|
**DON'T:**
|
||||||
|
- Use `npm`, `npx`, `yarn`, or `pnpm`. Use `bun` for package management.
|
||||||
|
- Include UI components or business logic in the desktop app.
|
||||||
|
- Use `as any` or `@ts-ignore`.
|
||||||
|
- Leave docs out of sync with code changes.
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"$schema": "../../node_modules/@biomejs/biome/configuration_schema.json",
|
||||||
|
"extends": "//",
|
||||||
|
"css": {
|
||||||
|
"parser": {
|
||||||
|
"tailwindDirectives": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
Binary file not shown.
|
After Width: | Height: | Size: 83 KiB |
@@ -0,0 +1,48 @@
|
|||||||
|
# yaml-language-server: $schema=https://raw.githubusercontent.com/electron-userland/electron-builder/refs/heads/master/packages/app-builder-lib/scheme.json
|
||||||
|
appId: com.furtherverse.desktop
|
||||||
|
productName: Furtherverse
|
||||||
|
executableName: furtherverse
|
||||||
|
|
||||||
|
npmRebuild: false
|
||||||
|
asarUnpack:
|
||||||
|
- resources/**
|
||||||
|
|
||||||
|
files:
|
||||||
|
- "!**/.vscode/*"
|
||||||
|
- "!src/*"
|
||||||
|
- "!electron.vite.config.{js,ts,mjs,cjs}"
|
||||||
|
- "!{.env,.env.*,bun.lock}"
|
||||||
|
- "!{tsconfig.json,tsconfig.node.json}"
|
||||||
|
- "!{AGENTS.md,README.md,CHANGELOG.md}"
|
||||||
|
|
||||||
|
# macOS
|
||||||
|
mac:
|
||||||
|
target:
|
||||||
|
- dmg
|
||||||
|
category: public.app-category.productivity
|
||||||
|
extraResources:
|
||||||
|
- from: ../server/out/server-darwin-${arch}
|
||||||
|
to: server
|
||||||
|
dmg:
|
||||||
|
artifactName: ${productName}-${version}-${os}-${arch}.${ext}
|
||||||
|
|
||||||
|
# Windows
|
||||||
|
win:
|
||||||
|
target:
|
||||||
|
- portable
|
||||||
|
extraResources:
|
||||||
|
- from: ../server/out/server-windows-${arch}.exe
|
||||||
|
to: server.exe
|
||||||
|
portable:
|
||||||
|
artifactName: ${productName}-${version}-${os}-${arch}-Portable.${ext}
|
||||||
|
|
||||||
|
# Linux
|
||||||
|
linux:
|
||||||
|
target:
|
||||||
|
- AppImage
|
||||||
|
category: Utility
|
||||||
|
extraResources:
|
||||||
|
- from: ../server/out/server-linux-${arch}
|
||||||
|
to: server
|
||||||
|
appImage:
|
||||||
|
artifactName: ${productName}-${version}-${os}-${arch}.${ext}
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
import tailwindcss from '@tailwindcss/vite'
|
||||||
|
import react from '@vitejs/plugin-react'
|
||||||
|
import { defineConfig } from 'electron-vite'
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
main: {},
|
||||||
|
preload: {},
|
||||||
|
renderer: {
|
||||||
|
plugins: [react(), tailwindcss()],
|
||||||
|
},
|
||||||
|
})
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
"name": "@furtherverse/desktop",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"private": true,
|
||||||
|
"main": "out/main/index.js",
|
||||||
|
"scripts": {
|
||||||
|
"build": "electron-vite build",
|
||||||
|
"dev": "electron-vite dev --watch",
|
||||||
|
"dist": "electron-builder",
|
||||||
|
"dist:linux": "bun run dist:linux:x64 && bun run dist:linux:arm64",
|
||||||
|
"dist:linux:arm64": "electron-builder --linux --arm64",
|
||||||
|
"dist:linux:x64": "electron-builder --linux --x64",
|
||||||
|
"dist:mac": "bun run dist:mac:arm64 && bun run dist:mac:x64",
|
||||||
|
"dist:mac:arm64": "electron-builder --mac --arm64",
|
||||||
|
"dist:mac:x64": "electron-builder --mac --x64",
|
||||||
|
"dist:win": "electron-builder --win --x64",
|
||||||
|
"fix": "biome check --write",
|
||||||
|
"typecheck": "tsc -b"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"motion": "catalog:",
|
||||||
|
"react": "catalog:",
|
||||||
|
"react-dom": "catalog:",
|
||||||
|
"tree-kill": "catalog:"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@furtherverse/tsconfig": "workspace:*",
|
||||||
|
"@tailwindcss/vite": "catalog:",
|
||||||
|
"@types/node": "catalog:",
|
||||||
|
"@vitejs/plugin-react": "catalog:",
|
||||||
|
"electron": "catalog:",
|
||||||
|
"electron-builder": "catalog:",
|
||||||
|
"electron-vite": "catalog:",
|
||||||
|
"tailwindcss": "catalog:",
|
||||||
|
"vite": "catalog:"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,198 @@
|
|||||||
|
import { join } from 'node:path'
|
||||||
|
import { app, BrowserWindow, dialog, session, shell } from 'electron'
|
||||||
|
import { createSidecarRuntime } from './sidecar'
|
||||||
|
|
||||||
|
const DEV_SERVER_URL = 'http://localhost:3000'
|
||||||
|
const SAFE_EXTERNAL_PROTOCOLS = new Set(['https:', 'http:', 'mailto:'])
|
||||||
|
|
||||||
|
let mainWindow: BrowserWindow | null = null
|
||||||
|
let windowCreationPromise: Promise<void> | null = null
|
||||||
|
let isQuitting = false
|
||||||
|
|
||||||
|
const showErrorAndQuit = (title: string, detail: string) => {
|
||||||
|
if (isQuitting) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
dialog.showErrorBox(title, detail)
|
||||||
|
app.quit()
|
||||||
|
}
|
||||||
|
|
||||||
|
const sidecar = createSidecarRuntime({
|
||||||
|
devServerUrl: DEV_SERVER_URL,
|
||||||
|
isPackaged: app.isPackaged,
|
||||||
|
resourcesPath: process.resourcesPath,
|
||||||
|
isQuitting: () => isQuitting,
|
||||||
|
onUnexpectedStop: (detail) => {
|
||||||
|
showErrorAndQuit('Service Stopped', detail)
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const toErrorMessage = (error: unknown): string => (error instanceof Error ? error.message : String(error))
|
||||||
|
|
||||||
|
const canOpenExternally = (url: string): boolean => {
|
||||||
|
try {
|
||||||
|
const parsed = new URL(url)
|
||||||
|
return SAFE_EXTERNAL_PROTOCOLS.has(parsed.protocol)
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const loadSplash = async (windowRef: BrowserWindow) => {
|
||||||
|
if (process.env.ELECTRON_RENDERER_URL) {
|
||||||
|
await windowRef.loadURL(process.env.ELECTRON_RENDERER_URL)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
await windowRef.loadFile(join(__dirname, '../renderer/index.html'))
|
||||||
|
}
|
||||||
|
|
||||||
|
const createWindow = async () => {
|
||||||
|
if (mainWindow && !mainWindow.isDestroyed()) {
|
||||||
|
mainWindow.focus()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const windowRef = new BrowserWindow({
|
||||||
|
width: 1200,
|
||||||
|
height: 800,
|
||||||
|
show: false,
|
||||||
|
webPreferences: {
|
||||||
|
preload: join(__dirname, '../preload/index.js'),
|
||||||
|
sandbox: true,
|
||||||
|
contextIsolation: true,
|
||||||
|
nodeIntegration: false,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
mainWindow = windowRef
|
||||||
|
|
||||||
|
windowRef.webContents.setWindowOpenHandler(({ url }) => {
|
||||||
|
if (!canOpenExternally(url)) {
|
||||||
|
if (!app.isPackaged) {
|
||||||
|
console.warn(`Blocked external URL: ${url}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return { action: 'deny' }
|
||||||
|
}
|
||||||
|
|
||||||
|
void shell.openExternal(url)
|
||||||
|
return { action: 'deny' }
|
||||||
|
})
|
||||||
|
|
||||||
|
windowRef.webContents.on('will-navigate', (event, url) => {
|
||||||
|
const allowed = [DEV_SERVER_URL, sidecar.lastResolvedUrl].filter((v): v is string => v != null)
|
||||||
|
const isAllowed = allowed.some((origin) => url.startsWith(origin))
|
||||||
|
|
||||||
|
if (!isAllowed) {
|
||||||
|
event.preventDefault()
|
||||||
|
|
||||||
|
if (canOpenExternally(url)) {
|
||||||
|
void shell.openExternal(url)
|
||||||
|
} else if (!app.isPackaged) {
|
||||||
|
console.warn(`Blocked navigation to: ${url}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
windowRef.on('closed', () => {
|
||||||
|
if (mainWindow === windowRef) {
|
||||||
|
mainWindow = null
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
try {
|
||||||
|
await loadSplash(windowRef)
|
||||||
|
} catch (error) {
|
||||||
|
if (mainWindow === windowRef) {
|
||||||
|
mainWindow = null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!windowRef.isDestroyed()) {
|
||||||
|
windowRef.destroy()
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!windowRef.isDestroyed()) {
|
||||||
|
windowRef.show()
|
||||||
|
}
|
||||||
|
|
||||||
|
const targetUrl = await sidecar.resolveUrl()
|
||||||
|
if (isQuitting || windowRef.isDestroyed()) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await windowRef.loadURL(targetUrl)
|
||||||
|
} catch (error) {
|
||||||
|
if (mainWindow === windowRef) {
|
||||||
|
mainWindow = null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!windowRef.isDestroyed()) {
|
||||||
|
windowRef.destroy()
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const ensureWindow = async () => {
|
||||||
|
if (windowCreationPromise) {
|
||||||
|
return windowCreationPromise
|
||||||
|
}
|
||||||
|
|
||||||
|
windowCreationPromise = createWindow().finally(() => {
|
||||||
|
windowCreationPromise = null
|
||||||
|
})
|
||||||
|
|
||||||
|
return windowCreationPromise
|
||||||
|
}
|
||||||
|
|
||||||
|
const beginQuit = () => {
|
||||||
|
isQuitting = true
|
||||||
|
sidecar.stop()
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleWindowCreationError = (error: unknown, context: string) => {
|
||||||
|
console.error(`${context}:`, error)
|
||||||
|
showErrorAndQuit(
|
||||||
|
"App Couldn't Start",
|
||||||
|
app.isPackaged
|
||||||
|
? 'A required component failed to start. Please reinstall the app.'
|
||||||
|
: `${context}: ${toErrorMessage(error)}`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
app
|
||||||
|
.whenReady()
|
||||||
|
.then(() => {
|
||||||
|
session.defaultSession.setPermissionRequestHandler((_webContents, _permission, callback) => {
|
||||||
|
callback(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
return ensureWindow()
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
handleWindowCreationError(error, 'Failed to create window')
|
||||||
|
})
|
||||||
|
|
||||||
|
app.on('window-all-closed', () => {
|
||||||
|
if (process.platform !== 'darwin') {
|
||||||
|
app.quit()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
app.on('activate', () => {
|
||||||
|
if (isQuitting || BrowserWindow.getAllWindows().length > 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ensureWindow().catch((error) => {
|
||||||
|
handleWindowCreationError(error, 'Failed to re-create window')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
app.on('before-quit', beginQuit)
|
||||||
@@ -0,0 +1,256 @@
|
|||||||
|
import { type ChildProcess, spawn } from 'node:child_process'
|
||||||
|
import { existsSync } from 'node:fs'
|
||||||
|
import { createServer } from 'node:net'
|
||||||
|
import { join } from 'node:path'
|
||||||
|
import killProcessTree from 'tree-kill'
|
||||||
|
|
||||||
|
const SERVER_HOST = '127.0.0.1'
|
||||||
|
const SERVER_READY_TIMEOUT_MS = 10_000
|
||||||
|
const SERVER_REQUEST_TIMEOUT_MS = 1_500
|
||||||
|
const SERVER_POLL_INTERVAL_MS = 250
|
||||||
|
const SERVER_PROBE_PATHS = ['/api/health', '/']
|
||||||
|
|
||||||
|
type SidecarState = {
|
||||||
|
process: ChildProcess | null
|
||||||
|
startup: Promise<string> | null
|
||||||
|
url: string | null
|
||||||
|
}
|
||||||
|
|
||||||
|
type SidecarRuntimeOptions = {
|
||||||
|
devServerUrl: string
|
||||||
|
isPackaged: boolean
|
||||||
|
resourcesPath: string
|
||||||
|
isQuitting: () => boolean
|
||||||
|
onUnexpectedStop: (detail: string) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
type SidecarRuntime = {
|
||||||
|
resolveUrl: () => Promise<string>
|
||||||
|
stop: () => void
|
||||||
|
lastResolvedUrl: string | null
|
||||||
|
}
|
||||||
|
|
||||||
|
const sleep = (ms: number): Promise<void> => new Promise((resolve) => setTimeout(resolve, ms))
|
||||||
|
|
||||||
|
const isProcessAlive = (processToCheck: ChildProcess | null): processToCheck is ChildProcess => {
|
||||||
|
if (!processToCheck || !processToCheck.pid) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return processToCheck.exitCode === null && !processToCheck.killed
|
||||||
|
}
|
||||||
|
|
||||||
|
const getAvailablePort = (): Promise<number> =>
|
||||||
|
new Promise((resolve, reject) => {
|
||||||
|
const server = createServer()
|
||||||
|
server.listen(0, () => {
|
||||||
|
const addr = server.address()
|
||||||
|
if (!addr || typeof addr === 'string') {
|
||||||
|
server.close()
|
||||||
|
reject(new Error('Failed to resolve port'))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
server.close(() => resolve(addr.port))
|
||||||
|
})
|
||||||
|
server.on('error', reject)
|
||||||
|
})
|
||||||
|
|
||||||
|
const isServerReady = async (url: string): Promise<boolean> => {
|
||||||
|
for (const probePath of SERVER_PROBE_PATHS) {
|
||||||
|
try {
|
||||||
|
const probeUrl = new URL(probePath, `${url}/`)
|
||||||
|
const response = await fetch(probeUrl, {
|
||||||
|
method: 'GET',
|
||||||
|
cache: 'no-store',
|
||||||
|
signal: AbortSignal.timeout(SERVER_REQUEST_TIMEOUT_MS),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (response.status < 500) {
|
||||||
|
if (probePath === '/api/health' && response.status === 404) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Expected: probe request fails while server is still starting up
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const waitForServer = async (url: string, isQuitting: () => boolean, processRef?: ChildProcess): Promise<boolean> => {
|
||||||
|
const start = Date.now()
|
||||||
|
while (Date.now() - start < SERVER_READY_TIMEOUT_MS && !isQuitting()) {
|
||||||
|
if (processRef && processRef.exitCode !== null) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await isServerReady(url)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
await sleep(SERVER_POLL_INTERVAL_MS)
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolveBinaryPath = (resourcesPath: string): string => {
|
||||||
|
const binaryName = process.platform === 'win32' ? 'server.exe' : 'server'
|
||||||
|
return join(resourcesPath, binaryName)
|
||||||
|
}
|
||||||
|
|
||||||
|
const formatUnexpectedStopMessage = (
|
||||||
|
isPackaged: boolean,
|
||||||
|
code: number | null,
|
||||||
|
signal: NodeJS.Signals | null,
|
||||||
|
): string => {
|
||||||
|
if (isPackaged) {
|
||||||
|
return 'The background service stopped unexpectedly. Please restart the app.'
|
||||||
|
}
|
||||||
|
|
||||||
|
return `Server process exited unexpectedly (code ${code ?? 'unknown'}, signal ${signal ?? 'none'}).`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Creates the runtime that manages the sidecar server process.
 *
 * In packaged builds it spawns the bundled server binary on a free port and
 * waits for it to become reachable; in dev builds it merely waits for the
 * external dev server at `options.devServerUrl`. Unexpected process death
 * after the server was ready is reported via `options.onUnexpectedStop`.
 */
export const createSidecarRuntime = (options: SidecarRuntimeOptions): SidecarRuntime => {
  // Mutable runtime state shared by all closures below.
  const state: SidecarState = {
    process: null,   // the spawned child process, if any
    startup: null,   // in-flight startup promise, used to dedupe concurrent starts
    url: null,       // URL of the last server confirmed ready
  }

  // Clears process/url state. When `processRef` is given, only resets if it is
  // still the current process — a stale handler must not clobber a newer process.
  const resetState = (processRef?: ChildProcess) => {
    if (processRef && state.process !== processRef) {
      return
    }

    state.process = null
    state.url = null
  }

  // Stops the managed process (and its children) with SIGTERM. Safe to call
  // when nothing is running or the process already exited.
  const stop = () => {
    const runningServer = state.process
    resetState()

    if (!runningServer?.pid || runningServer.exitCode !== null) {
      return
    }

    killProcessTree(runningServer.pid, 'SIGTERM', (error?: Error) => {
      if (error) {
        console.error('Failed to stop server process:', error)
      }
    })
  }

  // Wires 'error' and 'exit' handlers that report unexpected stops. Both
  // handlers ignore events from processes that are no longer current.
  const attachLifecycleHandlers = (processRef: ChildProcess) => {
    processRef.on('error', (error) => {
      if (state.process !== processRef) {
        return
      }

      // Only treat this as a crash if the server had already come up;
      // otherwise startup-path error handling (below) logs it instead.
      const hadReadyServer = state.url !== null
      resetState(processRef)

      if (!options.isQuitting() && hadReadyServer) {
        options.onUnexpectedStop('The background service crashed unexpectedly. Please restart the app.')
        return
      }

      console.error('Failed to start server process:', error)
    })

    processRef.on('exit', (code, signal) => {
      if (state.process !== processRef) {
        return
      }

      const hadReadyServer = state.url !== null
      resetState(processRef)

      // Exits during app shutdown or before readiness are not reported.
      if (!options.isQuitting() && hadReadyServer) {
        options.onUnexpectedStop(formatUnexpectedStopMessage(options.isPackaged, code, signal))
      }
    })
  }

  // Starts (or reuses) the packaged server binary and resolves its URL.
  // Concurrent callers share the same in-flight startup promise.
  const startPackagedServer = async (): Promise<string> => {
    // Fast path: a live, previously-confirmed server.
    if (state.url && isProcessAlive(state.process)) {
      return state.url
    }

    // A startup is already in progress — piggyback on it.
    if (state.startup) {
      return state.startup
    }

    state.startup = (async () => {
      const binaryPath = resolveBinaryPath(options.resourcesPath)
      if (!existsSync(binaryPath)) {
        throw new Error(`Sidecar server binary is missing: ${binaryPath}`)
      }

      if (options.isQuitting()) {
        throw new Error('Application is shutting down.')
      }

      const port = await getAvailablePort()
      const nextServerUrl = `http://${SERVER_HOST}:${port}`
      const processRef = spawn(binaryPath, [], {
        env: {
          ...process.env,
          HOST: SERVER_HOST,
          PORT: String(port),
        },
        stdio: 'ignore',
        windowsHide: true,
      })

      processRef.unref()
      state.process = processRef
      attachLifecycleHandlers(processRef)

      const ready = await waitForServer(nextServerUrl, options.isQuitting, processRef)
      if (ready && isProcessAlive(processRef)) {
        state.url = nextServerUrl
        return nextServerUrl
      }

      // NOTE(review): "10 seconds" is hard-coded here — presumably it mirrors
      // SERVER_READY_TIMEOUT_MS; confirm and keep the two in sync.
      const failureReason =
        processRef.exitCode !== null
          ? `The service exited early (code ${processRef.exitCode}).`
          : `The service did not respond at ${nextServerUrl} within 10 seconds.`

      stop()
      throw new Error(failureReason)
    })().finally(() => {
      // Always clear the in-flight marker so a failed startup can be retried.
      state.startup = null
    })

    return state.startup
  }

  // Public entry point: resolves a usable server URL for the current mode.
  const resolveUrl = async (): Promise<string> => {
    if (options.isPackaged) {
      return startPackagedServer()
    }

    // Dev mode: the server is run externally; just wait for it to answer.
    const ready = await waitForServer(options.devServerUrl, options.isQuitting)
    if (!ready) {
      throw new Error('Dev server not responding. Run `bun dev` in apps/server first.')
    }

    state.url = options.devServerUrl
    return options.devServerUrl
  }

  return {
    resolveUrl,
    stop,
    // Live view of the most recently confirmed URL (null after reset/stop).
    get lastResolvedUrl() {
      return state.url
    },
  }
}
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
export {}
|
||||||
Binary file not shown.
|
After Width: | Height: | Size: 83 KiB |
@@ -0,0 +1,33 @@
|
|||||||
|
import { motion } from 'motion/react'
import logoImage from '../assets/logo.png'

/**
 * Splash screen shown while the app boots: a fading-in logo above an
 * indefinitely looping progress bar.
 */
export const SplashApp = () => {
  return (
    <main className="m-0 flex h-screen w-screen cursor-default select-none items-center justify-center overflow-hidden bg-white font-sans antialiased">
      {/* Entrance animation: fade in while drifting up 4px. */}
      <motion.section
        animate={{ opacity: 1, y: 0 }}
        className="flex flex-col items-center gap-8"
        initial={{ opacity: 0, y: 4 }}
        transition={{
          duration: 1,
          ease: [0.16, 1, 0.3, 1],
        }}
      >
        <img alt="Logo" className="h-20 w-auto object-contain" draggable={false} src={logoImage} />

        {/* Indeterminate progress bar: the filled track sweeps left-to-right forever. */}
        <div className="relative h-[4px] w-36 overflow-hidden rounded-full bg-zinc-100">
          <motion.div
            animate={{ x: '100%' }}
            className="h-full w-full bg-zinc-800"
            initial={{ x: '-100%' }}
            transition={{
              duration: 2,
              ease: [0.4, 0, 0.2, 1],
              repeat: Infinity,
            }}
          />
        </div>
      </motion.section>
    </main>
  )
}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
|
<title>Furtherverse</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="root"></div>
|
||||||
|
<script type="module" src="./main.tsx"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import { SplashApp } from './components/SplashApp'
import './styles.css'

// Entry point for the splash window: mount SplashApp into the static
// #root element provided by index.html.
// biome-ignore lint/style/noNonNullAssertion: the element always exists
createRoot(document.getElementById('root')!).render(
  <StrictMode>
    <SplashApp />
  </StrictMode>,
)
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"extends": "@furtherverse/tsconfig/react.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"composite": true,
|
||||||
|
"types": ["vite/client"]
|
||||||
|
},
|
||||||
|
"include": ["src/renderer/**/*"]
|
||||||
|
}
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"files": [],
|
||||||
|
"references": [
|
||||||
|
{
|
||||||
|
"path": "./tsconfig.app.json"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": "./tsconfig.node.json"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"extends": "@furtherverse/tsconfig/base.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"composite": true,
|
||||||
|
"types": ["node"]
|
||||||
|
},
|
||||||
|
"include": ["src/main/**/*", "src/preload/**/*", "electron.vite.config.ts"]
|
||||||
|
}
|
||||||
@@ -0,0 +1,41 @@
|
|||||||
|
{
|
||||||
|
"$schema": "../../node_modules/turbo/schema.json",
|
||||||
|
"extends": ["//"],
|
||||||
|
"tasks": {
|
||||||
|
"build": {
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"dist": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:linux": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:linux:arm64", "@furtherverse/server#compile:linux:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:linux:arm64": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:linux:arm64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:linux:x64": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:linux:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:mac": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:darwin:arm64", "@furtherverse/server#compile:darwin:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:mac:arm64": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:darwin:arm64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:mac:x64": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:darwin:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
},
|
||||||
|
"dist:win": {
|
||||||
|
"dependsOn": ["build", "@furtherverse/server#compile:windows:x64"],
|
||||||
|
"outputs": ["dist/**"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
DATABASE_PATH=data.db
|
||||||
@@ -0,0 +1,279 @@
|
|||||||
|
# AGENTS.md - Server App Guidelines
|
||||||
|
|
||||||
|
TanStack Start fullstack web app with ORPC (contract-first RPC).
|
||||||
|
|
||||||
|
## Tech Stack
|
||||||
|
|
||||||
|
> **⚠️ This project uses Bun — NOT Node.js / npm. All commands use `bun`. Always use `bun run <script>` (not `bun <script>`) to avoid conflicts with Bun built-in subcommands. Never use `npm`, `npx`, or `node`.**
|
||||||
|
|
||||||
|
- **Framework**: TanStack Start (React 19 SSR, file-based routing)
|
||||||
|
- **Runtime**: Bun — **NOT Node.js**
|
||||||
|
- **Package Manager**: Bun — **NOT npm / yarn / pnpm**
|
||||||
|
- **Language**: TypeScript (strict mode)
|
||||||
|
- **Styling**: Tailwind CSS v4
|
||||||
|
- **Database**: PostgreSQL + Drizzle ORM v1 beta (`drizzle-orm/postgres-js`, RQBv2)
|
||||||
|
- **State**: TanStack Query v5
|
||||||
|
- **RPC**: ORPC (contract-first, type-safe)
|
||||||
|
- **Build**: Vite + Nitro
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Development
|
||||||
|
bun run dev # Vite dev server (localhost:3000)
|
||||||
|
bun run db:studio # Drizzle Studio GUI
|
||||||
|
|
||||||
|
# Build
|
||||||
|
bun run build # Production build → .output/
|
||||||
|
bun run compile # Compile to standalone binary (current platform, depends on build)
|
||||||
|
bun run compile:darwin # Compile for macOS (arm64 + x64)
|
||||||
|
bun run compile:darwin:arm64 # Compile for macOS arm64
|
||||||
|
bun run compile:darwin:x64 # Compile for macOS x64
|
||||||
|
bun run compile:linux # Compile for Linux (x64 + arm64)
|
||||||
|
bun run compile:linux:arm64 # Compile for Linux arm64
|
||||||
|
bun run compile:linux:x64 # Compile for Linux x64
|
||||||
|
bun run compile:windows # Compile for Windows (default: x64)
|
||||||
|
bun run compile:windows:x64 # Compile for Windows x64
|
||||||
|
|
||||||
|
# Code Quality
|
||||||
|
bun run fix # Biome auto-fix
|
||||||
|
bun run typecheck # TypeScript check
|
||||||
|
|
||||||
|
# Database
|
||||||
|
bun run db:generate # Generate migrations from schema
|
||||||
|
bun run db:migrate # Run migrations
|
||||||
|
bun run db:push # Push schema directly (dev only)
|
||||||
|
|
||||||
|
# Testing (not yet configured)
|
||||||
|
bun test path/to/test.ts # Run single test
|
||||||
|
bun test -t "pattern" # Run tests matching pattern
|
||||||
|
```
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
src/
|
||||||
|
├── client/ # Client-side code
|
||||||
|
│ └── orpc.ts # ORPC client + TanStack Query utils (single entry point)
|
||||||
|
├── components/ # React components
|
||||||
|
├── routes/ # TanStack Router file routes
|
||||||
|
│ ├── __root.tsx # Root layout
|
||||||
|
│ ├── index.tsx # Home page
|
||||||
|
│ └── api/
|
||||||
|
│ ├── $.ts # OpenAPI handler + Scalar docs
|
||||||
|
│ ├── health.ts # Health check endpoint
|
||||||
|
│ └── rpc.$.ts # ORPC RPC handler
|
||||||
|
├── server/ # Server-side code
|
||||||
|
│ ├── api/ # ORPC layer
|
||||||
|
│ │ ├── contracts/ # Input/output schemas (Zod)
|
||||||
|
│ │ ├── middlewares/ # Middleware (db provider, auth)
|
||||||
|
│ │ ├── routers/ # Handler implementations
|
||||||
|
│ │ ├── interceptors.ts # Shared error interceptors
|
||||||
|
│ │ ├── context.ts # Request context
|
||||||
|
│ │ ├── server.ts # ORPC server instance
|
||||||
|
│ │ └── types.ts # Type exports
|
||||||
|
│ └── db/
|
||||||
|
│ ├── schema/ # Drizzle table definitions
|
||||||
|
│ ├── fields.ts # Shared field builders (id, createdAt, updatedAt)
|
||||||
|
│ ├── relations.ts # Drizzle relations (defineRelations, RQBv2)
|
||||||
|
│ └── index.ts # Database instance (postgres-js driver)
|
||||||
|
├── env.ts # Environment variable validation
|
||||||
|
├── router.tsx # Router configuration
|
||||||
|
├── routeTree.gen.ts # Auto-generated (DO NOT EDIT)
|
||||||
|
└── styles.css # Tailwind entry
|
||||||
|
```
|
||||||
|
|
||||||
|
## ORPC Pattern
|
||||||
|
|
||||||
|
### 1. Define Contract (`src/server/api/contracts/feature.contract.ts`)
|
||||||
|
```typescript
|
||||||
|
import { oc } from '@orpc/contract'
import { createInsertSchema, createSelectSchema } from 'drizzle-orm/zod'
import { z } from 'zod'
import { featureTable } from '@/server/db/schema'

const selectSchema = createSelectSchema(featureTable)
const insertSchema = createInsertSchema(featureTable)

export const list = oc.input(z.void()).output(z.array(selectSchema))
export const create = oc.input(insertSchema).output(selectSchema)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Implement Router (`src/server/api/routers/feature.router.ts`)
|
||||||
|
```typescript
|
||||||
|
import { ORPCError } from '@orpc/server'
|
||||||
|
import { db } from '../middlewares'
|
||||||
|
import { os } from '../server'
|
||||||
|
|
||||||
|
export const list = os.feature.list.use(db).handler(async ({ context }) => {
|
||||||
|
return await context.db.query.featureTable.findMany({
|
||||||
|
orderBy: { createdAt: 'desc' },
|
||||||
|
})
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Register in Index Files
|
||||||
|
```typescript
|
||||||
|
// src/server/api/contracts/index.ts
import * as feature from './feature.contract'
export const contract = { feature }

// src/server/api/routers/index.ts
import * as feature from './feature.router'
import { os } from '../server'
export const router = os.router({ feature })
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Use in Components
|
||||||
|
```typescript
|
||||||
|
import { useSuspenseQuery, useMutation } from '@tanstack/react-query'
|
||||||
|
import { orpc } from '@/client/orpc'
|
||||||
|
|
||||||
|
const { data } = useSuspenseQuery(orpc.feature.list.queryOptions())
|
||||||
|
const mutation = useMutation(orpc.feature.create.mutationOptions())
|
||||||
|
```
|
||||||
|
|
||||||
|
## Database (Drizzle ORM v1 beta)
|
||||||
|
|
||||||
|
- **Driver**: `drizzle-orm/postgres-js` (NOT `bun-sql`)
|
||||||
|
- **Validation**: `drizzle-orm/zod` (built-in, NOT separate `drizzle-zod` package)
|
||||||
|
- **Relations**: Defined via `defineRelations()` in `src/server/db/relations.ts`
|
||||||
|
- **Query**: RQBv2 — use `db.query.tableName.findMany()` with object-style `orderBy` and `where`
|
||||||
|
|
||||||
|
### Schema Definition
|
||||||
|
```typescript
|
||||||
|
import { pgTable, text, timestamp, uuid } from 'drizzle-orm/pg-core'
|
||||||
|
import { sql } from 'drizzle-orm'
|
||||||
|
|
||||||
|
export const myTable = pgTable('my_table', {
|
||||||
|
id: uuid().primaryKey().default(sql`uuidv7()`),
|
||||||
|
name: text().notNull(),
|
||||||
|
createdAt: timestamp({ withTimezone: true }).notNull().defaultNow(),
|
||||||
|
updatedAt: timestamp({ withTimezone: true }).notNull().defaultNow().$onUpdateFn(() => new Date()),
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Relations (RQBv2)
|
||||||
|
```typescript
|
||||||
|
// src/server/db/relations.ts
|
||||||
|
import { defineRelations } from 'drizzle-orm'
|
||||||
|
import * as schema from './schema'
|
||||||
|
|
||||||
|
export const relations = defineRelations(schema, (r) => ({
|
||||||
|
// Define relations here using r.one / r.many / r.through
|
||||||
|
}))
|
||||||
|
```
|
||||||
|
|
||||||
|
### DB Instance
|
||||||
|
```typescript
|
||||||
|
// src/server/db/index.ts
|
||||||
|
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||||
|
import { relations } from '@/server/db/relations'
|
||||||
|
// In RQBv2, relations already contain schema info — no separate schema import needed
|
||||||
|
|
||||||
|
const db = drizzle({
|
||||||
|
connection: env.DATABASE_URL,
|
||||||
|
relations,
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### RQBv2 Query Examples
|
||||||
|
```typescript
|
||||||
|
// Object-style orderBy (NOT callback style)
|
||||||
|
const todos = await db.query.todoTable.findMany({
|
||||||
|
orderBy: { createdAt: 'desc' },
|
||||||
|
})
|
||||||
|
|
||||||
|
// Object-style where
|
||||||
|
const todo = await db.query.todoTable.findFirst({
|
||||||
|
where: { id: someId },
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Style
|
||||||
|
|
||||||
|
### Formatting (Biome)
|
||||||
|
- **Indent**: 2 spaces
|
||||||
|
- **Quotes**: Single `'`
|
||||||
|
- **Semicolons**: Omit (ASI)
|
||||||
|
- **Arrow parens**: Always `(x) => x`
|
||||||
|
|
||||||
|
### Imports
|
||||||
|
Biome auto-organizes:
|
||||||
|
1. External packages
|
||||||
|
2. Internal `@/*` aliases
|
||||||
|
3. Type imports (`import type { ... }`)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { db } from '@/server/db'
|
||||||
|
import type { ReactNode } from 'react'
|
||||||
|
```
|
||||||
|
|
||||||
|
### TypeScript
|
||||||
|
- `strict: true`
|
||||||
|
- `noUncheckedIndexedAccess: true` - array access returns `T | undefined`
|
||||||
|
- Use `@/*` path aliases (maps to `src/*`)
|
||||||
|
|
||||||
|
### Naming
|
||||||
|
| Type | Convention | Example |
|
||||||
|
|------|------------|---------|
|
||||||
|
| Files (utils) | kebab-case | `auth-utils.ts` |
|
||||||
|
| Files (components) | PascalCase | `UserProfile.tsx` |
|
||||||
|
| Components | PascalCase arrow | `const Button = () => {}` |
|
||||||
|
| Functions | camelCase | `getUserById` |
|
||||||
|
| Types | PascalCase | `UserProfile` |
|
||||||
|
|
||||||
|
### React
|
||||||
|
- Use arrow functions for components (Biome enforced)
|
||||||
|
- Use `useSuspenseQuery` for guaranteed data
|
||||||
|
- Let React Compiler handle memoization (no manual `useMemo`/`useCallback`)
|
||||||
|
|
||||||
|
## Environment Variables
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// src/env.ts - using @t3-oss/env-core
|
||||||
|
import { createEnv } from '@t3-oss/env-core'
|
||||||
|
import { z } from 'zod'
|
||||||
|
|
||||||
|
export const env = createEnv({
|
||||||
|
server: {
|
||||||
|
DATABASE_URL: z.string().url(),
|
||||||
|
},
|
||||||
|
clientPrefix: 'VITE_',
|
||||||
|
client: {
|
||||||
|
VITE_API_URL: z.string().optional(),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development Principles
|
||||||
|
|
||||||
|
> **These principles apply to ALL code changes. Agents MUST follow them on every task.**
|
||||||
|
|
||||||
|
1. **No backward compatibility** — This project is in rapid iteration. Always use the latest API and patterns. Never keep deprecated code paths or old API fallbacks.
|
||||||
|
2. **Always sync documentation** — When code changes, immediately update all related documentation (`AGENTS.md`, `README.md`, inline code examples). Code and docs must never drift apart.
|
||||||
|
3. **Forward-only migration** — When upgrading dependencies, fully adopt the new API. Don't mix old and new patterns.
|
||||||
|
|
||||||
|
## Critical Rules
|
||||||
|
|
||||||
|
**DO:**
|
||||||
|
- Run `bun run fix` before committing
|
||||||
|
- Use `@/*` path aliases
|
||||||
|
- Include `createdAt`/`updatedAt` on all tables
|
||||||
|
- Use `ORPCError` with proper codes
|
||||||
|
- Use `drizzle-orm/zod` (NOT `drizzle-zod`) for schema validation
|
||||||
|
- Use RQBv2 object syntax for `orderBy` and `where`
|
||||||
|
- Update `AGENTS.md` and other docs whenever code patterns change
|
||||||
|
|
||||||
|
**DON'T:**
|
||||||
|
- Use `npm`, `npx`, `node`, `yarn`, `pnpm` — always use `bun` / `bunx`
|
||||||
|
- Edit `src/routeTree.gen.ts` (auto-generated)
|
||||||
|
- Use `as any`, `@ts-ignore`, `@ts-expect-error`
|
||||||
|
- Commit `.env` files
|
||||||
|
- Use empty catch blocks
|
||||||
|
- Import from `drizzle-zod` (use `drizzle-orm/zod` instead)
|
||||||
|
- Use RQBv1 callback-style `orderBy` / old `relations()` API
|
||||||
|
- Use `drizzle-orm/bun-sql` driver (use `drizzle-orm/postgres-js`)
|
||||||
|
- Pass `schema` to `drizzle()` constructor (only `relations` is needed in RQBv2)
|
||||||
|
- Import `os` from `@orpc/server` in middleware — use `@/server/api/server` (the local typed instance)
|
||||||
|
- Leave docs out of sync with code changes
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"$schema": "../../node_modules/@biomejs/biome/configuration_schema.json",
|
||||||
|
"extends": "//",
|
||||||
|
"files": {
|
||||||
|
"includes": ["**", "!**/routeTree.gen.ts"]
|
||||||
|
},
|
||||||
|
"css": {
|
||||||
|
"parser": {
|
||||||
|
"tailwindDirectives": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,8 +1,7 @@
|
|||||||
import { mkdir, rm } from 'node:fs/promises'
|
import { mkdir, rm } from 'node:fs/promises'
|
||||||
import { basename } from 'node:path'
|
|
||||||
import { parseArgs } from 'node:util'
|
import { parseArgs } from 'node:util'
|
||||||
|
|
||||||
const ENTRYPOINT = 'src/bin.ts'
|
const ENTRYPOINT = '.output/server/index.mjs'
|
||||||
const OUTDIR = 'out'
|
const OUTDIR = 'out'
|
||||||
|
|
||||||
const SUPPORTED_TARGETS: readonly Bun.Build.CompileTarget[] = [
|
const SUPPORTED_TARGETS: readonly Bun.Build.CompileTarget[] = [
|
||||||
@@ -13,9 +12,8 @@ const SUPPORTED_TARGETS: readonly Bun.Build.CompileTarget[] = [
|
|||||||
'bun-linux-arm64',
|
'bun-linux-arm64',
|
||||||
]
|
]
|
||||||
|
|
||||||
const SUPPORTED_TARGET_SET: ReadonlySet<string> = new Set(SUPPORTED_TARGETS)
|
const isSupportedTarget = (value: string): value is Bun.Build.CompileTarget =>
|
||||||
|
(SUPPORTED_TARGETS as readonly string[]).includes(value)
|
||||||
const isSupportedTarget = (value: string): value is Bun.Build.CompileTarget => SUPPORTED_TARGET_SET.has(value)
|
|
||||||
|
|
||||||
const { values } = parseArgs({
|
const { values } = parseArgs({
|
||||||
options: { target: { type: 'string' } },
|
options: { target: { type: 'string' } },
|
||||||
@@ -50,20 +48,13 @@ const main = async () => {
|
|||||||
const result = await Bun.build({
|
const result = await Bun.build({
|
||||||
entrypoints: [ENTRYPOINT],
|
entrypoints: [ENTRYPOINT],
|
||||||
outdir: OUTDIR,
|
outdir: OUTDIR,
|
||||||
// autoloadDotenv: false — produce a deterministic binary; it must not silently consume a .env from cwd.
|
compile: { outfile, target },
|
||||||
compile: { outfile, target, autoloadDotenv: false },
|
|
||||||
minify: true,
|
|
||||||
bytecode: true,
|
|
||||||
sourcemap: 'inline',
|
|
||||||
})
|
})
|
||||||
|
|
||||||
if (!result.success) {
|
if (!result.success) {
|
||||||
throw new Error(result.logs.map(String).join('\n'))
|
throw new Error(result.logs.map(String).join('\n'))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Bun bundler still writes *.js.map next to the binary even with inline sourcemap.
|
|
||||||
await rm(`${OUTDIR}/${basename(ENTRYPOINT, '.ts')}.js.map`, { force: true })
|
|
||||||
|
|
||||||
console.log(`✓ ${target} → ${OUTDIR}/${outfile}`)
|
console.log(`✓ ${target} → ${OUTDIR}/${outfile}`)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1,11 +1,11 @@
|
|||||||
import { defineConfig } from 'drizzle-kit'
|
import { defineConfig } from 'drizzle-kit'
|
||||||
import { env } from './src/env'
|
import { env } from '@/env'
|
||||||
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
out: './drizzle',
|
out: './drizzle',
|
||||||
schema: './src/server/db/schema/index.ts',
|
schema: './src/server/db/schema/index.ts',
|
||||||
dialect: 'postgresql',
|
dialect: 'sqlite',
|
||||||
dbCredentials: {
|
dbCredentials: {
|
||||||
url: env.DATABASE_URL,
|
url: env.DATABASE_PATH,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
{
|
||||||
|
"name": "@furtherverse/server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"build": "bunx --bun vite build",
|
||||||
|
"compile": "bun compile.ts",
|
||||||
|
"compile:darwin": "bun run compile:darwin:arm64 && bun run compile:darwin:x64",
|
||||||
|
"compile:darwin:arm64": "bun compile.ts --target bun-darwin-arm64",
|
||||||
|
"compile:darwin:x64": "bun compile.ts --target bun-darwin-x64",
|
||||||
|
"compile:linux": "bun run compile:linux:x64 && bun run compile:linux:arm64",
|
||||||
|
"compile:linux:arm64": "bun compile.ts --target bun-linux-arm64",
|
||||||
|
"compile:linux:x64": "bun compile.ts --target bun-linux-x64",
|
||||||
|
"compile:windows": "bun run compile:windows:x64",
|
||||||
|
"compile:windows:x64": "bun compile.ts --target bun-windows-x64",
|
||||||
|
"db:generate": "drizzle-kit generate",
|
||||||
|
"db:migrate": "drizzle-kit migrate",
|
||||||
|
"db:push": "drizzle-kit push",
|
||||||
|
"db:studio": "drizzle-kit studio",
|
||||||
|
"dev": "bunx --bun vite dev",
|
||||||
|
"fix": "biome check --write",
|
||||||
|
"typecheck": "tsc --noEmit"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@furtherverse/crypto": "workspace:*",
|
||||||
|
"@orpc/client": "catalog:",
|
||||||
|
"@orpc/contract": "catalog:",
|
||||||
|
"@orpc/openapi": "catalog:",
|
||||||
|
"@orpc/server": "catalog:",
|
||||||
|
"@orpc/tanstack-query": "catalog:",
|
||||||
|
"@orpc/zod": "catalog:",
|
||||||
|
"@t3-oss/env-core": "catalog:",
|
||||||
|
"@tanstack/react-query": "catalog:",
|
||||||
|
"@tanstack/react-router": "catalog:",
|
||||||
|
"@tanstack/react-router-ssr-query": "catalog:",
|
||||||
|
"@tanstack/react-start": "catalog:",
|
||||||
|
"drizzle-orm": "catalog:",
|
||||||
|
"jszip": "catalog:",
|
||||||
|
"react": "catalog:",
|
||||||
|
"react-dom": "catalog:",
|
||||||
|
"uuid": "catalog:",
|
||||||
|
"zod": "catalog:"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@furtherverse/tsconfig": "workspace:*",
|
||||||
|
"@tailwindcss/vite": "catalog:",
|
||||||
|
"@tanstack/devtools-vite": "catalog:",
|
||||||
|
"@tanstack/react-devtools": "catalog:",
|
||||||
|
"@tanstack/react-query-devtools": "catalog:",
|
||||||
|
"@tanstack/react-router-devtools": "catalog:",
|
||||||
|
"@types/bun": "catalog:",
|
||||||
|
"@vitejs/plugin-react": "catalog:",
|
||||||
|
"babel-plugin-react-compiler": "catalog:",
|
||||||
|
"drizzle-kit": "catalog:",
|
||||||
|
"nitro": "catalog:",
|
||||||
|
"tailwindcss": "catalog:",
|
||||||
|
"vite": "catalog:",
|
||||||
|
"vite-tsconfig-paths": "catalog:"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
# https://www.robotstxt.org/robotstxt.html
|
||||||
|
User-agent: *
|
||||||
|
Disallow:
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
/** Fallback UI rendered when an unhandled error bubbles up to the router. */
// Arrow-function component to match the project convention ("Use arrow
// functions for components", Biome enforced).
export const ErrorComponent = () => {
  return <div>An unhandled error happened!</div>
}
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
/** Fallback UI rendered when no route matches the requested path. */
// Arrow-function component to match the project convention ("Use arrow
// functions for components", Biome enforced).
export const NotFoundComponent = () => {
  return <div>404 - Not Found</div>
}
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
import { createEnv } from '@t3-oss/env-core'
import { z } from 'zod'

/**
 * Validated environment variables. createEnv parses process.env against the
 * schemas below and fails fast at startup on invalid values.
 */
export const env = createEnv({
  server: {
    // Path to the database file; defaults to ./data.db relative to cwd.
    DATABASE_PATH: z.string().min(1).default('data.db'),
  },
  // Only variables with this prefix are exposed to client-side code.
  clientPrefix: 'VITE_',
  client: {
    VITE_APP_TITLE: z.string().min(1).optional(),
  },
  runtimeEnv: process.env,
  // Treat empty-string values as unset so defaults and .optional() apply.
  emptyStringAsUndefined: true,
})
|
||||||
@@ -9,21 +9,21 @@
|
|||||||
// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.
|
// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.
|
||||||
|
|
||||||
import { Route as rootRouteImport } from './routes/__root'
|
import { Route as rootRouteImport } from './routes/__root'
|
||||||
import { Route as HealthRouteImport } from './routes/health'
|
|
||||||
import { Route as IndexRouteImport } from './routes/index'
|
import { Route as IndexRouteImport } from './routes/index'
|
||||||
|
import { Route as ApiHealthRouteImport } from './routes/api/health'
|
||||||
import { Route as ApiSplatRouteImport } from './routes/api/$'
|
import { Route as ApiSplatRouteImport } from './routes/api/$'
|
||||||
import { Route as ApiRpcSplatRouteImport } from './routes/api/rpc.$'
|
import { Route as ApiRpcSplatRouteImport } from './routes/api/rpc.$'
|
||||||
|
|
||||||
const HealthRoute = HealthRouteImport.update({
|
|
||||||
id: '/health',
|
|
||||||
path: '/health',
|
|
||||||
getParentRoute: () => rootRouteImport,
|
|
||||||
} as any)
|
|
||||||
const IndexRoute = IndexRouteImport.update({
|
const IndexRoute = IndexRouteImport.update({
|
||||||
id: '/',
|
id: '/',
|
||||||
path: '/',
|
path: '/',
|
||||||
getParentRoute: () => rootRouteImport,
|
getParentRoute: () => rootRouteImport,
|
||||||
} as any)
|
} as any)
|
||||||
|
const ApiHealthRoute = ApiHealthRouteImport.update({
|
||||||
|
id: '/api/health',
|
||||||
|
path: '/api/health',
|
||||||
|
getParentRoute: () => rootRouteImport,
|
||||||
|
} as any)
|
||||||
const ApiSplatRoute = ApiSplatRouteImport.update({
|
const ApiSplatRoute = ApiSplatRouteImport.update({
|
||||||
id: '/api/$',
|
id: '/api/$',
|
||||||
path: '/api/$',
|
path: '/api/$',
|
||||||
@@ -37,47 +37,40 @@ const ApiRpcSplatRoute = ApiRpcSplatRouteImport.update({
|
|||||||
|
|
||||||
export interface FileRoutesByFullPath {
|
export interface FileRoutesByFullPath {
|
||||||
'/': typeof IndexRoute
|
'/': typeof IndexRoute
|
||||||
'/health': typeof HealthRoute
|
|
||||||
'/api/$': typeof ApiSplatRoute
|
'/api/$': typeof ApiSplatRoute
|
||||||
|
'/api/health': typeof ApiHealthRoute
|
||||||
'/api/rpc/$': typeof ApiRpcSplatRoute
|
'/api/rpc/$': typeof ApiRpcSplatRoute
|
||||||
}
|
}
|
||||||
export interface FileRoutesByTo {
|
export interface FileRoutesByTo {
|
||||||
'/': typeof IndexRoute
|
'/': typeof IndexRoute
|
||||||
'/health': typeof HealthRoute
|
|
||||||
'/api/$': typeof ApiSplatRoute
|
'/api/$': typeof ApiSplatRoute
|
||||||
|
'/api/health': typeof ApiHealthRoute
|
||||||
'/api/rpc/$': typeof ApiRpcSplatRoute
|
'/api/rpc/$': typeof ApiRpcSplatRoute
|
||||||
}
|
}
|
||||||
export interface FileRoutesById {
|
export interface FileRoutesById {
|
||||||
__root__: typeof rootRouteImport
|
__root__: typeof rootRouteImport
|
||||||
'/': typeof IndexRoute
|
'/': typeof IndexRoute
|
||||||
'/health': typeof HealthRoute
|
|
||||||
'/api/$': typeof ApiSplatRoute
|
'/api/$': typeof ApiSplatRoute
|
||||||
|
'/api/health': typeof ApiHealthRoute
|
||||||
'/api/rpc/$': typeof ApiRpcSplatRoute
|
'/api/rpc/$': typeof ApiRpcSplatRoute
|
||||||
}
|
}
|
||||||
export interface FileRouteTypes {
|
export interface FileRouteTypes {
|
||||||
fileRoutesByFullPath: FileRoutesByFullPath
|
fileRoutesByFullPath: FileRoutesByFullPath
|
||||||
fullPaths: '/' | '/health' | '/api/$' | '/api/rpc/$'
|
fullPaths: '/' | '/api/$' | '/api/health' | '/api/rpc/$'
|
||||||
fileRoutesByTo: FileRoutesByTo
|
fileRoutesByTo: FileRoutesByTo
|
||||||
to: '/' | '/health' | '/api/$' | '/api/rpc/$'
|
to: '/' | '/api/$' | '/api/health' | '/api/rpc/$'
|
||||||
id: '__root__' | '/' | '/health' | '/api/$' | '/api/rpc/$'
|
id: '__root__' | '/' | '/api/$' | '/api/health' | '/api/rpc/$'
|
||||||
fileRoutesById: FileRoutesById
|
fileRoutesById: FileRoutesById
|
||||||
}
|
}
|
||||||
export interface RootRouteChildren {
|
export interface RootRouteChildren {
|
||||||
IndexRoute: typeof IndexRoute
|
IndexRoute: typeof IndexRoute
|
||||||
HealthRoute: typeof HealthRoute
|
|
||||||
ApiSplatRoute: typeof ApiSplatRoute
|
ApiSplatRoute: typeof ApiSplatRoute
|
||||||
|
ApiHealthRoute: typeof ApiHealthRoute
|
||||||
ApiRpcSplatRoute: typeof ApiRpcSplatRoute
|
ApiRpcSplatRoute: typeof ApiRpcSplatRoute
|
||||||
}
|
}
|
||||||
|
|
||||||
declare module '@tanstack/react-router' {
|
declare module '@tanstack/react-router' {
|
||||||
interface FileRoutesByPath {
|
interface FileRoutesByPath {
|
||||||
'/health': {
|
|
||||||
id: '/health'
|
|
||||||
path: '/health'
|
|
||||||
fullPath: '/health'
|
|
||||||
preLoaderRoute: typeof HealthRouteImport
|
|
||||||
parentRoute: typeof rootRouteImport
|
|
||||||
}
|
|
||||||
'/': {
|
'/': {
|
||||||
id: '/'
|
id: '/'
|
||||||
path: '/'
|
path: '/'
|
||||||
@@ -85,6 +78,13 @@ declare module '@tanstack/react-router' {
|
|||||||
preLoaderRoute: typeof IndexRouteImport
|
preLoaderRoute: typeof IndexRouteImport
|
||||||
parentRoute: typeof rootRouteImport
|
parentRoute: typeof rootRouteImport
|
||||||
}
|
}
|
||||||
|
'/api/health': {
|
||||||
|
id: '/api/health'
|
||||||
|
path: '/api/health'
|
||||||
|
fullPath: '/api/health'
|
||||||
|
preLoaderRoute: typeof ApiHealthRouteImport
|
||||||
|
parentRoute: typeof rootRouteImport
|
||||||
|
}
|
||||||
'/api/$': {
|
'/api/$': {
|
||||||
id: '/api/$'
|
id: '/api/$'
|
||||||
path: '/api/$'
|
path: '/api/$'
|
||||||
@@ -104,8 +104,8 @@ declare module '@tanstack/react-router' {
|
|||||||
|
|
||||||
const rootRouteChildren: RootRouteChildren = {
|
const rootRouteChildren: RootRouteChildren = {
|
||||||
IndexRoute: IndexRoute,
|
IndexRoute: IndexRoute,
|
||||||
HealthRoute: HealthRoute,
|
|
||||||
ApiSplatRoute: ApiSplatRoute,
|
ApiSplatRoute: ApiSplatRoute,
|
||||||
|
ApiHealthRoute: ApiHealthRoute,
|
||||||
ApiRpcSplatRoute: ApiRpcSplatRoute,
|
ApiRpcSplatRoute: ApiRpcSplatRoute,
|
||||||
}
|
}
|
||||||
export const routeTree = rootRouteImport
|
export const routeTree = rootRouteImport
|
||||||
@@ -4,7 +4,6 @@ import { ReactQueryDevtoolsPanel } from '@tanstack/react-query-devtools'
|
|||||||
import { createRootRouteWithContext, HeadContent, Scripts } from '@tanstack/react-router'
|
import { createRootRouteWithContext, HeadContent, Scripts } from '@tanstack/react-router'
|
||||||
import { TanStackRouterDevtoolsPanel } from '@tanstack/react-router-devtools'
|
import { TanStackRouterDevtoolsPanel } from '@tanstack/react-router-devtools'
|
||||||
import type { ReactNode } from 'react'
|
import type { ReactNode } from 'react'
|
||||||
import { name } from '#package'
|
|
||||||
import { ErrorComponent } from '@/components/Error'
|
import { ErrorComponent } from '@/components/Error'
|
||||||
import { NotFoundComponent } from '@/components/NotFound'
|
import { NotFoundComponent } from '@/components/NotFound'
|
||||||
import appCss from '@/styles.css?url'
|
import appCss from '@/styles.css?url'
|
||||||
@@ -24,7 +23,7 @@ export const Route = createRootRouteWithContext<RouterContext>()({
|
|||||||
content: 'width=device-width, initial-scale=1',
|
content: 'width=device-width, initial-scale=1',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
title: name,
|
title: 'Furtherverse',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
links: [
|
links: [
|
||||||
@@ -35,8 +34,8 @@ export const Route = createRootRouteWithContext<RouterContext>()({
|
|||||||
],
|
],
|
||||||
}),
|
}),
|
||||||
shellComponent: RootDocument,
|
shellComponent: RootDocument,
|
||||||
errorComponent: ErrorComponent,
|
errorComponent: () => <ErrorComponent />,
|
||||||
notFoundComponent: NotFoundComponent,
|
notFoundComponent: () => <NotFoundComponent />,
|
||||||
})
|
})
|
||||||
|
|
||||||
function RootDocument({ children }: Readonly<{ children: ReactNode }>) {
|
function RootDocument({ children }: Readonly<{ children: ReactNode }>) {
|
||||||
@@ -3,7 +3,7 @@ import { OpenAPIReferencePlugin } from '@orpc/openapi/plugins'
|
|||||||
import { onError } from '@orpc/server'
|
import { onError } from '@orpc/server'
|
||||||
import { ZodToJsonSchemaConverter } from '@orpc/zod/zod4'
|
import { ZodToJsonSchemaConverter } from '@orpc/zod/zod4'
|
||||||
import { createFileRoute } from '@tanstack/react-router'
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
import { name, version } from '#package'
|
import { name, version } from '@/../package.json'
|
||||||
import { handleValidationError, logError } from '@/server/api/interceptors'
|
import { handleValidationError, logError } from '@/server/api/interceptors'
|
||||||
import { router } from '@/server/api/routers'
|
import { router } from '@/server/api/routers'
|
||||||
|
|
||||||
@@ -0,0 +1,27 @@
|
|||||||
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
|
import { name, version } from '@/../package.json'
|
||||||
|
|
||||||
|
const createHealthResponse = (): Response =>
|
||||||
|
Response.json(
|
||||||
|
{
|
||||||
|
status: 'ok',
|
||||||
|
service: name,
|
||||||
|
version,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
headers: {
|
||||||
|
'cache-control': 'no-store',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
export const Route = createFileRoute('/api/health')({
|
||||||
|
server: {
|
||||||
|
handlers: {
|
||||||
|
GET: async () => createHealthResponse(),
|
||||||
|
HEAD: async () => new Response(null, { status: 200 }),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
@@ -0,0 +1,21 @@
|
|||||||
|
import { createFileRoute } from '@tanstack/react-router'
|
||||||
|
|
||||||
|
export const Route = createFileRoute('/')({
|
||||||
|
component: Home,
|
||||||
|
})
|
||||||
|
|
||||||
|
function Home() {
|
||||||
|
return (
|
||||||
|
<div className="min-h-screen bg-slate-50 flex items-center justify-center font-sans">
|
||||||
|
<div className="text-center space-y-4">
|
||||||
|
<h1 className="text-3xl font-bold text-slate-900 tracking-tight">UX Server</h1>
|
||||||
|
<p className="text-slate-500">
|
||||||
|
API:
|
||||||
|
<a href="/api" className="text-indigo-600 hover:text-indigo-700 underline">
|
||||||
|
/api
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
import type { DB } from '@/server/db'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 基础 Context - 所有请求都包含的上下文
|
||||||
|
*/
|
||||||
|
export interface BaseContext {
|
||||||
|
headers: Headers
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 数据库 Context - 通过 db middleware 扩展
|
||||||
|
*/
|
||||||
|
export interface DBContext extends BaseContext {
|
||||||
|
db: DB
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 认证 Context - 通过 auth middleware 扩展(未来使用)
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* export interface AuthContext extends DBContext {
|
||||||
|
* userId: string
|
||||||
|
* user: User
|
||||||
|
* }
|
||||||
|
*/
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
import { oc } from '@orpc/contract'
|
||||||
|
import { z } from 'zod'
|
||||||
|
|
||||||
|
export const encryptDeviceInfo = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
deviceId: z.string().min(1),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(
|
||||||
|
z.object({
|
||||||
|
encrypted: z.string(),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
export const decryptTask = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
deviceId: z.string().min(1),
|
||||||
|
encryptedData: z.string().min(1),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(
|
||||||
|
z.object({
|
||||||
|
taskId: z.string(),
|
||||||
|
enterpriseId: z.string(),
|
||||||
|
orgName: z.string(),
|
||||||
|
inspectionId: z.string(),
|
||||||
|
inspectionPerson: z.string(),
|
||||||
|
issuedAt: z.number(),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
export const encryptSummary = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
deviceId: z.string().min(1),
|
||||||
|
taskId: z.string().min(1),
|
||||||
|
enterpriseId: z.string().min(1),
|
||||||
|
inspectionId: z.string().min(1),
|
||||||
|
summary: z.string().min(1),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(
|
||||||
|
z.object({
|
||||||
|
qrContent: z.string(),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
export const signAndPackReport = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
deviceId: z.string().min(1),
|
||||||
|
taskId: z.string().min(1),
|
||||||
|
enterpriseId: z.string().min(1),
|
||||||
|
inspectionId: z.string().min(1),
|
||||||
|
summary: z.string().min(1),
|
||||||
|
rawZipBase64: z.string().min(1),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(
|
||||||
|
z.object({
|
||||||
|
deviceSignature: z.string(),
|
||||||
|
signedZipBase64: z.string(),
|
||||||
|
}),
|
||||||
|
)
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
import { oc } from '@orpc/contract'
|
||||||
|
import { z } from 'zod'
|
||||||
|
|
||||||
|
const deviceOutput = z.object({
|
||||||
|
id: z.string(),
|
||||||
|
licence: z.string(),
|
||||||
|
fingerprint: z.string(),
|
||||||
|
platformPublicKey: z.string(),
|
||||||
|
pgpPublicKey: z.string().nullable(),
|
||||||
|
createdAt: z.date(),
|
||||||
|
updatedAt: z.date(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const register = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
licence: z.string().min(1),
|
||||||
|
platformPublicKey: z.string().min(1),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(deviceOutput)
|
||||||
|
|
||||||
|
export const get = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
id: z.string().optional(),
|
||||||
|
licence: z.string().optional(),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(deviceOutput)
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
import * as crypto from './crypto.contract'
|
||||||
|
import * as device from './device.contract'
|
||||||
|
import * as task from './task.contract'
|
||||||
|
|
||||||
|
export const contract = {
|
||||||
|
device,
|
||||||
|
crypto,
|
||||||
|
task,
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Contract = typeof contract
|
||||||
@@ -0,0 +1,47 @@
|
|||||||
|
import { oc } from '@orpc/contract'
|
||||||
|
import { z } from 'zod'
|
||||||
|
|
||||||
|
const taskOutput = z.object({
|
||||||
|
id: z.string(),
|
||||||
|
deviceId: z.string(),
|
||||||
|
taskId: z.string(),
|
||||||
|
enterpriseId: z.string().nullable(),
|
||||||
|
orgName: z.string().nullable(),
|
||||||
|
inspectionId: z.string().nullable(),
|
||||||
|
inspectionPerson: z.string().nullable(),
|
||||||
|
issuedAt: z.date().nullable(),
|
||||||
|
status: z.enum(['pending', 'in_progress', 'done']),
|
||||||
|
createdAt: z.date(),
|
||||||
|
updatedAt: z.date(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const save = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
deviceId: z.string().min(1),
|
||||||
|
taskId: z.string().min(1),
|
||||||
|
enterpriseId: z.string().optional(),
|
||||||
|
orgName: z.string().optional(),
|
||||||
|
inspectionId: z.string().optional(),
|
||||||
|
inspectionPerson: z.string().optional(),
|
||||||
|
issuedAt: z.number().optional(),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(taskOutput)
|
||||||
|
|
||||||
|
export const list = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
deviceId: z.string().min(1),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(z.array(taskOutput))
|
||||||
|
|
||||||
|
export const updateStatus = oc
|
||||||
|
.input(
|
||||||
|
z.object({
|
||||||
|
id: z.string().min(1),
|
||||||
|
status: z.enum(['pending', 'in_progress', 'done']),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.output(taskOutput)
|
||||||
@@ -1,19 +1,13 @@
|
|||||||
import { ORPCError, ValidationError } from '@orpc/server'
|
import { ORPCError, ValidationError } from '@orpc/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getLogger } from '@/server/logger'
|
|
||||||
|
|
||||||
const logger = getLogger(['api'])
|
|
||||||
|
|
||||||
export const logError = (error: unknown) => {
|
export const logError = (error: unknown) => {
|
||||||
logger.error('Unhandled error in ORPC handler', { error })
|
console.error(error)
|
||||||
}
|
}
|
||||||
|
|
||||||
export const handleValidationError = (error: unknown) => {
|
export const handleValidationError = (error: unknown) => {
|
||||||
if (!(error instanceof ORPCError) || !(error.cause instanceof ValidationError)) return
|
if (error instanceof ORPCError && error.code === 'BAD_REQUEST' && error.cause instanceof ValidationError) {
|
||||||
|
// If you only use Zod you can safely cast to ZodIssue[] (per ORPC official docs)
|
||||||
if (error.code === 'BAD_REQUEST') {
|
|
||||||
// ORPC widens issues to the Standard Schema shape; every contract here is built from Zod/drizzle-zod,
|
|
||||||
// so the runtime objects are Zod issues. Rehydrate to reuse z.prettifyError / z.flattenError.
|
|
||||||
const zodError = new z.ZodError(error.cause.issues as z.core.$ZodIssue[])
|
const zodError = new z.ZodError(error.cause.issues as z.core.$ZodIssue[])
|
||||||
|
|
||||||
throw new ORPCError('INPUT_VALIDATION_FAILED', {
|
throw new ORPCError('INPUT_VALIDATION_FAILED', {
|
||||||
@@ -24,7 +18,7 @@ export const handleValidationError = (error: unknown) => {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
if (error.code === 'INTERNAL_SERVER_ERROR') {
|
if (error instanceof ORPCError && error.code === 'INTERNAL_SERVER_ERROR' && error.cause instanceof ValidationError) {
|
||||||
throw new ORPCError('OUTPUT_VALIDATION_FAILED', {
|
throw new ORPCError('OUTPUT_VALIDATION_FAILED', {
|
||||||
cause: error.cause,
|
cause: error.cause,
|
||||||
})
|
})
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
import { os } from '@/server/api/server'
|
||||||
|
import { getDB } from '@/server/db'
|
||||||
|
|
||||||
|
export const db = os.middleware(async ({ context, next }) => {
|
||||||
|
return next({
|
||||||
|
context: {
|
||||||
|
...context,
|
||||||
|
db: getDB(),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
export * from './db.middleware'
|
||||||
@@ -0,0 +1,306 @@
|
|||||||
|
import {
|
||||||
|
aesGcmDecrypt,
|
||||||
|
aesGcmEncrypt,
|
||||||
|
hkdfSha256,
|
||||||
|
hmacSha256Base64,
|
||||||
|
pgpSignDetached,
|
||||||
|
rsaOaepEncrypt,
|
||||||
|
sha256,
|
||||||
|
sha256Hex,
|
||||||
|
} from '@furtherverse/crypto'
|
||||||
|
import { ORPCError } from '@orpc/server'
|
||||||
|
import type { JSZipObject } from 'jszip'
|
||||||
|
import JSZip from 'jszip'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { db } from '../middlewares'
|
||||||
|
import { os } from '../server'
|
||||||
|
|
||||||
|
interface DeviceRow {
|
||||||
|
id: string
|
||||||
|
licence: string
|
||||||
|
fingerprint: string
|
||||||
|
platformPublicKey: string
|
||||||
|
pgpPrivateKey: string | null
|
||||||
|
pgpPublicKey: string | null
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ReportFiles {
|
||||||
|
assets: Uint8Array
|
||||||
|
vulnerabilities: Uint8Array
|
||||||
|
weakPasswords: Uint8Array
|
||||||
|
reportHtml: Uint8Array
|
||||||
|
reportHtmlName: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const MAX_RAW_ZIP_BYTES = 50 * 1024 * 1024
|
||||||
|
const MAX_SINGLE_FILE_BYTES = 20 * 1024 * 1024
|
||||||
|
const MAX_TOTAL_UNCOMPRESSED_BYTES = 60 * 1024 * 1024
|
||||||
|
const MAX_ZIP_ENTRIES = 32
|
||||||
|
|
||||||
|
const taskPayloadSchema = z.object({
|
||||||
|
taskId: z.string().min(1),
|
||||||
|
enterpriseId: z.string().min(1),
|
||||||
|
orgName: z.string().min(1),
|
||||||
|
inspectionId: z.string().min(1),
|
||||||
|
inspectionPerson: z.string().min(1),
|
||||||
|
issuedAt: z.number(),
|
||||||
|
})
|
||||||
|
|
||||||
|
const normalizePath = (name: string): string => name.replaceAll('\\', '/')
|
||||||
|
|
||||||
|
const isUnsafePath = (name: string): boolean => {
|
||||||
|
const normalized = normalizePath(name)
|
||||||
|
const segments = normalized.split('/')
|
||||||
|
|
||||||
|
return (
|
||||||
|
normalized.startsWith('/') ||
|
||||||
|
normalized.includes('\u0000') ||
|
||||||
|
segments.some((segment) => segment === '..' || segment.trim().length === 0)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const getBaseName = (name: string): string => {
|
||||||
|
const normalized = normalizePath(name)
|
||||||
|
const parts = normalized.split('/')
|
||||||
|
return parts.at(-1) ?? normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
const getRequiredReportFiles = async (rawZip: JSZip): Promise<ReportFiles> => {
|
||||||
|
let assets: Uint8Array | null = null
|
||||||
|
let vulnerabilities: Uint8Array | null = null
|
||||||
|
let weakPasswords: Uint8Array | null = null
|
||||||
|
let reportHtml: Uint8Array | null = null
|
||||||
|
let reportHtmlName: string | null = null
|
||||||
|
|
||||||
|
const entries = Object.values(rawZip.files) as JSZipObject[]
|
||||||
|
|
||||||
|
if (entries.length > MAX_ZIP_ENTRIES) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: `Zip contains too many entries: ${entries.length}`,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
let totalUncompressedBytes = 0
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (entry.dir) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isUnsafePath(entry.name)) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: `Zip contains unsafe entry path: ${entry.name}`,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await entry.async('uint8array')
|
||||||
|
if (content.byteLength > MAX_SINGLE_FILE_BYTES) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: `Zip entry too large: ${entry.name}`,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
totalUncompressedBytes += content.byteLength
|
||||||
|
if (totalUncompressedBytes > MAX_TOTAL_UNCOMPRESSED_BYTES) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: 'Zip total uncompressed content exceeds max size limit',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const fileName = getBaseName(entry.name)
|
||||||
|
const lowerFileName = fileName.toLowerCase()
|
||||||
|
|
||||||
|
if (lowerFileName === 'assets.json') {
|
||||||
|
if (assets) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', { message: 'Zip contains duplicate assets.json' })
|
||||||
|
}
|
||||||
|
assets = content
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if (lowerFileName === 'vulnerabilities.json') {
|
||||||
|
if (vulnerabilities) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', { message: 'Zip contains duplicate vulnerabilities.json' })
|
||||||
|
}
|
||||||
|
vulnerabilities = content
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if (lowerFileName === 'weakpasswords.json') {
|
||||||
|
if (weakPasswords) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', { message: 'Zip contains duplicate weakPasswords.json' })
|
||||||
|
}
|
||||||
|
weakPasswords = content
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if (fileName.includes('漏洞评估报告') && lowerFileName.endsWith('.html')) {
|
||||||
|
if (reportHtml) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: 'Zip contains multiple 漏洞评估报告*.html files',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
reportHtml = content
|
||||||
|
reportHtmlName = fileName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!assets || !vulnerabilities || !weakPasswords || !reportHtml || !reportHtmlName) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message:
|
||||||
|
'Zip missing required files. Required: assets.json, vulnerabilities.json, weakPasswords.json, and 漏洞评估报告*.html',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
assets,
|
||||||
|
vulnerabilities,
|
||||||
|
weakPasswords,
|
||||||
|
reportHtml,
|
||||||
|
reportHtmlName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const getDevice = async (
|
||||||
|
context: {
|
||||||
|
db: { query: { deviceTable: { findFirst: (args: { where: { id: string } }) => Promise<DeviceRow | undefined> } } }
|
||||||
|
},
|
||||||
|
deviceId: string,
|
||||||
|
): Promise<DeviceRow> => {
|
||||||
|
const device = await context.db.query.deviceTable.findFirst({
|
||||||
|
where: { id: deviceId },
|
||||||
|
})
|
||||||
|
if (!device) {
|
||||||
|
throw new ORPCError('NOT_FOUND', { message: 'Device not found' })
|
||||||
|
}
|
||||||
|
return device
|
||||||
|
}
|
||||||
|
|
||||||
|
export const encryptDeviceInfo = os.crypto.encryptDeviceInfo.use(db).handler(async ({ context, input }) => {
|
||||||
|
const device = await getDevice(context, input.deviceId)
|
||||||
|
|
||||||
|
const deviceInfoJson = JSON.stringify({
|
||||||
|
licence: device.licence,
|
||||||
|
fingerprint: device.fingerprint,
|
||||||
|
})
|
||||||
|
|
||||||
|
const encrypted = rsaOaepEncrypt(deviceInfoJson, device.platformPublicKey)
|
||||||
|
|
||||||
|
return { encrypted }
|
||||||
|
})
|
||||||
|
|
||||||
|
export const decryptTask = os.crypto.decryptTask.use(db).handler(async ({ context, input }) => {
|
||||||
|
const device = await getDevice(context, input.deviceId)
|
||||||
|
|
||||||
|
const key = sha256(device.licence + device.fingerprint)
|
||||||
|
const decryptedJson = aesGcmDecrypt(input.encryptedData, key)
|
||||||
|
const taskData = taskPayloadSchema.parse(JSON.parse(decryptedJson))
|
||||||
|
|
||||||
|
return taskData
|
||||||
|
})
|
||||||
|
|
||||||
|
export const encryptSummary = os.crypto.encryptSummary.use(db).handler(async ({ context, input }) => {
|
||||||
|
const device = await getDevice(context, input.deviceId)
|
||||||
|
|
||||||
|
const ikm = device.licence + device.fingerprint
|
||||||
|
const aesKey = hkdfSha256(ikm, input.taskId, 'inspection_report_encryption')
|
||||||
|
|
||||||
|
const timestamp = Date.now()
|
||||||
|
const plaintextJson = JSON.stringify({
|
||||||
|
enterpriseId: input.enterpriseId,
|
||||||
|
inspectionId: input.inspectionId,
|
||||||
|
summary: input.summary,
|
||||||
|
timestamp,
|
||||||
|
})
|
||||||
|
|
||||||
|
const encrypted = aesGcmEncrypt(plaintextJson, aesKey)
|
||||||
|
|
||||||
|
const qrContent = JSON.stringify({
|
||||||
|
taskId: input.taskId,
|
||||||
|
encrypted,
|
||||||
|
})
|
||||||
|
|
||||||
|
return { qrContent }
|
||||||
|
})
|
||||||
|
|
||||||
|
export const signAndPackReport = os.crypto.signAndPackReport.use(db).handler(async ({ context, input }) => {
|
||||||
|
const device = await getDevice(context, input.deviceId)
|
||||||
|
const rawZipBytes = Buffer.from(input.rawZipBase64, 'base64')
|
||||||
|
|
||||||
|
if (rawZipBytes.byteLength === 0 || rawZipBytes.byteLength > MAX_RAW_ZIP_BYTES) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: 'rawZipBase64 is empty or exceeds max size limit',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const rawZip = await JSZip.loadAsync(rawZipBytes, {
|
||||||
|
checkCRC32: true,
|
||||||
|
}).catch(() => {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: 'rawZipBase64 is not a valid zip file',
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const reportFiles = await getRequiredReportFiles(rawZip)
|
||||||
|
|
||||||
|
const ikm = device.licence + device.fingerprint
|
||||||
|
const signingKey = hkdfSha256(ikm, 'AUTH_V3_SALT', 'device_report_signature')
|
||||||
|
|
||||||
|
const assetsHash = sha256Hex(Buffer.from(reportFiles.assets))
|
||||||
|
const vulnerabilitiesHash = sha256Hex(Buffer.from(reportFiles.vulnerabilities))
|
||||||
|
const weakPasswordsHash = sha256Hex(Buffer.from(reportFiles.weakPasswords))
|
||||||
|
const reportHtmlHash = sha256Hex(Buffer.from(reportFiles.reportHtml))
|
||||||
|
|
||||||
|
const signPayload =
|
||||||
|
input.taskId + input.inspectionId + assetsHash + vulnerabilitiesHash + weakPasswordsHash + reportHtmlHash
|
||||||
|
|
||||||
|
const deviceSignature = hmacSha256Base64(signingKey, signPayload)
|
||||||
|
|
||||||
|
if (!device.pgpPrivateKey) {
|
||||||
|
throw new ORPCError('PRECONDITION_FAILED', {
|
||||||
|
message: 'Device does not have a PGP key pair. Re-register the device.',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const summaryObject = {
|
||||||
|
enterpriseId: input.enterpriseId,
|
||||||
|
inspectionId: input.inspectionId,
|
||||||
|
taskId: input.taskId,
|
||||||
|
licence: device.licence,
|
||||||
|
fingerprint: device.fingerprint,
|
||||||
|
deviceSignature,
|
||||||
|
summary: input.summary,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
const summaryBytes = Buffer.from(JSON.stringify(summaryObject), 'utf-8')
|
||||||
|
|
||||||
|
const manifestObject = {
|
||||||
|
files: {
|
||||||
|
'summary.json': sha256Hex(summaryBytes),
|
||||||
|
'assets.json': assetsHash,
|
||||||
|
'vulnerabilities.json': vulnerabilitiesHash,
|
||||||
|
'weakPasswords.json': weakPasswordsHash,
|
||||||
|
[reportFiles.reportHtmlName]: reportHtmlHash,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
const manifestBytes = Buffer.from(JSON.stringify(manifestObject, null, 2), 'utf-8')
|
||||||
|
const signatureAsc = await pgpSignDetached(manifestBytes, device.pgpPrivateKey)
|
||||||
|
|
||||||
|
const signedZip = new JSZip()
|
||||||
|
signedZip.file('summary.json', summaryBytes)
|
||||||
|
signedZip.file('assets.json', reportFiles.assets)
|
||||||
|
signedZip.file('vulnerabilities.json', reportFiles.vulnerabilities)
|
||||||
|
signedZip.file('weakPasswords.json', reportFiles.weakPasswords)
|
||||||
|
signedZip.file(reportFiles.reportHtmlName, reportFiles.reportHtml)
|
||||||
|
signedZip.file('META-INF/manifest.json', manifestBytes)
|
||||||
|
signedZip.file('META-INF/signature.asc', signatureAsc)
|
||||||
|
|
||||||
|
const signedZipBytes = await signedZip.generateAsync({
|
||||||
|
type: 'uint8array',
|
||||||
|
compression: 'DEFLATE',
|
||||||
|
compressionOptions: { level: 9 },
|
||||||
|
})
|
||||||
|
|
||||||
|
const signedZipBase64 = Buffer.from(signedZipBytes).toString('base64')
|
||||||
|
|
||||||
|
return { deviceSignature, signedZipBase64 }
|
||||||
|
})
|
||||||
@@ -0,0 +1,54 @@
|
|||||||
|
import { generatePgpKeyPair } from '@furtherverse/crypto'
|
||||||
|
import { ORPCError } from '@orpc/server'
|
||||||
|
import { deviceTable } from '@/server/db/schema'
|
||||||
|
import { computeDeviceFingerprint } from '@/server/device-fingerprint'
|
||||||
|
import { db } from '../middlewares'
|
||||||
|
import { os } from '../server'
|
||||||
|
|
||||||
|
export const register = os.device.register.use(db).handler(async ({ context, input }) => {
|
||||||
|
const existing = await context.db.query.deviceTable.findFirst({
|
||||||
|
where: { licence: input.licence },
|
||||||
|
})
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
throw new ORPCError('CONFLICT', {
|
||||||
|
message: `Device with licence "${input.licence}" already registered`,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const pgpKeys = await generatePgpKeyPair(input.licence, `${input.licence}@ux.local`)
|
||||||
|
const fingerprint = computeDeviceFingerprint()
|
||||||
|
|
||||||
|
const rows = await context.db
|
||||||
|
.insert(deviceTable)
|
||||||
|
.values({
|
||||||
|
licence: input.licence,
|
||||||
|
fingerprint,
|
||||||
|
platformPublicKey: input.platformPublicKey,
|
||||||
|
pgpPrivateKey: pgpKeys.privateKey,
|
||||||
|
pgpPublicKey: pgpKeys.publicKey,
|
||||||
|
})
|
||||||
|
.returning()
|
||||||
|
|
||||||
|
return rows[0] as (typeof rows)[number]
|
||||||
|
})
|
||||||
|
|
||||||
|
export const get = os.device.get.use(db).handler(async ({ context, input }) => {
|
||||||
|
if (!input.id && !input.licence) {
|
||||||
|
throw new ORPCError('BAD_REQUEST', {
|
||||||
|
message: 'Either id or licence must be provided',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const device = input.id
|
||||||
|
? await context.db.query.deviceTable.findFirst({ where: { id: input.id } })
|
||||||
|
: await context.db.query.deviceTable.findFirst({ where: { licence: input.licence } })
|
||||||
|
|
||||||
|
if (!device) {
|
||||||
|
throw new ORPCError('NOT_FOUND', {
|
||||||
|
message: 'Device not found',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return device
|
||||||
|
})
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
import { os } from '../server'
|
||||||
|
import * as crypto from './crypto.router'
|
||||||
|
import * as device from './device.router'
|
||||||
|
import * as task from './task.router'
|
||||||
|
|
||||||
|
export const router = os.router({
|
||||||
|
device,
|
||||||
|
crypto,
|
||||||
|
task,
|
||||||
|
})
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
import { ORPCError } from '@orpc/server'
|
||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import { taskTable } from '@/server/db/schema'
|
||||||
|
import { db } from '../middlewares'
|
||||||
|
import { os } from '../server'
|
||||||
|
|
||||||
|
export const save = os.task.save.use(db).handler(async ({ context, input }) => {
|
||||||
|
const rows = await context.db
|
||||||
|
.insert(taskTable)
|
||||||
|
.values({
|
||||||
|
deviceId: input.deviceId,
|
||||||
|
taskId: input.taskId,
|
||||||
|
enterpriseId: input.enterpriseId,
|
||||||
|
orgName: input.orgName,
|
||||||
|
inspectionId: input.inspectionId,
|
||||||
|
inspectionPerson: input.inspectionPerson,
|
||||||
|
issuedAt: input.issuedAt ? new Date(input.issuedAt) : null,
|
||||||
|
})
|
||||||
|
.returning()
|
||||||
|
|
||||||
|
return rows[0] as (typeof rows)[number]
|
||||||
|
})
|
||||||
|
|
||||||
|
export const list = os.task.list.use(db).handler(async ({ context, input }) => {
|
||||||
|
return await context.db.query.taskTable.findMany({
|
||||||
|
where: { deviceId: input.deviceId },
|
||||||
|
orderBy: { createdAt: 'desc' },
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
export const updateStatus = os.task.updateStatus.use(db).handler(async ({ context, input }) => {
|
||||||
|
const rows = await context.db
|
||||||
|
.update(taskTable)
|
||||||
|
.set({ status: input.status })
|
||||||
|
.where(eq(taskTable.id, input.id))
|
||||||
|
.returning()
|
||||||
|
|
||||||
|
const updated = rows[0]
|
||||||
|
if (!updated) {
|
||||||
|
throw new ORPCError('NOT_FOUND', { message: 'Task not found' })
|
||||||
|
}
|
||||||
|
|
||||||
|
return updated
|
||||||
|
})
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import type { ContractRouterClient, InferContractRouterInputs, InferContractRouterOutputs } from '@orpc/contract'
|
||||||
|
import type { Contract } from './contracts'
|
||||||
|
|
||||||
|
export type RouterClient = ContractRouterClient<Contract>
|
||||||
|
export type RouterInputs = InferContractRouterInputs<Contract>
|
||||||
|
export type RouterOutputs = InferContractRouterOutputs<Contract>
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
import { integer, text } from 'drizzle-orm/sqlite-core'
|
||||||
|
import { v7 as uuidv7 } from 'uuid'
|
||||||
|
|
||||||
|
export const pk = (name = 'id') =>
|
||||||
|
text(name)
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => uuidv7())
|
||||||
|
|
||||||
|
export const createdAt = (name = 'created_at') =>
|
||||||
|
integer(name, { mode: 'timestamp_ms' })
|
||||||
|
.notNull()
|
||||||
|
.$defaultFn(() => new Date())
|
||||||
|
|
||||||
|
export const updatedAt = (name = 'updated_at') =>
|
||||||
|
integer(name, { mode: 'timestamp_ms' })
|
||||||
|
.notNull()
|
||||||
|
.$defaultFn(() => new Date())
|
||||||
|
.$onUpdateFn(() => new Date())
|
||||||
|
|
||||||
|
export const generatedFields = {
|
||||||
|
id: pk('id'),
|
||||||
|
createdAt: createdAt('created_at'),
|
||||||
|
updatedAt: updatedAt('updated_at'),
|
||||||
|
}
|
||||||
|
|
||||||
|
const createGeneratedFieldKeys = <T extends Record<string, unknown>>(fields: T): Record<keyof T, true> => {
|
||||||
|
return Object.keys(fields).reduce(
|
||||||
|
(acc, key) => {
|
||||||
|
acc[key as keyof T] = true
|
||||||
|
return acc
|
||||||
|
},
|
||||||
|
{} as Record<keyof T, true>,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const generatedFieldKeys = createGeneratedFieldKeys(generatedFields)
|
||||||
@@ -0,0 +1,26 @@
|
|||||||
|
import { Database } from 'bun:sqlite'
|
||||||
|
import { drizzle } from 'drizzle-orm/bun-sqlite'
|
||||||
|
import { env } from '@/env'
|
||||||
|
import { relations } from '@/server/db/relations'
|
||||||
|
|
||||||
|
export const createDB = () => {
|
||||||
|
const sqlite = new Database(env.DATABASE_PATH)
|
||||||
|
sqlite.exec('PRAGMA journal_mode = WAL')
|
||||||
|
sqlite.exec('PRAGMA foreign_keys = ON')
|
||||||
|
return drizzle({ client: sqlite, relations })
|
||||||
|
}
|
||||||
|
|
||||||
|
export type DB = ReturnType<typeof createDB>
|
||||||
|
|
||||||
|
export const getDB = (() => {
|
||||||
|
let db: DB | null = null
|
||||||
|
|
||||||
|
return (singleton = true): DB => {
|
||||||
|
if (!singleton) {
|
||||||
|
return createDB()
|
||||||
|
}
|
||||||
|
|
||||||
|
db ??= createDB()
|
||||||
|
return db
|
||||||
|
}
|
||||||
|
})()
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
import { defineRelations } from 'drizzle-orm'
|
||||||
|
import * as schema from './schema'
|
||||||
|
|
||||||
|
export const relations = defineRelations(schema, (r) => ({
|
||||||
|
deviceTable: {
|
||||||
|
tasks: r.many.taskTable(),
|
||||||
|
},
|
||||||
|
taskTable: {
|
||||||
|
device: r.one.deviceTable({
|
||||||
|
from: r.taskTable.deviceId,
|
||||||
|
to: r.deviceTable.id,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
}))
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
import { sqliteTable, text } from 'drizzle-orm/sqlite-core'
|
||||||
|
import { generatedFields } from '../fields'
|
||||||
|
|
||||||
|
export const deviceTable = sqliteTable('device', {
|
||||||
|
...generatedFields,
|
||||||
|
licence: text('licence').notNull().unique(),
|
||||||
|
fingerprint: text('fingerprint').notNull(),
|
||||||
|
platformPublicKey: text('platform_public_key').notNull(),
|
||||||
|
pgpPrivateKey: text('pgp_private_key'),
|
||||||
|
pgpPublicKey: text('pgp_public_key'),
|
||||||
|
})
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
export * from './device'
|
||||||
|
export * from './task'
|
||||||
@@ -0,0 +1,16 @@
|
|||||||
|
import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'
|
||||||
|
import { generatedFields } from '../fields'
|
||||||
|
|
||||||
|
export const taskTable = sqliteTable('task', {
|
||||||
|
...generatedFields,
|
||||||
|
deviceId: text('device_id').notNull(),
|
||||||
|
taskId: text('task_id').notNull(),
|
||||||
|
enterpriseId: text('enterprise_id'),
|
||||||
|
orgName: text('org_name'),
|
||||||
|
inspectionId: text('inspection_id'),
|
||||||
|
inspectionPerson: text('inspection_person'),
|
||||||
|
issuedAt: integer('issued_at', { mode: 'timestamp_ms' }),
|
||||||
|
status: text('status', { enum: ['pending', 'in_progress', 'done'] })
|
||||||
|
.notNull()
|
||||||
|
.default('pending'),
|
||||||
|
})
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
import { readFileSync } from 'node:fs'
|
||||||
|
import { arch, cpus, networkInterfaces, platform, release, totalmem } from 'node:os'
|
||||||
|
import { sha256Hex } from '@furtherverse/crypto'
|
||||||
|
|
||||||
|
const readMachineId = (): string => {
|
||||||
|
const candidates = ['/etc/machine-id', '/var/lib/dbus/machine-id']
|
||||||
|
|
||||||
|
for (const path of candidates) {
|
||||||
|
try {
|
||||||
|
const value = readFileSync(path, 'utf-8').trim()
|
||||||
|
if (value.length > 0) {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
|
||||||
|
const collectMacAddresses = (): string[] => {
|
||||||
|
const interfaces = networkInterfaces()
|
||||||
|
|
||||||
|
return Object.values(interfaces)
|
||||||
|
.flatMap((group) => group ?? [])
|
||||||
|
.filter((item) => item.mac && item.mac !== '00:00:00:00:00:00' && !item.internal)
|
||||||
|
.map((item) => item.mac)
|
||||||
|
.sort()
|
||||||
|
}
|
||||||
|
|
||||||
|
export const computeDeviceFingerprint = (): string => {
|
||||||
|
const machineId = readMachineId()
|
||||||
|
const firstCpuModel = cpus()[0]?.model ?? 'unknown'
|
||||||
|
const macs = collectMacAddresses().join(',')
|
||||||
|
|
||||||
|
const source = [machineId, platform(), release(), arch(), String(totalmem()), firstCpuModel, macs].join('|')
|
||||||
|
const hash = sha256Hex(source)
|
||||||
|
|
||||||
|
return `FP-${hash.slice(0, 16)}`
|
||||||
|
}
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
@import "tailwindcss";
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"extends": "@furtherverse/tsconfig/react.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"baseUrl": ".",
|
||||||
|
"paths": {
|
||||||
|
"@/*": ["./src/*"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,47 @@
|
|||||||
|
{
|
||||||
|
"$schema": "../../node_modules/turbo/schema.json",
|
||||||
|
"extends": ["//"],
|
||||||
|
"tasks": {
|
||||||
|
"build": {
|
||||||
|
"env": ["NODE_ENV", "VITE_*"],
|
||||||
|
"inputs": ["src/**", "public/**", "package.json", "tsconfig.json", "vite.config.ts"],
|
||||||
|
"outputs": [".output/**"]
|
||||||
|
},
|
||||||
|
"compile": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:darwin": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:darwin:arm64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:darwin:x64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:linux": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:linux:arm64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:linux:x64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:windows": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
},
|
||||||
|
"compile:windows:x64": {
|
||||||
|
"dependsOn": ["build"],
|
||||||
|
"outputs": ["out/**"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,21 +4,27 @@ import { tanstackStart } from '@tanstack/react-start/plugin/vite'
|
|||||||
import react from '@vitejs/plugin-react'
|
import react from '@vitejs/plugin-react'
|
||||||
import { nitro } from 'nitro/vite'
|
import { nitro } from 'nitro/vite'
|
||||||
import { defineConfig } from 'vite'
|
import { defineConfig } from 'vite'
|
||||||
|
import tsconfigPaths from 'vite-tsconfig-paths'
|
||||||
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
clearScreen: false,
|
clearScreen: false,
|
||||||
plugins: [
|
plugins: [
|
||||||
tanstackDevtools(),
|
tanstackDevtools(),
|
||||||
tailwindcss(),
|
tailwindcss(),
|
||||||
|
tsconfigPaths(),
|
||||||
tanstackStart(),
|
tanstackStart(),
|
||||||
react(),
|
react({
|
||||||
|
babel: {
|
||||||
|
plugins: ['babel-plugin-react-compiler'],
|
||||||
|
},
|
||||||
|
}),
|
||||||
nitro({
|
nitro({
|
||||||
preset: 'bun',
|
preset: 'bun',
|
||||||
serveStatic: 'inline',
|
serveStatic: 'inline',
|
||||||
plugins: ['./src/server/plugins/shutdown.ts'],
|
|
||||||
}),
|
}),
|
||||||
],
|
],
|
||||||
resolve: {
|
server: {
|
||||||
tsconfigPaths: true,
|
port: 3000,
|
||||||
|
strictPort: true,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
+1
-20
@@ -6,8 +6,7 @@
|
|||||||
"useIgnoreFile": true
|
"useIgnoreFile": true
|
||||||
},
|
},
|
||||||
"files": {
|
"files": {
|
||||||
"ignoreUnknown": false,
|
"ignoreUnknown": false
|
||||||
"includes": ["**", "!**/routeTree.gen.ts", "!**/migrations.gen.ts"]
|
|
||||||
},
|
},
|
||||||
"formatter": {
|
"formatter": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
@@ -17,11 +16,6 @@
|
|||||||
},
|
},
|
||||||
"linter": {
|
"linter": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
"domains": {
|
|
||||||
"drizzle": "recommended",
|
|
||||||
"react": "recommended",
|
|
||||||
"types": "all"
|
|
||||||
},
|
|
||||||
"rules": {
|
"rules": {
|
||||||
"recommended": true,
|
"recommended": true,
|
||||||
"complexity": {
|
"complexity": {
|
||||||
@@ -29,14 +23,6 @@
|
|||||||
},
|
},
|
||||||
"correctness": {
|
"correctness": {
|
||||||
"noReactPropAssignments": "error"
|
"noReactPropAssignments": "error"
|
||||||
},
|
|
||||||
"style": {
|
|
||||||
"noNonNullAssertion": "error"
|
|
||||||
},
|
|
||||||
"suspicious": {
|
|
||||||
"noExplicitAny": "error",
|
|
||||||
"noImportCycles": "error",
|
|
||||||
"noTsIgnore": "error"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -47,11 +33,6 @@
|
|||||||
"arrowParentheses": "always"
|
"arrowParentheses": "always"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"css": {
|
|
||||||
"parser": {
|
|
||||||
"tailwindDirectives": true
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"assist": {
|
"assist": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
"actions": {
|
"actions": {
|
||||||
|
|||||||
@@ -0,0 +1,2 @@
|
|||||||
|
[install]
|
||||||
|
publicHoistPattern = ["@types/*", "bun-types", "nitro*"]
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
services:
|
|
||||||
migrate:
|
|
||||||
build: .
|
|
||||||
depends_on:
|
|
||||||
db:
|
|
||||||
condition: service_healthy
|
|
||||||
environment:
|
|
||||||
- DATABASE_URL=postgres://postgres:postgres@db:5432/postgres
|
|
||||||
command: ["./server", "migrate"]
|
|
||||||
restart: "no"
|
|
||||||
|
|
||||||
app:
|
|
||||||
build: .
|
|
||||||
depends_on:
|
|
||||||
db:
|
|
||||||
condition: service_healthy
|
|
||||||
migrate:
|
|
||||||
condition: service_completed_successfully
|
|
||||||
ports:
|
|
||||||
- "3000:3000"
|
|
||||||
environment:
|
|
||||||
- DATABASE_URL=postgres://postgres:postgres@db:5432/postgres
|
|
||||||
|
|
||||||
db:
|
|
||||||
image: postgres:18-alpine
|
|
||||||
volumes:
|
|
||||||
- postgres_data:/var/lib/postgresql
|
|
||||||
environment:
|
|
||||||
POSTGRES_USER: postgres
|
|
||||||
POSTGRES_PASSWORD: postgres
|
|
||||||
POSTGRES_DB: postgres
|
|
||||||
healthcheck:
|
|
||||||
test: [ "CMD", "pg_isready", "-U", "postgres" ]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 5
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
postgres_data:
|
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
# UX 授权端接口说明
|
||||||
|
|
||||||
|
本文档描述当前 UX 服务端实现的授权对接接口与职责边界。
|
||||||
|
|
||||||
|
## 1. 职责边界
|
||||||
|
|
||||||
|
- UX **只与工具箱交互**(HTTP RPC),不直接与手机 App 交互。
|
||||||
|
- 手机 App 仅承担扫码和与管理平台联网通信。
|
||||||
|
- 报告签名流程由工具箱上传原始 ZIP 到 UX,UX 返回已签名 ZIP。
|
||||||
|
|
||||||
|
## 2. 设备注册
|
||||||
|
|
||||||
|
`device.register`
|
||||||
|
|
||||||
|
- 输入:`licence`、`platformPublicKey`
|
||||||
|
- UX 在本机采集设备特征并计算 `fingerprint`
|
||||||
|
- UX 将 `licence + fingerprint + 公钥 + PGP 密钥对` 持久化到数据库
|
||||||
|
|
||||||
|
## 3. 核心加密接口
|
||||||
|
|
||||||
|
### 3.1 设备授权二维码密文
|
||||||
|
|
||||||
|
`crypto.encryptDeviceInfo`
|
||||||
|
|
||||||
|
- 使用平台公钥 RSA-OAEP 加密:`{ licence, fingerprint }`
|
||||||
|
- 返回 Base64 密文(工具箱用于生成二维码)
|
||||||
|
|
||||||
|
### 3.2 任务二维码解密
|
||||||
|
|
||||||
|
`crypto.decryptTask`
|
||||||
|
|
||||||
|
- 密钥:`SHA256(licence + fingerprint)`
|
||||||
|
- 算法:AES-256-GCM
|
||||||
|
- 输入:任务二维码中的 Base64 密文
|
||||||
|
- 输出:任务 JSON
|
||||||
|
|
||||||
|
### 3.3 摘要二维码加密
|
||||||
|
|
||||||
|
`crypto.encryptSummary`
|
||||||
|
|
||||||
|
- 密钥派生:HKDF-SHA256
|
||||||
|
- `ikm = licence + fingerprint`
|
||||||
|
- `salt = taskId`
|
||||||
|
- `info = "inspection_report_encryption"`
|
||||||
|
- 算法:AES-256-GCM
|
||||||
|
- 输出:`{ taskId, encrypted }` JSON(工具箱用于生成二维码)
|
||||||
|
|
||||||
|
### 3.4 原始 ZIP 签名打包(最终报告)
|
||||||
|
|
||||||
|
`crypto.signAndPackReport`
|
||||||
|
|
||||||
|
- 输入:`rawZipBase64` + `taskId` + `inspectionId` + `enterpriseId` + `summary`
|
||||||
|
- UX 在服务端完成:
|
||||||
|
1. 校验并解包原始 ZIP
|
||||||
|
2. 计算文件 SHA-256
|
||||||
|
3. HKDF + HMAC 生成 `deviceSignature`
|
||||||
|
4. 生成 `summary.json`
|
||||||
|
5. 生成 `META-INF/manifest.json`
|
||||||
|
6. OpenPGP 分离签名生成 `META-INF/signature.asc`
|
||||||
|
7. 重新打包为 signed ZIP
|
||||||
|
- 输出:`signedZipBase64` 与 `deviceSignature`
|
||||||
|
|
||||||
|
## 4. 安全约束(签名打包)
|
||||||
|
|
||||||
|
- 拒绝危险 ZIP 路径(防 Zip Slip)
|
||||||
|
- 限制原始 ZIP 和单文件大小
|
||||||
|
- 强制存在以下文件:
|
||||||
|
- `assets.json`
|
||||||
|
- `vulnerabilities.json`
|
||||||
|
- `weakPasswords.json`
|
||||||
|
- `漏洞评估报告*.html`
|
||||||
@@ -0,0 +1,644 @@
|
|||||||
|
# 工具箱端 - 任务二维码解密指南
|
||||||
|
|
||||||
|
## 概述
|
||||||
|
|
||||||
|
本文档说明工具箱端如何解密任务二维码数据。App 创建任务后,平台会生成加密的任务数据并返回给 App,App 将其生成二维码。工具箱扫描二维码后,需要使用自己的 `licence` 和 `fingerprint` 解密任务数据。
|
||||||
|
|
||||||
|
> ### UX 集成模式补充(当前项目实现)
|
||||||
|
>
|
||||||
|
> 在当前集成模式中,工具箱扫描二维码后将密文提交给 UX 的 `crypto.decryptTask`,
|
||||||
|
> 由 UX 使用设备绑定的 `licence + fingerprint` 执行 AES-256-GCM 解密并返回任务明文。
|
||||||
|
|
||||||
|
## 一、业务流程
|
||||||
|
|
||||||
|
```
|
||||||
|
App创建任务 → 平台加密任务数据 → 返回加密数据 → App生成二维码
|
||||||
|
↓
|
||||||
|
工具箱扫描二维码 → 提取加密数据 → AES-256-GCM解密 → 获取任务信息
|
||||||
|
```
|
||||||
|
|
||||||
|
## 二、任务数据结构
|
||||||
|
|
||||||
|
### 2.1 任务数据 JSON 格式
|
||||||
|
|
||||||
|
解密后的任务数据为 JSON 格式,包含以下字段:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"taskId": "TASK-20260115-4875",
|
||||||
|
"enterpriseId": "1173040813421105152",
|
||||||
|
"orgName": "超艺科技有限公司",
|
||||||
|
"inspectionId": "702286470691215417",
|
||||||
|
"inspectionPerson": "警务通",
|
||||||
|
"issuedAt": 1734571234567
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.2 字段说明
|
||||||
|
|
||||||
|
| 字段名 | 类型 | 说明 | 示例 |
|
||||||
|
|--------|------|------|------|
|
||||||
|
| `taskId` | String | 任务唯一ID(格式:TASK-YYYYMMDD-XXXX) | `"TASK-20260115-4875"` |
|
||||||
|
| `enterpriseId` | String | 企业ID | `"1173040813421105152"` |
|
||||||
|
| `orgName` | String | 单位名称 | `"超艺科技有限公司"` |
|
||||||
|
| `inspectionId` | String | 检查ID | `"702286470691215417"` |
|
||||||
|
| `inspectionPerson` | String | 检查人 | `"警务通"` |
|
||||||
|
| `issuedAt` | Number | 任务发布时间戳(毫秒) | `1734571234567` |
|
||||||
|
|
||||||
|
## 三、加密算法说明
|
||||||
|
|
||||||
|
### 3.1 加密方式
|
||||||
|
|
||||||
|
- **算法**:AES-256-GCM(Galois/Counter Mode)
|
||||||
|
- **密钥长度**:256 位(32 字节)
|
||||||
|
- **IV 长度**:12 字节(96 位)
|
||||||
|
- **认证标签长度**:16 字节(128 位)
|
||||||
|
|
||||||
|
### 3.2 密钥生成
|
||||||
|
|
||||||
|
密钥由工具箱的 `licence` 和 `fingerprint` 生成:
|
||||||
|
|
||||||
|
```
|
||||||
|
密钥 = SHA-256(licence + fingerprint)
|
||||||
|
```
|
||||||
|
|
||||||
|
**重要说明**:
|
||||||
|
- `licence` 和 `fingerprint` 直接字符串拼接(无分隔符)
|
||||||
|
- 使用 SHA-256 哈希算法的全部 32 字节作为 AES-256 密钥
|
||||||
|
- 工具箱必须使用与平台绑定时相同的 `licence` 和 `fingerprint`
|
||||||
|
|
||||||
|
### 3.3 加密数据格式
|
||||||
|
|
||||||
|
加密后的数据格式(Base64 编码前):
|
||||||
|
|
||||||
|
```
|
||||||
|
[IV(12字节)] + [加密数据] + [认证标签(16字节)]
|
||||||
|
```
|
||||||
|
|
||||||
|
**数据布局**:
|
||||||
|
```
|
||||||
|
+------------------+------------------+------------------+
|
||||||
|
| IV (12字节) | 加密数据 | 认证标签(16字节)|
|
||||||
|
+------------------+------------------+------------------+
|
||||||
|
```
|
||||||
|
|
||||||
|
## 四、解密步骤
|
||||||
|
|
||||||
|
### 4.1 解密流程
|
||||||
|
|
||||||
|
1. **扫描二维码**:获取 Base64 编码的加密数据
|
||||||
|
2. **Base64 解码**:将 Base64 字符串解码为字节数组
|
||||||
|
3. **分离数据**:从字节数组中分离 IV、加密数据和认证标签
|
||||||
|
4. **生成密钥**:使用 `licence + fingerprint` 生成 AES-256 密钥
|
||||||
|
5. **解密数据**:使用 AES-256-GCM 解密(自动验证认证标签)
|
||||||
|
6. **解析 JSON**:将解密后的字符串解析为 JSON 对象
|
||||||
|
|
||||||
|
### 4.2 Python 实现示例
|
||||||
|
|
||||||
|
```python
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import hashlib
|
||||||
|
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
|
||||||
|
def decrypt_task_data(
|
||||||
|
encrypted_data_base64: str,
|
||||||
|
licence: str,
|
||||||
|
fingerprint: str
|
||||||
|
) -> dict:
|
||||||
|
"""
|
||||||
|
解密任务二维码数据
|
||||||
|
|
||||||
|
Args:
|
||||||
|
encrypted_data_base64: Base64编码的加密数据
|
||||||
|
licence: 设备授权码
|
||||||
|
fingerprint: 设备硬件指纹
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
解密后的任务数据(字典)
|
||||||
|
"""
|
||||||
|
# 1. Base64 解码
|
||||||
|
encrypted_bytes = base64.b64decode(encrypted_data_base64)
|
||||||
|
|
||||||
|
# 2. 分离 IV 和加密数据(包含认证标签)
|
||||||
|
if len(encrypted_bytes) < 12:
|
||||||
|
raise ValueError("加密数据格式错误:数据长度不足")
|
||||||
|
|
||||||
|
iv = encrypted_bytes[:12] # IV: 前12字节
|
||||||
|
ciphertext_with_tag = encrypted_bytes[12:] # 加密数据 + 认证标签
|
||||||
|
|
||||||
|
# 3. 生成密钥:SHA-256(licence + fingerprint)
|
||||||
|
combined = licence + fingerprint
|
||||||
|
key = hashlib.sha256(combined.encode('utf-8')).digest()
|
||||||
|
|
||||||
|
# 4. 使用 AES-256-GCM 解密
|
||||||
|
aesgcm = AESGCM(key)
|
||||||
|
decrypted_bytes = aesgcm.decrypt(iv, ciphertext_with_tag, None)
|
||||||
|
|
||||||
|
# 5. 解析 JSON
|
||||||
|
decrypted_json = decrypted_bytes.decode('utf-8')
|
||||||
|
task_data = json.loads(decrypted_json)
|
||||||
|
|
||||||
|
return task_data
|
||||||
|
|
||||||
|
# 使用示例
|
||||||
|
if __name__ == "__main__":
|
||||||
|
# 从二维码扫描获取的加密数据
|
||||||
|
encrypted_data = "Base64编码的加密数据..."
|
||||||
|
|
||||||
|
# 工具箱的授权信息(必须与平台绑定时一致)
|
||||||
|
licence = "LIC-8F2A-XXXX"
|
||||||
|
fingerprint = "FP-2c91e9f3"
|
||||||
|
|
||||||
|
# 解密任务数据
|
||||||
|
task_data = decrypt_task_data(encrypted_data, licence, fingerprint)
|
||||||
|
|
||||||
|
print("任务ID:", task_data["taskId"])
|
||||||
|
print("企业ID:", task_data["enterpriseId"])
|
||||||
|
print("单位名称:", task_data["orgName"])
|
||||||
|
print("检查ID:", task_data["inspectionId"])
|
||||||
|
print("检查人:", task_data["inspectionPerson"])
|
||||||
|
print("发布时间:", task_data["issuedAt"])
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.3 Java/Kotlin 实现示例
|
||||||
|
|
||||||
|
```kotlin
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper
|
||||||
|
import java.nio.charset.StandardCharsets
|
||||||
|
import java.security.MessageDigest
|
||||||
|
import java.util.Base64
|
||||||
|
import javax.crypto.Cipher
|
||||||
|
import javax.crypto.spec.GCMParameterSpec
|
||||||
|
import javax.crypto.spec.SecretKeySpec
|
||||||
|
|
||||||
|
object TaskDecryptionUtil {
|
||||||
|
|
||||||
|
private const val ALGORITHM = "AES"
|
||||||
|
private const val TRANSFORMATION = "AES/GCM/NoPadding"
|
||||||
|
private const val GCM_IV_LENGTH = 12 // GCM 推荐使用 12 字节 IV
|
||||||
|
private const val GCM_TAG_LENGTH = 16 // GCM 认证标签长度(128位)
|
||||||
|
private const val KEY_LENGTH = 32 // AES-256 密钥长度(256位 = 32字节)
|
||||||
|
|
||||||
|
private val objectMapper = ObjectMapper()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 解密任务二维码数据
|
||||||
|
*
|
||||||
|
* @param encryptedDataBase64 Base64编码的加密数据
|
||||||
|
* @param licence 设备授权码
|
||||||
|
* @param fingerprint 设备硬件指纹
|
||||||
|
* @return 解密后的任务数据(Map)
|
||||||
|
*/
|
||||||
|
fun decryptTaskData(
|
||||||
|
encryptedDataBase64: String,
|
||||||
|
licence: String,
|
||||||
|
fingerprint: String
|
||||||
|
): Map<String, Any> {
|
||||||
|
// 1. Base64 解码
|
||||||
|
val encryptedBytes = Base64.getDecoder().decode(encryptedDataBase64)
|
||||||
|
|
||||||
|
// 2. 分离 IV 和加密数据(包含认证标签)
|
||||||
|
if (encryptedBytes.size < GCM_IV_LENGTH) {
|
||||||
|
throw IllegalArgumentException("加密数据格式错误:数据长度不足")
|
||||||
|
}
|
||||||
|
|
||||||
|
val iv = encryptedBytes.sliceArray(0 until GCM_IV_LENGTH)
|
||||||
|
val ciphertextWithTag = encryptedBytes.sliceArray(GCM_IV_LENGTH until encryptedBytes.size)
|
||||||
|
|
||||||
|
// 3. 生成密钥:SHA-256(licence + fingerprint)
|
||||||
|
val combined = "$licence$fingerprint"
|
||||||
|
val digest = MessageDigest.getInstance("SHA-256")
|
||||||
|
val keyBytes = digest.digest(combined.toByteArray(StandardCharsets.UTF_8))
|
||||||
|
val key = SecretKeySpec(keyBytes, ALGORITHM)
|
||||||
|
|
||||||
|
// 4. 使用 AES-256-GCM 解密
|
||||||
|
val cipher = Cipher.getInstance(TRANSFORMATION)
|
||||||
|
val parameterSpec = GCMParameterSpec(GCM_TAG_LENGTH * 8, iv) // 标签长度以位为单位
|
||||||
|
cipher.init(Cipher.DECRYPT_MODE, key, parameterSpec)
|
||||||
|
|
||||||
|
// 解密数据(GCM 会自动验证认证标签)
|
||||||
|
val decryptedBytes = cipher.doFinal(ciphertextWithTag)
|
||||||
|
|
||||||
|
// 5. 解析 JSON
|
||||||
|
val decryptedJson = String(decryptedBytes, StandardCharsets.UTF_8)
|
||||||
|
@Suppress("UNCHECKED_CAST")
|
||||||
|
return objectMapper.readValue(decryptedJson, Map::class.java) as Map<String, Any>
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 使用示例
|
||||||
|
fun main() {
|
||||||
|
// 从二维码扫描获取的加密数据
|
||||||
|
val encryptedData = "Base64编码的加密数据..."
|
||||||
|
|
||||||
|
// 工具箱的授权信息(必须与平台绑定时一致)
|
||||||
|
val licence = "LIC-8F2A-XXXX"
|
||||||
|
val fingerprint = "FP-2c91e9f3"
|
||||||
|
|
||||||
|
// 解密任务数据
|
||||||
|
val taskData = TaskDecryptionUtil.decryptTaskData(encryptedData, licence, fingerprint)
|
||||||
|
|
||||||
|
println("任务ID: ${taskData["taskId"]}")
|
||||||
|
println("企业ID: ${taskData["enterpriseId"]}")
|
||||||
|
println("单位名称: ${taskData["orgName"]}")
|
||||||
|
println("检查ID: ${taskData["inspectionId"]}")
|
||||||
|
println("检查人: ${taskData["inspectionPerson"]}")
|
||||||
|
println("发布时间: ${taskData["issuedAt"]}")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.4 C# 实现示例
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
using System;
|
||||||
|
using System.Security.Cryptography;
|
||||||
|
using System.Text;
|
||||||
|
using System.Text.Json;
|
||||||
|
|
||||||
|
public class TaskDecryptionUtil
|
||||||
|
{
|
||||||
|
private const int GcmIvLength = 12; // GCM 推荐使用 12 字节 IV
|
||||||
|
private const int GcmTagLength = 16; // GCM 认证标签长度(128位)
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// 解密任务二维码数据
|
||||||
|
/// </summary>
|
||||||
|
public static Dictionary<string, object> DecryptTaskData(
|
||||||
|
string encryptedDataBase64,
|
||||||
|
string licence,
|
||||||
|
string fingerprint
|
||||||
|
)
|
||||||
|
{
|
||||||
|
// 1. Base64 解码
|
||||||
|
byte[] encryptedBytes = Convert.FromBase64String(encryptedDataBase64);
|
||||||
|
|
||||||
|
// 2. 分离 IV 和加密数据(包含认证标签)
|
||||||
|
if (encryptedBytes.Length < GcmIvLength)
|
||||||
|
{
|
||||||
|
throw new ArgumentException("加密数据格式错误:数据长度不足");
|
||||||
|
}
|
||||||
|
|
||||||
|
byte[] iv = new byte[GcmIvLength];
|
||||||
|
Array.Copy(encryptedBytes, 0, iv, 0, GcmIvLength);
|
||||||
|
|
||||||
|
byte[] ciphertextWithTag = new byte[encryptedBytes.Length - GcmIvLength];
|
||||||
|
Array.Copy(encryptedBytes, GcmIvLength, ciphertextWithTag, 0, ciphertextWithTag.Length);
|
||||||
|
|
||||||
|
// 3. 生成密钥:SHA-256(licence + fingerprint)
|
||||||
|
string combined = licence + fingerprint;
|
||||||
|
byte[] keyBytes = SHA256.Create().ComputeHash(Encoding.UTF8.GetBytes(combined));
|
||||||
|
|
||||||
|
// 4. 使用 AES-256-GCM 解密
|
||||||
|
using (AesGcm aesGcm = new AesGcm(keyBytes))
|
||||||
|
{
|
||||||
|
byte[] decryptedBytes = new byte[ciphertextWithTag.Length - GcmTagLength];
|
||||||
|
byte[] tag = new byte[GcmTagLength];
|
||||||
|
Array.Copy(ciphertextWithTag, ciphertextWithTag.Length - GcmTagLength, tag, 0, GcmTagLength);
|
||||||
|
Array.Copy(ciphertextWithTag, 0, decryptedBytes, 0, decryptedBytes.Length);
|
||||||
|
|
||||||
|
aesGcm.Decrypt(iv, decryptedBytes, tag, null, decryptedBytes);
|
||||||
|
|
||||||
|
// 5. 解析 JSON
|
||||||
|
string decryptedJson = Encoding.UTF8.GetString(decryptedBytes);
|
||||||
|
return JsonSerializer.Deserialize<Dictionary<string, object>>(decryptedJson);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 使用示例
|
||||||
|
class Program
|
||||||
|
{
|
||||||
|
static void Main()
|
||||||
|
{
|
||||||
|
// 从二维码扫描获取的加密数据
|
||||||
|
string encryptedData = "Base64编码的加密数据...";
|
||||||
|
|
||||||
|
// 工具箱的授权信息(必须与平台绑定时一致)
|
||||||
|
string licence = "LIC-8F2A-XXXX";
|
||||||
|
string fingerprint = "FP-2c91e9f3";
|
||||||
|
|
||||||
|
// 解密任务数据
|
||||||
|
var taskData = TaskDecryptionUtil.DecryptTaskData(encryptedData, licence, fingerprint);
|
||||||
|
|
||||||
|
Console.WriteLine($"任务ID: {taskData["taskId"]}");
|
||||||
|
Console.WriteLine($"企业ID: {taskData["enterpriseId"]}");
|
||||||
|
Console.WriteLine($"单位名称: {taskData["orgName"]}");
|
||||||
|
Console.WriteLine($"检查ID: {taskData["inspectionId"]}");
|
||||||
|
Console.WriteLine($"检查人: {taskData["inspectionPerson"]}");
|
||||||
|
Console.WriteLine($"发布时间: {taskData["issuedAt"]}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 五、完整流程示例
|
||||||
|
|
||||||
|
### 5.1 Python 完整示例(包含二维码扫描)
|
||||||
|
|
||||||
|
```python
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import hashlib
|
||||||
|
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||||
|
from pyzbar import pyzbar
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
class TaskQRCodeDecoder:
|
||||||
|
"""任务二维码解码器"""
|
||||||
|
|
||||||
|
def __init__(self, licence: str, fingerprint: str):
|
||||||
|
"""
|
||||||
|
初始化解码器
|
||||||
|
|
||||||
|
Args:
|
||||||
|
licence: 设备授权码
|
||||||
|
fingerprint: 设备硬件指纹
|
||||||
|
"""
|
||||||
|
self.licence = licence
|
||||||
|
self.fingerprint = fingerprint
|
||||||
|
self._key = self._generate_key()
|
||||||
|
|
||||||
|
def _generate_key(self) -> bytes:
|
||||||
|
"""生成 AES-256 密钥"""
|
||||||
|
combined = self.licence + self.fingerprint
|
||||||
|
return hashlib.sha256(combined.encode('utf-8')).digest()
|
||||||
|
|
||||||
|
def scan_qr_code(self, qr_image_path: str) -> dict:
|
||||||
|
"""
|
||||||
|
扫描二维码并解密任务数据
|
||||||
|
|
||||||
|
Args:
|
||||||
|
qr_image_path: 二维码图片路径
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
解密后的任务数据(字典)
|
||||||
|
"""
|
||||||
|
# 1. 扫描二维码
|
||||||
|
image = Image.open(qr_image_path)
|
||||||
|
qr_codes = pyzbar.decode(image)
|
||||||
|
|
||||||
|
if not qr_codes:
|
||||||
|
raise ValueError("未找到二维码")
|
||||||
|
|
||||||
|
# 获取二维码内容(Base64编码的加密数据)
|
||||||
|
encrypted_data_base64 = qr_codes[0].data.decode('utf-8')
|
||||||
|
print(f"扫描到二维码内容: {encrypted_data_base64[:50]}...")
|
||||||
|
|
||||||
|
# 2. 解密任务数据
|
||||||
|
return self.decrypt_task_data(encrypted_data_base64)
|
||||||
|
|
||||||
|
def decrypt_task_data(self, encrypted_data_base64: str) -> dict:
|
||||||
|
"""
|
||||||
|
解密任务数据
|
||||||
|
|
||||||
|
Args:
|
||||||
|
encrypted_data_base64: Base64编码的加密数据
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
解密后的任务数据(字典)
|
||||||
|
"""
|
||||||
|
# 1. Base64 解码
|
||||||
|
encrypted_bytes = base64.b64decode(encrypted_data_base64)
|
||||||
|
|
||||||
|
# 2. 分离 IV 和加密数据(包含认证标签)
|
||||||
|
if len(encrypted_bytes) < 12:
|
||||||
|
raise ValueError("加密数据格式错误:数据长度不足")
|
||||||
|
|
||||||
|
iv = encrypted_bytes[:12] # IV: 前12字节
|
||||||
|
ciphertext_with_tag = encrypted_bytes[12:] # 加密数据 + 认证标签
|
||||||
|
|
||||||
|
# 3. 使用 AES-256-GCM 解密
|
||||||
|
aesgcm = AESGCM(self._key)
|
||||||
|
decrypted_bytes = aesgcm.decrypt(iv, ciphertext_with_tag, None)
|
||||||
|
|
||||||
|
# 4. 解析 JSON
|
||||||
|
decrypted_json = decrypted_bytes.decode('utf-8')
|
||||||
|
task_data = json.loads(decrypted_json)
|
||||||
|
|
||||||
|
return task_data
|
||||||
|
|
||||||
|
# 使用示例
|
||||||
|
if __name__ == "__main__":
|
||||||
|
# 工具箱的授权信息(必须与平台绑定时一致)
|
||||||
|
licence = "LIC-8F2A-XXXX"
|
||||||
|
fingerprint = "FP-2c91e9f3"
|
||||||
|
|
||||||
|
# 创建解码器
|
||||||
|
decoder = TaskQRCodeDecoder(licence, fingerprint)
|
||||||
|
|
||||||
|
# 扫描二维码并解密
|
||||||
|
try:
|
||||||
|
task_data = decoder.scan_qr_code("task_qr_code.png")
|
||||||
|
|
||||||
|
print("\n=== 任务信息 ===")
|
||||||
|
print(f"任务ID: {task_data['taskId']}")
|
||||||
|
print(f"企业ID: {task_data['enterpriseId']}")
|
||||||
|
print(f"单位名称: {task_data['orgName']}")
|
||||||
|
print(f"检查ID: {task_data['inspectionId']}")
|
||||||
|
print(f"检查人: {task_data['inspectionPerson']}")
|
||||||
|
print(f"发布时间: {task_data['issuedAt']}")
|
||||||
|
|
||||||
|
# 可以使用任务信息执行检查任务
|
||||||
|
# execute_inspection_task(task_data)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"解密失败: {e}")
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.2 Java/Kotlin 完整示例(包含二维码扫描)
|
||||||
|
|
||||||
|
```kotlin
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper
|
||||||
|
import com.google.zxing.BinaryBitmap
|
||||||
|
import com.google.zxing.MultiFormatReader
|
||||||
|
import com.google.zxing.Result
|
||||||
|
import com.google.zxing.client.j2se.BufferedImageLuminanceSource
|
||||||
|
import com.google.zxing.common.HybridBinarizer
|
||||||
|
import java.awt.image.BufferedImage
|
||||||
|
import java.io.File
|
||||||
|
import java.nio.charset.StandardCharsets
|
||||||
|
import java.security.MessageDigest
|
||||||
|
import java.util.Base64
|
||||||
|
import javax.crypto.Cipher
|
||||||
|
import javax.crypto.spec.GCMParameterSpec
|
||||||
|
import javax.crypto.spec.SecretKeySpec
|
||||||
|
import javax.imageio.ImageIO
|
||||||
|
|
||||||
|
class TaskQRCodeDecoder(
|
||||||
|
private val licence: String,
|
||||||
|
private val fingerprint: String
|
||||||
|
) {
|
||||||
|
|
||||||
|
private val key: SecretKeySpec by lazy {
|
||||||
|
val combined = "$licence$fingerprint"
|
||||||
|
val digest = MessageDigest.getInstance("SHA-256")
|
||||||
|
val keyBytes = digest.digest(combined.toByteArray(StandardCharsets.UTF_8))
|
||||||
|
SecretKeySpec(keyBytes, "AES")
|
||||||
|
}
|
||||||
|
|
||||||
|
private val objectMapper = ObjectMapper()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 扫描二维码并解密任务数据
|
||||||
|
*/
|
||||||
|
fun scanAndDecrypt(qrImagePath: String): Map<String, Any> {
|
||||||
|
// 1. 扫描二维码
|
||||||
|
val image: BufferedImage = ImageIO.read(File(qrImagePath))
|
||||||
|
val source = BufferedImageLuminanceSource(image)
|
||||||
|
val bitmap = BinaryBitmap(HybridBinarizer(source))
|
||||||
|
val reader = MultiFormatReader()
|
||||||
|
val result: Result = reader.decode(bitmap)
|
||||||
|
|
||||||
|
// 获取二维码内容(Base64编码的加密数据)
|
||||||
|
val encryptedDataBase64 = result.text
|
||||||
|
println("扫描到二维码内容: ${encryptedDataBase64.take(50)}...")
|
||||||
|
|
||||||
|
// 2. 解密任务数据
|
||||||
|
return decryptTaskData(encryptedDataBase64)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 解密任务数据
|
||||||
|
*/
|
||||||
|
fun decryptTaskData(encryptedDataBase64: String): Map<String, Any> {
|
||||||
|
// 1. Base64 解码
|
||||||
|
val encryptedBytes = Base64.getDecoder().decode(encryptedDataBase64)
|
||||||
|
|
||||||
|
// 2. 分离 IV 和加密数据(包含认证标签)
|
||||||
|
if (encryptedBytes.size < 12) {
|
||||||
|
throw IllegalArgumentException("加密数据格式错误:数据长度不足")
|
||||||
|
}
|
||||||
|
|
||||||
|
val iv = encryptedBytes.sliceArray(0 until 12)
|
||||||
|
val ciphertextWithTag = encryptedBytes.sliceArray(12 until encryptedBytes.size)
|
||||||
|
|
||||||
|
// 3. 使用 AES-256-GCM 解密
|
||||||
|
val cipher = Cipher.getInstance("AES/GCM/NoPadding")
|
||||||
|
val parameterSpec = GCMParameterSpec(16 * 8, iv) // 标签长度以位为单位
|
||||||
|
cipher.init(Cipher.DECRYPT_MODE, key, parameterSpec)
|
||||||
|
|
||||||
|
// 解密数据(GCM 会自动验证认证标签)
|
||||||
|
val decryptedBytes = cipher.doFinal(ciphertextWithTag)
|
||||||
|
|
||||||
|
// 4. 解析 JSON
|
||||||
|
val decryptedJson = String(decryptedBytes, StandardCharsets.UTF_8)
|
||||||
|
@Suppress("UNCHECKED_CAST")
|
||||||
|
return objectMapper.readValue(decryptedJson, Map::class.java) as Map<String, Any>
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 使用示例
|
||||||
|
fun main() {
|
||||||
|
// 工具箱的授权信息(必须与平台绑定时一致)
|
||||||
|
val licence = "LIC-8F2A-XXXX"
|
||||||
|
val fingerprint = "FP-2c91e9f3"
|
||||||
|
|
||||||
|
// 创建解码器
|
||||||
|
val decoder = TaskQRCodeDecoder(licence, fingerprint)
|
||||||
|
|
||||||
|
// 扫描二维码并解密
|
||||||
|
try {
|
||||||
|
val taskData = decoder.scanAndDecrypt("task_qr_code.png")
|
||||||
|
|
||||||
|
println("\n=== 任务信息 ===")
|
||||||
|
println("任务ID: ${taskData["taskId"]}")
|
||||||
|
println("企业ID: ${taskData["enterpriseId"]}")
|
||||||
|
println("单位名称: ${taskData["orgName"]}")
|
||||||
|
println("检查ID: ${taskData["inspectionId"]}")
|
||||||
|
println("检查人: ${taskData["inspectionPerson"]}")
|
||||||
|
println("发布时间: ${taskData["issuedAt"]}")
|
||||||
|
|
||||||
|
// 可以使用任务信息执行检查任务
|
||||||
|
// executeInspectionTask(taskData)
|
||||||
|
|
||||||
|
} catch (e: Exception) {
|
||||||
|
println("解密失败: ${e.message}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 六、常见错误和注意事项
|
||||||
|
|
||||||
|
### 6.1 解密失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **密钥不匹配**:`licence` 或 `fingerprint` 与平台绑定时不一致
|
||||||
|
- 确保使用与设备授权时相同的 `licence` 和 `fingerprint`
|
||||||
|
- 检查字符串拼接是否正确(无分隔符)
|
||||||
|
|
||||||
|
2. **数据格式错误**:Base64 编码或数据布局错误
|
||||||
|
- 确保 Base64 解码正确
|
||||||
|
- 确保 IV 长度正确(12 字节)
|
||||||
|
|
||||||
|
3. **认证标签验证失败**:数据被篡改或损坏
|
||||||
|
- GCM 模式会自动验证认证标签
|
||||||
|
- 如果验证失败,说明数据被篡改或密钥错误
|
||||||
|
|
||||||
|
4. **算法不匹配**:必须使用 `AES/GCM/NoPadding`
|
||||||
|
- 确保使用正确的加密算法
|
||||||
|
- 确保认证标签长度为 128 位(16 字节)
|
||||||
|
|
||||||
|
### 6.2 二维码扫描失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **二维码图片质量差**:确保图片清晰,有足够的对比度
|
||||||
|
2. **二维码内容过长**:如果加密数据过长,可能需要更高版本的二维码
|
||||||
|
3. **扫描库不支持**:确保使用支持 Base64 字符串的二维码扫描库
|
||||||
|
|
||||||
|
### 6.3 JSON 解析失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **字符编码错误**:确保使用 UTF-8 编码
|
||||||
|
2. **JSON 格式错误**:确保解密后的字符串是有效的 JSON
|
||||||
|
3. **字段缺失**:确保所有必需字段都存在
|
||||||
|
|
||||||
|
## 七、安全设计说明
|
||||||
|
|
||||||
|
### 7.1 为什么使用 AES-256-GCM
|
||||||
|
|
||||||
|
1. **认证加密(AEAD)**:GCM 模式提供加密和认证,防止数据被篡改
|
||||||
|
2. **强安全性**:AES-256 提供 256 位密钥强度
|
||||||
|
3. **自动验证**:GCM 模式会自动验证认证标签,任何篡改都会导致解密失败
|
||||||
|
|
||||||
|
### 7.2 为什么第三方无法解密
|
||||||
|
|
||||||
|
1. **密钥绑定**:只有拥有正确 `licence + fingerprint` 的工具箱才能生成正确的密钥
|
||||||
|
2. **认证标签**:GCM 模式会验证认证标签,任何篡改都会导致解密失败
|
||||||
|
3. **密钥唯一性**:每个设备的 `licence + fingerprint` 组合是唯一的
|
||||||
|
|
||||||
|
### 7.3 密钥生成的安全性
|
||||||
|
|
||||||
|
1. **SHA-256 哈希**:使用强哈希算法生成密钥
|
||||||
|
2. **密钥长度**:使用全部 32 字节作为 AES-256 密钥
|
||||||
|
3. **密钥隔离**:每个设备的密钥是独立的,互不影响
|
||||||
|
|
||||||
|
## 八、测试建议
|
||||||
|
|
||||||
|
1. **单元测试**:
|
||||||
|
- 测试密钥生成是否正确
|
||||||
|
- 测试解密功能是否正常
|
||||||
|
- 测试 JSON 解析是否正确
|
||||||
|
|
||||||
|
2. **集成测试**:
|
||||||
|
- 使用真实平台生成的二维码进行测试
|
||||||
|
- 测试不同长度的任务数据
|
||||||
|
- 测试错误的密钥是否会导致解密失败
|
||||||
|
|
||||||
|
3. **边界测试**:
|
||||||
|
- 测试超长的任务数据
|
||||||
|
- 测试特殊字符的处理
|
||||||
|
- 测试错误的 Base64 格式
|
||||||
|
|
||||||
|
## 九、参考实现
|
||||||
|
|
||||||
|
- **Python**:`cryptography` 库(AES-GCM 加密)、`pyzbar` 库(二维码扫描)
|
||||||
|
- **Java/Kotlin**:JDK `javax.crypto`(AES-GCM 加密)、ZXing 库(二维码扫描)
|
||||||
|
- **C#**:`System.Security.Cryptography`(AES-GCM 加密)、ZXing.Net 库(二维码扫描)
|
||||||
|
|
||||||
|
## 十、联系支持
|
||||||
|
|
||||||
|
如有问题,请联系平台技术支持团队获取:
|
||||||
|
- 测试环境地址
|
||||||
|
- 技术支持
|
||||||
|
|
||||||
@@ -0,0 +1,646 @@
|
|||||||
|
# 工具箱端 - 报告加密与签名生成指南
|
||||||
|
|
||||||
|
## 概述
|
||||||
|
|
||||||
|
本文档说明工具箱端如何生成加密和签名的检查报告 ZIP 文件,以确保:
|
||||||
|
1. **授权校验**:只有合法授权的工具箱才能生成有效的报告
|
||||||
|
2. **防篡改校验**:确保报告内容在传输过程中未被篡改
|
||||||
|
|
||||||
|
> ### UX 集成模式补充(当前项目实现)
|
||||||
|
>
|
||||||
|
> 在当前集成模式中,工具箱可将原始报告 ZIP 直接上传到 UX 的 `crypto.signAndPackReport`:
|
||||||
|
>
|
||||||
|
> 1. UX 校验 ZIP 并提取必需文件;
|
||||||
|
> 2. UX 生成 `deviceSignature`、`summary.json`、`META-INF/manifest.json`、`META-INF/signature.asc`;
|
||||||
|
> 3. UX 重新打包并返回签名后的 ZIP(Base64),工具箱再用于离线介质回传平台。
|
||||||
|
|
||||||
|
## 一、ZIP 文件结构要求
|
||||||
|
|
||||||
|
工具箱生成的 ZIP 文件必须包含以下文件:
|
||||||
|
|
||||||
|
```
|
||||||
|
report.zip
|
||||||
|
├── summary.json # 摘要信息(必须包含授权和签名字段)
|
||||||
|
├── assets.json # 资产信息(用于签名校验)
|
||||||
|
├── vulnerabilities.json # 漏洞信息(用于签名校验)
|
||||||
|
├── weakPasswords.json # 弱密码信息(用于签名校验)
|
||||||
|
├── 漏洞评估报告.html # 漏洞评估报告(用于签名校验)
|
||||||
|
└── META-INF/
|
||||||
|
├── manifest.json # 文件清单(用于 OpenPGP 签名)
|
||||||
|
└── signature.asc # OpenPGP 签名文件(防篡改)
|
||||||
|
```
|
||||||
|
|
||||||
|
## 二、授权校验 - 设备签名(device_signature)
|
||||||
|
|
||||||
|
### 2.1 目的
|
||||||
|
|
||||||
|
设备签名用于验证报告是由合法授权的工具箱生成的,防止第三方伪造扫描结果。
|
||||||
|
|
||||||
|
### 2.2 密钥派生
|
||||||
|
|
||||||
|
使用 **HKDF-SHA256** 从设备的 `licence` 和 `fingerprint` 派生签名密钥:
|
||||||
|
|
||||||
|
```
|
||||||
|
K = HKDF(
|
||||||
|
input = licence + fingerprint, # 输入密钥材料(字符串拼接)
|
||||||
|
salt = "AUTH_V3_SALT", # 固定盐值
|
||||||
|
info = "device_report_signature", # 固定信息参数
|
||||||
|
hash = SHA-256, # 哈希算法
|
||||||
|
length = 32 # 输出密钥长度(32字节 = 256位)
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**伪代码示例**:
|
||||||
|
```python
|
||||||
|
import hkdf
|
||||||
|
|
||||||
|
# 输入密钥材料
|
||||||
|
ikm = licence + fingerprint # 字符串直接拼接
|
||||||
|
|
||||||
|
# HKDF 参数
|
||||||
|
salt = "AUTH_V3_SALT"
|
||||||
|
info = "device_report_signature"
|
||||||
|
key_length = 32 # 32字节 = 256位
|
||||||
|
|
||||||
|
# 派生密钥
|
||||||
|
derived_key = hkdf.HKDF(
|
||||||
|
algorithm=hashlib.sha256,
|
||||||
|
length=key_length,
|
||||||
|
salt=salt.encode('utf-8'),
|
||||||
|
info=info.encode('utf-8'),
|
||||||
|
ikm=ikm.encode('utf-8')
|
||||||
|
).derive()
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.3 签名数据组装(严格顺序)
|
||||||
|
|
||||||
|
签名数据必须按照以下**严格顺序**组装:
|
||||||
|
|
||||||
|
```
|
||||||
|
sign_payload =
|
||||||
|
taskId + # 任务ID(字符串)
|
||||||
|
inspectionId + # 检查ID(数字转字符串)
|
||||||
|
SHA256(assets.json) + # assets.json 的 SHA256(hex字符串,小写)
|
||||||
|
SHA256(vulnerabilities.json) + # vulnerabilities.json 的 SHA256(hex字符串,小写)
|
||||||
|
SHA256(weakPasswords.json) + # weakPasswords.json 的 SHA256(hex字符串,小写)
|
||||||
|
SHA256(漏洞评估报告.html) # 漏洞评估报告.html 的 SHA256(hex字符串,小写)
|
||||||
|
```
|
||||||
|
|
||||||
|
**重要说明**:
|
||||||
|
- 所有字符串直接拼接,**不添加任何分隔符**
|
||||||
|
- SHA256 哈希值必须是 **hex 字符串(小写)**,例如:`a1b2c3d4...`
|
||||||
|
- 文件内容必须是**原始字节**,不能进行任何编码转换
|
||||||
|
- 顺序必须严格一致,任何顺序错误都会导致签名验证失败
|
||||||
|
|
||||||
|
**伪代码示例**:
|
||||||
|
```python
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
# 1. 读取文件内容(原始字节)
|
||||||
|
assets_content = read_file("assets.json")
|
||||||
|
vulnerabilities_content = read_file("vulnerabilities.json")
|
||||||
|
weak_passwords_content = read_file("weakPasswords.json")
|
||||||
|
report_html_content = read_file("漏洞评估报告.html")
|
||||||
|
|
||||||
|
# 2. 计算 SHA256(hex字符串,小写)
|
||||||
|
def sha256_hex(content: bytes) -> str:
|
||||||
|
return hashlib.sha256(content).hexdigest()
|
||||||
|
|
||||||
|
assets_sha256 = sha256_hex(assets_content)
|
||||||
|
vulnerabilities_sha256 = sha256_hex(vulnerabilities_content)
|
||||||
|
weak_passwords_sha256 = sha256_hex(weak_passwords_content)
|
||||||
|
report_html_sha256 = sha256_hex(report_html_content)
|
||||||
|
|
||||||
|
# 3. 组装签名数据(严格顺序,直接拼接)
|
||||||
|
sign_payload = (
|
||||||
|
str(task_id) +
|
||||||
|
str(inspection_id) +
|
||||||
|
assets_sha256 +
|
||||||
|
vulnerabilities_sha256 +
|
||||||
|
weak_passwords_sha256 +
|
||||||
|
report_html_sha256
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.4 生成设备签名
|
||||||
|
|
||||||
|
使用 **HMAC-SHA256** 计算签名:
|
||||||
|
|
||||||
|
```
|
||||||
|
device_signature = Base64(HMAC-SHA256(key=K, data=sign_payload))
|
||||||
|
```
|
||||||
|
|
||||||
|
**伪代码示例**:
|
||||||
|
```python
|
||||||
|
import hmac
|
||||||
|
import base64
|
||||||
|
|
||||||
|
# 使用派生密钥计算 HMAC-SHA256
|
||||||
|
mac = hmac.new(
|
||||||
|
key=derived_key, # 派生密钥(32字节)
|
||||||
|
msg=sign_payload.encode('utf-8'), # 签名数据(UTF-8编码)
|
||||||
|
digestmod=hashlib.sha256
|
||||||
|
)
|
||||||
|
|
||||||
|
# 计算签名
|
||||||
|
signature_bytes = mac.digest()
|
||||||
|
|
||||||
|
# Base64 编码
|
||||||
|
device_signature = base64.b64encode(signature_bytes).decode('utf-8')
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.5 写入 summary.json
|
||||||
|
|
||||||
|
将 `device_signature` 写入 `summary.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"orgId": 1173040813421105152,
|
||||||
|
"checkId": 702286470691215417,
|
||||||
|
"taskId": "TASK-20260115-4875",
|
||||||
|
"licence": "LIC-8F2A-XXXX",
|
||||||
|
"fingerprint": "FP-2c91e9f3",
|
||||||
|
"deviceSignature": "Base64编码的签名值",
|
||||||
|
"summary": "检查摘要信息",
|
||||||
|
...其他字段...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**必需字段**:
|
||||||
|
- `licence`:设备授权码(字符串)
|
||||||
|
- `fingerprint`:设备硬件指纹(字符串)
|
||||||
|
- `taskId`:任务ID(字符串)
|
||||||
|
- `deviceSignature`:设备签名(Base64字符串)
|
||||||
|
- `checkId` 或 `inspectionId`:检查ID(数字)
|
||||||
|
|
||||||
|
## 三、防篡改校验 - OpenPGP 签名
|
||||||
|
|
||||||
|
### 3.1 目的
|
||||||
|
|
||||||
|
OpenPGP 签名用于验证 ZIP 文件在传输过程中未被篡改,确保文件完整性。
|
||||||
|
|
||||||
|
### 3.2 生成 manifest.json
|
||||||
|
|
||||||
|
创建 `META-INF/manifest.json` 文件,包含所有文件的 SHA-256 哈希值:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"files": {
|
||||||
|
"summary.json": "a1b2c3d4e5f6...",
|
||||||
|
"assets.json": "b2c3d4e5f6a1...",
|
||||||
|
"vulnerabilities.json": "c3d4e5f6a1b2...",
|
||||||
|
"weakPasswords.json": "d4e5f6a1b2c3...",
|
||||||
|
"漏洞评估报告.html": "e5f6a1b2c3d4..."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**伪代码示例**:
|
||||||
|
```python
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
|
||||||
|
def calculate_sha256_hex(content: bytes) -> str:
|
||||||
|
return hashlib.sha256(content).hexdigest()
|
||||||
|
|
||||||
|
# 计算所有文件的 SHA256
|
||||||
|
files_hashes = {
|
||||||
|
"summary.json": calculate_sha256_hex(summary_content),
|
||||||
|
"assets.json": calculate_sha256_hex(assets_content),
|
||||||
|
"vulnerabilities.json": calculate_sha256_hex(vulnerabilities_content),
|
||||||
|
"weakPasswords.json": calculate_sha256_hex(weak_passwords_content),
|
||||||
|
"漏洞评估报告.html": calculate_sha256_hex(report_html_content)
|
||||||
|
}
|
||||||
|
|
||||||
|
# 生成 manifest.json
|
||||||
|
manifest = {
|
||||||
|
"files": files_hashes
|
||||||
|
}
|
||||||
|
|
||||||
|
manifest_json = json.dumps(manifest, ensure_ascii=False, indent=2)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.3 生成 OpenPGP 签名
|
||||||
|
|
||||||
|
使用工具箱的**私钥**对 `manifest.json` 进行 OpenPGP 签名,生成 `META-INF/signature.asc`:
|
||||||
|
|
||||||
|
**伪代码示例(使用 Python gnupg)**:
|
||||||
|
```python
|
||||||
|
import gnupg
|
||||||
|
|
||||||
|
# 初始化 GPG
|
||||||
|
gpg = gnupg.GPG()
|
||||||
|
|
||||||
|
# 导入私钥(或使用已配置的密钥)
|
||||||
|
# gpg.import_keys(private_key_data)
|
||||||
|
|
||||||
|
# 对 manifest.json 进行签名
|
||||||
|
with open('META-INF/manifest.json', 'rb') as f:
|
||||||
|
signed_data = gpg.sign_file(
|
||||||
|
f,
|
||||||
|
detach=True, # 分离式签名
|
||||||
|
clearsign=False, # 不使用明文签名
|
||||||
|
output='META-INF/signature.asc'
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**伪代码示例(使用 BouncyCastle - Java/Kotlin)**:
|
||||||
|
```kotlin
|
||||||
|
import org.bouncycastle.openpgp.*
|
||||||
|
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentSignerBuilder
|
||||||
|
import org.bouncycastle.bcpg.ArmoredOutputStream
|
||||||
|
import java.io.ByteArrayOutputStream
|
||||||
|
import java.io.FileOutputStream
|
||||||
|
|
||||||
|
fun generatePGPSignature(
|
||||||
|
manifestContent: ByteArray,
|
||||||
|
privateKey: PGPPrivateKey,
|
||||||
|
publicKey: PGPPublicKey
|
||||||
|
): ByteArray {
|
||||||
|
val signatureGenerator = PGPSignatureGenerator(
|
||||||
|
JcaPGPContentSignerBuilder(publicKey.algorithm, PGPUtil.SHA256)
|
||||||
|
)
|
||||||
|
signatureGenerator.init(PGPSignature.BINARY_DOCUMENT, privateKey)
|
||||||
|
signatureGenerator.update(manifestContent)
|
||||||
|
|
||||||
|
val signature = signatureGenerator.generate()

// .asc 文件要求 ASCII 装甲(armored)格式:用 ArmoredOutputStream 包装输出流。
// 注意:PGPObjectFactory 用于解析/读取 PGP 数据,不能用于写出签名。
val out = ByteArrayOutputStream()
ArmoredOutputStream(out).use { armored ->
    signature.encode(armored)
}
|
||||||
|
|
||||||
|
return out.toByteArray()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.4 打包 ZIP 文件
|
||||||
|
|
||||||
|
将所有文件打包成 ZIP 文件,确保包含:
|
||||||
|
- 所有报告文件(summary.json、assets.json 等)
|
||||||
|
- `META-INF/manifest.json`
|
||||||
|
- `META-INF/signature.asc`
|
||||||
|
|
||||||
|
**伪代码示例**:
|
||||||
|
```python
|
||||||
|
import zipfile
|
||||||
|
|
||||||
|
def create_signed_zip(output_path: str):
|
||||||
|
with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
|
||||||
|
# 添加报告文件
|
||||||
|
zipf.write('summary.json', 'summary.json')
|
||||||
|
zipf.write('assets.json', 'assets.json')
|
||||||
|
zipf.write('vulnerabilities.json', 'vulnerabilities.json')
|
||||||
|
zipf.write('weakPasswords.json', 'weakPasswords.json')
|
||||||
|
zipf.write('漏洞评估报告.html', '漏洞评估报告.html')
|
||||||
|
|
||||||
|
# 添加签名文件
|
||||||
|
zipf.write('META-INF/manifest.json', 'META-INF/manifest.json')
|
||||||
|
zipf.write('META-INF/signature.asc', 'META-INF/signature.asc')
|
||||||
|
```
|
||||||
|
|
||||||
|
## 四、完整流程示例
|
||||||
|
|
||||||
|
### 4.1 Python 完整示例
|
||||||
|
|
||||||
|
```python
|
||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import zipfile
|
||||||
|
import hkdf
|
||||||
|
import gnupg
|
||||||
|
|
||||||
|
def generate_report_zip(
|
||||||
|
licence: str,
|
||||||
|
fingerprint: str,
|
||||||
|
task_id: str,
|
||||||
|
inspection_id: int,
|
||||||
|
output_path: str
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
生成带签名和加密的检查报告 ZIP 文件
|
||||||
|
"""
|
||||||
|
|
||||||
|
# ========== 1. 读取报告文件 ==========
|
||||||
|
assets_content = read_file("assets.json")
|
||||||
|
vulnerabilities_content = read_file("vulnerabilities.json")
|
||||||
|
weak_passwords_content = read_file("weakPasswords.json")
|
||||||
|
report_html_content = read_file("漏洞评估报告.html")
|
||||||
|
|
||||||
|
# ========== 2. 生成设备签名 ==========
|
||||||
|
|
||||||
|
# 2.1 密钥派生
|
||||||
|
ikm = licence + fingerprint
|
||||||
|
salt = "AUTH_V3_SALT"
|
||||||
|
info = "device_report_signature"
|
||||||
|
key_length = 32
|
||||||
|
|
||||||
|
    # 使用 cryptography 库的 HKDF(注意:ikm 作为 derive() 的参数传入)
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.hkdf import HKDF
    derived_key = HKDF(
        algorithm=hashes.SHA256(),
        length=key_length,
        salt=salt.encode('utf-8'),
        info=info.encode('utf-8'),
    ).derive(ikm.encode('utf-8'))
|
||||||
|
|
||||||
|
# 2.2 计算文件 SHA256
|
||||||
|
def sha256_hex(content: bytes) -> str:
|
||||||
|
return hashlib.sha256(content).hexdigest()
|
||||||
|
|
||||||
|
assets_sha256 = sha256_hex(assets_content)
|
||||||
|
vulnerabilities_sha256 = sha256_hex(vulnerabilities_content)
|
||||||
|
weak_passwords_sha256 = sha256_hex(weak_passwords_content)
|
||||||
|
report_html_sha256 = sha256_hex(report_html_content)
|
||||||
|
|
||||||
|
# 2.3 组装签名数据(严格顺序)
|
||||||
|
sign_payload = (
|
||||||
|
str(task_id) +
|
||||||
|
str(inspection_id) +
|
||||||
|
assets_sha256 +
|
||||||
|
vulnerabilities_sha256 +
|
||||||
|
weak_passwords_sha256 +
|
||||||
|
report_html_sha256
|
||||||
|
)
|
||||||
|
|
||||||
|
# 2.4 计算 HMAC-SHA256
|
||||||
|
mac = hmac.new(
|
||||||
|
key=derived_key,
|
||||||
|
msg=sign_payload.encode('utf-8'),
|
||||||
|
digestmod=hashlib.sha256
|
||||||
|
)
|
||||||
|
device_signature = base64.b64encode(mac.digest()).decode('utf-8')
|
||||||
|
|
||||||
|
# 2.5 生成 summary.json
|
||||||
|
summary = {
|
||||||
|
"orgId": 1173040813421105152,
|
||||||
|
"checkId": inspection_id,
|
||||||
|
"taskId": task_id,
|
||||||
|
"licence": licence,
|
||||||
|
"fingerprint": fingerprint,
|
||||||
|
"deviceSignature": device_signature,
|
||||||
|
"summary": "检查摘要信息"
|
||||||
|
}
|
||||||
|
summary_content = json.dumps(summary, ensure_ascii=False).encode('utf-8')
|
||||||
|
|
||||||
|
# ========== 3. 生成 OpenPGP 签名 ==========
|
||||||
|
|
||||||
|
# 3.1 生成 manifest.json
|
||||||
|
files_hashes = {
|
||||||
|
"summary.json": sha256_hex(summary_content),
|
||||||
|
"assets.json": assets_sha256,
|
||||||
|
"vulnerabilities.json": vulnerabilities_sha256,
|
||||||
|
"weakPasswords.json": weak_passwords_sha256,
|
||||||
|
"漏洞评估报告.html": report_html_sha256
|
||||||
|
}
|
||||||
|
manifest = {"files": files_hashes}
|
||||||
|
manifest_content = json.dumps(manifest, ensure_ascii=False, indent=2).encode('utf-8')
|
||||||
|
|
||||||
|
# 3.2 生成 OpenPGP 签名
|
||||||
|
gpg = gnupg.GPG()
|
||||||
|
with open('META-INF/manifest.json', 'wb') as f:
|
||||||
|
f.write(manifest_content)
|
||||||
|
|
||||||
|
with open('META-INF/manifest.json', 'rb') as f:
|
||||||
|
signed_data = gpg.sign_file(
|
||||||
|
f,
|
||||||
|
detach=True,
|
||||||
|
output='META-INF/signature.asc'
|
||||||
|
)
|
||||||
|
|
||||||
|
# ========== 4. 打包 ZIP 文件 ==========
|
||||||
|
with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
|
||||||
|
zipf.writestr('summary.json', summary_content)
|
||||||
|
zipf.writestr('assets.json', assets_content)
|
||||||
|
zipf.writestr('vulnerabilities.json', vulnerabilities_content)
|
||||||
|
zipf.writestr('weakPasswords.json', weak_passwords_content)
|
||||||
|
zipf.writestr('漏洞评估报告.html', report_html_content)
|
||||||
|
zipf.writestr('META-INF/manifest.json', manifest_content)
|
||||||
|
zipf.write('META-INF/signature.asc', 'META-INF/signature.asc')
|
||||||
|
|
||||||
|
print(f"报告 ZIP 文件生成成功: {output_path}")
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.2 Java/Kotlin 完整示例
|
||||||
|
|
||||||
|
```kotlin
|
||||||
|
import org.bouncycastle.crypto.digests.SHA256Digest
|
||||||
|
import org.bouncycastle.crypto.generators.HKDFBytesGenerator
|
||||||
|
import org.bouncycastle.crypto.params.HKDFParameters
|
||||||
|
import java.security.MessageDigest
|
||||||
|
import javax.crypto.Mac
|
||||||
|
import javax.crypto.spec.SecretKeySpec
|
||||||
|
import java.util.Base64
|
||||||
|
import java.util.zip.ZipOutputStream
|
||||||
|
import java.io.FileOutputStream
|
||||||
|
|
||||||
|
fun generateReportZip(
|
||||||
|
licence: String,
|
||||||
|
fingerprint: String,
|
||||||
|
taskId: String,
|
||||||
|
inspectionId: Long,
|
||||||
|
outputPath: String
|
||||||
|
) {
|
||||||
|
// ========== 1. 读取报告文件 ==========
|
||||||
|
val assetsContent = readFile("assets.json")
|
||||||
|
val vulnerabilitiesContent = readFile("vulnerabilities.json")
|
||||||
|
val weakPasswordsContent = readFile("weakPasswords.json")
|
||||||
|
val reportHtmlContent = readFile("漏洞评估报告.html")
|
||||||
|
|
||||||
|
// ========== 2. 生成设备签名 ==========
|
||||||
|
|
||||||
|
// 2.1 密钥派生
|
||||||
|
val ikm = (licence + fingerprint).toByteArray(Charsets.UTF_8)
|
||||||
|
val salt = "AUTH_V3_SALT".toByteArray(Charsets.UTF_8)
|
||||||
|
val info = "device_report_signature".toByteArray(Charsets.UTF_8)
|
||||||
|
val keyLength = 32
|
||||||
|
|
||||||
|
val hkdf = HKDFBytesGenerator(SHA256Digest())
|
||||||
|
hkdf.init(HKDFParameters(ikm, salt, info))
|
||||||
|
val derivedKey = ByteArray(keyLength)
|
||||||
|
hkdf.generateBytes(derivedKey, 0, keyLength)
|
||||||
|
|
||||||
|
// 2.2 计算文件 SHA256
|
||||||
|
fun sha256Hex(content: ByteArray): String {
|
||||||
|
val digest = MessageDigest.getInstance("SHA-256")
|
||||||
|
val hashBytes = digest.digest(content)
|
||||||
|
return hashBytes.joinToString("") { "%02x".format(it) }
|
||||||
|
}
|
||||||
|
|
||||||
|
val assetsSha256 = sha256Hex(assetsContent)
|
||||||
|
val vulnerabilitiesSha256 = sha256Hex(vulnerabilitiesContent)
|
||||||
|
val weakPasswordsSha256 = sha256Hex(weakPasswordsContent)
|
||||||
|
val reportHtmlSha256 = sha256Hex(reportHtmlContent)
|
||||||
|
|
||||||
|
// 2.3 组装签名数据(严格顺序)
|
||||||
|
val signPayload = buildString {
|
||||||
|
append(taskId)
|
||||||
|
append(inspectionId)
|
||||||
|
append(assetsSha256)
|
||||||
|
append(vulnerabilitiesSha256)
|
||||||
|
append(weakPasswordsSha256)
|
||||||
|
append(reportHtmlSha256)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2.4 计算 HMAC-SHA256
|
||||||
|
val mac = Mac.getInstance("HmacSHA256")
|
||||||
|
val secretKey = SecretKeySpec(derivedKey, "HmacSHA256")
|
||||||
|
mac.init(secretKey)
|
||||||
|
val signatureBytes = mac.doFinal(signPayload.toByteArray(Charsets.UTF_8))
|
||||||
|
val deviceSignature = Base64.getEncoder().encodeToString(signatureBytes)
|
||||||
|
|
||||||
|
// 2.5 生成 summary.json
|
||||||
|
val summary = mapOf(
|
||||||
|
"orgId" to 1173040813421105152L,
|
||||||
|
"checkId" to inspectionId,
|
||||||
|
"taskId" to taskId,
|
||||||
|
"licence" to licence,
|
||||||
|
"fingerprint" to fingerprint,
|
||||||
|
"deviceSignature" to deviceSignature,
|
||||||
|
"summary" to "检查摘要信息"
|
||||||
|
)
|
||||||
|
val summaryContent = objectMapper.writeValueAsString(summary).toByteArray(Charsets.UTF_8)
|
||||||
|
|
||||||
|
// ========== 3. 生成 OpenPGP 签名 ==========
|
||||||
|
|
||||||
|
// 3.1 生成 manifest.json
|
||||||
|
val filesHashes = mapOf(
|
||||||
|
"summary.json" to sha256Hex(summaryContent),
|
||||||
|
"assets.json" to assetsSha256,
|
||||||
|
"vulnerabilities.json" to vulnerabilitiesSha256,
|
||||||
|
"weakPasswords.json" to weakPasswordsSha256,
|
||||||
|
"漏洞评估报告.html" to reportHtmlSha256
|
||||||
|
)
|
||||||
|
val manifest = mapOf("files" to filesHashes)
|
||||||
|
val manifestContent = objectMapper.writeValueAsString(manifest).toByteArray(Charsets.UTF_8)
|
||||||
|
|
||||||
|
// 3.2 生成 OpenPGP 签名(使用 BouncyCastle)
|
||||||
|
val signatureAsc = generatePGPSignature(manifestContent, privateKey, publicKey)
|
||||||
|
|
||||||
|
// ========== 4. 打包 ZIP 文件 ==========
|
||||||
|
ZipOutputStream(FileOutputStream(outputPath)).use { zipOut ->
|
||||||
|
zipOut.putNextEntry(ZipEntry("summary.json"))
|
||||||
|
zipOut.write(summaryContent)
|
||||||
|
zipOut.closeEntry()
|
||||||
|
|
||||||
|
zipOut.putNextEntry(ZipEntry("assets.json"))
|
||||||
|
zipOut.write(assetsContent)
|
||||||
|
zipOut.closeEntry()
|
||||||
|
|
||||||
|
zipOut.putNextEntry(ZipEntry("vulnerabilities.json"))
|
||||||
|
zipOut.write(vulnerabilitiesContent)
|
||||||
|
zipOut.closeEntry()
|
||||||
|
|
||||||
|
zipOut.putNextEntry(ZipEntry("weakPasswords.json"))
|
||||||
|
zipOut.write(weakPasswordsContent)
|
||||||
|
zipOut.closeEntry()
|
||||||
|
|
||||||
|
zipOut.putNextEntry(ZipEntry("漏洞评估报告.html"))
|
||||||
|
zipOut.write(reportHtmlContent)
|
||||||
|
zipOut.closeEntry()
|
||||||
|
|
||||||
|
zipOut.putNextEntry(ZipEntry("META-INF/manifest.json"))
|
||||||
|
zipOut.write(manifestContent)
|
||||||
|
zipOut.closeEntry()
|
||||||
|
|
||||||
|
zipOut.putNextEntry(ZipEntry("META-INF/signature.asc"))
|
||||||
|
zipOut.write(signatureAsc)
|
||||||
|
zipOut.closeEntry()
|
||||||
|
}
|
||||||
|
|
||||||
|
println("报告 ZIP 文件生成成功: $outputPath")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 五、平台端验证流程
|
||||||
|
|
||||||
|
平台端会按以下顺序验证:
|
||||||
|
|
||||||
|
1. **OpenPGP 签名验证**(防篡改)
|
||||||
|
- 读取 `META-INF/manifest.json` 和 `META-INF/signature.asc`
|
||||||
|
- 使用平台公钥验证签名
|
||||||
|
- 验证所有文件的 SHA256 是否与 manifest.json 中的哈希值匹配
|
||||||
|
|
||||||
|
2. **设备签名验证**(授权)
|
||||||
|
- 从 `summary.json` 提取 `licence`、`fingerprint`、`taskId`、`deviceSignature`
|
||||||
|
- 验证 `licence + fingerprint` 是否已绑定
|
||||||
|
- 验证 `taskId` 是否存在且属于该设备
|
||||||
|
- 使用相同的 HKDF 派生密钥
|
||||||
|
- 重新计算签名并与 `deviceSignature` 比较
|
||||||
|
|
||||||
|
## 六、常见错误和注意事项
|
||||||
|
|
||||||
|
### 6.1 设备签名验证失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **密钥派生错误**:确保使用正确的 `salt` 和 `info` 参数
|
||||||
|
2. **签名数据顺序错误**:必须严格按照 `taskId + inspectionId + SHA256(...)` 的顺序
|
||||||
|
3. **SHA256 格式错误**:必须是 hex 字符串(小写),不能包含分隔符
|
||||||
|
4. **文件内容错误**:确保使用原始文件内容,不能进行编码转换
|
||||||
|
5. **licence 或 fingerprint 不匹配**:确保与平台绑定的值一致
|
||||||
|
|
||||||
|
### 6.2 OpenPGP 签名验证失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **私钥不匹配**:确保使用与平台公钥对应的私钥
|
||||||
|
2. **manifest.json 格式错误**:确保 JSON 格式正确
|
||||||
|
3. **文件哈希值错误**:确保 manifest.json 中的哈希值与实际文件匹配
|
||||||
|
|
||||||
|
### 6.3 文件缺失
|
||||||
|
|
||||||
|
**必需文件**:
|
||||||
|
- `summary.json`(必须包含授权字段)
|
||||||
|
- `assets.json`
|
||||||
|
- `vulnerabilities.json`
|
||||||
|
- `weakPasswords.json`(文件名大小写不敏感)
|
||||||
|
- `漏洞评估报告.html`(文件名包含"漏洞评估报告"且以".html"结尾)
|
||||||
|
- `META-INF/manifest.json`
|
||||||
|
- `META-INF/signature.asc`
|
||||||
|
|
||||||
|
## 七、安全设计说明
|
||||||
|
|
||||||
|
### 7.1 为什么第三方无法伪造
|
||||||
|
|
||||||
|
1. **设备签名**:
|
||||||
|
- 只有拥有正确 `licence + fingerprint` 的设备才能派生正确的签名密钥
|
||||||
|
- 即使第三方获取了某个设备的签名,也无法用于其他任务(`taskId` 绑定)
|
||||||
|
- 即使第三方修改了报告内容,签名也会失效(多个文件的 SHA256 绑定)
|
||||||
|
|
||||||
|
2. **OpenPGP 签名**:
|
||||||
|
- 只有拥有私钥的工具箱才能生成有效签名
|
||||||
|
- 任何文件修改都会导致哈希值不匹配
|
||||||
|
|
||||||
|
### 7.2 密钥分离
|
||||||
|
|
||||||
|
使用 HKDF 的 `info` 参数区分不同用途的密钥:
|
||||||
|
- `device_report_signature`:用于设备签名
|
||||||
|
- 其他用途可以使用不同的 `info` 值,确保密钥隔离
|
||||||
|
|
||||||
|
## 八、测试建议
|
||||||
|
|
||||||
|
1. **单元测试**:
|
||||||
|
- 测试密钥派生是否正确
|
||||||
|
- 测试签名生成和验证是否匹配
|
||||||
|
- 测试文件 SHA256 计算是否正确
|
||||||
|
|
||||||
|
2. **集成测试**:
|
||||||
|
- 使用真实数据生成 ZIP 文件
|
||||||
|
- 上传到平台验证是否通过
|
||||||
|
- 测试篡改文件后验证是否失败
|
||||||
|
|
||||||
|
3. **边界测试**:
|
||||||
|
- 测试文件缺失的情况
|
||||||
|
- 测试签名数据顺序错误的情况
|
||||||
|
- 测试错误的 `licence` 或 `fingerprint` 的情况
|
||||||
|
|
||||||
|
## 九、参考实现
|
||||||
|
|
||||||
|
- **HKDF 实现**:BouncyCastle(Java/Kotlin)、`hkdf` 库(Python)
|
||||||
|
- **HMAC-SHA256**:Java `javax.crypto.Mac`、Python `hmac`
|
||||||
|
- **OpenPGP**:BouncyCastle(Java/Kotlin)、`gnupg` 库(Python)
|
||||||
|
|
||||||
|
## 十、联系支持
|
||||||
|
|
||||||
|
如有问题,请联系平台技术支持团队。
|
||||||
|
|
||||||
@@ -0,0 +1,756 @@
|
|||||||
|
# 工具箱端 - 摘要信息二维码生成指南
|
||||||
|
|
||||||
|
## 概述
|
||||||
|
|
||||||
|
本文档说明工具箱端如何生成摘要信息二维码。工具箱完成检查任务后,需要将摘要信息加密并生成二维码,供 App 扫描后上传到平台。
|
||||||
|
|
||||||
|
> ### UX 集成模式补充(当前项目实现)
|
||||||
|
>
|
||||||
|
> 在当前集成模式中,工具箱将摘要明文传给 UX 的 `crypto.encryptSummary`,
|
||||||
|
> 由 UX 执行 HKDF + AES-256-GCM 加密并返回二维码内容 JSON(`taskId + encrypted`)。
|
||||||
|
|
||||||
|
## 一、业务流程
|
||||||
|
|
||||||
|
```
|
||||||
|
工具箱完成检查 → 准备摘要信息 → HKDF派生密钥 → AES-256-GCM加密 → 组装二维码内容 → 生成二维码
|
||||||
|
↓
|
||||||
|
App扫描二维码 → 提取taskId和encrypted → 提交到平台 → 平台解密验证 → 保存摘要信息
|
||||||
|
```
|
||||||
|
|
||||||
|
## 二、二维码内容格式
|
||||||
|
|
||||||
|
二维码内容为 JSON 格式,包含以下字段:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"taskId": "TASK-20260115-4875",
|
||||||
|
"encrypted": "uWUcAmp6UQd0w3G3crdsd4613QCxGLoEgslgXJ4G2hQhpQdjtghtQjCBUZwB/JO+NRgH1vSTr8dqBJRq7Qh4nugESrB2jUSGASTf4+5E7cLlDOmtDw7QlqS+6Hb7sn3daMSOovcna07huchHeesrJCiHV8ntEDXdCCdQOEHfkZAvy5gS8jQY41x5Qcnmqbz3qqHTmceIihTj4uqRVyKOE8jxzY6ko76jx0gW239gyFysJUTrqSPiFAr+gToi2l9SWP8ISViBmYmCY2cQtKvPfTKXwxGMid0zE/nDmb9n38X1oR05nAP0v1KaVY7iPcjsWySDGqO2iIbPzV8tQzq5TNuYqn9gvxIX/oRTFECP+aosfmOD5I8H8rVFTebyTHw+ONV3KoN2IMRqnG+a2lucbhzwQk7/cX1hs9lYm+yapmp+0MbLCtf2KMWqJPdeZqTVZgi3R181BCxo3OIwcCFTnZ/b9pdw+q8ai6SJpso5mA0TpUCvqYlGlKMZde0nj07kmLpdAm3AOg3GtPezfJu8iHmsc4PTa8RDsPgTIxcdyxNSMqo1Ws3VLQXm6DHK/kma/vbvSA/N7upPzi7wLvboig=="
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.1 字段说明
|
||||||
|
|
||||||
|
| 字段名 | 类型 | 说明 | 示例 |
|
||||||
|
|--------|------|------|------|
|
||||||
|
| `taskId` | String | 任务ID(从任务二维码中获取) | `"TASK-20260115-4875"` |
|
||||||
|
| `encrypted` | String | Base64编码的加密数据 | `"uWUcAmp6UQd0w3G3..."` |
|
||||||
|
|
||||||
|
## 三、摘要信息数据结构
|
||||||
|
|
||||||
|
### 3.1 明文数据 JSON 格式
|
||||||
|
|
||||||
|
加密前的摘要信息为 JSON 格式,包含以下字段:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"enterpriseId": "1173040813421105152",
|
||||||
|
"inspectionId": "702286470691215417",
|
||||||
|
"summary": "检查摘要信息",
|
||||||
|
"timestamp": 1734571234567
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.2 字段说明
|
||||||
|
|
||||||
|
| 字段名 | 类型 | 说明 | 示例 |
|
||||||
|
|--------|------|------|------|
|
||||||
|
| `enterpriseId` | String | 企业ID(从任务数据中获取) | `"1173040813421105152"` |
|
||||||
|
| `inspectionId` | String | 检查ID(从任务数据中获取) | `"702286470691215417"` |
|
||||||
|
| `summary` | String | 检查摘要信息 | `"检查摘要信息"` |
|
||||||
|
| `timestamp` | Number | 时间戳(毫秒) | `1734571234567` |
|
||||||
|
|
||||||
|
## 四、密钥派生(HKDF-SHA256)
|
||||||
|
|
||||||
|
### 4.1 密钥派生参数
|
||||||
|
|
||||||
|
使用 **HKDF-SHA256** 从 `licence + fingerprint` 派生 AES 密钥:
|
||||||
|
|
||||||
|
```
|
||||||
|
AES Key = HKDF(
|
||||||
|
input = licence + fingerprint, # 输入密钥材料(字符串拼接)
|
||||||
|
salt = taskId, # Salt值(任务ID)
|
||||||
|
info = "inspection_report_encryption", # Info值(固定值)
|
||||||
|
hash = SHA-256, # 哈希算法
|
||||||
|
length = 32 # 输出密钥长度(32字节 = 256位)
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**重要说明**:
|
||||||
|
- `ikm`(输入密钥材料)= `licence + fingerprint`(直接字符串拼接,无分隔符)
|
||||||
|
- `salt` = `taskId`(从任务二维码中获取的任务ID)
|
||||||
|
- `info` = `"inspection_report_encryption"`(固定值,区分不同用途的密钥)
|
||||||
|
- `length` = `32` 字节(AES-256 密钥长度)
|
||||||
|
|
||||||
|
### 4.2 Python 实现示例
|
||||||
|
|
||||||
|
```python
|
||||||
|
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF


def derive_aes_key(licence: str, fingerprint: str, task_id: str) -> bytes:
    """
    使用 HKDF-SHA256 派生 AES-256 密钥

    Args:
        licence: 设备授权码
        fingerprint: 设备硬件指纹
        task_id: 任务ID

    Returns:
        派生出的密钥(32字节)
    """
    # 输入密钥材料
    ikm = licence + fingerprint  # 直接字符串拼接,无分隔符

    # HKDF 参数
    salt = task_id
    info = "inspection_report_encryption"
    key_length = 32  # 32字节 = 256位

    # 派生密钥(cryptography 库的 HKDF:ikm 作为 derive() 的参数传入)
    derived_key = HKDF(
        algorithm=hashes.SHA256(),
        length=key_length,
        salt=salt.encode('utf-8'),
        info=info.encode('utf-8'),
    ).derive(ikm.encode('utf-8'))

    return derived_key
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.3 Java/Kotlin 实现示例
|
||||||
|
|
||||||
|
```kotlin
|
||||||
|
import org.bouncycastle.crypto.digests.SHA256Digest
|
||||||
|
import org.bouncycastle.crypto.generators.HKDFBytesGenerator
|
||||||
|
import org.bouncycastle.crypto.params.HKDFParameters
|
||||||
|
import java.nio.charset.StandardCharsets
|
||||||
|
|
||||||
|
fun deriveAesKey(licence: String, fingerprint: String, taskId: String): ByteArray {
|
||||||
|
// 输入密钥材料
|
||||||
|
val ikm = (licence + fingerprint).toByteArray(StandardCharsets.UTF_8)
|
||||||
|
|
||||||
|
// HKDF 参数
|
||||||
|
val salt = taskId.toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val info = "inspection_report_encryption".toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val keyLength = 32 // 32字节 = 256位
|
||||||
|
|
||||||
|
// 派生密钥
|
||||||
|
val hkdf = HKDFBytesGenerator(SHA256Digest())
|
||||||
|
val params = HKDFParameters(ikm, salt, info)
|
||||||
|
hkdf.init(params)
|
||||||
|
|
||||||
|
val derivedKey = ByteArray(keyLength)
|
||||||
|
hkdf.generateBytes(derivedKey, 0, keyLength)
|
||||||
|
|
||||||
|
return derivedKey
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 五、AES-256-GCM 加密
|
||||||
|
|
||||||
|
### 5.1 加密算法
|
||||||
|
|
||||||
|
- **算法**:AES-256-GCM(Galois/Counter Mode)
|
||||||
|
- **密钥长度**:256 位(32 字节)
|
||||||
|
- **IV 长度**:12 字节(96 位)
|
||||||
|
- **认证标签长度**:16 字节(128 位)
|
||||||
|
|
||||||
|
### 5.2 加密数据格式
|
||||||
|
|
||||||
|
加密后的数据格式(Base64 编码前):
|
||||||
|
|
||||||
|
```
|
||||||
|
[IV(12字节)] + [加密数据] + [认证标签(16字节)]
|
||||||
|
```
|
||||||
|
|
||||||
|
**数据布局**:
|
||||||
|
```
|
||||||
|
+------------------+------------------+------------------+
|
||||||
|
| IV (12字节) | 加密数据 | 认证标签(16字节)|
|
||||||
|
+------------------+------------------+------------------+
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.3 Python 实现示例
|
||||||
|
|
||||||
|
```python
|
||||||
|
import os
import base64
|
||||||
|
import hashlib
|
||||||
|
import hkdf
|
||||||
|
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
import json
import os
import time
|
||||||
|
|
||||||
|
def encrypt_summary_data(
|
||||||
|
enterprise_id: str,
|
||||||
|
inspection_id: str,
|
||||||
|
summary: str,
|
||||||
|
licence: str,
|
||||||
|
fingerprint: str,
|
||||||
|
task_id: str
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
加密摘要信息数据
|
||||||
|
|
||||||
|
Args:
|
||||||
|
enterprise_id: 企业ID
|
||||||
|
inspection_id: 检查ID
|
||||||
|
summary: 摘要信息
|
||||||
|
licence: 设备授权码
|
||||||
|
fingerprint: 设备硬件指纹
|
||||||
|
task_id: 任务ID
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Base64编码的加密数据
|
||||||
|
"""
|
||||||
|
# 1. 组装明文数据(JSON格式)
|
||||||
|
timestamp = int(time.time() * 1000) # 毫秒时间戳
|
||||||
|
plaintext_map = {
|
||||||
|
"enterpriseId": str(enterprise_id),
|
||||||
|
"inspectionId": str(inspection_id),
|
||||||
|
"summary": summary,
|
||||||
|
"timestamp": timestamp
|
||||||
|
}
|
||||||
|
plaintext = json.dumps(plaintext_map, ensure_ascii=False)
|
||||||
|
|
||||||
|
# 2. 使用 HKDF-SHA256 派生 AES 密钥
|
||||||
|
ikm = licence + fingerprint
|
||||||
|
salt = task_id
|
||||||
|
info = "inspection_report_encryption"
|
||||||
|
key_length = 32
|
||||||
|
|
||||||
|
    # hkdf 库采用 extract-then-expand 两步接口
    prk = hkdf.hkdf_extract(
        salt.encode('utf-8'),
        ikm.encode('utf-8'),
        hash=hashlib.sha256
    )
    aes_key = hkdf.hkdf_expand(
        prk,
        info.encode('utf-8'),
        key_length,
        hash=hashlib.sha256
    )
|
||||||
|
|
||||||
|
# 3. 使用 AES-256-GCM 加密数据
|
||||||
|
aesgcm = AESGCM(aes_key)
|
||||||
|
iv = os.urandom(12) # 生成12字节随机IV
|
||||||
|
encrypted_bytes = aesgcm.encrypt(iv, plaintext.encode('utf-8'), None)
|
||||||
|
|
||||||
|
# 4. 组装:IV + 加密数据(包含认证标签)
|
||||||
|
# AESGCM.encrypt 返回的格式已经是:加密数据 + 认证标签
|
||||||
|
combined = iv + encrypted_bytes
|
||||||
|
|
||||||
|
# 5. Base64 编码
|
||||||
|
encrypted_base64 = base64.b64encode(combined).decode('utf-8')
|
||||||
|
|
||||||
|
return encrypted_base64
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.4 Java/Kotlin 实现示例
|
||||||
|
|
||||||
|
```kotlin
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper
|
||||||
|
import org.bouncycastle.crypto.digests.SHA256Digest
|
||||||
|
import org.bouncycastle.crypto.generators.HKDFBytesGenerator
|
||||||
|
import org.bouncycastle.crypto.params.HKDFParameters
|
||||||
|
import java.nio.charset.StandardCharsets
|
||||||
|
import java.security.SecureRandom
|
||||||
|
import java.util.Base64
|
||||||
|
import javax.crypto.Cipher
|
||||||
|
import javax.crypto.spec.GCMParameterSpec
|
||||||
|
import javax.crypto.spec.SecretKeySpec
|
||||||
|
|
||||||
|
object SummaryEncryptionUtil {
|
||||||
|
|
||||||
|
private const val ALGORITHM = "AES"
|
||||||
|
private const val TRANSFORMATION = "AES/GCM/NoPadding"
|
||||||
|
private const val GCM_IV_LENGTH = 12 // 12 bytes = 96 bits
|
||||||
|
private const val GCM_TAG_LENGTH = 16 // 16 bytes = 128 bits
|
||||||
|
private const val GCM_TAG_LENGTH_BITS = GCM_TAG_LENGTH * 8 // 128 bits
|
||||||
|
|
||||||
|
private val objectMapper = ObjectMapper()
|
||||||
|
private val secureRandom = SecureRandom()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 加密摘要信息数据
|
||||||
|
*/
|
||||||
|
fun encryptSummaryData(
|
||||||
|
enterpriseId: String,
|
||||||
|
inspectionId: String,
|
||||||
|
summary: String,
|
||||||
|
licence: String,
|
||||||
|
fingerprint: String,
|
||||||
|
taskId: String
|
||||||
|
): String {
|
||||||
|
// 1. 组装明文数据(JSON格式)
|
||||||
|
val timestamp = System.currentTimeMillis()
|
||||||
|
val plaintextMap = mapOf(
|
||||||
|
"enterpriseId" to enterpriseId,
|
||||||
|
"inspectionId" to inspectionId,
|
||||||
|
"summary" to summary,
|
||||||
|
"timestamp" to timestamp
|
||||||
|
)
|
||||||
|
val plaintext = objectMapper.writeValueAsString(plaintextMap)
|
||||||
|
|
||||||
|
// 2. 使用 HKDF-SHA256 派生 AES 密钥
|
||||||
|
val ikm = (licence + fingerprint).toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val salt = taskId.toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val info = "inspection_report_encryption".toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val keyLength = 32
|
||||||
|
|
||||||
|
val hkdf = HKDFBytesGenerator(SHA256Digest())
|
||||||
|
val params = HKDFParameters(ikm, salt, info)
|
||||||
|
hkdf.init(params)
|
||||||
|
|
||||||
|
val aesKey = ByteArray(keyLength)
|
||||||
|
hkdf.generateBytes(aesKey, 0, keyLength)
|
||||||
|
|
||||||
|
// 3. 使用 AES-256-GCM 加密数据
|
||||||
|
val iv = ByteArray(GCM_IV_LENGTH)
|
||||||
|
secureRandom.nextBytes(iv)
|
||||||
|
|
||||||
|
val secretKey = SecretKeySpec(aesKey, ALGORITHM)
|
||||||
|
val gcmSpec = GCMParameterSpec(GCM_TAG_LENGTH_BITS, iv)
|
||||||
|
|
||||||
|
val cipher = Cipher.getInstance(TRANSFORMATION)
|
||||||
|
cipher.init(Cipher.ENCRYPT_MODE, secretKey, gcmSpec)
|
||||||
|
|
||||||
|
val plaintextBytes = plaintext.toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val encryptedBytes = cipher.doFinal(plaintextBytes)
|
||||||
|
|
||||||
|
// 4. 组装:IV + 加密数据(包含认证标签)
|
||||||
|
// GCM 模式会将认证标签附加到密文末尾
|
||||||
|
val ciphertext = encryptedBytes.sliceArray(0 until encryptedBytes.size - GCM_TAG_LENGTH)
|
||||||
|
val tag = encryptedBytes.sliceArray(encryptedBytes.size - GCM_TAG_LENGTH until encryptedBytes.size)
|
||||||
|
|
||||||
|
val combined = iv + ciphertext + tag
|
||||||
|
|
||||||
|
// 5. Base64 编码
|
||||||
|
return Base64.getEncoder().encodeToString(combined)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 六、组装二维码内容
|
||||||
|
|
||||||
|
### 6.1 二维码内容 JSON
|
||||||
|
|
||||||
|
将 `taskId` 和加密后的 `encrypted` 组装成 JSON 格式:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"taskId": "TASK-20260115-4875",
|
||||||
|
"encrypted": "Base64编码的加密数据"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 6.2 Python 实现示例
|
||||||
|
|
||||||
|
```python
|
||||||
|
import json
|
||||||
|
|
||||||
|
def generate_qr_code_content(task_id: str, encrypted: str) -> str:
|
||||||
|
"""
|
||||||
|
生成二维码内容(JSON格式)
|
||||||
|
|
||||||
|
Args:
|
||||||
|
task_id: 任务ID
|
||||||
|
encrypted: Base64编码的加密数据
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
JSON格式的字符串
|
||||||
|
"""
|
||||||
|
qr_content = {
|
||||||
|
"taskId": task_id,
|
||||||
|
"encrypted": encrypted
|
||||||
|
}
|
||||||
|
return json.dumps(qr_content, ensure_ascii=False)
|
||||||
|
```
|
||||||
|
|
||||||
|
## 七、完整流程示例
|
||||||
|
|
||||||
|
### 7.1 Python 完整示例
|
||||||
|
|
||||||
|
```python
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
import hashlib
|
||||||
|
import hkdf
|
||||||
|
import qrcode
|
||||||
|
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||||
|
import os
|
||||||
|
|
||||||
|
class SummaryQRCodeGenerator:
|
||||||
|
"""摘要信息二维码生成器"""
|
||||||
|
|
||||||
|
def __init__(self, licence: str, fingerprint: str):
|
||||||
|
"""
|
||||||
|
初始化生成器
|
||||||
|
|
||||||
|
Args:
|
||||||
|
licence: 设备授权码
|
||||||
|
fingerprint: 设备硬件指纹
|
||||||
|
"""
|
||||||
|
self.licence = licence
|
||||||
|
self.fingerprint = fingerprint
|
||||||
|
|
||||||
|
def generate_summary_qr_code(
|
||||||
|
self,
|
||||||
|
task_id: str,
|
||||||
|
enterprise_id: str,
|
||||||
|
inspection_id: str,
|
||||||
|
summary: str,
|
||||||
|
output_path: str = "summary_qr.png"
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
生成摘要信息二维码
|
||||||
|
|
||||||
|
Args:
|
||||||
|
task_id: 任务ID(从任务二维码中获取)
|
||||||
|
enterprise_id: 企业ID(从任务数据中获取)
|
||||||
|
inspection_id: 检查ID(从任务数据中获取)
|
||||||
|
summary: 摘要信息
|
||||||
|
output_path: 二维码图片保存路径
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
二维码内容(JSON字符串)
|
||||||
|
"""
|
||||||
|
# 1. 组装明文数据(JSON格式)
|
||||||
|
timestamp = int(time.time() * 1000) # 毫秒时间戳
|
||||||
|
plaintext_map = {
|
||||||
|
"enterpriseId": str(enterprise_id),
|
||||||
|
"inspectionId": str(inspection_id),
|
||||||
|
"summary": summary,
|
||||||
|
"timestamp": timestamp
|
||||||
|
}
|
||||||
|
plaintext = json.dumps(plaintext_map, ensure_ascii=False)
|
||||||
|
print(f"明文数据: {plaintext}")
|
||||||
|
|
||||||
|
# 2. 使用 HKDF-SHA256 派生 AES 密钥
|
||||||
|
ikm = self.licence + self.fingerprint
|
||||||
|
salt = task_id
|
||||||
|
info = "inspection_report_encryption"
|
||||||
|
key_length = 32
|
||||||
|
|
||||||
|
        # hkdf 库采用 extract-then-expand 两步接口
        prk = hkdf.hkdf_extract(
            salt.encode('utf-8'),
            ikm.encode('utf-8'),
            hash=hashlib.sha256
        )
        aes_key = hkdf.hkdf_expand(
            prk,
            info.encode('utf-8'),
            key_length,
            hash=hashlib.sha256
        )
|
||||||
|
print(f"密钥派生成功: {len(aes_key)} 字节")
|
||||||
|
|
||||||
|
# 3. 使用 AES-256-GCM 加密数据
|
||||||
|
aesgcm = AESGCM(aes_key)
|
||||||
|
iv = os.urandom(12) # 生成12字节随机IV
|
||||||
|
encrypted_bytes = aesgcm.encrypt(iv, plaintext.encode('utf-8'), None)
|
||||||
|
|
||||||
|
# 组装:IV + 加密数据(包含认证标签)
|
||||||
|
combined = iv + encrypted_bytes
|
||||||
|
|
||||||
|
# Base64 编码
|
||||||
|
encrypted_base64 = base64.b64encode(combined).decode('utf-8')
|
||||||
|
print(f"加密成功: {encrypted_base64[:50]}...")
|
||||||
|
|
||||||
|
# 4. 组装二维码内容(JSON格式)
|
||||||
|
qr_content = {
|
||||||
|
"taskId": task_id,
|
||||||
|
"encrypted": encrypted_base64
|
||||||
|
}
|
||||||
|
qr_content_json = json.dumps(qr_content, ensure_ascii=False)
|
||||||
|
print(f"二维码内容: {qr_content_json[:100]}...")
|
||||||
|
|
||||||
|
# 5. 生成二维码
|
||||||
|
qr = qrcode.QRCode(
|
||||||
|
version=1,
|
||||||
|
error_correction=qrcode.constants.ERROR_CORRECT_M,
|
||||||
|
box_size=10,
|
||||||
|
border=4,
|
||||||
|
)
|
||||||
|
qr.add_data(qr_content_json)
|
||||||
|
qr.make(fit=True)
|
||||||
|
|
||||||
|
img = qr.make_image(fill_color="black", back_color="white")
|
||||||
|
img.save(output_path)
|
||||||
|
print(f"二维码已生成: {output_path}")
|
||||||
|
|
||||||
|
return qr_content_json
|
||||||
|
|
||||||
|
# 使用示例
|
||||||
|
if __name__ == "__main__":
|
||||||
|
# 工具箱的授权信息(必须与平台绑定时一致)
|
||||||
|
licence = "LIC-8F2A-XXXX"
|
||||||
|
fingerprint = "FP-2c91e9f3"
|
||||||
|
|
||||||
|
# 创建生成器
|
||||||
|
generator = SummaryQRCodeGenerator(licence, fingerprint)
|
||||||
|
|
||||||
|
# 从任务二维码中获取的信息
|
||||||
|
task_id = "TASK-20260115-4875"
|
||||||
|
enterprise_id = "1173040813421105152"
|
||||||
|
inspection_id = "702286470691215417"
|
||||||
|
summary = "检查摘要信息:发现3个高危漏洞,5个中危漏洞"
|
||||||
|
|
||||||
|
# 生成二维码
|
||||||
|
qr_content = generator.generate_summary_qr_code(
|
||||||
|
task_id=task_id,
|
||||||
|
enterprise_id=enterprise_id,
|
||||||
|
inspection_id=inspection_id,
|
||||||
|
summary=summary,
|
||||||
|
output_path="summary_qr_code.png"
|
||||||
|
)
|
||||||
|
|
||||||
|
print(f"\n二维码内容:\n{qr_content}")
|
||||||
|
```
|
||||||
|
|
||||||
|
### 7.2 Java/Kotlin 完整示例
|
||||||
|
|
||||||
|
```kotlin
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper
|
||||||
|
import com.google.zxing.BarcodeFormat
|
||||||
|
import com.google.zxing.EncodeHintType
|
||||||
|
import com.google.zxing.qrcode.QRCodeWriter
|
||||||
|
import com.google.zxing.qrcode.decoder.ErrorCorrectionLevel
|
||||||
|
import org.bouncycastle.crypto.digests.SHA256Digest
|
||||||
|
import org.bouncycastle.crypto.generators.HKDFBytesGenerator
|
||||||
|
import org.bouncycastle.crypto.params.HKDFParameters
|
||||||
|
import java.awt.image.BufferedImage
|
||||||
|
import java.nio.charset.StandardCharsets
|
||||||
|
import java.security.SecureRandom
|
||||||
|
import java.util.Base64
|
||||||
|
import javax.crypto.Cipher
|
||||||
|
import javax.crypto.spec.GCMParameterSpec
|
||||||
|
import javax.crypto.spec.SecretKeySpec
|
||||||
|
import javax.imageio.ImageIO
|
||||||
|
import java.io.File
|
||||||
|
|
||||||
|
class SummaryQRCodeGenerator(
|
||||||
|
private val licence: String,
|
||||||
|
private val fingerprint: String
|
||||||
|
) {
|
||||||
|
|
||||||
|
    // const val 只能声明在顶层或 object/companion object 中,class 内需包一层 companion object
    private companion object {
        const val ALGORITHM = "AES"
        const val TRANSFORMATION = "AES/GCM/NoPadding"
        const val GCM_IV_LENGTH = 12
        const val GCM_TAG_LENGTH = 16
        const val GCM_TAG_LENGTH_BITS = GCM_TAG_LENGTH * 8
    }
|
||||||
|
|
||||||
|
private val objectMapper = ObjectMapper()
|
||||||
|
private val secureRandom = SecureRandom()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 生成摘要信息二维码
|
||||||
|
*/
|
||||||
|
fun generateSummaryQRCode(
|
||||||
|
taskId: String,
|
||||||
|
enterpriseId: String,
|
||||||
|
inspectionId: String,
|
||||||
|
summary: String,
|
||||||
|
outputPath: String = "summary_qr.png"
|
||||||
|
): String {
|
||||||
|
// 1. 组装明文数据(JSON格式)
|
||||||
|
val timestamp = System.currentTimeMillis()
|
||||||
|
val plaintextMap = mapOf(
|
||||||
|
"enterpriseId" to enterpriseId,
|
||||||
|
"inspectionId" to inspectionId,
|
||||||
|
"summary" to summary,
|
||||||
|
"timestamp" to timestamp
|
||||||
|
)
|
||||||
|
val plaintext = objectMapper.writeValueAsString(plaintextMap)
|
||||||
|
println("明文数据: $plaintext")
|
||||||
|
|
||||||
|
// 2. 使用 HKDF-SHA256 派生 AES 密钥
|
||||||
|
val ikm = (licence + fingerprint).toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val salt = taskId.toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val info = "inspection_report_encryption".toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val keyLength = 32
|
||||||
|
|
||||||
|
val hkdf = HKDFBytesGenerator(SHA256Digest())
|
||||||
|
val params = HKDFParameters(ikm, salt, info)
|
||||||
|
hkdf.init(params)
|
||||||
|
|
||||||
|
val aesKey = ByteArray(keyLength)
|
||||||
|
hkdf.generateBytes(aesKey, 0, keyLength)
|
||||||
|
println("密钥派生成功: ${aesKey.size} 字节")
|
||||||
|
|
||||||
|
// 3. 使用 AES-256-GCM 加密数据
|
||||||
|
val iv = ByteArray(GCM_IV_LENGTH)
|
||||||
|
secureRandom.nextBytes(iv)
|
||||||
|
|
||||||
|
val secretKey = SecretKeySpec(aesKey, ALGORITHM)
|
||||||
|
val gcmSpec = GCMParameterSpec(GCM_TAG_LENGTH_BITS, iv)
|
||||||
|
|
||||||
|
val cipher = Cipher.getInstance(TRANSFORMATION)
|
||||||
|
cipher.init(Cipher.ENCRYPT_MODE, secretKey, gcmSpec)
|
||||||
|
|
||||||
|
val plaintextBytes = plaintext.toByteArray(StandardCharsets.UTF_8)
|
||||||
|
val encryptedBytes = cipher.doFinal(plaintextBytes)
|
||||||
|
|
||||||
|
// 组装:IV + 加密数据(包含认证标签)
|
||||||
|
val ciphertext = encryptedBytes.sliceArray(0 until encryptedBytes.size - GCM_TAG_LENGTH)
|
||||||
|
val tag = encryptedBytes.sliceArray(encryptedBytes.size - GCM_TAG_LENGTH until encryptedBytes.size)
|
||||||
|
|
||||||
|
val combined = iv + ciphertext + tag
|
||||||
|
|
||||||
|
// Base64 编码
|
||||||
|
val encryptedBase64 = Base64.getEncoder().encodeToString(combined)
|
||||||
|
println("加密成功: ${encryptedBase64.take(50)}...")
|
||||||
|
|
||||||
|
// 4. 组装二维码内容(JSON格式)
|
||||||
|
val qrContent = mapOf(
|
||||||
|
"taskId" to taskId,
|
||||||
|
"encrypted" to encryptedBase64
|
||||||
|
)
|
||||||
|
val qrContentJson = objectMapper.writeValueAsString(qrContent)
|
||||||
|
println("二维码内容: ${qrContentJson.take(100)}...")
|
||||||
|
|
||||||
|
// 5. 生成二维码
|
||||||
|
val hints = hashMapOf<EncodeHintType, Any>().apply {
|
||||||
|
put(EncodeHintType.ERROR_CORRECTION, ErrorCorrectionLevel.M)
|
||||||
|
put(EncodeHintType.CHARACTER_SET, "UTF-8")
|
||||||
|
put(EncodeHintType.MARGIN, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
val writer = QRCodeWriter()
|
||||||
|
val bitMatrix = writer.encode(qrContentJson, BarcodeFormat.QR_CODE, 300, 300, hints)
|
||||||
|
|
||||||
|
val width = bitMatrix.width
|
||||||
|
val height = bitMatrix.height
|
||||||
|
val image = BufferedImage(width, height, BufferedImage.TYPE_INT_RGB)
|
||||||
|
|
||||||
|
for (x in 0 until width) {
|
||||||
|
for (y in 0 until height) {
|
||||||
|
image.setRGB(x, y, if (bitMatrix[x, y]) 0x000000 else 0xFFFFFF)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ImageIO.write(image, "PNG", File(outputPath))
|
||||||
|
println("二维码已生成: $outputPath")
|
||||||
|
|
||||||
|
return qrContentJson
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 使用示例
|
||||||
|
fun main() {
|
||||||
|
// 工具箱的授权信息(必须与平台绑定时一致)
|
||||||
|
val licence = "LIC-8F2A-XXXX"
|
||||||
|
val fingerprint = "FP-2c91e9f3"
|
||||||
|
|
||||||
|
// 创建生成器
|
||||||
|
val generator = SummaryQRCodeGenerator(licence, fingerprint)
|
||||||
|
|
||||||
|
// 从任务二维码中获取的信息
|
||||||
|
val taskId = "TASK-20260115-4875"
|
||||||
|
val enterpriseId = "1173040813421105152"
|
||||||
|
val inspectionId = "702286470691215417"
|
||||||
|
val summary = "检查摘要信息:发现3个高危漏洞,5个中危漏洞"
|
||||||
|
|
||||||
|
// 生成二维码
|
||||||
|
val qrContent = generator.generateSummaryQRCode(
|
||||||
|
taskId = taskId,
|
||||||
|
enterpriseId = enterpriseId,
|
||||||
|
inspectionId = inspectionId,
|
||||||
|
summary = summary,
|
||||||
|
outputPath = "summary_qr_code.png"
|
||||||
|
)
|
||||||
|
|
||||||
|
println("\n二维码内容:\n$qrContent")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 八、平台端验证流程
|
||||||
|
|
||||||
|
平台端会按以下流程验证:
|
||||||
|
|
||||||
|
1. **接收请求**:App 扫描二维码后,将 `taskId` 和 `encrypted` 提交到平台
|
||||||
|
2. **查询任务**:根据 `taskId` 查询任务记录,获取 `deviceLicenceId`
|
||||||
|
3. **获取设备信息**:根据 `deviceLicenceId` 查询设备授权记录,获取 `licence` 和 `fingerprint`
|
||||||
|
4. **密钥派生**:使用相同的 HKDF 参数派生 AES 密钥
|
||||||
|
5. **解密数据**:使用 AES-256-GCM 解密(自动验证认证标签)
|
||||||
|
6. **时间戳校验**:验证 `timestamp` 是否在合理范围内(防止重放攻击)
|
||||||
|
7. **保存摘要**:将摘要信息保存到数据库
|
||||||
|
|
||||||
|
## 九、常见错误和注意事项
|
||||||
|
|
||||||
|
### 9.1 加密失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **密钥派生错误**:确保使用正确的 HKDF 参数
|
||||||
|
- `ikm` = `licence + fingerprint`(直接字符串拼接)
|
||||||
|
- `salt` = `taskId`(必须与任务二维码中的 taskId 一致)
|
||||||
|
- `info` = `"inspection_report_encryption"`(固定值)
|
||||||
|
- `length` = `32` 字节
|
||||||
|
|
||||||
|
2. **数据格式错误**:确保 JSON 格式正确
|
||||||
|
- 字段名和类型必须匹配
|
||||||
|
- 时间戳必须是数字类型(毫秒)
|
||||||
|
|
||||||
|
3. **IV 生成错误**:确保使用安全的随机数生成器生成 12 字节 IV
|
||||||
|
|
||||||
|
### 9.2 平台验证失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **taskId 不匹配**:确保二维码中的 `taskId` 与任务二维码中的 `taskId` 一致
|
||||||
|
2. **密钥不匹配**:确保 `licence` 和 `fingerprint` 与平台绑定时一致
|
||||||
|
3. **时间戳过期**:平台会验证时间戳,确保时间戳在合理范围内
|
||||||
|
4. **认证标签验证失败**:数据被篡改或密钥错误
|
||||||
|
|
||||||
|
### 9.3 二维码生成失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **内容过长**:如果加密数据过长,可能需要更高版本的二维码
|
||||||
|
2. **JSON 格式错误**:确保 JSON 格式正确
|
||||||
|
3. **字符编码错误**:确保使用 UTF-8 编码
|
||||||
|
|
||||||
|
## 十、安全设计说明
|
||||||
|
|
||||||
|
### 10.1 为什么使用 HKDF
|
||||||
|
|
||||||
|
1. **密钥分离**:使用 `info` 参数区分不同用途的密钥
|
||||||
|
2. **Salt 随机性**:使用 `taskId` 作为 salt,确保每个任务的密钥不同
|
||||||
|
3. **密钥扩展**:HKDF 提供更好的密钥扩展性
|
||||||
|
|
||||||
|
### 10.2 为什么第三方无法伪造
|
||||||
|
|
||||||
|
1. **密钥绑定**:只有拥有正确 `licence + fingerprint` 的工具箱才能生成正确的密钥
|
||||||
|
2. **任务绑定**:使用 `taskId` 作为 salt,确保密钥与特定任务绑定
|
||||||
|
3. **认证加密**:GCM 模式提供认证加密,任何篡改都会导致解密失败
|
||||||
|
4. **时间戳校验**:平台会验证时间戳,防止重放攻击
|
||||||
|
|
||||||
|
### 10.3 密钥派生参数的重要性
|
||||||
|
|
||||||
|
- **ikm**:`licence + fingerprint` 是设备唯一标识
|
||||||
|
- **salt**:`taskId` 确保每个任务使用不同的密钥
|
||||||
|
- **info**:`"inspection_report_encryption"` 区分不同用途的密钥
|
||||||
|
- **length**:`32` 字节提供 256 位密钥强度
|
||||||
|
|
||||||
|
## 十一、测试建议
|
||||||
|
|
||||||
|
1. **单元测试**:
|
||||||
|
- 测试密钥派生是否正确
|
||||||
|
- 测试加密和解密是否匹配
|
||||||
|
- 测试 JSON 格式是否正确
|
||||||
|
|
||||||
|
2. **集成测试**:
|
||||||
|
- 使用真实任务数据生成二维码
|
||||||
|
- App 扫描二维码并提交到平台
|
||||||
|
- 验证平台是否能正确解密和验证
|
||||||
|
|
||||||
|
3. **边界测试**:
|
||||||
|
- 测试超长的摘要信息
|
||||||
|
- 测试特殊字符的处理
|
||||||
|
- 测试错误的 taskId 是否会导致解密失败
|
||||||
|
|
||||||
|
## 十二、参考实现
|
||||||
|
|
||||||
|
- **Python**:`hkdf` 库(HKDF)、`cryptography` 库(AES-GCM)、`qrcode` 库(二维码生成)
|
||||||
|
- **Java/Kotlin**:BouncyCastle(HKDF)、JDK `javax.crypto`(AES-GCM)、ZXing 库(二维码生成)
|
||||||
|
- **C#**:BouncyCastle.Net(HKDF)、`System.Security.Cryptography`(AES-GCM)、ZXing.Net 库(二维码生成)
|
||||||
|
|
||||||
|
## 十三、联系支持
|
||||||
|
|
||||||
|
如有问题,请联系平台技术支持团队获取:
|
||||||
|
- 测试环境地址
|
||||||
|
- 技术支持
|
||||||
|
|
||||||
---
# 工具箱端 - 设备授权二维码生成指南
|
||||||
|
|
||||||
|
## 概述
|
||||||
|
|
||||||
|
本文档说明工具箱端如何生成设备授权二维码,用于设备首次授权和绑定。App 扫描二维码后,会将加密的设备信息提交到平台完成授权校验和绑定。
|
||||||
|
|
||||||
|
> ### UX 集成模式补充(当前项目实现)
|
||||||
|
>
|
||||||
|
> 在当前集成模式中,工具箱不直接执行 RSA 加密,而是调用 UX 接口:
|
||||||
|
>
|
||||||
|
> 1. 工具箱先调用 `device.register` 传入 `licence` 与平台公钥,`fingerprint` 由 UX 本机计算并入库。
|
||||||
|
> 2. 工具箱再调用 `crypto.encryptDeviceInfo` 获取加密后的 Base64 密文。
|
||||||
|
> 3. 工具箱将该密文生成二维码供 App 扫码提交平台。
|
||||||
|
|
||||||
|
## 一、业务流程
|
||||||
|
|
||||||
|
```
|
||||||
|
工具箱 → 生成设备信息 → RSA-OAEP加密 → Base64编码 → 生成二维码
|
||||||
|
↓
|
||||||
|
App扫描二维码 → 提取加密数据 → 调用平台接口 → 平台解密验证 → 授权成功
|
||||||
|
```
|
||||||
|
|
||||||
|
## 二、设备信息准备
|
||||||
|
|
||||||
|
### 2.1 设备信息字段
|
||||||
|
|
||||||
|
工具箱需要准备以下设备信息:
|
||||||
|
|
||||||
|
| 字段名 | 类型 | 说明 | 示例 |
|
||||||
|
|--------|------|------|------|
|
||||||
|
| `licence` | String | 设备授权码(工具箱唯一标识) | `"LIC-8F2A-XXXX"` |
|
||||||
|
| `fingerprint` | String | 设备硬件指纹(设备唯一标识) | `"FP-2c91e9f3"` |
|
||||||
|
|
||||||
|
### 2.2 生成设备信息 JSON
|
||||||
|
|
||||||
|
将设备信息组装成 JSON 格式:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"licence": "LIC-8F2A-XXXX",
|
||||||
|
"fingerprint": "FP-2c91e9f3"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**重要说明**:
|
||||||
|
- `licence` 和 `fingerprint` 必须是字符串类型
|
||||||
|
- JSON 格式必须正确,不能有多余的逗号或格式错误
|
||||||
|
- 建议使用标准的 JSON 库生成,避免手动拼接
|
||||||
|
|
||||||
|
**伪代码示例**:
|
||||||
|
```python
|
||||||
|
import json
|
||||||
|
|
||||||
|
device_info = {
|
||||||
|
"licence": "LIC-8F2A-XXXX", # 工具箱授权码
|
||||||
|
"fingerprint": "FP-2c91e9f3" # 设备硬件指纹
|
||||||
|
}
|
||||||
|
|
||||||
|
# 转换为 JSON 字符串
|
||||||
|
device_info_json = json.dumps(device_info, ensure_ascii=False)
|
||||||
|
# 结果: {"licence":"LIC-8F2A-XXXX","fingerprint":"FP-2c91e9f3"}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 三、RSA-OAEP 加密
|
||||||
|
|
||||||
|
### 3.1 加密算法
|
||||||
|
|
||||||
|
使用 **RSA-OAEP** 非对称加密算法:
|
||||||
|
|
||||||
|
- **算法名称**:`RSA/ECB/OAEPWithSHA-256AndMGF1Padding`
|
||||||
|
- **密钥长度**:2048 位(推荐)
|
||||||
|
- **填充方式**:OAEP with SHA-256 and MGF1
|
||||||
|
- **加密方向**:使用**平台公钥**加密,平台使用私钥解密
|
||||||
|
|
||||||
|
### 3.2 获取平台公钥
|
||||||
|
|
||||||
|
平台公钥需要从平台获取,通常以 **Base64 编码**的字符串形式提供。
|
||||||
|
|
||||||
|
**公钥格式**:
|
||||||
|
- 格式:X.509 标准格式(DER 编码)
|
||||||
|
- 存储:Base64 编码的字符串
|
||||||
|
- 示例:`MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzDlZvMDVaL+fjl05Hi182JOAUAaN4gh9rOF+1NhKfO4J6e0HLy8lBuylp3A4xoTiyUejNm22h0dqAgDSPnY/xZR76POFTD1soHr2LaFCN8JAbQ96P8gE7wC9qpoTssVvIVRH7QbVd260J6eD0Szwcx9cg591RSN69pMpe5IVRi8T99Hhql6/wnZHORPr18eESLOY93jRskLzc0q18r68RRoTJiQf+9YC8ub5iKp7rCjVnPi1UbIYmXmL08tk5mksYA0NqWQAa1ofKxx/9tQtB9uTjhTxuTu94XU9jlGU87qaHZs+kpqa8CAbYYJFbSP1xHwoZzpU2jpw2aF22HBYxwIDAQAB`
|
||||||
|
|
||||||
|
### 3.3 加密步骤
|
||||||
|
|
||||||
|
1. **加载平台公钥**:从 Base64 字符串加载公钥对象
|
||||||
|
2. **初始化加密器**:使用 `RSA/ECB/OAEPWithSHA-256AndMGF1Padding` 算法
|
||||||
|
3. **加密数据**:使用公钥加密设备信息 JSON 字符串(UTF-8 编码)
|
||||||
|
4. **Base64 编码**:将加密后的字节数组进行 Base64 编码
|
||||||
|
|
||||||
|
### 3.4 Python 实现示例
|
||||||
|
|
||||||
|
```python
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import padding
|
||||||
|
from cryptography.hazmat.primitives import serialization
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
|
||||||
|
def encrypt_device_info(licence: str, fingerprint: str, platform_public_key_base64: str) -> str:
|
||||||
|
"""
|
||||||
|
使用平台公钥加密设备信息
|
||||||
|
|
||||||
|
Args:
|
||||||
|
licence: 设备授权码
|
||||||
|
fingerprint: 设备硬件指纹
|
||||||
|
platform_public_key_base64: 平台公钥(Base64编码)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Base64编码的加密数据
|
||||||
|
"""
|
||||||
|
# 1. 组装设备信息 JSON
|
||||||
|
device_info = {
|
||||||
|
"licence": licence,
|
||||||
|
"fingerprint": fingerprint
|
||||||
|
}
|
||||||
|
device_info_json = json.dumps(device_info, ensure_ascii=False)
|
||||||
|
|
||||||
|
# 2. 加载平台公钥
|
||||||
|
public_key_bytes = base64.b64decode(platform_public_key_base64)
|
||||||
|
public_key = serialization.load_der_public_key(
|
||||||
|
public_key_bytes,
|
||||||
|
backend=default_backend()
|
||||||
|
)
|
||||||
|
|
||||||
|
# 3. 使用 RSA-OAEP 加密
|
||||||
|
# OAEP padding with SHA-256 and MGF1
|
||||||
|
encrypted_bytes = public_key.encrypt(
|
||||||
|
device_info_json.encode('utf-8'),
|
||||||
|
padding.OAEP(
|
||||||
|
mgf=padding.MGF1(algorithm=hashes.SHA256()),
|
||||||
|
algorithm=hashes.SHA256(),
|
||||||
|
label=None
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# 4. Base64 编码
|
||||||
|
encrypted_base64 = base64.b64encode(encrypted_bytes).decode('utf-8')
|
||||||
|
|
||||||
|
return encrypted_base64
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.5 Java/Kotlin 实现示例
|
||||||
|
|
||||||
|
```kotlin
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper
import java.security.KeyFactory
|
||||||
|
import java.security.PublicKey
|
||||||
|
import java.security.spec.X509EncodedKeySpec
|
||||||
|
import java.util.Base64
|
||||||
|
import javax.crypto.Cipher
|
||||||
|
import java.nio.charset.StandardCharsets
|
||||||
|
|
||||||
|
object DeviceAuthorizationUtil {
|
||||||
|
|
||||||
|
    private const val CIPHER_ALGORITHM = "RSA/ECB/OAEPWithSHA-256AndMGF1Padding"

    // 用于序列化设备信息 JSON
    private val objectMapper = ObjectMapper()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 使用平台公钥加密设备信息
|
||||||
|
*
|
||||||
|
* @param licence 设备授权码
|
||||||
|
* @param fingerprint 设备硬件指纹
|
||||||
|
* @param platformPublicKeyBase64 平台公钥(Base64编码)
|
||||||
|
* @return Base64编码的加密数据
|
||||||
|
*/
|
||||||
|
fun encryptDeviceInfo(
|
||||||
|
licence: String,
|
||||||
|
fingerprint: String,
|
||||||
|
platformPublicKeyBase64: String
|
||||||
|
): String {
|
||||||
|
// 1. 组装设备信息 JSON
|
||||||
|
val deviceInfo = mapOf(
|
||||||
|
"licence" to licence,
|
||||||
|
"fingerprint" to fingerprint
|
||||||
|
)
|
||||||
|
val deviceInfoJson = objectMapper.writeValueAsString(deviceInfo)
|
||||||
|
|
||||||
|
// 2. 加载平台公钥
|
||||||
|
val publicKeyBytes = Base64.getDecoder().decode(platformPublicKeyBase64)
|
||||||
|
val keySpec = X509EncodedKeySpec(publicKeyBytes)
|
||||||
|
val keyFactory = KeyFactory.getInstance("RSA")
|
||||||
|
val publicKey = keyFactory.generatePublic(keySpec)
|
||||||
|
|
||||||
|
// 3. 使用 RSA-OAEP 加密
|
||||||
|
val cipher = Cipher.getInstance(CIPHER_ALGORITHM)
|
||||||
|
cipher.init(Cipher.ENCRYPT_MODE, publicKey)
|
||||||
|
val encryptedBytes = cipher.doFinal(deviceInfoJson.toByteArray(StandardCharsets.UTF_8))
|
||||||
|
|
||||||
|
// 4. Base64 编码
|
||||||
|
return Base64.getEncoder().encodeToString(encryptedBytes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.6 C# 实现示例
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
using System;
|
||||||
|
using System.Security.Cryptography;
|
||||||
|
using System.Text;
|
||||||
|
using System.Text.Json;
|
||||||
|
|
||||||
|
public class DeviceAuthorizationUtil
|
||||||
|
{
|
||||||
|
private const string CipherAlgorithm = "RSA/ECB/OAEPWithSHA-256AndMGF1Padding";
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// 使用平台公钥加密设备信息
|
||||||
|
/// </summary>
|
||||||
|
public static string EncryptDeviceInfo(
|
||||||
|
string licence,
|
||||||
|
string fingerprint,
|
||||||
|
string platformPublicKeyBase64)
|
||||||
|
{
|
||||||
|
// 1. 组装设备信息 JSON
|
||||||
|
var deviceInfo = new
|
||||||
|
{
|
||||||
|
licence = licence,
|
||||||
|
fingerprint = fingerprint
|
||||||
|
};
|
||||||
|
var deviceInfoJson = JsonSerializer.Serialize(deviceInfo);
|
||||||
|
|
||||||
|
// 2. 加载平台公钥
|
||||||
|
var publicKeyBytes = Convert.FromBase64String(platformPublicKeyBase64);
|
||||||
|
using var rsa = RSA.Create();
|
||||||
|
rsa.ImportSubjectPublicKeyInfo(publicKeyBytes, out _);
|
||||||
|
|
||||||
|
// 3. 使用 RSA-OAEP 加密
|
||||||
|
var encryptedBytes = rsa.Encrypt(
|
||||||
|
Encoding.UTF8.GetBytes(deviceInfoJson),
|
||||||
|
RSAEncryptionPadding.OaepSHA256
|
||||||
|
);
|
||||||
|
|
||||||
|
// 4. Base64 编码
|
||||||
|
return Convert.ToBase64String(encryptedBytes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 四、生成二维码
|
||||||
|
|
||||||
|
### 4.1 二维码内容
|
||||||
|
|
||||||
|
二维码内容就是加密后的 **Base64 编码字符串**(不是 JSON 格式)。
|
||||||
|
|
||||||
|
**示例**:
|
||||||
|
```
|
||||||
|
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzDlZvMDVaL+fjl05Hi182JOAUAaN4gh9rOF+1NhKfO4J6e0HLy8lBuylp3A4xoTiyUejNm22h0dqAgDSPnY/xZR76POFTD1soHr2LaFCN8JAbQ96P8gE7wC9qpoTssVvIVRH7QbVd260J6eD0Szwcx9cg591RSN69pMpe5IVRi8T99Hhql6/wnZHORPr18eESLOY93jRskLzc0q18r68RRoTJiQf+9YC8ub5iKp7rCjVnPi1UbIYmXmL08tk5mksYA0NqWQAa1ofKxx/9tQtB9uTjhTxuTu94XU9jlGU87qaHZs+kpqa8CAbYYJFbSP1xHwoZzpU2jpw2aF22HBYxwIDAQAB...
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.2 二维码生成
|
||||||
|
|
||||||
|
使用标准的二维码生成库生成二维码图片。
|
||||||
|
|
||||||
|
**Python 示例(使用 qrcode 库)**:
|
||||||
|
```python
|
||||||
|
import qrcode
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
def generate_qr_code(encrypted_data: str, output_path: str = "device_qr.png"):
|
||||||
|
"""
|
||||||
|
生成设备授权二维码
|
||||||
|
|
||||||
|
Args:
|
||||||
|
encrypted_data: Base64编码的加密数据
|
||||||
|
output_path: 二维码图片保存路径
|
||||||
|
"""
|
||||||
|
qr = qrcode.QRCode(
|
||||||
|
version=1, # 控制二维码大小(1-40)
|
||||||
|
error_correction=qrcode.constants.ERROR_CORRECT_M, # 错误纠正级别
|
||||||
|
box_size=10, # 每个小方块的像素数
|
||||||
|
border=4, # 边框的厚度
|
||||||
|
)
|
||||||
|
qr.add_data(encrypted_data)
|
||||||
|
qr.make(fit=True)
|
||||||
|
|
||||||
|
# 创建二维码图片
|
||||||
|
img = qr.make_image(fill_color="black", back_color="white")
|
||||||
|
img.save(output_path)
|
||||||
|
|
||||||
|
print(f"二维码已生成: {output_path}")
|
||||||
|
```
|
||||||
|
|
||||||
|
**Java/Kotlin 示例(使用 ZXing 库)**:
|
||||||
|
```kotlin
|
||||||
|
import com.google.zxing.BarcodeFormat
|
||||||
|
import com.google.zxing.EncodeHintType
|
||||||
|
import com.google.zxing.qrcode.QRCodeWriter
|
||||||
|
import com.google.zxing.qrcode.decoder.ErrorCorrectionLevel
|
||||||
|
import java.awt.image.BufferedImage
|
||||||
|
import javax.imageio.ImageIO
|
||||||
|
import java.io.File
|
||||||
|
|
||||||
|
fun generateQRCode(encryptedData: String, outputPath: String = "device_qr.png") {
|
||||||
|
val hints = hashMapOf<EncodeHintType, Any>().apply {
|
||||||
|
put(EncodeHintType.ERROR_CORRECTION, ErrorCorrectionLevel.M)
|
||||||
|
put(EncodeHintType.CHARACTER_SET, "UTF-8")
|
||||||
|
put(EncodeHintType.MARGIN, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
val writer = QRCodeWriter()
|
||||||
|
val bitMatrix = writer.encode(encryptedData, BarcodeFormat.QR_CODE, 300, 300, hints)
|
||||||
|
|
||||||
|
val width = bitMatrix.width
|
||||||
|
val height = bitMatrix.height
|
||||||
|
val image = BufferedImage(width, height, BufferedImage.TYPE_INT_RGB)
|
||||||
|
|
||||||
|
for (x in 0 until width) {
|
||||||
|
for (y in 0 until height) {
|
||||||
|
image.setRGB(x, y, if (bitMatrix[x, y]) 0x000000 else 0xFFFFFF)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ImageIO.write(image, "PNG", File(outputPath))
|
||||||
|
println("二维码已生成: $outputPath")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 五、完整流程示例
|
||||||
|
|
||||||
|
### 5.1 Python 完整示例
|
||||||
|
|
||||||
|
```python
|
||||||
|
import json
|
||||||
|
import base64
|
||||||
|
import qrcode
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import padding
|
||||||
|
from cryptography.hazmat.primitives import serialization
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
|
||||||
|
def generate_device_authorization_qr(
|
||||||
|
licence: str,
|
||||||
|
fingerprint: str,
|
||||||
|
platform_public_key_base64: str,
|
||||||
|
qr_output_path: str = "device_qr.png"
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
生成设备授权二维码
|
||||||
|
|
||||||
|
Args:
|
||||||
|
licence: 设备授权码
|
||||||
|
fingerprint: 设备硬件指纹
|
||||||
|
platform_public_key_base64: 平台公钥(Base64编码)
|
||||||
|
qr_output_path: 二维码图片保存路径
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
加密后的Base64字符串(二维码内容)
|
||||||
|
"""
|
||||||
|
# 1. 组装设备信息 JSON
|
||||||
|
device_info = {
|
||||||
|
"licence": licence,
|
||||||
|
"fingerprint": fingerprint
|
||||||
|
}
|
||||||
|
device_info_json = json.dumps(device_info, ensure_ascii=False)
|
||||||
|
print(f"设备信息 JSON: {device_info_json}")
|
||||||
|
|
||||||
|
# 2. 加载平台公钥
|
||||||
|
public_key_bytes = base64.b64decode(platform_public_key_base64)
|
||||||
|
public_key = serialization.load_der_public_key(
|
||||||
|
public_key_bytes,
|
||||||
|
backend=default_backend()
|
||||||
|
)
|
||||||
|
|
||||||
|
# 3. 使用 RSA-OAEP 加密
|
||||||
|
encrypted_bytes = public_key.encrypt(
|
||||||
|
device_info_json.encode('utf-8'),
|
||||||
|
padding.OAEP(
|
||||||
|
mgf=padding.MGF1(algorithm=hashes.SHA256()),
|
||||||
|
algorithm=hashes.SHA256(),
|
||||||
|
label=None
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# 4. Base64 编码
|
||||||
|
encrypted_base64 = base64.b64encode(encrypted_bytes).decode('utf-8')
|
||||||
|
print(f"加密后的 Base64: {encrypted_base64[:100]}...") # 只显示前100个字符
|
||||||
|
|
||||||
|
# 5. 生成二维码
|
||||||
|
qr = qrcode.QRCode(
|
||||||
|
version=1,
|
||||||
|
error_correction=qrcode.constants.ERROR_CORRECT_M,
|
||||||
|
box_size=10,
|
||||||
|
border=4,
|
||||||
|
)
|
||||||
|
qr.add_data(encrypted_base64)
|
||||||
|
qr.make(fit=True)
|
||||||
|
|
||||||
|
img = qr.make_image(fill_color="black", back_color="white")
|
||||||
|
img.save(qr_output_path)
|
||||||
|
print(f"二维码已生成: {qr_output_path}")
|
||||||
|
|
||||||
|
return encrypted_base64
|
||||||
|
|
||||||
|
# 使用示例
|
||||||
|
if __name__ == "__main__":
|
||||||
|
# 平台公钥(示例,实际使用时需要从平台获取)
|
||||||
|
platform_public_key = "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzDlZvMDVaL+fjl05Hi182JOAUAaN4gh9rOF+1NhKfO4J6e0HLy8lBuylp3A4xoTiyUejNm22h0dqAgDSPnY/xZR76POFTD1soHr2LaFCN8JAbQ96P8gE7wC9qpoTssVvIVRH7QbVd260J6eD0Szwcx9cg591RSN69pMpe5IVRi8T99Hhql6/wnZHORPr18eESLOY93jRskLzc0q18r68RRoTJiQf+9YC8ub5iKp7rCjVnPi1UbIYmXmL08tk5mksYA0NqWQAa1ofKxx/9tQtB9uTjhTxuTu94XU9jlGU87qaHZs+kpqa8CAbYYJFbSP1xHwoZzpU2jpw2aF22HBYxwIDAQAB"
|
||||||
|
|
||||||
|
# 设备信息
|
||||||
|
licence = "LIC-8F2A-XXXX"
|
||||||
|
fingerprint = "FP-2c91e9f3"
|
||||||
|
|
||||||
|
# 生成二维码
|
||||||
|
encrypted_data = generate_device_authorization_qr(
|
||||||
|
licence=licence,
|
||||||
|
fingerprint=fingerprint,
|
||||||
|
platform_public_key_base64=platform_public_key,
|
||||||
|
qr_output_path="device_authorization_qr.png"
|
||||||
|
)
|
||||||
|
|
||||||
|
print(f"\n二维码内容(加密后的Base64):\n{encrypted_data}")
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.2 Java/Kotlin 完整示例
|
||||||
|
|
||||||
|
```kotlin
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper
|
||||||
|
import com.google.zxing.BarcodeFormat
|
||||||
|
import com.google.zxing.EncodeHintType
|
||||||
|
import com.google.zxing.qrcode.QRCodeWriter
|
||||||
|
import com.google.zxing.qrcode.decoder.ErrorCorrectionLevel
|
||||||
|
import java.awt.image.BufferedImage
|
||||||
|
import java.security.KeyFactory
|
||||||
|
import java.security.PublicKey
|
||||||
|
import java.security.spec.X509EncodedKeySpec
|
||||||
|
import java.util.Base64
|
||||||
|
import javax.crypto.Cipher
|
||||||
|
import javax.imageio.ImageIO
|
||||||
|
import java.io.File
|
||||||
|
import java.nio.charset.StandardCharsets
|
||||||
|
|
||||||
|
object DeviceAuthorizationQRGenerator {
|
||||||
|
|
||||||
|
private const val CIPHER_ALGORITHM = "RSA/ECB/OAEPWithSHA-256AndMGF1Padding"
|
||||||
|
private val objectMapper = ObjectMapper()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 生成设备授权二维码
|
||||||
|
*/
|
||||||
|
fun generateDeviceAuthorizationQR(
|
||||||
|
licence: String,
|
||||||
|
fingerprint: String,
|
||||||
|
platformPublicKeyBase64: String,
|
||||||
|
qrOutputPath: String = "device_qr.png"
|
||||||
|
): String {
|
||||||
|
// 1. 组装设备信息 JSON
|
||||||
|
val deviceInfo = mapOf(
|
||||||
|
"licence" to licence,
|
||||||
|
"fingerprint" to fingerprint
|
||||||
|
)
|
||||||
|
val deviceInfoJson = objectMapper.writeValueAsString(deviceInfo)
|
||||||
|
println("设备信息 JSON: $deviceInfoJson")
|
||||||
|
|
||||||
|
// 2. 加载平台公钥
|
||||||
|
val publicKeyBytes = Base64.getDecoder().decode(platformPublicKeyBase64)
|
||||||
|
val keySpec = X509EncodedKeySpec(publicKeyBytes)
|
||||||
|
val keyFactory = KeyFactory.getInstance("RSA")
|
||||||
|
val publicKey = keyFactory.generatePublic(keySpec)
|
||||||
|
|
||||||
|
// 3. 使用 RSA-OAEP 加密
|
||||||
|
val cipher = Cipher.getInstance(CIPHER_ALGORITHM)
|
||||||
|
cipher.init(Cipher.ENCRYPT_MODE, publicKey)
|
||||||
|
val encryptedBytes = cipher.doFinal(deviceInfoJson.toByteArray(StandardCharsets.UTF_8))
|
||||||
|
|
||||||
|
// 4. Base64 编码
|
||||||
|
val encryptedBase64 = Base64.getEncoder().encodeToString(encryptedBytes)
|
||||||
|
println("加密后的 Base64: ${encryptedBase64.take(100)}...")
|
||||||
|
|
||||||
|
// 5. 生成二维码
|
||||||
|
val hints = hashMapOf<EncodeHintType, Any>().apply {
|
||||||
|
put(EncodeHintType.ERROR_CORRECTION, ErrorCorrectionLevel.M)
|
||||||
|
put(EncodeHintType.CHARACTER_SET, "UTF-8")
|
||||||
|
put(EncodeHintType.MARGIN, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
val writer = QRCodeWriter()
|
||||||
|
val bitMatrix = writer.encode(encryptedBase64, BarcodeFormat.QR_CODE, 300, 300, hints)
|
||||||
|
|
||||||
|
val width = bitMatrix.width
|
||||||
|
val height = bitMatrix.height
|
||||||
|
val image = BufferedImage(width, height, BufferedImage.TYPE_INT_RGB)
|
||||||
|
|
||||||
|
for (x in 0 until width) {
|
||||||
|
for (y in 0 until height) {
|
||||||
|
image.setRGB(x, y, if (bitMatrix[x, y]) 0x000000 else 0xFFFFFF)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ImageIO.write(image, "PNG", File(qrOutputPath))
|
||||||
|
println("二维码已生成: $qrOutputPath")
|
||||||
|
|
||||||
|
return encryptedBase64
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 使用示例
|
||||||
|
fun main() {
|
||||||
|
// 平台公钥(示例,实际使用时需要从平台获取)
|
||||||
|
val platformPublicKey = "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzDlZvMDVaL+fjl05Hi182JOAUAaN4gh9rOF+1NhKfO4J6e0HLy8lBuylp3A4xoTiyUejNm22h0dqAgDSPnY/xZR76POFTD1soHr2LaFCN8JAbQ96P8gE7wC9qpoTssVvIVRH7QbVd260J6eD0Szwcx9cg591RSN69pMpe5IVRi8T99Hhql6/wnZHORPr18eESLOY93jRskLzc0q18r68RRoTJiQf+9YC8ub5iKp7rCjVnPi1UbIYmXmL08tk5mksYA0NqWQAa1ofKxx/9tQtB9uTjhTxuTu94XU9jlGU87qaHZs+kpqa8CAbYYJFbSP1xHwoZzpU2jpw2aF22HBYxwIDAQAB"
|
||||||
|
|
||||||
|
// 设备信息
|
||||||
|
val licence = "LIC-8F2A-XXXX"
|
||||||
|
val fingerprint = "FP-2c91e9f3"
|
||||||
|
|
||||||
|
// 生成二维码
|
||||||
|
val encryptedData = DeviceAuthorizationQRGenerator.generateDeviceAuthorizationQR(
|
||||||
|
licence = licence,
|
||||||
|
fingerprint = fingerprint,
|
||||||
|
platformPublicKeyBase64 = platformPublicKey,
|
||||||
|
qrOutputPath = "device_authorization_qr.png"
|
||||||
|
)
|
||||||
|
|
||||||
|
println("\n二维码内容(加密后的Base64):\n$encryptedData")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 六、平台端验证流程
|
||||||
|
|
||||||
|
平台端会按以下流程验证:
|
||||||
|
|
||||||
|
1. **接收请求**:App 扫描二维码后,将 `encryptedDeviceInfo` 和 `appid` 提交到平台
|
||||||
|
2. **RSA-OAEP 解密**:使用平台私钥解密 `encryptedDeviceInfo`
|
||||||
|
3. **提取设备信息**:从解密后的 JSON 中提取 `licence` 和 `fingerprint`
|
||||||
|
4. **设备验证**:
|
||||||
|
- 检查 `filing_device_licence` 表中是否存在该 `licence`
|
||||||
|
- 如果存在,验证 `fingerprint` 是否匹配
|
||||||
|
- 如果 `fingerprint` 不匹配,记录非法授权日志并返回错误
|
||||||
|
5. **App 绑定**:检查 `filing_app_licence` 表中是否存在绑定关系
|
||||||
|
- 如果不存在,创建新的绑定记录
|
||||||
|
- 如果已存在,返回已绑定信息
|
||||||
|
6. **返回响应**:返回 `deviceLicenceId` 和 `licence`
|
||||||
|
|
||||||
|
## 七、常见错误和注意事项
|
||||||
|
|
||||||
|
### 7.1 加密失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **公钥格式错误**:确保使用正确的 Base64 编码的公钥
|
||||||
|
2. **算法不匹配**:必须使用 `RSA/ECB/OAEPWithSHA-256AndMGF1Padding`
|
||||||
|
3. **数据长度超限**:RSA-2048 最多加密 245 字节(设备信息 JSON 通常不会超过)
|
||||||
|
4. **字符编码错误**:确保使用 UTF-8 编码
|
||||||
|
|
||||||
|
### 7.2 二维码扫描失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **二维码内容过长**:如果加密后的数据过长,可能需要使用更高版本的二维码(version)
|
||||||
|
2. **错误纠正级别过低**:建议使用 `ERROR_CORRECT_M` 或更高
|
||||||
|
3. **二维码图片质量差**:确保二维码图片清晰,有足够的对比度
|
||||||
|
|
||||||
|
### 7.3 平台验证失败
|
||||||
|
|
||||||
|
**可能原因**:
|
||||||
|
1. **licence 已存在但 fingerprint 不匹配**:设备被替换或授权码被复用
|
||||||
|
2. **JSON 格式错误**:确保 JSON 格式正确,字段名和类型匹配
|
||||||
|
3. **加密数据损坏**:确保 Base64 编码和解码正确
|
||||||
|
|
||||||
|
## 八、安全设计说明
|
||||||
|
|
||||||
|
### 8.1 为什么使用 RSA-OAEP
|
||||||
|
|
||||||
|
1. **非对称加密**:只有平台拥有私钥,可以解密数据
|
||||||
|
2. **OAEP 填充**:提供更好的安全性,防止某些攻击
|
||||||
|
3. **SHA-256**:使用强哈希算法,提供更好的安全性
|
||||||
|
|
||||||
|
### 8.2 为什么第三方无法伪造
|
||||||
|
|
||||||
|
1. **只有平台能解密**:第三方无法获取平台私钥,无法解密数据
|
||||||
|
2. **fingerprint 验证**:平台会验证硬件指纹,防止授权码被复用
|
||||||
|
3. **非法授权日志**:平台会记录所有非法授权尝试
|
||||||
|
|
||||||
|
## 九、测试建议
|
||||||
|
|
||||||
|
1. **单元测试**:
|
||||||
|
- 测试 JSON 生成是否正确
|
||||||
|
- 测试加密和解密是否匹配
|
||||||
|
- 测试 Base64 编码和解码是否正确
|
||||||
|
|
||||||
|
2. **集成测试**:
|
||||||
|
- 使用真实平台公钥生成二维码
|
||||||
|
- App 扫描二维码并提交到平台
|
||||||
|
- 验证平台是否能正确解密和验证
|
||||||
|
|
||||||
|
3. **边界测试**:
|
||||||
|
- 测试超长的 licence 或 fingerprint
|
||||||
|
- 测试特殊字符的处理
|
||||||
|
- 测试错误的公钥格式
|
||||||
|
|
||||||
|
## 十、参考实现
|
||||||
|
|
||||||
|
- **Python**:`cryptography` 库(RSA 加密)、`qrcode` 库(二维码生成)
|
||||||
|
- **Java/Kotlin**:JDK `javax.crypto`(RSA 加密)、ZXing 库(二维码生成)
|
||||||
|
- **C#**:`System.Security.Cryptography`(RSA 加密)、ZXing.Net 库(二维码生成)
|
||||||
|
|
||||||
|
## 十一、联系支持
|
||||||
|
|
||||||
|
如有问题,请联系平台技术支持团队获取:
|
||||||
|
- 平台公钥(Base64 编码)
|
||||||
|
- 测试环境地址
|
||||||
|
- 技术支持
|
||||||
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
CREATE TABLE "todo" (
|
|
||||||
"id" uuid PRIMARY KEY NOT NULL,
|
|
||||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
|
||||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
|
||||||
"title" text NOT NULL,
|
|
||||||
"completed" boolean DEFAULT false NOT NULL
|
|
||||||
);
|
|
||||||
@@ -1,65 +0,0 @@
|
|||||||
{
|
|
||||||
"id": "4ece5479-57bf-473d-b806-c1176c972e7f",
|
|
||||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
|
||||||
"version": "7",
|
|
||||||
"dialect": "postgresql",
|
|
||||||
"tables": {
|
|
||||||
"public.todo": {
|
|
||||||
"name": "todo",
|
|
||||||
"schema": "",
|
|
||||||
"columns": {
|
|
||||||
"id": {
|
|
||||||
"name": "id",
|
|
||||||
"type": "uuid",
|
|
||||||
"primaryKey": true,
|
|
||||||
"notNull": true
|
|
||||||
},
|
|
||||||
"created_at": {
|
|
||||||
"name": "created_at",
|
|
||||||
"type": "timestamp with time zone",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"default": "now()"
|
|
||||||
},
|
|
||||||
"updated_at": {
|
|
||||||
"name": "updated_at",
|
|
||||||
"type": "timestamp with time zone",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"default": "now()"
|
|
||||||
},
|
|
||||||
"title": {
|
|
||||||
"name": "title",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true
|
|
||||||
},
|
|
||||||
"completed": {
|
|
||||||
"name": "completed",
|
|
||||||
"type": "boolean",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"default": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"indexes": {},
|
|
||||||
"foreignKeys": {},
|
|
||||||
"compositePrimaryKeys": {},
|
|
||||||
"uniqueConstraints": {},
|
|
||||||
"policies": {},
|
|
||||||
"checkConstraints": {},
|
|
||||||
"isRLSEnabled": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"enums": {},
|
|
||||||
"schemas": {},
|
|
||||||
"sequences": {},
|
|
||||||
"roles": {},
|
|
||||||
"policies": {},
|
|
||||||
"views": {},
|
|
||||||
"_meta": {
|
|
||||||
"columns": {},
|
|
||||||
"schemas": {},
|
|
||||||
"tables": {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
{
|
|
||||||
"version": "7",
|
|
||||||
"dialect": "postgresql",
|
|
||||||
"entries": [
|
|
||||||
{
|
|
||||||
"idx": 0,
|
|
||||||
"version": "7",
|
|
||||||
"when": 1777096386609,
|
|
||||||
"tag": "0000_loving_thunderbird",
|
|
||||||
"breakpoints": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://opencode.ai/config.json",
|
||||||
|
"mcp": {
|
||||||
|
"shadcn": {
|
||||||
|
"type": "local",
|
||||||
|
"command": ["bunx", "--bun", "shadcn", "mcp"]
|
||||||
|
},
|
||||||
|
"tanstack": {
|
||||||
|
"type": "remote",
|
||||||
|
"url": "https://tanstack.com/api/mcp"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
+62
-62
@@ -1,71 +1,71 @@
|
|||||||
{
|
{
|
||||||
"name": "fullstack-starter",
|
"name": "@furtherverse/monorepo",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"imports": {
|
"workspaces": [
|
||||||
"#drizzle/*.sql": "./drizzle/*.sql",
|
"apps/*",
|
||||||
"#package": "./package.json",
|
"packages/*"
|
||||||
"#server": "./.output/server/index.mjs"
|
],
|
||||||
},
|
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "bunx --bun vite build",
|
"build": "turbo run build",
|
||||||
"cli": "bun src/bin.ts",
|
"compile": "turbo run compile",
|
||||||
"compile": "bun scripts/compile.ts",
|
"compile:darwin": "turbo run compile:darwin",
|
||||||
"compile:darwin": "bun run compile:darwin:arm64 && bun run compile:darwin:x64",
|
"compile:linux": "turbo run compile:linux",
|
||||||
"compile:darwin:arm64": "bun scripts/compile.ts --target bun-darwin-arm64",
|
"compile:windows": "turbo run compile:windows",
|
||||||
"compile:darwin:x64": "bun scripts/compile.ts --target bun-darwin-x64",
|
"dev": "turbo run dev",
|
||||||
"compile:linux": "bun run compile:linux:x64 && bun run compile:linux:arm64",
|
"dist": "turbo run dist",
|
||||||
"compile:linux:arm64": "bun scripts/compile.ts --target bun-linux-arm64",
|
"dist:linux": "turbo run dist:linux",
|
||||||
"compile:linux:x64": "bun scripts/compile.ts --target bun-linux-x64",
|
"dist:mac": "turbo run dist:mac",
|
||||||
"compile:windows": "bun run compile:windows:x64",
|
"dist:win": "turbo run dist:win",
|
||||||
"compile:windows:x64": "bun scripts/compile.ts --target bun-windows-x64",
|
"fix": "turbo run fix",
|
||||||
"db:embed": "bun scripts/embed-migrations.ts",
|
"typecheck": "turbo run typecheck"
|
||||||
"db:generate": "drizzle-kit generate && bun scripts/embed-migrations.ts",
|
|
||||||
"db:migrate": "drizzle-kit migrate",
|
|
||||||
"db:push": "drizzle-kit push",
|
|
||||||
"db:studio": "drizzle-kit studio",
|
|
||||||
"dev": "bunx --bun vite dev",
|
|
||||||
"fix": "biome check --write",
|
|
||||||
"test": "bun test",
|
|
||||||
"typecheck": "tsc --noEmit"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@logtape/drizzle-orm": "^2.0.5",
|
|
||||||
"@logtape/logtape": "^2.0.5",
|
|
||||||
"@logtape/pretty": "^2.0.5",
|
|
||||||
"@orpc/client": "^1.14.0",
|
|
||||||
"@orpc/contract": "^1.14.0",
|
|
||||||
"@orpc/openapi": "^1.14.0",
|
|
||||||
"@orpc/server": "^1.14.0",
|
|
||||||
"@orpc/tanstack-query": "^1.14.0",
|
|
||||||
"@orpc/zod": "^1.14.0",
|
|
||||||
"@t3-oss/env-core": "^0.13.11",
|
|
||||||
"@tanstack/react-query": "^5.100.1",
|
|
||||||
"@tanstack/react-router": "^1.168.24",
|
|
||||||
"@tanstack/react-router-ssr-query": "^1.166.11",
|
|
||||||
"@tanstack/react-start": "^1.167.48",
|
|
||||||
"citty": "^0.2.2",
|
|
||||||
"drizzle-orm": "0.45.2",
|
|
||||||
"drizzle-zod": "^0.8.3",
|
|
||||||
"postgres": "^3.4.9",
|
|
||||||
"react": "^19.2.5",
|
|
||||||
"react-dom": "^19.2.5",
|
|
||||||
"zod": "^4.3.6"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@biomejs/biome": "^2.4.13",
|
"@biomejs/biome": "^2.4.5",
|
||||||
"@tailwindcss/vite": "^4.2.4",
|
"turbo": "^2.8.13",
|
||||||
"@tanstack/devtools-vite": "^0.6.0",
|
"typescript": "^5.9.3"
|
||||||
"@tanstack/react-devtools": "^0.10.2",
|
},
|
||||||
"@tanstack/react-query-devtools": "^5.100.1",
|
"catalog": {
|
||||||
"@tanstack/react-router-devtools": "^1.166.13",
|
"@orpc/client": "^1.13.6",
|
||||||
"@types/bun": "^1.3.13",
|
"@orpc/contract": "^1.13.6",
|
||||||
"@vitejs/plugin-react": "^6.0.1",
|
"@orpc/openapi": "^1.13.6",
|
||||||
"drizzle-kit": "0.31.10",
|
"@orpc/server": "^1.13.6",
|
||||||
"nitro": "npm:nitro-nightly@3.0.1-20260424-182106-f8cf6ccc",
|
"@orpc/tanstack-query": "^1.13.6",
|
||||||
"tailwindcss": "^4.2.4",
|
"@orpc/zod": "^1.13.6",
|
||||||
"typescript": "^6.0.3",
|
"@t3-oss/env-core": "^0.13.10",
|
||||||
"vite": "^8.0.10"
|
"@tailwindcss/vite": "^4.2.1",
|
||||||
|
"@tanstack/devtools-vite": "^0.5.3",
|
||||||
|
"@tanstack/react-devtools": "^0.9.9",
|
||||||
|
"@tanstack/react-query": "^5.90.21",
|
||||||
|
"@tanstack/react-query-devtools": "^5.91.3",
|
||||||
|
"@tanstack/react-router": "^1.166.2",
|
||||||
|
"@tanstack/react-router-devtools": "^1.166.2",
|
||||||
|
"@tanstack/react-router-ssr-query": "^1.166.2",
|
||||||
|
"@tanstack/react-start": "^1.166.2",
|
||||||
|
"@types/bun": "^1.3.10",
|
||||||
|
"@types/node": "^24.11.0",
|
||||||
|
"@vitejs/plugin-react": "^5.1.4",
|
||||||
|
"babel-plugin-react-compiler": "^1.0.0",
|
||||||
|
"drizzle-kit": "1.0.0-beta.15-859cf75",
|
||||||
|
"drizzle-orm": "1.0.0-beta.15-859cf75",
|
||||||
|
"electron": "^34.0.0",
|
||||||
|
"electron-builder": "^26.8.1",
|
||||||
|
"electron-vite": "^5.0.0",
|
||||||
|
"jszip": "^3.10.1",
|
||||||
|
"motion": "^12.35.0",
|
||||||
|
"nitro": "npm:nitro-nightly@3.0.1-20260227-181935-bfbb207c",
|
||||||
|
"openpgp": "^6.0.1",
|
||||||
|
"react": "^19.2.4",
|
||||||
|
"react-dom": "^19.2.4",
|
||||||
|
"tailwindcss": "^4.2.1",
|
||||||
|
"tree-kill": "^1.2.2",
|
||||||
|
"uuid": "^13.0.0",
|
||||||
|
"vite": "^8.0.0-beta.16",
|
||||||
|
"vite-tsconfig-paths": "^6.1.1",
|
||||||
|
"zod": "^4.3.6"
|
||||||
|
},
|
||||||
|
"overrides": {
|
||||||
|
"@types/node": "catalog:"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"name": "@furtherverse/crypto",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"exports": {
|
||||||
|
".": "./src/index.ts"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"openpgp": "catalog:"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@furtherverse/tsconfig": "workspace:*",
|
||||||
|
"@types/bun": "catalog:"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,53 @@
|
|||||||
|
import { createCipheriv, createDecipheriv, randomBytes } from 'node:crypto'
|
||||||
|
|
||||||
|
const GCM_IV_LENGTH = 12 // 96 bits
|
||||||
|
const GCM_TAG_LENGTH = 16 // 128 bits
|
||||||
|
const ALGORITHM = 'aes-256-gcm'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AES-256-GCM encrypt.
|
||||||
|
*
|
||||||
|
* Output format (before Base64): [IV (12 bytes)] + [ciphertext] + [auth tag (16 bytes)]
|
||||||
|
*
|
||||||
|
* @param plaintext - UTF-8 string to encrypt
|
||||||
|
* @param key - 32-byte AES key
|
||||||
|
* @returns Base64-encoded encrypted data
|
||||||
|
*/
|
||||||
|
export const aesGcmEncrypt = (plaintext: string, key: Buffer): string => {
|
||||||
|
const iv = randomBytes(GCM_IV_LENGTH)
|
||||||
|
const cipher = createCipheriv(ALGORITHM, key, iv, { authTagLength: GCM_TAG_LENGTH })
|
||||||
|
|
||||||
|
const encrypted = Buffer.concat([cipher.update(plaintext, 'utf-8'), cipher.final()])
|
||||||
|
const tag = cipher.getAuthTag()
|
||||||
|
|
||||||
|
// Layout: IV + ciphertext + tag
|
||||||
|
const combined = Buffer.concat([iv, encrypted, tag])
|
||||||
|
return combined.toString('base64')
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AES-256-GCM decrypt.
|
||||||
|
*
|
||||||
|
* Input format (after Base64 decode): [IV (12 bytes)] + [ciphertext] + [auth tag (16 bytes)]
|
||||||
|
*
|
||||||
|
* @param encryptedBase64 - Base64-encoded encrypted data
|
||||||
|
* @param key - 32-byte AES key
|
||||||
|
* @returns Decrypted UTF-8 string
|
||||||
|
*/
|
||||||
|
export const aesGcmDecrypt = (encryptedBase64: string, key: Buffer): string => {
|
||||||
|
const data = Buffer.from(encryptedBase64, 'base64')
|
||||||
|
|
||||||
|
if (data.length < GCM_IV_LENGTH + GCM_TAG_LENGTH) {
|
||||||
|
throw new Error('Encrypted data too short: must contain IV + tag at minimum')
|
||||||
|
}
|
||||||
|
|
||||||
|
const iv = data.subarray(0, GCM_IV_LENGTH)
|
||||||
|
const tag = data.subarray(data.length - GCM_TAG_LENGTH)
|
||||||
|
const ciphertext = data.subarray(GCM_IV_LENGTH, data.length - GCM_TAG_LENGTH)
|
||||||
|
|
||||||
|
const decipher = createDecipheriv(ALGORITHM, key, iv, { authTagLength: GCM_TAG_LENGTH })
|
||||||
|
decipher.setAuthTag(tag)
|
||||||
|
|
||||||
|
const decrypted = Buffer.concat([decipher.update(ciphertext), decipher.final()])
|
||||||
|
return decrypted.toString('utf-8')
|
||||||
|
}
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
import { createHash } from 'node:crypto'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute SHA-256 hash and return raw Buffer.
|
||||||
|
*/
|
||||||
|
export const sha256 = (data: string | Buffer): Buffer => {
|
||||||
|
return createHash('sha256').update(data).digest()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute SHA-256 hash and return lowercase hex string.
|
||||||
|
*/
|
||||||
|
export const sha256Hex = (data: string | Buffer): string => {
|
||||||
|
return createHash('sha256').update(data).digest('hex')
|
||||||
|
}
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
import { hkdfSync } from 'node:crypto'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Derive a key using HKDF-SHA256.
|
||||||
|
*
|
||||||
|
* @param ikm - Input keying material (string, will be UTF-8 encoded)
|
||||||
|
* @param salt - Salt value (string, will be UTF-8 encoded)
|
||||||
|
* @param info - Info/context string (will be UTF-8 encoded)
|
||||||
|
* @param length - Output key length in bytes (default: 32 for AES-256)
|
||||||
|
* @returns Derived key as Buffer
|
||||||
|
*/
|
||||||
|
export const hkdfSha256 = (ikm: string, salt: string, info: string, length = 32): Buffer => {
|
||||||
|
const derived = hkdfSync('sha256', ikm, salt, info, length)
|
||||||
|
return Buffer.from(derived)
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
import { createHmac } from 'node:crypto'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute HMAC-SHA256 and return Base64-encoded signature.
|
||||||
|
*
|
||||||
|
* @param key - HMAC key (Buffer)
|
||||||
|
* @param data - Data to sign (UTF-8 string)
|
||||||
|
* @returns Base64-encoded HMAC-SHA256 signature
|
||||||
|
*/
|
||||||
|
export const hmacSha256Base64 = (key: Buffer, data: string): string => {
|
||||||
|
return createHmac('sha256', key).update(data, 'utf-8').digest('base64')
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute HMAC-SHA256 and return raw Buffer.
|
||||||
|
*
|
||||||
|
* @param key - HMAC key (Buffer)
|
||||||
|
* @param data - Data to sign (UTF-8 string)
|
||||||
|
* @returns HMAC-SHA256 digest as Buffer
|
||||||
|
*/
|
||||||
|
export const hmacSha256 = (key: Buffer, data: string): Buffer => {
|
||||||
|
return createHmac('sha256', key).update(data, 'utf-8').digest()
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
export { aesGcmDecrypt, aesGcmEncrypt } from './aes-gcm'
|
||||||
|
export { sha256, sha256Hex } from './hash'
|
||||||
|
export { hkdfSha256 } from './hkdf'
|
||||||
|
export { hmacSha256, hmacSha256Base64 } from './hmac'
|
||||||
|
export { generatePgpKeyPair, pgpSignDetached, pgpVerifyDetached } from './pgp'
|
||||||
|
export { rsaOaepEncrypt } from './rsa-oaep'
|
||||||
@@ -0,0 +1,75 @@
|
|||||||
|
import * as openpgp from 'openpgp'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate an OpenPGP RSA key pair.
|
||||||
|
*
|
||||||
|
* @param name - User name for the key
|
||||||
|
* @param email - User email for the key
|
||||||
|
* @returns ASCII-armored private and public keys
|
||||||
|
*/
|
||||||
|
export const generatePgpKeyPair = async (
|
||||||
|
name: string,
|
||||||
|
email: string,
|
||||||
|
): Promise<{ privateKey: string; publicKey: string }> => {
|
||||||
|
const { privateKey, publicKey } = await openpgp.generateKey({
|
||||||
|
type: 'rsa',
|
||||||
|
rsaBits: 2048,
|
||||||
|
userIDs: [{ name, email }],
|
||||||
|
format: 'armored',
|
||||||
|
})
|
||||||
|
|
||||||
|
return { privateKey, publicKey }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a detached OpenPGP signature for the given data.
|
||||||
|
*
|
||||||
|
* @param data - Raw data to sign (Buffer or Uint8Array)
|
||||||
|
* @param armoredPrivateKey - ASCII-armored private key
|
||||||
|
* @returns ASCII-armored detached signature (signature.asc content)
|
||||||
|
*/
|
||||||
|
export const pgpSignDetached = async (data: Uint8Array, armoredPrivateKey: string): Promise<string> => {
|
||||||
|
const privateKey = await openpgp.readPrivateKey({ armoredKey: armoredPrivateKey })
|
||||||
|
const message = await openpgp.createMessage({ binary: data })
|
||||||
|
|
||||||
|
const signature = await openpgp.sign({
|
||||||
|
message,
|
||||||
|
signingKeys: privateKey,
|
||||||
|
detached: true,
|
||||||
|
format: 'armored',
|
||||||
|
})
|
||||||
|
|
||||||
|
return signature as string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify a detached OpenPGP signature.
|
||||||
|
*
|
||||||
|
* @param data - Original data (Buffer or Uint8Array)
|
||||||
|
* @param armoredSignature - ASCII-armored detached signature
|
||||||
|
* @param armoredPublicKey - ASCII-armored public key
|
||||||
|
* @returns true if signature is valid
|
||||||
|
*/
|
||||||
|
export const pgpVerifyDetached = async (
|
||||||
|
data: Uint8Array,
|
||||||
|
armoredSignature: string,
|
||||||
|
armoredPublicKey: string,
|
||||||
|
): Promise<boolean> => {
|
||||||
|
const publicKey = await openpgp.readKey({ armoredKey: armoredPublicKey })
|
||||||
|
const signature = await openpgp.readSignature({ armoredSignature })
|
||||||
|
const message = await openpgp.createMessage({ binary: data })
|
||||||
|
|
||||||
|
const verificationResult = await openpgp.verify({
|
||||||
|
message,
|
||||||
|
signature,
|
||||||
|
verificationKeys: publicKey,
|
||||||
|
})
|
||||||
|
|
||||||
|
const { verified } = verificationResult.signatures[0]!
|
||||||
|
try {
|
||||||
|
await verified
|
||||||
|
return true
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
import { constants, createPublicKey, publicEncrypt } from 'node:crypto'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* RSA-OAEP encrypt with platform public key.
|
||||||
|
*
|
||||||
|
* Algorithm: RSA/ECB/OAEPWithSHA-256AndMGF1Padding
|
||||||
|
* - OAEP hash: SHA-256
|
||||||
|
* - MGF1 hash: SHA-256
|
||||||
|
*
|
||||||
|
* @param plaintext - UTF-8 string to encrypt
|
||||||
|
* @param publicKeyBase64 - Platform public key (X.509 DER, Base64 encoded)
|
||||||
|
* @returns Base64-encoded ciphertext
|
||||||
|
*/
|
||||||
|
export const rsaOaepEncrypt = (plaintext: string, publicKeyBase64: string): string => {
|
||||||
|
// Load public key from Base64-encoded DER (X.509 / SubjectPublicKeyInfo)
|
||||||
|
const publicKeyDer = Buffer.from(publicKeyBase64, 'base64')
|
||||||
|
const publicKey = createPublicKey({
|
||||||
|
key: publicKeyDer,
|
||||||
|
format: 'der',
|
||||||
|
type: 'spki',
|
||||||
|
})
|
||||||
|
|
||||||
|
// Encrypt with RSA-OAEP (SHA-256 for both OAEP hash and MGF1)
|
||||||
|
const encrypted = publicEncrypt(
|
||||||
|
{
|
||||||
|
key: publicKey,
|
||||||
|
padding: constants.RSA_PKCS1_OAEP_PADDING,
|
||||||
|
oaepHash: 'sha256',
|
||||||
|
},
|
||||||
|
Buffer.from(plaintext, 'utf-8'),
|
||||||
|
)
|
||||||
|
|
||||||
|
return encrypted.toString('base64')
|
||||||
|
}
|
||||||
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"extends": "@furtherverse/tsconfig/bun.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"rootDir": "src"
|
||||||
|
},
|
||||||
|
"include": ["src"]
|
||||||
|
}
|
||||||
@@ -1,8 +1,9 @@
|
|||||||
{
|
{
|
||||||
"$schema": "https://json.schemastore.org/tsconfig",
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Base",
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"target": "esnext",
|
"target": "esnext",
|
||||||
"lib": ["ESNext", "DOM", "DOM.Iterable"],
|
"lib": ["ESNext"],
|
||||||
"module": "preserve",
|
"module": "preserve",
|
||||||
"skipLibCheck": true,
|
"skipLibCheck": true,
|
||||||
|
|
||||||
@@ -19,14 +20,7 @@
|
|||||||
"noFallthroughCasesInSwitch": true,
|
"noFallthroughCasesInSwitch": true,
|
||||||
"noUncheckedSideEffectImports": true,
|
"noUncheckedSideEffectImports": true,
|
||||||
"noUncheckedIndexedAccess": true,
|
"noUncheckedIndexedAccess": true,
|
||||||
"noImplicitOverride": true,
|
"noImplicitOverride": true
|
||||||
|
|
||||||
"jsx": "react-jsx",
|
|
||||||
"types": ["bun"],
|
|
||||||
|
|
||||||
"paths": {
|
|
||||||
"@/*": ["./src/*"]
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"exclude": ["node_modules", ".output", "out"]
|
"exclude": ["node_modules"]
|
||||||
}
|
}
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Bun",
|
||||||
|
"extends": "./base.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"types": ["bun-types"]
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"name": "@furtherverse/tsconfig",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"exports": {
|
||||||
|
"./base.json": "./base.json",
|
||||||
|
"./bun.json": "./bun.json",
|
||||||
|
"./react.json": "./react.json"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "React",
|
||||||
|
"extends": "./base.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["ESNext", "DOM", "DOM.Iterable"],
|
||||||
|
"jsx": "react-jsx"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
User-agent: *
|
|
||||||
Disallow:
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user