refactor: 应用 Oracle round-4 复核,硬化 migrator 与默认安全值
- migrate: 校验已应用 migration 的 SHA-256,拒绝 schema drift; split 后 trim + skip empty,避免空 statement 触发 SQL 错误
- todo.contract: update 拒绝空 patch
- env: DATABASE_URL 限定 postgres(ql):// scheme,配置错误更早失败
- compile: autoloadDotenv: false,二进制部署不再吞 cwd 的 .env
- Error.tsx: 生产环境隐藏 error.message,避免内部错误泄露
- AGENTS: 同步 generatedFieldKeys / migrator 行为新描述
This commit is contained in:
@@ -45,10 +45,10 @@ Before committing: `bun run fix && bun run typecheck && bun run test`. No CI, no
|
||||
```
|
||||
Do NOT use the v2 object form (`orderBy: { createdAt: 'desc' }`, `where: { id }`) — it won't type-check.
|
||||
- To add relations later: declare per-table with `relations()` from `drizzle-orm` and export them from the same file as the table; they get picked up automatically because `index.ts` does `drizzle({ schema })` via `import *`.
|
||||
- Every table must spread `...generatedFields` from `src/server/db/fields.ts` (`id` UUIDv7 via `$defaultFn(uuidv7)`, `createdAt`, `updatedAt` with `$onUpdateFn`). `generatedFieldKeys` (hand-written `as const`) feeds `createInsertSchema(...).omit(...)`.
|
||||
- Every table must spread `...generatedFields` from `src/server/db/fields.ts` (`id` UUIDv7 via `$defaultFn(uuidv7)`, `createdAt`, `updatedAt` with `$onUpdateFn`). `generatedFieldKeys` is hand-written and uses `satisfies Record<keyof typeof generatedFields, true>` so any field-key drift fails typecheck; it feeds `createInsertSchema(...).omit(...)` / `createUpdateSchema(...).omit(...)`.
|
||||
- `src/server/db/index.ts` exports a module-level `const db = drizzle(...)` — not a lazy singleton. On Bun this is a long-lived process, so top-level side effects are fine and requested. Don't reintroduce `getDB/closeDB` ceremony; the Nitro shutdown plugin calls `db.$client.end()` directly. (Cloudflare Workers would need per-request init — we don't support that deployment target.)
|
||||
- `drizzle.config.ts` runs outside Vite — `@/*` path aliases do NOT resolve there. It currently does `import { env } from './src/env'` (relative). Preserve that.
|
||||
- **Migrations are embedded in the binary, not read from disk.** `bun run db:generate` chains `drizzle-kit generate && bun embed-migrations.ts`, which regenerates `src/server/db/migrations.gen.ts` (committed, AUTO-GENERATED header) by `import sql_<idx> from '../../../drizzle/<tag>.sql' with { type: 'text' }`. `src/cli/migrate.ts` reads `embeddedMigrations`, computes SHA-256 hashes at runtime, and applies pending entries via `db.execute(sql\`...\`)` + `db.transaction(...)` against the `drizzle.__drizzle_migrations` book-keeping table — public APIs only, no `db.dialect`/`db.session` (those are `@internal`). Dev helpers `db:push` / `drizzle-kit migrate` still read `./drizzle/`.
|
||||
- **Migrations are embedded in the binary, not read from disk.** `bun run db:generate` chains `drizzle-kit generate && bun embed-migrations.ts`, which regenerates `src/server/db/migrations.gen.ts` (committed, AUTO-GENERATED header) by `import sql_<idx> from '../../../drizzle/<tag>.sql' with { type: 'text' }`. `src/cli/migrate.ts` reads `embeddedMigrations`, **validates SHA-256 hash of every already-applied migration against the embedded SQL** (rejects schema drift if anyone edited an applied migration), then applies pending entries via `db.execute(sql\`...\`)` + `db.transaction(...)` against the `drizzle.__drizzle_migrations` book-keeping table — public APIs only, no `db.dialect`/`db.session` (those are `@internal`). Each migration is split on `--> statement-breakpoint`; empty fragments are trimmed and skipped. Dev helpers `db:push` / `drizzle-kit migrate` still read `./drizzle/`.
|
||||
|
||||
## CLI & single-binary deploy
|
||||
|
||||
|
||||
+2
-1
@@ -49,7 +49,8 @@ const main = async () => {
|
||||
const result = await Bun.build({
|
||||
entrypoints: [ENTRYPOINT],
|
||||
outdir: OUTDIR,
|
||||
compile: { outfile, target },
|
||||
// autoloadDotenv: false — produce a deterministic binary; it must not silently consume a .env from cwd.
|
||||
compile: { outfile, target, autoloadDotenv: false },
|
||||
minify: true,
|
||||
bytecode: true,
|
||||
sourcemap: 'inline',
|
||||
|
||||
+24
-8
@@ -19,6 +19,8 @@ export default defineCommand({
|
||||
return
|
||||
}
|
||||
|
||||
// Hex-encoded SHA-256 digest of a string (used for migration drift checks).
function sha256(input: string): string {
  return createHash('sha256').update(input).digest('hex')
}
|
||||
|
||||
const db = drizzle({ connection: { url: env.DATABASE_URL, max: 1, onnotice: () => {} } })
|
||||
try {
|
||||
await db.execute(sql`CREATE SCHEMA IF NOT EXISTS "drizzle"`)
|
||||
@@ -29,12 +31,25 @@ export default defineCommand({
|
||||
created_at bigint
|
||||
)
|
||||
`)
|
||||
const last = await db.execute<{ created_at: string | null }>(
|
||||
sql`SELECT created_at FROM "drizzle"."__drizzle_migrations" ORDER BY created_at DESC LIMIT 1`,
|
||||
)
|
||||
const lastMillis = Number(last[0]?.created_at ?? 0)
|
||||
|
||||
const pending = embeddedMigrations.filter((m) => m.when > lastMillis)
|
||||
const applied = await db.execute<{ hash: string; created_at: string | null }>(
|
||||
sql`SELECT hash, created_at FROM "drizzle"."__drizzle_migrations" ORDER BY created_at ASC`,
|
||||
)
|
||||
|
||||
// Reject schema drift: any applied migration whose embedded SQL has changed (or is missing) is fatal.
|
||||
for (const row of applied) {
|
||||
const when = Number(row.created_at)
|
||||
const m = embeddedMigrations.find((e) => e.when === when)
|
||||
if (!m) {
|
||||
throw new Error(`Applied migration when=${when} is not in this binary; do not roll back applied migrations.`)
|
||||
}
|
||||
if (sha256(m.sql) !== row.hash) {
|
||||
throw new Error(`Migration hash mismatch at when=${when}; do not edit migrations after they are applied.`)
|
||||
}
|
||||
}
|
||||
|
||||
const appliedWhens = new Set(applied.map((r) => Number(r.created_at)))
|
||||
const pending = embeddedMigrations.filter((m) => !appliedWhens.has(m.when))
|
||||
if (pending.length === 0) {
|
||||
console.log('Database is up to date.')
|
||||
return
|
||||
@@ -43,12 +58,13 @@ export default defineCommand({
|
||||
console.log(`Applying ${pending.length} migration(s)...`)
|
||||
await db.transaction(async (tx) => {
|
||||
for (const m of pending) {
|
||||
for (const stmt of m.sql.split('--> statement-breakpoint')) {
|
||||
for (const rawStmt of m.sql.split('--> statement-breakpoint')) {
|
||||
const stmt = rawStmt.trim()
|
||||
if (!stmt) continue
|
||||
await tx.execute(sql.raw(stmt))
|
||||
}
|
||||
const hash = createHash('sha256').update(m.sql).digest('hex')
|
||||
await tx.execute(
|
||||
sql`INSERT INTO "drizzle"."__drizzle_migrations" ("hash", "created_at") VALUES (${hash}, ${m.when})`,
|
||||
sql`INSERT INTO "drizzle"."__drizzle_migrations" ("hash", "created_at") VALUES (${sha256(m.sql)}, ${m.when})`,
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
@@ -20,7 +20,7 @@ export const ErrorComponent = ({ error, reset }: { error: Error; reset: () => vo
|
||||
</div>
|
||||
<div>
|
||||
<h1 className="text-2xl font-bold text-slate-900">出错了</h1>
|
||||
<p className="text-slate-500 mt-2">{error.message}</p>
|
||||
<p className="text-slate-500 mt-2">{import.meta.env.DEV ? error.message : '请求失败,请稍后重试'}</p>
|
||||
</div>
|
||||
<div className="flex items-center justify-center gap-4">
|
||||
<button
|
||||
|
||||
+1
-1
@@ -3,7 +3,7 @@ import { z } from 'zod'
|
||||
|
||||
export const env = createEnv({
|
||||
server: {
|
||||
DATABASE_URL: z.url(),
|
||||
DATABASE_URL: z.url({ protocol: /^postgres(ql)?$/ }),
|
||||
},
|
||||
clientPrefix: 'VITE_',
|
||||
client: {},
|
||||
|
||||
@@ -8,7 +8,9 @@ const selectSchema = createSelectSchema(todoTable)
|
||||
|
||||
const insertSchema = createInsertSchema(todoTable).omit(generatedFieldKeys)
|
||||
|
||||
const updateSchema = createUpdateSchema(todoTable).omit(generatedFieldKeys)
|
||||
const updateSchema = createUpdateSchema(todoTable)
|
||||
.omit(generatedFieldKeys)
|
||||
.refine((data) => Object.keys(data).length > 0, { message: 'At least one field is required' })
|
||||
|
||||
export const list = oc.input(z.void()).output(z.array(selectSchema))
|
||||
|
||||
|
||||
Reference in New Issue
Block a user