refactor(layout): 根目录脚本归位 src/ 与 scripts/,sql.d.ts 下沉到 db/
- bin.ts → src/bin.ts (生产入口归并到 src/,import 改用 #package + @/cli/*) - compile.ts → scripts/compile.ts (开发期工具) - embed-migrations.ts → scripts/embed-migrations.ts (codegen) - src/sql.d.ts → src/server/db/sql.d.ts (与唯一消费者 migrations.gen.ts 共址) 效果:项目根从 3 个零散 .ts 减为 0 个,src/ 是完整应用源码,scripts/ 明确区分开发期工具。所有 package.json scripts、AGENTS.md layout/CLI 章节、 compile.ts ENTRYPOINT 与 .js.map 清理路径同步更新。 验证:fix / typecheck / test 3/3 / build 570ms / compile 117M / docker compose 全套(migrate 干净的 logger=cli.migrate JSON 日志、app /health 200、POST /api/todo/create 成功)。
This commit is contained in:
@@ -0,0 +1,73 @@
|
||||
import { mkdir, rm } from 'node:fs/promises'
|
||||
import { basename } from 'node:path'
|
||||
import { parseArgs } from 'node:util'
|
||||
|
||||
const ENTRYPOINT = 'src/bin.ts'
|
||||
const OUTDIR = 'out'
|
||||
|
||||
const SUPPORTED_TARGETS: readonly Bun.Build.CompileTarget[] = [
|
||||
'bun-windows-x64',
|
||||
'bun-darwin-arm64',
|
||||
'bun-darwin-x64',
|
||||
'bun-linux-x64',
|
||||
'bun-linux-arm64',
|
||||
]
|
||||
|
||||
const SUPPORTED_TARGET_SET: ReadonlySet<string> = new Set(SUPPORTED_TARGETS)
|
||||
|
||||
const isSupportedTarget = (value: string): value is Bun.Build.CompileTarget => SUPPORTED_TARGET_SET.has(value)
|
||||
|
||||
const { values } = parseArgs({
|
||||
options: { target: { type: 'string' } },
|
||||
strict: true,
|
||||
allowPositionals: false,
|
||||
})
|
||||
|
||||
const resolveTarget = (): Bun.Build.CompileTarget => {
|
||||
if (values.target !== undefined) {
|
||||
if (!isSupportedTarget(values.target)) {
|
||||
throw new Error(`Invalid target: ${values.target}\nAllowed: ${SUPPORTED_TARGETS.join(', ')}`)
|
||||
}
|
||||
return values.target
|
||||
}
|
||||
|
||||
const os = process.platform === 'win32' ? 'windows' : process.platform
|
||||
const candidate = `bun-${os}-${process.arch}`
|
||||
if (!isSupportedTarget(candidate)) {
|
||||
throw new Error(`Unsupported host: ${process.platform}-${process.arch}`)
|
||||
}
|
||||
return candidate
|
||||
}
|
||||
|
||||
const main = async () => {
|
||||
const target = resolveTarget()
|
||||
const suffix = target.replace('bun-', '')
|
||||
const outfile = `server-${suffix}`
|
||||
|
||||
await mkdir(OUTDIR, { recursive: true })
|
||||
await Promise.all([rm(`${OUTDIR}/${outfile}`, { force: true }), rm(`${OUTDIR}/${outfile}.exe`, { force: true })])
|
||||
|
||||
const result = await Bun.build({
|
||||
entrypoints: [ENTRYPOINT],
|
||||
outdir: OUTDIR,
|
||||
// autoloadDotenv: false — produce a deterministic binary; it must not silently consume a .env from cwd.
|
||||
compile: { outfile, target, autoloadDotenv: false },
|
||||
minify: true,
|
||||
bytecode: true,
|
||||
sourcemap: 'inline',
|
||||
})
|
||||
|
||||
if (!result.success) {
|
||||
throw new Error(result.logs.map(String).join('\n'))
|
||||
}
|
||||
|
||||
// Bun bundler still writes *.js.map next to the binary even with inline sourcemap.
|
||||
await rm(`${OUTDIR}/${basename(ENTRYPOINT, '.ts')}.js.map`, { force: true })
|
||||
|
||||
console.log(`✓ ${target} → ${OUTDIR}/${outfile}`)
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error('❌', err instanceof Error ? err.message : err)
|
||||
process.exit(1)
|
||||
})
|
||||
@@ -0,0 +1,51 @@
|
||||
import { existsSync } from 'node:fs'
|
||||
import { readFile, writeFile } from 'node:fs/promises'
|
||||
import { z } from 'zod'
|
||||
|
||||
// Input: drizzle-kit's migration journal. Output: the generated TS module.
const JOURNAL = './drizzle/meta/_journal.json'
const OUTPUT = './src/server/db/migrations.gen.ts'

// Shape of one journal entry as written by drizzle-kit.
// The tag pattern (e.g. `0003_add_users`) excludes quotes and backslashes,
// which also keeps the tag safe to interpolate into generated source below.
const journalEntrySchema = z.object({
  idx: z.number().int().nonnegative(),
  tag: z.string().regex(/^\d{4}_[a-z0-9_]+$/),
  when: z.number().int().nonnegative(),
  breakpoints: z.boolean(),
})
// A journal with no `entries` key is treated as empty rather than invalid.
const journalSchema = z.object({ entries: z.array(journalEntrySchema).default([]) })

type JournalEntry = z.infer<typeof journalEntrySchema>
||||
const readJournalEntries = async (): Promise<JournalEntry[]> => {
|
||||
if (!existsSync(JOURNAL)) {
|
||||
return []
|
||||
}
|
||||
const raw: unknown = JSON.parse(await readFile(JOURNAL, 'utf-8'))
|
||||
return journalSchema.parse(raw).entries.sort((a, b) => a.idx - b.idx)
|
||||
}
|
||||
|
||||
/**
 * Regenerates `migrations.gen.ts`: one `with { type: 'text' }` import per
 * migration .sql file plus a literal array describing them, so migrations
 * ship embedded in the build instead of being read from disk at runtime.
 */
const main = async () => {
  const entries = await readJournalEntries()

  // One Bun text import per migration. `sql_<idx>` identifiers are assumed
  // unique because journal idx values are — TODO confirm drizzle guarantees this.
  const imports = entries
    .map((e) => `import sql_${e.idx} from '#drizzle/${e.tag}.sql' with { type: 'text' }`)
    .join('\n')

  // Array literal source. Tags are safe inside single quotes: the journal
  // schema restricts them to /^\d{4}_[a-z0-9_]+$/ (no quotes/backslashes).
  const arrayBody = entries.length
    ? `[\n${entries.map((e) => `  { tag: '${e.tag}', sql: sql_${e.idx}, when: ${e.when}, breakpoints: ${e.breakpoints} },`).join('\n')}\n]`
    : '[]'

  const out = `// AUTO-GENERATED by \`bun run db:embed\`. Do not edit.
${imports ? `${imports}\n` : ''}
export type EmbeddedMigration = { tag: string; sql: string; when: number; breakpoints: boolean }

export const embeddedMigrations: readonly EmbeddedMigration[] = ${arrayBody}
`

  // Overwrites the previous generated module unconditionally.
  await writeFile(OUTPUT, out)
  console.log(`✓ ${OUTPUT} (${entries.length} migration${entries.length === 1 ? '' : 's'})`)
}
||||
main().catch((err) => {
|
||||
console.error('❌', err instanceof Error ? err.message : err)
|
||||
process.exit(1)
|
||||
})
|
||||
Reference in New Issue
Block a user