Compare commits

...

2 Commits

Author SHA1 Message Date
f0eda5ed50 feat: add OCR recognition feature and image optimizations
Some checks failed
Build Server / Compile DLL (push) Failing after 43s
- Turn the base image stage into the runner stage, add the WeChat binary package, copy the project files, and install dependencies.
- Add an endpoint that handles OCR requests to recognize text in uploaded images.
- Encapsulate the OCR functionality in a WechatOCRClient class and add temporary-file handling logic.
2025-07-08 00:51:06 +08:00
34cf6e9cbf - Replace the base image with archlinux:base-devel and simplify the build steps.
2025-07-08 00:45:37 +08:00
3 changed files with 90 additions and 66 deletions

Dockerfile

@@ -1,18 +1,17 @@
# FROM greyltc/archlinux-aur:yay AS lib
# WORKDIR /src
# RUN yay -Syu --noconfirm cmake python nodejs && \
# git clone https://github.com/swigger/wechat-ocr.git wcocr && \
# sed -i 's/v3.21.0/v3.21.2/g' wcocr/CMakeLists.txt && \
# mkdir -p wcocr/build && cd wcocr/build && cmake .. && make -j$(nproc)
FROM archlinux:base-devel AS lib
WORKDIR /src
RUN pacman -Syu --noconfirm cmake python nodejs && \
git clone https://github.com/swigger/wechat-ocr.git wcocr && \
sed -i 's/v3.21.0/v3.21.2/g' wcocr/CMakeLists.txt && \
mkdir -p wcocr/build && cd wcocr/build && cmake .. && make -j$(nproc)
FROM greyltc/archlinux-aur:yay
FROM greyltc/archlinux-aur:yay AS runner
WORKDIR /app
RUN aur-install bun-bin wechat-bin && \
echo "Hello World"
# COPY --from=lib /src/wcocr/build/libwcocr.so .
# COPY src/index.ts .
# RUN bun install hono
RUN aur-install bun-bin wechat-bin
COPY . .
COPY --from=lib /src/wcocr/build/libwcocr.so .
RUN bun install --frozen-lockfile
# EXPOSE 3000
EXPOSE 3000
# CMD ["bun", "src/index.ts"]
CMD ["bun", "src/index.ts"]

src/index.ts

@@ -1,7 +1,28 @@
import { Hono } from 'hono'
import { Buffer } from 'node:buffer'
import { WechatOCRClient } from './wcocr'
const app = new Hono()
const client = new WechatOCRClient()
app.get('/', c => c.text('Hello World'))
app.post('/ocr', async (c) => {
const formData = await c.req.formData()
const imageFile = formData.get('image')
if (!imageFile || typeof imageFile === 'string') {
return c.json({ error: 'Image file is required' }, 400)
}
const imageBuffer = Buffer.from(await imageFile.arrayBuffer())
try {
const result = await client.recognize(imageBuffer)
return c.json(JSON.parse(result))
} catch (error) {
console.error('OCR processing error:', error)
return c.json({ error: 'Failed to process image' }, 500)
}
})
export default app
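For reference, the new /ocr endpoint reads a multipart form field named image; a client call could look like the sketch below. The host and port (localhost:3000, matching EXPOSE 3000 in the Dockerfile) and the sample file path are assumptions for illustration only.

// Hypothetical client for the /ocr endpoint; host, port, and file path are examples.
const data = await Bun.file('./sample.png').arrayBuffer()
const form = new FormData()
form.append('image', new File([data], 'sample.png', { type: 'image/png' }))

const res = await fetch('http://localhost:3000/ocr', { method: 'POST', body: form })
if (res.ok) {
  console.log('OCR result:', await res.json())
} else {
  console.error('OCR request failed:', res.status, await res.text())
}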

src/wcocr.ts

@@ -2,10 +2,14 @@ import type { Pointer } from 'bun:ffi'
import { CString, dlopen, FFIType, JSCallback, suffix } from 'bun:ffi'
import { Buffer } from 'node:buffer'
import { randomBytes } from 'node:crypto'
import { rm, writeFile } from 'node:fs/promises'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
const libPath = `libwcocr.${suffix}`
const WechatOCR = dlopen(libPath, {
const WechatOCRFFI = dlopen(libPath, {
wechat_ocr: {
args: [
FFIType.cstring, // path to ocr_exe
@@ -21,59 +25,59 @@ const WechatOCR = dlopen(libPath, {
},
})
// Main function
async function callWechatOcr(
ocrExe: string,
wechatDir: string,
imgfn: string,
): Promise<string> {
return new Promise((resolve, reject) => {
// Create the callback function
const callback = new JSCallback(
(argPtr: Pointer) => {
const arg = new CString(argPtr).toString()
resolve(arg)
callback.close() // release the callback
},
{
args: [FFIType.ptr],
returns: FFIType.void,
},
)
export class WechatOCRClient {
private ocrExe: string
private wechatDir: string
// Call the wechat_ocr function
const result = WechatOCR.symbols.wechat_ocr(
Buffer.from(`${ocrExe}\0`), // convert to a C string
Buffer.from(`${wechatDir}\0`),
Buffer.from(`${imgfn}\0`),
callback, // pass the JSCallback instance directly
)
constructor(options?: { ocrExe?: string, wechatDir?: string }) {
this.ocrExe = options?.ocrExe ?? '/opt/wechat/wxocr'
this.wechatDir = options?.wechatDir ?? '/opt/wechat'
}
if (!result) {
callback.close()
reject(new Error('OCR call failed'))
private async _callWechatOcr(imgfn: string): Promise<string> {
return new Promise((resolve, reject) => {
// Create the callback function
const callback = new JSCallback(
(argPtr: Pointer) => {
const arg = new CString(argPtr).toString()
resolve(arg)
callback.close() // release the callback
},
{
args: [FFIType.ptr],
returns: FFIType.void,
},
)
// Call the wechat_ocr function
const result = WechatOCRFFI.symbols.wechat_ocr(
Buffer.from(`${this.ocrExe}\0`), // convert to a C string
Buffer.from(`${this.wechatDir}\0`),
Buffer.from(`${imgfn}\0`),
callback, // pass the JSCallback instance directly
)
if (!result) {
callback.close()
reject(new Error('OCR call failed'))
}
})
}
public async recognize(image: Buffer): Promise<string> {
const tempPath = join(tmpdir(), `wechat-ocr-temp-${randomBytes(16).toString('hex')}.png`)
await writeFile(tempPath, image)
try {
const result = await this._callWechatOcr(tempPath)
return result
} finally {
await rm(tempPath, { force: true })
}
})
}
}
async function main() {
// Replace these with your actual paths
const ocrExe = '/opt/wechat/wxocr'
const wechatDir = '/opt/wechat'
const imgfn = '/home/imbytecat/Pictures/Screenshots/Screenshot_07-Jul_10-00-20_9907.png'
console.log('OCR starting...')
try {
const result = await callWechatOcr(ocrExe, wechatDir, imgfn)
console.log('OCR result:', result)
} catch (error) {
console.error('OCR error:', error)
} finally {
// Clean up resources
WechatOCR.symbols.stop_ocr()
console.log('OCR finished...')
public close() {
WechatOCRFFI.symbols.stop_ocr()
console.log('OCR resources cleaned up.')
}
}
// Run the main function
main()
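Taken together, the change replaces the one-shot callWechatOcr/main() script with a reusable WechatOCRClient. A standalone usage sketch of the new class follows; the image path is an example, and recognize() resolves with the JSON string produced by the native library, which index.ts then parses.

// Hypothetical standalone usage of WechatOCRClient; the image path is an example.
import { Buffer } from 'node:buffer'
import { WechatOCRClient } from './wcocr'

const client = new WechatOCRClient() // defaults: /opt/wechat/wxocr and /opt/wechat
const image = Buffer.from(await Bun.file('./sample.png').arrayBuffer())

try {
  const raw = await client.recognize(image) // JSON string returned by the native wechat_ocr call
  console.log(JSON.parse(raw))
} finally {
  client.close() // stop_ocr() releases the native OCR resources
}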