feat: add WeChat OCR functionality and integrate a Hono application
All checks were successful
Build Server / Compile DLL (push) Successful in 1m26s

- 添加"hono"作为项目的依赖项。
- 添加 Dockerfile 以构建 WeChat OCR 应用程序。
- 新增hono库依赖到package.json文件。
- 将微信OCR集成替换为简单的Hono应用程序。
- 添加微信OCR功能以实现基于路径的图片文字识别。
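
As of this commit, src/index.ts only serves a Hello World route and the OCR call lives in src/wcocr.ts as a standalone script, so the path-based endpoint described in the last point is direction rather than wiring. A minimal sketch of what it could look like, assuming callWechatOcr gets exported from src/wcocr.ts (it is not exported here) and that the ocr_exe and wechat_dir paths match the container layout:

import { Hono } from 'hono'
// Hypothetical import: src/wcocr.ts does not export callWechatOcr in this commit.
import { callWechatOcr } from './wcocr'

const app = new Hono()

// Paths taken from the example in src/wcocr.ts; adjust to the deployment.
const OCR_EXE = '/opt/wechat/wxocr'
const WECHAT_DIR = '/opt/wechat'

// POST /ocr with a JSON body such as { "path": "/absolute/path/to/image.png" }
app.post('/ocr', async (c) => {
  const { path } = await c.req.json<{ path?: string }>()
  if (!path)
    return c.json({ error: 'missing "path"' }, 400)
  const result = await callWechatOcr(OCR_EXE, WECHAT_DIR, path)
  return c.text(result) // raw result string; parsing is left to the caller
})

export default app

Accepting the image path in a POST body mirrors the path-based recognition described above while keeping filesystem paths out of the URL.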
2025-07-08 00:13:44 +08:00
parent 6a258b71a9
commit dac1400009
5 changed files with 107 additions and 76 deletions

Dockerfile (new file)

@@ -0,0 +1,16 @@
FROM greyltc/archlinux-aur:yay AS lib
WORKDIR /src
RUN yay -Syu --noconfirm cmake python nodejs && \
git clone https://github.com/swigger/wechat-ocr.git wcocr && \
sed -i 's/v3.21.0/v3.21.2/g' wcocr/CMakeLists.txt && \
mkdir -p wcocr/build && cd wcocr/build && cmake .. && make -j$(nproc)
# FROM greyltc/archlinux-aur:yay
# WORKDIR /app
# COPY --from=lib /src/wcocr/build/libwcocr.so .
# COPY src/index.ts .
# RUN bun install hono
# EXPOSE 3000
# CMD ["bun", "src/index.ts"]

bun.lock

@@ -3,6 +3,9 @@
"workspaces": {
"": {
"name": "wxocr",
"dependencies": {
"hono": "^4.8.4",
},
"devDependencies": {
"@antfu/eslint-config": "^4.16.2",
"@types/bun": "^1.2.18",
@@ -330,6 +333,8 @@
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
"hono": ["hono@4.8.4", "", {}, "sha512-KOIBp1+iUs0HrKztM4EHiB2UtzZDTBihDtOF5K6+WaJjCPeaW4Q92R8j63jOhvJI5+tZSMuKD9REVEXXY9illg=="],
"ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="],
"import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="],

package.json

@@ -10,6 +10,9 @@
"peerDependencies": {
"typescript": "^5.8.3"
},
"dependencies": {
"hono": "^4.8.4"
},
"devDependencies": {
"@antfu/eslint-config": "^4.16.2",
"@types/bun": "^1.2.18",

src/index.ts

@@ -1,79 +1,7 @@
-import type { Pointer } from 'bun:ffi'
+import { Hono } from 'hono'
-import { CString, dlopen, FFIType, JSCallback, suffix } from 'bun:ffi'
-import { Buffer } from 'node:buffer'
+const app = new Hono()
-const libPath = `libwcocr.${suffix}`
+app.get('/', c => c.text('Hello World'))
-const WechatOCR = dlopen(libPath, {
-  wechat_ocr: {
-    args: [
-      FFIType.cstring, // ocr_exe path
-      FFIType.cstring, // wechat_dir path
-      FFIType.cstring, // imgfn (image file) path
-      FFIType.function, // callback function pointer
-    ],
-    returns: FFIType.bool, // returns a boolean
-  },
-  stop_ocr: {
-    args: [],
-    returns: FFIType.void, // no return value
-  },
-})
-// Call the native wechat_ocr entry point and resolve with the result string
-async function callWechatOcr(
-  ocrExe: string,
-  wechatDir: string,
-  imgfn: string,
-): Promise<string> {
-  return new Promise((resolve, reject) => {
-    // Create the result callback
-    const callback = new JSCallback(
-      (argPtr: Pointer) => {
-        const arg = new CString(argPtr).toString()
-        resolve(arg)
-        callback.close() // release the callback
-      },
-      {
-        args: [FFIType.ptr],
-        returns: FFIType.void,
-      },
-    )
-    // Invoke the wechat_ocr function
-    const result = WechatOCR.symbols.wechat_ocr(
-      Buffer.from(`${ocrExe}\0`), // null-terminate for the C string
-      Buffer.from(`${wechatDir}\0`),
-      Buffer.from(`${imgfn}\0`),
-      callback, // pass the JSCallback instance directly
-    )
-    if (!result) {
-      callback.close()
-      reject(new Error('OCR call failed'))
-    }
-  })
-}
-async function main() {
-  // Replace these with your actual paths
-  const ocrExe = '/opt/wechat/wxocr'
-  const wechatDir = '/opt/wechat'
-  const imgfn = '/home/imbytecat/Pictures/Screenshots/Screenshot_07-Jul_10-00-20_9907.png'
-  console.log('OCR starting...')
-  try {
-    const result = await callWechatOcr(ocrExe, wechatDir, imgfn)
-    console.log('OCR result:', result)
-  } catch (error) {
-    console.error('OCR error:', error)
-  } finally {
-    // Clean up native resources
-    WechatOCR.symbols.stop_ocr()
-    console.log('OCR finished...')
-  }
-}
-// Run the main entry point
-main()
+export default app
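
With Bun, the exported app is all the new index.ts needs: Bun treats a default export with a fetch handler as a server and listens on port 3000 unless told otherwise, which lines up with the EXPOSE 3000 in the commented-out Dockerfile stage. If the port should be explicit, the usual Hono-on-Bun pattern is a small sketch like:

import { Hono } from 'hono'

const app = new Hono()
app.get('/', c => c.text('Hello World'))

// Bun serves app.fetch on the configured port instead of relying on the default.
export default {
  port: 3000,
  fetch: app.fetch,
}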

src/wcocr.ts (new file)

@@ -0,0 +1,79 @@
import type { Pointer } from 'bun:ffi'
import { CString, dlopen, FFIType, JSCallback, suffix } from 'bun:ffi'
import { Buffer } from 'node:buffer'
const libPath = `libwcocr.${suffix}`
const WechatOCR = dlopen(libPath, {
  wechat_ocr: {
    args: [
      FFIType.cstring, // ocr_exe path
      FFIType.cstring, // wechat_dir path
      FFIType.cstring, // imgfn (image file) path
      FFIType.function, // callback function pointer
    ],
    returns: FFIType.bool, // returns a boolean
  },
  stop_ocr: {
    args: [],
    returns: FFIType.void, // no return value
  },
})
// Call the native wechat_ocr entry point and resolve with the result string
async function callWechatOcr(
  ocrExe: string,
  wechatDir: string,
  imgfn: string,
): Promise<string> {
  return new Promise((resolve, reject) => {
    // Create the result callback
    const callback = new JSCallback(
      (argPtr: Pointer) => {
        const arg = new CString(argPtr).toString()
        resolve(arg)
        callback.close() // release the callback
      },
      {
        args: [FFIType.ptr],
        returns: FFIType.void,
      },
    )
    // Invoke the wechat_ocr function
    const result = WechatOCR.symbols.wechat_ocr(
      Buffer.from(`${ocrExe}\0`), // null-terminate for the C string
      Buffer.from(`${wechatDir}\0`),
      Buffer.from(`${imgfn}\0`),
      callback, // pass the JSCallback instance directly
    )
    if (!result) {
      callback.close()
      reject(new Error('OCR call failed'))
    }
  })
}
async function main() {
  // Replace these with your actual paths
  const ocrExe = '/opt/wechat/wxocr'
  const wechatDir = '/opt/wechat'
  const imgfn = '/home/imbytecat/Pictures/Screenshots/Screenshot_07-Jul_10-00-20_9907.png'
  console.log('OCR starting...')
  try {
    const result = await callWechatOcr(ocrExe, wechatDir, imgfn)
    console.log('OCR result:', result)
  } catch (error) {
    console.error('OCR error:', error)
  } finally {
    // Clean up native resources
    WechatOCR.symbols.stop_ocr()
    console.log('OCR finished...')
  }
}
// Run the main entry point
main()
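
The native library hands its result back as a single C string through the callback. In the upstream swigger/wechat-ocr project that string is JSON describing the recognized text, so a consumer of this module would typically parse it; the sketch below assumes the same output format and that callWechatOcr is exported from this file (neither is guaranteed by this commit):

// Hypothetical import: src/wcocr.ts does not export callWechatOcr in this commit.
import { callWechatOcr } from './wcocr'

// Assumes the callback delivers a JSON string, as in the upstream
// swigger/wechat-ocr project; falls back to the raw text if parsing fails.
async function recognize(imagePath: string): Promise<unknown> {
  const raw = await callWechatOcr('/opt/wechat/wxocr', '/opt/wechat', imagePath)
  try {
    return JSON.parse(raw)
  } catch {
    return raw
  }
}

// Example: console.log(await recognize('/path/to/image.png'))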