@zh-keyboard/recognizer 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.d.mts +3 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +18 -11
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +14 -7
- package/dist/index.mjs.map +1 -1
- package/package.json +15 -4
- package/qrcode.jpg +0 -0
package/README.md
CHANGED
@@ -39,7 +39,7 @@ import { ZhkRecognizer } from '@zh-keyboard/recognizer'
 const recognizer = new ZhkRecognizer({
   modelPath: '/models/handwrite/model.json', // TensorFlow.js模型路径
   dictPath: '/models/dict.txt', // 汉字字典路径
-  backend: 'webgl' // 可选:'webgl'或'cpu',默认为'
+  backend: 'webgl' // 可选:'webgl'或'cpu',默认为'webgl'
 })

 // 初始化识别器(加载模型和字典)
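Read together with the new `initialize` signature introduced in this release (see the declaration diffs below), the documented options are used roughly as follows. This is a minimal usage sketch, not authoritative: the model and dictionary paths are the README's own examples, and it assumes `RecognizerInitOptions` (from `@zh-keyboard/core`, which is not part of this diff) carries the `onProgress` callback that `dist/index.js` forwards to `loadGraphModel`.

import { ZhkRecognizer } from '@zh-keyboard/recognizer'

const recognizer = new ZhkRecognizer({
  modelPath: '/models/handwrite/model.json', // example path from the README
  dictPath: '/models/dict.txt',              // example path from the README
  backend: 'webgl',                          // optional: 'webgl' or 'cpu'; defaults to 'webgl'
})

// New in 0.2.0: initialize() can report model-download progress (forwarded to loadGraphModel).
await recognizer.initialize({ onProgress: fraction => console.log(`model: ${Math.round(fraction * 100)}%`) })

// recognize() takes flattened [x, y, isEnd] triples and resolves to candidate characters.
const candidates = await recognizer.recognize([10, 10, 0, 20, 20, 1])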
package/dist/index.d.mts
CHANGED
@@ -1,4 +1,5 @@
-import
+import "@tensorflow/tfjs-backend-webgl";
+import { HandwritingRecognizer, RecognizerInitOptions } from "@zh-keyboard/core";

 //#region src/index.d.ts
 interface RecognizerOptions {
@@ -24,7 +25,7 @@ declare class ZhkRecognizer implements HandwritingRecognizer {
   private dictPath;
   private backend;
   constructor(options: RecognizerOptions);
-  initialize(): Promise<boolean>;
+  initialize(options?: RecognizerInitOptions): Promise<boolean>;
   recognize(strokeData: number[]): Promise<string[]>;
   close(): Promise<void>;
 } //#endregion
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { HandwritingRecognizer } from "@zh-keyboard/core";
+import { HandwritingRecognizer, RecognizerInitOptions } from "@zh-keyboard/core";

 //#region src/index.d.ts
 interface RecognizerOptions {
@@ -24,7 +24,7 @@ declare class ZhkRecognizer implements HandwritingRecognizer {
   private dictPath;
   private backend;
   constructor(options: RecognizerOptions);
-  initialize(): Promise<boolean>;
+  initialize(options?: RecognizerInitOptions): Promise<boolean>;
   recognize(strokeData: number[]): Promise<string[]>;
   close(): Promise<void>;
 } //#endregion
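Both declaration files pick up the same change: `initialize` now accepts an optional `RecognizerInitOptions` imported from `@zh-keyboard/core`. That package is not part of this diff, so its exact shape is not visible here; judging only from how the option is used in the bundles below (`options?.onProgress` is passed straight through to `loadGraphModel`), it presumably includes at least something like the following hypothetical sketch.

// Hypothetical reconstruction, inferred from usage in dist/index.js —
// the real @zh-keyboard/core interface may define more fields.
interface RecognizerInitOptions {
  /** Forwarded to TensorFlow.js loadGraphModel's onProgress (load fraction between 0 and 1). */
  onProgress?: (fraction: number) => void
}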
package/dist/index.js
CHANGED
@@ -22,7 +22,9 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 }) : target, mod));

 //#endregion
-const
+const __tensorflow_tfjs_converter = __toESM(require("@tensorflow/tfjs-converter"));
+const __tensorflow_tfjs_core = __toESM(require("@tensorflow/tfjs-core"));
+require("@tensorflow/tfjs-backend-webgl");

 //#region src/index.ts
 var ZhkRecognizer = class {
@@ -36,18 +38,21 @@ var ZhkRecognizer = class {
   constructor(options) {
     this.modelPath = options.modelPath;
     this.dictPath = options.dictPath;
-    this.backend = options.backend || "
+    this.backend = options.backend || "webgl";
     this.canvas = document.createElement("canvas");
     this.canvas.width = this.canvas.height = 64;
     this.ctx = this.canvas.getContext("2d", { willReadFrequently: true });
   }
-  async initialize() {
+  async initialize(options) {
     const text = await fetch(this.dictPath).then((r) => r.text());
     this.dict = text.split("\n");
-    this.model = await
+    this.model = await (0, __tensorflow_tfjs_converter.loadGraphModel)(this.modelPath, {
+      streamWeights: true,
+      onProgress: options?.onProgress
+    });
     if (this.backend === "webgl") {
-      await
-      await
+      await __tensorflow_tfjs_core.setBackend("webgl");
+      await __tensorflow_tfjs_core.ready();
       await this.recognize([
         10,
         10,
@@ -56,7 +61,7 @@ var ZhkRecognizer = class {
         20,
         1
       ]);
-    } else await
+    } else await __tensorflow_tfjs_core.setBackend("cpu");
     return true;
   }
   async recognize(strokeData) {
@@ -104,11 +109,13 @@ var ZhkRecognizer = class {
       }
       last = s;
     }
-    return
-    const image =
-    const
+    return __tensorflow_tfjs_core.tidy(() => {
+      const image = __tensorflow_tfjs_core.browser.fromPixels(canvas, 3);
+      const floatImage = __tensorflow_tfjs_core.cast(image, "float32");
+      const normalizedImage = __tensorflow_tfjs_core.div(floatImage, 255);
+      const batchedImage = __tensorflow_tfjs_core.expandDims(normalizedImage, 0);
+      const probs = model.predict(batchedImage).dataSync();
       const idxs = Array.from(probs.keys()).sort((a, b) => probs[b] - probs[a]).slice(0, 10);
-      __tensorflow_tfjs.dispose(image);
       return idxs.map((i) => i < dict.length ? dict[i] : "").filter(Boolean);
     });
   }
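As the warm-up call `recognize([10, 10, 0, 20, 20, 1])` above suggests (and the full source embedded in the map below confirms), `recognize()` consumes stroke data as a flat array of `[x, y, isEnd]` triples, with `isEnd` set to 1 on the last point of each stroke. A small illustrative helper for producing that format — the helper name and `Point` type are not part of the package:

// Illustrative only: flattens point lists into the [x, y, isEnd] triples recognize() expects.
interface Point { x: number, y: number }

function encodeStrokes(strokes: Point[][]): number[] {
  const data: number[] = []
  for (const stroke of strokes) {
    stroke.forEach((p, i) => {
      // isEnd is 1 on the last point of each stroke, 0 otherwise
      data.push(p.x, p.y, i === stroke.length - 1 ? 1 : 0)
    })
  }
  return data
}

// encodeStrokes([[{ x: 10, y: 10 }, { x: 20, y: 20 }]]) -> [10, 10, 0, 20, 20, 1]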
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","names":["options: RecognizerOptions","strokeData: number[]","last: { x: number, y: number, isEnd: boolean } | null"],"sources":["../src/index.ts"],"sourcesContent":["import type { HandwritingRecognizer } from '@zh-keyboard/core'\r\nimport * as tf from '@tensorflow/tfjs'\r\n\r\nexport interface RecognizerOptions {\r\n /**\r\n * 模型路径\r\n */\r\n modelPath: string\r\n /**\r\n * 字典路径\r\n */\r\n dictPath: string\r\n /**\r\n * 后端类型\r\n */\r\n backend?: 'webgl' | 'cpu'\r\n}\r\n\r\nexport class ZhkRecognizer implements HandwritingRecognizer {\r\n private model?:
+
{"version":3,"file":"index.js","names":["options: RecognizerOptions","options?: RecognizerInitOptions","strokeData: number[]","last: { x: number, y: number, isEnd: boolean } | null"],"sources":["../src/index.ts"],"sourcesContent":["import type { GraphModel, Tensor } from '@tensorflow/tfjs'\r\nimport type { HandwritingRecognizer, RecognizerInitOptions } from '@zh-keyboard/core'\r\nimport { loadGraphModel } from '@tensorflow/tfjs-converter'\r\nimport * as tf from '@tensorflow/tfjs-core'\r\nimport '@tensorflow/tfjs-backend-webgl'\r\n\r\nexport interface RecognizerOptions {\r\n /**\r\n * 模型路径\r\n */\r\n modelPath: string\r\n /**\r\n * 字典路径\r\n */\r\n dictPath: string\r\n /**\r\n * 后端类型\r\n */\r\n backend?: 'webgl' | 'cpu'\r\n}\r\n\r\nexport class ZhkRecognizer implements HandwritingRecognizer {\r\n private model?: GraphModel\r\n private dict: string[] = []\r\n private canvas: HTMLCanvasElement\r\n private ctx: CanvasRenderingContext2D\r\n private modelPath: string\r\n private dictPath: string\r\n private backend: 'webgl' | 'cpu'\r\n\r\n constructor(options: RecognizerOptions) {\r\n this.modelPath = options.modelPath\r\n this.dictPath = options.dictPath\r\n this.backend = options.backend || 'webgl'\r\n this.canvas = document.createElement('canvas')\r\n this.canvas.width = this.canvas.height = 64\r\n this.ctx = this.canvas.getContext('2d', { willReadFrequently: true })!\r\n }\r\n\r\n async initialize(options?: RecognizerInitOptions) {\r\n const text = await fetch(this.dictPath).then(r => r.text())\r\n this.dict = text.split('\\n')\r\n this.model = await loadGraphModel(this.modelPath, {\r\n streamWeights: true,\r\n onProgress: options?.onProgress,\r\n })\r\n // 如果后端为webgl,则需要进行预热\r\n if (this.backend === 'webgl') {\r\n await tf.setBackend('webgl')\r\n await tf.ready()\r\n await this.recognize([10, 10, 0, 20, 20, 1])\r\n } else {\r\n await tf.setBackend('cpu')\r\n }\r\n return true\r\n }\r\n\r\n async recognize(strokeData: number[]): Promise<string[]> {\r\n if (!this.model) {\r\n throw new Error('Model not initialized')\r\n }\r\n const { canvas, ctx, model, dict } = this\r\n ctx.fillStyle = 'white'\r\n ctx.fillRect(0, 0, canvas.width, canvas.height)\r\n\r\n const n = strokeData.length / 3\r\n const strokes = Array.from({ length: n }, (_, i) => ({\r\n x: strokeData[3 * i],\r\n y: strokeData[3 * i + 1],\r\n isEnd: strokeData[3 * i + 2] === 1,\r\n }))\r\n\r\n let minX = Infinity\r\n let minY = Infinity\r\n let maxX = -Infinity\r\n let maxY = -Infinity\r\n for (const { x, y } of strokes) {\r\n if (x < minX) {\r\n minX = x\r\n }\r\n if (x > maxX) {\r\n maxX = x\r\n }\r\n if (y < minY) {\r\n minY = y\r\n }\r\n if (y > maxY) {\r\n maxY = y\r\n }\r\n }\r\n\r\n const w = maxX - minX || 1\r\n const h = maxY - minY || 1\r\n const cx = (minX + maxX) / 2\r\n const cy = (minY + maxY) / 2\r\n const scale = Math.min(canvas.width * 0.9 / w, canvas.height * 0.9 / h)\r\n\r\n ctx.strokeStyle = 'black'\r\n ctx.lineWidth = 2\r\n ctx.lineCap = 'round'\r\n ctx.lineJoin = 'round'\r\n\r\n let last: { x: number, y: number, isEnd: boolean } | null = null\r\n for (const s of strokes) {\r\n const x = canvas.width / 2 + (s.x - cx) * scale\r\n const y = canvas.height / 2 + (s.y - cy) * scale\r\n if (last && !last.isEnd) {\r\n ctx.beginPath()\r\n ctx.moveTo(canvas.width / 2 + (last.x - cx) * scale, canvas.height / 2 + (last.y - cy) * scale)\r\n ctx.lineTo(x, y)\r\n ctx.stroke()\r\n } else {\r\n ctx.beginPath()\r\n ctx.moveTo(x, y)\r\n }\r\n last = s\r\n }\r\n\r\n return tf.tidy(() => {\r\n const image = 
tf.browser.fromPixels(canvas, 3)\r\n const floatImage = tf.cast(image, 'float32')\r\n const normalizedImage = tf.div(floatImage, 255)\r\n const batchedImage = tf.expandDims(normalizedImage, 0)\r\n\r\n const probs = (model!.predict(batchedImage) as Tensor).dataSync()\r\n const idxs = Array.from(probs.keys()).sort((a, b) => probs[b] - probs[a]).slice(0, 10)\r\n\r\n return idxs.map(i => (i < dict.length ? dict[i] : '')).filter(Boolean)\r\n })\r\n }\r\n\r\n async close() {\r\n this.model?.dispose()\r\n this.model = undefined\r\n }\r\n}\r\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqBA,IAAa,gBAAb,MAA4D;CAC1D,AAAQ;CACR,AAAQ,OAAiB,CAAE;CAC3B,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,YAAYA,SAA4B;AACtC,OAAK,YAAY,QAAQ;AACzB,OAAK,WAAW,QAAQ;AACxB,OAAK,UAAU,QAAQ,WAAW;AAClC,OAAK,SAAS,SAAS,cAAc,SAAS;AAC9C,OAAK,OAAO,QAAQ,KAAK,OAAO,SAAS;AACzC,OAAK,MAAM,KAAK,OAAO,WAAW,MAAM,EAAE,oBAAoB,KAAM,EAAC;CACtE;CAED,MAAM,WAAWC,SAAiC;EAChD,MAAM,OAAO,MAAM,MAAM,KAAK,SAAS,CAAC,KAAK,OAAK,EAAE,MAAM,CAAC;AAC3D,OAAK,OAAO,KAAK,MAAM,KAAK;AAC5B,OAAK,QAAQ,MAAM,gDAAe,KAAK,WAAW;GAChD,eAAe;GACf,YAAY,SAAS;EACtB,EAAC;AAEF,MAAI,KAAK,YAAY,SAAS;AAC5B,SAAM,uBAAG,WAAW,QAAQ;AAC5B,SAAM,uBAAG,OAAO;AAChB,SAAM,KAAK,UAAU;IAAC;IAAI;IAAI;IAAG;IAAI;IAAI;GAAE,EAAC;EAC7C,MACC,OAAM,uBAAG,WAAW,MAAM;AAE5B,SAAO;CACR;CAED,MAAM,UAAUC,YAAyC;AACvD,OAAK,KAAK,MACR,OAAM,IAAI,MAAM;EAElB,MAAM,EAAE,QAAQ,KAAK,OAAO,MAAM,GAAG;AACrC,MAAI,YAAY;AAChB,MAAI,SAAS,GAAG,GAAG,OAAO,OAAO,OAAO,OAAO;EAE/C,MAAM,IAAI,WAAW,SAAS;EAC9B,MAAM,UAAU,MAAM,KAAK,EAAE,QAAQ,EAAG,GAAE,CAAC,GAAG,OAAO;GACnD,GAAG,WAAW,IAAI;GAClB,GAAG,WAAW,IAAI,IAAI;GACtB,OAAO,WAAW,IAAI,IAAI,OAAO;EAClC,GAAE;EAEH,IAAI,OAAO;EACX,IAAI,OAAO;EACX,IAAI,OAAO;EACX,IAAI,OAAO;AACX,OAAK,MAAM,EAAE,GAAG,GAAG,IAAI,SAAS;AAC9B,OAAI,IAAI,KACN,QAAO;AAET,OAAI,IAAI,KACN,QAAO;AAET,OAAI,IAAI,KACN,QAAO;AAET,OAAI,IAAI,KACN,QAAO;EAEV;EAED,MAAM,IAAI,OAAO,QAAQ;EACzB,MAAM,IAAI,OAAO,QAAQ;EACzB,MAAM,MAAM,OAAO,QAAQ;EAC3B,MAAM,MAAM,OAAO,QAAQ;EAC3B,MAAM,QAAQ,KAAK,IAAI,OAAO,QAAQ,KAAM,GAAG,OAAO,SAAS,KAAM,EAAE;AAEvE,MAAI,cAAc;AAClB,MAAI,YAAY;AAChB,MAAI,UAAU;AACd,MAAI,WAAW;EAEf,IAAIC,OAAwD;AAC5D,OAAK,MAAM,KAAK,SAAS;GACvB,MAAM,IAAI,OAAO,QAAQ,KAAK,EAAE,IAAI,MAAM;GAC1C,MAAM,IAAI,OAAO,SAAS,KAAK,EAAE,IAAI,MAAM;AAC3C,OAAI,SAAS,KAAK,OAAO;AACvB,QAAI,WAAW;AACf,QAAI,OAAO,OAAO,QAAQ,KAAK,KAAK,IAAI,MAAM,OAAO,OAAO,SAAS,KAAK,KAAK,IAAI,MAAM,MAAM;AAC/F,QAAI,OAAO,GAAG,EAAE;AAChB,QAAI,QAAQ;GACb,OAAM;AACL,QAAI,WAAW;AACf,QAAI,OAAO,GAAG,EAAE;GACjB;AACD,UAAO;EACR;AAED,SAAO,uBAAG,KAAK,MAAM;GACnB,MAAM,QAAQ,uBAAG,QAAQ,WAAW,QAAQ,EAAE;GAC9C,MAAM,aAAa,uBAAG,KAAK,OAAO,UAAU;GAC5C,MAAM,kBAAkB,uBAAG,IAAI,YAAY,IAAI;GAC/C,MAAM,eAAe,uBAAG,WAAW,iBAAiB,EAAE;GAEtD,MAAM,QAAQ,AAAC,MAAO,QAAQ,aAAa,CAAY,UAAU;GACjE,MAAM,OAAO,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC,KAAK,CAAC,GAAG,MAAM,MAAM,KAAK,MAAM,GAAG,CAAC,MAAM,GAAG,GAAG;AAEtF,UAAO,KAAK,IAAI,OAAM,IAAI,KAAK,SAAS,KAAK,KAAK,GAAI,CAAC,OAAO,QAAQ;EACvE,EAAC;CACH;CAED,MAAM,QAAQ;AACZ,OAAK,OAAO,SAAS;AACrB,OAAK;CACN;AACF"}
package/dist/index.mjs
CHANGED
@@ -1,4 +1,6 @@
-import
+import { loadGraphModel } from "@tensorflow/tfjs-converter";
+import * as tf from "@tensorflow/tfjs-core";
+import "@tensorflow/tfjs-backend-webgl";

 //#region src/index.ts
 var ZhkRecognizer = class {
@@ -12,15 +14,18 @@ var ZhkRecognizer = class {
   constructor(options) {
     this.modelPath = options.modelPath;
     this.dictPath = options.dictPath;
-    this.backend = options.backend || "
+    this.backend = options.backend || "webgl";
     this.canvas = document.createElement("canvas");
     this.canvas.width = this.canvas.height = 64;
     this.ctx = this.canvas.getContext("2d", { willReadFrequently: true });
   }
-  async initialize() {
+  async initialize(options) {
     const text = await fetch(this.dictPath).then((r) => r.text());
     this.dict = text.split("\n");
-    this.model = await
+    this.model = await loadGraphModel(this.modelPath, {
+      streamWeights: true,
+      onProgress: options?.onProgress
+    });
     if (this.backend === "webgl") {
       await tf.setBackend("webgl");
       await tf.ready();
@@ -81,10 +86,12 @@ var ZhkRecognizer = class {
       last = s;
     }
     return tf.tidy(() => {
-      const image = tf.browser.fromPixels(canvas, 3)
-      const
+      const image = tf.browser.fromPixels(canvas, 3);
+      const floatImage = tf.cast(image, "float32");
+      const normalizedImage = tf.div(floatImage, 255);
+      const batchedImage = tf.expandDims(normalizedImage, 0);
+      const probs = model.predict(batchedImage).dataSync();
       const idxs = Array.from(probs.keys()).sort((a, b) => probs[b] - probs[a]).slice(0, 10);
-      tf.dispose(image);
       return idxs.map((i) => i < dict.length ? dict[i] : "").filter(Boolean);
     });
   }
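In both bundles the explicit `tf.dispose(image)` call disappears because the post-processing now runs entirely inside `tf.tidy`, which disposes every tensor allocated in its callback (`image`, `floatImage`, `normalizedImage`, `batchedImage`, and the prediction) once the callback returns. A minimal sketch of that memory-management pattern with the split TensorFlow.js packages this version depends on — the `preprocess` function is illustrative, not part of the package:

import * as tf from '@tensorflow/tfjs-core'
import '@tensorflow/tfjs-backend-webgl' // registers kernels so the ops below can run

// Illustrative: tensors created inside tf.tidy() are freed automatically unless returned.
async function preprocess(canvas: HTMLCanvasElement): Promise<number> {
  await tf.setBackend('webgl')
  await tf.ready()
  return tf.tidy(() => {
    const image = tf.browser.fromPixels(canvas, 3)             // int32, shape [h, w, 3]
    const normalized = tf.div(tf.cast(image, 'float32'), 255)  // float32 in [0, 1]
    const batched = tf.expandDims(normalized, 0)               // shape [1, h, w, 3]
    return batched.size // plain number; every tensor created above is disposed here
  })
}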
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.mjs","names":["options: RecognizerOptions","strokeData: number[]","last: { x: number, y: number, isEnd: boolean } | null"],"sources":["../src/index.ts"],"sourcesContent":["import type { HandwritingRecognizer } from '@zh-keyboard/core'\r\nimport * as tf from '@tensorflow/tfjs'\r\n\r\nexport interface RecognizerOptions {\r\n /**\r\n * 模型路径\r\n */\r\n modelPath: string\r\n /**\r\n * 字典路径\r\n */\r\n dictPath: string\r\n /**\r\n * 后端类型\r\n */\r\n backend?: 'webgl' | 'cpu'\r\n}\r\n\r\nexport class ZhkRecognizer implements HandwritingRecognizer {\r\n private model?:
+
{"version":3,"file":"index.mjs","names":["options: RecognizerOptions","options?: RecognizerInitOptions","strokeData: number[]","last: { x: number, y: number, isEnd: boolean } | null"],"sources":["../src/index.ts"],"sourcesContent":["import type { GraphModel, Tensor } from '@tensorflow/tfjs'\r\nimport type { HandwritingRecognizer, RecognizerInitOptions } from '@zh-keyboard/core'\r\nimport { loadGraphModel } from '@tensorflow/tfjs-converter'\r\nimport * as tf from '@tensorflow/tfjs-core'\r\nimport '@tensorflow/tfjs-backend-webgl'\r\n\r\nexport interface RecognizerOptions {\r\n /**\r\n * 模型路径\r\n */\r\n modelPath: string\r\n /**\r\n * 字典路径\r\n */\r\n dictPath: string\r\n /**\r\n * 后端类型\r\n */\r\n backend?: 'webgl' | 'cpu'\r\n}\r\n\r\nexport class ZhkRecognizer implements HandwritingRecognizer {\r\n private model?: GraphModel\r\n private dict: string[] = []\r\n private canvas: HTMLCanvasElement\r\n private ctx: CanvasRenderingContext2D\r\n private modelPath: string\r\n private dictPath: string\r\n private backend: 'webgl' | 'cpu'\r\n\r\n constructor(options: RecognizerOptions) {\r\n this.modelPath = options.modelPath\r\n this.dictPath = options.dictPath\r\n this.backend = options.backend || 'webgl'\r\n this.canvas = document.createElement('canvas')\r\n this.canvas.width = this.canvas.height = 64\r\n this.ctx = this.canvas.getContext('2d', { willReadFrequently: true })!\r\n }\r\n\r\n async initialize(options?: RecognizerInitOptions) {\r\n const text = await fetch(this.dictPath).then(r => r.text())\r\n this.dict = text.split('\\n')\r\n this.model = await loadGraphModel(this.modelPath, {\r\n streamWeights: true,\r\n onProgress: options?.onProgress,\r\n })\r\n // 如果后端为webgl,则需要进行预热\r\n if (this.backend === 'webgl') {\r\n await tf.setBackend('webgl')\r\n await tf.ready()\r\n await this.recognize([10, 10, 0, 20, 20, 1])\r\n } else {\r\n await tf.setBackend('cpu')\r\n }\r\n return true\r\n }\r\n\r\n async recognize(strokeData: number[]): Promise<string[]> {\r\n if (!this.model) {\r\n throw new Error('Model not initialized')\r\n }\r\n const { canvas, ctx, model, dict } = this\r\n ctx.fillStyle = 'white'\r\n ctx.fillRect(0, 0, canvas.width, canvas.height)\r\n\r\n const n = strokeData.length / 3\r\n const strokes = Array.from({ length: n }, (_, i) => ({\r\n x: strokeData[3 * i],\r\n y: strokeData[3 * i + 1],\r\n isEnd: strokeData[3 * i + 2] === 1,\r\n }))\r\n\r\n let minX = Infinity\r\n let minY = Infinity\r\n let maxX = -Infinity\r\n let maxY = -Infinity\r\n for (const { x, y } of strokes) {\r\n if (x < minX) {\r\n minX = x\r\n }\r\n if (x > maxX) {\r\n maxX = x\r\n }\r\n if (y < minY) {\r\n minY = y\r\n }\r\n if (y > maxY) {\r\n maxY = y\r\n }\r\n }\r\n\r\n const w = maxX - minX || 1\r\n const h = maxY - minY || 1\r\n const cx = (minX + maxX) / 2\r\n const cy = (minY + maxY) / 2\r\n const scale = Math.min(canvas.width * 0.9 / w, canvas.height * 0.9 / h)\r\n\r\n ctx.strokeStyle = 'black'\r\n ctx.lineWidth = 2\r\n ctx.lineCap = 'round'\r\n ctx.lineJoin = 'round'\r\n\r\n let last: { x: number, y: number, isEnd: boolean } | null = null\r\n for (const s of strokes) {\r\n const x = canvas.width / 2 + (s.x - cx) * scale\r\n const y = canvas.height / 2 + (s.y - cy) * scale\r\n if (last && !last.isEnd) {\r\n ctx.beginPath()\r\n ctx.moveTo(canvas.width / 2 + (last.x - cx) * scale, canvas.height / 2 + (last.y - cy) * scale)\r\n ctx.lineTo(x, y)\r\n ctx.stroke()\r\n } else {\r\n ctx.beginPath()\r\n ctx.moveTo(x, y)\r\n }\r\n last = s\r\n }\r\n\r\n return tf.tidy(() => {\r\n const image = 
tf.browser.fromPixels(canvas, 3)\r\n const floatImage = tf.cast(image, 'float32')\r\n const normalizedImage = tf.div(floatImage, 255)\r\n const batchedImage = tf.expandDims(normalizedImage, 0)\r\n\r\n const probs = (model!.predict(batchedImage) as Tensor).dataSync()\r\n const idxs = Array.from(probs.keys()).sort((a, b) => probs[b] - probs[a]).slice(0, 10)\r\n\r\n return idxs.map(i => (i < dict.length ? dict[i] : '')).filter(Boolean)\r\n })\r\n }\r\n\r\n async close() {\r\n this.model?.dispose()\r\n this.model = undefined\r\n }\r\n}\r\n"],"mappings":";;;;;AAqBA,IAAa,gBAAb,MAA4D;CAC1D,AAAQ;CACR,AAAQ,OAAiB,CAAE;CAC3B,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,YAAYA,SAA4B;AACtC,OAAK,YAAY,QAAQ;AACzB,OAAK,WAAW,QAAQ;AACxB,OAAK,UAAU,QAAQ,WAAW;AAClC,OAAK,SAAS,SAAS,cAAc,SAAS;AAC9C,OAAK,OAAO,QAAQ,KAAK,OAAO,SAAS;AACzC,OAAK,MAAM,KAAK,OAAO,WAAW,MAAM,EAAE,oBAAoB,KAAM,EAAC;CACtE;CAED,MAAM,WAAWC,SAAiC;EAChD,MAAM,OAAO,MAAM,MAAM,KAAK,SAAS,CAAC,KAAK,OAAK,EAAE,MAAM,CAAC;AAC3D,OAAK,OAAO,KAAK,MAAM,KAAK;AAC5B,OAAK,QAAQ,MAAM,eAAe,KAAK,WAAW;GAChD,eAAe;GACf,YAAY,SAAS;EACtB,EAAC;AAEF,MAAI,KAAK,YAAY,SAAS;AAC5B,SAAM,GAAG,WAAW,QAAQ;AAC5B,SAAM,GAAG,OAAO;AAChB,SAAM,KAAK,UAAU;IAAC;IAAI;IAAI;IAAG;IAAI;IAAI;GAAE,EAAC;EAC7C,MACC,OAAM,GAAG,WAAW,MAAM;AAE5B,SAAO;CACR;CAED,MAAM,UAAUC,YAAyC;AACvD,OAAK,KAAK,MACR,OAAM,IAAI,MAAM;EAElB,MAAM,EAAE,QAAQ,KAAK,OAAO,MAAM,GAAG;AACrC,MAAI,YAAY;AAChB,MAAI,SAAS,GAAG,GAAG,OAAO,OAAO,OAAO,OAAO;EAE/C,MAAM,IAAI,WAAW,SAAS;EAC9B,MAAM,UAAU,MAAM,KAAK,EAAE,QAAQ,EAAG,GAAE,CAAC,GAAG,OAAO;GACnD,GAAG,WAAW,IAAI;GAClB,GAAG,WAAW,IAAI,IAAI;GACtB,OAAO,WAAW,IAAI,IAAI,OAAO;EAClC,GAAE;EAEH,IAAI,OAAO;EACX,IAAI,OAAO;EACX,IAAI,OAAO;EACX,IAAI,OAAO;AACX,OAAK,MAAM,EAAE,GAAG,GAAG,IAAI,SAAS;AAC9B,OAAI,IAAI,KACN,QAAO;AAET,OAAI,IAAI,KACN,QAAO;AAET,OAAI,IAAI,KACN,QAAO;AAET,OAAI,IAAI,KACN,QAAO;EAEV;EAED,MAAM,IAAI,OAAO,QAAQ;EACzB,MAAM,IAAI,OAAO,QAAQ;EACzB,MAAM,MAAM,OAAO,QAAQ;EAC3B,MAAM,MAAM,OAAO,QAAQ;EAC3B,MAAM,QAAQ,KAAK,IAAI,OAAO,QAAQ,KAAM,GAAG,OAAO,SAAS,KAAM,EAAE;AAEvE,MAAI,cAAc;AAClB,MAAI,YAAY;AAChB,MAAI,UAAU;AACd,MAAI,WAAW;EAEf,IAAIC,OAAwD;AAC5D,OAAK,MAAM,KAAK,SAAS;GACvB,MAAM,IAAI,OAAO,QAAQ,KAAK,EAAE,IAAI,MAAM;GAC1C,MAAM,IAAI,OAAO,SAAS,KAAK,EAAE,IAAI,MAAM;AAC3C,OAAI,SAAS,KAAK,OAAO;AACvB,QAAI,WAAW;AACf,QAAI,OAAO,OAAO,QAAQ,KAAK,KAAK,IAAI,MAAM,OAAO,OAAO,SAAS,KAAK,KAAK,IAAI,MAAM,MAAM;AAC/F,QAAI,OAAO,GAAG,EAAE;AAChB,QAAI,QAAQ;GACb,OAAM;AACL,QAAI,WAAW;AACf,QAAI,OAAO,GAAG,EAAE;GACjB;AACD,UAAO;EACR;AAED,SAAO,GAAG,KAAK,MAAM;GACnB,MAAM,QAAQ,GAAG,QAAQ,WAAW,QAAQ,EAAE;GAC9C,MAAM,aAAa,GAAG,KAAK,OAAO,UAAU;GAC5C,MAAM,kBAAkB,GAAG,IAAI,YAAY,IAAI;GAC/C,MAAM,eAAe,GAAG,WAAW,iBAAiB,EAAE;GAEtD,MAAM,QAAQ,AAAC,MAAO,QAAQ,aAAa,CAAY,UAAU;GACjE,MAAM,OAAO,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC,KAAK,CAAC,GAAG,MAAM,MAAM,KAAK,MAAM,GAAG,CAAC,MAAM,GAAG,GAAG;AAEtF,UAAO,KAAK,IAAI,OAAM,IAAI,KAAK,SAAS,KAAK,KAAK,GAAI,CAAC,OAAO,QAAQ;EACvE,EAAC;CACH;CAED,MAAM,QAAQ;AACZ,OAAK,OAAO,SAAS;AACrB,OAAK;CACN;AACF"}
package/package.json
CHANGED
@@ -1,9 +1,17 @@
 {
   "name": "@zh-keyboard/recognizer",
-  "version": "0.1.0",
+  "version": "0.2.0",
   "description": "中文手写汉字识别器组件",
   "author": "dusionlike",
   "license": "Apache 2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/dusionlike/zh-keyboard",
+    "directory": "packages/recognizer"
+  },
+  "bugs": {
+    "url": "https://github.com/dusionlike/zh-keyboard/issues"
+  },
   "keywords": [
     "键盘",
     "中文",
@@ -24,11 +32,14 @@
   "module": "dist/index.mjs",
   "types": "dist/index.d.ts",
   "files": [
-    "dist"
+    "dist",
+    "qrcode.jpg"
   ],
   "dependencies": {
-    "@tensorflow/tfjs": "^4.22.0",
-    "@
+    "@tensorflow/tfjs-backend-webgl": "^4.22.0",
+    "@tensorflow/tfjs-converter": "^4.22.0",
+    "@tensorflow/tfjs-core": "^4.22.0",
+    "@zh-keyboard/core": "0.2.0"
   },
   "devDependencies": {
     "tsdown": "^0.11.1",
package/qrcode.jpg
ADDED
Binary file