react-cai 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +45 -0
- package/dist/index.d.ts +45 -0
- package/dist/index.js +1 -0
- package/dist/index.mjs +1 -0
- package/package.json +29 -0
- package/scripts/embed-worker.js +20 -0
- package/src/index.ts +3 -0
- package/src/types.ts +44 -0
- package/src/useLocalAI.ts +109 -0
- package/src/worker-embedded.ts +1 -0
- package/src/worker.ts +63 -0
- package/tsconfig.json +20 -0
- package/tsup.config.ts +37 -0
package/package.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
{
  "name": "react-cai",
  "version": "1.0.0",
  "main": "./dist/index.js",
  "module": "./dist/index.mjs",
  "types": "./dist/index.d.ts",
  "scripts": {
    "build:worker": "tsup src/worker.ts --format esm --no-splitting --clean --dts",
    "embed": "node scripts/embed-worker.js",
    "build:lib": "tsup src/index.ts --format cjs,esm --dts --external react",
    "build": "npm run build:worker && npm run embed && npm run build:lib"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "description": "",
  "dependencies": {
    "@mlc-ai/web-llm": "^0.2.80"
  },
  "devDependencies": {
    "@types/node": "^25.0.3",
    "@types/react": "^19.2.7",
    "@webgpu/types": "^0.1.68",
    "esbuild-plugin-obfuscator": "^1.4.0",
    "react": "^19.2.3",
    "tsup": "^8.5.1",
    "typescript": "^5.9.3"
  }
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
// scripts/embed-worker.js
// Build step: inlines the compiled worker bundle (dist/worker.mjs) into
// src/worker-embedded.ts as a string constant, so consumers can spawn the
// worker from a Blob URL without serving a separate worker file.
const fs = require('fs');
const path = require('path');

// The worker is built as ESM ("build:worker" uses --format esm),
// so the bundle lands at dist/worker.mjs rather than dist/worker.js.
const bundlePath = path.join(__dirname, '../dist/worker.mjs');

// Fail fast with a clear message if "build:worker" has not run yet.
if (!fs.existsSync(bundlePath)) {
  console.error("❌ Error: Could not find", bundlePath);
  process.exit(1);
}

// JSON.stringify escapes quotes/backslashes/newlines, so the whole bundle
// survives as a single valid TypeScript string literal.
const embedded = `export const WORKER_CODE = ${JSON.stringify(fs.readFileSync(bundlePath, 'utf8'))};`;

fs.writeFileSync(path.join(__dirname, '../src/worker-embedded.ts'), embedded);

console.log("✅ Worker code embedded into src/worker-embedded.ts");
|
package/src/index.ts
ADDED
package/src/types.ts
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
// src/types.ts
|
|
2
|
+
import { InitProgressReport } from "@mlc-ai/web-llm";
|
|
3
|
+
|
|
4
|
+
export type ModelID = "Llama-3-8B-Instruct-q4f16_1-MLC" | "Gemma-2b-it-q4f16_1-MLC" | string;
|
|
5
|
+
|
|
6
|
+
export interface Message {
|
|
7
|
+
role: "system" | "user" | "assistant";
|
|
8
|
+
content: string;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export interface WorkerMessage {
|
|
12
|
+
type: "READY" | "PROGRESS" | "TOKEN" | "DONE" | "ERROR";
|
|
13
|
+
payload?: any;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
// src/types.ts (or wherever you keep this)
|
|
17
|
+
|
|
18
|
+
// src/types.ts (Add this)
|
|
19
|
+
export interface CustomModelRecord {
|
|
20
|
+
model: string; // URL to Hugging Face weights repo
|
|
21
|
+
model_id: string; // Unique ID for local caching
|
|
22
|
+
model_lib: string; // URL to the .wasm binary for the architecture
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
// src/types.ts
|
|
26
|
+
export interface ModelRecord {
|
|
27
|
+
model: string; // HF URL
|
|
28
|
+
model_id: string; // Local ID
|
|
29
|
+
model_lib: string; // WASM URL
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export interface WorkerPayload {
|
|
33
|
+
type: "LOAD" | "GENERATE";
|
|
34
|
+
modelId?: string | ModelRecord; // Allow both
|
|
35
|
+
messages?: any[];
|
|
36
|
+
options?: {
|
|
37
|
+
context_window_size?: number;
|
|
38
|
+
};
|
|
39
|
+
}
|
|
40
|
+
// ... include your WorkerMessage and InitProgressReport types here
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
export type { InitProgressReport };
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
// src/useLocalAI.ts
|
|
2
|
+
import { useState, useCallback, useEffect, useRef } from "react";
|
|
3
|
+
import { Message, InitProgressReport, CustomModelRecord } from "./types";
|
|
4
|
+
// This file doesn't exist yet, but your "npm run build" script will generate it!
|
|
5
|
+
import { WORKER_CODE } from "./worker-embedded";
|
|
6
|
+
|
|
7
|
+
export function useLocalAI() {
|
|
8
|
+
const workerRef = useRef<Worker | null>(null);
|
|
9
|
+
|
|
10
|
+
const [isSupported, setIsSupported] = useState<boolean | null>(null);
|
|
11
|
+
|
|
12
|
+
const [isReady, setIsReady] = useState(false);
|
|
13
|
+
const [isLoading, setIsLoading] = useState(false);
|
|
14
|
+
const [response, setResponse] = useState("");
|
|
15
|
+
const [progress, setProgress] = useState<InitProgressReport | null>(null);
|
|
16
|
+
const [error, setError] = useState<string | null>(null);
|
|
17
|
+
|
|
18
|
+
// 1. AUTOMATIC INITIALIZATION (No more initWorker)
|
|
19
|
+
useEffect(() => {
|
|
20
|
+
if (!navigator.gpu) {
|
|
21
|
+
console.error("WebGPU is not supported on this device.");
|
|
22
|
+
setIsSupported(false);
|
|
23
|
+
setError("WebGPU is not supported on this device.");
|
|
24
|
+
return; // Stop here, don't load the worker
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
setIsSupported(true);
|
|
28
|
+
|
|
29
|
+
if (!WORKER_CODE) return;
|
|
30
|
+
|
|
31
|
+
// A. Create the worker from the embedded string
|
|
32
|
+
const blob = new Blob([WORKER_CODE], { type: 'application/javascript' });
|
|
33
|
+
const workerUrl = URL.createObjectURL(blob);
|
|
34
|
+
const worker = new Worker(workerUrl, { type: 'module' });
|
|
35
|
+
|
|
36
|
+
workerRef.current = worker;
|
|
37
|
+
|
|
38
|
+
// B. Set up the listeners immediately
|
|
39
|
+
worker.onmessage = (event) => {
|
|
40
|
+
const { type, payload } = event.data;
|
|
41
|
+
|
|
42
|
+
switch (type) {
|
|
43
|
+
case "PROGRESS":
|
|
44
|
+
setProgress(payload);
|
|
45
|
+
break;
|
|
46
|
+
case "READY":
|
|
47
|
+
setIsReady(true);
|
|
48
|
+
break;
|
|
49
|
+
case "TOKEN":
|
|
50
|
+
setResponse((prev) => prev + payload);
|
|
51
|
+
break;
|
|
52
|
+
case "DONE":
|
|
53
|
+
setIsLoading(false);
|
|
54
|
+
break;
|
|
55
|
+
case "ERROR":
|
|
56
|
+
setError(String(payload));
|
|
57
|
+
setIsLoading(false);
|
|
58
|
+
break;
|
|
59
|
+
}
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
// C. Cleanup when the component unmounts
|
|
63
|
+
return () => {
|
|
64
|
+
worker.terminate();
|
|
65
|
+
URL.revokeObjectURL(workerUrl);
|
|
66
|
+
};
|
|
67
|
+
}, []);
|
|
68
|
+
|
|
69
|
+
const loadModel = useCallback((
|
|
70
|
+
model: string | CustomModelRecord,
|
|
71
|
+
options?: { contextWindow?: number }
|
|
72
|
+
) => {
|
|
73
|
+
if (!workerRef.current) return;
|
|
74
|
+
setError(null);
|
|
75
|
+
setIsReady(false);
|
|
76
|
+
setProgress(null); // Clear old progress
|
|
77
|
+
|
|
78
|
+
workerRef.current.postMessage({
|
|
79
|
+
type: "LOAD",
|
|
80
|
+
payload: {
|
|
81
|
+
model,
|
|
82
|
+
options: { context_window_size: options?.contextWindow }
|
|
83
|
+
}
|
|
84
|
+
});
|
|
85
|
+
}, []);
|
|
86
|
+
|
|
87
|
+
const chat = useCallback((messages: Message[]) => {
|
|
88
|
+
if (!workerRef.current) return;
|
|
89
|
+
setIsLoading(true);
|
|
90
|
+
setResponse("");
|
|
91
|
+
// Ensure this matches the worker's "GENERATE" listener
|
|
92
|
+
workerRef.current.postMessage({
|
|
93
|
+
type: "GENERATE",
|
|
94
|
+
messages
|
|
95
|
+
});
|
|
96
|
+
}, []);
|
|
97
|
+
|
|
98
|
+
return {
|
|
99
|
+
// initWorker is gone!
|
|
100
|
+
isSupported,
|
|
101
|
+
loadModel,
|
|
102
|
+
chat,
|
|
103
|
+
isReady,
|
|
104
|
+
isLoading,
|
|
105
|
+
response,
|
|
106
|
+
progress,
|
|
107
|
+
error
|
|
108
|
+
};
|
|
109
|
+
}
|