react-brai 1.2.3
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only.
- package/copy-dts.js +29 -0
- package/dist/index.d.mts +57 -0
- package/dist/index.d.ts +57 -0
- package/dist/index.js +1 -0
- package/dist/index.mjs +1 -0
- package/dist/worker.d.mts +2 -0
- package/dist/worker.d.ts +2 -0
- package/dist/worker.js +1 -0
- package/dist/worker.mjs +1 -0
- package/package.json +37 -0
package/copy-dts.js
ADDED
@@ -0,0 +1,29 @@
+// copy-dts.js
+const fs = require("fs");
+const path = require("path");
+
+function copyDts(src, dest) {
+  if (!fs.existsSync(dest)) fs.mkdirSync(dest);
+
+  const entries = fs.readdirSync(src, { withFileTypes: true });
+
+  for (const entry of entries) {
+    const srcPath = path.join(src, entry.name);
+    const destPath = path.join(dest, entry.name);
+
+    if (entry.isDirectory()) {
+      copyDts(srcPath, destPath);
+    } else if (entry.name.endsWith(".d.ts") || entry.name.endsWith(".d.mts")) {
+      fs.copyFileSync(srcPath, destPath);
+      console.log(`Saved type definition: ${entry.name}`);
+    }
+  }
+}
+
+try {
+  copyDts("dist-raw", "dist");
+  console.log("✅ Type definitions copied successfully.");
+} catch (err) {
+  console.error("❌ Failed to copy types:", err);
+  process.exit(1);
+}
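The script above walks dist-raw recursively and copies only the .d.ts/.d.mts files into dist. For comparison, on Node 16.7+ the same copy can be expressed with the built-in fs.cpSync filter option — a minimal sketch of an equivalent, not what the package ships:

// equivalent-copy.ts — hypothetical alternative, not part of the package.
import fs from "node:fs";

// cpSync recurses on its own; the filter keeps directories (so traversal
// continues) and any file whose name ends in .d.ts or .d.mts.
fs.cpSync("dist-raw", "dist", {
  recursive: true,
  filter: (src) =>
    fs.statSync(src).isDirectory() ||
    src.endsWith(".d.ts") ||
    src.endsWith(".d.mts"),
});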
package/dist/index.d.mts
ADDED
@@ -0,0 +1,57 @@
+import { InitProgressReport } from '@mlc-ai/web-llm';
+export { InitProgressReport } from '@mlc-ai/web-llm';
+import * as react_cai from 'react-cai';
+
+type ModelID = "Llama-3-8B-Instruct-q4f16_1-MLC" | "Gemma-2b-it-q4f16_1-MLC" | string;
+interface Message {
+    role: "system" | "user" | "assistant";
+    content: string;
+}
+interface WorkerMessage {
+    type: "READY" | "PROGRESS" | "TOKEN" | "DONE" | "ERROR";
+    payload?: any;
+}
+interface CustomModelRecord {
+    model: string;
+    model_id: string;
+    model_lib: string;
+}
+interface ModelRecord {
+    model: string;
+    model_id: string;
+    model_lib: string;
+}
+interface WorkerPayload {
+    type: "LOAD" | "GENERATE";
+    modelId?: string | ModelRecord;
+    messages?: any[];
+    options?: {
+        context_window_size?: number;
+    };
+}
+
+declare function useLocalAI(): {
+    isSupported: boolean | null;
+    loadModel: (model: string | CustomModelRecord, options?: {
+        contextWindow?: number;
+    }) => void;
+    chat: (messages: Message[]) => void;
+    isReady: boolean;
+    isLoading: boolean;
+    response: string;
+    progress: InitProgressReport | null;
+    error: string | null;
+};
+
+type TransformerStatus = "idle" | "loading" | "ready" | "error";
+declare function useUniversalAI(): {
+    chat: (messages: react_cai.Message[]) => void;
+    response: string;
+    isLlmReady: boolean;
+    loadPipeline: (task: string, model: string) => string;
+    runPipeline: (task: string, model: string, input: any, options?: {}) => Promise<any>;
+    pipelines: Record<string, TransformerStatus>;
+    progress: Record<string, number>;
+};
+
+export { type CustomModelRecord, type Message, type ModelID, type ModelRecord, type WorkerMessage, type WorkerPayload, useLocalAI, useUniversalAI };
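These declarations are the package's entire public surface: two hooks plus the worker message types (LOAD/GENERATE requests answered by READY/PROGRESS/TOKEN/DONE/ERROR). A minimal consumer of useLocalAI, inferred from the type signatures alone — the diff includes no docs or readable runtime source, so the chosen model ID, the progress.text field, and the mounting pattern are assumptions:

// LocalChat.tsx — hypothetical consumer; names and flow are inferred.
import React, { useEffect } from "react";
import { useLocalAI } from "react-brai";

export function LocalChat() {
  const { isSupported, loadModel, chat, isReady, response, progress, error } =
    useLocalAI();

  useEffect(() => {
    // A prebuilt @mlc-ai/web-llm model ID from the ModelID union above.
    loadModel("Gemma-2b-it-q4f16_1-MLC", { contextWindow: 2048 });
  }, []);

  if (isSupported === false) return <p>WebGPU is not available here.</p>;
  if (!isReady) return <p>Loading model… {progress?.text}</p>;

  return (
    <div>
      <button onClick={() => chat([{ role: "user", content: "Hello!" }])}>
        Send
      </button>
      <pre>{response}</pre>
      {error && <p role="alert">{error}</p>}
    </div>
  );
}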
package/dist/index.d.ts
ADDED
@@ -0,0 +1,57 @@
+import { InitProgressReport } from '@mlc-ai/web-llm';
+export { InitProgressReport } from '@mlc-ai/web-llm';
+import * as react_cai from 'react-cai';
+
+type ModelID = "Llama-3-8B-Instruct-q4f16_1-MLC" | "Gemma-2b-it-q4f16_1-MLC" | string;
+interface Message {
+    role: "system" | "user" | "assistant";
+    content: string;
+}
+interface WorkerMessage {
+    type: "READY" | "PROGRESS" | "TOKEN" | "DONE" | "ERROR";
+    payload?: any;
+}
+interface CustomModelRecord {
+    model: string;
+    model_id: string;
+    model_lib: string;
+}
+interface ModelRecord {
+    model: string;
+    model_id: string;
+    model_lib: string;
+}
+interface WorkerPayload {
+    type: "LOAD" | "GENERATE";
+    modelId?: string | ModelRecord;
+    messages?: any[];
+    options?: {
+        context_window_size?: number;
+    };
+}
+
+declare function useLocalAI(): {
+    isSupported: boolean | null;
+    loadModel: (model: string | CustomModelRecord, options?: {
+        contextWindow?: number;
+    }) => void;
+    chat: (messages: Message[]) => void;
+    isReady: boolean;
+    isLoading: boolean;
+    response: string;
+    progress: InitProgressReport | null;
+    error: string | null;
+};
+
+type TransformerStatus = "idle" | "loading" | "ready" | "error";
+declare function useUniversalAI(): {
+    chat: (messages: react_cai.Message[]) => void;
+    response: string;
+    isLlmReady: boolean;
+    loadPipeline: (task: string, model: string) => string;
+    runPipeline: (task: string, model: string, input: any, options?: {}) => Promise<any>;
+    pipelines: Record<string, TransformerStatus>;
+    progress: Record<string, number>;
+};
+
+export { type CustomModelRecord, type Message, type ModelID, type ModelRecord, type WorkerMessage, type WorkerPayload, useLocalAI, useUniversalAI };
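index.d.ts is byte-for-byte identical to index.d.mts; dual CJS/ESM builds commonly emit both so each module format resolves the same types. For completeness, a sketch of the second hook, useUniversalAI — the task/model naming follows the Transformers.js convention, and the keying of pipelines/progress is not visible in this diff, so both are assumptions:

// Classify.tsx — hypothetical; task and model names are illustrative only.
import React from "react";
import { useUniversalAI } from "react-brai";

export function Classify({ text }: { text: string }) {
  const { runPipeline, pipelines } = useUniversalAI();

  const onClick = async () => {
    const result = await runPipeline(
      "sentiment-analysis",
      "Xenova/distilbert-base-uncased-finetuned-sst-2-english",
      text
    );
    console.log(result); // result shape depends on the underlying pipeline
  };

  // Assuming pipelines is keyed by task name; the .d.ts leaves this open.
  const status = pipelines["sentiment-analysis"] ?? "idle";
  return <button onClick={onClick}>Classify ({status})</button>;
}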