react-brai 1.0.0 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of react-brai might be problematic. See the package's registry page for more details.

package/package.json CHANGED
@@ -1,20 +1,15 @@
1
1
  {
2
2
  "name": "react-brai",
3
- "version": "1.0.0",
3
+ "version": "1.0.1",
4
4
  "main": "./dist/index.js",
5
5
  "module": "./dist/index.mjs",
6
6
  "types": "./dist/index.d.ts",
7
- "scripts": {
8
- "build:worker": "tsup src/worker.ts --format esm --no-splitting --clean --dts",
9
-
10
- "embed": "node scripts/embed-worker.js",
11
-
12
-
13
- "build:lib": "tsup src/index.ts --format cjs,esm --dts --external react",
14
-
15
-
16
- "build": "npm run build:worker && npm run embed && npm run build:lib"
17
- },
7
+ "scripts": {
8
+ "build:worker": "tsup src/worker.ts --format esm --no-splitting --clean --dts",
9
+ "embed": "node scripts/embed-worker.js",
10
+ "build:lib": "tsup src/index.ts --format cjs,esm --dts --external react",
11
+ "build": "npm run build:worker && npm run embed && npm run build:lib"
12
+ },
18
13
  "keywords": [],
19
14
  "author": "",
20
15
  "license": "ISC",
@@ -25,6 +20,7 @@
25
20
  "devDependencies": {
26
21
  "@types/node": "^25.0.3",
27
22
  "@types/react": "^19.2.7",
23
+ "@webgpu/types": "^0.1.68",
28
24
  "react": "^19.2.3",
29
25
  "tsup": "^8.5.1",
30
26
  "typescript": "^5.9.3"
package/src/useLocalAI.ts CHANGED
@@ -7,6 +7,8 @@ import { WORKER_CODE } from "./worker-embedded";
7
7
  export function useLocalAI() {
8
8
  const workerRef = useRef<Worker | null>(null);
9
9
 
10
+ const [isSupported, setIsSupported] = useState<boolean | null>(null);
11
+
10
12
  const [isReady, setIsReady] = useState(false);
11
13
  const [isLoading, setIsLoading] = useState(false);
12
14
  const [response, setResponse] = useState("");
@@ -15,6 +17,15 @@ export function useLocalAI() {
15
17
 
16
18
  // 1. AUTOMATIC INITIALIZATION (No more initWorker)
17
19
  useEffect(() => {
20
+ if (!navigator.gpu) {
21
+ console.error("WebGPU is not supported on this device.");
22
+ setIsSupported(false);
23
+ setError("WebGPU is not supported on this device.");
24
+ return; // Stop here, don't load the worker
25
+ }
26
+
27
+ setIsSupported(true);
28
+
18
29
  if (!WORKER_CODE) return;
19
30
 
20
31
  // A. Create the worker from the embedded string
@@ -71,6 +82,7 @@ export function useLocalAI() {
71
82
 
72
83
  return {
73
84
  // initWorker is gone!
85
+ isSupported,
74
86
  loadModel,
75
87
  chat,
76
88
  isReady,
package/tsconfig.json CHANGED
@@ -12,7 +12,8 @@
12
12
  "skipLibCheck": true,
13
13
  "forceConsistentCasingInFileNames": true,
14
14
  "incremental": false, // <--- THIS IS THE FIX
15
- "jsx": "react"
15
+ "jsx": "react",
16
+ "types":["@webgpu/types"]
16
17
  },
17
18
  "include": ["src"],
18
19
  "exclude": ["node_modules", "dist"]
package/dist/index.d.mts DELETED
@@ -1,29 +0,0 @@
1
- import { InitProgressReport } from '@mlc-ai/web-llm';
2
- export { InitProgressReport } from '@mlc-ai/web-llm';
3
-
4
- type ModelID = "Llama-3-8B-Instruct-q4f16_1-MLC" | "Gemma-2b-it-q4f16_1-MLC" | string;
5
- interface Message {
6
- role: "system" | "user" | "assistant";
7
- content: string;
8
- }
9
- interface WorkerMessage {
10
- type: "READY" | "PROGRESS" | "TOKEN" | "DONE" | "ERROR";
11
- payload?: any;
12
- }
13
- interface WorkerPayload {
14
- type: "LOAD" | "GENERATE";
15
- modelId?: string;
16
- messages?: Message[];
17
- }
18
-
19
- declare function useLocalAI(): {
20
- loadModel: (modelId: string) => void;
21
- chat: (messages: Message[]) => void;
22
- isReady: boolean;
23
- isLoading: boolean;
24
- response: string;
25
- progress: InitProgressReport | null;
26
- error: string | null;
27
- };
28
-
29
- export { type Message, type ModelID, type WorkerMessage, type WorkerPayload, useLocalAI };
package/dist/index.d.ts DELETED
@@ -1,29 +0,0 @@
1
- import { InitProgressReport } from '@mlc-ai/web-llm';
2
- export { InitProgressReport } from '@mlc-ai/web-llm';
3
-
4
- type ModelID = "Llama-3-8B-Instruct-q4f16_1-MLC" | "Gemma-2b-it-q4f16_1-MLC" | string;
5
- interface Message {
6
- role: "system" | "user" | "assistant";
7
- content: string;
8
- }
9
- interface WorkerMessage {
10
- type: "READY" | "PROGRESS" | "TOKEN" | "DONE" | "ERROR";
11
- payload?: any;
12
- }
13
- interface WorkerPayload {
14
- type: "LOAD" | "GENERATE";
15
- modelId?: string;
16
- messages?: Message[];
17
- }
18
-
19
- declare function useLocalAI(): {
20
- loadModel: (modelId: string) => void;
21
- chat: (messages: Message[]) => void;
22
- isReady: boolean;
23
- isLoading: boolean;
24
- response: string;
25
- progress: InitProgressReport | null;
26
- error: string | null;
27
- };
28
-
29
- export { type Message, type ModelID, type WorkerMessage, type WorkerPayload, useLocalAI };