@ismail-kattakath/mediapipe-react 0.0.1

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (44)
  1. package/LICENSE +21 -0
  2. package/dist/audio.d.mts +3 -0
  3. package/dist/audio.d.ts +3 -0
  4. package/dist/audio.js +7 -0
  5. package/dist/audio.js.map +1 -0
  6. package/dist/audio.mjs +7 -0
  7. package/dist/audio.mjs.map +1 -0
  8. package/dist/chunk-G5GKFZAW.js +9 -0
  9. package/dist/chunk-G5GKFZAW.js.map +1 -0
  10. package/dist/chunk-HDKHZ5MY.mjs +9 -0
  11. package/dist/chunk-HDKHZ5MY.mjs.map +1 -0
  12. package/dist/chunk-HQ7V4B42.js +179 -0
  13. package/dist/chunk-HQ7V4B42.js.map +1 -0
  14. package/dist/chunk-LMIUBEL2.mjs +9 -0
  15. package/dist/chunk-LMIUBEL2.mjs.map +1 -0
  16. package/dist/chunk-SUBDHSBP.js +9 -0
  17. package/dist/chunk-SUBDHSBP.js.map +1 -0
  18. package/dist/chunk-Z5PADYRT.mjs +179 -0
  19. package/dist/chunk-Z5PADYRT.mjs.map +1 -0
  20. package/dist/genai.d.mts +13 -0
  21. package/dist/genai.d.ts +13 -0
  22. package/dist/genai.js +10 -0
  23. package/dist/genai.js.map +1 -0
  24. package/dist/genai.mjs +10 -0
  25. package/dist/genai.mjs.map +1 -0
  26. package/dist/genai.worker.d.mts +2 -0
  27. package/dist/genai.worker.d.ts +2 -0
  28. package/dist/genai.worker.js +52 -0
  29. package/dist/genai.worker.js.map +1 -0
  30. package/dist/genai.worker.mjs +52 -0
  31. package/dist/genai.worker.mjs.map +1 -0
  32. package/dist/index.d.mts +19 -0
  33. package/dist/index.d.ts +19 -0
  34. package/dist/index.js +20 -0
  35. package/dist/index.js.map +1 -0
  36. package/dist/index.mjs +20 -0
  37. package/dist/index.mjs.map +1 -0
  38. package/dist/vision.d.mts +3 -0
  39. package/dist/vision.d.ts +3 -0
  40. package/dist/vision.js +7 -0
  41. package/dist/vision.js.map +1 -0
  42. package/dist/vision.mjs +7 -0
  43. package/dist/vision.mjs.map +1 -0
  44. package/package.json +62 -0
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Ismail Kattakath
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/dist/audio.d.mts ADDED
@@ -0,0 +1,3 @@
1
+ declare const useAudio: () => string;
2
+
3
+ export { useAudio };
package/dist/audio.d.ts ADDED
@@ -0,0 +1,3 @@
1
+ declare const useAudio: () => string;
2
+
3
+ export { useAudio };
package/dist/audio.js ADDED
@@ -0,0 +1,7 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});
2
+
3
+ var _chunkG5GKFZAWjs = require('./chunk-G5GKFZAW.js');
4
+
5
+
6
+ exports.useAudio = _chunkG5GKFZAWjs.useAudio;
7
+ //# sourceMappingURL=audio.js.map
package/dist/audio.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/audio.js"],"names":[],"mappings":"AAAA;AACE;AACF,sDAA4B;AAC5B;AACE;AACF,6CAAC","file":"/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/audio.js"}
package/dist/audio.mjs ADDED
@@ -0,0 +1,7 @@
1
+ import {
2
+ useAudio
3
+ } from "./chunk-LMIUBEL2.mjs";
4
+ export {
5
+ useAudio
6
+ };
7
+ //# sourceMappingURL=audio.mjs.map
package/dist/audio.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/dist/chunk-G5GKFZAW.js ADDED
@@ -0,0 +1,9 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});// src/audio/index.ts
2
+ var useAudio = () => {
3
+ return "Audio implementation";
4
+ };
5
+
6
+
7
+
8
+ exports.useAudio = useAudio;
9
+ //# sourceMappingURL=chunk-G5GKFZAW.js.map
package/dist/chunk-G5GKFZAW.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/chunk-G5GKFZAW.js","../src/audio/index.ts"],"names":[],"mappings":"AAAA;ACAO,IAAM,SAAA,EAAW,CAAA,EAAA,GAAM;AAC1B,EAAA,OAAO,sBAAA;AACX,CAAA;ADEA;AACA;AACE;AACF,4BAAC","file":"/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/chunk-G5GKFZAW.js","sourcesContent":[null,"export const useAudio = () => {\n return \"Audio implementation\";\n};\n"]}
package/dist/chunk-HDKHZ5MY.mjs ADDED
@@ -0,0 +1,9 @@
1
+ // src/vision/index.ts
2
+ var useVision = () => {
3
+ return "Vision implementation";
4
+ };
5
+
6
+ export {
7
+ useVision
8
+ };
9
+ //# sourceMappingURL=chunk-HDKHZ5MY.mjs.map
package/dist/chunk-HDKHZ5MY.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/vision/index.ts"],"sourcesContent":["export const useVision = () => {\n return \"Vision implementation\";\n};\n"],"mappings":";AAAO,IAAM,YAAY,MAAM;AAC3B,SAAO;AACX;","names":[]}
package/dist/chunk-HQ7V4B42.js ADDED
@@ -0,0 +1,179 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});// src/genai.ts
2
+ var _react = require('react');
3
+
4
+ // src/index.tsx
5
+
6
+
7
+ // src/utils.ts
8
+ var isBrowser = typeof window !== "undefined";
9
+
10
+ // src/index.tsx
11
+ var _jsxruntime = require('react/jsx-runtime');
12
+ var MediaPipeContext = _react.createContext.call(void 0, null);
13
+ var MediaPipeProvider = ({
14
+ children,
15
+ wasmPath,
16
+ modelPath
17
+ }) => {
18
+ const value = _react.useMemo.call(void 0,
19
+ () => ({
20
+ wasmPath,
21
+ modelPath,
22
+ isBrowser
23
+ }),
24
+ [wasmPath, modelPath]
25
+ );
26
+ return /* @__PURE__ */ _jsxruntime.jsx.call(void 0, MediaPipeContext.Provider, { value, children });
27
+ };
28
+ var useMediaPipeContext = () => {
29
+ const context = _react.useContext.call(void 0, MediaPipeContext);
30
+ if (!context) {
31
+ throw new Error("useMediaPipeContext must be used within a MediaPipeProvider");
32
+ }
33
+ return context;
34
+ };
35
+
36
+ // src/genai.ts
37
+ var workerScript = `
38
+ import { LlmInference, FilesetResolver } from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai';
39
+
40
+ let llmInference = null;
41
+
42
+ async function checkGpuSupport() {
43
+ if (!('gpu' in navigator)) {
44
+ throw new Error('WebGPU is not supported in this browser.');
45
+ }
46
+ const gpu = navigator.gpu;
47
+ const adapter = await gpu.requestAdapter();
48
+ if (!adapter) {
49
+ throw new Error('No appropriate GPU adapter found.');
50
+ }
51
+ }
52
+
53
+ async function initInference(modelPath, wasmPath) {
54
+ try {
55
+ await checkGpuSupport();
56
+ const genai = await FilesetResolver.forGenAiTasks(wasmPath);
57
+ llmInference = await LlmInference.createFromOptions(genai, {
58
+ baseOptions: { modelAssetPath: modelPath },
59
+ });
60
+ self.postMessage({ type: 'INIT_COMPLETE' });
61
+ } catch (error) {
62
+ self.postMessage({ type: 'ERROR', error: error.message || 'Unknown error during initialization' });
63
+ }
64
+ }
65
+
66
+ self.onmessage = async (event) => {
67
+ const { type, payload } = event.data;
68
+
69
+ if (type === 'INIT') {
70
+ const { modelPath, wasmPath } = payload;
71
+ await initInference(modelPath, wasmPath);
72
+ }
73
+
74
+ if (type === 'GENERATE') {
75
+ if (!llmInference) {
76
+ self.postMessage({ type: 'ERROR', error: 'LLM Inference not initialized. Please ensure the model is loaded and WebGPU is supported.' });
77
+ return;
78
+ }
79
+
80
+ try {
81
+ const { prompt } = payload;
82
+ llmInference.generateResponse(prompt, (partialText, done) => {
83
+ self.postMessage({
84
+ type: 'CHUNK',
85
+ payload: { text: partialText, done }
86
+ });
87
+ });
88
+ } catch (error) {
89
+ self.postMessage({ type: 'ERROR', error: error.message || 'Error generating response' });
90
+ }
91
+ }
92
+ };
93
+ `;
94
+ function useLlm(options = {}) {
95
+ const context = useMediaPipeContext();
96
+ const [output, setOutput] = _react.useState.call(void 0, "");
97
+ const [isLoading, setIsLoading] = _react.useState.call(void 0, false);
98
+ const [error, setError] = _react.useState.call(void 0, null);
99
+ const [progress, setProgress] = _react.useState.call(void 0, 0);
100
+ const workerRef = _react.useRef.call(void 0, null);
101
+ const modelPath = options.modelPath || context.modelPath;
102
+ const wasmPath = options.wasmPath || context.wasmPath || "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/wasm";
103
+ _react.useEffect.call(void 0, () => {
104
+ if (!context.isBrowser || !modelPath) return;
105
+ if (!("gpu" in navigator)) {
106
+ setTimeout(() => setError("WebGPU is not supported in this browser."), 0);
107
+ return;
108
+ }
109
+ let worker;
110
+ try {
111
+ worker = new Worker(new URL("./genai.worker", import.meta.url), {
112
+ type: "module",
113
+ name: "mediapipe-genai-worker"
114
+ });
115
+ } catch (_e) {
116
+ console.warn("MediaPipe React: Falling back to Blob-based GenAI worker");
117
+ const blob = new Blob([workerScript], { type: "application/javascript" });
118
+ worker = new Worker(URL.createObjectURL(blob));
119
+ }
120
+ workerRef.current = worker;
121
+ worker.onmessage = (event) => {
122
+ const { type, payload, error: workerError } = event.data;
123
+ switch (type) {
124
+ case "INIT_COMPLETE":
125
+ setIsLoading(false);
126
+ setProgress(100);
127
+ break;
128
+ case "CHUNK":
129
+ setOutput((prev) => prev + payload.text);
130
+ if (payload.done) {
131
+ setIsLoading(false);
132
+ }
133
+ break;
134
+ case "ERROR":
135
+ setError(workerError || "Worker encountered an error");
136
+ setIsLoading(false);
137
+ break;
138
+ }
139
+ };
140
+ setTimeout(() => {
141
+ setIsLoading(true);
142
+ setProgress(10);
143
+ }, 0);
144
+ worker.postMessage({
145
+ type: "INIT",
146
+ payload: { modelPath, wasmPath }
147
+ });
148
+ return () => {
149
+ worker.terminate();
150
+ };
151
+ }, [context.isBrowser, modelPath, wasmPath]);
152
+ const generate = _react.useCallback.call(void 0, (prompt) => {
153
+ if (!workerRef.current) {
154
+ setError("Worker not initialized");
155
+ return;
156
+ }
157
+ setOutput("");
158
+ setIsLoading(true);
159
+ setError(null);
160
+ workerRef.current.postMessage({
161
+ type: "GENERATE",
162
+ payload: { prompt }
163
+ });
164
+ }, []);
165
+ return {
166
+ output,
167
+ isLoading,
168
+ progress,
169
+ error,
170
+ generate
171
+ };
172
+ }
173
+
174
+
175
+
176
+
177
+
178
+ exports.useLlm = useLlm; exports.MediaPipeProvider = MediaPipeProvider; exports.useMediaPipeContext = useMediaPipeContext;
179
+ //# sourceMappingURL=chunk-HQ7V4B42.js.map
package/dist/chunk-HQ7V4B42.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/chunk-HQ7V4B42.js","../src/genai.ts","../src/index.tsx","../src/utils.ts"],"names":[],"mappings":"AAAA;ACEA,8BAAyD;ADAzD;AACA;AEDA;AFGA;AACA;AGNO,IAAM,UAAA,EAAY,OAAO,OAAA,IAAW,WAAA;AHQ3C;AACA;AE6BQ,+CAAA;AAvBR,IAAM,iBAAA,EAAmB,kCAAA,IAA+C,CAAA;AAQjE,IAAM,kBAAA,EAAsD,CAAC;AAAA,EAChE,QAAA;AAAA,EACA,QAAA;AAAA,EACA;AACJ,CAAA,EAAA,GAAM;AACF,EAAA,MAAM,MAAA,EAAQ,4BAAA;AAAA,IACV,CAAA,EAAA,GAAA,CAAO;AAAA,MACH,QAAA;AAAA,MACA,SAAA;AAAA,MACA;AAAA,IACJ,CAAA,CAAA;AAAA,IACA,CAAC,QAAA,EAAU,SAAS;AAAA,EACxB,CAAA;AAEA,EAAA,uBACI,6BAAA,gBAAC,CAAiB,QAAA,EAAjB,EAA0B,KAAA,EACzB,SAAA,CACF,CAAA;AAER,CAAA;AAEO,IAAM,oBAAA,EAAsB,CAAA,EAAA,GAAM;AACrC,EAAA,MAAM,QAAA,EAAU,+BAAA,gBAA2B,CAAA;AAC3C,EAAA,GAAA,CAAI,CAAC,OAAA,EAAS;AACV,IAAA,MAAM,IAAI,KAAA,CAAM,6DAA6D,CAAA;AAAA,EACjF;AACA,EAAA,OAAO,OAAA;AACX,CAAA;AFhBA;AACA;AC1BA,IAAM,aAAA,EAAe,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAAA;AA+Dd,SAAS,MAAA,CAAO,QAAA,EAAyB,CAAC,CAAA,EAAG;AAClD,EAAA,MAAM,QAAA,EAAU,mBAAA,CAAoB,CAAA;AACpC,EAAA,MAAM,CAAC,MAAA,EAAQ,SAAS,EAAA,EAAI,6BAAA,EAAW,CAAA;AACvC,EAAA,MAAM,CAAC,SAAA,EAAW,YAAY,EAAA,EAAI,6BAAA,KAAc,CAAA;AAChD,EAAA,MAAM,CAAC,KAAA,EAAO,QAAQ,EAAA,EAAI,6BAAA,IAA4B,CAAA;AACtD,EAAA,MAAM,CAAC,QAAA,EAAU,WAAW,EAAA,EAAI,6BAAA,CAAU,CAAA;AAE1C,EAAA,MAAM,UAAA,EAAY,2BAAA,IAA0B,CAAA;AAG5C,EAAA,MAAM,UAAA,EAAY,OAAA,CAAQ,UAAA,GAAa,OAAA,CAAQ,SAAA;AAC/C,EAAA,MAAM,SAAA,EAAW,OAAA,CAAQ,SAAA,GAAY,OAAA,CAAQ,SAAA,GAAY,0DAAA;AAEzD,EAAA,8BAAA,CAAU,EAAA,GAAM;AACd,IAAA,GAAA,CAAI,CAAC,OAAA,CAAQ,UAAA,GAAa,CAAC,SAAA,EAAW,MAAA;AAGtC,IAAA,GAAA,CAAI,CAAA,CAAE,MAAA,GAAS,SAAA,CAAA,EAAY;AACzB,MAAA,UAAA,CAAW,CAAA,EAAA,GAAM,QAAA,CAAS,0CAA0C,CAAA,EAAG,CAAC,CAAA;AACxE,MAAA,MAAA;AAAA,IACF;AAEA,IAAA,IAAI,MAAA;AAEJ,IAAA,IAAI;AAEF,MAAA,OAAA,EAAS,IAAI,MAAA,CAAO,IAAI,GAAA,CAAI,gBAAA,EAAkB,MAAA,CAAA,IAAA,CAAY,GAAG,CAAA,EAAG;AAAA,QAC9D,IAAA,EAAM,QAAA;AAAA,QACN,IAAA,EAAM;AAAA,MACR,CAAC,CAAA;AAAA,IACH,EAAA,MAAA,CAAS,EAAA,EAAI;AAEX,MAAA,OAAA,CAAQ,IAAA,CAAK,0DAA0D,CAAA;AACvE,MAAA,MAAM,KAAA,EAAO,IAAI,IAAA,CAAK,CAAC,YAAY,CAAA,EAAG,EAAE,IAAA,EAAM,yBAAyB,CAAC,CAAA;AACxE,MAAA,OAAA,EAAS,IAAI,MAAA,CAAO,GAAA,CAAI,eAAA,CAAgB,IAAI,CAAC,CAAA;AAAA,IAC/C;AAEA,IAAA,SAAA,CAAU,QAAA,EAAU,MAAA;AAEpB,IAAA,MAAA,CAAO,UAAA,EAAY,CAAC,KAAA,EAAA,GAAU;AAC5B,MAAA,MAAM,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAO,YAAY,EAAA,EAAI,KAAA,CAAM,IAAA;AAEpD,MAAA,OAAA,CAAQ,IAAA,EAAM;AAAA,QACZ,KAAK,eAAA;AACH,UAAA,YAAA,CAAa,KAAK,CAAA;AAClB,UAAA,WAAA,CAAY,GAAG,CAAA;AACf,UAAA,KAAA;AAAA,QACF,KAAK,OAAA;AACH,UAAA,SAAA,CAAU,CAAC,IAAA,EAAA,GAAS,KAAA,EAAO,OAAA,CAAQ,IAAI,CAAA;AACvC,UAAA,GAAA,CAAI,OAAA,CAAQ,IAAA,EAAM;AAChB,YAAA,YAAA,CAAa,KAAK,CAAA;AAAA,UACpB;AACA,UAAA,KAAA;AAAA,QACF,KAAK,OAAA;AACH,UAAA,QAAA,CAAS,YAAA,GAAe,6BAA6B,CAAA;AACrD,UAAA,YAAA,CAAa,KAAK,CAAA;AAClB,UAAA,KAAA;AAAA,MACJ;AAAA,IACF,CAAA;AAEA,IAAA,UAAA,CAAW,CAAA,EAAA,GAAM;AACf,MAAA,YAAA,CAAa,IAAI,CAAA;AACjB,MAAA,WAAA,CAAY,EAAE,CAAA;AAAA,IAChB,CAAA,EAAG,CAAC,CAAA;AACJ,IAAA,MAAA,CAAO,WAAA,CAAY;AAAA,MACjB,IAAA,EAAM,MAAA;AAAA,MACN,OAAA,EAAS,EAAE,SAAA,EAAW,SAAS;AAAA,IACjC,CAAC,CAAA;AAED,IAAA,OAAO,CAAA,EAAA,GAAM;AACX,MAAA,MAAA,CAAO,SAAA,CAAU,CAAA;AAAA,IACnB,CAAA;AAAA,EACF,CAAA,EAAG,CAAC,OAAA,CAAQ,SAAA,EAAW,SAAA,EAAW,QAAQ,CAAC,CAAA;AAE3C,EAAA,MAAM,SAAA,EAAW,gCAAA,CAAa,MAAA,EAAA,GAAmB;AAC/C,IAAA,GAAA,CAAI,CAAC,SAAA,CAAU,OAAA,EAAS;AACtB,MAAA,QAAA,CAAS,wBAAwB,CAAA
;AACjC,MAAA,MAAA;AAAA,IACF;AAEA,IAAA,SAAA,CAAU,EAAE,CAAA;AACZ,IAAA,YAAA,CAAa,IAAI,CAAA;AACjB,IAAA,QAAA,CAAS,IAAI,CAAA;AAEb,IAAA,SAAA,CAAU,OAAA,CAAQ,WAAA,CAAY;AAAA,MAC5B,IAAA,EAAM,UAAA;AAAA,MACN,OAAA,EAAS,EAAE,OAAO;AAAA,IACpB,CAAC,CAAA;AAAA,EACH,CAAA,EAAG,CAAC,CAAC,CAAA;AAEL,EAAA,OAAO;AAAA,IACL,MAAA;AAAA,IACA,SAAA;AAAA,IACA,QAAA;AAAA,IACA,KAAA;AAAA,IACA;AAAA,EACF,CAAA;AACF;ADGA;AACA;AACE;AACA;AACA;AACF,0HAAC","file":"/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/chunk-HQ7V4B42.js","sourcesContent":[null,"'use client';\n\nimport { useEffect, useState, useCallback, useRef } from 'react';\nimport { useMediaPipeContext } from './index';\n\n/**\n * The Web Worker logic for MediaPipe GenAI.\n * This is stringified so it can be easily initialized as a Blob URL if the file-based worker fails.\n */\nconst workerScript = `\nimport { LlmInference, FilesetResolver } from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai';\n\nlet llmInference = null;\n\nasync function checkGpuSupport() {\n if (!('gpu' in navigator)) {\n throw new Error('WebGPU is not supported in this browser.');\n }\n const gpu = navigator.gpu;\n const adapter = await gpu.requestAdapter();\n if (!adapter) {\n throw new Error('No appropriate GPU adapter found.');\n }\n}\n\nasync function initInference(modelPath, wasmPath) {\n try {\n await checkGpuSupport();\n const genai = await FilesetResolver.forGenAiTasks(wasmPath);\n llmInference = await LlmInference.createFromOptions(genai, {\n baseOptions: { modelAssetPath: modelPath },\n });\n self.postMessage({ type: 'INIT_COMPLETE' });\n } catch (error) {\n self.postMessage({ type: 'ERROR', error: error.message || 'Unknown error during initialization' });\n }\n}\n\nself.onmessage = async (event) => {\n const { type, payload } = event.data;\n\n if (type === 'INIT') {\n const { modelPath, wasmPath } = payload;\n await initInference(modelPath, wasmPath);\n }\n\n if (type === 'GENERATE') {\n if (!llmInference) {\n self.postMessage({ type: 'ERROR', error: 'LLM Inference not initialized. Please ensure the model is loaded and WebGPU is supported.' 
});\n return;\n }\n\n try {\n const { prompt } = payload;\n llmInference.generateResponse(prompt, (partialText, done) => {\n self.postMessage({\n type: 'CHUNK',\n payload: { text: partialText, done }\n });\n });\n } catch (error) {\n self.postMessage({ type: 'ERROR', error: error.message || 'Error generating response' });\n }\n }\n};\n`;\n\nexport interface UseLlmOptions {\n modelPath?: string;\n wasmPath?: string;\n}\n\nexport function useLlm(options: UseLlmOptions = {}) {\n const context = useMediaPipeContext();\n const [output, setOutput] = useState('');\n const [isLoading, setIsLoading] = useState(false);\n const [error, setError] = useState<string | null>(null);\n const [progress, setProgress] = useState(0);\n\n const workerRef = useRef<Worker | null>(null);\n\n // Use values from props if provided, otherwise fallback to context\n const modelPath = options.modelPath || context.modelPath;\n const wasmPath = options.wasmPath || context.wasmPath || 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/wasm';\n\n useEffect(() => {\n if (!context.isBrowser || !modelPath) return;\n\n // Early check for WebGPU support in the UI thread too\n if (!('gpu' in navigator)) {\n setTimeout(() => setError('WebGPU is not supported in this browser.'), 0);\n return;\n }\n\n let worker: Worker;\n\n try {\n // Attempt to load from the separate worker file (Vite/Next.js/Webpack friendly)\n worker = new Worker(new URL('./genai.worker', import.meta.url), {\n type: 'module',\n name: 'mediapipe-genai-worker'\n });\n } catch (_e) {\n // Fallback to Blob-based worker if relative path fails\n console.warn('MediaPipe React: Falling back to Blob-based GenAI worker');\n const blob = new Blob([workerScript], { type: 'application/javascript' });\n worker = new Worker(URL.createObjectURL(blob));\n }\n\n workerRef.current = worker;\n\n worker.onmessage = (event) => {\n const { type, payload, error: workerError } = event.data;\n\n switch (type) {\n case 'INIT_COMPLETE':\n setIsLoading(false);\n setProgress(100);\n break;\n case 'CHUNK':\n setOutput((prev) => prev + payload.text);\n if (payload.done) {\n setIsLoading(false);\n }\n break;\n case 'ERROR':\n setError(workerError || 'Worker encountered an error');\n setIsLoading(false);\n break;\n }\n };\n\n setTimeout(() => {\n setIsLoading(true);\n setProgress(10); // Initial progress\n }, 0);\n worker.postMessage({\n type: 'INIT',\n payload: { modelPath, wasmPath },\n });\n\n return () => {\n worker.terminate();\n };\n }, [context.isBrowser, modelPath, wasmPath]);\n\n const generate = useCallback((prompt: string) => {\n if (!workerRef.current) {\n setError('Worker not initialized');\n return;\n }\n\n setOutput('');\n setIsLoading(true);\n setError(null);\n\n workerRef.current.postMessage({\n type: 'GENERATE',\n payload: { prompt },\n });\n }, []);\n\n return {\n output,\n isLoading,\n progress,\n error,\n generate,\n };\n}\n","'use client';\n\nimport React, { createContext, useContext, useMemo } from 'react';\nimport { isBrowser } from './utils';\n\nexport * from './genai';\nexport * from './vision';\nexport * from './audio';\n\nexport interface MediaPipeContextType {\n wasmPath?: string;\n modelPath?: string;\n isBrowser: boolean;\n}\n\nconst MediaPipeContext = createContext<MediaPipeContextType | null>(null);\n\nexport interface MediaPipeProviderProps {\n children: React.ReactNode;\n wasmPath?: string;\n modelPath?: string;\n}\n\nexport const MediaPipeProvider: React.FC<MediaPipeProviderProps> = ({\n children,\n wasmPath,\n modelPath,\n}) => {\n const value = useMemo(\n 
() => ({\n wasmPath,\n modelPath,\n isBrowser,\n }),\n [wasmPath, modelPath]\n );\n\n return (\n <MediaPipeContext.Provider value= { value } >\n { children }\n </MediaPipeContext.Provider>\n );\n};\n\nexport const useMediaPipeContext = () => {\n const context = useContext(MediaPipeContext);\n if (!context) {\n throw new Error('useMediaPipeContext must be used within a MediaPipeProvider');\n }\n return context;\n};\n\n","export const isBrowser = typeof window !== 'undefined';\n"]}
package/dist/chunk-LMIUBEL2.mjs ADDED
@@ -0,0 +1,9 @@
1
+ // src/audio/index.ts
2
+ var useAudio = () => {
3
+ return "Audio implementation";
4
+ };
5
+
6
+ export {
7
+ useAudio
8
+ };
9
+ //# sourceMappingURL=chunk-LMIUBEL2.mjs.map
package/dist/chunk-LMIUBEL2.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/audio/index.ts"],"sourcesContent":["export const useAudio = () => {\n return \"Audio implementation\";\n};\n"],"mappings":";AAAO,IAAM,WAAW,MAAM;AAC1B,SAAO;AACX;","names":[]}
package/dist/chunk-SUBDHSBP.js ADDED
@@ -0,0 +1,9 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});// src/vision/index.ts
2
+ var useVision = () => {
3
+ return "Vision implementation";
4
+ };
5
+
6
+
7
+
8
+ exports.useVision = useVision;
9
+ //# sourceMappingURL=chunk-SUBDHSBP.js.map
package/dist/chunk-SUBDHSBP.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/chunk-SUBDHSBP.js","../src/vision/index.ts"],"names":[],"mappings":"AAAA;ACAO,IAAM,UAAA,EAAY,CAAA,EAAA,GAAM;AAC3B,EAAA,OAAO,uBAAA;AACX,CAAA;ADEA;AACA;AACE;AACF,8BAAC","file":"/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/chunk-SUBDHSBP.js","sourcesContent":[null,"export const useVision = () => {\n return \"Vision implementation\";\n};\n"]}
package/dist/chunk-Z5PADYRT.mjs ADDED
@@ -0,0 +1,179 @@
1
+ // src/genai.ts
2
+ import { useEffect, useState, useCallback, useRef } from "react";
3
+
4
+ // src/index.tsx
5
+ import { createContext, useContext, useMemo } from "react";
6
+
7
+ // src/utils.ts
8
+ var isBrowser = typeof window !== "undefined";
9
+
10
+ // src/index.tsx
11
+ import { jsx } from "react/jsx-runtime";
12
+ var MediaPipeContext = createContext(null);
13
+ var MediaPipeProvider = ({
14
+ children,
15
+ wasmPath,
16
+ modelPath
17
+ }) => {
18
+ const value = useMemo(
19
+ () => ({
20
+ wasmPath,
21
+ modelPath,
22
+ isBrowser
23
+ }),
24
+ [wasmPath, modelPath]
25
+ );
26
+ return /* @__PURE__ */ jsx(MediaPipeContext.Provider, { value, children });
27
+ };
28
+ var useMediaPipeContext = () => {
29
+ const context = useContext(MediaPipeContext);
30
+ if (!context) {
31
+ throw new Error("useMediaPipeContext must be used within a MediaPipeProvider");
32
+ }
33
+ return context;
34
+ };
35
+
36
+ // src/genai.ts
37
+ var workerScript = `
38
+ import { LlmInference, FilesetResolver } from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai';
39
+
40
+ let llmInference = null;
41
+
42
+ async function checkGpuSupport() {
43
+ if (!('gpu' in navigator)) {
44
+ throw new Error('WebGPU is not supported in this browser.');
45
+ }
46
+ const gpu = navigator.gpu;
47
+ const adapter = await gpu.requestAdapter();
48
+ if (!adapter) {
49
+ throw new Error('No appropriate GPU adapter found.');
50
+ }
51
+ }
52
+
53
+ async function initInference(modelPath, wasmPath) {
54
+ try {
55
+ await checkGpuSupport();
56
+ const genai = await FilesetResolver.forGenAiTasks(wasmPath);
57
+ llmInference = await LlmInference.createFromOptions(genai, {
58
+ baseOptions: { modelAssetPath: modelPath },
59
+ });
60
+ self.postMessage({ type: 'INIT_COMPLETE' });
61
+ } catch (error) {
62
+ self.postMessage({ type: 'ERROR', error: error.message || 'Unknown error during initialization' });
63
+ }
64
+ }
65
+
66
+ self.onmessage = async (event) => {
67
+ const { type, payload } = event.data;
68
+
69
+ if (type === 'INIT') {
70
+ const { modelPath, wasmPath } = payload;
71
+ await initInference(modelPath, wasmPath);
72
+ }
73
+
74
+ if (type === 'GENERATE') {
75
+ if (!llmInference) {
76
+ self.postMessage({ type: 'ERROR', error: 'LLM Inference not initialized. Please ensure the model is loaded and WebGPU is supported.' });
77
+ return;
78
+ }
79
+
80
+ try {
81
+ const { prompt } = payload;
82
+ llmInference.generateResponse(prompt, (partialText, done) => {
83
+ self.postMessage({
84
+ type: 'CHUNK',
85
+ payload: { text: partialText, done }
86
+ });
87
+ });
88
+ } catch (error) {
89
+ self.postMessage({ type: 'ERROR', error: error.message || 'Error generating response' });
90
+ }
91
+ }
92
+ };
93
+ `;
94
+ function useLlm(options = {}) {
95
+ const context = useMediaPipeContext();
96
+ const [output, setOutput] = useState("");
97
+ const [isLoading, setIsLoading] = useState(false);
98
+ const [error, setError] = useState(null);
99
+ const [progress, setProgress] = useState(0);
100
+ const workerRef = useRef(null);
101
+ const modelPath = options.modelPath || context.modelPath;
102
+ const wasmPath = options.wasmPath || context.wasmPath || "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/wasm";
103
+ useEffect(() => {
104
+ if (!context.isBrowser || !modelPath) return;
105
+ if (!("gpu" in navigator)) {
106
+ setTimeout(() => setError("WebGPU is not supported in this browser."), 0);
107
+ return;
108
+ }
109
+ let worker;
110
+ try {
111
+ worker = new Worker(new URL("./genai.worker", import.meta.url), {
112
+ type: "module",
113
+ name: "mediapipe-genai-worker"
114
+ });
115
+ } catch (_e) {
116
+ console.warn("MediaPipe React: Falling back to Blob-based GenAI worker");
117
+ const blob = new Blob([workerScript], { type: "application/javascript" });
118
+ worker = new Worker(URL.createObjectURL(blob));
119
+ }
120
+ workerRef.current = worker;
121
+ worker.onmessage = (event) => {
122
+ const { type, payload, error: workerError } = event.data;
123
+ switch (type) {
124
+ case "INIT_COMPLETE":
125
+ setIsLoading(false);
126
+ setProgress(100);
127
+ break;
128
+ case "CHUNK":
129
+ setOutput((prev) => prev + payload.text);
130
+ if (payload.done) {
131
+ setIsLoading(false);
132
+ }
133
+ break;
134
+ case "ERROR":
135
+ setError(workerError || "Worker encountered an error");
136
+ setIsLoading(false);
137
+ break;
138
+ }
139
+ };
140
+ setTimeout(() => {
141
+ setIsLoading(true);
142
+ setProgress(10);
143
+ }, 0);
144
+ worker.postMessage({
145
+ type: "INIT",
146
+ payload: { modelPath, wasmPath }
147
+ });
148
+ return () => {
149
+ worker.terminate();
150
+ };
151
+ }, [context.isBrowser, modelPath, wasmPath]);
152
+ const generate = useCallback((prompt) => {
153
+ if (!workerRef.current) {
154
+ setError("Worker not initialized");
155
+ return;
156
+ }
157
+ setOutput("");
158
+ setIsLoading(true);
159
+ setError(null);
160
+ workerRef.current.postMessage({
161
+ type: "GENERATE",
162
+ payload: { prompt }
163
+ });
164
+ }, []);
165
+ return {
166
+ output,
167
+ isLoading,
168
+ progress,
169
+ error,
170
+ generate
171
+ };
172
+ }
173
+
174
+ export {
175
+ useLlm,
176
+ MediaPipeProvider,
177
+ useMediaPipeContext
178
+ };
179
+ //# sourceMappingURL=chunk-Z5PADYRT.mjs.map
package/dist/chunk-Z5PADYRT.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/genai.ts","../src/index.tsx","../src/utils.ts"],"sourcesContent":["'use client';\n\nimport { useEffect, useState, useCallback, useRef } from 'react';\nimport { useMediaPipeContext } from './index';\n\n/**\n * The Web Worker logic for MediaPipe GenAI.\n * This is stringified so it can be easily initialized as a Blob URL if the file-based worker fails.\n */\nconst workerScript = `\nimport { LlmInference, FilesetResolver } from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai';\n\nlet llmInference = null;\n\nasync function checkGpuSupport() {\n if (!('gpu' in navigator)) {\n throw new Error('WebGPU is not supported in this browser.');\n }\n const gpu = navigator.gpu;\n const adapter = await gpu.requestAdapter();\n if (!adapter) {\n throw new Error('No appropriate GPU adapter found.');\n }\n}\n\nasync function initInference(modelPath, wasmPath) {\n try {\n await checkGpuSupport();\n const genai = await FilesetResolver.forGenAiTasks(wasmPath);\n llmInference = await LlmInference.createFromOptions(genai, {\n baseOptions: { modelAssetPath: modelPath },\n });\n self.postMessage({ type: 'INIT_COMPLETE' });\n } catch (error) {\n self.postMessage({ type: 'ERROR', error: error.message || 'Unknown error during initialization' });\n }\n}\n\nself.onmessage = async (event) => {\n const { type, payload } = event.data;\n\n if (type === 'INIT') {\n const { modelPath, wasmPath } = payload;\n await initInference(modelPath, wasmPath);\n }\n\n if (type === 'GENERATE') {\n if (!llmInference) {\n self.postMessage({ type: 'ERROR', error: 'LLM Inference not initialized. Please ensure the model is loaded and WebGPU is supported.' });\n return;\n }\n\n try {\n const { prompt } = payload;\n llmInference.generateResponse(prompt, (partialText, done) => {\n self.postMessage({\n type: 'CHUNK',\n payload: { text: partialText, done }\n });\n });\n } catch (error) {\n self.postMessage({ type: 'ERROR', error: error.message || 'Error generating response' });\n }\n }\n};\n`;\n\nexport interface UseLlmOptions {\n modelPath?: string;\n wasmPath?: string;\n}\n\nexport function useLlm(options: UseLlmOptions = {}) {\n const context = useMediaPipeContext();\n const [output, setOutput] = useState('');\n const [isLoading, setIsLoading] = useState(false);\n const [error, setError] = useState<string | null>(null);\n const [progress, setProgress] = useState(0);\n\n const workerRef = useRef<Worker | null>(null);\n\n // Use values from props if provided, otherwise fallback to context\n const modelPath = options.modelPath || context.modelPath;\n const wasmPath = options.wasmPath || context.wasmPath || 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/wasm';\n\n useEffect(() => {\n if (!context.isBrowser || !modelPath) return;\n\n // Early check for WebGPU support in the UI thread too\n if (!('gpu' in navigator)) {\n setTimeout(() => setError('WebGPU is not supported in this browser.'), 0);\n return;\n }\n\n let worker: Worker;\n\n try {\n // Attempt to load from the separate worker file (Vite/Next.js/Webpack friendly)\n worker = new Worker(new URL('./genai.worker', import.meta.url), {\n type: 'module',\n name: 'mediapipe-genai-worker'\n });\n } catch (_e) {\n // Fallback to Blob-based worker if relative path fails\n console.warn('MediaPipe React: Falling back to Blob-based GenAI worker');\n const blob = new Blob([workerScript], { type: 'application/javascript' });\n worker = new Worker(URL.createObjectURL(blob));\n }\n\n workerRef.current = worker;\n\n worker.onmessage = (event) => {\n const { type, 
payload, error: workerError } = event.data;\n\n switch (type) {\n case 'INIT_COMPLETE':\n setIsLoading(false);\n setProgress(100);\n break;\n case 'CHUNK':\n setOutput((prev) => prev + payload.text);\n if (payload.done) {\n setIsLoading(false);\n }\n break;\n case 'ERROR':\n setError(workerError || 'Worker encountered an error');\n setIsLoading(false);\n break;\n }\n };\n\n setTimeout(() => {\n setIsLoading(true);\n setProgress(10); // Initial progress\n }, 0);\n worker.postMessage({\n type: 'INIT',\n payload: { modelPath, wasmPath },\n });\n\n return () => {\n worker.terminate();\n };\n }, [context.isBrowser, modelPath, wasmPath]);\n\n const generate = useCallback((prompt: string) => {\n if (!workerRef.current) {\n setError('Worker not initialized');\n return;\n }\n\n setOutput('');\n setIsLoading(true);\n setError(null);\n\n workerRef.current.postMessage({\n type: 'GENERATE',\n payload: { prompt },\n });\n }, []);\n\n return {\n output,\n isLoading,\n progress,\n error,\n generate,\n };\n}\n","'use client';\n\nimport React, { createContext, useContext, useMemo } from 'react';\nimport { isBrowser } from './utils';\n\nexport * from './genai';\nexport * from './vision';\nexport * from './audio';\n\nexport interface MediaPipeContextType {\n wasmPath?: string;\n modelPath?: string;\n isBrowser: boolean;\n}\n\nconst MediaPipeContext = createContext<MediaPipeContextType | null>(null);\n\nexport interface MediaPipeProviderProps {\n children: React.ReactNode;\n wasmPath?: string;\n modelPath?: string;\n}\n\nexport const MediaPipeProvider: React.FC<MediaPipeProviderProps> = ({\n children,\n wasmPath,\n modelPath,\n}) => {\n const value = useMemo(\n () => ({\n wasmPath,\n modelPath,\n isBrowser,\n }),\n [wasmPath, modelPath]\n );\n\n return (\n <MediaPipeContext.Provider value= { value } >\n { children }\n </MediaPipeContext.Provider>\n );\n};\n\nexport const useMediaPipeContext = () => {\n const context = useContext(MediaPipeContext);\n if (!context) {\n throw new Error('useMediaPipeContext must be used within a MediaPipeProvider');\n }\n return context;\n};\n\n","export const isBrowser = typeof window !== 
'undefined';\n"],"mappings":";AAEA,SAAS,WAAW,UAAU,aAAa,cAAc;;;ACAzD,SAAgB,eAAe,YAAY,eAAe;;;ACFnD,IAAM,YAAY,OAAO,WAAW;;;ADsCnC;AAvBR,IAAM,mBAAmB,cAA2C,IAAI;AAQjE,IAAM,oBAAsD,CAAC;AAAA,EAChE;AAAA,EACA;AAAA,EACA;AACJ,MAAM;AACF,QAAM,QAAQ;AAAA,IACV,OAAO;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,IACJ;AAAA,IACA,CAAC,UAAU,SAAS;AAAA,EACxB;AAEA,SACI,oBAAC,iBAAiB,UAAjB,EAA0B,OACzB,UACF;AAER;AAEO,IAAM,sBAAsB,MAAM;AACrC,QAAM,UAAU,WAAW,gBAAgB;AAC3C,MAAI,CAAC,SAAS;AACV,UAAM,IAAI,MAAM,6DAA6D;AAAA,EACjF;AACA,SAAO;AACX;;;ADzCA,IAAM,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+Dd,SAAS,OAAO,UAAyB,CAAC,GAAG;AAClD,QAAM,UAAU,oBAAoB;AACpC,QAAM,CAAC,QAAQ,SAAS,IAAI,SAAS,EAAE;AACvC,QAAM,CAAC,WAAW,YAAY,IAAI,SAAS,KAAK;AAChD,QAAM,CAAC,OAAO,QAAQ,IAAI,SAAwB,IAAI;AACtD,QAAM,CAAC,UAAU,WAAW,IAAI,SAAS,CAAC;AAE1C,QAAM,YAAY,OAAsB,IAAI;AAG5C,QAAM,YAAY,QAAQ,aAAa,QAAQ;AAC/C,QAAM,WAAW,QAAQ,YAAY,QAAQ,YAAY;AAEzD,YAAU,MAAM;AACd,QAAI,CAAC,QAAQ,aAAa,CAAC,UAAW;AAGtC,QAAI,EAAE,SAAS,YAAY;AACzB,iBAAW,MAAM,SAAS,0CAA0C,GAAG,CAAC;AACxE;AAAA,IACF;AAEA,QAAI;AAEJ,QAAI;AAEF,eAAS,IAAI,OAAO,IAAI,IAAI,kBAAkB,YAAY,GAAG,GAAG;AAAA,QAC9D,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH,SAAS,IAAI;AAEX,cAAQ,KAAK,0DAA0D;AACvE,YAAM,OAAO,IAAI,KAAK,CAAC,YAAY,GAAG,EAAE,MAAM,yBAAyB,CAAC;AACxE,eAAS,IAAI,OAAO,IAAI,gBAAgB,IAAI,CAAC;AAAA,IAC/C;AAEA,cAAU,UAAU;AAEpB,WAAO,YAAY,CAAC,UAAU;AAC5B,YAAM,EAAE,MAAM,SAAS,OAAO,YAAY,IAAI,MAAM;AAEpD,cAAQ,MAAM;AAAA,QACZ,KAAK;AACH,uBAAa,KAAK;AAClB,sBAAY,GAAG;AACf;AAAA,QACF,KAAK;AACH,oBAAU,CAAC,SAAS,OAAO,QAAQ,IAAI;AACvC,cAAI,QAAQ,MAAM;AAChB,yBAAa,KAAK;AAAA,UACpB;AACA;AAAA,QACF,KAAK;AACH,mBAAS,eAAe,6BAA6B;AACrD,uBAAa,KAAK;AAClB;AAAA,MACJ;AAAA,IACF;AAEA,eAAW,MAAM;AACf,mBAAa,IAAI;AACjB,kBAAY,EAAE;AAAA,IAChB,GAAG,CAAC;AACJ,WAAO,YAAY;AAAA,MACjB,MAAM;AAAA,MACN,SAAS,EAAE,WAAW,SAAS;AAAA,IACjC,CAAC;AAED,WAAO,MAAM;AACX,aAAO,UAAU;AAAA,IACnB;AAAA,EACF,GAAG,CAAC,QAAQ,WAAW,WAAW,QAAQ,CAAC;AAE3C,QAAM,WAAW,YAAY,CAAC,WAAmB;AAC/C,QAAI,CAAC,UAAU,SAAS;AACtB,eAAS,wBAAwB;AACjC;AAAA,IACF;AAEA,cAAU,EAAE;AACZ,iBAAa,IAAI;AACjB,aAAS,IAAI;AAEb,cAAU,QAAQ,YAAY;AAAA,MAC5B,MAAM;AAAA,MACN,SAAS,EAAE,OAAO;AAAA,IACpB,CAAC;AAAA,EACH,GAAG,CAAC,CAAC;AAEL,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
package/dist/genai.d.mts ADDED
@@ -0,0 +1,13 @@
1
+ interface UseLlmOptions {
2
+ modelPath?: string;
3
+ wasmPath?: string;
4
+ }
5
+ declare function useLlm(options?: UseLlmOptions): {
6
+ output: string;
7
+ isLoading: boolean;
8
+ progress: number;
9
+ error: string | null;
10
+ generate: (prompt: string) => void;
11
+ };
12
+
13
+ export { type UseLlmOptions, useLlm };
package/dist/genai.d.ts ADDED
@@ -0,0 +1,13 @@
1
+ interface UseLlmOptions {
2
+ modelPath?: string;
3
+ wasmPath?: string;
4
+ }
5
+ declare function useLlm(options?: UseLlmOptions): {
6
+ output: string;
7
+ isLoading: boolean;
8
+ progress: number;
9
+ error: string | null;
10
+ generate: (prompt: string) => void;
11
+ };
12
+
13
+ export { type UseLlmOptions, useLlm };
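Taken together with the exports map in package.json further down, the useLlm typings above imply consumer code along the lines of the sketch below. This is a minimal sketch, not part of the package: the component name, model path, and prompt are placeholders, and the component must be rendered inside a MediaPipeProvider (declared in index.d.ts) for useMediaPipeContext to resolve.

import { useLlm } from '@ismail-kattakath/mediapipe-react/genai';

function PromptBox() {
  // Per-hook options override the provider-level defaults (options.modelPath || context.modelPath).
  const { output, isLoading, progress, error, generate } = useLlm({
    modelPath: '/models/gemma-2b-it-gpu-int4.bin', // hypothetical model location
  });

  if (error) return <p>LLM error: {error}</p>;
  return (
    <div>
      <button disabled={isLoading} onClick={() => generate('Write a haiku about WebGPU.')}>
        Generate
      </button>
      <p>Model loading progress: {progress}%</p>
      <pre>{output}</pre>
    </div>
  );
}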
package/dist/genai.js ADDED
@@ -0,0 +1,10 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});"use client";
2
+
3
+
4
+ var _chunkHQ7V4B42js = require('./chunk-HQ7V4B42.js');
5
+ require('./chunk-SUBDHSBP.js');
6
+ require('./chunk-G5GKFZAW.js');
7
+
8
+
9
+ exports.useLlm = _chunkHQ7V4B42js.useLlm;
10
+ //# sourceMappingURL=genai.js.map
package/dist/genai.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/genai.js"],"names":[],"mappings":"AAAA,qFAAY;AACZ;AACE;AACF,sDAA4B;AAC5B,+BAA4B;AAC5B,+BAA4B;AAC5B;AACE;AACF,yCAAC","file":"/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/genai.js"}
package/dist/genai.mjs ADDED
@@ -0,0 +1,10 @@
1
+ "use client";
2
+ import {
3
+ useLlm
4
+ } from "./chunk-Z5PADYRT.mjs";
5
+ import "./chunk-HDKHZ5MY.mjs";
6
+ import "./chunk-LMIUBEL2.mjs";
7
+ export {
8
+ useLlm
9
+ };
10
+ //# sourceMappingURL=genai.mjs.map
package/dist/genai.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/dist/genai.worker.d.mts ADDED
@@ -0,0 +1,2 @@
1
+
2
+ export { }
package/dist/genai.worker.d.ts ADDED
@@ -0,0 +1,2 @@
1
+
2
+ export { }
package/dist/genai.worker.js ADDED
@@ -0,0 +1,52 @@
1
+ "use strict";// src/genai.worker.ts
2
+ var _tasksgenai = require('@mediapipe/tasks-genai');
3
+ var llmInference = null;
4
+ async function checkGpuSupport() {
5
+ if (!("gpu" in navigator)) {
6
+ throw new Error("WebGPU is not supported in this browser.");
7
+ }
8
+ const gpu = navigator.gpu;
9
+ const adapter = await gpu.requestAdapter();
10
+ if (!adapter) {
11
+ throw new Error("No appropriate GPU adapter found.");
12
+ }
13
+ }
14
+ async function initInference(modelPath, wasmPath) {
15
+ try {
16
+ await checkGpuSupport();
17
+ const genai = await _tasksgenai.FilesetResolver.forGenAiTasks(wasmPath);
18
+ llmInference = await _tasksgenai.LlmInference.createFromOptions(genai, {
19
+ baseOptions: { modelAssetPath: modelPath }
20
+ });
21
+ self.postMessage({ type: "INIT_COMPLETE" });
22
+ } catch (error) {
23
+ const message = error instanceof Error ? error.message : "Unknown error during initialization";
24
+ self.postMessage({ type: "ERROR", error: message });
25
+ }
26
+ }
27
+ self.onmessage = async (event) => {
28
+ const { type, payload } = event.data;
29
+ if (type === "INIT") {
30
+ const { modelPath, wasmPath } = payload;
31
+ await initInference(modelPath, wasmPath);
32
+ }
33
+ if (type === "GENERATE") {
34
+ if (!llmInference) {
35
+ self.postMessage({ type: "ERROR", error: "LLM Inference not initialized. Please ensure the model is loaded and WebGPU is supported." });
36
+ return;
37
+ }
38
+ try {
39
+ const { prompt } = payload;
40
+ llmInference.generateResponse(prompt, (partialText, done) => {
41
+ self.postMessage({
42
+ type: "CHUNK",
43
+ payload: { text: partialText, done }
44
+ });
45
+ });
46
+ } catch (error) {
47
+ const message = error instanceof Error ? error.message : "Error generating response";
48
+ self.postMessage({ type: "ERROR", error: message });
49
+ }
50
+ }
51
+ };
52
+ //# sourceMappingURL=genai.worker.js.map
package/dist/genai.worker.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/genai.worker.js","../src/genai.worker.ts"],"names":[],"mappings":"AAAA;ACAA,oDAA8C;AAE9C,IAAI,aAAA,EAAoC,IAAA;AAExC,MAAA,SAAe,eAAA,CAAA,EAAkB;AAC7B,EAAA,GAAA,CAAI,CAAA,CAAE,MAAA,GAAS,SAAA,CAAA,EAAY;AACvB,IAAA,MAAM,IAAI,KAAA,CAAM,0CAA0C,CAAA;AAAA,EAC9D;AACA,EAAA,MAAM,IAAA,EAAO,SAAA,CAAsC,GAAA;AACnD,EAAA,MAAM,QAAA,EAAU,MAAM,GAAA,CAAI,cAAA,CAAe,CAAA;AACzC,EAAA,GAAA,CAAI,CAAC,OAAA,EAAS;AACV,IAAA,MAAM,IAAI,KAAA,CAAM,mCAAmC,CAAA;AAAA,EACvD;AACJ;AAEA,MAAA,SAAe,aAAA,CAAc,SAAA,EAAmB,QAAA,EAAkB;AAC9D,EAAA,IAAI;AACA,IAAA,MAAM,eAAA,CAAgB,CAAA;AAEtB,IAAA,MAAM,MAAA,EAAQ,MAAM,2BAAA,CAAgB,aAAA,CAAc,QAAQ,CAAA;AAC1D,IAAA,aAAA,EAAe,MAAM,wBAAA,CAAa,iBAAA,CAAkB,KAAA,EAAO;AAAA,MACvD,WAAA,EAAa,EAAE,cAAA,EAAgB,UAAU;AAAA,IAC7C,CAAC,CAAA;AACD,IAAA,IAAA,CAAK,WAAA,CAAY,EAAE,IAAA,EAAM,gBAAgB,CAAC,CAAA;AAAA,EAC9C,EAAA,MAAA,CAAS,KAAA,EAAgB;AACrB,IAAA,MAAM,QAAA,EAAU,MAAA,WAAiB,MAAA,EAAQ,KAAA,CAAM,QAAA,EAAU,qCAAA;AACzD,IAAA,IAAA,CAAK,WAAA,CAAY,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAO,QAAQ,CAAC,CAAA;AAAA,EACtD;AACJ;AAEA,IAAA,CAAK,UAAA,EAAY,MAAA,CAAO,KAAA,EAAA,GAAU;AAC9B,EAAA,MAAM,EAAE,IAAA,EAAM,QAAQ,EAAA,EAAI,KAAA,CAAM,IAAA;AAEhC,EAAA,GAAA,CAAI,KAAA,IAAS,MAAA,EAAQ;AACjB,IAAA,MAAM,EAAE,SAAA,EAAW,SAAS,EAAA,EAAI,OAAA;AAChC,IAAA,MAAM,aAAA,CAAc,SAAA,EAAW,QAAQ,CAAA;AAAA,EAC3C;AAEA,EAAA,GAAA,CAAI,KAAA,IAAS,UAAA,EAAY;AACrB,IAAA,GAAA,CAAI,CAAC,YAAA,EAAc;AACf,MAAA,IAAA,CAAK,WAAA,CAAY,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAO,4FAA4F,CAAC,CAAA;AACtI,MAAA,MAAA;AAAA,IACJ;AAEA,IAAA,IAAI;AACA,MAAA,MAAM,EAAE,OAAO,EAAA,EAAI,OAAA;AACnB,MAAA,YAAA,CAAa,gBAAA,CAAiB,MAAA,EAAQ,CAAC,WAAA,EAAa,IAAA,EAAA,GAAS;AACzD,QAAA,IAAA,CAAK,WAAA,CAAY;AAAA,UACb,IAAA,EAAM,OAAA;AAAA,UACN,OAAA,EAAS,EAAE,IAAA,EAAM,WAAA,EAAa,KAAK;AAAA,QACvC,CAAC,CAAA;AAAA,MACL,CAAC,CAAA;AAAA,IACL,EAAA,MAAA,CAAS,KAAA,EAAgB;AACrB,MAAA,MAAM,QAAA,EAAU,MAAA,WAAiB,MAAA,EAAQ,KAAA,CAAM,QAAA,EAAU,2BAAA;AACzD,MAAA,IAAA,CAAK,WAAA,CAAY,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAO,QAAQ,CAAC,CAAA;AAAA,IACtD;AAAA,EACJ;AACJ,CAAA","file":"/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/genai.worker.js","sourcesContent":[null,"import { LlmInference, FilesetResolver } from '@mediapipe/tasks-genai';\n\nlet llmInference: LlmInference | null = null;\n\nasync function checkGpuSupport() {\n if (!('gpu' in navigator)) {\n throw new Error('WebGPU is not supported in this browser.');\n }\n const gpu = (navigator as unknown as { gpu: GPU }).gpu;\n const adapter = await gpu.requestAdapter();\n if (!adapter) {\n throw new Error('No appropriate GPU adapter found.');\n }\n}\n\nasync function initInference(modelPath: string, wasmPath: string) {\n try {\n await checkGpuSupport();\n\n const genai = await FilesetResolver.forGenAiTasks(wasmPath);\n llmInference = await LlmInference.createFromOptions(genai, {\n baseOptions: { modelAssetPath: modelPath },\n });\n self.postMessage({ type: 'INIT_COMPLETE' });\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : 'Unknown error during initialization';\n self.postMessage({ type: 'ERROR', error: message });\n }\n}\n\nself.onmessage = async (event) => {\n const { type, payload } = event.data;\n\n if (type === 'INIT') {\n const { modelPath, wasmPath } = payload;\n await initInference(modelPath, wasmPath);\n }\n\n if (type === 'GENERATE') {\n if (!llmInference) {\n self.postMessage({ type: 'ERROR', error: 'LLM Inference not initialized. Please ensure the model is loaded and WebGPU is supported.' 
});\n return;\n }\n\n try {\n const { prompt } = payload;\n llmInference.generateResponse(prompt, (partialText, done) => {\n self.postMessage({\n type: 'CHUNK',\n payload: { text: partialText, done }\n });\n });\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : 'Error generating response';\n self.postMessage({ type: 'ERROR', error: message });\n }\n }\n};\n"]}
package/dist/genai.worker.mjs ADDED
@@ -0,0 +1,52 @@
1
+ // src/genai.worker.ts
2
+ import { LlmInference, FilesetResolver } from "@mediapipe/tasks-genai";
3
+ var llmInference = null;
4
+ async function checkGpuSupport() {
5
+ if (!("gpu" in navigator)) {
6
+ throw new Error("WebGPU is not supported in this browser.");
7
+ }
8
+ const gpu = navigator.gpu;
9
+ const adapter = await gpu.requestAdapter();
10
+ if (!adapter) {
11
+ throw new Error("No appropriate GPU adapter found.");
12
+ }
13
+ }
14
+ async function initInference(modelPath, wasmPath) {
15
+ try {
16
+ await checkGpuSupport();
17
+ const genai = await FilesetResolver.forGenAiTasks(wasmPath);
18
+ llmInference = await LlmInference.createFromOptions(genai, {
19
+ baseOptions: { modelAssetPath: modelPath }
20
+ });
21
+ self.postMessage({ type: "INIT_COMPLETE" });
22
+ } catch (error) {
23
+ const message = error instanceof Error ? error.message : "Unknown error during initialization";
24
+ self.postMessage({ type: "ERROR", error: message });
25
+ }
26
+ }
27
+ self.onmessage = async (event) => {
28
+ const { type, payload } = event.data;
29
+ if (type === "INIT") {
30
+ const { modelPath, wasmPath } = payload;
31
+ await initInference(modelPath, wasmPath);
32
+ }
33
+ if (type === "GENERATE") {
34
+ if (!llmInference) {
35
+ self.postMessage({ type: "ERROR", error: "LLM Inference not initialized. Please ensure the model is loaded and WebGPU is supported." });
36
+ return;
37
+ }
38
+ try {
39
+ const { prompt } = payload;
40
+ llmInference.generateResponse(prompt, (partialText, done) => {
41
+ self.postMessage({
42
+ type: "CHUNK",
43
+ payload: { text: partialText, done }
44
+ });
45
+ });
46
+ } catch (error) {
47
+ const message = error instanceof Error ? error.message : "Error generating response";
48
+ self.postMessage({ type: "ERROR", error: message });
49
+ }
50
+ }
51
+ };
52
+ //# sourceMappingURL=genai.worker.mjs.map
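The worker above implements a small message protocol: INIT with { modelPath, wasmPath } answers with INIT_COMPLETE or ERROR, and GENERATE with { prompt } streams CHUNK messages carrying { text, done }. A rough sketch of driving it directly from the main thread, outside the useLlm hook, follows; the worker URL depends on how your bundler emits dist/genai.worker.mjs, and the model path is a placeholder.

// Bundler-resolved URL to the built worker module; the exact form is bundler-specific.
const worker = new Worker(new URL('./genai.worker.mjs', import.meta.url), { type: 'module' });

worker.onmessage = (event: MessageEvent) => {
  const { type, payload, error } = event.data;
  if (type === 'INIT_COMPLETE') {
    worker.postMessage({ type: 'GENERATE', payload: { prompt: 'Hello from the main thread' } });
  } else if (type === 'CHUNK') {
    console.log(payload.text, payload.done ? '(done)' : '');
  } else if (type === 'ERROR') {
    console.error('GenAI worker error:', error);
  }
};

worker.postMessage({
  type: 'INIT',
  payload: {
    modelPath: '/models/gemma-2b-it-gpu-int4.bin', // hypothetical model location
    wasmPath: 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/wasm',
  },
});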
package/dist/genai.worker.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/genai.worker.ts"],"sourcesContent":["import { LlmInference, FilesetResolver } from '@mediapipe/tasks-genai';\n\nlet llmInference: LlmInference | null = null;\n\nasync function checkGpuSupport() {\n if (!('gpu' in navigator)) {\n throw new Error('WebGPU is not supported in this browser.');\n }\n const gpu = (navigator as unknown as { gpu: GPU }).gpu;\n const adapter = await gpu.requestAdapter();\n if (!adapter) {\n throw new Error('No appropriate GPU adapter found.');\n }\n}\n\nasync function initInference(modelPath: string, wasmPath: string) {\n try {\n await checkGpuSupport();\n\n const genai = await FilesetResolver.forGenAiTasks(wasmPath);\n llmInference = await LlmInference.createFromOptions(genai, {\n baseOptions: { modelAssetPath: modelPath },\n });\n self.postMessage({ type: 'INIT_COMPLETE' });\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : 'Unknown error during initialization';\n self.postMessage({ type: 'ERROR', error: message });\n }\n}\n\nself.onmessage = async (event) => {\n const { type, payload } = event.data;\n\n if (type === 'INIT') {\n const { modelPath, wasmPath } = payload;\n await initInference(modelPath, wasmPath);\n }\n\n if (type === 'GENERATE') {\n if (!llmInference) {\n self.postMessage({ type: 'ERROR', error: 'LLM Inference not initialized. Please ensure the model is loaded and WebGPU is supported.' });\n return;\n }\n\n try {\n const { prompt } = payload;\n llmInference.generateResponse(prompt, (partialText, done) => {\n self.postMessage({\n type: 'CHUNK',\n payload: { text: partialText, done }\n });\n });\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : 'Error generating response';\n self.postMessage({ type: 'ERROR', error: message });\n }\n }\n};\n"],"mappings":";AAAA,SAAS,cAAc,uBAAuB;AAE9C,IAAI,eAAoC;AAExC,eAAe,kBAAkB;AAC7B,MAAI,EAAE,SAAS,YAAY;AACvB,UAAM,IAAI,MAAM,0CAA0C;AAAA,EAC9D;AACA,QAAM,MAAO,UAAsC;AACnD,QAAM,UAAU,MAAM,IAAI,eAAe;AACzC,MAAI,CAAC,SAAS;AACV,UAAM,IAAI,MAAM,mCAAmC;AAAA,EACvD;AACJ;AAEA,eAAe,cAAc,WAAmB,UAAkB;AAC9D,MAAI;AACA,UAAM,gBAAgB;AAEtB,UAAM,QAAQ,MAAM,gBAAgB,cAAc,QAAQ;AAC1D,mBAAe,MAAM,aAAa,kBAAkB,OAAO;AAAA,MACvD,aAAa,EAAE,gBAAgB,UAAU;AAAA,IAC7C,CAAC;AACD,SAAK,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAAA,EAC9C,SAAS,OAAgB;AACrB,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AACzD,SAAK,YAAY,EAAE,MAAM,SAAS,OAAO,QAAQ,CAAC;AAAA,EACtD;AACJ;AAEA,KAAK,YAAY,OAAO,UAAU;AAC9B,QAAM,EAAE,MAAM,QAAQ,IAAI,MAAM;AAEhC,MAAI,SAAS,QAAQ;AACjB,UAAM,EAAE,WAAW,SAAS,IAAI;AAChC,UAAM,cAAc,WAAW,QAAQ;AAAA,EAC3C;AAEA,MAAI,SAAS,YAAY;AACrB,QAAI,CAAC,cAAc;AACf,WAAK,YAAY,EAAE,MAAM,SAAS,OAAO,4FAA4F,CAAC;AACtI;AAAA,IACJ;AAEA,QAAI;AACA,YAAM,EAAE,OAAO,IAAI;AACnB,mBAAa,iBAAiB,QAAQ,CAAC,aAAa,SAAS;AACzD,aAAK,YAAY;AAAA,UACb,MAAM;AAAA,UACN,SAAS,EAAE,MAAM,aAAa,KAAK;AAAA,QACvC,CAAC;AAAA,MACL,CAAC;AAAA,IACL,SAAS,OAAgB;AACrB,YAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AACzD,WAAK,YAAY,EAAE,MAAM,SAAS,OAAO,QAAQ,CAAC;AAAA,IACtD;AAAA,EACJ;AACJ;","names":[]}
package/dist/index.d.mts ADDED
@@ -0,0 +1,19 @@
1
+ import React from 'react';
2
+ export { UseLlmOptions, useLlm } from './genai.mjs';
3
+ export { useVision } from './vision.mjs';
4
+ export { useAudio } from './audio.mjs';
5
+
6
+ interface MediaPipeContextType {
7
+ wasmPath?: string;
8
+ modelPath?: string;
9
+ isBrowser: boolean;
10
+ }
11
+ interface MediaPipeProviderProps {
12
+ children: React.ReactNode;
13
+ wasmPath?: string;
14
+ modelPath?: string;
15
+ }
16
+ declare const MediaPipeProvider: React.FC<MediaPipeProviderProps>;
17
+ declare const useMediaPipeContext: () => MediaPipeContextType;
18
+
19
+ export { type MediaPipeContextType, MediaPipeProvider, type MediaPipeProviderProps, useMediaPipeContext };
package/dist/index.d.ts ADDED
@@ -0,0 +1,19 @@
1
+ import React from 'react';
2
+ export { UseLlmOptions, useLlm } from './genai.js';
3
+ export { useVision } from './vision.js';
4
+ export { useAudio } from './audio.js';
5
+
6
+ interface MediaPipeContextType {
7
+ wasmPath?: string;
8
+ modelPath?: string;
9
+ isBrowser: boolean;
10
+ }
11
+ interface MediaPipeProviderProps {
12
+ children: React.ReactNode;
13
+ wasmPath?: string;
14
+ modelPath?: string;
15
+ }
16
+ declare const MediaPipeProvider: React.FC<MediaPipeProviderProps>;
17
+ declare const useMediaPipeContext: () => MediaPipeContextType;
18
+
19
+ export { type MediaPipeContextType, MediaPipeProvider, type MediaPipeProviderProps, useMediaPipeContext };
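These declarations describe the provider wiring: MediaPipeProvider puts wasmPath and modelPath into context, and hooks such as useLlm fall back to those values when no per-hook options are given. A minimal sketch, assuming a placeholder app component and model location:

import { MediaPipeProvider, useLlm } from '@ismail-kattakath/mediapipe-react';

const MODEL_PATH = '/models/gemma-2b-it-gpu-int4.bin'; // hypothetical model location

function Chat() {
  // No options passed: modelPath and wasmPath come from the surrounding provider.
  const { output, isLoading, generate } = useLlm();
  return (
    <div>
      <button disabled={isLoading} onClick={() => generate('Summarize WebGPU in one sentence.')}>
        Ask
      </button>
      <pre>{output}</pre>
    </div>
  );
}

export function App() {
  return (
    <MediaPipeProvider
      wasmPath="https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/wasm"
      modelPath={MODEL_PATH}
    >
      <Chat />
    </MediaPipeProvider>
  );
}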
package/dist/index.js ADDED
@@ -0,0 +1,20 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});"use client";
2
+
3
+
4
+
5
+
6
+ var _chunkHQ7V4B42js = require('./chunk-HQ7V4B42.js');
7
+
8
+
9
+ var _chunkSUBDHSBPjs = require('./chunk-SUBDHSBP.js');
10
+
11
+
12
+ var _chunkG5GKFZAWjs = require('./chunk-G5GKFZAW.js');
13
+
14
+
15
+
16
+
17
+
18
+
19
+ exports.MediaPipeProvider = _chunkHQ7V4B42js.MediaPipeProvider; exports.useAudio = _chunkG5GKFZAWjs.useAudio; exports.useLlm = _chunkHQ7V4B42js.useLlm; exports.useMediaPipeContext = _chunkHQ7V4B42js.useMediaPipeContext; exports.useVision = _chunkSUBDHSBPjs.useVision;
20
+ //# sourceMappingURL=index.js.map
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/index.js"],"names":[],"mappings":"AAAA,qFAAY;AACZ;AACE;AACA;AACA;AACF,sDAA4B;AAC5B;AACE;AACF,sDAA4B;AAC5B;AACE;AACF,sDAA4B;AAC5B;AACE;AACA;AACA;AACA;AACA;AACF,2QAAC","file":"/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/index.js"}
package/dist/index.mjs ADDED
@@ -0,0 +1,20 @@
1
+ "use client";
2
+ import {
3
+ MediaPipeProvider,
4
+ useLlm,
5
+ useMediaPipeContext
6
+ } from "./chunk-Z5PADYRT.mjs";
7
+ import {
8
+ useVision
9
+ } from "./chunk-HDKHZ5MY.mjs";
10
+ import {
11
+ useAudio
12
+ } from "./chunk-LMIUBEL2.mjs";
13
+ export {
14
+ MediaPipeProvider,
15
+ useAudio,
16
+ useLlm,
17
+ useMediaPipeContext,
18
+ useVision
19
+ };
20
+ //# sourceMappingURL=index.mjs.map
package/dist/index.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/dist/vision.d.mts ADDED
@@ -0,0 +1,3 @@
1
+ declare const useVision: () => string;
2
+
3
+ export { useVision };
package/dist/vision.d.ts ADDED
@@ -0,0 +1,3 @@
1
+ declare const useVision: () => string;
2
+
3
+ export { useVision };
package/dist/vision.js ADDED
@@ -0,0 +1,7 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});
2
+
3
+ var _chunkSUBDHSBPjs = require('./chunk-SUBDHSBP.js');
4
+
5
+
6
+ exports.useVision = _chunkSUBDHSBPjs.useVision;
7
+ //# sourceMappingURL=vision.js.map
package/dist/vision.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/vision.js"],"names":[],"mappings":"AAAA;AACE;AACF,sDAA4B;AAC5B;AACE;AACF,+CAAC","file":"/Users/aloshy/aloshy-ai/mediapipe-react/packages/core/dist/vision.js"}
package/dist/vision.mjs ADDED
@@ -0,0 +1,7 @@
1
+ import {
2
+ useVision
3
+ } from "./chunk-HDKHZ5MY.mjs";
4
+ export {
5
+ useVision
6
+ };
7
+ //# sourceMappingURL=vision.mjs.map
package/dist/vision.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/package.json ADDED
@@ -0,0 +1,62 @@
1
+ {
2
+ "name": "@ismail-kattakath/mediapipe-react",
3
+ "version": "0.0.1",
4
+ "publishConfig": {
5
+ "access": "public"
6
+ },
7
+ "main": "./dist/index.js",
8
+ "module": "./dist/index.mjs",
9
+ "types": "./dist/index.d.ts",
10
+ "exports": {
11
+ ".": {
12
+ "types": "./dist/index.d.ts",
13
+ "import": "./dist/index.mjs",
14
+ "require": "./dist/index.js"
15
+ },
16
+ "./genai": {
17
+ "types": "./dist/genai.d.ts",
18
+ "import": "./dist/genai.mjs",
19
+ "require": "./dist/genai.js"
20
+ },
21
+ "./vision": {
22
+ "types": "./dist/vision.d.ts",
23
+ "import": "./dist/vision.mjs",
24
+ "require": "./dist/vision.js"
25
+ },
26
+ "./audio": {
27
+ "types": "./dist/audio.d.ts",
28
+ "import": "./dist/audio.mjs",
29
+ "require": "./dist/audio.js"
30
+ }
31
+ },
32
+ "files": [
33
+ "dist"
34
+ ],
35
+ "peerDependencies": {
36
+ "@mediapipe/tasks-genai": "^0.10.0",
37
+ "react": ">=18",
38
+ "react-dom": ">=18"
39
+ },
40
+ "devDependencies": {
41
+ "@testing-library/dom": "^10.4.1",
42
+ "@testing-library/jest-dom": "^6.9.1",
43
+ "@testing-library/react": "^16.3.2",
44
+ "@types/react": "^18.2.79",
45
+ "@types/react-dom": "^18.2.25",
46
+ "@vitejs/plugin-react": "^4.2.1",
47
+ "@vitest/ui": "^4.0.18",
48
+ "jsdom": "^28.0.0",
49
+ "tsup": "^8.0.2",
50
+ "typescript": "^5.4.5",
51
+ "vitest": "^4.0.18"
52
+ },
53
+ "scripts": {
54
+ "build": "tsup",
55
+ "dev": "tsup --watch",
56
+ "lint": "eslint src/",
57
+ "test": "vitest run",
58
+ "test:watch": "vitest",
59
+ "test:ui": "vitest --ui",
60
+ "clean": "rm -rf dist"
61
+ }
62
+ }
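The exports map above defines four entry points (the root plus ./genai, ./vision, and ./audio), each resolving the .mjs build under the import condition, the .js build under require, and the matching .d.ts for types. Assuming a bundler or Node resolver that honors "exports", imports would look roughly like this sketch:

// Root entry: resolves dist/index.mjs (or dist/index.js under require()).
import { MediaPipeProvider, useLlm, useVision, useAudio } from '@ismail-kattakath/mediapipe-react';

// Subpath entries resolve the corresponding dist files directly.
import { useLlm as useGenAiLlm } from '@ismail-kattakath/mediapipe-react/genai';
import { useVision as useVisionTask } from '@ismail-kattakath/mediapipe-react/vision';
import { useAudio as useAudioTask } from '@ismail-kattakath/mediapipe-react/audio';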