react-native-ai-core 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +153 -3
- package/android/src/main/AndroidManifest.xml +12 -0
- package/android/src/main/java/com/aicore/AiCoreModule.kt +560 -74
- package/android/src/main/java/com/aicore/InferenceService.kt +85 -0
- package/lib/module/NativeAiCore.js +2 -2
- package/lib/module/NativeAiCore.js.map +1 -1
- package/lib/module/index.js +61 -52
- package/lib/module/index.js.map +1 -1
- package/lib/module/structured.js +752 -0
- package/lib/module/structured.js.map +1 -0
- package/lib/typescript/src/NativeAiCore.d.ts +26 -15
- package/lib/typescript/src/NativeAiCore.d.ts.map +1 -1
- package/lib/typescript/src/index.d.ts +55 -41
- package/lib/typescript/src/index.d.ts.map +1 -1
- package/lib/typescript/src/structured.d.ts +39 -0
- package/lib/typescript/src/structured.d.ts.map +1 -0
- package/package.json +8 -5
- package/src/NativeAiCore.ts +29 -16
- package/src/index.tsx +74 -55
- package/src/structured.ts +1170 -0
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
package com.aicore

import android.app.Notification
import android.app.NotificationChannel
import android.app.NotificationManager
import android.app.Service
import android.content.Intent
import android.content.pm.ServiceInfo
import android.os.Build
import android.os.IBinder

/**
 * Foreground Service that keeps the process alive while inference is running.
 * Displays a persistent notification so Android does not suspend the app when
 * it goes to background or the screen is locked.
 *
 * Lifecycle:
 * - AiCoreModule calls startForegroundService() before a generation call.
 * - The service promotes itself to foreground with a notification.
 * - AiCoreModule calls stopService() (or sends ACTION_STOP) when done.
 */
class InferenceService : Service() {

    override fun onCreate() {
        super.onCreate()
        ensureNotificationChannel()
    }

    override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
        // Explicit stop request from AiCoreModule: drop the notification and exit.
        if (intent?.action == ACTION_STOP) {
            stopForeground(STOP_FOREGROUND_REMOVE)
            stopSelf()
            return START_NOT_STICKY
        }

        val notification = buildNotification()
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE) {
            // API 34+ requires the foreground-service type to be declared at start time.
            startForeground(NOTIFICATION_ID, notification, ServiceInfo.FOREGROUND_SERVICE_TYPE_DATA_SYNC)
        } else {
            startForeground(NOTIFICATION_ID, notification)
        }
        // We do not auto-stop here — AiCoreModule stops us when generation finishes.
        return START_NOT_STICKY
    }

    /** Not a bound service. */
    override fun onBind(intent: Intent?): IBinder? = null

    /** Called when the user removes the app from the recents list or the system kills the task. */
    override fun onTaskRemoved(rootIntent: Intent?) {
        super.onTaskRemoved(rootIntent)
        stopForeground(STOP_FOREGROUND_REMOVE)
        stopSelf()
    }

    // ── helpers ────────────────────────────────────────────────────────────────

    /**
     * Creates the (idempotent) notification channel required on Android O+.
     *
     * Fix: channel APIs ([NotificationManager.getNotificationChannel],
     * [NotificationChannel]) only exist on API 26+; calling them unguarded
     * throws NoSuchMethodError on older devices, so bail out early there.
     * NOTE(review): harmless no-op if the library's minSdk is already >= 26 — confirm.
     */
    private fun ensureNotificationChannel() {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) return
        val manager = getSystemService(NotificationManager::class.java) ?: return
        if (manager.getNotificationChannel(CHANNEL_ID) != null) return

        val channel = NotificationChannel(
            CHANNEL_ID,
            "AI Inference",
            NotificationManager.IMPORTANCE_LOW, // silent — no sound/vibration
        ).apply {
            description = "Shown while on-device AI generation is in progress"
            setShowBadge(false)
        }
        manager.createNotificationChannel(channel)
    }

    /**
     * Builds the persistent "in progress" notification.
     *
     * Fix: the channel-aware [Notification.Builder] constructor is API 26+;
     * fall back to the deprecated single-arg constructor on older devices
     * instead of crashing.
     */
    private fun buildNotification(): Notification {
        val builder = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            Notification.Builder(this, CHANNEL_ID)
        } else {
            @Suppress("DEPRECATION")
            Notification.Builder(this)
        }
        return builder
            .setContentTitle("AI processing…")
            .setContentText("Generating response in background")
            .setSmallIcon(android.R.drawable.ic_popup_sync)
            .setOngoing(true)
            .build()
    }

    companion object {
        const val CHANNEL_ID = "aicore_inference_channel"
        const val NOTIFICATION_ID = 1001
        const val ACTION_STOP = "com.aicore.INFERENCE_STOP"
    }
}
|
|
@@ -3,8 +3,8 @@
|
|
|
3
3
|
/**
|
|
4
4
|
* NativeAICore — TurboModule Spec (New Architecture)
|
|
5
5
|
*
|
|
6
|
-
*
|
|
7
|
-
*
|
|
6
|
+
* High-performance JSI bridge to the Google AI Edge SDK (MediaPipe)
|
|
7
|
+
* for running Gemini Nano on-device via the device NPU.
|
|
8
8
|
*/
|
|
9
9
|
import { TurboModuleRegistry } from 'react-native';
|
|
10
10
|
export default TurboModuleRegistry.getEnforcing('AiCore');
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"names":["TurboModuleRegistry","getEnforcing"],"sourceRoot":"../../src","sources":["NativeAiCore.ts"],"mappings":";;AAAA;AACA;AACA;AACA;AACA;AACA;AACA,SAASA,mBAAmB,QAA0B,cAAc;
|
|
1
|
+
{"version":3,"names":["TurboModuleRegistry","getEnforcing"],"sourceRoot":"../../src","sources":["NativeAiCore.ts"],"mappings":";;AAAA;AACA;AACA;AACA;AACA;AACA;AACA,SAASA,mBAAmB,QAA0B,cAAc;AA2DpE,eAAeA,mBAAmB,CAACC,YAAY,CAAO,QAAQ,CAAC","ignoreList":[]}
|
package/lib/module/index.js
CHANGED
|
@@ -3,55 +3,53 @@
|
|
|
3
3
|
/**
|
|
4
4
|
* react-native-ai-core
|
|
5
5
|
*
|
|
6
|
-
*
|
|
7
|
-
*
|
|
8
|
-
*
|
|
6
|
+
* JS abstraction layer over the native TurboModule.
|
|
7
|
+
* Provides a clean, typed API for running Gemini Nano on-device
|
|
8
|
+
* via the Google AI Edge SDK (MediaPipe).
|
|
9
9
|
*
|
|
10
10
|
* @example
|
|
11
11
|
* import AICore from 'react-native-ai-core';
|
|
12
12
|
*
|
|
13
13
|
* await AICore.initialize('/data/local/tmp/gemini-nano.bin');
|
|
14
|
-
* const answer = await AICore.generateResponse('
|
|
14
|
+
* const answer = await AICore.generateResponse('What is JSI?');
|
|
15
15
|
*/
|
|
16
16
|
|
|
17
17
|
import { NativeEventEmitter, Platform } from 'react-native';
|
|
18
18
|
import NativeAiCore from "./NativeAiCore.js";
|
|
19
|
+
import { generateStructuredResponse } from "./structured.js";
|
|
20
|
+
export { generateStructuredResponse, StructuredOutputError } from "./structured.js";
|
|
19
21
|
|
|
20
|
-
// ──
|
|
22
|
+
// ── Public types ────────────────────────────────────────────────────────────────
|
|
21
23
|
|
|
22
|
-
/**
|
|
24
|
+
/** Availability status of Gemini Nano on the device */
|
|
23
25
|
|
|
24
|
-
/**
|
|
26
|
+
/** Streaming response callbacks */
|
|
25
27
|
|
|
26
|
-
/**
|
|
28
|
+
/** Normalised error structure */
|
|
27
29
|
|
|
28
|
-
// ──
|
|
30
|
+
// ── Event names (must match the Kotlin module constants) ───────────────────────
|
|
29
31
|
const EVENT_STREAM_TOKEN = 'AICore_streamToken';
|
|
30
32
|
const EVENT_STREAM_COMPLETE = 'AICore_streamComplete';
|
|
31
33
|
const EVENT_STREAM_ERROR = 'AICore_streamError';
|
|
32
|
-
|
|
33
|
-
// Instancia del emisor de eventos nativo (null en plataformas no soportadas)
|
|
34
34
|
const emitter = NativeAiCore != null ? new NativeEventEmitter(NativeAiCore) : null;
|
|
35
|
-
|
|
36
|
-
// ── Guarda de plataforma ──────────────────────────────────────────────────────
|
|
37
|
-
|
|
38
35
|
function assertAvailable() {
|
|
39
36
|
if (!NativeAiCore) {
|
|
40
|
-
throw new Error(`react-native-ai-core:
|
|
37
|
+
throw new Error(`react-native-ai-core: native module unavailable on ${Platform.OS}. ` + 'This module requires Android with NPU support.');
|
|
41
38
|
}
|
|
42
39
|
}
|
|
43
40
|
|
|
44
|
-
// ── API
|
|
41
|
+
// ── Public API ───────────────────────────────────────────────────────────────────
|
|
45
42
|
|
|
46
43
|
/**
|
|
47
|
-
*
|
|
44
|
+
* Initialises the LLM inference engine with the given model.
|
|
48
45
|
*
|
|
49
|
-
* @param modelPath
|
|
50
|
-
*
|
|
46
|
+
* @param modelPath Absolute path to the `.bin` model file on the device.
|
|
47
|
+
* Pass an empty string to use ML Kit AICore (Gemini Nano NPU).
|
|
48
|
+
* @returns `true` on success.
|
|
51
49
|
*
|
|
52
|
-
* @throws `MODEL_NOT_FOUND`
|
|
53
|
-
* @throws `NPU_UNSUPPORTED`
|
|
54
|
-
* @throws `INIT_FAILED`
|
|
50
|
+
* @throws `MODEL_NOT_FOUND` if the file does not exist at `modelPath`.
|
|
51
|
+
* @throws `NPU_UNSUPPORTED` if the device NPU is incompatible.
|
|
52
|
+
* @throws `INIT_FAILED` if the engine could not start for another reason.
|
|
55
53
|
*
|
|
56
54
|
* @example
|
|
57
55
|
* const ok = await initialize('/data/local/tmp/gemini-nano.bin');
|
|
@@ -62,16 +60,16 @@ export async function initialize(modelPath) {
|
|
|
62
60
|
}
|
|
63
61
|
|
|
64
62
|
/**
|
|
65
|
-
*
|
|
63
|
+
* Generates a complete (non-streaming) response for the given prompt.
|
|
66
64
|
*
|
|
67
|
-
* @param prompt
|
|
68
|
-
* @returns
|
|
65
|
+
* @param prompt Input text for the model.
|
|
66
|
+
* @returns Full response as a string.
|
|
69
67
|
*
|
|
70
|
-
* @throws `NOT_INITIALIZED`
|
|
71
|
-
* @throws `GENERATION_ERROR`
|
|
68
|
+
* @throws `NOT_INITIALIZED` if `initialize()` was not called first.
|
|
69
|
+
* @throws `GENERATION_ERROR` if the model fails during inference.
|
|
72
70
|
*
|
|
73
71
|
* @example
|
|
74
|
-
* const response = await generateResponse('
|
|
72
|
+
* const response = await generateResponse('Explain TurboModules');
|
|
75
73
|
*/
|
|
76
74
|
export async function generateResponse(prompt) {
|
|
77
75
|
assertAvailable();
|
|
@@ -79,28 +77,28 @@ export async function generateResponse(prompt) {
|
|
|
79
77
|
}
|
|
80
78
|
|
|
81
79
|
/**
|
|
82
|
-
*
|
|
83
|
-
*
|
|
80
|
+
* Generates a response token-by-token via streaming.
|
|
81
|
+
* Tokens are delivered in real time through the callbacks.
|
|
84
82
|
*
|
|
85
|
-
* @param prompt
|
|
83
|
+
* @param prompt Input text for the model.
|
|
86
84
|
* @param callbacks `{ onToken, onComplete, onError }`.
|
|
87
|
-
* @returns
|
|
85
|
+
* @returns Cleanup function — call it to remove the event subscriptions.
|
|
88
86
|
*
|
|
89
87
|
* @example
|
|
90
|
-
* const unsubscribe = generateResponseStream('
|
|
88
|
+
* const unsubscribe = generateResponseStream('What is MediaPipe?', {
|
|
91
89
|
* onToken: (token, done) => console.log(token),
|
|
92
|
-
* onComplete: () => console.log('
|
|
90
|
+
* onComplete: () => console.log('Done!'),
|
|
93
91
|
* onError: (err) => console.error(err),
|
|
94
92
|
* });
|
|
95
93
|
*
|
|
96
|
-
* //
|
|
94
|
+
* // On component unmount:
|
|
97
95
|
* unsubscribe();
|
|
98
96
|
*/
|
|
99
97
|
export function generateResponseStream(prompt, callbacks) {
|
|
100
98
|
if (!NativeAiCore || !emitter) {
|
|
101
99
|
callbacks.onError({
|
|
102
100
|
code: 'UNAVAILABLE',
|
|
103
|
-
message: `react-native-ai-core
|
|
101
|
+
message: `react-native-ai-core is not available on ${Platform.OS}.`
|
|
104
102
|
});
|
|
105
103
|
return () => {};
|
|
106
104
|
}
|
|
@@ -117,11 +115,7 @@ export function generateResponseStream(prompt, callbacks) {
|
|
|
117
115
|
error => {
|
|
118
116
|
callbacks.onError(error);
|
|
119
117
|
});
|
|
120
|
-
|
|
121
|
-
// Arrancar la inferencia en el lado nativo
|
|
122
118
|
NativeAiCore.generateResponseStream(prompt);
|
|
123
|
-
|
|
124
|
-
// Devuelve función de limpieza para remover los listeners
|
|
125
119
|
return () => {
|
|
126
120
|
tokenSub.remove();
|
|
127
121
|
completeSub.remove();
|
|
@@ -130,17 +124,17 @@ export function generateResponseStream(prompt, callbacks) {
|
|
|
130
124
|
}
|
|
131
125
|
|
|
132
126
|
/**
|
|
133
|
-
*
|
|
127
|
+
* Checks whether Gemini Nano is available on this device.
|
|
134
128
|
*
|
|
135
129
|
* @returns
|
|
136
|
-
* - `'AVAILABLE'` →
|
|
137
|
-
* - `'NEED_DOWNLOAD'` →
|
|
138
|
-
* - `'UNSUPPORTED'` →
|
|
130
|
+
* - `'AVAILABLE'` → Model is ready to use.
|
|
131
|
+
* - `'NEED_DOWNLOAD'` → Device is compatible but the model is not yet downloaded.
|
|
132
|
+
* - `'UNSUPPORTED'` → Device does not meet the minimum requirements.
|
|
139
133
|
*
|
|
140
134
|
* @example
|
|
141
135
|
* const status = await checkAvailability();
|
|
142
136
|
* if (status === 'NEED_DOWNLOAD') {
|
|
143
|
-
* //
|
|
137
|
+
* // show model download UI
|
|
144
138
|
* }
|
|
145
139
|
*/
|
|
146
140
|
export async function checkAvailability() {
|
|
@@ -149,8 +143,8 @@ export async function checkAvailability() {
|
|
|
149
143
|
}
|
|
150
144
|
|
|
151
145
|
/**
|
|
152
|
-
*
|
|
153
|
-
* **
|
|
146
|
+
* Releases the model from NPU memory.
|
|
147
|
+
* **Recommended**: call in the root component's `useEffect` cleanup.
|
|
154
148
|
*
|
|
155
149
|
* @example
|
|
156
150
|
* useEffect(() => {
|
|
@@ -164,26 +158,41 @@ export async function release() {
|
|
|
164
158
|
}
|
|
165
159
|
|
|
166
160
|
/**
|
|
167
|
-
*
|
|
168
|
-
*
|
|
161
|
+
* Clears the conversation history in the native engine without releasing the model.
|
|
162
|
+
* The next `generateResponse` call will start without any previous context.
|
|
169
163
|
*
|
|
170
164
|
* @example
|
|
171
|
-
* await resetConversation(); //
|
|
165
|
+
* await resetConversation(); // new conversation, same engine
|
|
172
166
|
*/
|
|
173
167
|
export async function resetConversation() {
|
|
174
168
|
if (!NativeAiCore) return;
|
|
175
169
|
return NativeAiCore.resetConversation();
|
|
176
170
|
}
|
|
177
171
|
|
|
178
|
-
|
|
172
|
+
/**
|
|
173
|
+
* Cancels any generation currently in progress.
|
|
174
|
+
*
|
|
175
|
+
* - **Streaming**: the stream ends immediately with tokens generated so far.
|
|
176
|
+
* - **Non-streaming**: the pending `generateResponse` promise rejects with code `'CANCELLED'`.
|
|
177
|
+
*
|
|
178
|
+
* Safe to call when no generation is running.
|
|
179
|
+
*/
|
|
180
|
+
export async function cancelGeneration() {
|
|
181
|
+
if (!NativeAiCore) return;
|
|
182
|
+
return NativeAiCore.cancelGeneration();
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
// ── Default export (API object) ───────────────────────────────────────────────
|
|
179
186
|
|
|
180
187
|
const AICore = {
|
|
181
188
|
initialize,
|
|
182
189
|
generateResponse,
|
|
183
190
|
generateResponseStream,
|
|
191
|
+
generateStructuredResponse,
|
|
184
192
|
checkAvailability,
|
|
185
193
|
release,
|
|
186
|
-
resetConversation
|
|
194
|
+
resetConversation,
|
|
195
|
+
cancelGeneration
|
|
187
196
|
};
|
|
188
197
|
export default AICore;
|
|
189
198
|
//# sourceMappingURL=index.js.map
|
package/lib/module/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"names":["NativeEventEmitter","Platform","NativeAiCore","EVENT_STREAM_TOKEN","EVENT_STREAM_COMPLETE","EVENT_STREAM_ERROR","emitter","assertAvailable","Error","OS","initialize","modelPath","generateResponse","prompt","generateResponseStream","callbacks","onError","code","message","tokenSub","addListener","event","onToken","token","done","completeSub","onComplete","errorSub","error","remove","checkAvailability","release","resetConversation","AICore"],"sourceRoot":"../../src","sources":["index.tsx"],"mappings":";;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SAASA,kBAAkB,EAAEC,QAAQ,QAAQ,cAAc;AAC3D,OAAOC,YAAY,MAAM,mBAAgB;;
|
|
1
|
+
{"version":3,"names":["NativeEventEmitter","Platform","NativeAiCore","generateStructuredResponse","StructuredOutputError","EVENT_STREAM_TOKEN","EVENT_STREAM_COMPLETE","EVENT_STREAM_ERROR","emitter","assertAvailable","Error","OS","initialize","modelPath","generateResponse","prompt","generateResponseStream","callbacks","onError","code","message","tokenSub","addListener","event","onToken","token","done","completeSub","onComplete","errorSub","error","remove","checkAvailability","release","resetConversation","cancelGeneration","AICore"],"sourceRoot":"../../src","sources":["index.tsx"],"mappings":";;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SAASA,kBAAkB,EAAEC,QAAQ,QAAQ,cAAc;AAC3D,OAAOC,YAAY,MAAM,mBAAgB;AACzC,SAASC,0BAA0B,QAAQ,iBAAc;AACzD,SACEA,0BAA0B,EAC1BC,qBAAqB,QAIhB,iBAAc;;AAErB;;AAEA;;AAGA;;AAcA;;AAMA;AACA,MAAMC,kBAAkB,GAAM,oBAAoB;AAClD,MAAMC,qBAAqB,GAAG,uBAAuB;AACrD,MAAMC,kBAAkB,GAAM,oBAAoB;AAElD,MAAMC,OAAO,GACXN,YAAY,IAAI,IAAI,GAAG,IAAIF,kBAAkB,CAACE,YAAY,CAAC,GAAG,IAAI;AAEpE,SAASO,eAAeA,CAAA,EAAS;EAC/B,IAAI,CAACP,YAAY,EAAE;IACjB,MAAM,IAAIQ,KAAK,CACb,sDAAsDT,QAAQ,CAACU,EAAE,IAAI,GACnE,gDACJ,CAAC;EACH;AACF;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,UAAUA,CAACC,SAAiB,EAAoB;EACpEJ,eAAe,CAAC,CAAC;EACjB,OAAOP,YAAY,CAAEU,UAAU,CAACC,SAAS,CAAC;AAC5C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,gBAAgBA,CAACC,MAAc,EAAmB;EACtEN,eAAe,CAAC,CAAC;EACjB,OAAOP,YAAY,CAAEY,gBAAgB,CAACC,MAAM,CAAC;AAC/C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,sBAAsBA,CACpCD,MAAc,EACdE,SAA0B,EACd;EACZ,IAAI,CAACf,YAAY,IAAI,CAACM,OAAO,EAAE;IAC7BS,SAAS,CAACC,OAAO,CAAC;MAChBC,IAAI,EAAE,aAAa;MACnBC,OAAO,EAAE,4CAA4CnB,QAAQ,CAACU,EAAE;IAClE,CAAC,CAAC;IACF,OAAO,MAAM,CAAC,CAAC;EACjB;EAEA,MAAMU,QAAQ,GAAGb,OAAO,CAACc,WAAW,CAClCjB,kBAAkB;EAClB;EACCkB,KAAU,IAAK;IACdN,SAAS,CAACO,OAAO,CACdD,KAAK,CAAsCE,KAAK,EAChDF,KAAK,CAAsCG,IAC9C,CAAC;EACH,C
ACF,CAAC;EAED,MAAMC,WAAW,GAAGnB,OAAO,CAACc,WAAW,CAAChB,qBAAqB,EAAE,MAAM;IACnEW,SAAS,CAACW,UAAU,CAAC,CAAC;EACxB,CAAC,CAAC;EAEF,MAAMC,QAAQ,GAAGrB,OAAO,CAACc,WAAW,CAClCf,kBAAkB;EAClB;EACCuB,KAAU,IAAK;IACdb,SAAS,CAACC,OAAO,CAACY,KAAgB,CAAC;EACrC,CACF,CAAC;EAED5B,YAAY,CAACc,sBAAsB,CAACD,MAAM,CAAC;EAE3C,OAAO,MAAM;IACXM,QAAQ,CAACU,MAAM,CAAC,CAAC;IACjBJ,WAAW,CAACI,MAAM,CAAC,CAAC;IACpBF,QAAQ,CAACE,MAAM,CAAC,CAAC;EACnB,CAAC;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,iBAAiBA,CAAA,EAAgC;EACrE,IAAI,CAAC9B,YAAY,EAAE,OAAO,aAAa;EACvC,OAAOA,YAAY,CAAC8B,iBAAiB,CAAC,CAAC;AACzC;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,OAAOA,CAAA,EAAkB;EAC7C,IAAI,CAAC/B,YAAY,EAAE;EACnB,OAAOA,YAAY,CAAC+B,OAAO,CAAC,CAAC;AAC/B;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,iBAAiBA,CAAA,EAAkB;EACvD,IAAI,CAAChC,YAAY,EAAE;EACnB,OAAOA,YAAY,CAACgC,iBAAiB,CAAC,CAAC;AACzC;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,gBAAgBA,CAAA,EAAkB;EACtD,IAAI,CAACjC,YAAY,EAAE;EACnB,OAAOA,YAAY,CAACiC,gBAAgB,CAAC,CAAC;AACxC;;AAEA;;AAEA,MAAMC,MAAM,GAAG;EACbxB,UAAU;EACVE,gBAAgB;EAChBE,sBAAsB;EACtBb,0BAA0B;EAC1B6B,iBAAiB;EACjBC,OAAO;EACPC,iBAAiB;EACjBC;AACF,CAAC;AAED,eAAeC,MAAM","ignoreList":[]}
|