@elizaos/plugin-local-ai 2.0.0-alpha.6 → 2.0.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Shaw Walters and elizaOS Contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,148 @@
1
+ # Local AI Plugin
2
+
3
+ This plugin provides local AI model capabilities through the ElizaOS platform, supporting text generation, image analysis, speech synthesis, and audio transcription.
4
+
5
+ ## Usage
6
+
7
+ Add the plugin to your character configuration:
8
+
9
+ ```json
10
+ "plugins": ["@elizaos/plugin-local-ai"]
11
+ ```
12
+
13
+ ## Configuration
14
+
15
+ The plugin is configured using environment variables (typically set in a `.env` file or via your deployment settings):
16
+
17
+ For example, in a `.env` file:
18
+
19
+ ```env
20
+ # Optional: Specify a custom directory for models (GGUF files)
21
+ # MODELS_DIR=/path/to/your/models
22
+
23
+ # Optional: Specify a custom directory for caching other components (tokenizers, etc.)
24
+ # CACHE_DIR=/path/to/your/cache
25
+
26
+ # Optional: Specify filenames for the text generation and embedding models within the models directory
27
+ # LOCAL_SMALL_MODEL=my-custom-small-model.gguf
28
+ # LOCAL_LARGE_MODEL=my-custom-large-model.gguf
29
+ # LOCAL_EMBEDDING_MODEL=my-custom-embedding-model.gguf
30
+
31
+ # Optional: Fallback dimension size for embeddings if generation fails. Defaults to the model's default (e.g., 1024).
32
+ # LOCAL_EMBEDDING_DIMENSIONS=1024
33
+ ```
34
+
35
+ ### Configuration Options
36
+
37
+ - `MODELS_DIR` (Optional): Specifies a custom directory for storing model files (GGUF format). If not set, defaults to `~/.eliza/models`.
38
+ - `CACHE_DIR` (Optional): Specifies a custom directory for caching other components like tokenizers. If not set, defaults to `~/.eliza/cache`.
39
+ - `LOCAL_SMALL_MODEL` (Optional): Specifies the filename for the small text generation model (e.g., `text/eliza-1-mobile-1_7b-32k.gguf`) located in the models directory.
40
+ - `LOCAL_LARGE_MODEL` (Optional): Specifies the filename for the large text generation model (e.g., `text/eliza-1-desktop-9b-64k.gguf`) located in the models directory.
41
+ - `LOCAL_EMBEDDING_MODEL` (Optional): Specifies the filename for the text embedding model (e.g., `text/eliza-1-lite-0_6b-32k.gguf`) located in the models directory.
42
+ - `LOCAL_EMBEDDING_DIMENSIONS` (Optional): Defines the expected dimension size for text embeddings. This is primarily used as a fallback dimension if the embedding model fails to generate an embedding. If not set, it defaults to the embedding model's native dimension size (e.g., 1024 for `text/eliza-1-lite-0_6b-32k.gguf`).
43
+ - `LOCAL_AI_TEST_MODEL_PATH` (Optional, tests only): Absolute path to a GGUF model file used by the gated integration tests in `__tests__/integration.test.ts`. The integration tests are skipped unless this is set.
44
+
45
+ ## Features
46
+
47
+ The plugin provides these model classes:
48
+
49
+ - `TEXT_SMALL`: Fast, efficient text generation using smaller models
50
+ - `TEXT_LARGE`: More capable text generation using larger models
51
+ - `TEXT_EMBEDDING`: Generates text embeddings locally.
52
+ - `IMAGE_DESCRIPTION`: Local image analysis using Florence-2 vision model
53
+ - `TEXT_TO_SPEECH`: Local text-to-speech synthesis
54
+ - `TRANSCRIPTION`: Local audio transcription using Whisper
55
+
56
+ ### Native tool calling and structured output
57
+
58
+ `TEXT_SMALL` and `TEXT_LARGE` route `tools`, `responseSchema`, and
59
+ `responseFormat: { type: "json_object" }` through `node-llama-cpp`'s native
60
+ function-calling and grammar-constrained-output APIs. When any of these are
61
+ set the handler returns `{ text, toolCalls, finishReason? }` (matching the
62
+ shape used by `plugin-openai` and `plugin-anthropic`) instead of a plain
63
+ string.
64
+
65
+ Tool calling works best on the Eliza-1 chat tiers shipped through the local
66
+ catalog. Smaller local models without tool-call training may refuse to emit
67
+ tool calls — pass a larger Eliza-1 tier or drop the `tools` field.
68
+
69
+ ```typescript
70
+ const result = await runtime.useModel(ModelType.TEXT_LARGE, {
71
+ prompt: "What's the weather in Paris?",
72
+ tools: [
73
+ {
74
+ name: "get_weather",
75
+ description: "Look up weather for a city",
76
+ parameters: {
77
+ type: "object",
78
+ properties: { city: { type: "string" } },
79
+ required: ["city"],
80
+ },
81
+ },
82
+ ],
83
+ });
84
+ // result.toolCalls -> [{ id, name: "get_weather", arguments: { city: "Paris" }, type: "function" }]
85
+ ```
86
+
87
+ ### Prompt cache reuse
88
+
89
+ The plugin keeps one long-lived `LlamaContext` + `LlamaChatSession` per
90
+ model type (`TEXT_SMALL` / `TEXT_LARGE`). Successive `useModel` calls reuse
91
+ the existing KV cache — there is no per-call context teardown, so the
92
+ system-prompt prefix stays evaluated. The session is dropped only when the
93
+ system prompt changes for that model type.
94
+
95
+ This mirrors what `plugin-anthropic` does with `cache_control` and what
96
+ `plugin-openai` does with stable system prompts: the prefix is paid for
97
+ once, subsequent turns extend the cache instead of rebuilding it.
98
+
99
+ ### Text Generation
100
+
101
+ ```typescript
102
+ // Using small model
103
+ const smallResponse = await runtime.useModel(ModelType.TEXT_SMALL, {
104
+ prompt: "Generate a short response",
105
+ stopSequences: [],
106
+ });
107
+
108
+ // Using large model
109
+ const largeResponse = await runtime.useModel(ModelType.TEXT_LARGE, {
110
+ prompt: "Generate a detailed response",
111
+ stopSequences: [],
112
+ });
113
+ ```
114
+
115
+ ### Text Embedding
116
+
117
+ ```typescript
118
+ const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, {
119
+ text: "Text to get embedding for",
120
+ });
121
+ ```
122
+
123
+ ### Image Analysis
124
+
125
+ ```typescript
126
+ const { title, description } = await runtime.useModel(
127
+ ModelType.IMAGE_DESCRIPTION,
128
+ "https://example.com/image.jpg",
129
+ );
130
+ ```
131
+
132
+ ### Text-to-Speech
133
+
134
+ ```typescript
135
+ const audioStream = await runtime.useModel(
136
+ ModelType.TEXT_TO_SPEECH,
137
+ "Text to convert to speech",
138
+ );
139
+ ```
140
+
141
+ ### Audio Transcription
142
+
143
+ ```typescript
144
+ const transcription = await runtime.useModel(
145
+ ModelType.TRANSCRIPTION,
146
+ audioBuffer,
147
+ );
148
+ ```
@@ -5,14 +5,13 @@ var unsupportedMessage = "Local AI is not supported in browsers. Use a server pr
5
5
  var warnUnsupported = (modelType) => {
6
6
  logger.warn(`[plugin-${pluginName}] ${modelType} is not available in browsers.`);
7
7
  };
8
- var unsupportedText = (modelType) => {
8
+ var unsupportedText = (modelType, params) => {
9
9
  warnUnsupported(modelType);
10
+ if (params && (params.tools || params.responseSchema || params.toolChoice)) {
11
+ throw new Error(`[plugin-${pluginName}] Tool calling and structured output require the Node runtime. ` + "Browsers cannot execute llama.cpp directly — switch providers or proxy through a server.");
12
+ }
10
13
  return unsupportedMessage;
11
14
  };
12
- var unsupportedObject = (modelType) => {
13
- warnUnsupported(modelType);
14
- return { error: unsupportedMessage };
15
- };
16
15
  var unsupportedImageDescription = (modelType) => {
17
16
  warnUnsupported(modelType);
18
17
  return {
@@ -27,14 +26,14 @@ var localAiPlugin = {
27
26
  logger.warn(`[plugin-${pluginName}] This plugin is not supported directly in browsers. Use a server proxy.`);
28
27
  },
29
28
  models: {
30
- [ModelType.TEXT_SMALL]: async (_runtime, _params) => unsupportedText(ModelType.TEXT_SMALL),
31
- [ModelType.TEXT_LARGE]: async (_runtime, _params) => unsupportedText(ModelType.TEXT_LARGE),
29
+ [ModelType.TEXT_SMALL]: async (_runtime, params) => unsupportedText(ModelType.TEXT_SMALL, params),
30
+ [ModelType.TEXT_LARGE]: async (_runtime, params) => unsupportedText(ModelType.TEXT_LARGE, params),
32
31
  [ModelType.TEXT_REASONING_SMALL]: async (_runtime, _params) => unsupportedText(ModelType.TEXT_REASONING_SMALL),
33
32
  [ModelType.TEXT_REASONING_LARGE]: async (_runtime, _params) => unsupportedText(ModelType.TEXT_REASONING_LARGE),
34
33
  [ModelType.TEXT_COMPLETION]: async (_runtime, _params) => unsupportedText(ModelType.TEXT_COMPLETION),
35
34
  [ModelType.TEXT_EMBEDDING]: async (_runtime, _params) => {
36
35
  warnUnsupported(ModelType.TEXT_EMBEDDING);
37
- return new Array(384).fill(0);
36
+ return new Array(1024).fill(0);
38
37
  },
39
38
  [ModelType.TEXT_TOKENIZER_ENCODE]: async () => {
40
39
  warnUnsupported(ModelType.TEXT_TOKENIZER_ENCODE);
@@ -44,8 +43,6 @@ var localAiPlugin = {
44
43
  warnUnsupported(ModelType.TEXT_TOKENIZER_DECODE);
45
44
  return "";
46
45
  },
47
- [ModelType.OBJECT_SMALL]: async (_runtime, _params) => unsupportedObject(ModelType.OBJECT_SMALL),
48
- [ModelType.OBJECT_LARGE]: async (_runtime, _params) => unsupportedObject(ModelType.OBJECT_LARGE),
49
46
  [ModelType.IMAGE_DESCRIPTION]: async (_runtime, _params) => unsupportedImageDescription(ModelType.IMAGE_DESCRIPTION),
50
47
  [ModelType.TRANSCRIPTION]: async () => unsupportedText(ModelType.TRANSCRIPTION),
51
48
  [ModelType.TEXT_TO_SPEECH]: async () => {
@@ -64,4 +61,4 @@ export {
64
61
  index_browser_default as default
65
62
  };
66
63
 
67
- //# debugId=23BEA74AC5595BB664756E2164756E21
64
+ //# debugId=7DCC0F22BB4D518664756E2164756E21
@@ -2,9 +2,9 @@
2
2
  "version": 3,
3
3
  "sources": ["../../index.browser.ts"],
4
4
  "sourcesContent": [
5
- "import type {\n GenerateTextParams,\n IAgentRuntime,\n ImageDescriptionParams,\n ObjectGenerationParams,\n Plugin,\n TextEmbeddingParams,\n} from \"@elizaos/core\";\nimport { logger, ModelType } from \"@elizaos/core\";\n\n// Inline types for browser compatibility (avoid import resolution issues)\ntype ImageDescriptionResult = {\n title: string;\n description: string;\n};\n\ntype ImageGenerationResult = {\n url: string;\n};\n\nconst pluginName = \"local-ai\";\nconst unsupportedMessage =\n \"Local AI is not supported in browsers. Use a server proxy or switch providers.\";\n\nconst warnUnsupported = (modelType: string): void => {\n logger.warn(`[plugin-${pluginName}] ${modelType} is not available in browsers.`);\n};\n\nconst unsupportedText = (modelType: string): string => {\n warnUnsupported(modelType);\n return unsupportedMessage;\n};\n\nconst unsupportedObject = (modelType: string): Record<string, string> => {\n warnUnsupported(modelType);\n return { error: unsupportedMessage };\n};\n\nconst unsupportedImageDescription = (modelType: string): ImageDescriptionResult => {\n warnUnsupported(modelType);\n return {\n title: \"Unsupported\",\n description: unsupportedMessage,\n };\n};\n\nexport const localAiPlugin: Plugin = {\n name: pluginName,\n description: \"Local AI plugin (browser stub; use a server proxy)\",\n async init(_config, _runtime: IAgentRuntime): Promise<void> {\n logger.warn(\n `[plugin-${pluginName}] This plugin is not supported directly in browsers. 
Use a server proxy.`\n );\n },\n models: {\n [ModelType.TEXT_SMALL]: async (\n _runtime: IAgentRuntime,\n _params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_SMALL),\n [ModelType.TEXT_LARGE]: async (\n _runtime: IAgentRuntime,\n _params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_LARGE),\n [ModelType.TEXT_REASONING_SMALL]: async (\n _runtime: IAgentRuntime,\n _params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_REASONING_SMALL),\n [ModelType.TEXT_REASONING_LARGE]: async (\n _runtime: IAgentRuntime,\n _params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_REASONING_LARGE),\n [ModelType.TEXT_COMPLETION]: async (\n _runtime: IAgentRuntime,\n _params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_COMPLETION),\n [ModelType.TEXT_EMBEDDING]: async (\n _runtime: IAgentRuntime,\n _params: TextEmbeddingParams | string | null\n ): Promise<number[]> => {\n warnUnsupported(ModelType.TEXT_EMBEDDING);\n return new Array(384).fill(0);\n },\n [ModelType.TEXT_TOKENIZER_ENCODE]: async (): Promise<number[]> => {\n warnUnsupported(ModelType.TEXT_TOKENIZER_ENCODE);\n return [];\n },\n [ModelType.TEXT_TOKENIZER_DECODE]: async (): Promise<string> => {\n warnUnsupported(ModelType.TEXT_TOKENIZER_DECODE);\n return \"\";\n },\n [ModelType.OBJECT_SMALL]: async (\n _runtime: IAgentRuntime,\n _params: ObjectGenerationParams\n ): Promise<Record<string, string>> => unsupportedObject(ModelType.OBJECT_SMALL),\n [ModelType.OBJECT_LARGE]: async (\n _runtime: IAgentRuntime,\n _params: ObjectGenerationParams\n ): Promise<Record<string, string>> => unsupportedObject(ModelType.OBJECT_LARGE),\n [ModelType.IMAGE_DESCRIPTION]: async (\n _runtime: IAgentRuntime,\n _params: ImageDescriptionParams | string\n ): Promise<ImageDescriptionResult> => unsupportedImageDescription(ModelType.IMAGE_DESCRIPTION),\n [ModelType.TRANSCRIPTION]: async (): Promise<string> =>\n 
unsupportedText(ModelType.TRANSCRIPTION),\n [ModelType.TEXT_TO_SPEECH]: async (): Promise<Uint8Array> => {\n warnUnsupported(ModelType.TEXT_TO_SPEECH);\n return new Uint8Array();\n },\n [ModelType.IMAGE]: async (): Promise<ImageGenerationResult[]> => {\n warnUnsupported(ModelType.IMAGE);\n return [];\n },\n },\n};\n\nexport default localAiPlugin;\n"
5
+ "import type {\n GenerateTextParams,\n IAgentRuntime,\n ImageDescriptionParams,\n Plugin,\n TextEmbeddingParams,\n} from \"@elizaos/core\";\nimport { logger, ModelType } from \"@elizaos/core\";\n\n// Inline types for browser compatibility (avoid import resolution issues)\ntype ImageDescriptionResult = {\n title: string;\n description: string;\n};\n\ntype ImageGenerationResult = {\n url: string;\n};\n\nconst pluginName = \"local-ai\";\nconst unsupportedMessage =\n \"Local AI is not supported in browsers. Use a server proxy or switch providers.\";\n\nconst warnUnsupported = (modelType: string): void => {\n logger.warn(`[plugin-${pluginName}] ${modelType} is not available in browsers.`);\n};\n\nconst unsupportedText = (modelType: string, params?: GenerateTextParams): string => {\n warnUnsupported(modelType);\n if (params && (params.tools || params.responseSchema || params.toolChoice)) {\n throw new Error(\n `[plugin-${pluginName}] Tool calling and structured output require the Node runtime. ` +\n \"Browsers cannot execute llama.cpp directly switch providers or proxy through a server.\"\n );\n }\n return unsupportedMessage;\n};\n\nconst unsupportedImageDescription = (modelType: string): ImageDescriptionResult => {\n warnUnsupported(modelType);\n return {\n title: \"Unsupported\",\n description: unsupportedMessage,\n };\n};\n\nexport const localAiPlugin: Plugin = {\n name: pluginName,\n description: \"Local AI plugin (browser stub; use a server proxy)\",\n async init(_config, _runtime: IAgentRuntime): Promise<void> {\n logger.warn(\n `[plugin-${pluginName}] This plugin is not supported directly in browsers. 
Use a server proxy.`\n );\n },\n models: {\n [ModelType.TEXT_SMALL]: async (\n _runtime: IAgentRuntime,\n params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_SMALL, params),\n [ModelType.TEXT_LARGE]: async (\n _runtime: IAgentRuntime,\n params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_LARGE, params),\n [ModelType.TEXT_REASONING_SMALL]: async (\n _runtime: IAgentRuntime,\n _params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_REASONING_SMALL),\n [ModelType.TEXT_REASONING_LARGE]: async (\n _runtime: IAgentRuntime,\n _params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_REASONING_LARGE),\n [ModelType.TEXT_COMPLETION]: async (\n _runtime: IAgentRuntime,\n _params: GenerateTextParams\n ): Promise<string> => unsupportedText(ModelType.TEXT_COMPLETION),\n [ModelType.TEXT_EMBEDDING]: async (\n _runtime: IAgentRuntime,\n _params: TextEmbeddingParams | string | null\n ): Promise<number[]> => {\n warnUnsupported(ModelType.TEXT_EMBEDDING);\n return new Array(1024).fill(0);\n },\n [ModelType.TEXT_TOKENIZER_ENCODE]: async (): Promise<number[]> => {\n warnUnsupported(ModelType.TEXT_TOKENIZER_ENCODE);\n return [];\n },\n [ModelType.TEXT_TOKENIZER_DECODE]: async (): Promise<string> => {\n warnUnsupported(ModelType.TEXT_TOKENIZER_DECODE);\n return \"\";\n },\n [ModelType.IMAGE_DESCRIPTION]: async (\n _runtime: IAgentRuntime,\n _params: ImageDescriptionParams | string\n ): Promise<ImageDescriptionResult> => unsupportedImageDescription(ModelType.IMAGE_DESCRIPTION),\n [ModelType.TRANSCRIPTION]: async (): Promise<string> =>\n unsupportedText(ModelType.TRANSCRIPTION),\n [ModelType.TEXT_TO_SPEECH]: async (): Promise<Uint8Array> => {\n warnUnsupported(ModelType.TEXT_TO_SPEECH);\n return new Uint8Array();\n },\n [ModelType.IMAGE]: async (): Promise<ImageGenerationResult[]> => {\n warnUnsupported(ModelType.IMAGE);\n return [];\n },\n },\n};\n\nexport default 
localAiPlugin;\n"
6
6
  ],
7
- "mappings": ";AAQA;AAYA,IAAM,aAAa;AACnB,IAAM,qBACJ;AAEF,IAAM,kBAAkB,CAAC,cAA4B;AAAA,EACnD,OAAO,KAAK,WAAW,eAAe,yCAAyC;AAAA;AAGjF,IAAM,kBAAkB,CAAC,cAA8B;AAAA,EACrD,gBAAgB,SAAS;AAAA,EACzB,OAAO;AAAA;AAGT,IAAM,oBAAoB,CAAC,cAA8C;AAAA,EACvE,gBAAgB,SAAS;AAAA,EACzB,OAAO,EAAE,OAAO,mBAAmB;AAAA;AAGrC,IAAM,8BAA8B,CAAC,cAA8C;AAAA,EACjF,gBAAgB,SAAS;AAAA,EACzB,OAAO;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA;AAGK,IAAM,gBAAwB;AAAA,EACnC,MAAM;AAAA,EACN,aAAa;AAAA,OACP,KAAI,CAAC,SAAS,UAAwC;AAAA,IAC1D,OAAO,KACL,WAAW,oFACb;AAAA;AAAA,EAEF,QAAQ;AAAA,KACL,UAAU,aAAa,OACtB,UACA,YACoB,gBAAgB,UAAU,UAAU;AAAA,KACzD,UAAU,aAAa,OACtB,UACA,YACoB,gBAAgB,UAAU,UAAU;AAAA,KACzD,UAAU,uBAAuB,OAChC,UACA,YACoB,gBAAgB,UAAU,oBAAoB;AAAA,KACnE,UAAU,uBAAuB,OAChC,UACA,YACoB,gBAAgB,UAAU,oBAAoB;AAAA,KACnE,UAAU,kBAAkB,OAC3B,UACA,YACoB,gBAAgB,UAAU,eAAe;AAAA,KAC9D,UAAU,iBAAiB,OAC1B,UACA,YACsB;AAAA,MACtB,gBAAgB,UAAU,cAAc;AAAA,MACxC,OAAO,IAAI,MAAM,GAAG,EAAE,KAAK,CAAC;AAAA;AAAA,KAE7B,UAAU,wBAAwB,YAA+B;AAAA,MAChE,gBAAgB,UAAU,qBAAqB;AAAA,MAC/C,OAAO,CAAC;AAAA;AAAA,KAET,UAAU,wBAAwB,YAA6B;AAAA,MAC9D,gBAAgB,UAAU,qBAAqB;AAAA,MAC/C,OAAO;AAAA;AAAA,KAER,UAAU,eAAe,OACxB,UACA,YACoC,kBAAkB,UAAU,YAAY;AAAA,KAC7E,UAAU,eAAe,OACxB,UACA,YACoC,kBAAkB,UAAU,YAAY;AAAA,KAC7E,UAAU,oBAAoB,OAC7B,UACA,YACoC,4BAA4B,UAAU,iBAAiB;AAAA,KAC5F,UAAU,gBAAgB,YACzB,gBAAgB,UAAU,aAAa;AAAA,KACxC,UAAU,iBAAiB,YAAiC;AAAA,MAC3D,gBAAgB,UAAU,cAAc;AAAA,MACxC,OAAO,IAAI;AAAA;AAAA,KAEZ,UAAU,QAAQ,YAA8C;AAAA,MAC/D,gBAAgB,UAAU,KAAK;AAAA,MAC/B,OAAO,CAAC;AAAA;AAAA,EAEZ;AACF;AAEA,IAAe;",
8
- "debugId": "23BEA74AC5595BB664756E2164756E21",
7
+ "mappings": ";AAOA;AAYA,IAAM,aAAa;AACnB,IAAM,qBACJ;AAEF,IAAM,kBAAkB,CAAC,cAA4B;AAAA,EACnD,OAAO,KAAK,WAAW,eAAe,yCAAyC;AAAA;AAGjF,IAAM,kBAAkB,CAAC,WAAmB,WAAwC;AAAA,EAClF,gBAAgB,SAAS;AAAA,EACzB,IAAI,WAAW,OAAO,SAAS,OAAO,kBAAkB,OAAO,aAAa;AAAA,IAC1E,MAAM,IAAI,MACR,WAAW,8EACT,0FACJ;AAAA,EACF;AAAA,EACA,OAAO;AAAA;AAGT,IAAM,8BAA8B,CAAC,cAA8C;AAAA,EACjF,gBAAgB,SAAS;AAAA,EACzB,OAAO;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA;AAGK,IAAM,gBAAwB;AAAA,EACnC,MAAM;AAAA,EACN,aAAa;AAAA,OACP,KAAI,CAAC,SAAS,UAAwC;AAAA,IAC1D,OAAO,KACL,WAAW,oFACb;AAAA;AAAA,EAEF,QAAQ;AAAA,KACL,UAAU,aAAa,OACtB,UACA,WACoB,gBAAgB,UAAU,YAAY,MAAM;AAAA,KACjE,UAAU,aAAa,OACtB,UACA,WACoB,gBAAgB,UAAU,YAAY,MAAM;AAAA,KACjE,UAAU,uBAAuB,OAChC,UACA,YACoB,gBAAgB,UAAU,oBAAoB;AAAA,KACnE,UAAU,uBAAuB,OAChC,UACA,YACoB,gBAAgB,UAAU,oBAAoB;AAAA,KACnE,UAAU,kBAAkB,OAC3B,UACA,YACoB,gBAAgB,UAAU,eAAe;AAAA,KAC9D,UAAU,iBAAiB,OAC1B,UACA,YACsB;AAAA,MACtB,gBAAgB,UAAU,cAAc;AAAA,MACxC,OAAO,IAAI,MAAM,IAAI,EAAE,KAAK,CAAC;AAAA;AAAA,KAE9B,UAAU,wBAAwB,YAA+B;AAAA,MAChE,gBAAgB,UAAU,qBAAqB;AAAA,MAC/C,OAAO,CAAC;AAAA;AAAA,KAET,UAAU,wBAAwB,YAA6B;AAAA,MAC9D,gBAAgB,UAAU,qBAAqB;AAAA,MAC/C,OAAO;AAAA;AAAA,KAER,UAAU,oBAAoB,OAC7B,UACA,YACoC,4BAA4B,UAAU,iBAAiB;AAAA,KAC5F,UAAU,gBAAgB,YACzB,gBAAgB,UAAU,aAAa;AAAA,KACxC,UAAU,iBAAiB,YAAiC;AAAA,MAC3D,gBAAgB,UAAU,cAAc;AAAA,MACxC,OAAO,IAAI;AAAA;AAAA,KAEZ,UAAU,QAAQ,YAA8C;AAAA,MAC/D,gBAAgB,UAAU,KAAK;AAAA,MAC/B,OAAO,CAAC;AAAA;AAAA,EAEZ;AACF;AAEA,IAAe;",
8
+ "debugId": "7DCC0F22BB4D518664756E2164756E21",
9
9
  "names": []
10
10
  }
@@ -1 +1 @@
1
- {"version":3,"file":"build.d.ts","sourceRoot":"","sources":["../build.ts"],"names":[],"mappings":";AAEA,QAAA,MAAM,YAAY,UASjB,CAAC;AAEF,iBAAe,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAsHpC"}
1
+ {"version":3,"file":"build.d.ts","sourceRoot":"","sources":["../build.ts"],"names":[],"mappings":";AAEA,QAAA,MAAM,YAAY,UAQjB,CAAC;AAEF,iBAAe,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAqFpC"}
@@ -1,2 +1,2 @@
1
- export * from '../index';
2
- export { default } from '../index';
1
+ export * from "../index";
2
+ export { default } from "../index";