nuxt-edge-ai 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +141 -0
- package/dist/module.d.mts +23 -0
- package/dist/module.json +9 -0
- package/dist/module.mjs +121 -0
- package/dist/runtime/composables/useEdgeAI.d.ts +9 -0
- package/dist/runtime/composables/useEdgeAI.js +24 -0
- package/dist/runtime/plugin.d.ts +21 -0
- package/dist/runtime/plugin.js +31 -0
- package/dist/runtime/server/api/generate.post.d.ts +2 -0
- package/dist/runtime/server/api/generate.post.js +19 -0
- package/dist/runtime/server/api/health.get.d.ts +2 -0
- package/dist/runtime/server/api/health.get.js +6 -0
- package/dist/runtime/server/api/pull.post.d.ts +2 -0
- package/dist/runtime/server/api/pull.post.js +6 -0
- package/dist/runtime/server/tsconfig.json +3 -0
- package/dist/runtime/server/utils/edge-ai-engine.d.ts +4 -0
- package/dist/runtime/server/utils/edge-ai-engine.js +270 -0
- package/dist/runtime/server/vendor/huggingface/transformers.web.d.mts +857 -0
- package/dist/runtime/server/vendor/huggingface/transformers.web.js +38740 -0
- package/dist/runtime/server/vendor/licenses/huggingface-transformers.LICENSE +202 -0
- package/dist/runtime/server/vendor/onnxruntime/onnxruntime-common.d.mts +3 -0
- package/dist/runtime/server/vendor/onnxruntime/onnxruntime-common.js +3 -0
- package/dist/runtime/server/vendor/onnxruntime/onnxruntime-web.d.mts +3 -0
- package/dist/runtime/server/vendor/onnxruntime/onnxruntime-web.js +3 -0
- package/dist/runtime/server/vendor/onnxruntime/ort-wasm-simd-threaded.d.mts +2 -0
- package/dist/runtime/server/vendor/onnxruntime/ort-wasm-simd-threaded.js +59 -0
- package/dist/runtime/server/vendor/onnxruntime/ort-wasm-simd-threaded.wasm +0 -0
- package/dist/runtime/server/vendor/onnxruntime/ort.wasm.min.d.mts +11 -0
- package/dist/runtime/server/vendor/onnxruntime/ort.wasm.min.js +7 -0
- package/dist/runtime/types.d.ts +74 -0
- package/dist/runtime/types.js +0 -0
- package/dist/types.d.mts +5 -0
- package/package.json +74 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
# nuxt-edge-ai
|
|
2
|
+
|
|
3
|
+
`nuxt-edge-ai` is a Nuxt module for building local-first AI applications with a real server-side WASM inference runtime.
|
|
4
|
+
|
|
5
|
+
It ships:
|
|
6
|
+
|
|
7
|
+
- a Nuxt module install surface
|
|
8
|
+
- Nitro API routes for health, model pull, and generation
|
|
9
|
+
- a client composable for app-side usage
|
|
10
|
+
- a vendored `transformers.js` + `onnxruntime-web` runtime inside the package
|
|
11
|
+
- no Ollama, no `llama.cpp`, no Rust/C++/native runtime dependency for consumers
|
|
12
|
+
|
|
13
|
+
The model weights are not bundled. Users either point the module at a local model directory or allow it to download and cache the model on first run.
|
|
14
|
+
|
|
15
|
+
## Why this exists
|
|
16
|
+
|
|
17
|
+
The goal is to make `nuxt-edge-ai` a credible, publishable Nuxt module:
|
|
18
|
+
|
|
19
|
+
- installable in a regular Nuxt app
|
|
20
|
+
- able to run a real local model
|
|
21
|
+
- packaged as JS/TS + WASM only
|
|
22
|
+
- suitable as a strong portfolio / resume project
|
|
23
|
+
|
|
24
|
+
## Current runtime
|
|
25
|
+
|
|
26
|
+
Current real runtime path:
|
|
27
|
+
|
|
28
|
+
- `transformers.js` web build
|
|
29
|
+
- `onnxruntime-web` WASM backend
|
|
30
|
+
- server-side execution through Nitro
|
|
31
|
+
|
|
32
|
+
Recommended first demo model:
|
|
33
|
+
|
|
34
|
+
- `Xenova/distilgpt2` for quick validation
|
|
35
|
+
|
|
36
|
+
Recommended next upgrade target:
|
|
37
|
+
|
|
38
|
+
- `onnx-community/Qwen2.5-0.5B-Instruct-ONNX`
|
|
39
|
+
|
|
40
|
+
## Install
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
pnpm add nuxt-edge-ai
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
```ts
|
|
47
|
+
// nuxt.config.ts
|
|
48
|
+
export default defineNuxtConfig({
|
|
49
|
+
modules: ['nuxt-edge-ai'],
|
|
50
|
+
edgeAI: {
|
|
51
|
+
runtime: 'transformers-wasm',
|
|
52
|
+
cacheDir: './.cache/nuxt-edge-ai',
|
|
53
|
+
model: {
|
|
54
|
+
id: 'Xenova/distilgpt2',
|
|
55
|
+
task: 'text-generation',
|
|
56
|
+
allowRemote: true,
|
|
57
|
+
dtype: 'q8',
|
|
58
|
+
generation: {
|
|
59
|
+
maxNewTokens: 96,
|
|
60
|
+
temperature: 0.7,
|
|
61
|
+
topP: 0.9,
|
|
62
|
+
doSample: true,
|
|
63
|
+
repetitionPenalty: 1.05,
|
|
64
|
+
},
|
|
65
|
+
},
|
|
66
|
+
},
|
|
67
|
+
})
|
|
68
|
+
```
|
|
69
|
+
|
|
70
|
+
```vue
|
|
71
|
+
<script setup lang="ts">
|
|
72
|
+
const edgeAI = useEdgeAI()
|
|
73
|
+
|
|
74
|
+
await edgeAI.pull()
|
|
75
|
+
|
|
76
|
+
const result = await edgeAI.generate({
|
|
77
|
+
prompt: 'Write a pitch for a local-first Nuxt AI module.',
|
|
78
|
+
})
|
|
79
|
+
</script>
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
## Consumer runtime guarantees
|
|
83
|
+
|
|
84
|
+
Consumers do not need to install:
|
|
85
|
+
|
|
86
|
+
- Ollama
|
|
87
|
+
- Rust
|
|
88
|
+
- C++
|
|
89
|
+
- Python
|
|
90
|
+
- `llama.cpp`
|
|
91
|
+
- extra runtime npm packages beyond this module
|
|
92
|
+
|
|
93
|
+
What consumers do need:
|
|
94
|
+
|
|
95
|
+
- a Node/Nitro server runtime
|
|
96
|
+
- a model path or permission to download a compatible model
|
|
97
|
+
|
|
98
|
+
## API surface
|
|
99
|
+
|
|
100
|
+
- `GET /api/edge-ai/health`
|
|
101
|
+
- `POST /api/edge-ai/pull`
|
|
102
|
+
- `POST /api/edge-ai/generate`
|
|
103
|
+
- `useEdgeAI().health()`
|
|
104
|
+
- `useEdgeAI().pull()`
|
|
105
|
+
- `useEdgeAI().generate()`
|
|
106
|
+
|
|
107
|
+
## Local development
|
|
108
|
+
|
|
109
|
+
```bash
|
|
110
|
+
pnpm install
|
|
111
|
+
pnpm dev
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
Useful commands:
|
|
115
|
+
|
|
116
|
+
```bash
|
|
117
|
+
pnpm vendor:runtime
|
|
118
|
+
pnpm lint
|
|
119
|
+
pnpm test
|
|
120
|
+
pnpm test:types
|
|
121
|
+
pnpm prepack
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
## Docs
|
|
125
|
+
|
|
126
|
+
See [`docs/index.md`](./docs/index.md) for the project docs tree.
|
|
127
|
+
|
|
128
|
+
## Repository shape
|
|
129
|
+
|
|
130
|
+
This repository follows a Nuxt modules-style layout:
|
|
131
|
+
|
|
132
|
+
- `src/module.ts`: module entry and runtime config wiring
|
|
133
|
+
- `src/runtime/`: composables, plugin, and Nitro runtime code
|
|
134
|
+
- `playground/`: interactive demo app
|
|
135
|
+
- `test/fixtures/`: module consumer fixtures
|
|
136
|
+
- `docs/`: module documentation
|
|
137
|
+
- `scripts/vendor-runtime.mjs`: vendored runtime generation
|
|
138
|
+
|
|
139
|
+
## Status
|
|
140
|
+
|
|
141
|
+
This is an MVP, but it now runs a real model instead of mock text when `runtime: 'transformers-wasm'` is enabled.
|
|
import * as _nuxt_schema from '@nuxt/schema';
import { EdgeAIGenerationOptions } from '../dist/runtime/types.js';
export { EdgeAIGenerateRequest, EdgeAIGenerateResponse, EdgeAIHealthResponse, EdgeAIPullResponse } from '../dist/runtime/types.js';

/**
 * Model selection and loading options for the edge-AI runtime.
 */
interface EdgeAIModelOptions {
    /** Model identifier (e.g. `Xenova/distilgpt2`). */
    id: string;
    /** Only text generation is supported. */
    task: 'text-generation';
    /** Optional local model directory; used instead of a remote download. */
    localPath?: string;
    /** Whether the model may be downloaded and cached on first run. */
    allowRemote: boolean;
    /** Quantization/precision hint (e.g. 'q8'). NOTE(review): free-form string — confirm the accepted values against the engine. */
    dtype?: string;
    /** Default generation parameters applied to every request. */
    generation: EdgeAIGenerationOptions;
}
/**
 * Options accepted under the `edgeAI` key in `nuxt.config`.
 */
interface ModuleOptions {
    /** Base path the module's API routes are mounted under. */
    routeBase: string;
    /** Inference backend: real WASM runtime or a mock. */
    runtime: 'transformers-wasm' | 'mock';
    /** Directory where downloaded model artifacts are cached. */
    cacheDir: string;
    /** When true, the model is loaded eagerly at startup. */
    warmup: boolean;
    model: EdgeAIModelOptions;
}
declare const _default: _nuxt_schema.NuxtModule<ModuleOptions, ModuleOptions, false>;

export { _default as default };
export type { EdgeAIModelOptions, ModuleOptions };
package/dist/module.json
ADDED
package/dist/module.mjs
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
import { isAbsolute, resolve } from 'node:path';
|
|
2
|
+
import { defineNuxtModule, createResolver, addImportsDir, addPlugin, addServerHandler, addTypeTemplate } from '@nuxt/kit';
|
|
3
|
+
|
|
4
|
+
/**
 * Resolve `value` against `rootDir` unless it is already an absolute path.
 * Empty or missing values yield `undefined`.
 */
function resolveMaybeAbsolute(rootDir, value) {
  if (!value) {
    return undefined;
  }
  if (isAbsolute(value)) {
    return value;
  }
  return resolve(rootDir, value);
}
|
|
10
|
+
/**
 * Trim surrounding whitespace and strip any trailing slashes from a route
 * base. Falls back to the default base when the result is empty.
 */
function normalizeRouteBase(routeBase) {
  const trimmed = routeBase.trim().replace(/\/+$/, "");
  if (trimmed) {
    return trimmed;
  }
  return "/api/edge-ai";
}
|
|
14
|
+
/**
 * `nuxt-edge-ai` module entry.
 *
 * Wires user options into server (`runtimeConfig.edgeAI`) and public
 * (`runtimeConfig.public.edgeAI`) runtime config, registers the composables
 * directory and the client plugin, mounts the health/pull/generate Nitro
 * routes under `routeBase`, and emits a type template augmenting `NuxtApp`
 * and `vue` with the injected `$edgeAI` service.
 */
const module$1 = defineNuxtModule({
  meta: {
    name: "nuxt-edge-ai",
    configKey: "edgeAI"
  },
  // Defaults match the README example: WASM runtime + distilgpt2 demo model.
  defaults: {
    routeBase: "/api/edge-ai",
    runtime: "transformers-wasm",
    cacheDir: "./.cache/nuxt-edge-ai",
    warmup: false,
    model: {
      id: "Xenova/distilgpt2",
      task: "text-generation",
      allowRemote: true,
      dtype: "q8",
      generation: {
        maxNewTokens: 96,
        temperature: 0.7,
        topP: 0.9,
        doSample: true,
        repetitionPenalty: 1.05
      }
    }
  },
  setup(options, nuxt) {
    const resolver = createResolver(import.meta.url);
    // Strip trailing slashes; an empty base falls back to "/api/edge-ai".
    const routeBase = normalizeRouteBase(options.routeBase);
    // Relative paths are resolved against the consuming app's root dir.
    const cacheDir = resolveMaybeAbsolute(nuxt.options.rootDir, options.cacheDir) ?? options.cacheDir;
    const modelLocalPath = resolveMaybeAbsolute(nuxt.options.rootDir, options.model.localPath);
    const runtimeConfig = nuxt.options.runtimeConfig;
    // Server-only config: consumed by the Nitro handlers / inference engine.
    runtimeConfig.edgeAI = {
      routeBase,
      runtime: options.runtime,
      cacheDir,
      warmup: options.warmup,
      model: {
        id: options.model.id,
        task: options.model.task,
        localPath: modelLocalPath,
        allowRemote: options.model.allowRemote,
        dtype: options.model.dtype,
        generation: {
          maxNewTokens: options.model.generation.maxNewTokens,
          temperature: options.model.generation.temperature,
          topP: options.model.generation.topP,
          doSample: options.model.generation.doSample,
          repetitionPenalty: options.model.generation.repetitionPenalty
        }
      }
    };
    // Client-visible subset only: route base, runtime kind, default model id.
    runtimeConfig.public.edgeAI = {
      routeBase,
      runtime: options.runtime,
      defaultModel: options.model.id
    };
    // Auto-import `useEdgeAI` and install the `$edgeAI` provider plugin.
    addImportsDir(resolver.resolve("./runtime/composables"));
    addPlugin(resolver.resolve("./runtime/plugin"));
    addServerHandler({
      route: `${routeBase}/health`,
      method: "get",
      handler: resolver.resolve("./runtime/server/api/health.get")
    });
    addServerHandler({
      route: `${routeBase}/pull`,
      method: "post",
      handler: resolver.resolve("./runtime/server/api/pull.post")
    });
    addServerHandler({
      route: `${routeBase}/generate`,
      method: "post",
      handler: resolver.resolve("./runtime/server/api/generate.post")
    });
    // Generated d.ts that types the injected `$edgeAI` on NuxtApp and in
    // Vue templates. The template body is emitted verbatim into the app.
    addTypeTemplate({
      filename: "types/nuxt-edge-ai.d.ts",
      getContents: () => `import type { NuxtApp } from '#app'
import type {
  EdgeAIPullResponse,
  EdgeAIGenerateRequest,
  EdgeAIGenerateResponse,
  EdgeAIHealthResponse
} from 'nuxt-edge-ai'

declare module '#app' {
  interface NuxtApp {
    $edgeAI: {
      routeBase: string
      runtime: 'transformers-wasm' | 'mock'
      defaultModel: string
      pull: () => Promise<EdgeAIPullResponse>
      generate: (payload: EdgeAIGenerateRequest) => Promise<EdgeAIGenerateResponse>
      health: () => Promise<EdgeAIHealthResponse>
    }
  }
}

declare module 'vue' {
  interface ComponentCustomProperties {
    $edgeAI: NuxtApp['$edgeAI']
  }
}

export {}
`
    });
  }
});

export { module$1 as default };
|
|
import type { EdgeAIGenerateRequest, EdgeAIGenerateResponse, EdgeAIHealthResponse, EdgeAIPullResponse } from '../types.js';
/**
 * Client composable exposing the module's public config (route base, default
 * model id, runtime kind) plus thin wrappers around the pull/generate/health
 * API routes.
 */
export declare function useEdgeAI(): {
    routeBase: string;
    defaultModel: string;
    runtime: "transformers-wasm" | "mock";
    pull(): Promise<EdgeAIPullResponse>;
    generate(payload: EdgeAIGenerateRequest): Promise<EdgeAIGenerateResponse>;
    health(): Promise<EdgeAIHealthResponse>;
};
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { useNuxtApp, useRuntimeConfig } from "#app";
|
|
2
|
+
/**
 * Composable wrapper over the injected `$edgeAI` plugin service.
 * Exposes the public module config and the pull/generate/health helpers.
 */
export function useEdgeAI() {
  const { public: { edgeAI: publicConfig } } = useRuntimeConfig();
  const service = useNuxtApp().$edgeAI;
  return {
    routeBase: publicConfig.routeBase,
    defaultModel: publicConfig.defaultModel,
    runtime: publicConfig.runtime,
    pull: () => service.pull(),
    generate: (payload) => service.generate(payload),
    health: () => service.health()
  };
}
|
|
import type { EdgeAIGenerateRequest, EdgeAIGenerateResponse, EdgeAIHealthResponse, EdgeAIPullResponse } from './types.js';
/**
 * Nuxt plugin that provides `$edgeAI`: the public module config plus
 * `$fetch`-backed helpers for the pull/generate/health API routes.
 */
declare const _default: import("nuxt/app").Plugin<{
    edgeAI: {
        routeBase: string;
        runtime: "transformers-wasm" | "mock";
        defaultModel: string;
        pull(): Promise<EdgeAIPullResponse>;
        generate(payload: EdgeAIGenerateRequest): Promise<EdgeAIGenerateResponse>;
        health(): Promise<EdgeAIHealthResponse>;
    };
}> & import("nuxt/app").ObjectPlugin<{
    edgeAI: {
        routeBase: string;
        runtime: "transformers-wasm" | "mock";
        defaultModel: string;
        pull(): Promise<EdgeAIPullResponse>;
        generate(payload: EdgeAIGenerateRequest): Promise<EdgeAIGenerateResponse>;
        health(): Promise<EdgeAIHealthResponse>;
    };
}>;
export default _default;
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { defineNuxtPlugin, useRuntimeConfig } from "#app";
|
|
2
|
+
/**
 * Provides `$edgeAI` on the Nuxt app: the public module config plus
 * `$fetch` helpers hitting the module's API routes under `routeBase`.
 */
export default defineNuxtPlugin(() => {
  const { routeBase, runtime, defaultModel } = useRuntimeConfig().public.edgeAI;
  return {
    provide: {
      edgeAI: {
        routeBase,
        runtime,
        defaultModel,
        pull: () => $fetch(`${routeBase}/pull`, { method: "POST" }),
        generate: (payload) =>
          $fetch(`${routeBase}/generate`, { method: "POST", body: payload }),
        health: () => $fetch(`${routeBase}/health`)
      }
    }
  };
});
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { createError, defineEventHandler, readBody } from "h3";
|
|
2
|
+
import { useRuntimeConfig } from "#imports";
|
|
3
|
+
import { generateEdgeAIText } from "../utils/edge-ai-engine.js";
|
|
4
|
+
/**
 * POST {routeBase}/generate — validates the request prompt and delegates to
 * the server-side engine with the module's server runtime config.
 *
 * Responds 400 when the prompt is missing, empty, or not a string.
 */
export default defineEventHandler(async (event) => {
  const body = await readBody(event);
  // Fix: the original called `body?.prompt?.trim()` unconditionally, so a
  // non-string prompt (e.g. a number) threw a TypeError -> 500 instead of
  // the intended 400 validation error.
  const rawPrompt = body?.prompt;
  const prompt = typeof rawPrompt === "string" ? rawPrompt.trim() : "";
  if (!prompt) {
    throw createError({
      statusCode: 400,
      statusMessage: "Prompt is required."
    });
  }
  const config = useRuntimeConfig().edgeAI;
  return generateEdgeAIText(config, {
    prompt,
    // Optional per-request overrides; undefined falls back to module config.
    model: body.model,
    generation: body.generation
  });
});
|
|
import type { EdgeAIGenerateRequest, EdgeAIGenerateResponse, EdgeAIHealthResponse, EdgeAIServerRuntimeConfig, EdgeAIPullResponse } from '../../types.js';
/** Report engine/model status (consumed by the health route). */
export declare function getEdgeAIHealth(config: EdgeAIServerRuntimeConfig): Promise<EdgeAIPullResponse extends never ? never : EdgeAIHealthResponse>;
/** Ensure the configured model is available locally (consumed by the pull route). */
export declare function pullEdgeAIModel(config: EdgeAIServerRuntimeConfig): Promise<EdgeAIPullResponse>;
/** Run text generation for one request (consumed by the generate route). */
export declare function generateEdgeAIText(config: EdgeAIServerRuntimeConfig, input: EdgeAIGenerateRequest): Promise<EdgeAIGenerateResponse>;
|