@eclipse-lyra/extension-webllm 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/i18n.json.d.ts +13 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +20 -0
- package/dist/index.js.map +1 -0
- package/dist/webllmservice-BEqHp69F.js +61 -0
- package/dist/webllmservice-BEqHp69F.js.map +1 -0
- package/dist/webllmservice.d.ts +3 -0
- package/dist/webllmservice.d.ts.map +1 -0
- package/package.json +30 -0
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
// Generated type declaration for the bundled i18n resource (dist/i18n.json).
// Shape: an i18n namespace plus one literal message map per supported
// locale (en, de); keys are the message ids used by index.js via i18nLazy.
declare const _default: {
    "namespace": "extensions",
    "en": {
        "EXT_WEBLLM_NAME": "WebLLM",
        "EXT_WEBLLM_DESC": "In-Browser LLM inference"
    },
    "de": {
        "EXT_WEBLLM_NAME": "WebLLM",
        "EXT_WEBLLM_DESC": "LLM-Inferenz im Browser"
    }
};

export default _default;
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":""}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { contributionRegistry, SYSTEM_LANGUAGE_BUNDLES, i18nLazy, extensionRegistry } from "@eclipse-lyra/core";
import pkg from "../package.json";

// Contribute this extension's translations to the shared "extensions"
// namespace before any message lookup can happen.
const languageBundle = {
  namespace: "extensions",
  en: {
    EXT_WEBLLM_NAME: "WebLLM",
    EXT_WEBLLM_DESC: "In-Browser LLM inference"
  },
  de: {
    EXT_WEBLLM_NAME: "WebLLM",
    EXT_WEBLLM_DESC: "LLM-Inferenz im Browser"
  }
};
contributionRegistry.registerContribution(SYSTEM_LANGUAGE_BUNDLES, languageBundle);

// Lazy translator for the "extensions" namespace: messages resolve at
// render time, after the bundle registered above is available.
const translate = i18nLazy("extensions");

// Announce the WebLLM extension. The actual service implementation lives
// in a separate chunk and is loaded on demand via the loader callback.
extensionRegistry.registerExtension({
  id: pkg.name,
  name: translate("EXT_WEBLLM_NAME"),
  description: translate("EXT_WEBLLM_DESC"),
  loader: () => import("./webllmservice-BEqHp69F.js"),
  icon: "robot"
});
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sources":["../src/index.ts"],"sourcesContent":["import { extensionRegistry, i18nLazy, contributionRegistry, SYSTEM_LANGUAGE_BUNDLES } from '@eclipse-lyra/core';\nimport bundle from './i18n.json';\nimport pkg from '../package.json';\n\ncontributionRegistry.registerContribution(SYSTEM_LANGUAGE_BUNDLES, bundle as any);\n\nconst t = i18nLazy('extensions');\n\nextensionRegistry.registerExtension({\n id: pkg.name,\n name: t('EXT_WEBLLM_NAME'),\n description: t('EXT_WEBLLM_DESC'),\n loader: () => import(\"./webllmservice\"),\n icon: \"robot\",\n \n \n});\n"],"names":[],"mappings":";;;;;;;;;;AAIA,qBAAqB,qBAAqB,yBAAyB,MAAa;AAEhF,MAAM,IAAI,SAAS,YAAY;AAE/B,kBAAkB,kBAAkB;AAAA,EAClC,IAAI,IAAI;AAAA,EACR,MAAM,EAAE,iBAAiB;AAAA,EACzB,aAAa,EAAE,iBAAiB;AAAA,EAChC,QAAQ,MAAM,OAAO,6BAAiB;AAAA,EACtC,MAAM;AAGR,CAAC;"}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import { MLCEngine } from "@mlc-ai/web-llm";
|
|
2
|
+
import { aiService } from "@eclipse-lyra/extension-ai-system/api";
|
|
3
|
+
// Chat provider that runs LLM inference in the browser via @mlc-ai/web-llm.
// Registered with aiService as a streaming fetcher (see module default export).
class WebLLMProvider {
  constructor() {
    // Provider id matched against the configured chat provider's name.
    this.name = "webllm";
  }
  // True when the given chat provider configuration targets this backend.
  canHandle(chatProvider) {
    return chatProvider.name === "webllm";
  }
  // Lazily create the MLC engine and load `model`.
  // Fixes vs. previous version:
  //  - the in-flight load is memoized, so concurrent stream()/complete()
  //    calls share one engine instead of racing past the `!this.engine`
  //    check and constructing two engines;
  //  - this.engine is only assigned after reload() succeeds, so a failed
  //    model load no longer leaves a half-initialized engine behind, and
  //    clearing the memo on failure allows a later call to retry.
  // Note: like before, the first successful load wins — later calls with a
  // different `model` are ignored.
  async init(model, parameters) {
    if (this.engine) return;
    if (!this.initPromise) {
      this.initPromise = (async () => {
        const initProgressCallback = (progress) => {
          console.log("Model loading progress:", progress);
        };
        const engine = new MLCEngine({ initProgressCallback });
        await engine.reload(model, parameters);
        this.engine = engine;
      })().catch((err) => {
        this.initPromise = void 0;
        throw err;
      });
    }
    await this.initPromise;
  }
  // Yields the assistant reply as { type: "token", content } chunks followed
  // by a final { type: "done", content: "" }.
  // NOTE(review): this is pseudo-streaming — the full completion is awaited
  // first, then emitted character by character; true token streaming would
  // pass `stream: true` to create() and iterate the returned chunks.
  async *stream(params) {
    if (!this.engine) {
      // Optional-chain chatConfig so a missing config degrades to default
      // engine parameters instead of a TypeError.
      await this.init(params.model, params.chatConfig?.parameters);
    }
    // Shallow-copy messages so the engine cannot mutate caller state.
    const internalMessages = params.messages.map((message) => ({ ...message }));
    const result = await this.engine.chat.completions.create({
      messages: internalMessages
    });
    const message = result.choices[0].message;
    if (message.content) {
      for (const char of message.content) {
        yield {
          type: "token",
          content: char
        };
      }
    }
    yield {
      type: "done",
      content: ""
    };
  }
  // Single-shot completion; resolves to the assistant message object.
  async complete(params) {
    if (!this.engine) {
      await this.init(params.model, params.chatConfig?.parameters);
    }
    const internalMessages = params.messages.map((message) => ({ ...message }));
    const result = await this.engine.chat.completions.create({
      messages: internalMessages
    });
    return result.choices[0].message;
  }
}
|
|
55
|
+
// Activation hook returned by the extension loader: wires the WebLLM
// provider into the AI service as a streaming fetcher.
function webllmservice() {
  const provider = new WebLLMProvider();
  aiService.registerStreamingFetcher(provider);
}
export {
  webllmservice as default
};
//# sourceMappingURL=webllmservice-BEqHp69F.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"webllmservice-BEqHp69F.js","sources":["../src/webllmservice.ts"],"sourcesContent":["import {ChatCompletionMessageParam, MLCEngine} from \"@mlc-ai/web-llm\";\nimport type {ChatMessage, ChatProvider, StreamChunk} from \"@eclipse-lyra/extension-ai-system/api\";\nimport type {IProvider, StreamingParams, CompletionParams} from \"@eclipse-lyra/extension-ai-system/api\";\nimport {aiService} from \"@eclipse-lyra/extension-ai-system/api\";\n\nclass WebLLMProvider implements IProvider {\n name = \"webllm\";\n private engine?: MLCEngine;\n\n canHandle(chatProvider: ChatProvider): boolean {\n return chatProvider.name === \"webllm\";\n }\n\n private async init(model: string, parameters?: any): Promise<void> {\n if (this.engine) return;\n \n const initProgressCallback = (progress: any) => {\n console.log(\"Model loading progress:\", progress);\n };\n\n this.engine = new MLCEngine({initProgressCallback});\n await this.engine.reload(model, parameters);\n }\n\n async *stream(params: StreamingParams): AsyncIterable<StreamChunk> {\n if (!this.engine) {\n await this.init(params.model, params.chatConfig.parameters);\n }\n \n const internalMessages = params.messages.map((message) => {\n return {...message} as ChatCompletionMessageParam;\n });\n \n const result = await this.engine!.chat.completions.create({\n messages: internalMessages\n });\n \n const message = result.choices[0].message as ChatMessage;\n \n if (message.content) {\n for (const char of message.content) {\n yield {\n type: 'token',\n content: char\n };\n }\n }\n \n yield {\n type: 'done',\n content: ''\n };\n }\n\n async complete(params: CompletionParams): Promise<ChatMessage> {\n if (!this.engine) {\n await this.init(params.model, params.chatConfig.parameters);\n }\n \n const internalMessages = params.messages.map((message) => {\n return {...message} as ChatCompletionMessageParam;\n });\n \n const result = await this.engine!.chat.completions.create({\n messages: internalMessages\n });\n \n return 
result.choices[0].message as ChatMessage;\n }\n}\n\nexport default () => {\n aiService.registerStreamingFetcher(new WebLLMProvider());\n}"],"names":["message"],"mappings":";;AAKA,MAAM,eAAoC;AAAA,EAA1C,cAAA;AACI,SAAA,OAAO;AAAA,EAAA;AAAA,EAGP,UAAU,cAAqC;AAC3C,WAAO,aAAa,SAAS;AAAA,EACjC;AAAA,EAEA,MAAc,KAAK,OAAe,YAAiC;AAC/D,QAAI,KAAK,OAAQ;AAEjB,UAAM,uBAAuB,CAAC,aAAkB;AAC5C,cAAQ,IAAI,2BAA2B,QAAQ;AAAA,IACnD;AAEA,SAAK,SAAS,IAAI,UAAU,EAAC,sBAAqB;AAClD,UAAM,KAAK,OAAO,OAAO,OAAO,UAAU;AAAA,EAC9C;AAAA,EAEA,OAAO,OAAO,QAAqD;AAC/D,QAAI,CAAC,KAAK,QAAQ;AACd,YAAM,KAAK,KAAK,OAAO,OAAO,OAAO,WAAW,UAAU;AAAA,IAC9D;AAEA,UAAM,mBAAmB,OAAO,SAAS,IAAI,CAACA,aAAY;AACtD,aAAO,EAAC,GAAGA,SAAAA;AAAAA,IACf,CAAC;AAED,UAAM,SAAS,MAAM,KAAK,OAAQ,KAAK,YAAY,OAAO;AAAA,MACtD,UAAU;AAAA,IAAA,CACb;AAED,UAAM,UAAU,OAAO,QAAQ,CAAC,EAAE;AAElC,QAAI,QAAQ,SAAS;AACjB,iBAAW,QAAQ,QAAQ,SAAS;AAChC,cAAM;AAAA,UACF,MAAM;AAAA,UACN,SAAS;AAAA,QAAA;AAAA,MAEjB;AAAA,IACJ;AAEA,UAAM;AAAA,MACF,MAAM;AAAA,MACN,SAAS;AAAA,IAAA;AAAA,EAEjB;AAAA,EAEA,MAAM,SAAS,QAAgD;AAC3D,QAAI,CAAC,KAAK,QAAQ;AACd,YAAM,KAAK,KAAK,OAAO,OAAO,OAAO,WAAW,UAAU;AAAA,IAC9D;AAEA,UAAM,mBAAmB,OAAO,SAAS,IAAI,CAAC,YAAY;AACtD,aAAO,EAAC,GAAG,QAAA;AAAA,IACf,CAAC;AAED,UAAM,SAAS,MAAM,KAAK,OAAQ,KAAK,YAAY,OAAO;AAAA,MACtD,UAAU;AAAA,IAAA,CACb;AAED,WAAO,OAAO,QAAQ,CAAC,EAAE;AAAA,EAC7B;AACJ;AAEA,MAAA,gBAAe,MAAM;AACjB,YAAU,yBAAyB,IAAI,gBAAgB;AAC3D;"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"webllmservice.d.ts","sourceRoot":"","sources":["../src/webllmservice.ts"],"names":[],"mappings":";AAuEA,wBAEC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@eclipse-lyra/extension-webllm",
|
|
3
|
+
"version": "0.0.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"main": "./dist/index.js",
|
|
6
|
+
"exports": {
|
|
7
|
+
".": {
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"import": "./dist/index.js"
|
|
10
|
+
}
|
|
11
|
+
},
|
|
12
|
+
"dependencies": {
|
|
13
|
+
"@eclipse-lyra/core": "*",
|
|
14
|
+
"@eclipse-lyra/extension-ai-system": "*",
|
|
15
|
+
"@mlc-ai/web-llm": "^0.2.79"
|
|
16
|
+
},
|
|
17
|
+
"devDependencies": {
|
|
18
|
+
"typescript": "^5.9.3",
|
|
19
|
+
"vite": "^7.1.12",
|
|
20
|
+
"vite-plugin-dts": "^4.5.4"
|
|
21
|
+
},
|
|
22
|
+
"module": "./dist/index.js",
|
|
23
|
+
"types": "./dist/index.d.ts",
|
|
24
|
+
"files": [
|
|
25
|
+
"dist"
|
|
26
|
+
],
|
|
27
|
+
"scripts": {
|
|
28
|
+
"build": "vite build"
|
|
29
|
+
}
|
|
30
|
+
}
|