@theia/ai-llamafile 1.46.0-next.241
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +57 -0
- package/lib/browser/llamafile-command-contribution.d.ts +21 -0
- package/lib/browser/llamafile-command-contribution.d.ts.map +1 -0
- package/lib/browser/llamafile-command-contribution.js +104 -0
- package/lib/browser/llamafile-command-contribution.js.map +1 -0
- package/lib/browser/llamafile-frontend-application-contribution.d.ts +18 -0
- package/lib/browser/llamafile-frontend-application-contribution.d.ts.map +1 -0
- package/lib/browser/llamafile-frontend-application-contribution.js +113 -0
- package/lib/browser/llamafile-frontend-application-contribution.js.map +1 -0
- package/lib/browser/llamafile-frontend-module.d.ts +4 -0
- package/lib/browser/llamafile-frontend-module.d.ts.map +1 -0
- package/lib/browser/llamafile-frontend-module.js +46 -0
- package/lib/browser/llamafile-frontend-module.js.map +1 -0
- package/lib/browser/llamafile-preferences.d.ts +7 -0
- package/lib/browser/llamafile-preferences.d.ts.map +1 -0
- package/lib/browser/llamafile-preferences.js +62 -0
- package/lib/browser/llamafile-preferences.js.map +1 -0
- package/lib/common/llamafile-language-model.d.ts +25 -0
- package/lib/common/llamafile-language-model.d.ts.map +1 -0
- package/lib/common/llamafile-language-model.js +122 -0
- package/lib/common/llamafile-language-model.js.map +1 -0
- package/lib/common/llamafile-manager.d.ts +29 -0
- package/lib/common/llamafile-manager.d.ts.map +1 -0
- package/lib/common/llamafile-manager.js +21 -0
- package/lib/common/llamafile-manager.js.map +1 -0
- package/lib/node/llamafile-backend-module.d.ts +4 -0
- package/lib/node/llamafile-backend-module.d.ts.map +1 -0
- package/lib/node/llamafile-backend-module.js +30 -0
- package/lib/node/llamafile-backend-module.js.map +1 -0
- package/lib/node/llamafile-manager-impl.d.ts +18 -0
- package/lib/node/llamafile-manager-impl.d.ts.map +1 -0
- package/lib/node/llamafile-manager-impl.js +123 -0
- package/lib/node/llamafile-manager-impl.js.map +1 -0
- package/lib/package.spec.d.ts +1 -0
- package/lib/package.spec.d.ts.map +1 -0
- package/lib/package.spec.js +26 -0
- package/lib/package.spec.js.map +1 -0
- package/package.json +51 -0
- package/src/browser/llamafile-command-contribution.ts +93 -0
- package/src/browser/llamafile-frontend-application-contribution.ts +126 -0
- package/src/browser/llamafile-frontend-module.ts +45 -0
- package/src/browser/llamafile-preferences.ts +62 -0
- package/src/common/llamafile-language-model.ts +126 -0
- package/src/common/llamafile-manager.ts +42 -0
- package/src/node/llamafile-backend-module.ts +32 -0
- package/src/node/llamafile-manager-impl.ts +129 -0
- package/src/package.spec.ts +27 -0
package/README.md
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
# AI Llamafile Integration
|
|
2
|
+
|
|
3
|
+
The AI Llamafile package provides an integration that allows users to manage and interact with Llamafile language models within Theia IDE.
|
|
4
|
+
|
|
5
|
+
## Features
|
|
6
|
+
|
|
7
|
+
- Start and stop Llamafile language servers.
|
|
8
|
+
|
|
9
|
+
## Commands
|
|
10
|
+
|
|
11
|
+
### Start Llamafile
|
|
12
|
+
|
|
13
|
+
- **Command ID:** `llamafile.start`
|
|
14
|
+
- **Label:** `Start Llamafile`
|
|
15
|
+
- **Functionality:** Allows you to start a Llamafile language server by selecting from a list of configured Llamafiles.
|
|
16
|
+
|
|
17
|
+
### Stop Llamafile
|
|
18
|
+
|
|
19
|
+
- **Command ID:** `llamafile.stop`
|
|
20
|
+
- **Label:** `Stop Llamafile`
|
|
21
|
+
- **Functionality:** Allows you to stop a running Llamafile language server by selecting from a list of currently running Llamafiles.
|
|
22
|
+
|
|
23
|
+
## Usage
|
|
24
|
+
|
|
25
|
+
1. **Starting a Llamafile Language Server:**
|
|
26
|
+
|
|
27
|
+
- Use the command palette to invoke `Start Llamafile`.
|
|
28
|
+
- A quick pick menu will appear with a list of configured Llamafiles.
|
|
29
|
+
- Select a Llamafile to start its language server.
|
|
30
|
+
|
|
31
|
+
2. **Stopping a Llamafile Language Server:**
|
|
32
|
+
- Use the command palette to invoke `Stop Llamafile`.
|
|
33
|
+
- A quick pick menu will display a list of currently running Llamafiles.
|
|
34
|
+
- Select a Llamafile to stop its language server.
|
|
35
|
+
|
|
36
|
+
## Dependencies
|
|
37
|
+
|
|
38
|
+
This extension depends on the `@theia/ai-core` package for AI-related services and functionalities.
|
|
39
|
+
|
|
40
|
+
## Configuration
|
|
41
|
+
|
|
42
|
+
Make sure to configure your Llamafiles properly within the preference settings.
|
|
43
|
+
This setting is an array of objects, where each object defines a llamafile with a user-friendly name, the file uri, and the port to start the server on.
|
|
44
|
+
|
|
45
|
+
Example Configuration:
|
|
46
|
+
|
|
47
|
+
```json
|
|
48
|
+
{
|
|
49
|
+
"ai-features.llamafile.llamafiles": [
|
|
50
|
+
{
|
|
51
|
+
"name": "MyLlamaFile",
|
|
52
|
+
"uri": "file:///path/to/my.llamafile",
|
|
53
|
+
"port": 30000
|
|
54
|
+
}
|
|
55
|
+
]
|
|
56
|
+
}
|
|
57
|
+
```
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { AICommandHandlerFactory } from '@theia/ai-core/lib/browser/ai-command-handler-factory';
|
|
2
|
+
import { CommandContribution, CommandRegistry, MessageService } from '@theia/core';
|
|
3
|
+
import { PreferenceService, QuickInputService } from '@theia/core/lib/browser';
|
|
4
|
+
import { LlamafileManager } from '../common/llamafile-manager';
|
|
5
|
+
export declare const StartLlamafileCommand: {
|
|
6
|
+
id: string;
|
|
7
|
+
label: string;
|
|
8
|
+
};
|
|
9
|
+
export declare const StopLlamafileCommand: {
|
|
10
|
+
id: string;
|
|
11
|
+
label: string;
|
|
12
|
+
};
|
|
13
|
+
export declare class LlamafileCommandContribution implements CommandContribution {
|
|
14
|
+
protected readonly quickInputService: QuickInputService;
|
|
15
|
+
protected readonly commandHandlerFactory: AICommandHandlerFactory;
|
|
16
|
+
protected preferenceService: PreferenceService;
|
|
17
|
+
protected messageService: MessageService;
|
|
18
|
+
protected llamafileManager: LlamafileManager;
|
|
19
|
+
registerCommands(commandRegistry: CommandRegistry): void;
|
|
20
|
+
}
|
|
21
|
+
//# sourceMappingURL=llamafile-command-contribution.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-command-contribution.d.ts","sourceRoot":"","sources":["../../src/browser/llamafile-command-contribution.ts"],"names":[],"mappings":"AAeA,OAAO,EAAE,uBAAuB,EAAE,MAAM,uDAAuD,CAAC;AAChG,OAAO,EAAE,mBAAmB,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,aAAa,CAAC;AACnF,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAE/E,OAAO,EAAE,gBAAgB,EAAE,MAAM,6BAA6B,CAAC;AAI/D,eAAO,MAAM,qBAAqB;;;CAGjC,CAAC;AACF,eAAO,MAAM,oBAAoB;;;CAGhC,CAAC;AAEF,qBACa,4BAA6B,YAAW,mBAAmB;IAGpE,SAAS,CAAC,QAAQ,CAAC,iBAAiB,EAAE,iBAAiB,CAAC;IAGxD,SAAS,CAAC,QAAQ,CAAC,qBAAqB,EAAE,uBAAuB,CAAC;IAGlE,SAAS,CAAC,iBAAiB,EAAE,iBAAiB,CAAC;IAG/C,SAAS,CAAC,cAAc,EAAE,cAAc,CAAC;IAGzC,SAAS,CAAC,gBAAgB,EAAE,gBAAgB,CAAC;IAE7C,gBAAgB,CAAC,eAAe,EAAE,eAAe,GAAG,IAAI;CA0C3D"}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.LlamafileCommandContribution = exports.StopLlamafileCommand = exports.StartLlamafileCommand = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
// *****************************************************************************
|
|
6
|
+
// Copyright (C) 2024 EclipseSource GmbH.
|
|
7
|
+
//
|
|
8
|
+
// This program and the accompanying materials are made available under the
|
|
9
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
10
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
11
|
+
//
|
|
12
|
+
// This Source Code may also be made available under the following Secondary
|
|
13
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
14
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
15
|
+
// with the GNU Classpath Exception which is available at
|
|
16
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
17
|
+
//
|
|
18
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
19
|
+
// *****************************************************************************
|
|
20
|
+
const ai_command_handler_factory_1 = require("@theia/ai-core/lib/browser/ai-command-handler-factory");
|
|
21
|
+
const core_1 = require("@theia/core");
|
|
22
|
+
const browser_1 = require("@theia/core/lib/browser");
|
|
23
|
+
const inversify_1 = require("@theia/core/shared/inversify");
|
|
24
|
+
const llamafile_manager_1 = require("../common/llamafile-manager");
|
|
25
|
+
const llamafile_preferences_1 = require("./llamafile-preferences");
|
|
26
|
+
exports.StartLlamafileCommand = {
|
|
27
|
+
id: 'llamafile.start',
|
|
28
|
+
label: 'Start Llamafile',
|
|
29
|
+
};
|
|
30
|
+
exports.StopLlamafileCommand = {
|
|
31
|
+
id: 'llamafile.stop',
|
|
32
|
+
label: 'Stop Llamafile',
|
|
33
|
+
};
|
|
34
|
+
let LlamafileCommandContribution = class LlamafileCommandContribution {
|
|
35
|
+
registerCommands(commandRegistry) {
|
|
36
|
+
commandRegistry.registerCommand(exports.StartLlamafileCommand, this.commandHandlerFactory({
|
|
37
|
+
execute: async () => {
|
|
38
|
+
try {
|
|
39
|
+
const llamaFiles = this.preferenceService.get(llamafile_preferences_1.PREFERENCE_LLAMAFILE);
|
|
40
|
+
if (llamaFiles === undefined || llamaFiles.length === 0) {
|
|
41
|
+
this.messageService.error('No Llamafiles configured.');
|
|
42
|
+
return;
|
|
43
|
+
}
|
|
44
|
+
const options = llamaFiles.map(llamaFile => ({ label: llamaFile.name }));
|
|
45
|
+
const result = await this.quickInputService.showQuickPick(options);
|
|
46
|
+
if (result === undefined) {
|
|
47
|
+
return;
|
|
48
|
+
}
|
|
49
|
+
this.llamafileManager.startServer(result.label);
|
|
50
|
+
}
|
|
51
|
+
catch (error) {
|
|
52
|
+
console.error('Something went wrong during the llamafile start.', error);
|
|
53
|
+
this.messageService.error(`Something went wrong during the llamafile start: ${error.message}.\nFor more information, see the console.`);
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
}));
|
|
57
|
+
commandRegistry.registerCommand(exports.StopLlamafileCommand, this.commandHandlerFactory({
|
|
58
|
+
execute: async () => {
|
|
59
|
+
try {
|
|
60
|
+
const llamaFiles = await this.llamafileManager.getStartedLlamafiles();
|
|
61
|
+
if (llamaFiles === undefined || llamaFiles.length === 0) {
|
|
62
|
+
this.messageService.error('No Llamafiles running.');
|
|
63
|
+
return;
|
|
64
|
+
}
|
|
65
|
+
const options = llamaFiles.map(llamaFile => ({ label: llamaFile }));
|
|
66
|
+
const result = await this.quickInputService.showQuickPick(options);
|
|
67
|
+
if (result === undefined) {
|
|
68
|
+
return;
|
|
69
|
+
}
|
|
70
|
+
this.llamafileManager.stopServer(result.label);
|
|
71
|
+
}
|
|
72
|
+
catch (error) {
|
|
73
|
+
console.error('Something went wrong during the llamafile stop.', error);
|
|
74
|
+
this.messageService.error(`Something went wrong during the llamafile stop: ${error.message}.\nFor more information, see the console.`);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
}));
|
|
78
|
+
}
|
|
79
|
+
};
|
|
80
|
+
exports.LlamafileCommandContribution = LlamafileCommandContribution;
|
|
81
|
+
tslib_1.__decorate([
|
|
82
|
+
(0, inversify_1.inject)(browser_1.QuickInputService),
|
|
83
|
+
tslib_1.__metadata("design:type", Object)
|
|
84
|
+
], LlamafileCommandContribution.prototype, "quickInputService", void 0);
|
|
85
|
+
tslib_1.__decorate([
|
|
86
|
+
(0, inversify_1.inject)(ai_command_handler_factory_1.AICommandHandlerFactory),
|
|
87
|
+
tslib_1.__metadata("design:type", Function)
|
|
88
|
+
], LlamafileCommandContribution.prototype, "commandHandlerFactory", void 0);
|
|
89
|
+
tslib_1.__decorate([
|
|
90
|
+
(0, inversify_1.inject)(browser_1.PreferenceService),
|
|
91
|
+
tslib_1.__metadata("design:type", Object)
|
|
92
|
+
], LlamafileCommandContribution.prototype, "preferenceService", void 0);
|
|
93
|
+
tslib_1.__decorate([
|
|
94
|
+
(0, inversify_1.inject)(core_1.MessageService),
|
|
95
|
+
tslib_1.__metadata("design:type", core_1.MessageService)
|
|
96
|
+
], LlamafileCommandContribution.prototype, "messageService", void 0);
|
|
97
|
+
tslib_1.__decorate([
|
|
98
|
+
(0, inversify_1.inject)(llamafile_manager_1.LlamafileManager),
|
|
99
|
+
tslib_1.__metadata("design:type", Object)
|
|
100
|
+
], LlamafileCommandContribution.prototype, "llamafileManager", void 0);
|
|
101
|
+
exports.LlamafileCommandContribution = LlamafileCommandContribution = tslib_1.__decorate([
|
|
102
|
+
(0, inversify_1.injectable)()
|
|
103
|
+
], LlamafileCommandContribution);
|
|
104
|
+
//# sourceMappingURL=llamafile-command-contribution.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-command-contribution.js","sourceRoot":"","sources":["../../src/browser/llamafile-command-contribution.ts"],"names":[],"mappings":";;;;AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;AAChF,sGAAgG;AAChG,sCAAmF;AACnF,qDAA+E;AAC/E,4DAAkE;AAClE,mEAA+D;AAC/D,mEAA+D;AAGlD,QAAA,qBAAqB,GAAG;IACjC,EAAE,EAAE,iBAAiB;IACrB,KAAK,EAAE,iBAAiB;CAC3B,CAAC;AACW,QAAA,oBAAoB,GAAG;IAChC,EAAE,EAAE,gBAAgB;IACpB,KAAK,EAAE,gBAAgB;CAC1B,CAAC;AAGK,IAAM,4BAA4B,GAAlC,MAAM,4BAA4B;IAiBrC,gBAAgB,CAAC,eAAgC;QAC7C,eAAe,CAAC,eAAe,CAAC,6BAAqB,EAAE,IAAI,CAAC,qBAAqB,CAAC;YAC9E,OAAO,EAAE,KAAK,IAAI,EAAE;gBAChB,IAAI,CAAC;oBACD,MAAM,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC,GAAG,CAAmB,4CAAoB,CAAC,CAAC;oBACtF,IAAI,UAAU,KAAK,SAAS,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;wBACtD,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;wBACvD,OAAO;oBACX,CAAC;oBACD,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,EAAE,KAAK,EAAE,SAAS,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;oBACzE,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;oBACnE,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;wBACvB,OAAO;oBACX,CAAC;oBACD,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gBACpD,CAAC;gBAAC,OAAO,KAAK,EAAE,CAAC;oBACb,OAAO,CAAC,KAAK,CAAC,kDAAkD,EAAE,KAAK,CAAC,CAAC;oBACzE,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,oDAAoD,KAAK,CAAC,OAAO,2CAA2C,CAAC,CAAC;gBAC5I,CAAC;YACL,CAAC;SACJ,CAAC,CAAC,CAAC;QACJ,eAAe,CAAC,eAAe,CAAC,4BAAoB,EAAE,IAAI,CAAC,qBAAqB,CAAC;YAC7E,OAAO,EAAE,KAAK,IAAI,EAAE;gBAChB,IAAI,CAAC;oBACD,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,oBAAoB,EAAE,CAAC;oBACtE,IAAI,UAAU,KAAK,SAAS,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;wBACtD,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;wBACpD,OAAO;oBACX,CAAC;oBACD,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC;oBACpE,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;oBACnE,IAAI,
MAAM,KAAK,SAAS,EAAE,CAAC;wBACvB,OAAO;oBACX,CAAC;oBACD,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gBACnD,CAAC;gBAAC,OAAO,KAAK,EAAE,CAAC;oBACb,OAAO,CAAC,KAAK,CAAC,iDAAiD,EAAE,KAAK,CAAC,CAAC;oBACxE,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,mDAAmD,KAAK,CAAC,OAAO,2CAA2C,CAAC,CAAC;gBAC3I,CAAC;YACL,CAAC;SACJ,CAAC,CAAC,CAAC;IACR,CAAC;CACJ,CAAA;AA3DY,oEAA4B;AAGlB;IADlB,IAAA,kBAAM,EAAC,2BAAiB,CAAC;;uEAC8B;AAGrC;IADlB,IAAA,kBAAM,EAAC,oDAAuB,CAAC;;2EACkC;AAGxD;IADT,IAAA,kBAAM,EAAC,2BAAiB,CAAC;;uEACqB;AAGrC;IADT,IAAA,kBAAM,EAAC,qBAAc,CAAC;sCACG,qBAAc;oEAAC;AAG/B;IADT,IAAA,kBAAM,EAAC,oCAAgB,CAAC;;sEACoB;uCAfpC,4BAA4B;IADxC,IAAA,sBAAU,GAAE;GACA,4BAA4B,CA2DxC"}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { FrontendApplicationContribution, PreferenceService } from '@theia/core/lib/browser';
|
|
2
|
+
import { LlamafileManager, LlamafileModelDescription } from '../common/llamafile-manager';
|
|
3
|
+
import { RequestSetting } from '@theia/ai-core/lib/browser/ai-core-preferences';
|
|
4
|
+
export declare class LlamafileFrontendApplicationContribution implements FrontendApplicationContribution {
|
|
5
|
+
protected preferenceService: PreferenceService;
|
|
6
|
+
protected llamafileManager: LlamafileManager;
|
|
7
|
+
private _knownLlamaFiles;
|
|
8
|
+
onStart(): void;
|
|
9
|
+
protected getLLamaFileModelDescriptions(llamafiles: LlamafileEntry[]): LlamafileModelDescription[];
|
|
10
|
+
protected handleLlamaFilePreferenceChange(newModels: LlamafileEntry[]): void;
|
|
11
|
+
protected handleRequestSettingsChange(newSettings: RequestSetting[]): void;
|
|
12
|
+
}
|
|
13
|
+
export interface LlamafileEntry {
|
|
14
|
+
name: string;
|
|
15
|
+
uri: string;
|
|
16
|
+
port: number;
|
|
17
|
+
}
|
|
18
|
+
//# sourceMappingURL=llamafile-frontend-application-contribution.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-frontend-application-contribution.d.ts","sourceRoot":"","sources":["../../src/browser/llamafile-frontend-application-contribution.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,+BAA+B,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAE7F,OAAO,EAAE,gBAAgB,EAAE,yBAAyB,EAAE,MAAM,6BAA6B,CAAC;AAE1F,OAAO,EAAoC,cAAc,EAAE,MAAM,gDAAgD,CAAC;AAGlH,qBACa,wCAAyC,YAAW,+BAA+B;IAG5F,SAAS,CAAC,iBAAiB,EAAE,iBAAiB,CAAC;IAG/C,SAAS,CAAC,gBAAgB,EAAE,gBAAgB,CAAC;IAE7C,OAAO,CAAC,gBAAgB,CAA0C;IAElE,OAAO,IAAI,IAAI;IAqBf,SAAS,CAAC,6BAA6B,CAAC,UAAU,EAAE,cAAc,EAAE,GAAG,yBAAyB,EAAE;IAoBlG,SAAS,CAAC,+BAA+B,CAAC,SAAS,EAAE,cAAc,EAAE,GAAG,IAAI;IAgB5E,SAAS,CAAC,2BAA2B,CAAC,WAAW,EAAE,cAAc,EAAE,GAAG,IAAI;CAO7E;AAED,MAAM,WAAW,cAAc;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;CAChB"}
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// *****************************************************************************
|
|
3
|
+
// Copyright (C) 2024 EclipseSource GmbH.
|
|
4
|
+
//
|
|
5
|
+
// This program and the accompanying materials are made available under the
|
|
6
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
7
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
8
|
+
//
|
|
9
|
+
// This Source Code may also be made available under the following Secondary
|
|
10
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
11
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
12
|
+
// with the GNU Classpath Exception which is available at
|
|
13
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
14
|
+
//
|
|
15
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
16
|
+
// *****************************************************************************
|
|
17
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
18
|
+
exports.LlamafileFrontendApplicationContribution = void 0;
|
|
19
|
+
const tslib_1 = require("tslib");
|
|
20
|
+
const browser_1 = require("@theia/core/lib/browser");
|
|
21
|
+
const inversify_1 = require("@theia/core/shared/inversify");
|
|
22
|
+
const llamafile_manager_1 = require("../common/llamafile-manager");
|
|
23
|
+
const llamafile_preferences_1 = require("./llamafile-preferences");
|
|
24
|
+
const ai_core_preferences_1 = require("@theia/ai-core/lib/browser/ai-core-preferences");
|
|
25
|
+
const LLAMAFILE_PROVIDER_ID = 'llamafile';
|
|
26
|
+
let LlamafileFrontendApplicationContribution = class LlamafileFrontendApplicationContribution {
|
|
27
|
+
constructor() {
|
|
28
|
+
this._knownLlamaFiles = new Map();
|
|
29
|
+
}
|
|
30
|
+
onStart() {
|
|
31
|
+
this.preferenceService.ready.then(() => {
|
|
32
|
+
const llamafiles = this.preferenceService.get(llamafile_preferences_1.PREFERENCE_LLAMAFILE, []);
|
|
33
|
+
const validLlamafiles = llamafiles.filter(LlamafileEntry.is);
|
|
34
|
+
const LlamafileModelDescriptions = this.getLLamaFileModelDescriptions(validLlamafiles);
|
|
35
|
+
this.llamafileManager.addLanguageModels(LlamafileModelDescriptions);
|
|
36
|
+
validLlamafiles.forEach(model => this._knownLlamaFiles.set(model.name, model));
|
|
37
|
+
this.preferenceService.onPreferenceChanged(event => {
|
|
38
|
+
if (event.preferenceName === llamafile_preferences_1.PREFERENCE_LLAMAFILE) {
|
|
39
|
+
const newModels = event.newValue.filter((llamafileEntry) => LlamafileEntry.is(llamafileEntry));
|
|
40
|
+
this.handleLlamaFilePreferenceChange(newModels);
|
|
41
|
+
}
|
|
42
|
+
else if (event.preferenceName === ai_core_preferences_1.PREFERENCE_NAME_REQUEST_SETTINGS) {
|
|
43
|
+
this.handleRequestSettingsChange(event.newValue);
|
|
44
|
+
}
|
|
45
|
+
});
|
|
46
|
+
});
|
|
47
|
+
}
|
|
48
|
+
getLLamaFileModelDescriptions(llamafiles) {
|
|
49
|
+
const requestSettings = this.preferenceService.get(ai_core_preferences_1.PREFERENCE_NAME_REQUEST_SETTINGS, []);
|
|
50
|
+
return llamafiles.map(llamafile => {
|
|
51
|
+
var _a;
|
|
52
|
+
const matchingSettings = requestSettings.filter(setting => (!setting.providerId || setting.providerId === LLAMAFILE_PROVIDER_ID) &&
|
|
53
|
+
setting.modelId === llamafile.name);
|
|
54
|
+
if (matchingSettings.length > 1) {
|
|
55
|
+
console.warn(`Multiple entries found for model "${llamafile.name}". Using the first match.`);
|
|
56
|
+
}
|
|
57
|
+
return {
|
|
58
|
+
name: llamafile.name,
|
|
59
|
+
uri: llamafile.uri,
|
|
60
|
+
port: llamafile.port,
|
|
61
|
+
defaultRequestSettings: (_a = matchingSettings[0]) === null || _a === void 0 ? void 0 : _a.requestSettings
|
|
62
|
+
};
|
|
63
|
+
});
|
|
64
|
+
}
|
|
65
|
+
handleLlamaFilePreferenceChange(newModels) {
|
|
66
|
+
const llamafilesToAdd = newModels.filter(llamafile => !this._knownLlamaFiles.has(llamafile.name) ||
|
|
67
|
+
!LlamafileEntry.equals(this._knownLlamaFiles.get(llamafile.name), llamafile));
|
|
68
|
+
const llamafileIdsToRemove = [...this._knownLlamaFiles.values()].filter(llamafile => !newModels.find(newModel => LlamafileEntry.equals(newModel, llamafile)))
|
|
69
|
+
.map(llamafile => llamafile.name);
|
|
70
|
+
this.llamafileManager.removeLanguageModels(llamafileIdsToRemove);
|
|
71
|
+
llamafileIdsToRemove.forEach(id => this._knownLlamaFiles.delete(id));
|
|
72
|
+
this.llamafileManager.addLanguageModels(this.getLLamaFileModelDescriptions(llamafilesToAdd));
|
|
73
|
+
llamafilesToAdd.forEach(model => this._knownLlamaFiles.set(model.name, model));
|
|
74
|
+
}
|
|
75
|
+
handleRequestSettingsChange(newSettings) {
|
|
76
|
+
const llamafiles = Array.from(this._knownLlamaFiles.values());
|
|
77
|
+
const llamafileModelDescriptions = this.getLLamaFileModelDescriptions(llamafiles);
|
|
78
|
+
llamafileModelDescriptions.forEach(llamafileModelDescription => {
|
|
79
|
+
this.llamafileManager.updateRequestSettings(llamafileModelDescription.name, llamafileModelDescription.defaultRequestSettings);
|
|
80
|
+
});
|
|
81
|
+
}
|
|
82
|
+
};
|
|
83
|
+
exports.LlamafileFrontendApplicationContribution = LlamafileFrontendApplicationContribution;
|
|
84
|
+
tslib_1.__decorate([
|
|
85
|
+
(0, inversify_1.inject)(browser_1.PreferenceService),
|
|
86
|
+
tslib_1.__metadata("design:type", Object)
|
|
87
|
+
], LlamafileFrontendApplicationContribution.prototype, "preferenceService", void 0);
|
|
88
|
+
tslib_1.__decorate([
|
|
89
|
+
(0, inversify_1.inject)(llamafile_manager_1.LlamafileManager),
|
|
90
|
+
tslib_1.__metadata("design:type", Object)
|
|
91
|
+
], LlamafileFrontendApplicationContribution.prototype, "llamafileManager", void 0);
|
|
92
|
+
exports.LlamafileFrontendApplicationContribution = LlamafileFrontendApplicationContribution = tslib_1.__decorate([
|
|
93
|
+
(0, inversify_1.injectable)()
|
|
94
|
+
], LlamafileFrontendApplicationContribution);
|
|
95
|
+
var LlamafileEntry;
|
|
96
|
+
(function (LlamafileEntry) {
|
|
97
|
+
function equals(a, b) {
|
|
98
|
+
return (a.name === b.name &&
|
|
99
|
+
a.uri === b.uri &&
|
|
100
|
+
a.port === b.port);
|
|
101
|
+
}
|
|
102
|
+
LlamafileEntry.equals = equals;
|
|
103
|
+
function is(entry) {
|
|
104
|
+
return (typeof entry === 'object' &&
|
|
105
|
+
// eslint-disable-next-line no-null/no-null
|
|
106
|
+
entry !== null &&
|
|
107
|
+
'name' in entry && typeof entry.name === 'string' &&
|
|
108
|
+
'uri' in entry && typeof entry.uri === 'string' &&
|
|
109
|
+
'port' in entry && typeof entry.port === 'number');
|
|
110
|
+
}
|
|
111
|
+
LlamafileEntry.is = is;
|
|
112
|
+
})(LlamafileEntry || (LlamafileEntry = {}));
|
|
113
|
+
//# sourceMappingURL=llamafile-frontend-application-contribution.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-frontend-application-contribution.js","sourceRoot":"","sources":["../../src/browser/llamafile-frontend-application-contribution.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;;;AAEhF,qDAA6F;AAC7F,4DAAkE;AAClE,mEAA0F;AAC1F,mEAA+D;AAC/D,wFAAkH;AAElH,MAAM,qBAAqB,GAAG,WAAW,CAAC;AAEnC,IAAM,wCAAwC,GAA9C,MAAM,wCAAwC;IAA9C;QAQK,qBAAgB,GAAgC,IAAI,GAAG,EAAE,CAAC;IAkEtE,CAAC;IAhEG,OAAO;QACH,IAAI,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE;YACnC,MAAM,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC,GAAG,CAAmB,4CAAoB,EAAE,EAAE,CAAC,CAAC;YAC1F,MAAM,eAAe,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;YAE7D,MAAM,0BAA0B,GAAG,IAAI,CAAC,6BAA6B,CAAC,eAAe,CAAC,CAAC;YAEvF,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,0BAA0B,CAAC,CAAC;YACpE,eAAe,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;YAE/E,IAAI,CAAC,iBAAiB,CAAC,mBAAmB,CAAC,KAAK,CAAC,EAAE;gBAC/C,IAAI,KAAK,CAAC,cAAc,KAAK,4CAAoB,EAAE,CAAC;oBAChD,MAAM,SAAS,GAAG,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,cAAuB,EAAE,EAAE,CAAC,cAAc,CAAC,EAAE,CAAC,cAAc,CAAC,CAAqB,CAAC;oBAC5H,IAAI,CAAC,+BAA+B,CAAC,SAAS,CAAC,CAAC;gBACpD,CAAC;qBAAM,IAAI,KAAK,CAAC,cAAc,KAAK,sDAAgC,EAAE,CAAC;oBACnE,IAAI,CAAC,2BAA2B,CAAC,KAAK,CAAC,QAA4B,CAAC,CAAC;gBACzE,CAAC;YACL,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC;IAES,6BAA6B,CAAC,UAA4B;QAChE,MAAM,eAAe,GAAG,IAAI,CAAC,iBAAiB,CAAC,GAAG,CAAmB,sDAAgC,EAAE,EAAE,CAAC,CAAC;QAC3G,OAAO,UAAU,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE;;YAC9B,MAAM,gBAAgB,GAAG,eAAe,CAAC,MAAM,CAC3C,OAAO,CAAC,EAAE,CACN,CAAC,CAAC,OAAO,CAAC,UAAU,IAAI,OAAO,CAAC,UAAU,KAAK,qBAAqB,CAAC;gBACrE,OAAO,CAAC,OAAO,KAAK,SAAS,CAAC,IAAI,CACzC,CAAC;YACF,IAAI,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC9B,OAAO,CAAC,IAAI,CAAC,qCAAqC,SAAS,CAAC,IAAI,2BAA2B,CAAC,CAAC;YACjG,CAAC;YACD,OAAO;gBACH,IAAI,EAAE,SAAS,CAAC,IAAI;gBACpB,GAAG,EAAE,SAAS,CAAC,GAAG;gBAClB,IAAI,E
AAE,SAAS,CAAC,IAAI;gBACpB,sBAAsB,EAAE,MAAA,gBAAgB,CAAC,CAAC,CAAC,0CAAE,eAAe;aAC/D,CAAC;QACN,CAAC,CAAC,CAAC;IACP,CAAC;IAES,+BAA+B,CAAC,SAA2B;QACjE,MAAM,eAAe,GAAG,SAAS,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CACjD,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC;YAC1C,CAAC,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAE,EAAE,SAAS,CAAC,CAAC,CAAC;QAEnF,MAAM,oBAAoB,GAAG,CAAC,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAChF,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC,CAAC;aACvE,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QAEtC,IAAI,CAAC,gBAAgB,CAAC,oBAAoB,CAAC,oBAAoB,CAAC,CAAC;QACjE,oBAAoB,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC;QAErE,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,IAAI,CAAC,6BAA6B,CAAC,eAAe,CAAC,CAAC,CAAC;QAC7F,eAAe,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;IACnF,CAAC;IAES,2BAA2B,CAAC,WAA6B;QAC/D,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE,CAAC,CAAC;QAC9D,MAAM,0BAA0B,GAAG,IAAI,CAAC,6BAA6B,CAAC,UAAU,CAAC,CAAC;QAClF,0BAA0B,CAAC,OAAO,CAAC,yBAAyB,CAAC,EAAE;YAC3D,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,yBAAyB,CAAC,IAAI,EAAE,yBAAyB,CAAC,sBAAsB,CAAC,CAAC;QAClI,CAAC,CAAC,CAAC;IACP,CAAC;CACJ,CAAA;AA1EY,4FAAwC;AAGvC;IADT,IAAA,kBAAM,EAAC,2BAAiB,CAAC;;mFACqB;AAGrC;IADT,IAAA,kBAAM,EAAC,oCAAgB,CAAC;;kFACoB;mDANpC,wCAAwC;IADpD,IAAA,sBAAU,GAAE;GACA,wCAAwC,CA0EpD;AAQD,IAAU,cAAc,CAmBvB;AAnBD,WAAU,cAAc;IACpB,SAAgB,MAAM,CAAC,CAAiB,EAAE,CAAiB;QACvD,OAAO,CACH,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,IAAI;YACjB,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG;YACf,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,IAAI,CACpB,CAAC;IACN,CAAC;IANe,qBAAM,SAMrB,CAAA;IAED,SAAgB,EAAE,CAAC,KAAc;QAC7B,OAAO,CACH,OAAO,KAAK,KAAK,QAAQ;YACzB,2CAA2C;YAC3C,KAAK,KAAK,IAAI;YACd,MAAM,IAAI,KAAK,IAAI,OAAQ,KAAwB,CAAC,IAAI,KAAK,QAAQ;YACrE,KAAK,IAAI,KAAK,IAAI,OAAQ,KAAwB,CAAC,GAAG,KAAK,QAAQ;YACnE,MAAM,IAAI,KAAK,IAAI,OAAQ,KAAwB,
CAAC,IAAI,KAAK,QAAQ,CACxE,CAAC;IACN,CAAC;IATe,iBAAE,KASjB,CAAA;AACL,CAAC,EAnBS,cAAc,KAAd,cAAc,QAmBvB"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-frontend-module.d.ts","sourceRoot":"","sources":["../../src/browser/llamafile-frontend-module.ts"],"names":[],"mappings":"AAiBA,OAAO,EAAE,eAAe,EAAE,MAAM,8BAA8B,CAAC;;AAO/D,wBAoBG"}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
// *****************************************************************************
|
|
4
|
+
// Copyright (C) 2024 EclipseSource GmbH.
|
|
5
|
+
//
|
|
6
|
+
// This program and the accompanying materials are made available under the
|
|
7
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
8
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
9
|
+
//
|
|
10
|
+
// This Source Code may also be made available under the following Secondary
|
|
11
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
12
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
13
|
+
// with the GNU Classpath Exception which is available at
|
|
14
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
15
|
+
//
|
|
16
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
17
|
+
// *****************************************************************************
|
|
18
|
+
const core_1 = require("@theia/core");
|
|
19
|
+
const browser_1 = require("@theia/core/lib/browser");
|
|
20
|
+
const inversify_1 = require("@theia/core/shared/inversify");
|
|
21
|
+
const output_channel_1 = require("@theia/output/lib/browser/output-channel");
|
|
22
|
+
const llamafile_manager_1 = require("../common/llamafile-manager");
|
|
23
|
+
const llamafile_command_contribution_1 = require("./llamafile-command-contribution");
|
|
24
|
+
const llamafile_frontend_application_contribution_1 = require("./llamafile-frontend-application-contribution");
|
|
25
|
+
const llamafile_preferences_1 = require("./llamafile-preferences");
|
|
26
|
+
exports.default = new inversify_1.ContainerModule(bind => {
|
|
27
|
+
bind(browser_1.FrontendApplicationContribution).to(llamafile_frontend_application_contribution_1.LlamafileFrontendApplicationContribution).inSingletonScope();
|
|
28
|
+
bind(core_1.CommandContribution).to(llamafile_command_contribution_1.LlamafileCommandContribution).inSingletonScope();
|
|
29
|
+
bind(llamafile_manager_1.LlamafileManager).toDynamicValue(ctx => {
|
|
30
|
+
const connection = ctx.container.get(browser_1.RemoteConnectionProvider);
|
|
31
|
+
const outputChannelManager = ctx.container.get(output_channel_1.OutputChannelManager);
|
|
32
|
+
const client = {
|
|
33
|
+
error: (llamafileName, message) => {
|
|
34
|
+
const channel = outputChannelManager.getChannel(`${llamafileName}-llamafile`);
|
|
35
|
+
channel.appendLine(message, output_channel_1.OutputChannelSeverity.Error);
|
|
36
|
+
},
|
|
37
|
+
log: (llamafileName, message) => {
|
|
38
|
+
const channel = outputChannelManager.getChannel(`${llamafileName}-llamafile`);
|
|
39
|
+
channel.appendLine(message, output_channel_1.OutputChannelSeverity.Info);
|
|
40
|
+
}
|
|
41
|
+
};
|
|
42
|
+
return connection.createProxy(llamafile_manager_1.LlamafileManagerPath, client);
|
|
43
|
+
}).inSingletonScope();
|
|
44
|
+
(0, llamafile_preferences_1.bindAILlamafilePreferences)(bind);
|
|
45
|
+
});
|
|
46
|
+
//# sourceMappingURL=llamafile-frontend-module.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-frontend-module.js","sourceRoot":"","sources":["../../src/browser/llamafile-frontend-module.ts"],"names":[],"mappings":";;AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;AAChF,sCAAkD;AAClD,qDAA+H;AAC/H,4DAA+D;AAC/D,6EAAuG;AACvG,mEAAmH;AACnH,qFAAgF;AAChF,+GAAyG;AACzG,mEAAqE;AAErE,kBAAe,IAAI,2BAAe,CAAC,IAAI,CAAC,EAAE;IACtC,IAAI,CAAC,yCAA+B,CAAC,CAAC,EAAE,CAAC,sFAAwC,CAAC,CAAC,gBAAgB,EAAE,CAAC;IACtG,IAAI,CAAC,0BAAmB,CAAC,CAAC,EAAE,CAAC,6DAA4B,CAAC,CAAC,gBAAgB,EAAE,CAAC;IAC9E,IAAI,CAAC,oCAAgB,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;QACxC,MAAM,UAAU,GAAG,GAAG,CAAC,SAAS,CAAC,GAAG,CAA4B,kCAAwB,CAAC,CAAC;QAC1F,MAAM,oBAAoB,GAAG,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,qCAAoB,CAAC,CAAC;QACrE,MAAM,MAAM,GAAiC;YACzC,KAAK,EAAE,CAAC,aAAa,EAAE,OAAO,EAAE,EAAE;gBAC9B,MAAM,OAAO,GAAG,oBAAoB,CAAC,UAAU,CAAC,GAAG,aAAa,YAAY,CAAC,CAAC;gBAC9E,OAAO,CAAC,UAAU,CAAC,OAAO,EAAE,sCAAqB,CAAC,KAAK,CAAC,CAAC;YAC7D,CAAC;YACD,GAAG,EAAE,CAAC,aAAa,EAAE,OAAO,EAAE,EAAE;gBAC5B,MAAM,OAAO,GAAG,oBAAoB,CAAC,UAAU,CAAC,GAAG,aAAa,YAAY,CAAC,CAAC;gBAC9E,OAAO,CAAC,UAAU,CAAC,OAAO,EAAE,sCAAqB,CAAC,IAAI,CAAC,CAAC;YAC5D,CAAC;SACJ,CAAC;QACF,OAAO,UAAU,CAAC,WAAW,CAAmB,wCAAoB,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC,CAAC,CAAC,gBAAgB,EAAE,CAAC;IAEtB,IAAA,kDAA0B,EAAC,IAAI,CAAC,CAAC;AACrC,CAAC,CAAC,CAAC"}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { PreferenceSchema } from '@theia/core/lib/browser';
import { interfaces } from '@theia/core/shared/inversify';
/** Section title shown for this preference group in the settings UI. */
export declare const AI_LLAMAFILE_PREFERENCES_TITLE = "\u2728 AI LlamaFile";
/** Preference key under which the list of configured llamafiles is stored. */
export declare const PREFERENCE_LLAMAFILE = "ai-features.llamafile.llamafiles";
/** Preference schema describing llamafile entries (`name`, `uri`, `port`). */
export declare const aiLlamafilePreferencesSchema: PreferenceSchema;
/** Registers the schema above as a `PreferenceContribution`. */
export declare function bindAILlamafilePreferences(bind: interfaces.Bind): void;
//# sourceMappingURL=llamafile-preferences.d.ts.map
|
|
7
|
+
//# sourceMappingURL=llamafile-preferences.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-preferences.d.ts","sourceRoot":"","sources":["../../src/browser/llamafile-preferences.ts"],"names":[],"mappings":"AAgBA,OAAO,EAA0B,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AACnF,OAAO,EAAE,UAAU,EAAE,MAAM,8BAA8B,CAAC;AAE1D,eAAO,MAAM,8BAA8B,wBAAmB,CAAC;AAC/D,eAAO,MAAM,oBAAoB,qCAAqC,CAAC;AAEvE,eAAO,MAAM,4BAA4B,EAAE,gBAmC1C,CAAC;AAEF,wBAAgB,0BAA0B,CAAC,IAAI,EAAE,UAAU,CAAC,IAAI,GAAG,IAAI,CAEtE"}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// *****************************************************************************
|
|
3
|
+
// Copyright (C) 2024 EclipseSource GmbH.
|
|
4
|
+
//
|
|
5
|
+
// This program and the accompanying materials are made available under the
|
|
6
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
7
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
8
|
+
//
|
|
9
|
+
// This Source Code may also be made available under the following Secondary
|
|
10
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
11
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
12
|
+
// with the GNU Classpath Exception which is available at
|
|
13
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
14
|
+
//
|
|
15
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
16
|
+
// *****************************************************************************
|
|
17
|
+
// CommonJS interop preamble emitted by the TypeScript compiler.
Object.defineProperty(exports, "__esModule", { value: true });
exports.bindAILlamafilePreferences = exports.aiLlamafilePreferencesSchema = exports.PREFERENCE_LLAMAFILE = exports.AI_LLAMAFILE_PREFERENCES_TITLE = void 0;
const browser_1 = require("@theia/core/lib/browser");
// Section title shown for this preference group in the settings UI.
exports.AI_LLAMAFILE_PREFERENCES_TITLE = '✨ AI LlamaFile';
// Preference key under which the list of configured llamafiles is stored.
exports.PREFERENCE_LLAMAFILE = 'ai-features.llamafile.llamafiles';
|
|
22
|
+
// Preference schema for the llamafile list: an array of objects, each
// describing one model via a display name, the llamafile's uri, and the
// port its local completion server will listen on. Defaults to empty.
exports.aiLlamafilePreferencesSchema = {
    type: 'object',
    properties: {
        [exports.PREFERENCE_LLAMAFILE]: {
            title: exports.AI_LLAMAFILE_PREFERENCES_TITLE,
            markdownDescription: 'This setting allows you to configure and manage LlamaFile models in Theia IDE.\
\n\
Each entry requires a user-friendly `name`, the file `uri` pointing to your LlamaFile, and the `port` on which it will run.\
\n\
To start a LlamaFile, use the "Start LlamaFile" command, which enables you to select the desired model.\
\n\
If you edit an entry (e.g., change the port), any running instance will stop, and you will need to manually start it again.\
\n\
[Learn more about configuring and managing LlamaFiles in the Theia IDE documentation](https://theia-ide.org/docs/user_ai/#llamafile-models).',
            type: 'array',
            default: [],
            items: {
                type: 'object',
                properties: {
                    name: {
                        type: 'string',
                        description: 'The model name to use for this Llamafile.'
                    },
                    uri: {
                        type: 'string',
                        description: 'The file uri to the Llamafile.'
                    },
                    port: {
                        type: 'number',
                        description: 'The port to use to start the server.'
                    }
                }
            }
        }
    }
};
|
|
58
|
+
/**
 * Registers the llamafile preference schema with the preference system.
 * @param bind the inversify `Bind` function of the containing module
 */
function bindAILlamafilePreferences(bind) {
    const contribution = { schema: exports.aiLlamafilePreferencesSchema };
    bind(browser_1.PreferenceContribution).toConstantValue(contribution);
}
exports.bindAILlamafilePreferences = bindAILlamafilePreferences;
|
|
62
|
+
//# sourceMappingURL=llamafile-preferences.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-preferences.js","sourceRoot":"","sources":["../../src/browser/llamafile-preferences.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;;AAEhF,qDAAmF;AAGtE,QAAA,8BAA8B,GAAG,gBAAgB,CAAC;AAClD,QAAA,oBAAoB,GAAG,kCAAkC,CAAC;AAE1D,QAAA,4BAA4B,GAAqB;IAC1D,IAAI,EAAE,QAAQ;IACd,UAAU,EAAE;QACR,CAAC,4BAAoB,CAAC,EAAE;YACpB,KAAK,EAAE,sCAA8B;YACrC,mBAAmB,EAAE;;;;;;;;yJAQwH;YAC7I,IAAI,EAAE,OAAO;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE;gBACH,IAAI,EAAE,QAAQ;gBACd,UAAU,EAAE;oBACR,IAAI,EAAE;wBACF,IAAI,EAAE,QAAQ;wBACd,WAAW,EAAE,2CAA2C;qBAC3D;oBACD,GAAG,EAAE;wBACD,IAAI,EAAE,QAAQ;wBACd,WAAW,EAAE,gCAAgC;qBAChD;oBACD,IAAI,EAAE;wBACF,IAAI,EAAE,QAAQ;wBACd,WAAW,EAAE,sCAAsC;qBACtD;iBACJ;aACJ;SACJ;KACJ;CACJ,CAAC;AAEF,SAAgB,0BAA0B,CAAC,IAAqB;IAC5D,IAAI,CAAC,gCAAsB,CAAC,CAAC,eAAe,CAAC,EAAE,MAAM,EAAE,oCAA4B,EAAE,CAAC,CAAC;AAC3F,CAAC;AAFD,gEAEC"}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { LanguageModel, LanguageModelRequest, LanguageModelResponse } from '@theia/ai-core';
import { CancellationToken } from '@theia/core';
/**
 * Language model backed by a locally running llamafile server; requests are
 * sent to its HTTP `/completion` endpoint and streamed back.
 */
export declare class LlamafileLanguageModel implements LanguageModel {
    readonly name: string;
    readonly uri: string;
    readonly port: number;
    defaultRequestSettings?: {
        [key: string]: unknown;
    } | undefined;
    readonly providerId = "llamafile";
    readonly vendor: string;
    /**
     * @param name the unique name for this language model. It will be used to identify the model in the UI.
     * @param uri the URI pointing to the Llamafile model location.
     * @param port the port on which the Llamafile model server operates.
     * @param defaultRequestSettings optional default settings for requests made using this model.
     */
    constructor(name: string, uri: string, port: number, defaultRequestSettings?: {
        [key: string]: unknown;
    } | undefined);
    /** The model id equals its user-facing name. */
    get id(): string;
    /** Per-request settings win over instance defaults, then hard-coded fallbacks. */
    protected getSettings(request: LanguageModelRequest): Record<string, unknown>;
    request(request: LanguageModelRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse>;
}
|
|
25
|
+
//# sourceMappingURL=llamafile-language-model.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"llamafile-language-model.d.ts","sourceRoot":"","sources":["../../src/common/llamafile-language-model.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,aAAa,EAAE,oBAAoB,EAAE,qBAAqB,EAAmC,MAAM,gBAAgB,CAAC;AAC7H,OAAO,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAChD,qBAAa,sBAAuB,YAAW,aAAa;aAYpC,IAAI,EAAE,MAAM;aACZ,GAAG,EAAE,MAAM;aACX,IAAI,EAAE,MAAM;IACrB,sBAAsB,CAAC;;;IAblC,QAAQ,CAAC,UAAU,eAAe;IAClC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAa;IAEpC;;;;;OAKG;gBAEiB,IAAI,EAAE,MAAM,EACZ,GAAG,EAAE,MAAM,EACX,IAAI,EAAE,MAAM,EACrB,sBAAsB,CAAC;;iBAA4B;IAG9D,IAAI,EAAE,IAAI,MAAM,CAEf;IACD,SAAS,CAAC,WAAW,CAAC,OAAO,EAAE,oBAAoB,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAavE,OAAO,CAAC,OAAO,EAAE,oBAAoB,EAAE,iBAAiB,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAyEtH"}
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// *****************************************************************************
|
|
3
|
+
// Copyright (C) 2024 EclipseSource GmbH.
|
|
4
|
+
//
|
|
5
|
+
// This program and the accompanying materials are made available under the
|
|
6
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
7
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
8
|
+
//
|
|
9
|
+
// This Source Code may also be made available under the following Secondary
|
|
10
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
11
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
12
|
+
// with the GNU Classpath Exception which is available at
|
|
13
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
14
|
+
//
|
|
15
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
16
|
+
// *****************************************************************************
|
|
17
|
+
// CommonJS interop preamble emitted by the TypeScript compiler.
Object.defineProperty(exports, "__esModule", { value: true });
exports.LlamafileLanguageModel = void 0;
|
|
19
|
+
/**
 * Language model backed by a locally running llamafile server. Requests are
 * POSTed to `http://localhost:<port>/completion` and the response body is
 * exposed as an async-iterable stream of `{ content }` chunks.
 */
class LlamafileLanguageModel {
    /**
     * @param name the unique name for this language model. It will be used to identify the model in the UI.
     * @param uri the URI pointing to the Llamafile model location.
     * @param port the port on which the Llamafile model server operates.
     * @param defaultRequestSettings optional default settings for requests made using this model.
     */
    constructor(name, uri, port, defaultRequestSettings) {
        this.name = name;
        this.uri = uri;
        this.port = port;
        this.defaultRequestSettings = defaultRequestSettings;
        this.providerId = 'llamafile';
        this.vendor = 'Mozilla';
    }
    // The model id is simply its user-facing name.
    get id() {
        return this.name;
    }
    /**
     * Resolves the settings to send with a request: per-request settings win
     * over the instance-level defaults; if neither is present, a hard-coded
     * fallback enabling streaming with a 200-token cap is used.
     */
    getSettings(request) {
        const settings = request.settings ? request.settings : this.defaultRequestSettings;
        if (!settings) {
            return {
                n_predict: 200,
                stream: true,
                stop: ['</s>', 'Llama:', 'User:', '<|eot_id|>'],
                cache_prompt: true,
            };
        }
        return settings;
    }
    /**
     * Sends the conversation to the llamafile server and streams the reply.
     * Messages are flattened into a plain-text prompt ("User:"/"Llama:"
     * prefixed lines, system messages inlined) terminated with "\nLlama:".
     * On any error a non-streaming `{ text }` response is returned instead.
     */
    async request(request, cancellationToken) {
        const settings = this.getSettings(request);
        try {
            // NOTE(review): the switch has no default, so a message whose actor is
            // none of user/ai/system maps to undefined and joins as the literal
            // string "undefined" — confirm upstream only ever sends these actors.
            let prompt = request.messages.map(message => {
                switch (message.actor) {
                    case 'user':
                        return `User: ${message.query}`;
                    case 'ai':
                        return `Llama: ${message.query}`;
                    case 'system':
                        // Collapse blank lines so the system text stays one block.
                        return `${message.query.replace(/\n\n/g, '\n')}`;
                }
            }).join('\n');
            // Cue the model to answer as "Llama:" (matches the stop words above).
            prompt += '\nLlama:';
            const response = await fetch(`http://localhost:${this.port}/completion`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    prompt: prompt,
                    ...settings
                }),
            });
            if (!response.ok) {
                throw new Error(`HTTP error! status: ${response.status}`);
            }
            if (!response.body) {
                throw new Error('Response body is undefined');
            }
            const reader = response.body.getReader();
            const decoder = new TextDecoder();
            // The returned stream is single-pass: reader and decoder are shared
            // closure state, so iterating it twice would not replay the response.
            return {
                stream: {
                    [Symbol.asyncIterator]() {
                        return {
                            async next() {
                                // Compiled form of `cancellationToken?.isCancellationRequested`.
                                if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
                                    // NOTE(review): reader.cancel() returns a promise that is
                                    // neither awaited nor error-handled — confirm intentional.
                                    reader.cancel();
                                    return { value: undefined, done: true };
                                }
                                const { value, done } = await reader.read();
                                if (done) {
                                    return { value: undefined, done: true };
                                }
                                // stream: true keeps multi-byte characters split across
                                // network chunks intact between decode calls.
                                const read = decoder.decode(value, { stream: true });
                                // Each non-empty line is assumed to be an SSE record; substring(6)
                                // strips a leading "data: " prefix. NOTE(review): a JSON record
                                // split across two network chunks would fail to parse and be
                                // dropped (logged below) — confirm server framing guarantees.
                                const chunk = read.split('\n').filter(l => l.length !== 0).reduce((acc, line) => {
                                    try {
                                        const parsed = JSON.parse(line.substring(6));
                                        acc += parsed.content;
                                        return acc;
                                    }
                                    catch (error) {
                                        // Best-effort: log and skip malformed lines, keep streaming.
                                        console.error('Error parsing JSON:', error);
                                        return acc;
                                    }
                                }, '');
                                return { value: { content: chunk }, done: false };
                            }
                        };
                    }
                }
            };
        }
        catch (error) {
            console.error('Error:', error);
            return {
                text: `Error: ${error}`
            };
        }
    }
}
|
|
121
|
+
// CommonJS export of the class declared above.
exports.LlamafileLanguageModel = LlamafileLanguageModel;
|
|
122
|
+
//# sourceMappingURL=llamafile-language-model.js.map
|