@theia/ai-llamafile 1.56.0 → 1.57.0-next.112
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md
CHANGED
@@ -4,21 +4,21 @@ The AI Llamafile package provides an integration that allows users to manage and
 
 ## Features
 
-
+- Start and stop Llamafile language servers.
 
 ## Commands
 
 ### Start Llamafile
 
-
-
-
+- **Command ID:** `llamafile.start`
+- **Label:** `Start Llamafile`
+- **Functionality:** Allows you to start a Llamafile language server by selecting from a list of configured Llamafiles.
 
 ### Stop Llamafile
 
-
-
-
+- **Command ID:** `llamafile.stop`
+- **Label:** `Stop Llamafile`
+- **Functionality:** Allows you to stop a running Llamafile language server by selecting from a list of currently running Llamafiles.
 
 ## Usage
 
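For orientation: the two commands documented above are ordinary Theia commands, so any other extension can trigger them programmatically. A minimal sketch using the `CommandService` API from `@theia/core`; the class and method names here are hypothetical, not part of this package:

```ts
// Hypothetical consumer of the llamafile.start / llamafile.stop commands.
import { injectable, inject } from '@theia/core/shared/inversify';
import { CommandService } from '@theia/core';

@injectable()
export class LlamafileCommandsExample {

    @inject(CommandService)
    protected readonly commandService: CommandService;

    // Opens the same quick-pick that the "Start Llamafile" command uses.
    async start(): Promise<void> {
        await this.commandService.executeCommand('llamafile.start');
    }

    async stop(): Promise<void> {
        await this.commandService.executeCommand('llamafile.stop');
    }
}
```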
package/lib/node/llamafile-backend-module.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"llamafile-backend-module.d.ts","sourceRoot":"","sources":["../../src/node/llamafile-backend-module.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,eAAe,EAAE,MAAM,8BAA8B,CAAC;;
+{"version":3,"file":"llamafile-backend-module.d.ts","sourceRoot":"","sources":["../../src/node/llamafile-backend-module.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,eAAe,EAAE,MAAM,8BAA8B,CAAC;;AAmB/D,wBAEG"}
package/lib/node/llamafile-backend-module.js
CHANGED
@@ -19,7 +19,9 @@ const inversify_1 = require("@theia/core/shared/inversify");
 const llamafile_manager_impl_1 = require("./llamafile-manager-impl");
 const llamafile_manager_1 = require("../common/llamafile-manager");
 const core_1 = require("@theia/core");
-
+const connection_container_module_1 = require("@theia/core/lib/node/messaging/connection-container-module");
+// We use a connection module to handle AI services separately for each frontend.
+const llamafileConnectionModule = connection_container_module_1.ConnectionContainerModule.create(({ bind, bindBackendService, bindFrontendService }) => {
     bind(llamafile_manager_1.LlamafileManager).to(llamafile_manager_impl_1.LlamafileManagerImpl).inSingletonScope();
     bind(core_1.ConnectionHandler).toDynamicValue(ctx => new core_1.RpcConnectionHandler(llamafile_manager_1.LlamafileManagerPath, client => {
         const service = ctx.container.get(llamafile_manager_1.LlamafileManager);
@@ -27,4 +29,7 @@ exports.default = new inversify_1.ContainerModule(bind => {
         return service;
     })).inSingletonScope();
 });
+exports.default = new inversify_1.ContainerModule(bind => {
+    bind(connection_container_module_1.ConnectionContainerModule).toConstantValue(llamafileConnectionModule);
+});
 //# sourceMappingURL=llamafile-backend-module.js.map
package/lib/node/llamafile-backend-module.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"llamafile-backend-module.js","sourceRoot":"","sources":["../../src/node/llamafile-backend-module.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;AAEhF,4DAA+D;AAC/D,qEAAgE;AAChE,mEAAmH;AACnH,sCAAsE;
+{"version":3,"file":"llamafile-backend-module.js","sourceRoot":"","sources":["../../src/node/llamafile-backend-module.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;AAEhF,4DAA+D;AAC/D,qEAAgE;AAChE,mEAAmH;AACnH,sCAAsE;AACtE,4GAAuG;AAEvG,iFAAiF;AACjF,MAAM,yBAAyB,GAAG,uDAAyB,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,kBAAkB,EAAE,mBAAmB,EAAE,EAAE,EAAE;IACrH,IAAI,CAAC,oCAAgB,CAAC,CAAC,EAAE,CAAC,6CAAoB,CAAC,CAAC,gBAAgB,EAAE,CAAC;IACnE,IAAI,CAAC,wBAAiB,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,2BAAoB,CAClE,wCAAoB,EACpB,MAAM,CAAC,EAAE;QACL,MAAM,OAAO,GAAG,GAAG,CAAC,SAAS,CAAC,GAAG,CAAmB,oCAAgB,CAAC,CAAC;QACtE,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;QAC1B,OAAO,OAAO,CAAC;IACnB,CAAC,CACJ,CAAC,CAAC,gBAAgB,EAAE,CAAC;AAC1B,CAAC,CAAC,CAAC;AAEH,kBAAe,IAAI,2BAAe,CAAC,IAAI,CAAC,EAAE;IACtC,IAAI,CAAC,uDAAyB,CAAC,CAAC,eAAe,CAAC,yBAAyB,CAAC,CAAC;AAC/E,CAAC,CAAC,CAAC"}
package/package.json
CHANGED
@@ -1,11 +1,11 @@
 {
   "name": "@theia/ai-llamafile",
-  "version": "1.
+  "version": "1.57.0-next.112+f4778c273",
   "description": "Theia - Llamafile Integration",
   "dependencies": {
-    "@theia/ai-core": "1.
-    "@theia/core": "1.
-    "@theia/output": "1.
+    "@theia/ai-core": "1.57.0-next.112+f4778c273",
+    "@theia/core": "1.57.0-next.112+f4778c273",
+    "@theia/output": "1.57.0-next.112+f4778c273",
     "tslib": "^2.6.2"
   },
   "publishConfig": {
@@ -42,10 +42,10 @@
     "watch": "theiaext watch"
   },
   "devDependencies": {
-    "@theia/ext-scripts": "1.
+    "@theia/ext-scripts": "1.58.0"
   },
   "nyc": {
     "extends": "../../configs/nyc.json"
   },
-  "gitHead": "
+  "gitHead": "f4778c2737bb75613f0e1f99da8996bad91f6e17"
 }
package/src/node/llamafile-backend-module.ts
CHANGED
@@ -18,8 +18,10 @@ import { ContainerModule } from '@theia/core/shared/inversify';
 import { LlamafileManagerImpl } from './llamafile-manager-impl';
 import { LlamafileManager, LlamafileServerManagerClient, LlamafileManagerPath } from '../common/llamafile-manager';
 import { ConnectionHandler, RpcConnectionHandler } from '@theia/core';
+import { ConnectionContainerModule } from '@theia/core/lib/node/messaging/connection-container-module';
 
-
+// We use a connection module to handle AI services separately for each frontend.
+const llamafileConnectionModule = ConnectionContainerModule.create(({ bind, bindBackendService, bindFrontendService }) => {
     bind(LlamafileManager).to(LlamafileManagerImpl).inSingletonScope();
     bind(ConnectionHandler).toDynamicValue(ctx => new RpcConnectionHandler<LlamafileServerManagerClient>(
         LlamafileManagerPath,
@@ -30,3 +32,7 @@ export default new ContainerModule(bind => {
         }
     )).inSingletonScope();
 });
+
+export default new ContainerModule(bind => {
+    bind(ConnectionContainerModule).toConstantValue(llamafileConnectionModule);
+});
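The change above (shown both compiled under `lib/node` and in the TypeScript source) moves the backend bindings into a `ConnectionContainerModule`, so each connected frontend gets its own `LlamafileManagerImpl` instance instead of sharing one backend singleton. For context, a minimal sketch of how a frontend would typically consume such a per-connection service, assuming the usual `@theia/core` proxy pattern; this is illustrative and is not this package's actual frontend module:

```ts
// Illustrative frontend counterpart: each frontend window gets an RPC proxy that
// the backend pairs with its own per-connection LlamafileManagerImpl instance.
import { ContainerModule } from '@theia/core/shared/inversify';
import { WebSocketConnectionProvider } from '@theia/core/lib/browser/messaging/ws-connection-provider';
import { LlamafileManager, LlamafileManagerPath } from '../common/llamafile-manager';

export default new ContainerModule(bind => {
    bind(LlamafileManager).toDynamicValue(ctx =>
        WebSocketConnectionProvider.createProxy<LlamafileManager>(ctx.container, LlamafileManagerPath)
    ).inSingletonScope();
});
```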