@aigne/lmstudio 1.2.0 → 1.74.0-beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -230,6 +230,6 @@ Contributions are welcome! Please read our [contributing guidelines](../../CONTR
  
  ## Support
  
- - [GitHub Issues](https://github.com/AIGNE-io/aigne-framework/issues)
+ - [GitHub Issues](https://github.com/ArcBlock/aigne-framework/issues)
  - [Documentation](https://www.arcblock.io/docs/aigne-framework)
  - [LM Studio Documentation](https://lmstudio.ai/docs)
package/dist/index.cjs ADDED
@@ -0,0 +1,3 @@
+ const require_lmstudio_chat_model = require('./lmstudio-chat-model.cjs');
+
+ exports.LMStudioChatModel = require_lmstudio_chat_model.LMStudioChatModel;
package/dist/index.d.cts ADDED
@@ -0,0 +1,2 @@
+ import { LMStudioChatModel } from "./lmstudio-chat-model.cjs";
+ export { LMStudioChatModel };
package/dist/index.d.mts ADDED
@@ -0,0 +1,2 @@
+ import { LMStudioChatModel } from "./lmstudio-chat-model.mjs";
+ export { LMStudioChatModel };
package/dist/index.mjs ADDED
@@ -0,0 +1,3 @@
+ import { LMStudioChatModel } from "./lmstudio-chat-model.mjs";
+
+ export { LMStudioChatModel };
package/dist/lmstudio-chat-model.cjs ADDED
@@ -0,0 +1,37 @@
+ let _aigne_openai = require("@aigne/openai");
+
+ //#region src/lmstudio-chat-model.ts
+ const LM_STUDIO_DEFAULT_BASE_URL = "http://localhost:1234/v1";
+ const LM_STUDIO_DEFAULT_CHAT_MODEL = "llama-3.2-3b-instruct";
+ /**
+  * Implementation of the ChatModel interface for LM Studio
+  *
+  * This model allows you to run local LLMs through LM Studio,
+  * with an OpenAI-compatible API interface.
+  *
+  * Default model: 'llama-3.2-3b-instruct'
+  *
+  * @example
+  * Here's how to create and use an LM Studio chat model:
+  * {@includeCode ../test/lmstudio-chat-model.test.ts#example-lmstudio-chat-model}
+  *
+  * @example
+  * Here's an example with streaming response:
+  * {@includeCode ../test/lmstudio-chat-model.test.ts#example-lmstudio-chat-model-streaming}
+  */
+ var LMStudioChatModel = class extends _aigne_openai.OpenAIChatModel {
+   constructor(options) {
+     super({
+       ...options,
+       model: options?.model || LM_STUDIO_DEFAULT_CHAT_MODEL,
+       baseURL: options?.baseURL || process.env.LM_STUDIO_BASE_URL || LM_STUDIO_DEFAULT_BASE_URL
+     });
+   }
+   apiKeyEnvName = "LM_STUDIO_API_KEY";
+   apiKeyDefault = "not-required";
+   supportsNativeStructuredOutputs = false;
+   supportsTemperature = true;
+ };
+
+ //#endregion
+ exports.LMStudioChatModel = LMStudioChatModel;
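The build above is the adapter's entire runtime surface: a constructor whose `model` and `baseURL` options fall back first to the `LM_STUDIO_BASE_URL` environment variable and then to the built-in constants. A minimal usage sketch based only on what this output confirms (the `qwen2.5-7b-instruct` name is a hypothetical placeholder for whatever model is loaded in LM Studio):

```ts
import { LMStudioChatModel } from "@aigne/lmstudio";

// No options: model defaults to "llama-3.2-3b-instruct" and baseURL to
// LM_STUDIO_BASE_URL, or "http://localhost:1234/v1" if the env var is unset.
// LM_STUDIO_API_KEY defaults to "not-required", so no real key is needed.
const local = new LMStudioChatModel();

// Explicit options win over both the env var and the built-in defaults.
const remote = new LMStudioChatModel({
  model: "qwen2.5-7b-instruct", // hypothetical: any model name served by LM Studio
  baseURL: "http://192.168.1.10:1234/v1",
});
```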
package/dist/lmstudio-chat-model.d.cts RENAMED
@@ -1,4 +1,6 @@
- import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
+ import { OpenAIChatModel, OpenAIChatModelOptions } from "@aigne/openai";
+
+ //#region src/lmstudio-chat-model.d.ts
  /**
   * Implementation of the ChatModel interface for LM Studio
   *
@@ -15,10 +17,13 @@ import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
   * Here's an example with streaming response:
   * {@includeCode ../test/lmstudio-chat-model.test.ts#example-lmstudio-chat-model-streaming}
   */
- export declare class LMStudioChatModel extends OpenAIChatModel {
-   constructor(options?: OpenAIChatModelOptions);
-   protected apiKeyEnvName: string;
-   protected apiKeyDefault: string;
-   protected supportsNativeStructuredOutputs: boolean;
-   protected supportsTemperature: boolean;
+ declare class LMStudioChatModel extends OpenAIChatModel {
+   constructor(options?: OpenAIChatModelOptions);
+   protected apiKeyEnvName: string;
+   protected apiKeyDefault: string;
+   protected supportsNativeStructuredOutputs: boolean;
+   protected supportsTemperature: boolean;
  }
+ //#endregion
+ export { LMStudioChatModel };
+ //# sourceMappingURL=lmstudio-chat-model.d.cts.map
package/dist/lmstudio-chat-model.d.cts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"lmstudio-chat-model.d.cts","names":[],"sources":["../src/lmstudio-chat-model.ts"],"mappings":";;;;AAqBA;;;;;;;;;;;;;;;cAAa,iBAAA,SAA0B,eAAA;EAAA,YAAA,OAAA,GACf,sBAAA;EAAA,UAAA,aAAA;EAAA,UAAA,aAAA;EAAA,UAAA,+BAAA;EAAA,UAAA,mBAAA;AAAA"}
package/dist/lmstudio-chat-model.d.mts RENAMED
@@ -1,4 +1,6 @@
- import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
+ import { OpenAIChatModel, OpenAIChatModelOptions } from "@aigne/openai";
+
+ //#region src/lmstudio-chat-model.d.ts
  /**
   * Implementation of the ChatModel interface for LM Studio
   *
@@ -15,10 +17,13 @@ import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
   * Here's an example with streaming response:
   * {@includeCode ../test/lmstudio-chat-model.test.ts#example-lmstudio-chat-model-streaming}
   */
- export declare class LMStudioChatModel extends OpenAIChatModel {
-   constructor(options?: OpenAIChatModelOptions);
-   protected apiKeyEnvName: string;
-   protected apiKeyDefault: string;
-   protected supportsNativeStructuredOutputs: boolean;
-   protected supportsTemperature: boolean;
+ declare class LMStudioChatModel extends OpenAIChatModel {
+   constructor(options?: OpenAIChatModelOptions);
+   protected apiKeyEnvName: string;
+   protected apiKeyDefault: string;
+   protected supportsNativeStructuredOutputs: boolean;
+   protected supportsTemperature: boolean;
  }
+ //#endregion
+ export { LMStudioChatModel };
+ //# sourceMappingURL=lmstudio-chat-model.d.mts.map
package/dist/lmstudio-chat-model.d.mts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"lmstudio-chat-model.d.mts","names":[],"sources":["../src/lmstudio-chat-model.ts"],"mappings":";;;;AAqBA;;;;;;;;;;;;;;;cAAa,iBAAA,SAA0B,eAAA;EAAA,YAAA,OAAA,GACf,sBAAA;EAAA,UAAA,aAAA;EAAA,UAAA,aAAA;EAAA,UAAA,+BAAA;EAAA,UAAA,mBAAA;AAAA"}
package/dist/lmstudio-chat-model.mjs ADDED
@@ -0,0 +1,38 @@
+ import { OpenAIChatModel } from "@aigne/openai";
+
+ //#region src/lmstudio-chat-model.ts
+ const LM_STUDIO_DEFAULT_BASE_URL = "http://localhost:1234/v1";
+ const LM_STUDIO_DEFAULT_CHAT_MODEL = "llama-3.2-3b-instruct";
+ /**
+  * Implementation of the ChatModel interface for LM Studio
+  *
+  * This model allows you to run local LLMs through LM Studio,
+  * with an OpenAI-compatible API interface.
+  *
+  * Default model: 'llama-3.2-3b-instruct'
+  *
+  * @example
+  * Here's how to create and use an LM Studio chat model:
+  * {@includeCode ../test/lmstudio-chat-model.test.ts#example-lmstudio-chat-model}
+  *
+  * @example
+  * Here's an example with streaming response:
+  * {@includeCode ../test/lmstudio-chat-model.test.ts#example-lmstudio-chat-model-streaming}
+  */
+ var LMStudioChatModel = class extends OpenAIChatModel {
+   constructor(options) {
+     super({
+       ...options,
+       model: options?.model || LM_STUDIO_DEFAULT_CHAT_MODEL,
+       baseURL: options?.baseURL || process.env.LM_STUDIO_BASE_URL || LM_STUDIO_DEFAULT_BASE_URL
+     });
+   }
+   apiKeyEnvName = "LM_STUDIO_API_KEY";
+   apiKeyDefault = "not-required";
+   supportsNativeStructuredOutputs = false;
+   supportsTemperature = true;
+ };
+
+ //#endregion
+ export { LMStudioChatModel };
+ //# sourceMappingURL=lmstudio-chat-model.mjs.map
package/dist/lmstudio-chat-model.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"lmstudio-chat-model.mjs","names":[],"sources":["../src/lmstudio-chat-model.ts"],"sourcesContent":["import { OpenAIChatModel, type OpenAIChatModelOptions } from \"@aigne/openai\";\n\nconst LM_STUDIO_DEFAULT_BASE_URL = \"http://localhost:1234/v1\";\nconst LM_STUDIO_DEFAULT_CHAT_MODEL = \"llama-3.2-3b-instruct\";\n\n/**\n * Implementation of the ChatModel interface for LM Studio\n *\n * This model allows you to run local LLMs through LM Studio,\n * with an OpenAI-compatible API interface.\n *\n * Default model: 'llama-3.2-3b-instruct'\n *\n * @example\n * Here's how to create and use an LM Studio chat model:\n * {@includeCode ../test/lmstudio-chat-model.test.ts#example-lmstudio-chat-model}\n *\n * @example\n * Here's an example with streaming response:\n * {@includeCode ../test/lmstudio-chat-model.test.ts#example-lmstudio-chat-model-streaming}\n */\nexport class LMStudioChatModel extends OpenAIChatModel {\n constructor(options?: OpenAIChatModelOptions) {\n super({\n ...options,\n model: options?.model || LM_STUDIO_DEFAULT_CHAT_MODEL,\n baseURL: options?.baseURL || process.env.LM_STUDIO_BASE_URL || LM_STUDIO_DEFAULT_BASE_URL,\n });\n }\n\n protected override apiKeyEnvName = \"LM_STUDIO_API_KEY\";\n protected override apiKeyDefault = \"not-required\";\n\n protected override supportsNativeStructuredOutputs = false;\n protected override supportsTemperature = true;\n}\n"],"mappings":";;;AAEA,MAAM,6BAA6B;AACnC,MAAM,+BAA+B;;;;;;;;;;;;;;;;;AAkBrC,IAAa,oBAAb,cAAuC,gBAAgB;CACrD,YAAY,SAAkC;AAC5C,QAAM;GACJ,GAAG;GACH,OAAO,SAAS,SAAS;GACzB,SAAS,SAAS,WAAW,QAAQ,IAAI,sBAAsB;GAChE,CAAC;;CAGJ,AAAmB,gBAAgB;CACnC,AAAmB,gBAAgB;CAEnC,AAAmB,kCAAkC;CACrD,AAAmB,sBAAsB"}
package/package.json CHANGED
@@ -1,54 +1,56 @@
  {
    "name": "@aigne/lmstudio",
-   "version": "1.2.0",
+   "version": "1.74.0-beta",
    "description": "AIGNE LM Studio model adapter for integrating with locally hosted AI models via LM Studio",
+   "license": "Elastic-2.0",
    "publishConfig": {
      "access": "public"
    },
    "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
-   "homepage": "https://github.com/AIGNE-io/aigne-framework",
-   "license": "Elastic-2.0",
+   "homepage": "https://www.aigne.io/framework",
    "repository": {
      "type": "git",
-     "url": "git+https://github.com/AIGNE-io/aigne-framework"
+     "url": "git+https://github.com/ArcBlock/aigne-framework"
+   },
+   "bugs": {
+     "url": "https://github.com/ArcBlock/aigne-framework/issues"
+   },
+   "type": "module",
+   "main": "./dist/index.cjs",
+   "module": "./dist/index.mjs",
+   "types": "./dist/index.d.cts",
+   "exports": {
+     ".": {
+       "require": "./dist/index.cjs",
+       "import": "./dist/index.mjs"
+     },
+     "./*": "./*"
    },
    "files": [
-     "lib/cjs",
-     "lib/dts",
-     "lib/esm",
+     "dist",
      "LICENSE",
      "README.md",
      "CHANGELOG.md"
    ],
-   "type": "module",
-   "main": "./lib/cjs/index.js",
-   "module": "./lib/esm/index.js",
-   "types": "./lib/dts/index.d.ts",
-   "exports": {
-     ".": {
-       "import": "./lib/esm/index.js",
-       "require": "./lib/cjs/index.js",
-       "types": "./lib/dts/index.d.ts"
-     }
-   },
    "dependencies": {
-     "@aigne/openai": "^0.16.16"
+     "@aigne/openai": "^1.74.0-beta"
    },
    "devDependencies": {
-     "@types/bun": "^1.2.22",
-     "@types/node": "^24.5.1",
+     "@types/bun": "^1.3.6",
      "npm-run-all": "^4.1.5",
-     "rimraf": "^6.0.1",
-     "typescript": "^5.9.2",
-     "@aigne/core": "^1.72.0",
-     "@aigne/test-utils": "^0.5.69"
+     "rimraf": "^6.1.2",
+     "tsdown": "0.20.0-beta.3",
+     "typescript": "5.9.2",
+     "@aigne/core": "1.74.0-beta",
+     "@aigne/utils": "1.74.0-beta",
+     "@aigne/typescript-config": "0.0.0",
+     "@aigne/scripts": "0.0.0"
    },
    "scripts": {
-     "lint": "tsc --noEmit",
-     "build": "tsc --build scripts/tsconfig.build.json",
-     "clean": "rimraf lib test/coverage",
+     "build": "tsdown",
+     "check-types": "tsc --noEmit",
+     "clean": "rimraf dist coverage",
      "test": "bun test",
-     "test:coverage": "bun test --coverage --coverage-reporter=lcov --coverage-reporter=text",
-     "postbuild": "node ../../scripts/post-build-lib.mjs"
+     "test:coverage": "bun test --coverage --coverage-reporter=lcov --coverage-reporter=text"
    }
  }
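For consumers, the packaging change replaces deep `lib/*` paths with a conditional `exports` map, so the loader rather than the importer picks the module format. A consumption sketch, assuming Node's standard conditional-exports resolution:

```ts
// ESM: the "import" condition resolves to ./dist/index.mjs.
import { LMStudioChatModel } from "@aigne/lmstudio";

// CJS callers hit the "require" condition (./dist/index.cjs) instead:
//   const { LMStudioChatModel } = require("@aigne/lmstudio");

// The catch-all "./*" subpath keeps deep imports working, e.g.:
//   import "@aigne/lmstudio/dist/lmstudio-chat-model.mjs";
const model = new LMStudioChatModel();
```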