@chenchaolong/plugin-vllm 0.0.6 → 0.0.7

This diff shows the changes between two package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as published in the public registry.
package/dist/llm/llm.d.ts CHANGED
@@ -10,6 +10,7 @@ export declare class VLLMLargeLanguageModel extends LargeLanguageModel {
10
10
  /**
11
11
  * Generate model schema from credentials for customizable models
12
12
  * This method dynamically generates parameter rules including thinking mode
13
+ * Merges parent class parameter rules (streaming, temperature, etc.) with thinking mode
13
14
  */
14
15
  getCustomizableModelSchemaFromCredentials(model: string, credentials: Record<string, any>): AIModelEntity | null;
15
16
  }
@@ -1 +1 @@
1
- {"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../src/llm/llm.ts"],"names":[],"mappings":"AACA,OAAO,EACL,aAAa,EAGb,aAAa,EAId,MAAM,kBAAkB,CAAA;AAEzB,OAAO,EACL,2BAA2B,EAG3B,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,sBAAsB,CAAA;AAE7B,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAA;AAC9D,OAAO,EAAsB,oBAAoB,EAAE,MAAM,aAAa,CAAA;AAGtE,qBACa,sBAAuB,SAAQ,kBAAkB;;gBAGhD,aAAa,EAAE,oBAAoB;IAIzC,mBAAmB,CAAC,KAAK,EAAE,MAAM,EAAE,WAAW,EAAE,oBAAoB,GAAG,OAAO,CAAC,IAAI,CAAC;IAkBjF,YAAY,CAAC,YAAY,EAAE,aAAa,EAAE,OAAO,CAAC,EAAE,iBAAiB;IA+C9E;;;OAGG;IACM,yCAAyC,CAChD,KAAK,EAAE,MAAM,EACb,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAC/B,aAAa,GAAG,IAAI;CAwExB"}
1
+ {"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../src/llm/llm.ts"],"names":[],"mappings":"AACA,OAAO,EACL,aAAa,EAGb,aAAa,EAId,MAAM,kBAAkB,CAAA;AAEzB,OAAO,EACL,2BAA2B,EAG3B,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,sBAAsB,CAAA;AAE7B,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAA;AAC9D,OAAO,EAAsB,oBAAoB,EAAE,MAAM,aAAa,CAAA;AAGtE,qBACa,sBAAuB,SAAQ,kBAAkB;;gBAGhD,aAAa,EAAE,oBAAoB;IAIzC,mBAAmB,CAAC,KAAK,EAAE,MAAM,EAAE,WAAW,EAAE,oBAAoB,GAAG,OAAO,CAAC,IAAI,CAAC;IAkBjF,YAAY,CAAC,YAAY,EAAE,aAAa,EAAE,OAAO,CAAC,EAAE,iBAAiB;IA+C9E;;;;OAIG;IACM,yCAAyC,CAChD,KAAK,EAAE,MAAM,EACb,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAC/B,aAAa,GAAG,IAAI;CA0FxB"}
package/dist/llm/llm.js CHANGED
@@ -73,12 +73,16 @@ let VLLMLargeLanguageModel = VLLMLargeLanguageModel_1 = class VLLMLargeLanguageM
73
73
  /**
74
74
  * Generate model schema from credentials for customizable models
75
75
  * This method dynamically generates parameter rules including thinking mode
76
+ * Merges parent class parameter rules (streaming, temperature, etc.) with thinking mode
76
77
  */
77
78
  getCustomizableModelSchemaFromCredentials(model, credentials) {
78
- const rules = [];
79
+ // Get parent class parameter rules (streaming and temperature)
80
+ // This ensures we include common parameters from the base class
81
+ const parentSchema = super.getCustomizableModelSchemaFromCredentials(model, credentials);
82
+ const parentRules = parentSchema?.parameter_rules || [];
79
83
  // Add thinking mode parameter
80
84
  // This parameter enables thinking mode for models deployed on vLLM and SGLang
81
- rules.push({
85
+ const thinkingRule = {
82
86
  name: 'thinking',
83
87
  type: ParameterType.BOOLEAN,
84
88
  label: {
@@ -91,7 +95,13 @@ let VLLMLargeLanguageModel = VLLMLargeLanguageModel_1 = class VLLMLargeLanguageM
91
95
  },
92
96
  required: false,
93
97
  default: credentials['thinking'] ?? false
94
- });
98
+ };
99
+ // Merge parent rules with thinking rule
100
+ // Filter out any duplicate rules by name to ensure thinking rule takes precedence
101
+ const rules = [
102
+ ...parentRules,
103
+ thinkingRule
104
+ ].filter((rule, index, self) => index === self.findIndex(r => r.name === rule.name));
95
105
  // Determine completion type from credentials
96
106
  let completionType = 'chat';
97
107
  if (credentials['mode']) {
@@ -136,7 +146,13 @@ let VLLMLargeLanguageModel = VLLMLargeLanguageModel_1 = class VLLMLargeLanguageM
136
146
  [ModelPropertyKey.MODE]: completionType,
137
147
  [ModelPropertyKey.CONTEXT_SIZE]: contextSize
138
148
  },
139
- parameter_rules: rules
149
+ parameter_rules: rules,
150
+ pricing: parentSchema?.pricing || {
151
+ input: credentials['input_price'] ?? 0,
152
+ output: credentials['output_price'] ?? 0,
153
+ unit: credentials['unit'] ?? 0,
154
+ currency: credentials['currency'] ?? 'USD'
155
+ }
140
156
  };
141
157
  }
142
158
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@chenchaolong/plugin-vllm",
3
- "version": "0.0.6",
3
+ "version": "0.0.7",
4
4
  "author": {
5
5
  "name": "XpertAI",
6
6
  "url": "https://xpertai.cn"