plugin-sensitive-filter-xr 0.1.7 → 0.1.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"sensitiveFilter.d.ts","sourceRoot":"","sources":["../../src/lib/sensitiveFilter.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,EAAa,oBAAoB,EAA8B,MAAM,kBAAkB,CAAA;AAGnG,OAAO,EACL,eAAe,EAGf,uBAAuB,EACvB,wBAAwB,EAEzB,MAAM,sBAAsB,CAAA;AAC7B,OAAO,EAML,qBAAqB,EAKtB,MAAM,YAAY,CAAA;AAixBnB,qBAEa,yBAA0B,YAAW,wBAAwB,CAAC,qBAAqB,CAAC;IAE/F,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAY;IAEvC,QAAQ,CAAC,IAAI,EAAE,oBAAoB,CA6NlC;IAEK,gBAAgB,CACpB,OAAO,EAAE,qBAAqB,EAC9B,OAAO,EAAE,uBAAuB,GAC/B,OAAO,CAAC,eAAe,CAAC;IAa3B,OAAO,CAAC,wBAAwB;IAgThC,OAAO,CAAC,uBAAuB;CA2fhC;AAED,YAAY,EAAE,qBAAqB,EAAE,CAAA"}
+ {"version":3,"file":"sensitiveFilter.d.ts","sourceRoot":"","sources":["../../src/lib/sensitiveFilter.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,EAAa,oBAAoB,EAA8B,MAAM,kBAAkB,CAAA;AAGnG,OAAO,EACL,eAAe,EAGf,uBAAuB,EACvB,wBAAwB,EAEzB,MAAM,sBAAsB,CAAA;AAC7B,OAAO,EAML,qBAAqB,EAKtB,MAAM,YAAY,CAAA;AAoxBnB,qBAEa,yBAA0B,YAAW,wBAAwB,CAAC,qBAAqB,CAAC;IAE/F,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAY;IAEvC,QAAQ,CAAC,IAAI,EAAE,oBAAoB,CA6NlC;IAEK,gBAAgB,CACpB,OAAO,EAAE,qBAAqB,EAC9B,OAAO,EAAE,uBAAuB,GAC/B,OAAO,CAAC,eAAe,CAAC;IAa3B,OAAO,CAAC,wBAAwB;IAgThC,OAAO,CAAC,uBAAuB;CA6fhC;AAED,YAAY,EAAE,qBAAqB,EAAE,CAAA"}
@@ -234,16 +234,19 @@ class BufferedOutputProxyChatModel extends BaseChatModel {
      const internalOptions = buildInternalSourceOptions(options);
      const streamFn = this.innerModel?.stream;
      if (typeof streamFn === 'function') {
-       let mergedChunk = null;
-       for await (const rawChunk of streamFn.call(this.innerModel, messages, internalOptions)) {
-         const chunk = toAiMessageChunk(rawChunk);
-         if (!chunk) {
-           continue;
+       const stream = await streamFn.call(this.innerModel, messages, internalOptions);
+       if (stream && typeof stream[Symbol.asyncIterator] === 'function') {
+         let mergedChunk = null;
+         for await (const rawChunk of stream) {
+           const chunk = toAiMessageChunk(rawChunk);
+           if (!chunk) {
+             continue;
+           }
+           mergedChunk = mergedChunk ? mergedChunk.concat(chunk) : chunk;
+         }
+         if (mergedChunk) {
+           return toAiMessage(mergedChunk);
          }
-         mergedChunk = mergedChunk ? mergedChunk.concat(chunk) : chunk;
-       }
-       if (mergedChunk) {
-         return toAiMessage(mergedChunk);
        }
      }
      return toAiMessage(await this.innerModel.invoke(messages, internalOptions));
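
The change in this hunk is defensive: the old code passed the result of innerModel.stream(...) straight to for await, which throws a TypeError when stream() returns a Promise of an iterable rather than the iterable itself; the new code awaits the call first and only iterates when the result actually implements Symbol.asyncIterator, otherwise falling through to the non-streaming invoke path. A minimal standalone sketch of the same pattern (the types and the chunk-merging helpers are simplified assumptions, not the package's actual signatures):

// Illustrative only: InnerModel and Chunk are simplified stand-ins for the
// package's real model and message-chunk types.
interface Chunk {
  text: string
  concat(other: Chunk): Chunk
}

interface InnerModel {
  // stream may hand back the async iterable directly or a Promise of one,
  // depending on the underlying implementation.
  stream?: (input: string) => AsyncIterable<Chunk> | Promise<AsyncIterable<Chunk>>
  invoke(input: string): Promise<Chunk>
}

async function collectResponse(model: InnerModel, input: string): Promise<Chunk> {
  const streamFn = model.stream
  if (typeof streamFn === 'function') {
    // Await first; `for await (... of streamFn(...))` on a Promise throws at runtime.
    const stream = await streamFn.call(model, input)
    if (stream && typeof stream[Symbol.asyncIterator] === 'function') {
      let merged: Chunk | null = null
      for await (const chunk of stream) {
        merged = merged ? merged.concat(chunk) : chunk
      }
      if (merged) {
        return merged
      }
    }
  }
  // Streaming unavailable or produced nothing: fall back to a single invoke call.
  return model.invoke(input)
}
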
@@ -1097,7 +1100,9 @@ let SensitiveFilterMiddleware = class SensitiveFilterMiddleware {
      const llmConfig = getLlmConfig();
      if (!modelPromise) {
        modelPromise = this.commandBus.execute(new CreateModelClientCommand(buildInternalModelConfig(llmConfig.model), {
-         usageCallback: () => { },
+         usageCallback: () => {
+           //
+         },
        }));
      }
      return modelPromise;
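
The context lines in this hunk also show how the middleware caches its model client: it stores the pending promise from commandBus.execute(...) rather than the resolved client, so concurrent callers share a single in-flight creation. A small sketch of that memoization pattern under assumed names (ModelClient and createClient are placeholders, not the package's API):

// Illustrative sketch of promise memoization; ModelClient and createClient
// are placeholders rather than the package's real types.
interface ModelClient {
  invoke(input: string): Promise<string>
}

let modelPromise: Promise<ModelClient> | undefined

function getModelClient(createClient: () => Promise<ModelClient>): Promise<ModelClient> {
  if (!modelPromise) {
    // Cache the promise itself, not the resolved value, so overlapping callers
    // await the same in-flight creation instead of each starting their own.
    modelPromise = createClient()
  }
  return modelPromise
}
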
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "plugin-sensitive-filter-xr",
-   "version": "0.1.7",
+   "version": "0.1.19",
    "author": {
      "name": "XpertAI",
      "url": "https://xpertai.cn"