@leikeduntech/leiai-js 3.5.1 → 3.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/build/index.js +177 -90
  2. package/package.json +2 -2
package/build/index.js CHANGED
@@ -1,4 +1,5 @@
1
1
  // src/chatgpt-api.ts
2
+ import { Spark } from "@leikeduntech/spark-nodejs";
2
3
  import Keyv from "keyv";
3
4
  import pTimeout from "p-timeout";
4
5
  import QuickLRU from "quick-lru";
@@ -106,9 +107,6 @@ async function fetchSSE(url, options, fetch2 = fetch, manufacturer = "OpenAI") {
106
107
  }
107
108
  }
108
109
 
109
- // src/chatgpt-api.ts
110
- import { Spark } from "@leikeduntech/spark-nodejs";
111
-
112
110
  // src/utils.ts
113
111
  import CryptoJS from "crypto-js";
114
112
  var uuidv4Re = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
@@ -320,13 +318,11 @@ var ChatGPTAPI = class {
320
318
  }
321
319
  if (this._debug)
322
320
  console.log("\u8DDF\u8E2A2");
323
- const { messages, maxTokens, numTokens, errorMessage } = await this._buildMessages(
324
- text,
325
- opts,
326
- last_plugin_res
327
- );
321
+ const { messages, maxTokens, numTokens, errorMessage } = await this._buildMessages(text, opts, last_plugin_res);
328
322
  if (this._debug)
329
- console.log(`typeof errorMessage ${typeof errorMessage},numTokens:${numTokens}, _maxModelTokens:${this._maxModelTokens},errorMessage: ${errorMessage}`);
323
+ console.log(
324
+ `typeof errorMessage ${typeof errorMessage},numTokens:${numTokens}, _maxModelTokens:${this._maxModelTokens},errorMessage: ${errorMessage}`
325
+ );
330
326
  if (errorMessage !== "" && errorMessage !== null && errorMessage.length > 0) {
331
327
  return new Promise((resolve, reject) => {
332
328
  const errMsg = {
@@ -408,76 +404,101 @@ var ChatGPTAPI = class {
408
404
  let tools_key = "tools", tool_choice_key = "tool_choice", funArr = null;
409
405
  if (this._manufacturer.toLowerCase() === "azure")
410
406
  tools_key = "functions", tool_choice_key = "function_call";
411
- funArr = [{
412
- type: "function",
413
- function: {
414
- "name": "DallE2Fun",
415
- "description": this.pluginListMap(pluginList).description,
416
- "parameters": {
417
- "type": "object",
418
- "properties": {
419
- "prompt": {
420
- "type": "string",
421
- "description": "\u7528\u6237\u5BF9\u9700\u8981\u7F16\u8F91\u7684\u56FE\u7247\u4FEE\u6539\u7684\u63D0\u793A\u5185\u5BB9"
407
+ funArr = [
408
+ {
409
+ type: "function",
410
+ function: {
411
+ name: "DallE2Fun",
412
+ description: this.pluginListMap(pluginList).description,
413
+ parameters: {
414
+ type: "object",
415
+ properties: {
416
+ prompt: {
417
+ type: "string",
418
+ description: "\u7528\u6237\u5BF9\u9700\u8981\u7F16\u8F91\u7684\u56FE\u7247\u4FEE\u6539\u7684\u63D0\u793A\u5185\u5BB9"
419
+ },
420
+ image_url: {
421
+ type: "string",
422
+ description: "\u4ECE\u5BF9\u8BDD\u5386\u53F2\u8BB0\u5F55\u548C\u7528\u6237\u63D0\u793A\u8BCD\u91CC\u5339\u914D\u6700\u8FD1\u4E00\u6B21\u51FA\u73B0\u7684\u4EE5http\u5F00\u5934\u7684\u56FE\u7247\u7684\u94FE\u63A5\u5730\u5740"
423
+ }
422
424
  },
423
- "image_url": {
424
- "type": "string",
425
- "description": "\u4ECE\u5BF9\u8BDD\u5386\u53F2\u8BB0\u5F55\u548C\u7528\u6237\u63D0\u793A\u8BCD\u91CC\u5339\u914D\u6700\u8FD1\u4E00\u6B21\u51FA\u73B0\u7684\u4EE5http\u5F00\u5934\u7684\u56FE\u7247\u7684\u94FE\u63A5\u5730\u5740"
426
- }
427
- },
428
- "required": ["prompt", "image_url"]
425
+ required: ["prompt", "image_url"]
426
+ }
429
427
  }
430
- }
431
- }, {
432
- type: "function",
433
- function: {
434
- "name": "DallE3Fun",
435
- "description": this.pluginListMap(pluginList).description,
436
- "parameters": {
437
- "type": "object",
438
- "properties": {
439
- "prompt": {
440
- "type": "string",
441
- "description": "\u7ED8\u56FE\u7684\u63D0\u793A\u8BCD"
442
- }
443
- },
444
- "required": ["prompt"]
428
+ },
429
+ {
430
+ type: "function",
431
+ function: {
432
+ name: "DallE3Fun",
433
+ description: this.pluginListMap(pluginList).description,
434
+ parameters: {
435
+ type: "object",
436
+ properties: {
437
+ prompt: {
438
+ type: "string",
439
+ description: "\u7ED8\u56FE\u7684\u63D0\u793A\u8BCD"
440
+ }
441
+ },
442
+ required: ["prompt"]
443
+ }
445
444
  }
446
445
  }
447
- }];
446
+ ];
448
447
  if (this._manufacturer.toLowerCase() === "azure") {
449
448
  funArr = funArr.map((item) => {
450
449
  return item.function;
451
450
  });
452
451
  }
453
- body = Object.assign(body, { [tools_key]: funArr, [tool_choice_key]: "auto" });
452
+ body = Object.assign(body, {
453
+ [tools_key]: funArr,
454
+ [tool_choice_key]: "auto"
455
+ });
454
456
  break;
455
457
  default:
456
458
  break;
457
459
  }
458
460
  } else if (this._manufacturer.toLowerCase() === "aliyun") {
459
- body = Object.assign(body, { parameters: { result_format: "message" }, input: { messages } });
461
+ body = Object.assign(body, {
462
+ parameters: { result_format: "message" },
463
+ input: { messages }
464
+ });
460
465
  delete body.messages;
461
466
  } else if (this._manufacturer.toLowerCase() === "zhipu") {
462
- delete body.messages;
463
- body = Object.assign(body, { prompt: messages });
467
+ if (completionParams.model === "glm-4") {
468
+ } else {
469
+ delete body.messages;
470
+ body = Object.assign(body, { prompt: messages });
471
+ }
464
472
  } else if (this._manufacturer.toLowerCase() === "tencent") {
465
473
  url = this._apiBaseUrl;
466
474
  const timestamp = Math.ceil((/* @__PURE__ */ new Date()).getTime() / 1e3) + 1;
467
475
  const keyList = this._apiKey.split(".");
468
- body = Object.assign(body, { app_id: parseInt(keyList[0]), secret_id: keyList[1], timestamp, expired: timestamp + 24 * 60 * 60, stream: stream ? 1 : 0 });
476
+ body = Object.assign(body, {
477
+ app_id: parseInt(keyList[0]),
478
+ secret_id: keyList[1],
479
+ timestamp,
480
+ expired: timestamp + 24 * 60 * 60,
481
+ stream: stream ? 1 : 0
482
+ });
469
483
  delete body.model;
470
484
  delete body.max_tokens;
471
485
  headers["Authorization"] = signTencentHunyuan(body, url, keyList);
472
486
  } else if (this._manufacturer.toLowerCase() === "baidu") {
473
487
  if (pluginList && pluginList.indexOf("zhishiku") > -1) {
474
488
  let query = messages.splice(messages.length - 1, 1);
475
- body = Object.assign(body, { query: (_a = query[0]) == null ? void 0 : _a.content, history: messages });
489
+ body = Object.assign(body, {
490
+ query: (_a = query[0]) == null ? void 0 : _a.content,
491
+ history: messages
492
+ });
476
493
  delete body.messages;
477
494
  }
478
495
  } else if (this._manufacturer.toLowerCase() === "chatdoc") {
479
496
  let query = messages.splice(messages.length - 1, 1);
480
- body = Object.assign(body, { upload_id: (_b = completionParams == null ? void 0 : completionParams.extParams) == null ? void 0 : _b.upload_id, question: (_c = query[0]) == null ? void 0 : _c.content, history: messages });
497
+ body = Object.assign(body, {
498
+ upload_id: (_b = completionParams == null ? void 0 : completionParams.extParams) == null ? void 0 : _b.upload_id,
499
+ question: (_c = query[0]) == null ? void 0 : _c.content,
500
+ history: messages
501
+ });
481
502
  if (body.extParams)
482
503
  delete body.extParams;
483
504
  if (body.model)
@@ -496,7 +517,11 @@ var ChatGPTAPI = class {
496
517
  console.log(`api url ${url}`);
497
518
  console.log(`api header ${JSON.stringify(headers)}`);
498
519
  console.log(`sendMessage (${numTokens} tokens) body: `, body);
499
- console.log(`sendMessage (${numTokens} tokens) message : `, messages, typeof messages);
520
+ console.log(
521
+ `sendMessage (${numTokens} tokens) message : `,
522
+ messages,
523
+ typeof messages
524
+ );
500
525
  }
501
526
  if (this._manufacturer.toLowerCase() === "xunfei") {
502
527
  const self = this;
@@ -520,7 +545,9 @@ var ChatGPTAPI = class {
520
545
  onData({ content, start, end, seq }) {
521
546
  if (self._debug)
522
547
  console.log("onData", content, start, end, seq);
523
- result.id = `xunfei-${Math.floor(Math.random() * 1e7)}${(/* @__PURE__ */ new Date()).getTime()}`;
548
+ result.id = `xunfei-${Math.floor(
549
+ Math.random() * 1e7
550
+ )}${(/* @__PURE__ */ new Date()).getTime()}`;
524
551
  result.delta = content;
525
552
  if (content)
526
553
  result.text += content;
@@ -530,7 +557,13 @@ var ChatGPTAPI = class {
530
557
  onEnd({ content, tokens, questionTokens }) {
531
558
  if (self._debug)
532
559
  console.log("onEnd", content, tokens, questionTokens);
533
- result.detail = { usage: { prompt_tokens: questionTokens, completion_tokens: tokens, total_tokens: questionTokens + tokens } };
560
+ result.detail = {
561
+ usage: {
562
+ prompt_tokens: questionTokens,
563
+ completion_tokens: tokens,
564
+ total_tokens: questionTokens + tokens
565
+ }
566
+ };
534
567
  result.text = content.trim();
535
568
  return resolve(result);
536
569
  }
@@ -545,7 +578,7 @@ var ChatGPTAPI = class {
545
578
  body: JSON.stringify(body),
546
579
  signal: abortSignal,
547
580
  onMessage: (data) => {
548
- var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i2, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
581
+ var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i2, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
549
582
  if (data === "[DONE]") {
550
583
  result.text = result.text.trim();
551
584
  return resolve(result);
@@ -553,7 +586,11 @@ var ChatGPTAPI = class {
553
586
  try {
554
587
  const response = JSON.parse(data);
555
588
  if (this._debug) {
556
- console.log(`row data ${typeof response} : `, response, response == null ? void 0 : response.choices);
589
+ console.log(
590
+ `row data ${typeof response} : `,
591
+ response,
592
+ response == null ? void 0 : response.choices
593
+ );
557
594
  }
558
595
  if (this._manufacturer.toLowerCase() === "baidu") {
559
596
  if ((response == null ? void 0 : response.is_end) === true) {
@@ -567,22 +604,34 @@ var ChatGPTAPI = class {
567
604
  return resolve(result);
568
605
  }
569
606
  } else if (this._manufacturer.toLowerCase() === "aliyun") {
570
- if (["stop", "length"].indexOf((_c2 = (_b2 = response == null ? void 0 : response.output) == null ? void 0 : _b2.choices[0]) == null ? void 0 : _c2.finish_reason) > -1) {
607
+ if (["stop", "length"].indexOf(
608
+ (_c2 = (_b2 = response == null ? void 0 : response.output) == null ? void 0 : _b2.choices[0]) == null ? void 0 : _c2.finish_reason
609
+ ) > -1) {
571
610
  result.text = (_f2 = (_e2 = (_d2 = response == null ? void 0 : response.output) == null ? void 0 : _d2.choices[0]) == null ? void 0 : _e2.message) == null ? void 0 : _f2.content.trim();
572
611
  return resolve(result);
573
612
  }
574
613
  } else if (this._manufacturer.toLowerCase() === "zhipu") {
575
- if ((response == null ? void 0 : response.event) === "finish") {
576
- result.text += response == null ? void 0 : response.data.trim();
577
- return resolve(result);
614
+ if (completionParams.model === "glm-4") {
615
+ const gelResponse = JSON.parse(response.data);
616
+ if (((_h2 = (_g2 = gelResponse == null ? void 0 : gelResponse.choices) == null ? void 0 : _g2[0]) == null ? void 0 : _h2.finish_reason) === "stop") {
617
+ result.text = result.text.trim();
618
+ return resolve(result);
619
+ }
620
+ } else {
621
+ if ((response == null ? void 0 : response.event) === "finish") {
622
+ result.text += response == null ? void 0 : response.data.trim();
623
+ return resolve(result);
624
+ }
578
625
  }
579
626
  } else if (this._manufacturer.toLowerCase() === "tencent") {
580
- if (((_g2 = response.choices[0]) == null ? void 0 : _g2.finish_reason) === "stop") {
581
- result.text += (_i2 = (_h2 = response == null ? void 0 : response.choices[0]) == null ? void 0 : _h2.delta) == null ? void 0 : _i2.content.trim();
627
+ if (((_i2 = response.choices[0]) == null ? void 0 : _i2.finish_reason) === "stop") {
628
+ result.text += (_k = (_j = response == null ? void 0 : response.choices[0]) == null ? void 0 : _j.delta) == null ? void 0 : _k.content.trim();
582
629
  return resolve(result);
583
630
  }
584
631
  } else if (this._manufacturer.toLowerCase() === "chatdoc") {
585
- result.id = `chatdoc-${Math.floor(Math.random() * 1e7)}${(/* @__PURE__ */ new Date()).getTime()}`;
632
+ result.id = `chatdoc-${Math.floor(
633
+ Math.random() * 1e7
634
+ )}${(/* @__PURE__ */ new Date()).getTime()}`;
586
635
  if (response == null ? void 0 : response.source_info) {
587
636
  result.text += response == null ? void 0 : response.answer;
588
637
  return resolve(result);
@@ -597,7 +646,9 @@ var ChatGPTAPI = class {
597
646
  result.id = response.id;
598
647
  }
599
648
  }
600
- if (((_j = response.choices) == null ? void 0 : _j.length) && ["openai", "azure", "tencent"].indexOf(this._manufacturer.toLowerCase()) > -1) {
649
+ if (((_l = response.choices) == null ? void 0 : _l.length) && ["openai", "azure", "tencent"].indexOf(
650
+ this._manufacturer.toLowerCase()
651
+ ) > -1) {
601
652
  const delta = response.choices[0].delta;
602
653
  result.delta = "";
603
654
  if (response.choices[0].finish_reason === "tool_calls") {
@@ -622,27 +673,50 @@ var ChatGPTAPI = class {
622
673
  result.detail = response;
623
674
  onProgress == null ? void 0 : onProgress(result);
624
675
  } else if ((response == null ? void 0 : response.output) && this._manufacturer.toLowerCase() === "aliyun") {
625
- response.usage = Object.assign(response.usage, { prompt_tokens: (_k = response.usage) == null ? void 0 : _k.input_tokens, completion_tokens: (_l = response.usage) == null ? void 0 : _l.output_tokens, total_tokens: ((_m = response.usage) == null ? void 0 : _m.input_tokens) + ((_n = response.usage) == null ? void 0 : _n.output_tokens) });
676
+ response.usage = Object.assign(response.usage, {
677
+ prompt_tokens: (_m = response.usage) == null ? void 0 : _m.input_tokens,
678
+ completion_tokens: (_n = response.usage) == null ? void 0 : _n.output_tokens,
679
+ total_tokens: ((_o = response.usage) == null ? void 0 : _o.input_tokens) + ((_p = response.usage) == null ? void 0 : _p.output_tokens)
680
+ });
626
681
  result.delta = "";
627
- if ((_q = (_p = (_o = response == null ? void 0 : response.output) == null ? void 0 : _o.choices[0]) == null ? void 0 : _p.message) == null ? void 0 : _q.content)
628
- result.text = (_t = (_s = (_r = response == null ? void 0 : response.output) == null ? void 0 : _r.choices[0]) == null ? void 0 : _s.message) == null ? void 0 : _t.content;
682
+ if ((_s = (_r = (_q = response == null ? void 0 : response.output) == null ? void 0 : _q.choices[0]) == null ? void 0 : _r.message) == null ? void 0 : _s.content)
683
+ result.text = (_v = (_u = (_t = response == null ? void 0 : response.output) == null ? void 0 : _t.choices[0]) == null ? void 0 : _u.message) == null ? void 0 : _v.content;
629
684
  result.role = "assistant";
630
685
  result.detail = response;
631
686
  onProgress == null ? void 0 : onProgress(result);
632
687
  } else if ((response == null ? void 0 : response.data) && this._manufacturer.toLowerCase() === "zhipu") {
633
- if (response.event === "finish") {
634
- if ((_u = response == null ? void 0 : response.meta) == null ? void 0 : _u.usage) {
635
- response.usage = (_v = response == null ? void 0 : response.meta) == null ? void 0 : _v.usage;
636
- } else {
637
- response.usage = { prompt_tokens: 1, completion_tokens: 1, total_tokens: 2 };
688
+ if (completionParams.model === "glm-4") {
689
+ const glmResponse = JSON.parse(response.data);
690
+ if (((_x = (_w = glmResponse == null ? void 0 : glmResponse.choices) == null ? void 0 : _w[0]) == null ? void 0 : _x.finish_reason) === "stop") {
691
+ response.usage = glmResponse.usage;
692
+ }
693
+ let data2 = (_z = (_y = glmResponse == null ? void 0 : glmResponse.choices) == null ? void 0 : _y[0]) == null ? void 0 : _z.delta.content;
694
+ result.delta = data2;
695
+ if (data2) {
696
+ result.text += data2;
697
+ }
698
+ result.role = "assistant";
699
+ result.detail = response;
700
+ onProgress == null ? void 0 : onProgress(result);
701
+ } else {
702
+ if (response.event === "finish") {
703
+ if ((_A = response == null ? void 0 : response.meta) == null ? void 0 : _A.usage) {
704
+ response.usage = (_B = response == null ? void 0 : response.meta) == null ? void 0 : _B.usage;
705
+ } else {
706
+ response.usage = {
707
+ prompt_tokens: 1,
708
+ completion_tokens: 1,
709
+ total_tokens: 2
710
+ };
711
+ }
638
712
  }
713
+ result.delta = response.data;
714
+ if (response == null ? void 0 : response.data)
715
+ result.text += response == null ? void 0 : response.data;
716
+ result.role = "assistant";
717
+ result.detail = response;
718
+ onProgress == null ? void 0 : onProgress(result);
639
719
  }
640
- result.delta = response.data;
641
- if (response == null ? void 0 : response.data)
642
- result.text += response == null ? void 0 : response.data;
643
- result.role = "assistant";
644
- result.detail = response;
645
- onProgress == null ? void 0 : onProgress(result);
646
720
  } else if ((response == null ? void 0 : response.answer) && this._manufacturer.toLowerCase() === "chatdoc") {
647
721
  result.delta = response.answer;
648
722
  if (response == null ? void 0 : response.answer)
@@ -652,7 +726,10 @@ var ChatGPTAPI = class {
652
726
  onProgress == null ? void 0 : onProgress(result);
653
727
  }
654
728
  } catch (err) {
655
- console.warn(`${this._manufacturer} stream SEE event unexpected error`, err);
729
+ console.warn(
730
+ `${this._manufacturer} stream SEE event unexpected error`,
731
+ err
732
+ );
656
733
  return reject(err);
657
734
  }
658
735
  }
@@ -682,7 +759,11 @@ var ChatGPTAPI = class {
682
759
  }
683
760
  const response = await res.json();
684
761
  if (this._debug) {
685
- console.log(`row data ${typeof response} : `, response, (response == null ? void 0 : response.choices) && ((_d = response == null ? void 0 : response.choices[0]) == null ? void 0 : _d.message));
762
+ console.log(
763
+ `row data ${typeof response} : `,
764
+ response,
765
+ (response == null ? void 0 : response.choices) && ((_d = response == null ? void 0 : response.choices[0]) == null ? void 0 : _d.message)
766
+ );
686
767
  }
687
768
  if (this._manufacturer.toLowerCase() === "aliyun") {
688
769
  if (response == null ? void 0 : response.request_id) {
@@ -753,11 +834,13 @@ var ChatGPTAPI = class {
753
834
  message2.detail.choices[0].finish_reason = "tool_calls";
754
835
  message2.detail.choices[0].message = {
755
836
  role: message2.detail.choices[0].message.role,
756
- tool_calls: [{
757
- id: `call_${uuidv4()}`,
758
- type: "function",
759
- function: (_e = message2.detail.choices[0].message) == null ? void 0 : _e.function_call
760
- }]
837
+ tool_calls: [
838
+ {
839
+ id: `call_${uuidv4()}`,
840
+ type: "function",
841
+ function: (_e = message2.detail.choices[0].message) == null ? void 0 : _e.function_call
842
+ }
843
+ ]
761
844
  };
762
845
  }
763
846
  }
@@ -856,7 +939,9 @@ var ChatGPTAPI = class {
856
939
  }
857
940
  const systemMessageOffset = messages.length;
858
941
  let userMessage = null;
859
- if (["baidu", "zhipu", "xunfei", "aliyun", "tencent", "chatdoc"].indexOf(this._manufacturer.toLowerCase()) > -1) {
942
+ if (["baidu", "zhipu", "xunfei", "aliyun", "tencent", "chatdoc"].indexOf(
943
+ this._manufacturer.toLowerCase()
944
+ ) > -1) {
860
945
  userMessage = [{ role: "user", content: text }];
861
946
  } else if (pluginData) {
862
947
  userMessage = {
@@ -912,8 +997,13 @@ ${message.content}`]);
912
997
  }
913
998
  const parentMessageRole = parentMessage.role || "user";
914
999
  let parentMessageItem = null;
915
- if (["baidu", "zhipu", "xunfei", "aliyun", "tencent", "chatdoc"].indexOf(this._manufacturer.toLowerCase()) > -1) {
916
- parentMessageItem = { role: parentMessageRole, content: parentMessage.text };
1000
+ if (["baidu", "zhipu", "xunfei", "aliyun", "tencent", "chatdoc"].indexOf(
1001
+ this._manufacturer.toLowerCase()
1002
+ ) > -1) {
1003
+ parentMessageItem = {
1004
+ role: parentMessageRole,
1005
+ content: parentMessage.text
1006
+ };
917
1007
  } else if (parentMessage.content && ((_a = parentMessage.content) == null ? void 0 : _a.finish_reason) === "tool_calls") {
918
1008
  if (["azure"].indexOf(this._manufacturer.toLowerCase()) > -1) {
919
1009
  parentMessageItem = Object.assign(parentMessage.content.message, {
@@ -933,10 +1023,7 @@ ${message.content}`]);
933
1023
  if (parentMessage.tool_call_id)
934
1024
  parentMessageItem.tool_call_id = parentMessage.tool_call_id;
935
1025
  }
936
- nextMessages = nextMessages.slice(0, systemMessageOffset).concat([
937
- parentMessageItem,
938
- ...nextMessages.slice(systemMessageOffset)
939
- ]);
1026
+ nextMessages = nextMessages.slice(0, systemMessageOffset).concat([parentMessageItem, ...nextMessages.slice(systemMessageOffset)]);
940
1027
  if (nextMessages.length >= contextRestriction)
941
1028
  break;
942
1029
  parentMessageId = parentMessage.parentMessageId;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@leikeduntech/leiai-js",
3
- "version": "3.5.1",
3
+ "version": "3.5.3",
4
4
  "author": "liuhean",
5
5
  "repository": {
6
6
  "type": "git",
@@ -41,7 +41,7 @@
41
41
  "node": ">=14"
42
42
  },
43
43
  "dependencies": {
44
- "@leikeduntech/spark-nodejs": "1.0.1",
44
+ "@leikeduntech/spark-nodejs": "1.1.0",
45
45
  "cac": "^6.7.14",
46
46
  "conf": "^11.0.1",
47
47
  "crypto-js": "^4.1.1",