@yourgpt/llm-sdk 1.2.0 → 1.2.2

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -3454,7 +3454,13 @@ var Runtime = class {
3454
3454
  options
3455
3455
  );
3456
3456
  }
3457
- const generator = useAgentLoop ? this.processChatWithLoop(body, signal, void 0, void 0, request) : this.processChat(body, signal);
3457
+ const generator = this.processChatWithLoop(
3458
+ body,
3459
+ signal,
3460
+ void 0,
3461
+ void 0,
3462
+ request
3463
+ );
3458
3464
  const wrappedGenerator = this.wrapGeneratorWithOnFinish(
3459
3465
  generator,
3460
3466
  body.threadId,
@@ -3501,15 +3507,15 @@ var Runtime = class {
3501
3507
  /**
3502
3508
  * Handle non-streaming request - returns JSON instead of SSE
3503
3509
  */
3504
- async handleNonStreamingRequest(body, signal, useAgentLoop, httpRequest, options) {
3510
+ async handleNonStreamingRequest(body, signal, _useAgentLoop, httpRequest, options) {
3505
3511
  try {
3506
- const generator = useAgentLoop ? this.processChatWithLoop(
3512
+ const generator = this.processChatWithLoop(
3507
3513
  body,
3508
3514
  signal,
3509
3515
  void 0,
3510
3516
  void 0,
3511
3517
  httpRequest
3512
- ) : this.processChat(body, signal);
3518
+ );
3513
3519
  const events = [];
3514
3520
  let content = "";
3515
3521
  const toolCalls = [];