@defai.digital/automatosx 6.5.6 → 6.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +14 -7
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -3691,7 +3691,9 @@ function estimateTimeout(options) {
3691
3691
  model,
3692
3692
  streaming = false,
3693
3693
  maxTokens,
3694
- historicalAverage
3694
+ historicalAverage,
3695
+ maxTimeoutMs = 36e5
3696
+ // Default: 1 hour if not specified
3695
3697
  } = options;
3696
3698
  const inputTokens = estimateTokenCount(prompt) + (systemPrompt ? estimateTokenCount(systemPrompt) : 0);
3697
3699
  const estimatedOutputTokens = maxTokens || Math.min(inputTokens * 2, 4096);
@@ -3711,7 +3713,7 @@ function estimateTimeout(options) {
3711
3713
  confidence = "low";
3712
3714
  }
3713
3715
  const timeoutMs = Math.ceil(finalEstimate * 2.5);
3714
- const cappedTimeout = Math.min(Math.max(timeoutMs, 3e4), 3e5);
3716
+ const cappedTimeout = Math.min(Math.max(timeoutMs, 3e4), maxTimeoutMs);
3715
3717
  const reasoning = buildReasoning({
3716
3718
  inputTokens,
3717
3719
  estimatedOutputTokens,
@@ -4108,7 +4110,8 @@ User: ${request.prompt}`;
4108
4110
  prompt: fullPrompt,
4109
4111
  systemPrompt: request.systemPrompt,
4110
4112
  model: typeof request.model === "string" ? request.model : void 0,
4111
- maxTokens: request.maxTokens
4113
+ maxTokens: request.maxTokens,
4114
+ maxTimeoutMs: this.config.timeout
4112
4115
  });
4113
4116
  if (process.env.AUTOMATOSX_QUIET !== "true") {
4114
4117
  logger.info(formatTimeoutEstimate(timeoutEstimate));
@@ -4469,7 +4472,8 @@ User: ${request.prompt}`;
4469
4472
  prompt: fullPrompt,
4470
4473
  systemPrompt: request.systemPrompt,
4471
4474
  model: typeof request.model === "string" ? request.model : void 0,
4472
- maxTokens: request.maxTokens
4475
+ maxTokens: request.maxTokens,
4476
+ maxTimeoutMs: this.config.timeout
4473
4477
  });
4474
4478
  if (process.env.AUTOMATOSX_QUIET !== "true") {
4475
4479
  logger.info(formatTimeoutEstimate(timeoutEstimate));
@@ -5112,7 +5116,8 @@ ${request.prompt}`;
5112
5116
  prompt: fullPrompt,
5113
5117
  systemPrompt: request.systemPrompt,
5114
5118
  model: typeof request.model === "string" ? request.model : void 0,
5115
- maxTokens: request.maxTokens
5119
+ maxTokens: request.maxTokens,
5120
+ maxTimeoutMs: this.config.timeout
5116
5121
  });
5117
5122
  if (process.env.AUTOMATOSX_QUIET !== "true") {
5118
5123
  logger.info(formatTimeoutEstimate(timeoutEstimate));
@@ -5327,7 +5332,8 @@ User: ${request.prompt}`;
5327
5332
  systemPrompt: request.systemPrompt,
5328
5333
  model: typeof request.model === "string" ? request.model : void 0,
5329
5334
  // Ensure model is string or undefined
5330
- maxTokens: request.maxTokens
5335
+ maxTokens: request.maxTokens,
5336
+ maxTimeoutMs: this.config.timeout
5331
5337
  });
5332
5338
  if (process.env.AUTOMATOSX_QUIET !== "true") {
5333
5339
  logger.info(formatTimeoutEstimate(timeoutEstimate));
@@ -5597,7 +5603,8 @@ User: ${request.prompt}`;
5597
5603
  prompt: fullPrompt,
5598
5604
  systemPrompt: request.systemPrompt,
5599
5605
  model: typeof request.model === "string" ? request.model : void 0,
5600
- maxTokens: request.maxTokens
5606
+ maxTokens: request.maxTokens,
5607
+ maxTimeoutMs: this.config.timeout
5601
5608
  });
5602
5609
  if (process.env.AUTOMATOSX_QUIET !== "true") {
5603
5610
  logger.info(formatTimeoutEstimate(timeoutEstimate));
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@defai.digital/automatosx",
3
- "version": "6.5.6",
3
+ "version": "6.5.7",
4
4
  "description": "AI Agent Orchestration Platform",
5
5
  "type": "module",
6
6
  "publishConfig": {