@probelabs/visor 0.1.147-ee → 0.1.148-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. package/dist/frontends/github-frontend.d.ts +2 -1
  2. package/dist/frontends/github-frontend.d.ts.map +1 -1
  3. package/dist/index.js +726 -113
  4. package/dist/providers/ai-check-provider.d.ts.map +1 -1
  5. package/dist/scheduler/schedule-tool.d.ts.map +1 -1
  6. package/dist/scheduler/scheduler.d.ts +5 -0
  7. package/dist/scheduler/scheduler.d.ts.map +1 -1
  8. package/dist/sdk/{check-provider-registry-LBYIKFYM.mjs → check-provider-registry-AMYY2ZJY.mjs} +5 -6
  9. package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs → check-provider-registry-DVQDGTOE.mjs} +5 -6
  10. package/dist/sdk/{chunk-4F5UVWAN.mjs → chunk-62TNF5PJ.mjs} +2 -2
  11. package/dist/sdk/{chunk-4F5UVWAN.mjs.map → chunk-62TNF5PJ.mjs.map} +1 -1
  12. package/dist/sdk/{chunk-PNZH3JSI.mjs → chunk-75Q63UNX.mjs} +2742 -276
  13. package/dist/sdk/chunk-75Q63UNX.mjs.map +1 -0
  14. package/dist/sdk/{chunk-FBJ7MC7R.mjs → chunk-CISJ6DJW.mjs} +3 -3
  15. package/dist/sdk/{chunk-EWGX7LI7.mjs → chunk-H4AYMOAT.mjs} +2742 -276
  16. package/dist/sdk/chunk-H4AYMOAT.mjs.map +1 -0
  17. package/dist/sdk/{chunk-V2QW6ECX.mjs → chunk-RJLJUTSU.mjs} +2 -2
  18. package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs → failure-condition-evaluator-IVCTD4BZ.mjs} +3 -3
  19. package/dist/sdk/{github-frontend-47EU2HBY.mjs → github-frontend-DFT5G32K.mjs} +16 -4
  20. package/dist/sdk/github-frontend-DFT5G32K.mjs.map +1 -0
  21. package/dist/sdk/{host-GVR4UGZ3.mjs → host-H7IX4GBK.mjs} +2 -2
  22. package/dist/sdk/{host-KGN5OIAM.mjs → host-NZXGBBJI.mjs} +2 -2
  23. package/dist/sdk/{routing-CZ36LVVS.mjs → routing-LU5PAREW.mjs} +4 -4
  24. package/dist/sdk/schedule-tool-4JMWZCCK.mjs +35 -0
  25. package/dist/sdk/schedule-tool-CONR4VW3.mjs +35 -0
  26. package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs → schedule-tool-handler-AXMR7NBI.mjs} +5 -6
  27. package/dist/sdk/{schedule-tool-handler-E7XHMU5G.mjs → schedule-tool-handler-YUC6CAXX.mjs} +5 -6
  28. package/dist/sdk/sdk.js +1608 -406
  29. package/dist/sdk/sdk.js.map +1 -1
  30. package/dist/sdk/sdk.mjs +4 -5
  31. package/dist/sdk/sdk.mjs.map +1 -1
  32. package/dist/sdk/{trace-helpers-EHDZ42HH.mjs → trace-helpers-6ROJR7N3.mjs} +2 -2
  33. package/dist/sdk/{workflow-check-provider-5453TW65.mjs → workflow-check-provider-DYSO3PML.mjs} +5 -6
  34. package/dist/sdk/{workflow-check-provider-BSUSPFOF.mjs → workflow-check-provider-MMB7L3YG.mjs} +5 -6
  35. package/dist/sdk/workflow-check-provider-MMB7L3YG.mjs.map +1 -0
  36. package/dist/state-machine/context/build-engine-context.d.ts.map +1 -1
  37. package/dist/utils/tool-resolver.d.ts.map +1 -1
  38. package/dist/utils/workspace-manager.d.ts +31 -8
  39. package/dist/utils/workspace-manager.d.ts.map +1 -1
  40. package/dist/utils/worktree-manager.d.ts +6 -0
  41. package/dist/utils/worktree-manager.d.ts.map +1 -1
  42. package/package.json +2 -2
  43. package/dist/sdk/chunk-EWGX7LI7.mjs.map +0 -1
  44. package/dist/sdk/chunk-PNZH3JSI.mjs.map +0 -1
  45. package/dist/sdk/chunk-XKCER23W.mjs +0 -1490
  46. package/dist/sdk/chunk-XKCER23W.mjs.map +0 -1
  47. package/dist/sdk/github-frontend-47EU2HBY.mjs.map +0 -1
  48. package/dist/sdk/schedule-tool-2COUUTF7.mjs +0 -18
  49. /package/dist/sdk/{check-provider-registry-LBYIKFYM.mjs.map → check-provider-registry-AMYY2ZJY.mjs.map} +0 -0
  50. /package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs.map → check-provider-registry-DVQDGTOE.mjs.map} +0 -0
  51. /package/dist/sdk/{chunk-FBJ7MC7R.mjs.map → chunk-CISJ6DJW.mjs.map} +0 -0
  52. /package/dist/sdk/{chunk-V2QW6ECX.mjs.map → chunk-RJLJUTSU.mjs.map} +0 -0
  53. /package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs.map → failure-condition-evaluator-IVCTD4BZ.mjs.map} +0 -0
  54. /package/dist/sdk/{host-GVR4UGZ3.mjs.map → host-H7IX4GBK.mjs.map} +0 -0
  55. /package/dist/sdk/{host-KGN5OIAM.mjs.map → host-NZXGBBJI.mjs.map} +0 -0
  56. /package/dist/sdk/{routing-CZ36LVVS.mjs.map → routing-LU5PAREW.mjs.map} +0 -0
  57. /package/dist/sdk/{schedule-tool-2COUUTF7.mjs.map → schedule-tool-4JMWZCCK.mjs.map} +0 -0
  58. /package/dist/sdk/{schedule-tool-handler-E7XHMU5G.mjs.map → schedule-tool-CONR4VW3.mjs.map} +0 -0
  59. /package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs.map → schedule-tool-handler-AXMR7NBI.mjs.map} +0 -0
  60. /package/dist/sdk/{trace-helpers-EHDZ42HH.mjs.map → schedule-tool-handler-YUC6CAXX.mjs.map} +0 -0
  61. /package/dist/sdk/{workflow-check-provider-5453TW65.mjs.map → trace-helpers-6ROJR7N3.mjs.map} +0 -0
  62. /package/dist/sdk/{workflow-check-provider-BSUSPFOF.mjs.map → workflow-check-provider-DYSO3PML.mjs.map} +0 -0
package/dist/index.js CHANGED
@@ -1,8 +1,8 @@
1
1
  #!/usr/bin/env node
2
- process.env.VISOR_VERSION = '0.1.147';
3
- process.env.PROBE_VERSION = '0.6.0-rc262';
4
- process.env.VISOR_COMMIT_SHA = '986bd9df126dcf4c0564d12701994e4bad407897';
5
- process.env.VISOR_COMMIT_SHORT = '986bd9d';
2
+ process.env.VISOR_VERSION = '0.1.148';
3
+ process.env.PROBE_VERSION = '0.6.0-rc264';
4
+ process.env.VISOR_COMMIT_SHA = '8d09da19e112d8649810da46458aa77d3f034fc6';
5
+ process.env.VISOR_COMMIT_SHORT = '8d09da1';
6
6
  /******/ (() => { // webpackBootstrap
7
7
  /******/ var __webpack_modules__ = ({
8
8
 
@@ -169587,6 +169587,7 @@ class GitHubFrontend {
169587
169587
  minUpdateDelayMs = 1000; // Minimum delay between updates (public for testing)
169588
169588
  // Cache of created GitHub comment IDs per group to handle API eventual consistency
169589
169589
  createdCommentGithubIds = new Map();
169590
+ _stopped = false;
169590
169591
  start(ctx) {
169591
169592
  const log = ctx.logger;
169592
169593
  const bus = ctx.eventBus;
@@ -169714,10 +169715,23 @@ class GitHubFrontend {
169714
169715
  }
169715
169716
  }));
169716
169717
  }
169717
- stop() {
169718
+ async stop() {
169719
+ this._stopped = true;
169718
169720
  for (const s of this.subs)
169719
169721
  s.unsubscribe();
169720
169722
  this.subs = [];
169723
+ if (this._timer) {
169724
+ clearTimeout(this._timer);
169725
+ this._timer = null;
169726
+ }
169727
+ this._pendingIds.clear();
169728
+ // Drain any in-flight updateGroupedComment operations so callers that
169729
+ // await stop() (e.g. FrontendsHost.stopAll) are guaranteed no async
169730
+ // work leaks after stop resolves.
169731
+ const pending = Array.from(this.updateLocks.values());
169732
+ if (pending.length > 0) {
169733
+ await Promise.allSettled(pending);
169734
+ }
169721
169735
  }
169722
169736
  async buildFullBody(ctx, group) {
169723
169737
  const header = this.renderThreadHeader(ctx, group);
@@ -169808,6 +169822,8 @@ ${end}`);
169808
169822
  */
169809
169823
  async performGroupedCommentUpdate(ctx, comments, group, changedIds) {
169810
169824
  try {
169825
+ if (this._stopped)
169826
+ return;
169811
169827
  if (!ctx.run.repo || !ctx.run.pr)
169812
169828
  return;
169813
169829
  // Check if PR comments are enabled (default to true if not specified)
@@ -181187,12 +181203,53 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
181187
181203
  if (Object.keys(dynamicServers).length > 0) {
181188
181204
  Object.assign(mcpServers, dynamicServers);
181189
181205
  }
181206
+ // Emit telemetry for tool setup diagnostics
181207
+ try {
181208
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181209
+ if (span) {
181210
+ span.addEvent('tool_setup.mcp_servers_js', {
181211
+ 'tool_setup.server_count': Object.keys(dynamicServers).length,
181212
+ 'tool_setup.server_names': Object.keys(dynamicServers).join(','),
181213
+ 'tool_setup.workflow_entries': Object.entries(dynamicServers)
181214
+ .filter(([, cfg]) => cfg?.workflow)
181215
+ .map(([name, cfg]) => `${name}→${cfg.workflow}`)
181216
+ .join(','),
181217
+ });
181218
+ }
181219
+ }
181220
+ catch { }
181190
181221
  }
181191
181222
  catch (error) {
181192
- logger_1.logger.error(`[AICheckProvider] Failed to evaluate ai_mcp_servers_js: ${error instanceof Error ? error.message : 'Unknown error'}`);
181223
+ const errMsg = error instanceof Error ? error.message : 'Unknown error';
181224
+ logger_1.logger.error(`[AICheckProvider] Failed to evaluate ai_mcp_servers_js: ${errMsg}`);
181225
+ // Emit telemetry for the failure
181226
+ try {
181227
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181228
+ if (span) {
181229
+ span.addEvent('tool_setup.mcp_servers_js_error', {
181230
+ 'tool_setup.error': errMsg,
181231
+ });
181232
+ }
181233
+ }
181234
+ catch { }
181193
181235
  // Continue without dynamic servers
181194
181236
  }
181195
181237
  }
181238
+ else if (mcpServersJsExpr && !_dependencyResults) {
181239
+ // Expression exists but no dependency results — this means the check has no dependencies
181240
+ // or the dependency results map was empty/undefined
181241
+ try {
181242
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181243
+ if (span) {
181244
+ span.addEvent('tool_setup.mcp_servers_js_skipped', {
181245
+ 'tool_setup.reason': 'no_dependency_results',
181246
+ 'tool_setup.has_expr': true,
181247
+ 'tool_setup.has_deps': false,
181248
+ });
181249
+ }
181250
+ }
181251
+ catch { }
181252
+ }
181196
181253
  // 5. Resolve environment variable placeholders in MCP server env configs
181197
181254
  // Supports ${VAR} and ${{ env.VAR }} syntax
181198
181255
  for (const serverConfig of Object.values(mcpServers)) {
@@ -181345,6 +181402,28 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
181345
181402
  try {
181346
181403
  // Load custom tools from global config (supports workflows and custom tools)
181347
181404
  const customTools = this.loadCustomTools(customToolsToLoad, config);
181405
+ // Emit telemetry for tool resolution results
181406
+ try {
181407
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181408
+ if (span) {
181409
+ const requestedNames = customToolsToLoad.map(item => typeof item === 'string'
181410
+ ? item
181411
+ : `${item.name || item.workflow}(wf:${item.workflow})`);
181412
+ span.addEvent('tool_setup.resolution', {
181413
+ 'tool_setup.requested_count': customToolsToLoad.length,
181414
+ 'tool_setup.requested_names': requestedNames.join(','),
181415
+ 'tool_setup.resolved_count': customTools.size,
181416
+ 'tool_setup.resolved_names': Array.from(customTools.keys()).join(','),
181417
+ 'tool_setup.missing_count': customToolsToLoad.length - customTools.size,
181418
+ });
181419
+ }
181420
+ }
181421
+ catch { }
181422
+ if (customToolsToLoad.length > 0 && customTools.size === 0) {
181423
+ logger_1.logger.warn(`[AICheckProvider] All ${customToolsToLoad.length} custom tools failed to resolve! ` +
181424
+ `Requested: ${customToolsToLoad.map(item => (typeof item === 'string' ? item : item.workflow)).join(', ')}. ` +
181425
+ `AI will have no workflow tools available.`);
181426
+ }
181348
181427
  // Add schedule tool if enabled (via ai_mcp_servers { tool: 'schedule' } or enable_scheduler)
181349
181428
  if (scheduleToolEnabled) {
181350
181429
  const scheduleTool = (0, schedule_tool_1.getScheduleToolDefinition)();
@@ -181382,10 +181461,36 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
181382
181461
  }
181383
181462
  }
181384
181463
  catch (error) {
181385
- logger_1.logger.error(`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${error instanceof Error ? error.message : 'Unknown error'}`);
181464
+ const errMsg = error instanceof Error ? error.message : 'Unknown error';
181465
+ logger_1.logger.error(`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${errMsg}`);
181466
+ // Emit telemetry for SSE server failure
181467
+ try {
181468
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181469
+ if (span) {
181470
+ span.addEvent('tool_setup.sse_server_error', {
181471
+ 'tool_setup.error': errMsg,
181472
+ 'tool_setup.server_name': customToolsServerName || '',
181473
+ });
181474
+ }
181475
+ }
181476
+ catch { }
181386
181477
  // Continue without custom tools
181387
181478
  }
181388
181479
  }
181480
+ // Emit final tool setup summary telemetry
181481
+ try {
181482
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181483
+ if (span) {
181484
+ const finalServerNames = Object.keys(mcpServers);
181485
+ span.addEvent('tool_setup.final', {
181486
+ 'tool_setup.final_server_count': finalServerNames.length,
181487
+ 'tool_setup.final_server_names': finalServerNames.join(','),
181488
+ 'tool_setup.has_custom_tools_server': !!customToolsServer,
181489
+ 'tool_setup.tools_disabled': !!config.ai?.disableTools,
181490
+ });
181491
+ }
181492
+ }
181493
+ catch { }
181389
181494
  // Pass MCP server config directly to AI service (unless tools are disabled)
181390
181495
  if (Object.keys(mcpServers).length > 0 && !config.ai?.disableTools) {
181391
181496
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -193932,6 +194037,11 @@ async function handleCancel(positional, flags) {
193932
194037
  process.exit(1);
193933
194038
  }
193934
194039
  await store.deleteAsync(schedule.id);
194040
+ // Also cancel the in-memory job (cron or timeout) so it doesn't fire
194041
+ const scheduler = (0, scheduler_1.getScheduler)();
194042
+ if (scheduler) {
194043
+ scheduler.cancelSchedule(schedule.id);
194044
+ }
193935
194045
  console.log('Schedule cancelled successfully!');
193936
194046
  console.log();
193937
194047
  console.log(` ID: ${schedule.id.substring(0, 8)}`);
@@ -194839,6 +194949,7 @@ exports.isScheduleTool = isScheduleTool;
194839
194949
  exports.buildScheduleToolContext = buildScheduleToolContext;
194840
194950
  const schedule_store_1 = __nccwpck_require__(27336);
194841
194951
  const schedule_parser_1 = __nccwpck_require__(48478);
194952
+ const scheduler_1 = __nccwpck_require__(28404);
194842
194953
  const logger_1 = __nccwpck_require__(86999);
194843
194954
  /**
194844
194955
  * Simple glob-style pattern matching for workflow names
@@ -195259,8 +195370,13 @@ async function handleCancel(args, context, store) {
195259
195370
  error: 'You can only cancel your own schedules.',
195260
195371
  };
195261
195372
  }
195262
- // Delete the schedule
195373
+ // Delete the schedule from DB
195263
195374
  await store.deleteAsync(schedule.id);
195375
+ // Also cancel the in-memory job (cron or timeout) so it doesn't fire
195376
+ const scheduler = (0, scheduler_1.getScheduler)();
195377
+ if (scheduler) {
195378
+ scheduler.cancelSchedule(schedule.id);
195379
+ }
195264
195380
  logger_1.logger.info(`[ScheduleTool] Cancelled schedule ${schedule.id} for user ${context.userId}`);
195265
195381
  return {
195266
195382
  success: true,
@@ -195718,6 +195834,27 @@ class Scheduler {
195718
195834
  getStore() {
195719
195835
  return this.store;
195720
195836
  }
195837
+ /**
195838
+ * Cancel a schedule's in-memory job (cron or timeout).
195839
+ * Called after deleting from DB to ensure the job doesn't fire again.
195840
+ */
195841
+ cancelSchedule(scheduleId) {
195842
+ // Stop cron job if it exists
195843
+ const cronJob = this.cronJobs.get(scheduleId);
195844
+ if (cronJob) {
195845
+ cronJob.stop();
195846
+ this.cronJobs.delete(scheduleId);
195847
+ logger_1.logger.debug(`[Scheduler] Cancelled cron job for schedule ${scheduleId}`);
195848
+ return;
195849
+ }
195850
+ // Clear timeout if it exists
195851
+ const timeout = this.oneTimeTimeouts.get(scheduleId);
195852
+ if (timeout) {
195853
+ clearTimeout(timeout);
195854
+ this.oneTimeTimeouts.delete(scheduleId);
195855
+ logger_1.logger.debug(`[Scheduler] Cancelled timeout for schedule ${scheduleId}`);
195856
+ }
195857
+ }
195721
195858
  /**
195722
195859
  * Start the scheduler
195723
195860
  */
@@ -196098,6 +196235,26 @@ class Scheduler {
196098
196235
  * Execute a scheduled workflow
196099
196236
  */
196100
196237
  async executeSchedule(schedule) {
196238
+ // DB freshness check: verify the schedule still exists and is active.
196239
+ // This prevents execution of cancelled or paused schedules when the
196240
+ // in-memory job fires after a DB-only cancellation.
196241
+ // Note: This is a single indexed primary-key lookup (<1ms for SQLite),
196242
+ // and only runs for user-created schedules (static cron jobs use
196243
+ // executeStaticCronJob instead), so the overhead is negligible
196244
+ // compared to the workflow execution that follows.
196245
+ try {
196246
+ const fresh = await this.store.getAsync(schedule.id);
196247
+ if (!fresh || fresh.status !== 'active') {
196248
+ logger_1.logger.info(`[Scheduler] Schedule ${schedule.id} is no longer active (${fresh ? fresh.status : 'deleted'}), skipping execution`);
196249
+ // Clean up the in-memory job since the schedule is gone/inactive
196250
+ this.cancelSchedule(schedule.id);
196251
+ return;
196252
+ }
196253
+ }
196254
+ catch {
196255
+ // If we can't check the DB, log and proceed (don't block execution on DB errors)
196256
+ logger_1.logger.warn(`[Scheduler] Could not verify schedule ${schedule.id} freshness, proceeding with execution`);
196257
+ }
196101
196258
  const description = schedule.workflow || 'reminder';
196102
196259
  logger_1.logger.info(`[Scheduler] Executing schedule ${schedule.id}: ${description}`);
196103
196260
  const startTime = Date.now();
@@ -201330,6 +201487,13 @@ async function initializeWorkspace(context) {
201330
201487
  process.env.VISOR_WORKSPACE_MAIN_PROJECT = info.mainProjectPath;
201331
201488
  process.env.VISOR_WORKSPACE_MAIN_PROJECT_NAME = info.mainProjectName;
201332
201489
  process.env.VISOR_ORIGINAL_WORKDIR = originalPath;
201490
+ // Prevent git from walking above the workspace base path.
201491
+ // Without this, git commands in workspace subdirectories can discover
201492
+ // a rogue .git in a parent directory (e.g. /tmp/.git) and leak
201493
+ // operations across all workspaces.
201494
+ const basePath = workspaceConfig?.base_path || process.env.VISOR_WORKSPACE_PATH || '/tmp/visor-workspaces';
201495
+ const existing = process.env.GIT_CEILING_DIRECTORIES;
201496
+ process.env.GIT_CEILING_DIRECTORIES = existing ? `${existing}:${basePath}` : basePath;
201333
201497
  }
201334
201498
  catch { }
201335
201499
  logger_1.logger.info(`[Workspace] Initialized workspace: ${info.workspacePath}`);
@@ -222335,6 +222499,7 @@ function buildProviderTemplateContext(prInfo, dependencyResults, memoryStore, ou
222335
222499
  Object.defineProperty(exports, "__esModule", ({ value: true }));
222336
222500
  exports.resolveTools = resolveTools;
222337
222501
  const workflow_tool_executor_1 = __nccwpck_require__(30236);
222502
+ const workflow_registry_1 = __nccwpck_require__(82824);
222338
222503
  const logger_1 = __nccwpck_require__(86999);
222339
222504
  /**
222340
222505
  * Resolve tool items to CustomToolDefinition instances.
@@ -222345,6 +222510,13 @@ const logger_1 = __nccwpck_require__(86999);
222345
222510
  */
222346
222511
  function resolveTools(toolItems, globalTools, logPrefix = '[ToolResolver]') {
222347
222512
  const tools = new Map();
222513
+ // Log registry state once for debugging workflow resolution failures
222514
+ const registry = workflow_registry_1.WorkflowRegistry.getInstance();
222515
+ const registeredWorkflows = registry.list().map(w => w.id);
222516
+ if (toolItems.some(item => typeof item !== 'string' && (0, workflow_tool_executor_1.isWorkflowToolReference)(item))) {
222517
+ logger_1.logger.info(`${logPrefix} Resolving ${toolItems.length} tool items. ` +
222518
+ `WorkflowRegistry has ${registeredWorkflows.length} workflows: [${registeredWorkflows.join(', ')}]`);
222519
+ }
222348
222520
  for (const item of toolItems) {
222349
222521
  // First, try to resolve as a workflow tool
222350
222522
  const workflowTool = (0, workflow_tool_executor_1.resolveWorkflowToolFromItem)(item);
@@ -222364,7 +222536,8 @@ function resolveTools(toolItems, globalTools, logPrefix = '[ToolResolver]') {
222364
222536
  logger_1.logger.warn(`${logPrefix} Tool '${item}' not found in global tools or workflow registry`);
222365
222537
  }
222366
222538
  else if ((0, workflow_tool_executor_1.isWorkflowToolReference)(item)) {
222367
- logger_1.logger.warn(`${logPrefix} Workflow '${item.workflow}' referenced but not found in registry`);
222539
+ logger_1.logger.warn(`${logPrefix} Workflow '${item.workflow}' referenced but not found in registry. ` +
222540
+ `Available: [${registeredWorkflows.join(', ')}]`);
222368
222541
  }
222369
222542
  }
222370
222543
  if (tools.size === 0 && toolItems.length > 0 && !globalTools) {
@@ -222741,6 +222914,10 @@ class WorkspaceManager {
222741
222914
  catch { }
222742
222915
  await this.createMainProjectWorktree(mainProjectPath);
222743
222916
  }
222917
+ else {
222918
+ // Worktree exists and is valid — update to latest upstream and clean
222919
+ await this.refreshWorktreeToUpstream(mainProjectPath);
222920
+ }
222744
222921
  }
222745
222922
  else {
222746
222923
  await this.createMainProjectWorktree(mainProjectPath);
@@ -222956,32 +223133,129 @@ class WorkspaceManager {
222956
223133
  return cleaned;
222957
223134
  }
222958
223135
  /**
222959
- * Create worktree for the main project
222960
- *
222961
- * visor-disable: architecture - Not using WorktreeManager here because:
222962
- * 1. WorktreeManager expects remote URLs and clones to bare repos first
222963
- * 2. This operates on the LOCAL repo we're already in (no cloning needed)
222964
- * 3. Adding a "local mode" to WorktreeManager would add complexity for minimal benefit
222965
- * The git commands here are simpler (just rev-parse + worktree add) vs WorktreeManager's
222966
- * full clone/bare-repo/fetch/worktree pipeline.
223136
+ * visor-disable: architecture - The helpers below (resolveUpstreamRef,
223137
+ * fetchAndResolveUpstream, resetAndCleanWorktree, refreshWorktreeToUpstream)
223138
+ * are NOT duplicates of WorktreeManager's fetchRef/getCommitShaForRef/cleanWorktree.
223139
+ * WorktreeManager operates on BARE repo caches cloned from remote URLs, while
223140
+ * WorkspaceManager operates on the LOCAL working repo the user already has checked out.
223141
+ * The git commands differ (e.g. `fetch origin --prune` vs `fetch origin <ref>:<ref>`)
223142
+ * and sharing code would require adding a "local mode" to WorktreeManager for no benefit.
222967
223143
  */
222968
- async createMainProjectWorktree(targetPath) {
222969
- logger_1.logger.debug(`Creating main project worktree: ${targetPath}`);
222970
- // Get current HEAD
222971
- const headResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} rev-parse HEAD`, {
222972
- timeout: 10000,
222973
- });
223144
+ /**
223145
+ * Resolve the upstream default branch ref.
223146
+ * Tries origin/HEAD (symbolic), then origin/main, then origin/master.
223147
+ * Falls back to local HEAD if no remote is configured.
223148
+ */
223149
+ async resolveUpstreamRef() {
223150
+ const esc = shellEscape(this.originalPath);
223151
+ // First, try to resolve origin/HEAD (follows the remote's default branch)
223152
+ const symbolicResult = await command_executor_1.commandExecutor.execute(`git -C ${esc} symbolic-ref refs/remotes/origin/HEAD 2>/dev/null`, { timeout: 10000 });
223153
+ if (symbolicResult.exitCode === 0 && symbolicResult.stdout.trim()) {
223154
+ // Returns something like "refs/remotes/origin/main"
223155
+ const ref = symbolicResult.stdout.trim().replace('refs/remotes/', '');
223156
+ logger_1.logger.debug(`[Workspace] Resolved upstream default branch via origin/HEAD: ${ref}`);
223157
+ return ref;
223158
+ }
223159
+ // Try origin/main
223160
+ const mainResult = await command_executor_1.commandExecutor.execute(`git -C ${esc} rev-parse --verify origin/main 2>/dev/null`, { timeout: 10000 });
223161
+ if (mainResult.exitCode === 0) {
223162
+ logger_1.logger.debug(`[Workspace] Using origin/main as upstream ref`);
223163
+ return 'origin/main';
223164
+ }
223165
+ // Try origin/master
223166
+ const masterResult = await command_executor_1.commandExecutor.execute(`git -C ${esc} rev-parse --verify origin/master 2>/dev/null`, { timeout: 10000 });
223167
+ if (masterResult.exitCode === 0) {
223168
+ logger_1.logger.debug(`[Workspace] Using origin/master as upstream ref`);
223169
+ return 'origin/master';
223170
+ }
223171
+ // Fallback: no remote configured, use local HEAD
223172
+ logger_1.logger.warn(`[Workspace] No upstream remote found, falling back to local HEAD`);
223173
+ return 'HEAD';
223174
+ }
223175
+ /**
223176
+ * Fetch latest from origin, resolve the upstream default branch, and return
223177
+ * both the ref name and the resolved commit SHA.
223178
+ */
223179
+ async fetchAndResolveUpstream() {
223180
+ // Fetch latest from origin
223181
+ logger_1.logger.debug(`[Workspace] Fetching latest from origin`);
223182
+ const fetchResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} fetch origin --prune 2>&1`, { timeout: 120000 });
223183
+ if (fetchResult.exitCode !== 0) {
223184
+ logger_1.logger.warn(`[Workspace] fetch origin failed (will use cached refs): ${fetchResult.stderr}`);
223185
+ }
223186
+ // Resolve the upstream ref
223187
+ const upstreamRef = await this.resolveUpstreamRef();
223188
+ // Get the commit SHA for the upstream ref
223189
+ const shaResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} rev-parse ${shellEscape(upstreamRef)}`, { timeout: 10000 });
223190
+ if (shaResult.exitCode === 0) {
223191
+ return { upstreamRef, targetSha: shaResult.stdout.trim() };
223192
+ }
223193
+ // Upstream ref unresolvable — fall back to local HEAD
223194
+ logger_1.logger.warn(`[Workspace] Could not resolve ${upstreamRef} (${shaResult.stderr.trim()}), falling back to HEAD`);
223195
+ const headResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} rev-parse HEAD`, { timeout: 10000 });
222974
223196
  if (headResult.exitCode !== 0) {
222975
- throw new Error(`Failed to get HEAD: ${headResult.stderr}`);
223197
+ throw new Error(`Repository has no commits — cannot create worktree: ${headResult.stderr}`);
223198
+ }
223199
+ return { upstreamRef: 'HEAD', targetSha: headResult.stdout.trim() };
223200
+ }
223201
+ /**
223202
+ * Reset a worktree to a specific commit and clean all modifications.
223203
+ */
223204
+ async resetAndCleanWorktree(worktreePath, targetSha) {
223205
+ const escapedPath = shellEscape(worktreePath);
223206
+ const escapedSha = shellEscape(targetSha);
223207
+ const resetResult = await command_executor_1.commandExecutor.execute(`git -C ${escapedPath} reset --hard ${escapedSha}`, { timeout: 10000 });
223208
+ if (resetResult.exitCode !== 0) {
223209
+ logger_1.logger.warn(`[Workspace] reset --hard failed: ${resetResult.stderr}`);
223210
+ }
223211
+ const cleanResult = await command_executor_1.commandExecutor.execute(`git -C ${escapedPath} clean -fdx`, {
223212
+ timeout: 30000,
223213
+ });
223214
+ if (cleanResult.exitCode !== 0) {
223215
+ logger_1.logger.warn(`[Workspace] clean -fdx failed: ${cleanResult.stderr}`);
223216
+ }
223217
+ }
223218
+ /**
223219
+ * Refresh an existing worktree to the latest upstream default branch
223220
+ * and ensure it has no modified or untracked files.
223221
+ */
223222
+ async refreshWorktreeToUpstream(worktreePath) {
223223
+ logger_1.logger.info(`[Workspace] Refreshing worktree to latest upstream: ${worktreePath}`);
223224
+ try {
223225
+ const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
223226
+ // Point worktree to the upstream commit
223227
+ const checkoutResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(worktreePath)} checkout --detach ${shellEscape(targetSha)}`, { timeout: 30000 });
223228
+ if (checkoutResult.exitCode !== 0) {
223229
+ logger_1.logger.warn(`[Workspace] checkout --detach failed (worktree stays at current commit): ${checkoutResult.stderr}`);
223230
+ // Still clean even if checkout failed — the worktree is valid, just at old commit
223231
+ await this.resetAndCleanWorktree(worktreePath, 'HEAD');
223232
+ return;
223233
+ }
223234
+ // Reset and clean
223235
+ await this.resetAndCleanWorktree(worktreePath, targetSha);
223236
+ logger_1.logger.info(`[Workspace] Worktree updated to ${upstreamRef} (${targetSha.slice(0, 8)})`);
222976
223237
  }
222977
- const headRef = headResult.stdout.trim();
222978
- // Create worktree using detached HEAD to avoid branch conflicts
222979
- const createCmd = `git -C ${shellEscape(this.originalPath)} worktree add --detach ${shellEscape(targetPath)} ${shellEscape(headRef)}`;
223238
+ catch (error) {
223239
+ // Best-effort: a stale worktree is better than failing initialization entirely
223240
+ logger_1.logger.warn(`[Workspace] Failed to refresh worktree (continuing with stale state): ${error}`);
223241
+ }
223242
+ }
223243
+ /**
223244
+ * Create worktree for the main project.
223245
+ * See visor-disable comment above resolveUpstreamRef for why this doesn't use WorktreeManager.
223246
+ */
223247
+ async createMainProjectWorktree(targetPath) {
223248
+ logger_1.logger.debug(`Creating main project worktree: ${targetPath}`);
223249
+ const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
223250
+ // Create worktree using detached HEAD at the upstream commit
223251
+ const createCmd = `git -C ${shellEscape(this.originalPath)} worktree add --detach ${shellEscape(targetPath)} ${shellEscape(targetSha)}`;
222980
223252
  const result = await command_executor_1.commandExecutor.execute(createCmd, { timeout: 60000 });
222981
223253
  if (result.exitCode !== 0) {
222982
223254
  throw new Error(`Failed to create main project worktree: ${result.stderr}`);
222983
223255
  }
222984
- logger_1.logger.debug(`Created main project worktree at ${targetPath}`);
223256
+ // Clean (shouldn't be needed in a fresh worktree, but defense in depth)
223257
+ await this.resetAndCleanWorktree(targetPath, targetSha);
223258
+ logger_1.logger.info(`Created main project worktree at ${targetPath} (${upstreamRef} -> ${targetSha.slice(0, 8)})`);
222985
223259
  }
222986
223260
  /**
222987
223261
  * Remove main project worktree
@@ -223663,27 +223937,52 @@ class WorktreeManager {
223663
223937
  fs.rmSync(worktree_path, { recursive: true, force: true });
223664
223938
  }
223665
223939
  }
223940
+ // Clean up sibling metadata file
223941
+ const metadataPath = this.getMetadataPath(worktree_path);
223942
+ try {
223943
+ if (fs.existsSync(metadataPath)) {
223944
+ fs.unlinkSync(metadataPath);
223945
+ }
223946
+ }
223947
+ catch {
223948
+ // best-effort cleanup
223949
+ }
223666
223950
  // Remove from active list
223667
223951
  this.activeWorktrees.delete(worktreeId);
223668
223952
  logger_1.logger.info(`Successfully removed worktree: ${worktreeId}`);
223669
223953
  }
223954
+ /**
223955
+ * Get the metadata file path for a worktree.
223956
+ * Stored as a sibling file OUTSIDE the worktree to avoid being committed
223957
+ * when agents run `git add .` inside the checked-out repo.
223958
+ */
223959
+ getMetadataPath(worktreePath) {
223960
+ return worktreePath.replace(/\/?$/, '') + '.metadata.json';
223961
+ }
223670
223962
  /**
223671
223963
  * Save worktree metadata
223672
223964
  */
223673
223965
  async saveMetadata(worktreePath, metadata) {
223674
- const metadataPath = path.join(worktreePath, '.visor-metadata.json');
223966
+ const metadataPath = this.getMetadataPath(worktreePath);
223675
223967
  fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2), 'utf8');
223676
223968
  }
223677
223969
  /**
223678
223970
  * Load worktree metadata
223679
223971
  */
223680
223972
  async loadMetadata(worktreePath) {
223681
- const metadataPath = path.join(worktreePath, '.visor-metadata.json');
223682
- if (!fs.existsSync(metadataPath)) {
223973
+ const metadataPath = this.getMetadataPath(worktreePath);
223974
+ // Also check legacy location (inside worktree) for backwards compatibility
223975
+ const legacyPath = path.join(worktreePath, '.visor-metadata.json');
223976
+ const pathToRead = fs.existsSync(metadataPath)
223977
+ ? metadataPath
223978
+ : fs.existsSync(legacyPath)
223979
+ ? legacyPath
223980
+ : null;
223981
+ if (!pathToRead) {
223683
223982
  return null;
223684
223983
  }
223685
223984
  try {
223686
- const content = fs.readFileSync(metadataPath, 'utf8');
223985
+ const content = fs.readFileSync(pathToRead, 'utf8');
223687
223986
  return JSON.parse(content);
223688
223987
  }
223689
223988
  catch (error) {
@@ -266426,7 +266725,7 @@ var require_package2 = __commonJS({
266426
266725
  module2.exports = {
266427
266726
  name: "@aws-sdk/client-bedrock-runtime",
266428
266727
  description: "AWS SDK for JavaScript Bedrock Runtime Client for Node.js, Browser and React Native",
266429
- version: "3.999.0",
266728
+ version: "3.1000.0",
266430
266729
  scripts: {
266431
266730
  build: "concurrently 'yarn:build:types' 'yarn:build:es' && yarn build:cjs",
266432
266731
  "build:cjs": "node ../../scripts/compilation/inline client-bedrock-runtime",
@@ -266437,7 +266736,11 @@ var require_package2 = __commonJS({
266437
266736
  clean: "premove dist-cjs dist-es dist-types tsconfig.cjs.tsbuildinfo tsconfig.es.tsbuildinfo tsconfig.types.tsbuildinfo",
266438
266737
  "extract:docs": "api-extractor run --local",
266439
266738
  "generate:client": "node ../../scripts/generate-clients/single-service --solo bedrock-runtime",
266440
- "test:index": "tsc --noEmit ./test/index-types.ts && node ./test/index-objects.spec.mjs"
266739
+ test: "yarn g:vitest run --passWithNoTests",
266740
+ "test:index": "tsc --noEmit ./test/index-types.ts && node ./test/index-objects.spec.mjs",
266741
+ "test:integration": "yarn g:vitest run --passWithNoTests -c vitest.config.integ.mts",
266742
+ "test:integration:watch": "yarn g:vitest run --passWithNoTests -c vitest.config.integ.mts",
266743
+ "test:watch": "yarn g:vitest watch --passWithNoTests"
266441
266744
  },
266442
266745
  main: "./dist-cjs/index.js",
266443
266746
  types: "./dist-types/index.d.ts",
@@ -266456,7 +266759,7 @@ var require_package2 = __commonJS({
266456
266759
  "@aws-sdk/middleware-user-agent": "^3.972.15",
266457
266760
  "@aws-sdk/middleware-websocket": "^3.972.10",
266458
266761
  "@aws-sdk/region-config-resolver": "^3.972.6",
266459
- "@aws-sdk/token-providers": "3.999.0",
266762
+ "@aws-sdk/token-providers": "3.1000.0",
266460
266763
  "@aws-sdk/types": "^3.973.4",
266461
266764
  "@aws-sdk/util-endpoints": "^3.996.3",
266462
266765
  "@aws-sdk/util-user-agent-browser": "^3.972.6",
@@ -266493,12 +266796,14 @@ var require_package2 = __commonJS({
266493
266796
  tslib: "^2.6.2"
266494
266797
  },
266495
266798
  devDependencies: {
266799
+ "@smithy/snapshot-testing": "^1.0.7",
266496
266800
  "@tsconfig/node20": "20.1.8",
266497
266801
  "@types/node": "^20.14.8",
266498
266802
  concurrently: "7.0.0",
266499
266803
  "downlevel-dts": "0.10.1",
266500
266804
  premove: "4.0.0",
266501
- typescript: "~5.8.3"
266805
+ typescript: "~5.8.3",
266806
+ vitest: "^4.0.17"
266502
266807
  },
266503
266808
  engines: {
266504
266809
  node: ">=20.0.0"
@@ -268500,9 +268805,9 @@ var init_sso_oidc = __esm({
268500
268805
  }
268501
268806
  });
268502
268807
 
268503
- // node_modules/@aws-sdk/token-providers/dist-cjs/index.js
268808
+ // node_modules/@aws-sdk/credential-provider-sso/node_modules/@aws-sdk/token-providers/dist-cjs/index.js
268504
268809
  var require_dist_cjs56 = __commonJS({
268505
- "node_modules/@aws-sdk/token-providers/dist-cjs/index.js"(exports2) {
268810
+ "node_modules/@aws-sdk/credential-provider-sso/node_modules/@aws-sdk/token-providers/dist-cjs/index.js"(exports2) {
268506
268811
  "use strict";
268507
268812
  var client = (init_client(), __toCommonJS(client_exports));
268508
268813
  var httpAuthSchemes = (init_httpAuthSchemes2(), __toCommonJS(httpAuthSchemes_exports));
@@ -272335,8 +272640,155 @@ var require_dist_cjs63 = __commonJS({
272335
272640
  }
272336
272641
  });
272337
272642
 
272338
- // node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js
272643
+ // node_modules/@aws-sdk/token-providers/dist-cjs/index.js
272339
272644
  var require_dist_cjs64 = __commonJS({
272645
+ "node_modules/@aws-sdk/token-providers/dist-cjs/index.js"(exports2) {
272646
+ "use strict";
272647
+ var client = (init_client(), __toCommonJS(client_exports));
272648
+ var httpAuthSchemes = (init_httpAuthSchemes2(), __toCommonJS(httpAuthSchemes_exports));
272649
+ var propertyProvider = require_dist_cjs24();
272650
+ var sharedIniFileLoader = require_dist_cjs42();
272651
+ var node_fs = __nccwpck_require__(73024);
272652
+ var fromEnvSigningName = ({ logger: logger2, signingName } = {}) => async () => {
272653
+ logger2?.debug?.("@aws-sdk/token-providers - fromEnvSigningName");
272654
+ if (!signingName) {
272655
+ throw new propertyProvider.TokenProviderError("Please pass 'signingName' to compute environment variable key", { logger: logger2 });
272656
+ }
272657
+ const bearerTokenKey = httpAuthSchemes.getBearerTokenEnvKey(signingName);
272658
+ if (!(bearerTokenKey in process.env)) {
272659
+ throw new propertyProvider.TokenProviderError(`Token not present in '${bearerTokenKey}' environment variable`, { logger: logger2 });
272660
+ }
272661
+ const token = { token: process.env[bearerTokenKey] };
272662
+ client.setTokenFeature(token, "BEARER_SERVICE_ENV_VARS", "3");
272663
+ return token;
272664
+ };
272665
+ var EXPIRE_WINDOW_MS = 5 * 60 * 1e3;
272666
+ var REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`;
272667
+ var getSsoOidcClient = async (ssoRegion, init = {}, callerClientConfig) => {
272668
+ const { SSOOIDCClient: SSOOIDCClient2 } = await Promise.resolve().then(() => (init_sso_oidc(), sso_oidc_exports));
272669
+ const coalesce = (prop) => init.clientConfig?.[prop] ?? init.parentClientConfig?.[prop] ?? callerClientConfig?.[prop];
272670
+ const ssoOidcClient = new SSOOIDCClient2(Object.assign({}, init.clientConfig ?? {}, {
272671
+ region: ssoRegion ?? init.clientConfig?.region,
272672
+ logger: coalesce("logger"),
272673
+ userAgentAppId: coalesce("userAgentAppId")
272674
+ }));
272675
+ return ssoOidcClient;
272676
+ };
272677
+ var getNewSsoOidcToken = async (ssoToken, ssoRegion, init = {}, callerClientConfig) => {
272678
+ const { CreateTokenCommand: CreateTokenCommand2 } = await Promise.resolve().then(() => (init_sso_oidc(), sso_oidc_exports));
272679
+ const ssoOidcClient = await getSsoOidcClient(ssoRegion, init, callerClientConfig);
272680
+ return ssoOidcClient.send(new CreateTokenCommand2({
272681
+ clientId: ssoToken.clientId,
272682
+ clientSecret: ssoToken.clientSecret,
272683
+ refreshToken: ssoToken.refreshToken,
272684
+ grantType: "refresh_token"
272685
+ }));
272686
+ };
272687
+ var validateTokenExpiry = (token) => {
272688
+ if (token.expiration && token.expiration.getTime() < Date.now()) {
272689
+ throw new propertyProvider.TokenProviderError(`Token is expired. ${REFRESH_MESSAGE}`, false);
272690
+ }
272691
+ };
272692
+ var validateTokenKey = (key, value, forRefresh = false) => {
272693
+ if (typeof value === "undefined") {
272694
+ throw new propertyProvider.TokenProviderError(`Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, false);
272695
+ }
272696
+ };
272697
+ var { writeFile: writeFile2 } = node_fs.promises;
272698
+ var writeSSOTokenToFile = (id, ssoToken) => {
272699
+ const tokenFilepath = sharedIniFileLoader.getSSOTokenFilepath(id);
272700
+ const tokenString = JSON.stringify(ssoToken, null, 2);
272701
+ return writeFile2(tokenFilepath, tokenString);
272702
+ };
272703
+ var lastRefreshAttemptTime = /* @__PURE__ */ new Date(0);
272704
+ var fromSso = (init = {}) => async ({ callerClientConfig } = {}) => {
272705
+ init.logger?.debug("@aws-sdk/token-providers - fromSso");
272706
+ const profiles = await sharedIniFileLoader.parseKnownFiles(init);
272707
+ const profileName = sharedIniFileLoader.getProfileName({
272708
+ profile: init.profile ?? callerClientConfig?.profile
272709
+ });
272710
+ const profile = profiles[profileName];
272711
+ if (!profile) {
272712
+ throw new propertyProvider.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false);
272713
+ } else if (!profile["sso_session"]) {
272714
+ throw new propertyProvider.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`);
272715
+ }
272716
+ const ssoSessionName = profile["sso_session"];
272717
+ const ssoSessions = await sharedIniFileLoader.loadSsoSessionData(init);
272718
+ const ssoSession = ssoSessions[ssoSessionName];
272719
+ if (!ssoSession) {
272720
+ throw new propertyProvider.TokenProviderError(`Sso session '${ssoSessionName}' could not be found in shared credentials file.`, false);
272721
+ }
272722
+ for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) {
272723
+ if (!ssoSession[ssoSessionRequiredKey]) {
272724
+ throw new propertyProvider.TokenProviderError(`Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, false);
272725
+ }
272726
+ }
272727
+ ssoSession["sso_start_url"];
272728
+ const ssoRegion = ssoSession["sso_region"];
272729
+ let ssoToken;
272730
+ try {
272731
+ ssoToken = await sharedIniFileLoader.getSSOTokenFromFile(ssoSessionName);
272732
+ } catch (e5) {
272733
+ throw new propertyProvider.TokenProviderError(`The SSO session token associated with profile=${profileName} was not found or is invalid. ${REFRESH_MESSAGE}`, false);
272734
+ }
272735
+ validateTokenKey("accessToken", ssoToken.accessToken);
272736
+ validateTokenKey("expiresAt", ssoToken.expiresAt);
272737
+ const { accessToken, expiresAt } = ssoToken;
272738
+ const existingToken = { token: accessToken, expiration: new Date(expiresAt) };
272739
+ if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) {
272740
+ return existingToken;
272741
+ }
272742
+ if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1e3) {
272743
+ validateTokenExpiry(existingToken);
272744
+ return existingToken;
272745
+ }
272746
+ validateTokenKey("clientId", ssoToken.clientId, true);
272747
+ validateTokenKey("clientSecret", ssoToken.clientSecret, true);
272748
+ validateTokenKey("refreshToken", ssoToken.refreshToken, true);
272749
+ try {
272750
+ lastRefreshAttemptTime.setTime(Date.now());
272751
+ const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init, callerClientConfig);
272752
+ validateTokenKey("accessToken", newSsoOidcToken.accessToken);
272753
+ validateTokenKey("expiresIn", newSsoOidcToken.expiresIn);
272754
+ const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1e3);
272755
+ try {
272756
+ await writeSSOTokenToFile(ssoSessionName, {
272757
+ ...ssoToken,
272758
+ accessToken: newSsoOidcToken.accessToken,
272759
+ expiresAt: newTokenExpiration.toISOString(),
272760
+ refreshToken: newSsoOidcToken.refreshToken
272761
+ });
272762
+ } catch (error2) {
272763
+ }
272764
+ return {
272765
+ token: newSsoOidcToken.accessToken,
272766
+ expiration: newTokenExpiration
272767
+ };
272768
+ } catch (error2) {
272769
+ validateTokenExpiry(existingToken);
272770
+ return existingToken;
272771
+ }
272772
+ };
272773
+ var fromStatic = ({ token, logger: logger2 }) => async () => {
272774
+ logger2?.debug("@aws-sdk/token-providers - fromStatic");
272775
+ if (!token || !token.token) {
272776
+ throw new propertyProvider.TokenProviderError(`Please pass a valid token to fromStatic`, false);
272777
+ }
272778
+ return token;
272779
+ };
272780
+ var nodeProvider = (init = {}) => propertyProvider.memoize(propertyProvider.chain(fromSso(init), async () => {
272781
+ throw new propertyProvider.TokenProviderError("Could not load token from any providers", false);
272782
+ }), (token) => token.expiration !== void 0 && token.expiration.getTime() - Date.now() < 3e5, (token) => token.expiration !== void 0);
272783
+ exports2.fromEnvSigningName = fromEnvSigningName;
272784
+ exports2.fromSso = fromSso;
272785
+ exports2.fromStatic = fromStatic;
272786
+ exports2.nodeProvider = nodeProvider;
272787
+ }
272788
+ });
272789
+
272790
+ // node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js
272791
+ var require_dist_cjs65 = __commonJS({
272340
272792
  "node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js"(exports2) {
272341
272793
  "use strict";
272342
272794
  var eventstreamSerdeUniversal = require_dist_cjs35();
@@ -275018,11 +275470,11 @@ var require_runtimeConfig = __commonJS({
275018
275470
  var core_1 = (init_dist_es2(), __toCommonJS(dist_es_exports2));
275019
275471
  var credential_provider_node_1 = require_dist_cjs62();
275020
275472
  var eventstream_handler_node_1 = require_dist_cjs63();
275021
- var token_providers_1 = require_dist_cjs56();
275473
+ var token_providers_1 = require_dist_cjs64();
275022
275474
  var util_user_agent_node_1 = require_dist_cjs51();
275023
275475
  var config_resolver_1 = require_dist_cjs39();
275024
275476
  var core_2 = (init_dist_es(), __toCommonJS(dist_es_exports));
275025
- var eventstream_serde_node_1 = require_dist_cjs64();
275477
+ var eventstream_serde_node_1 = require_dist_cjs65();
275026
275478
  var hash_node_1 = require_dist_cjs52();
275027
275479
  var middleware_retry_1 = require_dist_cjs47();
275028
275480
  var node_config_provider_1 = require_dist_cjs43();
@@ -275094,7 +275546,7 @@ var require_runtimeConfig = __commonJS({
275094
275546
  });
275095
275547
 
275096
275548
  // node_modules/@aws-sdk/client-bedrock-runtime/dist-cjs/index.js
275097
- var require_dist_cjs65 = __commonJS({
275549
+ var require_dist_cjs66 = __commonJS({
275098
275550
  "node_modules/@aws-sdk/client-bedrock-runtime/dist-cjs/index.js"(exports2) {
275099
275551
  "use strict";
275100
275552
  var middlewareEventstream = require_dist_cjs3();
@@ -275939,13 +276391,13 @@ var import_client_bedrock_runtime, import_client_bedrock_runtime2, import_client
275939
276391
  var init_dist3 = __esm({
275940
276392
  "node_modules/@ai-sdk/amazon-bedrock/dist/index.mjs"() {
275941
276393
  init_dist2();
275942
- import_client_bedrock_runtime = __toESM(require_dist_cjs65(), 1);
276394
+ import_client_bedrock_runtime = __toESM(require_dist_cjs66(), 1);
275943
276395
  init_dist();
275944
- import_client_bedrock_runtime2 = __toESM(require_dist_cjs65(), 1);
276396
+ import_client_bedrock_runtime2 = __toESM(require_dist_cjs66(), 1);
275945
276397
  init_dist();
275946
276398
  init_dist();
275947
276399
  init_dist2();
275948
- import_client_bedrock_runtime3 = __toESM(require_dist_cjs65(), 1);
276400
+ import_client_bedrock_runtime3 = __toESM(require_dist_cjs66(), 1);
275949
276401
  generateFileId = createIdGenerator({ prefix: "file", size: 16 });
275950
276402
  BedrockChatLanguageModel = class {
275951
276403
  constructor(modelId, settings, config) {
@@ -299113,7 +299565,6 @@ var init_reg_exp = __esm({
299113
299565
  // node_modules/chevrotain/lib/src/scan/lexer.js
299114
299566
  function analyzeTokenTypes(tokenTypes, options) {
299115
299567
  options = defaults_default(options, {
299116
- useSticky: SUPPORT_STICKY,
299117
299568
  debug: false,
299118
299569
  safeMode: false,
299119
299570
  positionTracking: "full",
@@ -299162,7 +299613,7 @@ function analyzeTokenTypes(tokenTypes, options) {
299162
299613
  ], regExpSource[1])) {
299163
299614
  return regExpSource[1];
299164
299615
  } else {
299165
- return options.useSticky ? addStickyFlag(currPattern) : addStartOfInput(currPattern);
299616
+ return addStickyFlag(currPattern);
299166
299617
  }
299167
299618
  } else if (isFunction_default(currPattern)) {
299168
299619
  hasCustom = true;
@@ -299176,7 +299627,7 @@ function analyzeTokenTypes(tokenTypes, options) {
299176
299627
  } else {
299177
299628
  const escapedRegExpString = currPattern.replace(/[\\^$.*+?()[\]{}|]/g, "\\$&");
299178
299629
  const wrappedRegExp = new RegExp(escapedRegExpString);
299179
- return options.useSticky ? addStickyFlag(wrappedRegExp) : addStartOfInput(wrappedRegExp);
299630
+ return addStickyFlag(wrappedRegExp);
299180
299631
  }
299181
299632
  } else {
299182
299633
  throw Error("non exhaustive match");
@@ -299580,10 +300031,6 @@ function noMetaChar(regExp) {
299580
300031
  function usesLookAheadOrBehind(regExp) {
299581
300032
  return /(\(\?=)|(\(\?!)|(\(\?<=)|(\(\?<!)/.test(regExp.source);
299582
300033
  }
299583
- function addStartOfInput(pattern) {
299584
- const flags = pattern.ignoreCase ? "i" : "";
299585
- return new RegExp(`^(?:${pattern.source})`, flags);
299586
- }
299587
300034
  function addStickyFlag(pattern) {
299588
300035
  const flags = pattern.ignoreCase ? "iy" : "y";
299589
300036
  return new RegExp(`${pattern.source}`, flags);
@@ -299772,7 +300219,7 @@ function initCharCodeToOptimizedIndexMap() {
299772
300219
  }
299773
300220
  }
299774
300221
  }
299775
- var PATTERN, DEFAULT_MODE, MODES, SUPPORT_STICKY, end_of_input, start_of_input, LineTerminatorOptimizedTester, minOptimizationVal, charCodeToOptimizedIdxMap;
300222
+ var PATTERN, DEFAULT_MODE, MODES, end_of_input, start_of_input, LineTerminatorOptimizedTester, minOptimizationVal, charCodeToOptimizedIdxMap;
299776
300223
  var init_lexer = __esm({
299777
300224
  "node_modules/chevrotain/lib/src/scan/lexer.js"() {
299778
300225
  init_api3();
@@ -299784,7 +300231,6 @@ var init_lexer = __esm({
299784
300231
  PATTERN = "PATTERN";
299785
300232
  DEFAULT_MODE = "defaultMode";
299786
300233
  MODES = "modes";
299787
- SUPPORT_STICKY = typeof new RegExp("(?:)").sticky === "boolean";
299788
300234
  end_of_input = /[^\\][$]/;
299789
300235
  start_of_input = /[^\\[][\^]|^\^/;
299790
300236
  LineTerminatorOptimizedTester = {
@@ -300100,13 +300546,6 @@ var init_lexer_public = __esm({
300100
300546
  PRINT_WARNING(warningDescriptor.message);
300101
300547
  });
300102
300548
  this.TRACE_INIT("Choosing sub-methods implementations", () => {
300103
- if (SUPPORT_STICKY) {
300104
- this.chopInput = identity_default;
300105
- this.match = this.matchWithTest;
300106
- } else {
300107
- this.updateLastIndex = noop_default;
300108
- this.match = this.matchWithExec;
300109
- }
300110
300549
  if (hasOnlySingleMode) {
300111
300550
  this.handleModes = noop_default;
300112
300551
  }
@@ -300169,7 +300608,7 @@ var init_lexer_public = __esm({
300169
300608
  // this method also used quite a bit of `!` none null assertions because it is too optimized
300170
300609
  // for `tsc` to always understand it is "safe"
300171
300610
  tokenizeInternal(text, initialMode) {
300172
- let i5, j5, k5, matchAltImage, longerAlt, matchedImage, payload2, altPayload, imageLength, group, tokType, newToken, errLength, droppedChar, msg, match2;
300611
+ let i5, j5, k5, matchAltImage, longerAlt, matchedImage, payload2, altPayload, imageLength, group, tokType, newToken, errLength, msg, match2;
300173
300612
  const orgText = text;
300174
300613
  const orgLength = orgText.length;
300175
300614
  let offset2 = 0;
@@ -300188,19 +300627,7 @@ var init_lexer_public = __esm({
300188
300627
  const modeStack = [];
300189
300628
  const emptyArray = [];
300190
300629
  Object.freeze(emptyArray);
300191
- let getPossiblePatterns;
300192
- function getPossiblePatternsSlow() {
300193
- return patternIdxToConfig;
300194
- }
300195
- function getPossiblePatternsOptimized(charCode) {
300196
- const optimizedCharIdx = charCodeToOptimizedIndex(charCode);
300197
- const possiblePatterns = currCharCodeToPatternIdxToConfig[optimizedCharIdx];
300198
- if (possiblePatterns === void 0) {
300199
- return emptyArray;
300200
- } else {
300201
- return possiblePatterns;
300202
- }
300203
- }
300630
+ let isOptimizedMode = false;
300204
300631
  const pop_mode = (popToken) => {
300205
300632
  if (modeStack.length === 1 && // if we have both a POP_MODE and a PUSH_MODE this is in-fact a "transition"
300206
300633
  // So no error should occur.
@@ -300221,9 +300648,9 @@ var init_lexer_public = __esm({
300221
300648
  currModePatternsLength = patternIdxToConfig.length;
300222
300649
  const modeCanBeOptimized = this.canModeBeOptimized[newMode] && this.config.safeMode === false;
300223
300650
  if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {
300224
- getPossiblePatterns = getPossiblePatternsOptimized;
300651
+ isOptimizedMode = true;
300225
300652
  } else {
300226
- getPossiblePatterns = getPossiblePatternsSlow;
300653
+ isOptimizedMode = false;
300227
300654
  }
300228
300655
  }
300229
300656
  };
@@ -300235,9 +300662,9 @@ var init_lexer_public = __esm({
300235
300662
  currModePatternsLength = patternIdxToConfig.length;
300236
300663
  const modeCanBeOptimized = this.canModeBeOptimized[newMode] && this.config.safeMode === false;
300237
300664
  if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {
300238
- getPossiblePatterns = getPossiblePatternsOptimized;
300665
+ isOptimizedMode = true;
300239
300666
  } else {
300240
- getPossiblePatterns = getPossiblePatternsSlow;
300667
+ isOptimizedMode = false;
300241
300668
  }
300242
300669
  }
300243
300670
  push_mode.call(this, initialMode);
@@ -300245,8 +300672,16 @@ var init_lexer_public = __esm({
300245
300672
  const recoveryEnabled = this.config.recoveryEnabled;
300246
300673
  while (offset2 < orgLength) {
300247
300674
  matchedImage = null;
300675
+ imageLength = -1;
300248
300676
  const nextCharCode = orgText.charCodeAt(offset2);
300249
- const chosenPatternIdxToConfig = getPossiblePatterns(nextCharCode);
300677
+ let chosenPatternIdxToConfig;
300678
+ if (isOptimizedMode) {
300679
+ const optimizedCharIdx = charCodeToOptimizedIndex(nextCharCode);
300680
+ const possiblePatterns = currCharCodeToPatternIdxToConfig[optimizedCharIdx];
300681
+ chosenPatternIdxToConfig = possiblePatterns !== void 0 ? possiblePatterns : emptyArray;
300682
+ } else {
300683
+ chosenPatternIdxToConfig = patternIdxToConfig;
300684
+ }
300250
300685
  const chosenPatternsLength = chosenPatternIdxToConfig.length;
300251
300686
  for (i5 = 0; i5 < chosenPatternsLength; i5++) {
300252
300687
  currConfig = chosenPatternIdxToConfig[i5];
@@ -300255,12 +300690,14 @@ var init_lexer_public = __esm({
300255
300690
  const singleCharCode = currConfig.short;
300256
300691
  if (singleCharCode !== false) {
300257
300692
  if (nextCharCode === singleCharCode) {
300693
+ imageLength = 1;
300258
300694
  matchedImage = currPattern;
300259
300695
  }
300260
300696
  } else if (currConfig.isCustom === true) {
300261
300697
  match2 = currPattern.exec(orgText, offset2, matchedTokens, groups);
300262
300698
  if (match2 !== null) {
300263
300699
  matchedImage = match2[0];
300700
+ imageLength = matchedImage.length;
300264
300701
  if (match2.payload !== void 0) {
300265
300702
  payload2 = match2.payload;
300266
300703
  }
@@ -300268,12 +300705,13 @@ var init_lexer_public = __esm({
300268
300705
  matchedImage = null;
300269
300706
  }
300270
300707
  } else {
300271
- this.updateLastIndex(currPattern, offset2);
300272
- matchedImage = this.match(currPattern, text, offset2);
300708
+ currPattern.lastIndex = offset2;
300709
+ imageLength = this.matchLength(currPattern, text, offset2);
300273
300710
  }
300274
- if (matchedImage !== null) {
300711
+ if (imageLength !== -1) {
300275
300712
  longerAlt = currConfig.longerAlt;
300276
300713
  if (longerAlt !== void 0) {
300714
+ matchedImage = text.substring(offset2, offset2 + imageLength);
300277
300715
  const longerAltLength = longerAlt.length;
300278
300716
  for (k5 = 0; k5 < longerAltLength; k5++) {
300279
300717
  const longerAltConfig = patternIdxToConfig[longerAlt[k5]];
@@ -300290,11 +300728,12 @@ var init_lexer_public = __esm({
300290
300728
  matchAltImage = null;
300291
300729
  }
300292
300730
  } else {
300293
- this.updateLastIndex(longerAltPattern, offset2);
300731
+ longerAltPattern.lastIndex = offset2;
300294
300732
  matchAltImage = this.match(longerAltPattern, text, offset2);
300295
300733
  }
300296
300734
  if (matchAltImage && matchAltImage.length > matchedImage.length) {
300297
300735
  matchedImage = matchAltImage;
300736
+ imageLength = matchAltImage.length;
300298
300737
  payload2 = altPayload;
300299
300738
  currConfig = longerAltConfig;
300300
300739
  break;
@@ -300304,10 +300743,10 @@ var init_lexer_public = __esm({
300304
300743
  break;
300305
300744
  }
300306
300745
  }
300307
- if (matchedImage !== null) {
300308
- imageLength = matchedImage.length;
300746
+ if (imageLength !== -1) {
300309
300747
  group = currConfig.group;
300310
300748
  if (group !== void 0) {
300749
+ matchedImage = matchedImage !== null ? matchedImage : text.substring(offset2, offset2 + imageLength);
300311
300750
  tokType = currConfig.tokenTypeIdx;
300312
300751
  newToken = this.createTokenInstance(matchedImage, offset2, tokType, currConfig.tokenType, line, column, imageLength);
300313
300752
  this.handlePayload(newToken, payload2);
@@ -300317,15 +300756,13 @@ var init_lexer_public = __esm({
300317
300756
  groups[group].push(newToken);
300318
300757
  }
300319
300758
  }
300320
- text = this.chopInput(text, imageLength);
300321
- offset2 = offset2 + imageLength;
300322
- column = this.computeNewColumn(column, imageLength);
300323
300759
  if (trackLines === true && currConfig.canLineTerminator === true) {
300324
300760
  let numOfLTsInMatch = 0;
300325
300761
  let foundTerminator;
300326
300762
  let lastLTEndOffset;
300327
300763
  lineTerminatorPattern.lastIndex = 0;
300328
300764
  do {
300765
+ matchedImage = matchedImage !== null ? matchedImage : text.substring(offset2, offset2 + imageLength);
300329
300766
  foundTerminator = lineTerminatorPattern.test(matchedImage);
300330
300767
  if (foundTerminator === true) {
300331
300768
  lastLTEndOffset = lineTerminatorPattern.lastIndex - 1;
@@ -300336,8 +300773,13 @@ var init_lexer_public = __esm({
300336
300773
  line = line + numOfLTsInMatch;
300337
300774
  column = imageLength - lastLTEndOffset;
300338
300775
  this.updateTokenEndLineColumnLocation(newToken, group, lastLTEndOffset, numOfLTsInMatch, line, column, imageLength);
300776
+ } else {
300777
+ column = this.computeNewColumn(column, imageLength);
300339
300778
  }
300779
+ } else {
300780
+ column = this.computeNewColumn(column, imageLength);
300340
300781
  }
300782
+ offset2 = offset2 + imageLength;
300341
300783
  this.handleModes(currConfig, pop_mode, push_mode, newToken);
300342
300784
  } else {
300343
300785
  const errorStartOffset = offset2;
@@ -300345,7 +300787,6 @@ var init_lexer_public = __esm({
300345
300787
  const errorColumn = column;
300346
300788
  let foundResyncPoint = recoveryEnabled === false;
300347
300789
  while (foundResyncPoint === false && offset2 < orgLength) {
300348
- text = this.chopInput(text, 1);
300349
300790
  offset2++;
300350
300791
  for (j5 = 0; j5 < currModePatternsLength; j5++) {
300351
300792
  const currConfig2 = patternIdxToConfig[j5];
@@ -300358,7 +300799,7 @@ var init_lexer_public = __esm({
300358
300799
  } else if (currConfig2.isCustom === true) {
300359
300800
  foundResyncPoint = currPattern.exec(orgText, offset2, matchedTokens, groups) !== null;
300360
300801
  } else {
300361
- this.updateLastIndex(currPattern, offset2);
300802
+ currPattern.lastIndex = offset2;
300362
300803
  foundResyncPoint = currPattern.exec(text) !== null;
300363
300804
  }
300364
300805
  if (foundResyncPoint === true) {
@@ -300401,12 +300842,6 @@ var init_lexer_public = __esm({
300401
300842
  push_mode.call(this, config.push);
300402
300843
  }
300403
300844
  }
300404
- chopInput(text, length) {
300405
- return text.substring(length);
300406
- }
300407
- updateLastIndex(regExp, newLastIndex) {
300408
- regExp.lastIndex = newLastIndex;
300409
- }
300410
300845
  // TODO: decrease this under 600 characters? inspect stripping comments option in TSC compiler
300411
300846
  updateTokenEndLineColumnLocation(newToken, group, lastLTIdx, numOfLTsInMatch, line, column, imageLength) {
300412
300847
  let lastCharIsLT, fixForEndingInLT;
@@ -300469,16 +300904,19 @@ var init_lexer_public = __esm({
300469
300904
  token.payload = payload2;
300470
300905
  }
300471
300906
  }
300472
- matchWithTest(pattern, text, offset2) {
300907
+ match(pattern, text, offset2) {
300473
300908
  const found = pattern.test(text);
300474
300909
  if (found === true) {
300475
300910
  return text.substring(offset2, pattern.lastIndex);
300476
300911
  }
300477
300912
  return null;
300478
300913
  }
300479
- matchWithExec(pattern, text) {
300480
- const regExpArray = pattern.exec(text);
300481
- return regExpArray !== null ? regExpArray[0] : null;
300914
+ matchLength(pattern, text, offset2) {
300915
+ const found = pattern.test(text);
300916
+ if (found === true) {
300917
+ return pattern.lastIndex - offset2;
300918
+ }
300919
+ return -1;
300482
300920
  }
300483
300921
  };
300484
300922
  Lexer.SKIPPED = "This marks a skipped Token pattern, this means each token identified by it will be consumed and then thrown into oblivion, this can be used to for example to completely ignore whitespace.";
@@ -300670,12 +301108,20 @@ For Further details.`;
300670
301108
  return errMsg;
300671
301109
  },
300672
301110
  buildAlternationAmbiguityError(options) {
300673
- const pathMsg = map_default(options.prefixPath, (currtok) => tokenLabel2(currtok)).join(", ");
300674
301111
  const occurrence = options.alternation.idx === 0 ? "" : options.alternation.idx;
301112
+ const isEmptyPath = options.prefixPath.length === 0;
300675
301113
  let currMessage = `Ambiguous Alternatives Detected: <${options.ambiguityIndices.join(" ,")}> in <OR${occurrence}> inside <${options.topLevelRule.name}> Rule,
300676
- <${pathMsg}> may appears as a prefix path in all these alternatives.
300677
301114
  `;
300678
- currMessage = currMessage + `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES
301115
+ if (isEmptyPath) {
301116
+ currMessage += `These alternatives are all empty (match no tokens), making them indistinguishable.
301117
+ Only the last alternative may be empty.
301118
+ `;
301119
+ } else {
301120
+ const pathMsg = map_default(options.prefixPath, (currtok) => tokenLabel2(currtok)).join(", ");
301121
+ currMessage += `<${pathMsg}> may appears as a prefix path in all these alternatives.
301122
+ `;
301123
+ }
301124
+ currMessage += `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES
300679
301125
  For Further details.`;
300680
301126
  return currMessage;
300681
301127
  },
@@ -329914,12 +330360,31 @@ function cleanSchemaResponse(response) {
329914
330360
  const closeChar = openChar === "{" ? "}" : "]";
329915
330361
  let bracketCount = 1;
329916
330362
  let endIndex = startIndex + 1;
330363
+ let inString = false;
330364
+ let escapeNext = false;
329917
330365
  while (endIndex < trimmed.length && bracketCount > 0) {
329918
330366
  const char = trimmed[endIndex];
329919
- if (char === openChar) {
329920
- bracketCount++;
329921
- } else if (char === closeChar) {
329922
- bracketCount--;
330367
+ if (escapeNext) {
330368
+ escapeNext = false;
330369
+ endIndex++;
330370
+ continue;
330371
+ }
330372
+ if (char === "\\" && inString) {
330373
+ escapeNext = true;
330374
+ endIndex++;
330375
+ continue;
330376
+ }
330377
+ if (char === '"') {
330378
+ inString = !inString;
330379
+ endIndex++;
330380
+ continue;
330381
+ }
330382
+ if (!inString) {
330383
+ if (char === openChar) {
330384
+ bracketCount++;
330385
+ } else if (char === closeChar) {
330386
+ bracketCount--;
330387
+ }
329923
330388
  }
329924
330389
  endIndex++;
329925
330390
  }
@@ -352300,6 +352765,122 @@ var init_bashPermissions = __esm({
352300
352765
  });
352301
352766
 
352302
352767
  // src/agent/bashExecutor.js
352768
// Split a shell command line into its top-level sub-commands, breaking on
// unquoted `&&`, `||`, `|`, and `;` separators. Quoted regions (single or
// double) and backslash-escaped characters are kept intact, so separators
// appearing inside strings never split. Returns trimmed, non-empty parts.
function splitCommandComponents(command) {
  const segments = [];
  let buffer = "";
  let quoted = false;
  let quoteDelim = "";
  // Push the accumulated buffer (trimmed) if it holds anything, then reset.
  const flush = () => {
    const piece = buffer.trim();
    if (piece) segments.push(piece);
    buffer = "";
  };
  let idx = 0;
  while (idx < command.length) {
    const ch = command[idx];
    const ahead = command[idx + 1] || "";
    // Outside quotes a backslash escapes the next character verbatim.
    if (ch === "\\" && !quoted) {
      buffer += ch + ahead;
      idx += 2;
      continue;
    }
    // Inside double quotes a backslash still escapes the following char.
    if (quoted && quoteDelim === '"' && ch === "\\" && ahead) {
      buffer += ch + ahead;
      idx += 2;
      continue;
    }
    // Opening quote: remember which delimiter to look for.
    if (!quoted && (ch === '"' || ch === "'")) {
      quoted = true;
      quoteDelim = ch;
      buffer += ch;
      idx += 1;
      continue;
    }
    // Matching closing quote ends the quoted region.
    if (quoted && ch === quoteDelim) {
      quoted = false;
      buffer += ch;
      idx += 1;
      continue;
    }
    if (!quoted) {
      // Two-character separators: `&&` and `||`.
      if ((ch === "&" && ahead === "&") || (ch === "|" && ahead === "|")) {
        flush();
        idx += 2;
        continue;
      }
      // Single-character separators: pipe and semicolon.
      if (ch === "|" || ch === ";") {
        flush();
        idx += 1;
        continue;
      }
    }
    buffer += ch;
    idx += 1;
  }
  flush();
  return segments;
}
352815
// Inspect a single (already-split) shell command and decide whether it would
// require an interactive terminal. Returns a human-readable explanation with
// a suggested non-interactive alternative, or null when the command is safe.
function checkSingleCommandInteractive(command) {
  let effective = command.trim();
  // Skip leading environment-variable assignments (e.g. `FOO=bar cmd ...`).
  while (/^\w+=\S*\s/.test(effective)) {
    effective = effective.replace(/^\w+=\S*\s+/, "");
  }
  const parts = effective.split(/\s+/);
  const base2 = parts[0];
  const args = parts.slice(1);
  if (["vi", "vim", "nvim", "nano", "emacs", "pico", "joe", "mcedit"].includes(base2)) {
    return `'${base2}' is an interactive editor and cannot run without a terminal. Use non-interactive file manipulation commands instead.`;
  }
  if (["less", "more"].includes(base2)) {
    return `'${base2}' is an interactive pager. Use 'cat', 'head', or 'tail' instead.`;
  }
  if (base2 === "git") {
    const sub = args[0];
    if (sub === "commit") {
      // Flags that supply the message (or suppress the editor) make the
      // commit non-interactive. Note: `-C` reuses a message verbatim, but
      // lowercase `-c` reuses it AND opens the editor, so `-c` does NOT
      // qualify. `-F`/`--file` reads the message from a file and is safe.
      const hasNonInteractiveFlag = args.some(
        (a5) => a5 === "-m" || a5.startsWith("--message") || a5 === "-C" || a5 === "-F" || a5.startsWith("--file") || a5.startsWith("--fixup") || a5.startsWith("--squash") || a5 === "--allow-empty-message" || a5 === "--no-edit"
      );
      if (!hasNonInteractiveFlag) {
        return `Interactive command: 'git commit' opens an editor for the commit message. Use 'git commit -m "your message"' instead.`;
      }
    }
    if (sub === "rebase" && (args.includes("--continue") || args.includes("--skip"))) {
      return "Interactive command: 'git rebase --continue' opens an editor. Set environment variable GIT_EDITOR=true to accept default messages, e.g. pass env: {GIT_EDITOR: 'true'} or prepend GIT_EDITOR=true to the command.";
    }
    if (sub === "rebase" && (args.includes("-i") || args.includes("--interactive"))) {
      return "Interactive command: 'git rebase -i' requires an interactive editor. Interactive rebase cannot run without a terminal.";
    }
    if (sub === "merge" && !args.includes("--no-edit") && !args.includes("--no-commit") && !args.includes("--ff-only")) {
      return "Interactive command: 'git merge' may open an editor for the merge commit message. Add '--no-edit' to accept the default message.";
    }
    if (sub === "cherry-pick" && !args.includes("--no-edit")) {
      return "Interactive command: 'git cherry-pick' may open an editor. Add '--no-edit' to accept the default message.";
    }
    if (sub === "revert" && !args.includes("--no-edit")) {
      return "Interactive command: 'git revert' opens an editor. Add '--no-edit' to accept the default message.";
    }
    if (sub === "tag" && args.includes("-a") && !args.some((a5) => a5 === "-m" || a5.startsWith("--message"))) {
      return `Interactive command: 'git tag -a' opens an editor for the tag message. Use 'git tag -a <name> -m "message"' instead.`;
    }
    if (sub === "add" && (args.includes("-i") || args.includes("--interactive") || args.includes("-p") || args.includes("--patch"))) {
      return "Interactive command: 'git add -i/-p' requires interactive input. Use 'git add <files>' to stage specific files instead.";
    }
  }
  // Language interpreters with no arguments drop into a REPL.
  if (["python", "python3", "node", "irb", "ghci", "lua", "R", "ruby"].includes(base2) && args.length === 0) {
    return `Interactive command: '${base2}' without arguments starts an interactive REPL. Provide a script file or use '-c'/'--eval' for inline code.`;
  }
  if (base2 === "mysql" && !args.some((a5) => a5 === "-e" || a5.startsWith("--execute"))) {
    return `Interactive command: 'mysql' without -e flag starts an interactive session. Use 'mysql -e "SQL QUERY"' instead.`;
  }
  if (base2 === "psql" && !args.some((a5) => a5 === "-c" || a5.startsWith("--command") || a5 === "-f" || a5.startsWith("--file"))) {
    return `Interactive command: 'psql' without -c flag starts an interactive session. Use 'psql -c "SQL QUERY"' instead.`;
  }
  if (["top", "htop", "btop", "nmon"].includes(base2)) {
    return `Interactive command: '${base2}' is an interactive TUI tool. Use 'ps aux' or 'top -b -n 1' for non-interactive process listing.`;
  }
  return null;
}
352875
// Scan every pipeline/chain component of `command` for interactive behavior.
// Returns the first explanatory message found, or null when all parts are
// safe to run without a terminal (or the input is not a usable string).
function checkInteractiveCommand(command) {
  if (!command || typeof command !== "string") return null;
  for (const piece of splitCommandComponents(command.trim())) {
    const verdict = checkSingleCommandInteractive(piece);
    if (verdict) return verdict;
  }
  return null;
}
352303
352884
  async function executeBashCommand(command, options = {}) {
352304
352885
  const {
352305
352886
  workingDirectory = process.cwd(),
@@ -352329,6 +352910,24 @@ async function executeBashCommand(command, options = {}) {
352329
352910
  };
352330
352911
  }
352331
352912
  const startTime = Date.now();
352913
+ const interactiveError = checkInteractiveCommand(command);
352914
+ if (interactiveError) {
352915
+ if (debug) {
352916
+ console.log(`[BashExecutor] Blocked interactive command: "${command}"`);
352917
+ console.log(`[BashExecutor] Reason: ${interactiveError}`);
352918
+ }
352919
+ return {
352920
+ success: false,
352921
+ error: interactiveError,
352922
+ stdout: "",
352923
+ stderr: interactiveError,
352924
+ exitCode: 1,
352925
+ command,
352926
+ workingDirectory: cwd,
352927
+ duration: 0,
352928
+ interactive: true
352929
+ };
352930
+ }
352332
352931
  if (debug) {
352333
352932
  console.log(`[BashExecutor] Executing command: "${command}"`);
352334
352933
  console.log(`[BashExecutor] Working directory: "${cwd}"`);
@@ -352339,6 +352938,8 @@ async function executeBashCommand(command, options = {}) {
352339
352938
  ...process.env,
352340
352939
  ...env
352341
352940
  };
352941
+ if (!processEnv.GIT_EDITOR) processEnv.GIT_EDITOR = "true";
352942
+ if (!processEnv.GIT_TERMINAL_PROMPT) processEnv.GIT_TERMINAL_PROMPT = "0";
352342
352943
  const isComplex = isComplexCommand(command);
352343
352944
  let cmd, cmdArgs, useShell;
352344
352945
  if (isComplex) {
@@ -352373,20 +352974,32 @@ async function executeBashCommand(command, options = {}) {
352373
352974
  // stdin ignored, capture stdout/stderr
352374
352975
  shell: useShell,
352375
352976
  // false for security
352977
+ detached: true,
352978
+ // new session — no controlling terminal
352376
352979
  windowsHide: true
352377
352980
  });
352378
352981
  let stdout = "";
352379
352982
  let stderr = "";
352380
352983
  let killed = false;
352381
352984
  let timeoutHandle;
352985
+ const killProcessGroup = (signal) => {
352986
+ try {
352987
+ if (child.pid) process.kill(-child.pid, signal);
352988
+ } catch {
352989
+ try {
352990
+ child.kill(signal);
352991
+ } catch {
352992
+ }
352993
+ }
352994
+ };
352382
352995
  if (timeout > 0) {
352383
352996
  timeoutHandle = setTimeout(() => {
352384
352997
  if (!killed) {
352385
352998
  killed = true;
352386
- child.kill("SIGTERM");
352999
+ killProcessGroup("SIGTERM");
352387
353000
  setTimeout(() => {
352388
353001
  if (child.exitCode === null) {
352389
- child.kill("SIGKILL");
353002
+ killProcessGroup("SIGKILL");
352390
353003
  }
352391
353004
  }, 5e3);
352392
353005
  }
@@ -352399,7 +353012,7 @@ async function executeBashCommand(command, options = {}) {
352399
353012
  } else {
352400
353013
  if (!killed) {
352401
353014
  killed = true;
352402
- child.kill("SIGTERM");
353015
+ killProcessGroup("SIGTERM");
352403
353016
  }
352404
353017
  }
352405
353018
  });
@@ -352410,7 +353023,7 @@ async function executeBashCommand(command, options = {}) {
352410
353023
  } else {
352411
353024
  if (!killed) {
352412
353025
  killed = true;
352413
- child.kill("SIGTERM");
353026
+ killProcessGroup("SIGTERM");
352414
353027
  }
352415
353028
  }
352416
353029
  });
@@ -396950,7 +397563,7 @@ module.exports = /*#__PURE__*/JSON.parse('{"100":"Continue","101":"Switching Pro
396950
397563
  /***/ ((module) => {
396951
397564
 
396952
397565
  "use strict";
396953
- module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.42","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#f1c13b8eee98734a8ea024061eada4aa9a9ff2e9","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc262","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","ignore":"^7.0.5","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","minimatch":"^10.2.2","node-cron":"^3.0.3","open":"^9.1.0","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/par
ser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
397566
+ module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.42","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#f1c13b8eee98734a8ea024061eada4aa9a9ff2e9","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc264","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","ignore":"^7.0.5","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","minimatch":"^10.2.2","node-cron":"^3.0.3","open":"^9.1.0","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/par
ser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
396954
397567
 
396955
397568
  /***/ })
396956
397569