veryfront 0.0.69 → 0.0.70

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/dist/ai/index.js CHANGED
@@ -19,6 +19,11 @@ globalThis.Deno = globalThis.Deno || {
  }
  };

+ // src/ai/types/agent.ts
+ function getTextFromParts(parts) {
+ return parts.filter((p) => p.type === "text").map((p) => p.text).join("");
+ }
+
  // src/ai/providers/openai.ts
  import { z as z2 } from "zod";

@@ -174,6 +179,7 @@ var OpenAIStreamChunkSchema = z.object({
  })).min(1)
  });
  var BaseProvider = class {
+ config;
  constructor(config) {
  this.config = config;
  this.validateConfig();
@@ -388,9 +394,12 @@ var OpenAIResponseSchema = z2.object({
  }).optional()
  });
  var OpenAIProvider = class extends BaseProvider {
+ name = "openai";
+ apiKey;
+ baseURL;
+ organizationId;
  constructor(config) {
  super(config);
- this.name = "openai";
  this.apiKey = config.apiKey;
  this.baseURL = config.baseURL || "https://api.openai.com/v1";
  this.organizationId = config.organizationId;
@@ -492,9 +501,11 @@ var OpenAIProvider = class extends BaseProvider {

  // src/ai/providers/anthropic.ts
  var AnthropicProvider = class extends BaseProvider {
+ name = "anthropic";
+ apiKey;
+ baseURL;
  constructor(config) {
  super(config);
- this.name = "anthropic";
  this.apiKey = config.apiKey;
  this.baseURL = config.baseURL || "https://api.anthropic.com";
  }
@@ -792,9 +803,11 @@ var GoogleResponseSchema = z3.object({
  }).optional()
  });
  var GoogleProvider = class extends BaseProvider {
+ name = "google";
+ apiKey;
+ baseURL;
  constructor(config) {
  super(config);
- this.name = "google";
  this.apiKey = config.apiKey;
  this.baseURL = config.baseURL || "https://generativelanguage.googleapis.com/v1beta";
  }
@@ -916,11 +929,9 @@ function getEnv(key) {

  // src/ai/providers/factory.ts
  var ProviderRegistry = class {
- constructor() {
- this.providers = /* @__PURE__ */ new Map();
- this.config = {};
- this.autoInitialized = false;
- }
+ providers = /* @__PURE__ */ new Map();
+ config = {};
+ autoInitialized = false;
  /**
  * Auto-initialize providers from environment variables
  * This is called lazily when a provider is first requested
@@ -1293,9 +1304,7 @@ function generateToolId() {
  return `tool_${Date.now()}_${toolIdCounter++}`;
  }
  var ToolRegistryClass = class {
- constructor() {
- this.tools = /* @__PURE__ */ new Map();
- }
+ tools = /* @__PURE__ */ new Map();
  register(id, toolInstance) {
  if (this.tools.has(id)) {
  agentLogger.debug(`Tool "${id}" is already registered. Overwriting.`);
@@ -1555,9 +1564,7 @@ function patternToId(pattern) {
  return pattern.replace(/^\//, "").replace(/\//g, "_").replace(/:/g, "");
  }
  var ResourceRegistryClass = class {
- constructor() {
- this.resources = /* @__PURE__ */ new Map();
- }
+ resources = /* @__PURE__ */ new Map();
  /**
  * Register a resource
  */
@@ -1653,9 +1660,7 @@ function interpolateVariables(template, variables) {
  });
  }
  var PromptRegistryClass = class {
- constructor() {
- this.prompts = /* @__PURE__ */ new Map();
- }
+ prompts = /* @__PURE__ */ new Map();
  /**
  * Register a prompt
  */
@@ -1785,9 +1790,7 @@ function createWorkflow(config) {
  };
  }
  var AgentRegistryClass = class {
- constructor() {
- this.agents = /* @__PURE__ */ new Map();
- }
+ agents = /* @__PURE__ */ new Map();
  /**
  * Register an agent
  */
@@ -2040,7 +2043,7 @@ import { join } from "node:path";
  // deno.json
  var deno_default = {
  name: "veryfront",
- version: "0.0.68",
+ version: "0.0.70",
  nodeModulesDir: "auto",
  exclude: [
  "npm/",
@@ -2709,12 +2712,10 @@ function createMockAdapter() {

  // src/platform/compat/fs.ts
  var NodeFileSystem = class {
- constructor() {
- this.fs = null;
- this.os = null;
- this.path = null;
- this.initialized = false;
- }
+ fs = null;
+ os = null;
+ path = null;
+ initialized = false;
  async ensureInitialized() {
  if (this.initialized)
  return;
@@ -3455,8 +3456,9 @@ function generateId(prefix) {

  // src/ai/agent/memory.ts
  var ConversationMemory = class {
+ messages = [];
+ config;
  constructor(config) {
- this.messages = [];
  this.config = config;
  }
  async add(message) {
@@ -3494,15 +3496,17 @@ var ConversationMemory = class {
  }
  estimateTokens(messages) {
  const totalChars = messages.reduce(
- (sum, msg) => sum + msg.content.length,
+ (sum, msg) => sum + getTextFromParts(msg.parts).length,
  0
  );
  return Math.ceil(totalChars / 4);
  }
  };
  var BufferMemory = class {
+ messages = [];
+ config;
+ bufferSize;
  constructor(config) {
- this.messages = [];
  this.config = config;
  this.bufferSize = config.maxMessages || 10;
  }
@@ -3529,16 +3533,18 @@ var BufferMemory = class {
  }
  estimateTokens(messages) {
  const totalChars = messages.reduce(
- (sum, msg) => sum + msg.content.length,
+ (sum, msg) => sum + getTextFromParts(msg.parts).length,
  0
  );
  return Math.ceil(totalChars / 4);
  }
  };
  var SummaryMemory = class {
+ messages = [];
+ summary = "";
+ config;
+ summaryThreshold;
  constructor(config) {
- this.messages = [];
- this.summary = "";
  this.config = config;
  this.summaryThreshold = config.maxMessages || 20;
  }
@@ -3554,8 +3560,8 @@ var SummaryMemory = class {
  {
  id: "summary",
  role: "system",
- content: `Previous conversation summary:
- ${this.summary}`,
+ parts: [{ type: "text", text: `Previous conversation summary:
+ ${this.summary}` }],
  timestamp: Date.now()
  },
  ...this.messages
@@ -3579,13 +3585,13 @@ ${this.summary}`,
  summarizeOldMessages() {
  const toSummarize = this.messages.slice(0, Math.floor(this.messages.length / 2));
  const remaining = this.messages.slice(Math.floor(this.messages.length / 2));
- const topics = toSummarize.filter((m) => m.role === "user").map((m) => m.content.substring(0, 50)).join("; ");
+ const topics = toSummarize.filter((m) => m.role === "user").map((m) => getTextFromParts(m.parts).substring(0, 50)).join("; ");
  this.summary = `Discussed: ${topics}`;
  this.messages = remaining;
  return Promise.resolve();
  }
  estimateTokens(messages) {
- const totalChars = messages.reduce((sum, msg) => sum + msg.content.length, 0) + this.summary.length;
+ const totalChars = messages.reduce((sum, msg) => sum + getTextFromParts(msg.parts).length, 0) + this.summary.length;
  return Math.ceil(totalChars / 4);
  }
  };
@@ -3647,11 +3653,9 @@ var VERYFRONT_PATHS = {

  // src/core/utils/bundle-manifest.ts
  var InMemoryBundleManifestStore = class {
- constructor() {
- this.metadata = /* @__PURE__ */ new Map();
- this.code = /* @__PURE__ */ new Map();
- this.sourceIndex = /* @__PURE__ */ new Map();
- }
+ metadata = /* @__PURE__ */ new Map();
+ code = /* @__PURE__ */ new Map();
+ sourceIndex = /* @__PURE__ */ new Map();
  getBundleMetadata(key) {
  const entry = this.metadata.get(key);
  if (!entry)
@@ -3943,16 +3947,14 @@ var ContextPropagation = class {

  // src/observability/tracing/manager.ts
  var TracingManager = class {
- constructor() {
- this.state = {
- initialized: false,
- tracer: null,
- api: null,
- propagator: null
- };
- this.spanOps = null;
- this.contextProp = null;
- }
+ state = {
+ initialized: false,
+ tracer: null,
+ api: null,
+ propagator: null
+ };
+ spanOps = null;
+ contextProp = null;
  async initialize(config = {}, adapter) {
  if (this.state.initialized) {
  serverLogger.debug("[tracing] Already initialized");
@@ -4080,9 +4082,40 @@ var AgentStreamEventSchema = z6.discriminatedUnion("type", [
  ]);
  var DEFAULT_MAX_TOKENS = 4096;
  var DEFAULT_TEMPERATURE = 0.7;
+ function convertMessageToProvider(msg) {
+ const content = getTextFromParts(msg.parts);
+ const providerMsg = {
+ role: msg.role,
+ content
+ };
+ const toolCallParts = msg.parts.filter(
+ (p) => p.type === "tool-call"
+ );
+ if (toolCallParts.length > 0) {
+ providerMsg.tool_calls = toolCallParts.map((tc) => ({
+ id: tc.toolCallId,
+ type: "function",
+ function: {
+ name: tc.toolName,
+ arguments: JSON.stringify(tc.args)
+ }
+ }));
+ }
+ const toolResultPart = msg.parts.find(
+ (p) => p.type === "tool-result"
+ );
+ if (toolResultPart && msg.role === "tool") {
+ providerMsg.tool_call_id = toolResultPart.toolCallId;
+ providerMsg.content = JSON.stringify(toolResultPart.result);
+ }
+ return providerMsg;
+ }
  var AgentRuntime = class {
+ id;
+ config;
+ memory;
+ status = "idle";
  constructor(id, config) {
- this.status = "idle";
  this.id = id;
  this.config = config;
  const memoryConfig = config.memory || { type: "conversation", maxTokens: 4e3 };
@@ -4224,26 +4257,7 @@ var AgentRuntime = class {
  return await provider.complete({
  model,
  system: systemPrompt,
- messages: currentMessages.map((m) => {
- const msg = {
- role: m.role,
- content: m.content
- };
- if (m.role === "assistant" && m.toolCalls) {
- msg.tool_calls = m.toolCalls.map((tc) => ({
- id: tc.id,
- type: "function",
- function: {
- name: tc.name,
- arguments: JSON.stringify(tc.arguments)
- }
- }));
- }
- if (m.role === "tool" && m.toolCallId) {
- msg.tool_call_id = m.toolCallId;
- }
- return msg;
- }),
+ messages: currentMessages.map((m) => convertMessageToProvider(m)),
  tools: tools.length > 0 ? tools : void 0,
  maxTokens: this.config.memory?.maxTokens || DEFAULT_MAX_TOKENS,
  temperature: DEFAULT_TEMPERATURE
@@ -4252,11 +4266,24 @@ var AgentRuntime = class {
  totalUsage.promptTokens += response.usage.promptTokens;
  totalUsage.completionTokens += response.usage.completionTokens;
  totalUsage.totalTokens += response.usage.totalTokens;
+ const assistantParts = [];
+ if (response.text) {
+ assistantParts.push({ type: "text", text: response.text });
+ }
+ if (response.toolCalls) {
+ for (const tc of response.toolCalls) {
+ assistantParts.push({
+ type: "tool-call",
+ toolCallId: tc.id,
+ toolName: tc.name,
+ args: tc.arguments
+ });
+ }
+ }
  const assistantMessage = {
  id: `msg_${Date.now()}_${step2}`,
  role: "assistant",
- content: response.text,
- toolCalls: response.toolCalls,
+ parts: assistantParts,
  timestamp: Date.now()
  };
  currentMessages.push(assistantMessage);
@@ -4290,9 +4317,12 @@ var AgentRuntime = class {
  const toolResultMessage = {
  id: `tool_${tc.id}`,
  role: "tool",
- content: JSON.stringify(result),
- toolCallId: tc.id,
- toolCall,
+ parts: [{
+ type: "tool-result",
+ toolCallId: tc.id,
+ toolName: tc.name,
+ result
+ }],
  timestamp: Date.now()
  };
  currentMessages.push(toolResultMessage);
@@ -4304,9 +4334,12 @@ var AgentRuntime = class {
  const errorMessage = {
  id: `tool_error_${tc.id}`,
  role: "tool",
- content: `Error: ${toolCall.error}`,
- toolCallId: tc.id,
- toolCall,
+ parts: [{
+ type: "tool-result",
+ toolCallId: tc.id,
+ toolName: tc.name,
+ result: { error: toolCall.error }
+ }],
  timestamp: Date.now()
  };
  currentMessages.push(errorMessage);
@@ -4329,8 +4362,9 @@ var AgentRuntime = class {
  }
  this.status = "completed";
  addSpanEvent(loopSpan, "max_steps_reached", { maxSteps });
+ const lastMsg = currentMessages[currentMessages.length - 1];
  return {
- text: currentMessages[currentMessages.length - 1]?.content || "",
+ text: lastMsg ? getTextFromParts(lastMsg.parts) : "",
  messages: currentMessages,
  toolCalls,
  status: this.status,
@@ -4360,19 +4394,7 @@ var AgentRuntime = class {
  const stream = await provider.stream({
  model,
  system: systemPrompt,
- messages: currentMessages.map((m) => ({
- role: m.role,
- content: m.content,
- tool_calls: m.toolCalls?.map((tc) => ({
- id: tc.id,
- type: "function",
- function: {
- name: tc.name,
- arguments: JSON.stringify(tc.arguments)
- }
- })),
- tool_call_id: m.toolCallId
- })),
+ messages: currentMessages.map((m) => convertMessageToProvider(m)),
  tools: tools.length > 0 ? tools : void 0,
  maxTokens: this.config.memory?.maxTokens || DEFAULT_MAX_TOKENS,
  temperature: DEFAULT_TEMPERATURE
@@ -4410,9 +4432,12 @@ var AgentRuntime = class {
  const errorMessage = {
  id: `tool_error_${toolCall.id}`,
  role: "tool",
- content: `Error: ${errorStr}`,
- toolCallId: toolCall.id,
- toolCall,
+ parts: [{
+ type: "tool-result",
+ toolCallId: toolCall.id,
+ toolName: toolCall.name,
+ result: { error: errorStr }
+ }],
  timestamp: Date.now()
  };
  currentMessages.push(errorMessage);
@@ -4531,30 +4556,33 @@ var AgentRuntime = class {
  } catch {
  }
  }
+ const streamParts = [];
+ if (accumulatedText) {
+ streamParts.push({ type: "text", text: accumulatedText });
+ }
+ if (streamToolCalls.size > 0) {
+ for (const tc of streamToolCalls.values()) {
+ const { args, error } = parseStreamToolArgs(tc.arguments);
+ if (error) {
+ serverLogger.warn("[AGENT] Failed to parse streamed tool arguments", {
+ toolCallId: tc.id,
+ error
+ });
+ }
+ streamParts.push({
+ type: "tool-call",
+ toolCallId: tc.id,
+ toolName: tc.name,
+ args
+ });
+ }
+ }
  const assistantMessage = {
  id: `msg_${Date.now()}_${step2}`,
  role: "assistant",
- content: accumulatedText,
+ parts: streamParts,
  timestamp: Date.now()
  };
- if (streamToolCalls.size > 0) {
- assistantMessage.toolCalls = Array.from(streamToolCalls.values()).map(
- (tc) => {
- const { args, error } = parseStreamToolArgs(tc.arguments);
- if (error) {
- serverLogger.warn("[AGENT] Failed to parse streamed tool arguments", {
- toolCallId: tc.id,
- error
- });
- }
- return {
- id: tc.id,
- name: tc.name,
- arguments: args
- };
- }
- );
- }
  currentMessages.push(assistantMessage);
  await this.memory.add(assistantMessage);
  if (finishReason === "tool_calls" && streamToolCalls.size > 0) {
@@ -4609,9 +4637,12 @@ var AgentRuntime = class {
  const toolResultMessage = {
  id: `tool_${tc.id}`,
  role: "tool",
- content: JSON.stringify(result),
- toolCallId: tc.id,
- toolCall,
+ parts: [{
+ type: "tool-result",
+ toolCallId: tc.id,
+ toolName: tc.name,
+ result
+ }],
  timestamp: Date.now()
  };
  currentMessages.push(toolResultMessage);
@@ -4626,8 +4657,9 @@ var AgentRuntime = class {
  }
  break;
  }
+ const lastMessage = currentMessages[currentMessages.length - 1];
  return {
- text: currentMessages[currentMessages.length - 1]?.content || "",
+ text: lastMessage ? getTextFromParts(lastMessage.parts) : "",
  messages: currentMessages,
  toolCalls,
  status: "completed",
@@ -4703,7 +4735,7 @@ var AgentRuntime = class {
  return "You are a helpful AI assistant.";
  }
  /**
- * Normalize input to messages array
+ * Normalize input to messages array (v5 format with parts)
  */
  normalizeInput(input) {
  if (typeof input === "string") {
@@ -4711,7 +4743,7 @@ var AgentRuntime = class {
  {
  id: `msg_${Date.now()}`,
  role: "user",
- content: input,
+ parts: [{ type: "text", text: input }],
  timestamp: Date.now()
  }
  ];
@@ -4812,7 +4844,11 @@ ${compatibility.warnings.join("\n")}`
  return runtime.generate(input.input, input.context);
  },
  async stream(input) {
- const inputMessages = input.input ? [{ role: "user", content: input.input }] : input.messages || [];
+ const inputMessages = input.input ? [{
+ id: `msg_${Date.now()}`,
+ role: "user",
+ parts: [{ type: "text", text: input.input }]
+ }] : input.messages || [];
  const stream = await runtime.stream(inputMessages, input.context, {
  onToolCall: input.onToolCall,
  onChunk: input.onChunk
@@ -5035,6 +5071,7 @@ async function setupAI(options = {}) {

  // src/ai/mcp/server.ts
  var MCPServer = class {
+ config;
  constructor(config) {
  this.config = config;
  }
@@ -5346,8 +5383,9 @@ import { anthropic } from "@ai-sdk/anthropic";

  // src/ai/production/rate-limit/limiter.ts
  var FixedWindowLimiter = class {
+ requests = /* @__PURE__ */ new Map();
+ config;
  constructor(config) {
- this.requests = /* @__PURE__ */ new Map();
  this.config = config;
  }
  check(identifier) {
@@ -5388,8 +5426,10 @@ var FixedWindowLimiter = class {
  }
  };
  var TokenBucketLimiter = class {
+ buckets = /* @__PURE__ */ new Map();
+ config;
+ refillRate;
  constructor(config) {
- this.buckets = /* @__PURE__ */ new Map();
  this.config = config;
  this.refillRate = config.maxRequests / config.windowMs;
  }
@@ -5492,9 +5532,7 @@ function rateLimitMiddleware(config) {

  // src/ai/production/cache/cache.ts
  var MemoryCache = class {
- constructor() {
- this.cache = /* @__PURE__ */ new Map();
- }
+ cache = /* @__PURE__ */ new Map();
  set(key, response) {
  this.cache.set(key, {
  response,
@@ -5525,8 +5563,9 @@ var MemoryCache = class {
  }
  };
  var LRUCache = class {
+ cache = /* @__PURE__ */ new Map();
+ maxSize;
  constructor(maxSize = 100) {
- this.cache = /* @__PURE__ */ new Map();
  this.maxSize = maxSize;
  }
  set(key, response) {
@@ -5570,9 +5609,10 @@ var LRUCache = class {
  }
  };
  var TTLCache = class {
+ cache = /* @__PURE__ */ new Map();
+ ttl;
+ cleanupInterval = null;
  constructor(ttl = 3e5) {
- this.cache = /* @__PURE__ */ new Map();
- this.cleanupInterval = null;
  this.ttl = ttl;
  this.startCleanup();
  }
@@ -5726,13 +5766,14 @@ function cacheMiddleware(config) {

  // src/ai/production/cost-tracking/tracker.ts
  var CostTracker = class {
+ records = [];
+ config;
+ dailyTotal = 0;
+ monthlyTotal = 0;
+ lastDayReset = Date.now();
+ lastMonthReset = Date.now();
+ resetInterval = null;
  constructor(config) {
- this.records = [];
- this.dailyTotal = 0;
- this.monthlyTotal = 0;
- this.lastDayReset = Date.now();
- this.lastMonthReset = Date.now();
- this.resetInterval = null;
  this.config = config;
  this.startPeriodicReset();
  }
@@ -5995,6 +6036,7 @@ var PII_PATTERNS = {
  creditCard: /\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b/g
  };
  var InputValidator = class {
+ config;
  constructor(config) {
  this.config = config || {};
  }
@@ -6054,6 +6096,7 @@ var InputValidator = class {
  }
  };
  var OutputFilter = class {
+ config;
  constructor(config) {
  this.config = config || {};
  }
@@ -6327,12 +6370,13 @@ function hasLockSupport(backend) {
  // src/ai/workflow/backends/memory.ts
  var DEFAULT_MAX_QUEUE_SIZE = 1e4;
  var MemoryBackend = class {
+ runs = /* @__PURE__ */ new Map();
+ checkpoints = /* @__PURE__ */ new Map();
+ approvals = /* @__PURE__ */ new Map();
+ queue = [];
+ locks = /* @__PURE__ */ new Map();
+ config;
  constructor(config = {}) {
- this.runs = /* @__PURE__ */ new Map();
- this.checkpoints = /* @__PURE__ */ new Map();
- this.approvals = /* @__PURE__ */ new Map();
- this.queue = [];
- this.locks = /* @__PURE__ */ new Map();
  this.config = {
  prefix: "wf:",
  debug: false,
@@ -6663,6 +6707,7 @@ var MemoryBackend = class {

  // src/ai/workflow/executor/dag-executor.ts
  var DAGExecutor = class {
+ config;
  constructor(config) {
  this.config = {
  maxConcurrency: 10,
@@ -7204,6 +7249,7 @@ var DAGExecutor = class {

  // src/ai/workflow/executor/checkpoint-manager.ts
  var CheckpointManager = class {
+ config;
  constructor(config) {
  this.config = {
  debug: false,
@@ -7361,6 +7407,7 @@ var CheckpointManager = class {
  // src/ai/workflow/executor/step-executor.ts
  var DEFAULT_STEP_TIMEOUT_MS = 5 * 60 * 1e3;
  var StepExecutor = class {
+ config;
  constructor(config = {}) {
  this.config = {
  defaultTimeout: DEFAULT_STEP_TIMEOUT_MS,
@@ -7582,8 +7629,15 @@ var StepExecutor = class {

  // src/ai/workflow/executor/workflow-executor.ts
  var WorkflowExecutor = class _WorkflowExecutor {
+ config;
+ stepExecutor;
+ checkpointManager;
+ dagExecutor;
+ workflows = /* @__PURE__ */ new Map();
+ blobResolver;
+ /** Default lock duration: 30 seconds */
+ static DEFAULT_LOCK_DURATION = 3e4;
  constructor(config) {
- this.workflows = /* @__PURE__ */ new Map();
  this.config = {
  maxConcurrency: 10,
  debug: false,
@@ -7619,10 +7673,6 @@ var WorkflowExecutor = class _WorkflowExecutor {
  };
  }
  }
- static {
- /** Default lock duration: 30 seconds */
- this.DEFAULT_LOCK_DURATION = 3e4;
- }
  /**
  * Register a workflow definition
  */
@@ -7977,8 +8027,10 @@ var WorkflowExecutor = class _WorkflowExecutor {

  // src/ai/workflow/runtime/approval-manager.ts
  var ApprovalManager = class {
+ config;
+ expirationTimer;
+ destroyed = false;
  constructor(config) {
- this.destroyed = false;
  this.config = {
  expirationCheckInterval: 6e4,
  // Check every minute
@@ -8205,6 +8257,10 @@ var ApprovalManager = class {

  // src/ai/workflow/api/workflow-client.ts
  var WorkflowClient = class {
+ backend;
+ executor;
+ approvalManager;
+ debug;
  constructor(config = {}) {
  this.debug = config.debug ?? false;
  this.backend = config.backend ?? new MemoryBackend({ debug: this.debug });