@agentforge/core 0.11.6 → 0.11.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -91,6 +91,9 @@ __export(index_exports, {
  createProgressTracker: () => createProgressTracker,
  createSSEFormatter: () => createSSEFormatter,
  createSequentialWorkflow: () => createSequentialWorkflow,
+ createSharedCache: () => createSharedCache,
+ createSharedConcurrencyController: () => createSharedConcurrencyController,
+ createSharedRateLimiter: () => createSharedRateLimiter,
  createSqliteCheckpointer: () => createSqliteCheckpointer,
  createStateAnnotation: () => createStateAnnotation,
  createSubgraph: () => createSubgraph,
@@ -146,11 +149,16 @@ __export(index_exports, {
  validateTool: () => validateTool,
  validateToolMetadata: () => validateToolMetadata,
  validateToolName: () => validateToolName,
+ withCache: () => withCache,
+ withConcurrency: () => withConcurrency,
  withErrorHandler: () => withErrorHandler,
+ withLogging: () => withLogging,
  withMetrics: () => withMetrics,
+ withRateLimit: () => withRateLimit,
  withRetry: () => withRetry,
  withTimeout: () => withTimeout,
- withTracing: () => withTracing
+ withTracing: () => withTracing,
+ withValidation: () => withValidation
  });
  module.exports = __toCommonJS(index_exports);
 
@@ -2854,6 +2862,578 @@ var presets = {
  testing
  };
 
+ // src/langgraph/middleware/caching.ts
+ var LRUCache = class {
+ cache = /* @__PURE__ */ new Map();
+ maxSize;
+ evictionStrategy;
+ constructor(maxSize, evictionStrategy = "lru") {
+ this.maxSize = maxSize;
+ this.evictionStrategy = evictionStrategy;
+ }
+ get(key) {
+ const entry = this.cache.get(key);
+ if (!entry) return void 0;
+ entry.hits++;
+ entry.lastAccessed = Date.now();
+ return entry.value;
+ }
+ set(key, value) {
+ if (this.cache.size >= this.maxSize && !this.cache.has(key)) {
+ this.evict();
+ }
+ this.cache.set(key, {
+ value,
+ timestamp: Date.now(),
+ hits: 0,
+ lastAccessed: Date.now()
+ });
+ }
+ has(key) {
+ return this.cache.has(key);
+ }
+ delete(key) {
+ return this.cache.delete(key);
+ }
+ clear() {
+ this.cache.clear();
+ }
+ size() {
+ return this.cache.size;
+ }
+ evict() {
+ if (this.cache.size === 0) return;
+ let keyToEvict;
+ if (this.evictionStrategy === "lru") {
+ let oldestAccess = Infinity;
+ for (const [key, entry] of this.cache.entries()) {
+ if (entry.lastAccessed < oldestAccess) {
+ oldestAccess = entry.lastAccessed;
+ keyToEvict = key;
+ }
+ }
+ } else if (this.evictionStrategy === "lfu") {
+ let lowestHits = Infinity;
+ for (const [key, entry] of this.cache.entries()) {
+ if (entry.hits < lowestHits) {
+ lowestHits = entry.hits;
+ keyToEvict = key;
+ }
+ }
+ } else {
+ keyToEvict = this.cache.keys().next().value;
+ }
+ if (keyToEvict) {
+ this.cache.delete(keyToEvict);
+ }
+ }
+ };
+ function defaultKeyGenerator(state) {
+ try {
+ return JSON.stringify(state);
+ } catch {
+ return String(state);
+ }
+ }
+ function withCache(node, options = {}) {
+ const {
+ ttl = 36e5,
+ // 1 hour default
+ maxSize = 100,
+ evictionStrategy = "lru",
+ keyGenerator = defaultKeyGenerator,
+ cacheErrors = false,
+ onCacheHit,
+ onCacheMiss,
+ onEviction
+ } = options;
+ const cache2 = new LRUCache(maxSize, evictionStrategy);
+ return async (state) => {
+ const cacheKey = keyGenerator(state);
+ const cachedValue = cache2.get(cacheKey);
+ if (cachedValue !== void 0) {
+ const entry = cache2.cache.get(cacheKey);
+ const age = Date.now() - entry.timestamp;
+ if (age < ttl) {
+ if (onCacheHit) {
+ onCacheHit(cacheKey, cachedValue);
+ }
+ return cachedValue;
+ } else {
+ cache2.delete(cacheKey);
+ if (onEviction) {
+ onEviction(cacheKey, cachedValue);
+ }
+ }
+ }
+ if (onCacheMiss) {
+ onCacheMiss(cacheKey);
+ }
+ try {
+ const result = await Promise.resolve(node(state));
+ cache2.set(cacheKey, result);
+ return result;
+ } catch (error) {
+ if (cacheErrors && error instanceof Error) {
+ const errorResult = { error: error.message };
+ cache2.set(cacheKey, errorResult);
+ }
+ throw error;
+ }
+ };
+ }
+ function createSharedCache(options = {}) {
+ const {
+ ttl = 36e5,
+ maxSize = 100,
+ evictionStrategy = "lru",
+ cacheErrors = false,
+ onCacheHit,
+ onCacheMiss,
+ onEviction
+ } = options;
+ const cache2 = new LRUCache(maxSize, evictionStrategy);
+ return {
+ withCache: (node, keyGenerator = defaultKeyGenerator) => {
+ return async (state) => {
+ const cacheKey = keyGenerator(state);
+ const cachedValue = cache2.get(cacheKey);
+ if (cachedValue !== void 0) {
+ const entry = cache2.cache.get(cacheKey);
+ const age = Date.now() - entry.timestamp;
+ if (age < ttl) {
+ if (onCacheHit) {
+ onCacheHit(cacheKey, cachedValue);
+ }
+ return cachedValue;
+ } else {
+ cache2.delete(cacheKey);
+ if (onEviction) {
+ onEviction(cacheKey, cachedValue);
+ }
+ }
+ }
+ if (onCacheMiss) {
+ onCacheMiss(cacheKey);
+ }
+ try {
+ const result = await Promise.resolve(node(state));
+ cache2.set(cacheKey, result);
+ return result;
+ } catch (error) {
+ if (cacheErrors && error instanceof Error) {
+ const errorResult = { error: error.message };
+ cache2.set(cacheKey, errorResult);
+ }
+ throw error;
+ }
+ };
+ },
+ clear: () => cache2.clear(),
+ size: () => cache2.size()
+ };
+ }
+
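The caching middleware above wraps a node function in an in-memory LRU/LFU/FIFO cache keyed by the serialized state, with a per-entry TTL. A minimal usage sketch, not part of the diff — the node, state fields, and values below are illustrative assumptions:

    import { withCache, createSharedCache } from "@agentforge/core";

    // Hypothetical node: an async function from state to a partial state update.
    interface SearchState { query: string; results?: string[] }
    const searchNode = async (state: SearchState) => ({
      results: [`result for ${state.query}`],
    });

    // Per-node cache: 5 minute TTL, at most 50 entries, least-recently-used
    // entries evicted first; the cache key is derived from the query.
    const cachedSearch = withCache(searchNode, {
      ttl: 5 * 60 * 1000,
      maxSize: 50,
      evictionStrategy: "lru",
      keyGenerator: (state: SearchState) => state.query,
      onCacheMiss: (key: string) => console.log("cache miss:", key),
    });

    // Shared variant: one store (and one set of callbacks) reused by several nodes.
    const shared = createSharedCache({ ttl: 60_000, maxSize: 200 });
    const cachedSearchShared = shared.withCache(searchNode, (s: SearchState) => s.query);

Note that withCache builds a private cache per wrapped node, while createSharedCache lets several nodes share one store and additionally exposes clear() and size().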
+ // src/langgraph/middleware/rate-limiting.ts
+ var TokenBucket = class {
+ constructor(maxTokens, refillRate) {
+ this.maxTokens = maxTokens;
+ this.refillRate = refillRate;
+ this.tokens = maxTokens;
+ this.lastRefill = Date.now();
+ }
+ tokens;
+ lastRefill;
+ tryConsume() {
+ this.refill();
+ if (this.tokens >= 1) {
+ this.tokens -= 1;
+ return true;
+ }
+ return false;
+ }
+ refill() {
+ const now = Date.now();
+ const timePassed = now - this.lastRefill;
+ const tokensToAdd = timePassed * this.refillRate;
+ this.tokens = Math.min(this.maxTokens, this.tokens + tokensToAdd);
+ this.lastRefill = now;
+ }
+ reset() {
+ this.tokens = this.maxTokens;
+ this.lastRefill = Date.now();
+ }
+ };
+ var SlidingWindow = class {
+ constructor(maxRequests, windowMs) {
+ this.maxRequests = maxRequests;
+ this.windowMs = windowMs;
+ }
+ requests = [];
+ tryConsume() {
+ const now = Date.now();
+ this.requests = this.requests.filter((timestamp) => now - timestamp < this.windowMs);
+ if (this.requests.length < this.maxRequests) {
+ this.requests.push(now);
+ return true;
+ }
+ return false;
+ }
+ reset() {
+ this.requests = [];
+ }
+ };
+ var FixedWindow = class {
+ constructor(maxRequests, windowMs) {
+ this.maxRequests = maxRequests;
+ this.windowMs = windowMs;
+ this.windowStart = Date.now();
+ }
+ count = 0;
+ windowStart;
+ tryConsume() {
+ const now = Date.now();
+ if (now - this.windowStart >= this.windowMs) {
+ this.count = 0;
+ this.windowStart = now;
+ }
+ if (this.count < this.maxRequests) {
+ this.count++;
+ return true;
+ }
+ return false;
+ }
+ reset() {
+ this.count = 0;
+ this.windowStart = Date.now();
+ }
+ };
+ function withRateLimit(node, options) {
+ const {
+ maxRequests,
+ windowMs,
+ strategy = "token-bucket",
+ onRateLimitExceeded,
+ onRateLimitReset,
+ keyGenerator = () => "global"
+ } = options;
+ const limiters = /* @__PURE__ */ new Map();
+ return async (state) => {
+ const key = keyGenerator(state);
+ if (!limiters.has(key)) {
+ let limiter2;
+ switch (strategy) {
+ case "token-bucket":
+ limiter2 = new TokenBucket(maxRequests, maxRequests / windowMs);
+ break;
+ case "sliding-window":
+ limiter2 = new SlidingWindow(maxRequests, windowMs);
+ break;
+ case "fixed-window":
+ limiter2 = new FixedWindow(maxRequests, windowMs);
+ break;
+ default:
+ throw new Error(`Unknown rate limit strategy: ${strategy}`);
+ }
+ limiters.set(key, limiter2);
+ }
+ const limiter = limiters.get(key);
+ if (!limiter.tryConsume()) {
+ if (onRateLimitExceeded) {
+ onRateLimitExceeded(key);
+ }
+ throw new Error(`Rate limit exceeded for key: ${key}`);
+ }
+ return await Promise.resolve(node(state));
+ };
+ }
+ function createSharedRateLimiter(options) {
+ const {
+ maxRequests,
+ windowMs,
+ strategy = "token-bucket",
+ onRateLimitExceeded,
+ onRateLimitReset
+ } = options;
+ const limiters = /* @__PURE__ */ new Map();
+ return {
+ withRateLimit: (node, keyGenerator = (state) => "global") => {
+ return async (state) => {
+ const key = keyGenerator(state);
+ if (!limiters.has(key)) {
+ let limiter2;
+ switch (strategy) {
+ case "token-bucket":
+ limiter2 = new TokenBucket(maxRequests, maxRequests / windowMs);
+ break;
+ case "sliding-window":
+ limiter2 = new SlidingWindow(maxRequests, windowMs);
+ break;
+ case "fixed-window":
+ limiter2 = new FixedWindow(maxRequests, windowMs);
+ break;
+ default:
+ throw new Error(`Unknown rate limit strategy: ${strategy}`);
+ }
+ limiters.set(key, limiter2);
+ }
+ const limiter = limiters.get(key);
+ if (!limiter.tryConsume()) {
+ if (onRateLimitExceeded) {
+ onRateLimitExceeded(key);
+ }
+ throw new Error(`Rate limit exceeded for key: ${key}`);
+ }
+ return await Promise.resolve(node(state));
+ };
+ },
+ reset: (key) => {
+ if (key) {
+ const limiter = limiters.get(key);
+ if (limiter) {
+ limiter.reset();
+ if (onRateLimitReset) {
+ onRateLimitReset(key);
+ }
+ }
+ } else {
+ limiters.forEach((limiter, k) => {
+ limiter.reset();
+ if (onRateLimitReset) {
+ onRateLimitReset(k);
+ }
+ });
+ }
+ }
+ };
+ }
+
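withRateLimit keeps one limiter per key returned by keyGenerator and supports token-bucket, sliding-window, and fixed-window strategies; calls beyond the budget throw. A sketch of intended usage, not part of the diff — the node, state fields, and thresholds are illustrative assumptions:

    import { withRateLimit, createSharedRateLimiter } from "@agentforge/core";

    // Hypothetical node that calls a model on behalf of a user.
    const callModel = async (state: { userId: string; prompt: string }) => ({
      answer: `reply to ${state.prompt}`,
    });

    // At most 10 calls per rolling minute per user; the 11th call throws
    // "Rate limit exceeded for key: <userId>".
    const limitedCall = withRateLimit(callModel, {
      maxRequests: 10,
      windowMs: 60_000,
      strategy: "sliding-window",
      keyGenerator: (state: { userId: string; prompt: string }) => state.userId,
      onRateLimitExceeded: (key: string) => console.warn("rate limited:", key),
    });

    // Shared variant: several nodes draw from the same per-key budget,
    // and the limiter can be reset per key or for all keys at once.
    const limiter = createSharedRateLimiter({ maxRequests: 100, windowMs: 60_000 });
    const limitedCallShared = limiter.withRateLimit(
      callModel,
      (state: { userId: string; prompt: string }) => state.userId
    );
    limiter.reset(); // clears all keys; limiter.reset("some-key") clears one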
+ // src/langgraph/middleware/validation.ts
+ function withValidation(node, options) {
+ const {
+ inputSchema,
+ outputSchema,
+ inputValidator,
+ outputValidator,
+ mode = "both",
+ throwOnError = true,
+ onValidationError,
+ onValidationSuccess,
+ stripUnknown = false
+ } = options;
+ return async (state) => {
+ if (mode === "input" || mode === "both") {
+ try {
+ if (inputSchema) {
+ const validated = inputSchema.parse(state);
+ state = validated;
+ }
+ if (inputValidator) {
+ const isValid = await Promise.resolve(inputValidator(state));
+ if (!isValid) {
+ throw new Error("Input validation failed: custom validator returned false");
+ }
+ }
+ if (onValidationSuccess) {
+ onValidationSuccess(state, "input");
+ }
+ } catch (error) {
+ if (onValidationError) {
+ return onValidationError(error, state, "input");
+ }
+ if (throwOnError) {
+ throw error;
+ }
+ return state;
+ }
+ }
+ const result = await Promise.resolve(node(state));
+ if (mode === "output" || mode === "both") {
+ try {
+ if (outputSchema) {
+ const validated = outputSchema.parse(result);
+ if (onValidationSuccess) {
+ onValidationSuccess(validated, "output");
+ }
+ return validated;
+ }
+ if (outputValidator) {
+ const isValid = await Promise.resolve(outputValidator(result));
+ if (!isValid) {
+ throw new Error("Output validation failed: custom validator returned false");
+ }
+ }
+ if (onValidationSuccess) {
+ onValidationSuccess(result, "output");
+ }
+ } catch (error) {
+ if (onValidationError) {
+ return onValidationError(error, state, "output");
+ }
+ if (throwOnError) {
+ throw error;
+ }
+ }
+ }
+ return result;
+ };
+ }
+
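withValidation checks a node's input and/or output, either through schema objects exposing a parse method (Zod-style, judging by the parse calls above) or through custom boolean validators; when onValidationError is supplied, its return value becomes the fallback result. A sketch — the zod schemas and node below are illustrative assumptions, not taken from the package:

    import { z } from "zod";
    import { withValidation } from "@agentforge/core";

    const InputSchema = z.object({ question: z.string().min(1) });
    const OutputSchema = z.object({ answer: z.string() });

    const answerNode = async (state: { question: string }) => ({
      answer: `echo: ${state.question}`,
    });

    const validatedAnswer = withValidation(answerNode, {
      inputSchema: InputSchema,   // parsed before the node runs; the parsed value is passed on
      outputSchema: OutputSchema, // the parsed result is what the wrapper returns
      mode: "both",               // "input" | "output" | "both"
      onValidationError: (error: unknown, state: unknown, phase: string) => {
        console.error(`validation failed during ${phase} phase`, error);
        return state;             // fallback value returned instead of throwing
      },
    });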
+ // src/langgraph/middleware/concurrency.ts
+ var ConcurrencyController = class {
+ constructor(maxConcurrent, maxQueueSize, onQueued, onExecutionStart, onExecutionComplete, onQueueFull, queueTimeout) {
+ this.maxConcurrent = maxConcurrent;
+ this.maxQueueSize = maxQueueSize;
+ this.onQueued = onQueued;
+ this.onExecutionStart = onExecutionStart;
+ this.onExecutionComplete = onExecutionComplete;
+ this.onQueueFull = onQueueFull;
+ this.queueTimeout = queueTimeout;
+ }
+ activeCount = 0;
+ queue = [];
+ async execute(state, priority, executor) {
+ if (this.activeCount < this.maxConcurrent) {
+ return this.executeTask(state, executor);
+ }
+ if (this.maxQueueSize > 0 && this.queue.length >= this.maxQueueSize) {
+ if (this.onQueueFull) {
+ this.onQueueFull(state);
+ }
+ throw new Error(`Queue is full (max size: ${this.maxQueueSize})`);
+ }
+ return new Promise((resolve, reject) => {
+ const task = {
+ state,
+ priority,
+ executor,
+ resolve,
+ reject,
+ timestamp: Date.now()
+ };
+ this.insertByPriority(task);
+ if (this.onQueued) {
+ this.onQueued(this.queue.length, state);
+ }
+ if (this.queueTimeout && this.queueTimeout > 0) {
+ setTimeout(() => {
+ const index = this.queue.indexOf(task);
+ if (index !== -1) {
+ this.queue.splice(index, 1);
+ reject(new Error(`Task timed out after ${this.queueTimeout}ms in queue`));
+ }
+ }, this.queueTimeout);
+ }
+ });
+ }
+ insertByPriority(task) {
+ const priorityOrder = { high: 0, normal: 1, low: 2 };
+ const taskPriorityValue = priorityOrder[task.priority];
+ let insertIndex = this.queue.length;
+ for (let i = 0; i < this.queue.length; i++) {
+ const queuedPriorityValue = priorityOrder[this.queue[i].priority];
+ if (taskPriorityValue < queuedPriorityValue) {
+ insertIndex = i;
+ break;
+ }
+ }
+ this.queue.splice(insertIndex, 0, task);
+ }
+ async executeTask(state, executor) {
+ this.activeCount++;
+ if (this.onExecutionStart) {
+ this.onExecutionStart(this.activeCount, state);
+ }
+ try {
+ const result = await executor(state);
+ if (this.onExecutionComplete) {
+ this.onExecutionComplete(this.activeCount - 1, state);
+ }
+ return result;
+ } finally {
+ this.activeCount--;
+ this.processQueue();
+ }
+ }
+ processQueue() {
+ if (this.queue.length === 0 || this.activeCount >= this.maxConcurrent) {
+ return;
+ }
+ const task = this.queue.shift();
+ if (task) {
+ this.executeTask(task.state, task.executor).then(task.resolve).catch(task.reject);
+ }
+ }
+ getStats() {
+ return {
+ activeCount: this.activeCount,
+ queueSize: this.queue.length
+ };
+ }
+ clear() {
+ this.queue.forEach((task) => {
+ task.reject(new Error("Queue cleared"));
+ });
+ this.queue = [];
+ }
+ };
+ function withConcurrency(node, options = {}) {
+ const {
+ maxConcurrent = 1,
+ maxQueueSize = 0,
+ priorityFn = () => "normal",
+ onQueued,
+ onExecutionStart,
+ onExecutionComplete,
+ onQueueFull,
+ queueTimeout = 0
+ } = options;
+ const controller = new ConcurrencyController(
+ maxConcurrent,
+ maxQueueSize,
+ onQueued,
+ onExecutionStart,
+ onExecutionComplete,
+ onQueueFull,
+ queueTimeout
+ );
+ return async (state) => {
+ const priority = priorityFn(state);
+ return controller.execute(state, priority, async (s) => await node(s));
+ };
+ }
+ function createSharedConcurrencyController(options = {}) {
+ const {
+ maxConcurrent = 1,
+ maxQueueSize = 0,
+ priorityFn = () => "normal",
+ onQueued,
+ onExecutionStart,
+ onExecutionComplete,
+ onQueueFull,
+ queueTimeout = 0
+ } = options;
+ const controller = new ConcurrencyController(
+ maxConcurrent,
+ maxQueueSize,
+ onQueued,
+ onExecutionStart,
+ onExecutionComplete,
+ onQueueFull,
+ queueTimeout
+ );
+ return {
+ withConcurrency: (node) => {
+ return async (state) => {
+ const priority = priorityFn(state);
+ return controller.execute(state, priority, async (s) => await node(s));
+ };
+ },
+ getStats: () => controller.getStats(),
+ clear: () => controller.clear()
+ };
+ }
+
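The concurrency middleware caps in-flight executions of a node, queues the overflow (optionally bounded, with a queue timeout), and serves the queue in high/normal/low priority order. A usage sketch under assumed node and state shapes, not part of the diff:

    import { withConcurrency, createSharedConcurrencyController } from "@agentforge/core";

    // Hypothetical node that fetches a page for the given URL.
    const fetchPage = async (state: { url: string; urgent?: boolean }) => ({
      html: `<contents of ${state.url}>`,
    });

    // At most 3 concurrent executions; up to 20 queued tasks, each rejected
    // if it waits more than 10 seconds; urgent states jump the queue.
    const throttledFetch = withConcurrency(fetchPage, {
      maxConcurrent: 3,
      maxQueueSize: 20,
      queueTimeout: 10_000,
      priorityFn: (state: { urgent?: boolean }) => (state.urgent ? "high" : "normal"),
      onQueueFull: (state: { url: string }) => console.warn("queue full:", state.url),
    });

    // Shared variant: several nodes compete for the same concurrency budget.
    const pool = createSharedConcurrencyController({ maxConcurrent: 5 });
    const pooledFetch = pool.withConcurrency(fetchPage);
    console.log(pool.getStats()); // { activeCount, queueSize }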
  // src/langgraph/persistence/checkpointer.ts
  var import_langgraph5 = require("@langchain/langgraph");
  function createMemoryCheckpointer(options) {
@@ -4413,6 +4993,9 @@ function createCircuitBreaker(options) {
  createProgressTracker,
  createSSEFormatter,
  createSequentialWorkflow,
+ createSharedCache,
+ createSharedConcurrencyController,
+ createSharedRateLimiter,
  createSqliteCheckpointer,
  createStateAnnotation,
  createSubgraph,
@@ -4468,9 +5051,14 @@ function createCircuitBreaker(options) {
  validateTool,
  validateToolMetadata,
  validateToolName,
+ withCache,
+ withConcurrency,
  withErrorHandler,
+ withLogging,
  withMetrics,
+ withRateLimit,
  withRetry,
  withTimeout,
- withTracing
+ withTracing,
+ withValidation
  });