@agentforge/core 0.11.6 → 0.11.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +658 -2
- package/dist/index.d.cts +453 -6
- package/dist/index.d.ts +453 -6
- package/dist/index.js +646 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -2698,6 +2698,578 @@ var presets = {
|
|
|
2698
2698
|
testing
|
|
2699
2699
|
};
|
|
2700
2700
|
|
|
2701
|
+
// src/langgraph/middleware/caching.ts
/**
 * Bounded in-memory cache with pluggable eviction: "lru" (least recently
 * accessed), "lfu" (fewest hits), or anything else = FIFO (insertion
 * order). Each entry tracks its insertion timestamp, hit count, and
 * last-access time; `get` updates the hit/recency stats.
 */
var LRUCache = class {
  cache = /* @__PURE__ */ new Map();
  maxSize;
  evictionStrategy;
  constructor(maxSize, evictionStrategy = "lru") {
    this.maxSize = maxSize;
    this.evictionStrategy = evictionStrategy;
  }
  // Returns the cached value (or undefined) and bumps hit/recency stats.
  get(key) {
    const entry = this.cache.get(key);
    if (!entry) return void 0;
    entry.hits++;
    entry.lastAccessed = Date.now();
    return entry.value;
  }
  // Inserts/overwrites; evicts one victim first when adding a NEW key at capacity.
  set(key, value) {
    if (this.cache.size >= this.maxSize && !this.cache.has(key)) {
      this.evict();
    }
    this.cache.set(key, {
      value,
      timestamp: Date.now(),
      hits: 0,
      lastAccessed: Date.now()
    });
  }
  has(key) {
    return this.cache.has(key);
  }
  delete(key) {
    return this.cache.delete(key);
  }
  clear() {
    this.cache.clear();
  }
  size() {
    return this.cache.size;
  }
  // Removes one entry according to the configured strategy.
  evict() {
    if (this.cache.size === 0) return;
    let keyToEvict;
    if (this.evictionStrategy === "lru") {
      let oldestAccess = Infinity;
      for (const [key, entry] of this.cache.entries()) {
        if (entry.lastAccessed < oldestAccess) {
          oldestAccess = entry.lastAccessed;
          keyToEvict = key;
        }
      }
    } else if (this.evictionStrategy === "lfu") {
      let lowestHits = Infinity;
      for (const [key, entry] of this.cache.entries()) {
        if (entry.hits < lowestHits) {
          lowestHits = entry.hits;
          keyToEvict = key;
        }
      }
    } else {
      keyToEvict = this.cache.keys().next().value;
    }
    // BUG FIX: the original tested `if (keyToEvict)`, which silently skipped
    // eviction when the chosen victim key was falsy (e.g. "" or 0), letting
    // the cache grow past maxSize. Compare against undefined instead.
    if (keyToEvict !== void 0) {
      this.cache.delete(keyToEvict);
    }
  }
};
|
|
2767
|
+
/**
 * Derives a cache key from a node's state: the JSON encoding when the
 * state is serializable, otherwise its string form (covers circular
 * structures, BigInt, and other JSON.stringify failures).
 */
function defaultKeyGenerator(state) {
  let key;
  try {
    key = JSON.stringify(state);
  } catch {
    key = String(state);
  }
  return key;
}
|
|
2774
|
+
/**
 * Wraps a graph node with a TTL-bounded memo cache.
 *
 * Options: `ttl` (ms, default 1 hour), `maxSize` (default 100),
 * `evictionStrategy` ("lru" | "lfu" | FIFO fallback), `keyGenerator`
 * (state -> string, default JSON encoding), `cacheErrors` (also cache an
 * `{ error }` record before rethrowing), and the `onCacheHit` /
 * `onCacheMiss` / `onEviction` observers.
 *
 * Returns an async node with the same (state) => result signature.
 */
function withCache(node, options = {}) {
  const {
    ttl = 36e5,
    // 1 hour default
    maxSize = 100,
    evictionStrategy = "lru",
    keyGenerator = defaultKeyGenerator,
    cacheErrors = false,
    onCacheHit,
    onCacheMiss,
    onEviction
  } = options;
  const cache2 = new LRUCache(maxSize, evictionStrategy);
  return async (state) => {
    const cacheKey = keyGenerator(state);
    // FIX: peek at the raw entry BEFORE calling get(). The original called
    // get() first, which bumped the entry's hit count and lastAccessed even
    // when the entry turned out to be expired and was deleted immediately
    // after — skewing LRU/LFU ordering for the surviving entries.
    const entry = cache2.cache.get(cacheKey);
    if (entry !== void 0 && entry.value !== void 0) {
      const age = Date.now() - entry.timestamp;
      if (age < ttl) {
        // Real hit: now let get() record the access stats.
        const cachedValue = cache2.get(cacheKey);
        if (onCacheHit) {
          onCacheHit(cacheKey, cachedValue);
        }
        return cachedValue;
      }
      // Lazy expiry: drop the stale entry and notify.
      cache2.delete(cacheKey);
      if (onEviction) {
        onEviction(cacheKey, entry.value);
      }
    }
    if (onCacheMiss) {
      onCacheMiss(cacheKey);
    }
    try {
      const result = await Promise.resolve(node(state));
      cache2.set(cacheKey, result);
      return result;
    } catch (error) {
      if (cacheErrors && error instanceof Error) {
        // NOTE(review): a cached error record is served as a normal hit on
        // later calls — callers must check for the `error` key themselves.
        const errorResult = { error: error.message };
        cache2.set(cacheKey, errorResult);
      }
      throw error;
    }
  };
}
|
|
2821
|
+
/**
 * Creates a cache shared across several nodes. Returns a `withCache`
 * wrapper bound to the shared store plus `clear`/`size` helpers.
 * Caching semantics mirror the standalone `withCache`: TTL checked on
 * read with lazy expiry, optional `{ error }` caching of failed runs.
 */
function createSharedCache(options = {}) {
  const {
    ttl = 36e5,
    maxSize = 100,
    evictionStrategy = "lru",
    cacheErrors = false,
    onCacheHit,
    onCacheMiss,
    onEviction
  } = options;
  const store = new LRUCache(maxSize, evictionStrategy);
  const wrap = (node, keyGenerator = defaultKeyGenerator) => async (state) => {
    const key = keyGenerator(state);
    const hit = store.get(key);
    if (hit !== void 0) {
      // TTL check reads the raw entry's insertion timestamp.
      const record = store.cache.get(key);
      if (Date.now() - record.timestamp < ttl) {
        onCacheHit?.(key, hit);
        return hit;
      }
      // Expired: drop and fall through to recompute.
      store.delete(key);
      onEviction?.(key, hit);
    }
    onCacheMiss?.(key);
    try {
      const result = await Promise.resolve(node(state));
      store.set(key, result);
      return result;
    } catch (error) {
      if (cacheErrors && error instanceof Error) {
        const errorResult = { error: error.message };
        store.set(key, errorResult);
      }
      throw error;
    }
  };
  return {
    withCache: wrap,
    clear: () => store.clear(),
    size: () => store.size()
  };
}
|
|
2872
|
+
|
|
2873
|
+
// src/langgraph/middleware/rate-limiting.ts
|
|
2874
|
+
/**
 * Token-bucket rate limiter: starts full and continuously refills at
 * `refillRate` tokens per millisecond, capped at `maxTokens`.
 */
var TokenBucket = class {
  tokens;
  lastRefill;
  constructor(maxTokens, refillRate) {
    this.maxTokens = maxTokens;
    this.refillRate = refillRate;
    this.tokens = maxTokens;
    this.lastRefill = Date.now();
  }
  // Consume one token if available; refills lazily on each call.
  tryConsume() {
    this.refill();
    if (this.tokens < 1) {
      return false;
    }
    this.tokens -= 1;
    return true;
  }
  // Credit tokens for the time elapsed since the previous refill.
  refill() {
    const now = Date.now();
    const elapsed = now - this.lastRefill;
    this.tokens = Math.min(this.maxTokens, this.tokens + elapsed * this.refillRate);
    this.lastRefill = now;
  }
  // Restore the bucket to its full, just-created state.
  reset() {
    this.tokens = this.maxTokens;
    this.lastRefill = Date.now();
  }
};
|
|
2903
|
+
/**
 * Sliding-window rate limiter: admits at most `maxRequests` within any
 * trailing `windowMs` span, storing one timestamp per admitted request.
 */
var SlidingWindow = class {
  requests = [];
  constructor(maxRequests, windowMs) {
    this.maxRequests = maxRequests;
    this.windowMs = windowMs;
  }
  // Drop timestamps that fell out of the window, then admit if room remains.
  tryConsume() {
    const now = Date.now();
    const cutoff = now - this.windowMs;
    this.requests = this.requests.filter((t) => t > cutoff);
    if (this.requests.length >= this.maxRequests) {
      return false;
    }
    this.requests.push(now);
    return true;
  }
  // Forget all recorded requests.
  reset() {
    this.requests = [];
  }
};
|
|
2922
|
+
/**
 * Fixed-window rate limiter: counts requests inside consecutive
 * `windowMs` buckets; the counter resets when a new bucket begins.
 */
var FixedWindow = class {
  count = 0;
  windowStart;
  constructor(maxRequests, windowMs) {
    this.maxRequests = maxRequests;
    this.windowMs = windowMs;
    this.windowStart = Date.now();
  }
  tryConsume() {
    const now = Date.now();
    // Roll over to a fresh window once the current one has elapsed.
    if (now - this.windowStart >= this.windowMs) {
      this.count = 0;
      this.windowStart = now;
    }
    if (this.count >= this.maxRequests) {
      return false;
    }
    this.count++;
    return true;
  }
  // Restart counting from an empty window beginning now.
  reset() {
    this.count = 0;
    this.windowStart = Date.now();
  }
};
|
|
2947
|
+
/**
 * Wraps a node so executions are admitted by a per-key rate limiter.
 * Strategies: "token-bucket" (default), "sliding-window", "fixed-window".
 * A limiter is created lazily per `keyGenerator(state)` key; a rejected
 * request fires `onRateLimitExceeded(key)` and throws.
 * NOTE(review): `onRateLimitReset` is accepted but never invoked here —
 * it appears only meaningful for `createSharedRateLimiter`.
 */
function withRateLimit(node, options) {
  const {
    maxRequests,
    windowMs,
    strategy = "token-bucket",
    onRateLimitExceeded,
    onRateLimitReset,
    keyGenerator = () => "global"
  } = options;
  const limiters = /* @__PURE__ */ new Map();
  // Build a limiter for the configured strategy (throws on unknown names).
  const buildLimiter = () => {
    switch (strategy) {
      case "token-bucket":
        // Refill rate is expressed in tokens per millisecond.
        return new TokenBucket(maxRequests, maxRequests / windowMs);
      case "sliding-window":
        return new SlidingWindow(maxRequests, windowMs);
      case "fixed-window":
        return new FixedWindow(maxRequests, windowMs);
      default:
        throw new Error(`Unknown rate limit strategy: ${strategy}`);
    }
  };
  return async (state) => {
    const key = keyGenerator(state);
    if (!limiters.has(key)) {
      limiters.set(key, buildLimiter());
    }
    const limiter = limiters.get(key);
    if (!limiter.tryConsume()) {
      if (onRateLimitExceeded) {
        onRateLimitExceeded(key);
      }
      throw new Error(`Rate limit exceeded for key: ${key}`);
    }
    return await Promise.resolve(node(state));
  };
}
|
|
2986
|
+
/**
 * Builds a rate limiter whose per-key state is shared by every node
 * wrapped through the returned `withRateLimit`. Also exposes `reset`,
 * which resets one key's limiter (or, with no key, all of them) and
 * notifies `onRateLimitReset` for each reset key.
 */
function createSharedRateLimiter(options) {
  const {
    maxRequests,
    windowMs,
    strategy = "token-bucket",
    onRateLimitExceeded,
    onRateLimitReset
  } = options;
  const limiters = /* @__PURE__ */ new Map();
  // Fetch-or-create the limiter for a key (throws on an unknown strategy).
  const limiterFor = (key) => {
    let limiter = limiters.get(key);
    if (limiter) return limiter;
    switch (strategy) {
      case "token-bucket":
        // Refill rate is tokens per millisecond.
        limiter = new TokenBucket(maxRequests, maxRequests / windowMs);
        break;
      case "sliding-window":
        limiter = new SlidingWindow(maxRequests, windowMs);
        break;
      case "fixed-window":
        limiter = new FixedWindow(maxRequests, windowMs);
        break;
      default:
        throw new Error(`Unknown rate limit strategy: ${strategy}`);
    }
    limiters.set(key, limiter);
    return limiter;
  };
  const resetOne = (key, limiter) => {
    limiter.reset();
    onRateLimitReset?.(key);
  };
  return {
    withRateLimit: (node, keyGenerator = (state) => "global") => async (state) => {
      const key = keyGenerator(state);
      const limiter = limiterFor(key);
      if (!limiter.tryConsume()) {
        onRateLimitExceeded?.(key);
        throw new Error(`Rate limit exceeded for key: ${key}`);
      }
      return await Promise.resolve(node(state));
    },
    reset: (key) => {
      if (key) {
        const limiter = limiters.get(key);
        if (limiter) {
          resetOne(key, limiter);
        }
      } else {
        limiters.forEach((limiter, k) => resetOne(k, limiter));
      }
    }
  };
}
|
|
3046
|
+
|
|
3047
|
+
// src/langgraph/middleware/validation.ts
|
|
3048
|
+
/**
 * Wraps a node with input and/or output validation.
 *
 * Validation sources: a schema object exposing `.parse()` and/or a
 * boolean-returning custom validator, applied per `mode` ("input",
 * "output", or "both"). On failure, `onValidationError` (when given)
 * wins and its return value replaces the node result; otherwise the
 * error is rethrown when `throwOnError` is true, or swallowed (input
 * failure returns the state unchanged; output failure returns the raw
 * result) when it is false.
 *
 * NOTE(review): `stripUnknown` is accepted but never used in this build.
 * NOTE(review): when `outputSchema` is present, its parsed value is
 * returned immediately and `outputValidator` is skipped.
 */
function withValidation(node, options) {
  const {
    inputSchema,
    outputSchema,
    inputValidator,
    outputValidator,
    mode = "both",
    throwOnError = true,
    onValidationError,
    onValidationSuccess,
    stripUnknown = false
  } = options;
  const validateInput = mode === "input" || mode === "both";
  const validateOutput = mode === "output" || mode === "both";
  return async (state) => {
    if (validateInput) {
      try {
        if (inputSchema) {
          // The parsed value replaces the state passed to the node.
          state = inputSchema.parse(state);
        }
        if (inputValidator) {
          const ok = await Promise.resolve(inputValidator(state));
          if (!ok) {
            throw new Error("Input validation failed: custom validator returned false");
          }
        }
        onValidationSuccess?.(state, "input");
      } catch (error) {
        if (onValidationError) {
          return onValidationError(error, state, "input");
        }
        if (throwOnError) {
          throw error;
        }
        return state;
      }
    }
    const result = await Promise.resolve(node(state));
    if (validateOutput) {
      try {
        if (outputSchema) {
          const validated = outputSchema.parse(result);
          onValidationSuccess?.(validated, "output");
          return validated;
        }
        if (outputValidator) {
          const ok = await Promise.resolve(outputValidator(result));
          if (!ok) {
            throw new Error("Output validation failed: custom validator returned false");
          }
        }
        onValidationSuccess?.(result, "output");
      } catch (error) {
        if (onValidationError) {
          return onValidationError(error, state, "output");
        }
        if (throwOnError) {
          throw error;
        }
      }
    }
    return result;
  };
}
|
|
3117
|
+
|
|
3118
|
+
// src/langgraph/middleware/concurrency.ts
|
|
3119
|
+
/**
 * Limits how many executions run at once. Overflow tasks queue in
 * priority order (high before normal before low, FIFO within a level) up
 * to `maxQueueSize` (0 = unbounded) and may expire after `queueTimeout`
 * ms spent waiting. Optional callbacks observe queueing, execution
 * start/completion, and queue-full rejections.
 */
var ConcurrencyController = class {
  constructor(maxConcurrent, maxQueueSize, onQueued, onExecutionStart, onExecutionComplete, onQueueFull, queueTimeout) {
    this.maxConcurrent = maxConcurrent;
    this.maxQueueSize = maxQueueSize;
    this.onQueued = onQueued;
    this.onExecutionStart = onExecutionStart;
    this.onExecutionComplete = onExecutionComplete;
    this.onQueueFull = onQueueFull;
    this.queueTimeout = queueTimeout;
  }
  activeCount = 0;
  queue = [];
  /**
   * Runs `executor(state)` immediately when a slot is free, otherwise
   * queues it. Rejects when the queue is full or the wait times out.
   */
  async execute(state, priority, executor) {
    if (this.activeCount < this.maxConcurrent) {
      return this.executeTask(state, executor);
    }
    if (this.maxQueueSize > 0 && this.queue.length >= this.maxQueueSize) {
      if (this.onQueueFull) {
        this.onQueueFull(state);
      }
      throw new Error(`Queue is full (max size: ${this.maxQueueSize})`);
    }
    return new Promise((resolve, reject) => {
      const task = {
        state,
        priority,
        executor,
        resolve,
        reject,
        timestamp: Date.now()
      };
      this.insertByPriority(task);
      if (this.onQueued) {
        this.onQueued(this.queue.length, state);
      }
      if (this.queueTimeout && this.queueTimeout > 0) {
        // FIX: keep the timer handle on the task so it can be cleared once
        // the task is dequeued or the queue is cleared. The original never
        // cancelled it, leaking one live timer per queued task.
        task.timer = setTimeout(() => {
          const index = this.queue.indexOf(task);
          if (index !== -1) {
            this.queue.splice(index, 1);
            reject(new Error(`Task timed out after ${this.queueTimeout}ms in queue`));
          }
        }, this.queueTimeout);
      }
    });
  }
  // Insert keeping the queue sorted high -> normal -> low (stable within a level).
  insertByPriority(task) {
    const priorityOrder = { high: 0, normal: 1, low: 2 };
    const taskPriorityValue = priorityOrder[task.priority];
    let insertIndex = this.queue.length;
    for (let i = 0; i < this.queue.length; i++) {
      if (taskPriorityValue < priorityOrder[this.queue[i].priority]) {
        insertIndex = i;
        break;
      }
    }
    this.queue.splice(insertIndex, 0, task);
  }
  // Runs one task, tracking the active count and draining the queue after.
  async executeTask(state, executor) {
    this.activeCount++;
    if (this.onExecutionStart) {
      this.onExecutionStart(this.activeCount, state);
    }
    try {
      const result = await executor(state);
      if (this.onExecutionComplete) {
        // Reported count excludes the task that is finishing.
        this.onExecutionComplete(this.activeCount - 1, state);
      }
      return result;
    } finally {
      this.activeCount--;
      this.processQueue();
    }
  }
  // Starts the next queued task when a slot is free.
  processQueue() {
    if (this.queue.length === 0 || this.activeCount >= this.maxConcurrent) {
      return;
    }
    const task = this.queue.shift();
    if (task) {
      if (task.timer) {
        clearTimeout(task.timer);
      }
      this.executeTask(task.state, task.executor).then(task.resolve).catch(task.reject);
    }
  }
  getStats() {
    return {
      activeCount: this.activeCount,
      queueSize: this.queue.length
    };
  }
  // Rejects every queued task and empties the queue.
  clear() {
    this.queue.forEach((task) => {
      if (task.timer) {
        clearTimeout(task.timer);
      }
      task.reject(new Error("Queue cleared"));
    });
    this.queue = [];
  }
};
|
|
3216
|
+
/**
 * Wraps a node so at most `maxConcurrent` invocations run at the same
 * time; extra calls queue per the options (see ConcurrencyController).
 * `priorityFn(state)` decides each call's queue priority.
 */
function withConcurrency(node, options = {}) {
  const {
    maxConcurrent = 1,
    maxQueueSize = 0,
    priorityFn = () => "normal",
    onQueued,
    onExecutionStart,
    onExecutionComplete,
    onQueueFull,
    queueTimeout = 0
  } = options;
  const controller = new ConcurrencyController(
    maxConcurrent,
    maxQueueSize,
    onQueued,
    onExecutionStart,
    onExecutionComplete,
    onQueueFull,
    queueTimeout
  );
  return async (state) => controller.execute(state, priorityFn(state), async (s) => await node(s));
}
|
|
3241
|
+
/**
 * Creates one ConcurrencyController shared by every node wrapped through
 * the returned `withConcurrency`; `getStats` and `clear` operate on the
 * shared queue.
 */
function createSharedConcurrencyController(options = {}) {
  const {
    maxConcurrent = 1,
    maxQueueSize = 0,
    priorityFn = () => "normal",
    onQueued,
    onExecutionStart,
    onExecutionComplete,
    onQueueFull,
    queueTimeout = 0
  } = options;
  const controller = new ConcurrencyController(
    maxConcurrent,
    maxQueueSize,
    onQueued,
    onExecutionStart,
    onExecutionComplete,
    onQueueFull,
    queueTimeout
  );
  const wrap = (node) => async (state) => {
    const priority = priorityFn(state);
    return controller.execute(state, priority, async (s) => await node(s));
  };
  return {
    withConcurrency: wrap,
    getStats: () => controller.getStats(),
    clear: () => controller.clear()
  };
}
|
|
3272
|
+
|
|
2701
3273
|
// src/langgraph/persistence/checkpointer.ts
|
|
2702
3274
|
import { MemorySaver } from "@langchain/langgraph";
|
|
2703
3275
|
function createMemoryCheckpointer(options) {
|
|
@@ -4194,6 +4766,68 @@ var CircuitBreaker = class {
|
|
|
4194
4766
|
function createCircuitBreaker(options) {
|
|
4195
4767
|
return new CircuitBreaker(options);
|
|
4196
4768
|
}
|
|
4769
|
+
|
|
4770
|
+
// src/prompt-loader/index.ts
|
|
4771
|
+
import { readFileSync } from "fs";
|
|
4772
|
+
import { join } from "path";
|
|
4773
|
+
// Cap applied to interpolated untrusted values to bound prompt size.
var MAX_VARIABLE_LENGTH = 500;
/**
 * Flattens an untrusted value for prompt interpolation: strips leading
 * markdown heading markers per line, collapses newlines and runs of
 * whitespace into single spaces, and truncates to MAX_VARIABLE_LENGTH
 * characters (appending "..."). null/undefined become "".
 */
function sanitizeValue(value) {
  if (value === void 0 || value === null) return "";
  const flattened = String(value)
    .replace(/^#+\s*/gm, "")
    .replace(/[\r\n]+/g, " ")
    .trim()
    .replace(/\s+/g, " ");
  if (flattened.length <= MAX_VARIABLE_LENGTH) {
    return flattened;
  }
  return flattened.substring(0, MAX_VARIABLE_LENGTH) + "...";
}
|
|
4785
|
+
/**
 * Renders a mustache-lite template:
 *   {{#if name}}...{{/if}}  - content kept when the variable is truthy
 *   {{name}}                - replaced by the variable ("" when missing)
 *
 * Options are either a flat variables object, or an object with
 * `trustedVariables` (inserted verbatim) and `untrustedVariables`
 * (run through sanitizeValue before substitution). Conditionals always
 * test the RAW value; substitution uses the sanitized one.
 */
function renderTemplate(template, options) {
  let rawVariables;
  let sanitizedVariables;
  if ("trustedVariables" in options || "untrustedVariables" in options) {
    const { trustedVariables, untrustedVariables } = options;
    rawVariables = { ...trustedVariables, ...untrustedVariables };
    const sanitizedUntrusted = {};
    for (const [key, value] of Object.entries(untrustedVariables ?? {})) {
      sanitizedUntrusted[key] = sanitizeValue(value);
    }
    sanitizedVariables = { ...trustedVariables, ...sanitizedUntrusted };
  } else {
    // Flat variables object: used verbatim for both tests and substitution.
    rawVariables = options;
    sanitizedVariables = options;
  }
  const withConditionals = template.replace(
    /\{\{#if\s+(\w+)\}\}([\s\S]*?)\{\{\/if\}\}/g,
    (_, varName, content) => rawVariables[varName] ? content : ""
  );
  return withConditionals.replace(/\{\{(\w+)\}\}/g, (_, varName) => {
    const value = sanitizedVariables[varName];
    return value === void 0 || value === null ? "" : String(value);
  });
}
|
|
4819
|
+
/**
 * Loads `<promptsDir>/<promptName>.md` (default dir: "prompts" under the
 * current working directory) and renders it with `renderTemplate`.
 * Throws a descriptive Error when the file cannot be read or rendering
 * fails.
 */
function loadPrompt(promptName, options = {}, promptsDir) {
  const baseDir = promptsDir || join(process.cwd(), "prompts");
  const promptPath = join(baseDir, `${promptName}.md`);
  try {
    const template = readFileSync(promptPath, "utf-8");
    return renderTemplate(template, options);
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new Error(
      `Failed to load prompt "${promptName}" from ${promptPath}: ${reason}`
    );
  }
}
|
|
4197
4831
|
export {
|
|
4198
4832
|
AgentError,
|
|
4199
4833
|
BatchProcessor,
|
|
@@ -4256,6 +4890,9 @@ export {
|
|
|
4256
4890
|
createProgressTracker,
|
|
4257
4891
|
createSSEFormatter,
|
|
4258
4892
|
createSequentialWorkflow,
|
|
4893
|
+
createSharedCache,
|
|
4894
|
+
createSharedConcurrencyController,
|
|
4895
|
+
createSharedRateLimiter,
|
|
4259
4896
|
createSqliteCheckpointer,
|
|
4260
4897
|
createStateAnnotation,
|
|
4261
4898
|
createSubgraph,
|
|
@@ -4286,6 +4923,7 @@ export {
|
|
|
4286
4923
|
isHumanRequestInterrupt,
|
|
4287
4924
|
isMemoryCheckpointer,
|
|
4288
4925
|
isTracingEnabled,
|
|
4926
|
+
loadPrompt,
|
|
4289
4927
|
map,
|
|
4290
4928
|
merge,
|
|
4291
4929
|
mergeState,
|
|
@@ -4294,8 +4932,10 @@ export {
|
|
|
4294
4932
|
presets,
|
|
4295
4933
|
production,
|
|
4296
4934
|
reduce,
|
|
4935
|
+
renderTemplate,
|
|
4297
4936
|
retry,
|
|
4298
4937
|
safeValidateSchemaDescriptions,
|
|
4938
|
+
sanitizeValue,
|
|
4299
4939
|
sendMessage,
|
|
4300
4940
|
sequential,
|
|
4301
4941
|
sequentialBuilder,
|
|
@@ -4311,9 +4951,14 @@ export {
|
|
|
4311
4951
|
validateTool,
|
|
4312
4952
|
validateToolMetadata,
|
|
4313
4953
|
validateToolName,
|
|
4954
|
+
withCache,
|
|
4955
|
+
withConcurrency,
|
|
4314
4956
|
withErrorHandler,
|
|
4957
|
+
withLogging,
|
|
4315
4958
|
withMetrics,
|
|
4959
|
+
withRateLimit,
|
|
4316
4960
|
withRetry,
|
|
4317
4961
|
withTimeout,
|
|
4318
|
-
withTracing
|
|
4962
|
+
withTracing,
|
|
4963
|
+
withValidation
|
|
4319
4964
|
};
|