@soulcraft/brainy 4.10.3 → 4.11.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
@@ -9,7 +9,7 @@
  * 4. HMAC Keys (fallback for backward compatibility)
  */
  import { HNSWNoun, HNSWVerb, HNSWNounWithMetadata, HNSWVerbWithMetadata, StatisticsData } from '../../coreTypes.js';
- import { BaseStorage } from '../baseStorage.js';
+ import { BaseStorage, StorageBatchConfig } from '../baseStorage.js';
  type HNSWNode = HNSWNoun;
  type Edge = HNSWVerb;
  /**
@@ -76,6 +76,21 @@ export declare class GcsStorage extends BaseStorage {
  };
  readOnly?: boolean;
  });
+ /**
+ * Get GCS-optimized batch configuration
+ *
+ * GCS has strict rate limits (~5000 writes/second per bucket) and benefits from:
+ * - Moderate batch sizes (50 items)
+ * - Sequential processing (not parallel)
+ * - Delays between batches (100ms)
+ *
+ * Note: Each entity write involves 2 operations (vector + metadata),
+ * so 800 ops/sec = ~400 entities/sec = ~2500 actual GCS writes/sec
+ *
+ * @returns GCS-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig(): StorageBatchConfig;
  /**
  * Initialize the storage adapter
  */
@@ -92,6 +92,32 @@ export class GcsStorage extends BaseStorage {
  prodLog.info('🚀 High-volume mode FORCED via BRAINY_FORCE_HIGH_VOLUME environment variable');
  }
  }
+ /**
+ * Get GCS-optimized batch configuration
+ *
+ * GCS has strict rate limits (~5000 writes/second per bucket) and benefits from:
+ * - Moderate batch sizes (50 items)
+ * - Sequential processing (not parallel)
+ * - Delays between batches (100ms)
+ *
+ * Note: Each entity write involves 2 operations (vector + metadata),
+ * so 800 ops/sec = ~400 entities/sec = ~2500 actual GCS writes/sec
+ *
+ * @returns GCS-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig() {
+ return {
+ maxBatchSize: 50,
+ batchDelayMs: 100,
+ maxConcurrent: 50,
+ supportsParallelWrites: false, // Sequential is safer for GCS rate limits
+ rateLimit: {
+ operationsPerSecond: 800, // Conservative estimate for entity operations
+ burstCapacity: 200
+ }
+ };
+ }
  /**
  * Initialize the storage adapter
  */
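
Each adapter now publishes its own batching profile through getBatchConfig(). The sketch below shows one way a caller might drive batched writes from that profile; it is illustrative only. The StorageBatchConfig shape is copied structurally from this release, while writeInBatches and saveItem are hypothetical helpers, not part of the package.

// Minimal structural copy of the StorageBatchConfig shape introduced in this release.
interface StorageBatchConfig {
  maxBatchSize: number;
  batchDelayMs: number;
  maxConcurrent: number;
  supportsParallelWrites: boolean;
  rateLimit: { operationsPerSecond: number; burstCapacity: number };
}

// Hypothetical driver loop: paces writes according to the adapter's declared profile.
async function writeInBatches<T>(
  adapter: { getBatchConfig(): StorageBatchConfig },
  items: T[],
  saveItem: (item: T) => Promise<void> // hypothetical per-item write callback
): Promise<void> {
  const cfg = adapter.getBatchConfig();
  for (let i = 0; i < items.length; i += cfg.maxBatchSize) {
    const batch = items.slice(i, i + cfg.maxBatchSize);
    if (cfg.supportsParallelWrites) {
      await Promise.all(batch.map((item) => saveItem(item))); // S3/R2/Memory-style profiles
    } else {
      for (const item of batch) {
        await saveItem(item); // GCS/OPFS-style profiles prefer sequential writes
      }
    }
    if (cfg.batchDelayMs > 0 && i + cfg.maxBatchSize < items.length) {
      await new Promise((resolve) => setTimeout(resolve, cfg.batchDelayMs)); // rate-limit breathing room
    }
  }
}
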
@@ -3,7 +3,7 @@
  * In-memory storage adapter for environments where persistent storage is not available or needed
  */
  import { HNSWNoun, HNSWVerb, HNSWNounWithMetadata, HNSWVerbWithMetadata, StatisticsData } from '../../coreTypes.js';
- import { BaseStorage } from '../baseStorage.js';
+ import { BaseStorage, StorageBatchConfig } from '../baseStorage.js';
  /**
  * In-memory storage adapter
  * Uses Maps to store data in memory
@@ -17,6 +17,19 @@ export declare class MemoryStorage extends BaseStorage {
  private get nounMetadata();
  private get verbMetadata();
  constructor();
+ /**
+ * Get Memory-optimized batch configuration
+ *
+ * Memory storage has no rate limits and can handle very high throughput:
+ * - Large batch sizes (1000 items)
+ * - No delays needed (0ms)
+ * - High concurrency (1000 operations)
+ * - Parallel processing maximizes throughput
+ *
+ * @returns Memory-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig(): StorageBatchConfig;
  /**
  * Initialize the storage adapter
  * Nothing to initialize for in-memory storage
@@ -32,6 +32,30 @@ export class MemoryStorage extends BaseStorage {
  // Even in-memory operations need serialization to prevent async race conditions
  this.hnswLocks = new Map();
  }
+ /**
+ * Get Memory-optimized batch configuration
+ *
+ * Memory storage has no rate limits and can handle very high throughput:
+ * - Large batch sizes (1000 items)
+ * - No delays needed (0ms)
+ * - High concurrency (1000 operations)
+ * - Parallel processing maximizes throughput
+ *
+ * @returns Memory-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig() {
+ return {
+ maxBatchSize: 1000,
+ batchDelayMs: 0,
+ maxConcurrent: 1000,
+ supportsParallelWrites: true, // Memory loves parallel operations
+ rateLimit: {
+ operationsPerSecond: 100000, // Virtually unlimited
+ burstCapacity: 100000
+ }
+ };
+ }
  /**
  * Initialize the storage adapter
  * Nothing to initialize for in-memory storage
@@ -3,7 +3,7 @@
  * Provides persistent storage for the vector database using the Origin Private File System API
  */
  import { HNSWNoun, HNSWVerb, HNSWNounWithMetadata, HNSWVerbWithMetadata, StatisticsData } from '../../coreTypes.js';
- import { BaseStorage } from '../baseStorage.js';
+ import { BaseStorage, StorageBatchConfig } from '../baseStorage.js';
  import '../../types/fileSystemTypes.js';
  type HNSWNode = HNSWNoun;
  /**
@@ -30,6 +30,19 @@ export declare class OPFSStorage extends BaseStorage {
  private activeLocks;
  private lockPrefix;
  constructor();
+ /**
+ * Get OPFS-optimized batch configuration
+ *
+ * OPFS (Origin Private File System) is browser-based storage with moderate performance:
+ * - Moderate batch sizes (100 items)
+ * - Small delays (10ms) for browser event loop
+ * - Limited concurrency (50 operations) - browser constraints
+ * - Sequential processing preferred for stability
+ *
+ * @returns OPFS-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig(): StorageBatchConfig;
  /**
  * Initialize the storage adapter
  */
@@ -51,6 +51,30 @@ export class OPFSStorage extends BaseStorage {
  'storage' in navigator &&
  'getDirectory' in navigator.storage;
  }
+ /**
+ * Get OPFS-optimized batch configuration
+ *
+ * OPFS (Origin Private File System) is browser-based storage with moderate performance:
+ * - Moderate batch sizes (100 items)
+ * - Small delays (10ms) for browser event loop
+ * - Limited concurrency (50 operations) - browser constraints
+ * - Sequential processing preferred for stability
+ *
+ * @returns OPFS-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig() {
+ return {
+ maxBatchSize: 100,
+ batchDelayMs: 10,
+ maxConcurrent: 50,
+ supportsParallelWrites: false, // Sequential safer in browser
+ rateLimit: {
+ operationsPerSecond: 1000,
+ burstCapacity: 500
+ }
+ };
+ }
  /**
  * Initialize the storage adapter
  */
@@ -12,7 +12,7 @@
  * Based on latest GCS and S3 implementations with R2-specific enhancements
  */
  import { HNSWNoun, HNSWVerb, HNSWNounWithMetadata, HNSWVerbWithMetadata, StatisticsData } from '../../coreTypes.js';
- import { BaseStorage } from '../baseStorage.js';
+ import { BaseStorage, StorageBatchConfig } from '../baseStorage.js';
  type HNSWNode = HNSWNoun;
  type Edge = HNSWVerb;
  /**
@@ -75,6 +75,23 @@ export declare class R2Storage extends BaseStorage {
  };
  readOnly?: boolean;
  });
+ /**
+ * Get R2-optimized batch configuration
+ *
+ * Cloudflare R2 has S3-compatible characteristics with some advantages:
+ * - Zero egress fees (can cache more aggressively)
+ * - Global edge network
+ * - Similar throughput to S3
+ *
+ * R2 benefits from the same configuration as S3:
+ * - Larger batch sizes (100 items)
+ * - Parallel processing
+ * - Short delays (50ms)
+ *
+ * @returns R2-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig(): StorageBatchConfig;
  /**
  * Initialize the storage adapter
  */
@@ -94,6 +94,34 @@ export class R2Storage extends BaseStorage {
  prodLog.info('🚀 R2: High-volume mode FORCED via environment variable');
  }
  }
+ /**
+ * Get R2-optimized batch configuration
+ *
+ * Cloudflare R2 has S3-compatible characteristics with some advantages:
+ * - Zero egress fees (can cache more aggressively)
+ * - Global edge network
+ * - Similar throughput to S3
+ *
+ * R2 benefits from the same configuration as S3:
+ * - Larger batch sizes (100 items)
+ * - Parallel processing
+ * - Short delays (50ms)
+ *
+ * @returns R2-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig() {
+ return {
+ maxBatchSize: 100,
+ batchDelayMs: 50,
+ maxConcurrent: 100,
+ supportsParallelWrites: true, // R2 handles parallel writes like S3
+ rateLimit: {
+ operationsPerSecond: 3500, // Similar to S3 throughput
+ burstCapacity: 1000
+ }
+ };
+ }
  /**
  * Initialize the storage adapter
  */
@@ -4,7 +4,7 @@
  * including Amazon S3, Cloudflare R2, and Google Cloud Storage
  */
  import { Change, HNSWNoun, HNSWVerb, HNSWNounWithMetadata, HNSWVerbWithMetadata, StatisticsData } from '../../coreTypes.js';
- import { BaseStorage } from '../baseStorage.js';
+ import { BaseStorage, StorageBatchConfig } from '../baseStorage.js';
  import { OperationConfig } from '../../utils/operationUtils.js';
  type HNSWNode = HNSWNoun;
  type Edge = HNSWVerb;
@@ -96,6 +96,20 @@ export declare class S3CompatibleStorage extends BaseStorage {
  };
  readOnly?: boolean;
  });
+ /**
+ * Get S3-optimized batch configuration
+ *
+ * S3 has higher throughput than GCS and handles parallel writes efficiently:
+ * - Larger batch sizes (100 items)
+ * - Parallel processing supported
+ * - Shorter delays between batches (50ms)
+ *
+ * S3 can handle ~3500 operations/second per bucket with good performance
+ *
+ * @returns S3-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig(): StorageBatchConfig;
  /**
  * Initialize the storage adapter
  */
@@ -114,6 +114,31 @@ export class S3CompatibleStorage extends BaseStorage {
  this.nounCacheManager = new CacheManager(options.cacheConfig);
  this.verbCacheManager = new CacheManager(options.cacheConfig);
  }
+ /**
+ * Get S3-optimized batch configuration
+ *
+ * S3 has higher throughput than GCS and handles parallel writes efficiently:
+ * - Larger batch sizes (100 items)
+ * - Parallel processing supported
+ * - Shorter delays between batches (50ms)
+ *
+ * S3 can handle ~3500 operations/second per bucket with good performance
+ *
+ * @returns S3-optimized batch configuration
+ * @since v4.11.0
+ */
+ getBatchConfig() {
+ return {
+ maxBatchSize: 100,
+ batchDelayMs: 50,
+ maxConcurrent: 100,
+ supportsParallelWrites: true, // S3 handles parallel writes efficiently
+ rateLimit: {
+ operationsPerSecond: 3500, // S3 is more permissive than GCS
+ burstCapacity: 1000
+ }
+ };
+ }
  /**
  * Initialize the storage adapter
  */
@@ -5,6 +5,30 @@
  import { GraphAdjacencyIndex } from '../graph/graphAdjacencyIndex.js';
  import { GraphVerb, HNSWNoun, HNSWVerb, NounMetadata, VerbMetadata, HNSWNounWithMetadata, HNSWVerbWithMetadata, StatisticsData } from '../coreTypes.js';
  import { BaseStorageAdapter } from './adapters/baseStorageAdapter.js';
+ /**
+ * Storage adapter batch configuration profile
+ * Each storage adapter declares its optimal batch behavior for rate limiting
+ * and performance optimization
+ *
+ * @since v4.11.0
+ */
+ export interface StorageBatchConfig {
+ /** Maximum items per batch */
+ maxBatchSize: number;
+ /** Delay between batches in milliseconds (for rate limiting) */
+ batchDelayMs: number;
+ /** Maximum concurrent operations this storage can handle */
+ maxConcurrent: number;
+ /** Whether storage can handle parallel writes efficiently */
+ supportsParallelWrites: boolean;
+ /** Rate limit characteristics of this storage adapter */
+ rateLimit: {
+ /** Approximate operations per second this storage can handle */
+ operationsPerSecond: number;
+ /** Maximum burst capacity before throttling occurs */
+ burstCapacity: number;
+ };
+ }
  export declare const NOUNS_METADATA_DIR = "entities/nouns/metadata";
  export declare const VERBS_METADATA_DIR = "entities/verbs/metadata";
  export declare const SYSTEM_DIR = "_system";
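
The rateLimit block is declarative: operationsPerSecond and burstCapacity describe what the backend can absorb rather than enforcing a limit themselves. A caller could turn those two numbers into an actual throttle with a simple token bucket, sketched below; the TokenBucket class is illustrative and not part of the package.

// Illustrative token bucket built from the rateLimit fields declared above.
class TokenBucket {
  private tokens: number;
  private lastRefill = Date.now();

  constructor(
    private readonly operationsPerSecond: number,
    private readonly burstCapacity: number
  ) {
    this.tokens = burstCapacity; // start with full burst allowance
  }

  /** Resolves once a token is available, refilling at operationsPerSecond. */
  async take(): Promise<void> {
    for (;;) {
      const now = Date.now();
      const elapsedSec = (now - this.lastRefill) / 1000;
      this.tokens = Math.min(this.burstCapacity, this.tokens + elapsedSec * this.operationsPerSecond);
      this.lastRefill = now;
      if (this.tokens >= 1) {
        this.tokens -= 1;
        return;
      }
      // Wait roughly long enough for one token to accrue before checking again.
      await new Promise((resolve) => setTimeout(resolve, Math.ceil(1000 / this.operationsPerSecond)));
    }
  }
}

// e.g. const bucket = new TokenBucket(cfg.rateLimit.operationsPerSecond, cfg.rateLimit.burstCapacity);
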
@@ -27,33 +27,40 @@ export declare class AdaptiveBackpressure {
  private metrics;
  private config;
  private patterns;
- private circuitState;
- private circuitOpenTime;
- private circuitFailures;
- private circuitThreshold;
- private circuitTimeout;
+ private circuits;
  private operationTimes;
  private completedOps;
  private errorOps;
  private lastAdaptation;
  /**
  * Request permission to proceed with an operation
+ * @param operationId Unique ID for this operation
+ * @param priority Priority level (higher = more important)
+ * @param operationType Type of operation (read or write) for circuit breaker isolation
  */
- requestPermission(operationId: string, priority?: number): Promise<void>;
+ requestPermission(operationId: string, priority?: number, operationType?: 'read' | 'write'): Promise<void>;
  /**
  * Release permission after operation completes
+ * @param operationId Unique ID for this operation
+ * @param success Whether the operation succeeded
+ * @param operationType Type of operation (read or write) for circuit breaker tracking
  */
- releasePermission(operationId: string, success?: boolean): void;
+ releasePermission(operationId: string, success?: boolean, operationType?: 'read' | 'write'): void;
  /**
- * Check if circuit breaker is open
+ * Check if circuit breaker is open for a specific operation type
+ * @param circuit The circuit to check (read or write)
  */
  private isCircuitOpen;
  /**
- * Open the circuit breaker
+ * Open the circuit breaker for a specific operation type
+ * @param circuit The circuit to open (read or write)
+ * @param operationType The operation type name for logging
  */
  private openCircuit;
  /**
- * Close the circuit breaker
+ * Close the circuit breaker for a specific operation type
+ * @param circuit The circuit to close (read or write)
+ * @param operationType The operation type name for logging
  */
  private closeCircuit;
  /**
@@ -32,12 +32,24 @@ export class AdaptiveBackpressure {
  };
  // Historical patterns for learning
  this.patterns = [];
- // Circuit breaker state
- this.circuitState = 'closed';
- this.circuitOpenTime = 0;
- this.circuitFailures = 0;
- this.circuitThreshold = 5;
- this.circuitTimeout = 30000; // 30 seconds
+ // Separate circuit breakers for read vs write operations
+ // This allows reads to continue even when writes are throttled
+ this.circuits = {
+ read: {
+ state: 'closed',
+ failures: 0,
+ openTime: 0,
+ threshold: 10, // More lenient for reads
+ timeout: 30000
+ },
+ write: {
+ state: 'closed',
+ failures: 0,
+ openTime: 0,
+ threshold: 5, // Stricter for writes
+ timeout: 30000
+ }
+ };
  // Performance tracking
  this.operationTimes = new Map();
  this.completedOps = [];
@@ -46,11 +58,22 @@ export class AdaptiveBackpressure {
  }
  /**
  * Request permission to proceed with an operation
+ * @param operationId Unique ID for this operation
+ * @param priority Priority level (higher = more important)
+ * @param operationType Type of operation (read or write) for circuit breaker isolation
  */
- async requestPermission(operationId, priority = 1) {
- // Check circuit breaker
- if (this.isCircuitOpen()) {
- throw new Error('Circuit breaker is open - system is recovering');
+ async requestPermission(operationId, priority = 1, operationType = 'write') {
+ const circuit = this.circuits[operationType];
+ // Check circuit breaker for this operation type
+ if (this.isCircuitOpen(circuit)) {
+ // KEY: Allow reads even if write circuit is open
+ if (operationType === 'read' && this.circuits.write.state === 'open') {
+ // Write circuit is open but read circuit is fine - allow read
+ this.activeOperations.add(operationId);
+ this.operationTimes.set(operationId, Date.now());
+ return;
+ }
+ throw new Error(`Circuit breaker is open for ${operationType} operations - system is recovering`);
  }
  // Fast path for low load
  if (this.activeOperations.size < this.maxConcurrent * 0.5 && this.queue.length === 0) {
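
With the read/write split, a failure storm on writes no longer blocks reads. Below is a hedged sketch of how calling code might pair requestPermission and releasePermission with an explicit operation type; the withBackpressure wrapper and the use of Node's randomUUID for operation IDs are assumptions for illustration, not part of the package.

// Sketch of a wrapper around the backpressure API added in this hunk.
// `backpressure` is assumed to be an AdaptiveBackpressure-like instance; randomUUID
// is just one way to produce the unique operationId the API expects.
import { randomUUID } from 'node:crypto';

async function withBackpressure<T>(
  backpressure: {
    requestPermission(id: string, priority?: number, type?: 'read' | 'write'): Promise<void>;
    releasePermission(id: string, success?: boolean, type?: 'read' | 'write'): void;
  },
  operationType: 'read' | 'write',
  run: () => Promise<T>
): Promise<T> {
  const operationId = randomUUID();
  await backpressure.requestPermission(operationId, 1, operationType);
  try {
    const result = await run();
    backpressure.releasePermission(operationId, true, operationType);
    return result;
  } catch (err) {
    // A failure here only counts against the circuit for this operation type.
    backpressure.releasePermission(operationId, false, operationType);
    throw err;
  }
}
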
@@ -84,8 +107,11 @@
  }
  /**
  * Release permission after operation completes
+ * @param operationId Unique ID for this operation
+ * @param success Whether the operation succeeded
+ * @param operationType Type of operation (read or write) for circuit breaker tracking
  */
- releasePermission(operationId, success = true) {
+ releasePermission(operationId, success = true, operationType = 'write') {
  // Remove from active operations
  this.activeOperations.delete(operationId);
  // Track completion time
@@ -99,19 +125,20 @@
  this.completedOps = this.completedOps.slice(-500);
  }
  }
- // Track errors for circuit breaker
+ // Track errors for circuit breaker per operation type
+ const circuit = this.circuits[operationType];
  if (!success) {
  this.errorOps++;
- this.circuitFailures++;
- // Check if we should open circuit
- if (this.circuitFailures >= this.circuitThreshold) {
- this.openCircuit();
+ circuit.failures++;
+ // Check if we should open circuit for this operation type
+ if (circuit.failures >= circuit.threshold) {
+ this.openCircuit(circuit, operationType);
  }
  }
  else {
  // Reset circuit failures on success
- if (this.circuitState === 'half-open') {
- this.closeCircuit();
+ if (circuit.state === 'half-open') {
+ this.closeCircuit(circuit, operationType);
  }
  }
  // Process queue if there are waiting operations
@@ -129,13 +156,14 @@
  this.adaptIfNeeded();
  }
  /**
- * Check if circuit breaker is open
+ * Check if circuit breaker is open for a specific operation type
+ * @param circuit The circuit to check (read or write)
  */
- isCircuitOpen() {
- if (this.circuitState === 'open') {
+ isCircuitOpen(circuit) {
+ if (circuit.state === 'open') {
  // Check if timeout has passed
- if (Date.now() - this.circuitOpenTime > this.circuitTimeout) {
- this.circuitState = 'half-open';
+ if (Date.now() - circuit.openTime > circuit.timeout) {
+ circuit.state = 'half-open';
  this.logger.info('Circuit breaker entering half-open state');
  return false;
  }
@@ -144,26 +172,34 @@
  return false;
  }
  /**
- * Open the circuit breaker
+ * Open the circuit breaker for a specific operation type
+ * @param circuit The circuit to open (read or write)
+ * @param operationType The operation type name for logging
  */
- openCircuit() {
- if (this.circuitState !== 'open') {
- this.circuitState = 'open';
- this.circuitOpenTime = Date.now();
- this.logger.warn('Circuit breaker opened due to high error rate');
- // Reduce load immediately
- this.maxConcurrent = Math.max(10, Math.floor(this.maxConcurrent * 0.3));
+ openCircuit(circuit, operationType) {
+ if (circuit.state !== 'open') {
+ circuit.state = 'open';
+ circuit.openTime = Date.now();
+ this.logger.warn(`Circuit breaker opened for ${operationType} operations due to high error rate`);
+ // Reduce load immediately for write operations
+ if (operationType === 'write') {
+ this.maxConcurrent = Math.max(10, Math.floor(this.maxConcurrent * 0.3));
+ }
  }
  }
  /**
- * Close the circuit breaker
+ * Close the circuit breaker for a specific operation type
+ * @param circuit The circuit to close (read or write)
+ * @param operationType The operation type name for logging
  */
- closeCircuit() {
- this.circuitState = 'closed';
- this.circuitFailures = 0;
- this.logger.info('Circuit breaker closed - system recovered');
- // Gradually increase capacity
- this.maxConcurrent = Math.min(500, Math.floor(this.maxConcurrent * 1.5));
+ closeCircuit(circuit, operationType) {
+ circuit.state = 'closed';
+ circuit.failures = 0;
+ this.logger.info(`Circuit breaker closed for ${operationType} - system recovered`);
+ // Gradually increase capacity for write operations
+ if (operationType === 'write') {
+ this.maxConcurrent = Math.min(500, Math.floor(this.maxConcurrent * 1.5));
+ }
  }
  /**
  * Adapt configuration based on metrics
@@ -262,13 +298,8 @@ export class AdaptiveBackpressure {
  // Allow queue depth to be 10 seconds worth of throughput
  this.config.maxQueueDepth = Math.max(100, Math.min(10000, Math.floor(this.metrics.throughput * 10)));
  }
- // Adapt circuit breaker threshold based on error patterns
- if (this.metrics.errorRate < 0.01 && this.circuitThreshold > 5) {
- this.circuitThreshold = Math.max(5, this.circuitThreshold - 1);
- }
- else if (this.metrics.errorRate > 0.05 && this.circuitThreshold < 20) {
- this.circuitThreshold = Math.min(20, this.circuitThreshold + 1);
- }
+ // Note: Circuit breaker thresholds are now fixed per operation type (read/write)
+ // and do not adapt dynamically to maintain predictable behavior
  }
  /**
  * Predict future load based on patterns
@@ -303,10 +334,24 @@ export class AdaptiveBackpressure {
  * Get current configuration and metrics
  */
  getStatus() {
+ // Combined circuit status for backward compatibility
+ let circuitStatus = 'closed';
+ if (this.circuits.read.state === 'open' && this.circuits.write.state === 'open') {
+ circuitStatus = 'open';
+ }
+ else if (this.circuits.write.state === 'open') {
+ circuitStatus = 'write-circuit-open';
+ }
+ else if (this.circuits.read.state === 'open') {
+ circuitStatus = 'read-circuit-open';
+ }
+ else if (this.circuits.read.state === 'half-open' || this.circuits.write.state === 'half-open') {
+ circuitStatus = 'half-open';
+ }
  return {
  config: { ...this.config },
  metrics: { ...this.metrics },
- circuit: this.circuitState,
+ circuit: circuitStatus,
  maxConcurrent: this.maxConcurrent,
  activeOps: this.activeOperations.size,
  queueLength: this.queue.length
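
getStatus() collapses the two circuits into a single circuit string for backward compatibility, so legacy callers still see 'open', 'closed', or 'half-open', while newer callers can react to the partial states. An illustrative consumer (the pauseWrites and resumeWrites callbacks are hypothetical):

// Illustrative monitor that pauses only writes when the write circuit trips.
function reactToCircuit(
  status: { circuit: string },
  pauseWrites: () => void,   // hypothetical callbacks supplied by the caller
  resumeWrites: () => void
): void {
  switch (status.circuit) {
    case 'open':               // both circuits tripped - back off entirely
    case 'write-circuit-open': // writes throttled, reads still allowed
      pauseWrites();
      break;
    case 'closed':
    case 'half-open':
    case 'read-circuit-open':  // unusual, but writes are still permitted
      resumeWrites();
      break;
  }
}
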
@@ -322,8 +367,13 @@ export class AdaptiveBackpressure {
  this.completedOps = [];
  this.errorOps = 0;
  this.patterns = [];
- this.circuitState = 'closed';
- this.circuitFailures = 0;
+ // Reset both circuit breakers
+ this.circuits.read.state = 'closed';
+ this.circuits.read.failures = 0;
+ this.circuits.read.openTime = 0;
+ this.circuits.write.state = 'closed';
+ this.circuits.write.failures = 0;
+ this.circuits.write.openTime = 0;
  this.maxConcurrent = 100;
  this.logger.info('Backpressure system reset to defaults');
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@soulcraft/brainy",
- "version": "4.10.3",
+ "version": "4.11.0",
  "description": "Universal Knowledge Protocol™ - World's first Triple Intelligence database unifying vector, graph, and document search in one API. 31 nouns × 40 verbs for infinite expressiveness.",
  "main": "dist/index.js",
  "module": "dist/index.js",