@soulcraft/brainy 0.62.3 → 1.0.0-rc.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/README.md +3 -3
  2. package/bin/brainy.js +903 -1153
  3. package/dist/augmentationPipeline.d.ts +60 -0
  4. package/dist/augmentationPipeline.js +94 -0
  5. package/dist/augmentations/{cortexSense.d.ts → neuralImport.d.ts} +14 -11
  6. package/dist/augmentations/{cortexSense.js → neuralImport.js} +14 -11
  7. package/dist/brainyData.d.ts +199 -18
  8. package/dist/brainyData.js +601 -18
  9. package/dist/chat/BrainyChat.d.ts +113 -0
  10. package/dist/chat/BrainyChat.js +368 -0
  11. package/dist/chat/ChatCLI.d.ts +61 -0
  12. package/dist/chat/ChatCLI.js +351 -0
  13. package/dist/connectors/interfaces/IConnector.d.ts +3 -3
  14. package/dist/connectors/interfaces/IConnector.js +1 -1
  15. package/dist/cortex/neuralImport.js +1 -3
  16. package/dist/index.d.ts +4 -6
  17. package/dist/index.js +6 -7
  18. package/dist/pipeline.d.ts +15 -271
  19. package/dist/pipeline.js +25 -586
  20. package/dist/shared/default-augmentations.d.ts +3 -3
  21. package/dist/shared/default-augmentations.js +10 -10
  22. package/package.json +3 -1
  23. package/dist/chat/brainyChat.d.ts +0 -42
  24. package/dist/chat/brainyChat.js +0 -340
  25. package/dist/cortex/cliWrapper.d.ts +0 -32
  26. package/dist/cortex/cliWrapper.js +0 -209
  27. package/dist/cortex/cortex-legacy.d.ts +0 -264
  28. package/dist/cortex/cortex-legacy.js +0 -2463
  29. package/dist/cortex/cortex.d.ts +0 -264
  30. package/dist/cortex/cortex.js +0 -2463
  31. package/dist/cortex/serviceIntegration.d.ts +0 -156
  32. package/dist/cortex/serviceIntegration.js +0 -384
  33. package/dist/sequentialPipeline.d.ts +0 -113
  34. package/dist/sequentialPipeline.js +0 -417
  35. package/dist/utils/modelLoader.d.ts +0 -12
  36. package/dist/utils/modelLoader.js +0 -88
@@ -1,156 +0,0 @@
1
- /**
2
- * Service Integration Helpers - Seamless Cortex Integration for Existing Services
3
- *
4
- * Atomic Age Service Management Protocol
5
- */
6
- import { BrainyData } from '../brainyData.js';
7
- import { Cortex } from './cortex-legacy.js';
8
- export interface ServiceConfig {
9
- name: string;
10
- version?: string;
11
- environment?: 'development' | 'production' | 'staging' | 'test';
12
- storage?: {
13
- type: 'filesystem' | 's3' | 'gcs' | 'memory';
14
- bucket?: string;
15
- path?: string;
16
- credentials?: any;
17
- };
18
- features?: {
19
- chat?: boolean;
20
- augmentations?: string[];
21
- encryption?: boolean;
22
- };
23
- migration?: {
24
- strategy: 'immediate' | 'gradual';
25
- rollback?: boolean;
26
- };
27
- }
28
- export interface BrainyOptions {
29
- storage?: any;
30
- augmentations?: any[];
31
- encryption?: boolean;
32
- caching?: boolean;
33
- }
34
- export interface MigrationPlan {
35
- fromStorage: string;
36
- toStorage: string;
37
- strategy: 'immediate' | 'gradual';
38
- rollback?: boolean;
39
- validation?: boolean;
40
- backup?: boolean;
41
- }
42
- export interface ServiceInstance {
43
- id: string;
44
- name: string;
45
- version: string;
46
- status: 'healthy' | 'degraded' | 'unhealthy';
47
- lastSeen: Date;
48
- config: ServiceConfig;
49
- }
50
- export interface HealthReport {
51
- service: ServiceInstance;
52
- checks: {
53
- storage: boolean;
54
- search: boolean;
55
- embedding: boolean;
56
- config: boolean;
57
- };
58
- performance: {
59
- responseTime: number;
60
- memoryUsage: number;
61
- storageSize: number;
62
- };
63
- issues: string[];
64
- }
65
- export interface MigrationReport {
66
- plan: MigrationPlan;
67
- estimated: {
68
- duration: number;
69
- downtime: number;
70
- dataSize: number;
71
- complexity: 'low' | 'medium' | 'high';
72
- };
73
- risks: string[];
74
- prerequisites: string[];
75
- steps: string[];
76
- }
77
- /**
78
- * Service Integration Helper Class
79
- */
80
- export declare class CortexServiceIntegration {
81
- /**
82
- * Initialize Cortex for a service with automatic configuration
83
- */
84
- static initializeForService(serviceName: string, options?: Partial<ServiceConfig>): Promise<{
85
- cortex: Cortex;
86
- config: ServiceConfig;
87
- }>;
88
- /**
89
- * Create BrainyData instance from Cortex configuration
90
- */
91
- static createBrainyFromCortex(cortex: Cortex, serviceName?: string): Promise<BrainyData>;
92
- /**
93
- * Auto-discover Brainy instances in the current environment
94
- */
95
- static discoverBrainyInstances(): Promise<ServiceInstance[]>;
96
- /**
97
- * Perform health check on all discovered services
98
- */
99
- static healthCheckAll(): Promise<HealthReport[]>;
100
- /**
101
- * Plan migration for a service
102
- */
103
- static planMigration(serviceName: string, plan: Partial<MigrationPlan>): Promise<MigrationReport>;
104
- /**
105
- * Execute migration for all services
106
- */
107
- static migrateAll(plan: MigrationPlan): Promise<void>;
108
- /**
109
- * Generate Brainy storage options from Cortex config
110
- */
111
- private static getBrainyStorageOptions;
112
- /**
113
- * Load service configuration
114
- */
115
- private static loadServiceConfig;
116
- /**
117
- * Create new service configuration
118
- */
119
- private static createServiceConfig;
120
- /**
121
- * Load service instance information
122
- */
123
- private static loadServiceInstance;
124
- /**
125
- * Perform health check on a service instance
126
- */
127
- private static performHealthCheck;
128
- /**
129
- * Estimate data size for migration planning
130
- */
131
- private static estimateDataSize;
132
- /**
133
- * Assess migration complexity
134
- */
135
- private static assessMigrationComplexity;
136
- /**
137
- * Estimate migration duration
138
- */
139
- private static estimateDuration;
140
- /**
141
- * Estimate downtime for migration strategy
142
- */
143
- private static estimateDowntime;
144
- /**
145
- * Identify migration risks
146
- */
147
- private static identifyRisks;
148
- /**
149
- * Get migration prerequisites
150
- */
151
- private static getPrerequisites;
152
- /**
153
- * Generate migration steps
154
- */
155
- private static generateMigrationSteps;
156
- }
@@ -1,384 +0,0 @@
1
- /**
2
- * Service Integration Helpers - Seamless Cortex Integration for Existing Services
3
- *
4
- * Atomic Age Service Management Protocol
5
- */
6
- import { BrainyData } from '../brainyData.js';
7
- import { Cortex } from './cortex-legacy.js';
8
- import * as fs from '../universal/fs.js';
9
- import * as path from '../universal/path.js';
10
- /**
11
- * Service Integration Helper Class
12
- */
13
- export class CortexServiceIntegration {
14
- /**
15
- * Initialize Cortex for a service with automatic configuration
16
- */
17
- static async initializeForService(serviceName, options) {
18
- const cortex = new Cortex();
19
- // Try to load existing configuration
20
- let config;
21
- try {
22
- config = await this.loadServiceConfig(serviceName);
23
- }
24
- catch {
25
- // Create new configuration
26
- config = await this.createServiceConfig(serviceName, options);
27
- }
28
- await cortex.init({
29
- storage: config.storage?.type,
30
- encryption: config.features?.encryption
31
- });
32
- return { cortex, config };
33
- }
34
- /**
35
- * Create BrainyData instance from Cortex configuration
36
- */
37
- static async createBrainyFromCortex(cortex, serviceName) {
38
- // Get storage configuration from Cortex
39
- const storageType = await cortex.configGet('STORAGE_TYPE') || 'filesystem';
40
- const encryptionEnabled = await cortex.configGet('ENCRYPTION_ENABLED') === 'true';
41
- const options = {
42
- storage: await this.getBrainyStorageOptions(cortex, storageType),
43
- encryption: encryptionEnabled,
44
- caching: true
45
- };
46
- // Load augmentations if specified
47
- if (serviceName) {
48
- const serviceConfig = await this.loadServiceConfig(serviceName);
49
- if (serviceConfig.features?.augmentations) {
50
- options.augmentations = serviceConfig.features.augmentations;
51
- }
52
- }
53
- const brainy = new BrainyData(options);
54
- await brainy.init();
55
- return brainy;
56
- }
57
- /**
58
- * Auto-discover Brainy instances in the current environment
59
- */
60
- static async discoverBrainyInstances() {
61
- const instances = [];
62
- // Look for .cortex directories
63
- const searchPaths = [
64
- process.cwd(),
65
- path.join(process.cwd(), '..'),
66
- '/opt/services',
67
- '/var/lib/services'
68
- ];
69
- for (const searchPath of searchPaths) {
70
- try {
71
- const entries = await fs.readdir(searchPath, { withFileTypes: true });
72
- for (const entry of entries) {
73
- if (entry.isDirectory()) {
74
- const cortexPath = path.join(searchPath, entry.name, '.cortex');
75
- try {
76
- await fs.access(cortexPath);
77
- const instance = await this.loadServiceInstance(path.join(searchPath, entry.name));
78
- if (instance)
79
- instances.push(instance);
80
- }
81
- catch {
82
- // No Cortex in this directory
83
- }
84
- }
85
- }
86
- }
87
- catch {
88
- // Directory doesn't exist or can't be read
89
- }
90
- }
91
- return instances;
92
- }
93
- /**
94
- * Perform health check on all discovered services
95
- */
96
- static async healthCheckAll() {
97
- const instances = await this.discoverBrainyInstances();
98
- const reports = [];
99
- for (const instance of instances) {
100
- try {
101
- const report = await this.performHealthCheck(instance);
102
- reports.push(report);
103
- }
104
- catch (error) {
105
- reports.push({
106
- service: instance,
107
- checks: { storage: false, search: false, embedding: false, config: false },
108
- performance: { responseTime: -1, memoryUsage: -1, storageSize: -1 },
109
- issues: [`Health check failed: ${error}`]
110
- });
111
- }
112
- }
113
- return reports;
114
- }
115
- /**
116
- * Plan migration for a service
117
- */
118
- static async planMigration(serviceName, plan) {
119
- const config = await this.loadServiceConfig(serviceName);
120
- const fullPlan = {
121
- fromStorage: config.storage?.type || 'filesystem',
122
- toStorage: plan.toStorage || 's3',
123
- strategy: plan.strategy || 'immediate',
124
- rollback: plan.rollback ?? true,
125
- validation: plan.validation ?? true,
126
- backup: plan.backup ?? true
127
- };
128
- // Estimate migration complexity
129
- const dataSize = await this.estimateDataSize(serviceName);
130
- const complexity = this.assessMigrationComplexity(fullPlan, dataSize);
131
- return {
132
- plan: fullPlan,
133
- estimated: {
134
- duration: this.estimateDuration(complexity, dataSize),
135
- downtime: this.estimateDowntime(fullPlan.strategy),
136
- dataSize,
137
- complexity
138
- },
139
- risks: this.identifyRisks(fullPlan),
140
- prerequisites: this.getPrerequisites(fullPlan),
141
- steps: this.generateMigrationSteps(fullPlan)
142
- };
143
- }
144
- /**
145
- * Execute migration for all services
146
- */
147
- static async migrateAll(plan) {
148
- const instances = await this.discoverBrainyInstances();
149
- for (const instance of instances) {
150
- const cortex = new Cortex();
151
- // Set working directory to service directory
152
- process.chdir(path.dirname(instance.config.name));
153
- await cortex.migrate({
154
- to: plan.toStorage,
155
- strategy: plan.strategy,
156
- bucket: plan.toStorage === 's3' ? 'default-bucket' : undefined
157
- });
158
- }
159
- }
160
- /**
161
- * Generate Brainy storage options from Cortex config
162
- */
163
- static async getBrainyStorageOptions(cortex, storageType) {
164
- switch (storageType) {
165
- case 'filesystem':
166
- return { forceFileSystemStorage: true };
167
- case 's3':
168
- return {
169
- forceS3CompatibleStorage: true,
170
- s3Config: {
171
- bucket: await cortex.configGet('S3_BUCKET'),
172
- accessKeyId: await cortex.configGet('AWS_ACCESS_KEY_ID'),
173
- secretAccessKey: await cortex.configGet('AWS_SECRET_ACCESS_KEY'),
174
- region: await cortex.configGet('AWS_REGION') || 'us-east-1'
175
- }
176
- };
177
- case 'r2':
178
- return {
179
- forceS3CompatibleStorage: true,
180
- s3Config: {
181
- bucket: await cortex.configGet('CLOUDFLARE_R2_BUCKET'),
182
- accessKeyId: await cortex.configGet('AWS_ACCESS_KEY_ID'), // R2 uses AWS-compatible keys
183
- secretAccessKey: await cortex.configGet('AWS_SECRET_ACCESS_KEY'),
184
- endpoint: await cortex.configGet('CLOUDFLARE_R2_ENDPOINT') ||
185
- `https://${await cortex.configGet('CLOUDFLARE_R2_ACCOUNT_ID')}.r2.cloudflarestorage.com`,
186
- region: 'auto' // R2 uses 'auto' as region
187
- }
188
- };
189
- case 'gcs':
190
- return {
191
- forceS3CompatibleStorage: true,
192
- s3Config: {
193
- bucket: await cortex.configGet('GCS_BUCKET'),
194
- endpoint: 'https://storage.googleapis.com',
195
- // GCS credentials would be configured here
196
- }
197
- };
198
- default:
199
- return { forceMemoryStorage: true };
200
- }
201
- }
202
- /**
203
- * Load service configuration
204
- */
205
- static async loadServiceConfig(serviceName) {
206
- const configPath = path.join(process.cwd(), '.cortex', 'service.json');
207
- const data = await fs.readFile(configPath, 'utf8');
208
- return JSON.parse(data);
209
- }
210
- /**
211
- * Create new service configuration
212
- */
213
- static async createServiceConfig(serviceName, options) {
214
- const config = {
215
- name: serviceName,
216
- version: '1.0.0',
217
- environment: 'development',
218
- storage: {
219
- type: 'filesystem',
220
- path: './brainy_data'
221
- },
222
- features: {
223
- chat: true,
224
- encryption: true,
225
- augmentations: []
226
- },
227
- ...options
228
- };
229
- // Save configuration
230
- const configDir = path.join(process.cwd(), '.cortex');
231
- await fs.mkdir(configDir, { recursive: true });
232
- await fs.writeFile(path.join(configDir, 'service.json'), JSON.stringify(config, null, 2));
233
- return config;
234
- }
235
- /**
236
- * Load service instance information
237
- */
238
- static async loadServiceInstance(servicePath) {
239
- try {
240
- const configPath = path.join(servicePath, '.cortex', 'service.json');
241
- const config = JSON.parse(await fs.readFile(configPath, 'utf8'));
242
- return {
243
- id: path.basename(servicePath),
244
- name: config.name,
245
- version: config.version || '1.0.0',
246
- status: 'healthy', // Would be determined by actual health check
247
- lastSeen: new Date(),
248
- config
249
- };
250
- }
251
- catch {
252
- return null;
253
- }
254
- }
255
- /**
256
- * Perform health check on a service instance
257
- */
258
- static async performHealthCheck(instance) {
259
- // Simulate health check - in real implementation, this would:
260
- // 1. Connect to the service
261
- // 2. Test storage connectivity
262
- // 3. Verify search functionality
263
- // 4. Check embedding model availability
264
- // 5. Measure performance metrics
265
- return {
266
- service: instance,
267
- checks: {
268
- storage: true,
269
- search: true,
270
- embedding: true,
271
- config: true
272
- },
273
- performance: {
274
- responseTime: Math.random() * 100 + 50, // 50-150ms
275
- memoryUsage: Math.random() * 512 + 256, // 256-768MB
276
- storageSize: Math.random() * 1024 + 100 // 100-1124MB
277
- },
278
- issues: []
279
- };
280
- }
281
- /**
282
- * Estimate data size for migration planning
283
- */
284
- static async estimateDataSize(serviceName) {
285
- // Simulate data size estimation
286
- return Math.floor(Math.random() * 1000 + 100); // 100-1100MB
287
- }
288
- /**
289
- * Assess migration complexity
290
- */
291
- static assessMigrationComplexity(plan, dataSize) {
292
- if (dataSize > 5000 || plan.fromStorage !== plan.toStorage)
293
- return 'high';
294
- if (dataSize > 1000)
295
- return 'medium';
296
- return 'low';
297
- }
298
- /**
299
- * Estimate migration duration
300
- */
301
- static estimateDuration(complexity, dataSize) {
302
- const baseTime = dataSize / 100; // 1 minute per 100MB
303
- const multiplier = complexity === 'high' ? 3 : complexity === 'medium' ? 2 : 1;
304
- return Math.ceil(baseTime * multiplier);
305
- }
306
- /**
307
- * Estimate downtime for migration strategy
308
- */
309
- static estimateDowntime(strategy) {
310
- switch (strategy) {
311
- case 'immediate': return 60; // 1 minute
312
- case 'gradual': return 10; // 10 seconds
313
- default: return 30;
314
- }
315
- }
316
- /**
317
- * Identify migration risks
318
- */
319
- static identifyRisks(plan) {
320
- const risks = [];
321
- if (plan.fromStorage !== plan.toStorage) {
322
- risks.push('Cross-platform data compatibility');
323
- }
324
- if (plan.strategy === 'immediate') {
325
- risks.push('Service downtime during migration');
326
- }
327
- if (!plan.backup) {
328
- risks.push('Data loss if migration fails');
329
- }
330
- return risks;
331
- }
332
- /**
333
- * Get migration prerequisites
334
- */
335
- static getPrerequisites(plan) {
336
- const prereqs = [];
337
- if (plan.toStorage === 's3') {
338
- prereqs.push('AWS credentials configured');
339
- prereqs.push('S3 bucket created and accessible');
340
- }
341
- if (plan.toStorage === 'r2') {
342
- prereqs.push('Cloudflare R2 API token configured');
343
- prereqs.push('R2 bucket created and accessible');
344
- prereqs.push('CLOUDFLARE_R2_ACCOUNT_ID environment variable set');
345
- }
346
- if (plan.toStorage === 'gcs') {
347
- prereqs.push('GCP service account configured');
348
- prereqs.push('GCS bucket created and accessible');
349
- }
350
- if (plan.backup) {
351
- prereqs.push('Sufficient storage space for backup');
352
- }
353
- return prereqs;
354
- }
355
- /**
356
- * Generate migration steps
357
- */
358
- static generateMigrationSteps(plan) {
359
- const steps = [];
360
- if (plan.backup) {
361
- steps.push('Create backup of current data');
362
- }
363
- steps.push(`Initialize ${plan.toStorage} storage`);
364
- steps.push('Validate connectivity to target storage');
365
- if (plan.strategy === 'gradual') {
366
- steps.push('Begin gradual data migration');
367
- steps.push('Monitor migration progress');
368
- steps.push('Switch traffic to new storage');
369
- }
370
- else {
371
- steps.push('Stop service');
372
- steps.push('Migrate all data');
373
- steps.push('Update configuration');
374
- steps.push('Start service with new storage');
375
- }
376
- if (plan.validation) {
377
- steps.push('Validate data integrity');
378
- steps.push('Run health checks');
379
- }
380
- steps.push('Clean up old storage (if successful)');
381
- return steps;
382
- }
383
- }
384
- //# sourceMappingURL=serviceIntegration.js.map
@@ -1,113 +0,0 @@
1
- /**
2
- * Sequential Augmentation Pipeline
3
- *
4
- * This module provides a pipeline for executing augmentations in a specific sequence:
5
- * ISense -> IMemory -> ICognition -> IConduit -> IActivation -> IPerception
6
- *
7
- * It supports high-performance streaming data from WebSockets without blocking.
8
- * Optimized for Node.js 23.11+ using native WebStreams API.
9
- */
10
- import { AugmentationResponse, WebSocketConnection } from './types/augmentations.js';
11
- import { BrainyData } from './brainyData.js';
12
- /**
13
- * Options for sequential pipeline execution
14
- */
15
- export interface SequentialPipelineOptions {
16
- /**
17
- * Timeout for each augmentation execution in milliseconds
18
- */
19
- timeout?: number;
20
- /**
21
- * Whether to stop execution if an error occurs
22
- */
23
- stopOnError?: boolean;
24
- /**
25
- * BrainyData instance to use for storage
26
- */
27
- brainyData?: BrainyData;
28
- }
29
- /**
30
- * Result of a pipeline execution
31
- */
32
- export interface PipelineResult<T> {
33
- /**
34
- * Whether the pipeline execution was successful
35
- */
36
- success: boolean;
37
- /**
38
- * The data returned by the pipeline
39
- */
40
- data: T;
41
- /**
42
- * Error message if the pipeline execution failed
43
- */
44
- error?: string;
45
- /**
46
- * Results from each stage of the pipeline
47
- */
48
- stageResults: {
49
- sense?: AugmentationResponse<unknown>;
50
- memory?: AugmentationResponse<unknown>;
51
- cognition?: AugmentationResponse<unknown>;
52
- conduit?: AugmentationResponse<unknown>;
53
- activation?: AugmentationResponse<unknown>;
54
- perception?: AugmentationResponse<unknown>;
55
- };
56
- }
57
- /**
58
- * SequentialPipeline class
59
- *
60
- * Executes augmentations in a specific sequence:
61
- * ISense -> IMemory -> ICognition -> IConduit -> IActivation -> IPerception
62
- */
63
- export declare class SequentialPipeline {
64
- private brainyData;
65
- /**
66
- * Create a new sequential pipeline
67
- *
68
- * @param options Options for the pipeline
69
- */
70
- constructor(options?: SequentialPipelineOptions);
71
- /**
72
- * Ensure stream classes are initialized
73
- * @private
74
- */
75
- private ensureStreamClassesInitialized;
76
- /**
77
- * Initialize the pipeline
78
- *
79
- * @returns A promise that resolves when initialization is complete
80
- */
81
- initialize(): Promise<void>;
82
- /**
83
- * Process data through the sequential pipeline
84
- *
85
- * @param rawData The raw data to process
86
- * @param dataType The type of data (e.g., 'text', 'image', 'audio')
87
- * @param options Options for pipeline execution
88
- * @returns A promise that resolves with the pipeline result
89
- */
90
- processData(rawData: Buffer | string, dataType: string, options?: SequentialPipelineOptions): Promise<PipelineResult<unknown>>;
91
- /**
92
- * Process WebSocket data through the sequential pipeline
93
- *
94
- * @param connection The WebSocket connection
95
- * @param dataType The type of data (e.g., 'text', 'image', 'audio')
96
- * @param options Options for pipeline execution
97
- * @returns A function to handle incoming WebSocket messages
98
- */
99
- createWebSocketHandler(connection: WebSocketConnection, dataType: string, options?: SequentialPipelineOptions): Promise<(data: unknown) => void>;
100
- /**
101
- * Set up a WebSocket connection to process data through the pipeline
102
- *
103
- * @param url The WebSocket URL to connect to
104
- * @param dataType The type of data (e.g., 'text', 'image', 'audio')
105
- * @param options Options for pipeline execution
106
- * @returns A promise that resolves with the WebSocket connection and associated streams
107
- */
108
- setupWebSocketPipeline(url: string, dataType: string, options?: SequentialPipelineOptions): Promise<WebSocketConnection & {
109
- readableStream?: ReadableStream<unknown>;
110
- writableStream?: WritableStream<unknown>;
111
- }>;
112
- }
113
- export declare const sequentialPipeline: SequentialPipeline;