@soulcraft/brainy 0.63.0 → 1.0.0-rc.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,417 +0,0 @@
1
- /**
2
- * Sequential Augmentation Pipeline
3
- *
4
- * This module provides a pipeline for executing augmentations in a specific sequence:
5
- * ISense -> IMemory -> ICognition -> IConduit -> IActivation -> IPerception
6
- *
7
- * It supports high-performance streaming data from WebSockets without blocking.
8
- * Optimized for Node.js 23.11+ using native WebStreams API.
9
- */
10
- import { AugmentationType } from './types/augmentations.js';
11
- import { BrainyData } from './brainyData.js';
12
- import { augmentationPipeline } from './augmentationPipeline.js';
13
// Use the browser's built-in WebStreams API or Node.js native WebStreams API.
// This approach ensures compatibility with both environments.
let TransformStream, ReadableStream, WritableStream;
// Resolve the stream constructors, preferring the platform globals.
const initializeStreamClasses = async () => {
    // Modern browsers and Node.js 18+ expose the classes on globalThis;
    // this branch runs synchronously (before the first await), matching
    // the eager assignment behavior callers may rely on.
    const g = globalThis;
    if (g.TransformStream !== undefined &&
        g.ReadableStream !== undefined &&
        g.WritableStream !== undefined) {
        TransformStream = g.TransformStream;
        ReadableStream = g.ReadableStream;
        WritableStream = g.WritableStream;
        return;
    }
    // Older Node.js without the globals: pull the classes from node:stream/web.
    // Browsers never reach this branch.
    try {
        const webStreams = await import('node:stream/web');
        TransformStream = webStreams.TransformStream;
        ReadableStream = webStreams.ReadableStream;
        WritableStream = webStreams.WritableStream;
    }
    catch (error) {
        console.error('Failed to import WebStreams API:', error);
        // Surface a more helpful error than the raw import failure.
        throw new Error('WebStreams API is not available in this environment. Please use a modern browser or Node.js 18+.');
    }
};
// Kick off initialization eagerly without blocking module evaluation.
const streamClassesPromise = initializeStreamClasses();
45
/**
 * Default pipeline options.
 *
 * These are forwarded to every augmentation stage that SequentialPipeline
 * invokes; callers may override either field via the `options` argument of
 * `processData`.
 */
const DEFAULT_SEQUENTIAL_PIPELINE_OPTIONS = {
    timeout: 30000, // per-stage time budget in milliseconds
    stopOnError: false // NOTE(review): presumably lets a stage continue past individual augmentation errors - confirm in augmentationPipeline
};
52
/**
 * SequentialPipeline class
 *
 * Executes augmentations in a specific sequence:
 * ISense -> IMemory -> ICognition -> IConduit -> IActivation -> IPerception
 *
 * The sense and memory stages are mandatory; the later stages are
 * best-effort and are only recorded in the result when they succeed.
 */
export class SequentialPipeline {
    /**
     * Create a new sequential pipeline
     *
     * @param options Options for the pipeline; `options.brainyData` may supply
     *        an existing BrainyData instance (a new one is created otherwise)
     */
    constructor(options = {}) {
        this.brainyData = options.brainyData || new BrainyData();
    }
    /**
     * Ensure stream classes are initialized
     * @private
     */
    async ensureStreamClassesInitialized() {
        await streamClassesPromise;
    }
    /**
     * Await a stage's result promises in order and return the first
     * successful one.
     *
     * @param resultPromises Iterable of per-augmentation result promises
     * @returns The first result with `success === true`, or null if none
     * @private
     */
    async _firstSuccessfulResult(resultPromises) {
        for (const resultPromise of resultPromises) {
            const res = await resultPromise;
            if (res.success) {
                return res;
            }
        }
        return null;
    }
    /**
     * Initialize the pipeline
     *
     * @returns A promise that resolves when initialization is complete
     */
    async initialize() {
        // Initialize stream classes and BrainyData in parallel
        await Promise.all([
            this.ensureStreamClassesInitialized(),
            this.brainyData.init()
        ]);
    }
    /**
     * Process data through the sequential pipeline
     *
     * @param rawData The raw data to process
     * @param dataType The type of data (e.g., 'text', 'image', 'audio')
     * @param options Options for pipeline execution
     * @returns A promise that resolves with the pipeline result
     */
    async processData(rawData, dataType, options = {}) {
        const opts = { ...DEFAULT_SEQUENTIAL_PIPELINE_OPTIONS, ...options };
        // Same per-stage execution options are passed to every stage below.
        const stageOptions = { timeout: opts.timeout, stopOnError: opts.stopOnError };
        const result = {
            success: true,
            data: null,
            stageResults: {}
        };
        try {
            // Step 1: Process raw data with ISense augmentations (mandatory)
            const senseResults = await augmentationPipeline.executeSensePipeline('processRawData', [rawData, dataType], stageOptions);
            const senseResult = await this._firstSuccessfulResult(senseResults);
            if (!senseResult) {
                return {
                    success: false,
                    data: null,
                    error: 'Failed to process raw data with ISense augmentations',
                    stageResults: { sense: { success: false, data: null, error: 'No sense augmentations available' } }
                };
            }
            result.stageResults.sense = senseResult;
            // Step 2: Store data in BrainyData using IMemory augmentations (mandatory)
            const memoryAugmentations = augmentationPipeline.getAugmentationsByType(AugmentationType.MEMORY);
            if (memoryAugmentations.length === 0) {
                return {
                    success: false,
                    data: null,
                    error: 'No memory augmentations available',
                    stageResults: result.stageResults
                };
            }
            // Use the first available memory augmentation
            const memoryAugmentation = memoryAugmentations[0];
            // Generate a unique (timestamp + random suffix) key for the data
            const dataKey = `data_${Date.now()}_${Math.random().toString(36).substring(2, 15)}`;
            // Store the data
            const memoryResult = await memoryAugmentation.storeData(dataKey, {
                rawData,
                dataType,
                nouns: senseResult.data.nouns,
                verbs: senseResult.data.verbs,
                timestamp: Date.now()
            });
            if (!memoryResult.success) {
                return {
                    success: false,
                    data: null,
                    error: `Failed to store data: ${memoryResult.error}`,
                    stageResults: { ...result.stageResults, memory: memoryResult }
                };
            }
            result.stageResults.memory = memoryResult;
            // Step 3: Trigger ICognition augmentations to analyze the data (optional)
            const cognitionResults = await augmentationPipeline.executeCognitionPipeline('reason', [`Analyze data with key ${dataKey}`, { dataKey }], stageOptions);
            const cognitionResult = await this._firstSuccessfulResult(cognitionResults);
            if (cognitionResult) {
                result.stageResults.cognition = cognitionResult;
            }
            // Step 4: Send notifications to IConduit augmentations (optional)
            const conduitResults = await augmentationPipeline.executeConduitPipeline('writeData', [{ dataKey, nouns: senseResult.data.nouns, verbs: senseResult.data.verbs }], stageOptions);
            const conduitResult = await this._firstSuccessfulResult(conduitResults);
            if (conduitResult) {
                result.stageResults.conduit = conduitResult;
            }
            // Step 5: Send notifications to IActivation augmentations (optional)
            const activationResults = await augmentationPipeline.executeActivationPipeline('triggerAction', ['dataProcessed', { dataKey }], stageOptions);
            const activationResult = await this._firstSuccessfulResult(activationResults);
            if (activationResult) {
                result.stageResults.activation = activationResult;
            }
            // Step 6: Send notifications to IPerception augmentations (optional)
            const perceptionResults = await augmentationPipeline.executePerceptionPipeline('interpret', [senseResult.data.nouns, senseResult.data.verbs, { dataKey }], stageOptions);
            const perceptionResult = await this._firstSuccessfulResult(perceptionResults);
            if (perceptionResult) {
                result.stageResults.perception = perceptionResult;
                result.data = perceptionResult.data;
            }
            else {
                // If no perception result, use the cognition result as the final data
                result.data = cognitionResult ? cognitionResult.data : { dataKey };
            }
            return result;
        }
        catch (error) {
            return {
                success: false,
                data: null,
                error: `Pipeline execution failed: ${error}`,
                stageResults: result.stageResults
            };
        }
    }
    /**
     * Process WebSocket data through the sequential pipeline
     *
     * @param connection The WebSocket connection
     * @param dataType The type of data (e.g., 'text', 'image', 'audio')
     * @param options Options for pipeline execution
     * @returns A function to handle incoming WebSocket messages
     */
    async createWebSocketHandler(connection, dataType, options = {}) {
        // Ensure stream classes are initialized
        await this.ensureStreamClassesInitialized();
        // Transform stage: run every incoming message through processData
        const transformStream = new TransformStream({
            transform: async (chunk, controller) => {
                try {
                    const data = typeof chunk === 'string' ? chunk : JSON.stringify(chunk);
                    const result = await this.processData(data, dataType, options);
                    if (result.success) {
                        controller.enqueue(result);
                    }
                    else {
                        // Failures are logged, not propagated, so the stream keeps flowing
                        console.warn('Pipeline processing failed:', result.error);
                    }
                }
                catch (error) {
                    console.error('Error in transform stream:', error);
                }
            }
        });
        // Sink stage: echo successful results back over the connection if it can send
        const writableStream = new WritableStream({
            write: async (result) => {
                // Only send back results if the connection supports it
                if (connection.send && typeof connection.send === 'function') {
                    try {
                        await connection.send(JSON.stringify(result));
                    }
                    catch (error) {
                        console.error('Error sending result back to WebSocket:', error);
                    }
                }
            }
        });
        // Connect the transform stream to the writable stream
        transformStream.readable.pipeTo(writableStream).catch((error) => {
            console.error('Error in pipeline stream:', error);
        });
        // Return a function that writes incoming data to the transform stream
        return (data) => {
            try {
                // Acquire, use, and release the writer per message so other
                // producers are not locked out between messages.
                const writer = transformStream.writable.getWriter();
                writer.write(data).catch((error) => {
                    console.error('Error writing to stream:', error);
                }).finally(() => {
                    writer.releaseLock();
                });
            }
            catch (error) {
                console.error('Error getting writer for transform stream:', error);
            }
        };
    }
    /**
     * Set up a WebSocket connection to process data through the pipeline
     *
     * @param url The WebSocket URL to connect to
     * @param dataType The type of data (e.g., 'text', 'image', 'audio')
     * @param options Options for pipeline execution
     * @returns A promise that resolves with the WebSocket connection and associated streams
     */
    async setupWebSocketPipeline(url, dataType, options = {}) {
        // Ensure stream classes are initialized
        await this.ensureStreamClassesInitialized();
        // Get WebSocket-supporting augmentations
        const webSocketAugmentations = augmentationPipeline.getWebSocketAugmentations();
        if (webSocketAugmentations.length === 0) {
            throw new Error('No WebSocket-supporting augmentations available');
        }
        // Use the first available WebSocket augmentation
        const webSocketAugmentation = webSocketAugmentations[0];
        // Connect to the WebSocket
        const connection = await webSocketAugmentation.connectWebSocket(url);
        // Create a readable stream fed by WebSocket messages
        const readableStream = new ReadableStream({
            start: (controller) => {
                // Normalize each event's payload to a string and enqueue it
                const messageHandler = (event) => {
                    try {
                        const data = typeof event.data === 'string'
                            ? event.data
                            : event.data instanceof Blob
                                ? new Promise(resolve => {
                                    const reader = new FileReader();
                                    reader.onload = () => resolve(reader.result);
                                    reader.readAsText(event.data);
                                })
                                : JSON.stringify(event.data);
                        // Blob decoding is asynchronous, so data may be a Promise
                        if (data instanceof Promise) {
                            data.then(resolvedData => {
                                controller.enqueue(resolvedData);
                            }).catch((error) => {
                                console.error('Error processing blob data:', error);
                            });
                        }
                        else {
                            controller.enqueue(data);
                        }
                    }
                    catch (error) {
                        console.error('Error processing WebSocket message:', error);
                    }
                };
                // Adapt the event-based handler to the data-based callback API
                const messageHandlerWrapper = (data) => {
                    messageHandler({ data });
                };
                // Store both handlers on the connection for later cleanup
                connection._streamMessageHandler = messageHandler;
                connection._messageHandlerWrapper = messageHandlerWrapper;
                webSocketAugmentation.onWebSocketMessage(connection.connectionId, messageHandlerWrapper).catch((error) => {
                    console.error('Error registering WebSocket message handler:', error);
                    controller.error(error);
                });
            },
            cancel: () => {
                // Clean up the message handler when the stream is cancelled
                if (connection._messageHandlerWrapper) {
                    webSocketAugmentation.offWebSocketMessage(connection.connectionId, connection._messageHandlerWrapper).catch((error) => {
                        console.error('Error removing WebSocket message handler:', error);
                    });
                    delete connection._streamMessageHandler;
                    delete connection._messageHandlerWrapper;
                }
            }
        });
        // Create a handler for processing the data
        const handlerPromise = this.createWebSocketHandler(connection, dataType, options);
        // Create a writable stream that sends data to the WebSocket
        const writableStream = new WritableStream({
            write: async (chunk) => {
                if (connection.send && typeof connection.send === 'function') {
                    try {
                        const data = typeof chunk === 'string' ? chunk : JSON.stringify(chunk);
                        await connection.send(data);
                    }
                    catch (error) {
                        console.error('Error sending data to WebSocket:', error);
                        throw error;
                    }
                }
                else {
                    throw new Error('WebSocket connection does not support sending data');
                }
            },
            close: () => {
                // Close the WebSocket connection when the stream is closed
                if (connection.close && typeof connection.close === 'function') {
                    connection.close().catch((error) => {
                        console.error('Error closing WebSocket connection:', error);
                    });
                }
            }
        });
        // Pipe the readable stream through our processing pipeline
        readableStream
            .pipeThrough(new TransformStream({
                transform: async (chunk, controller) => {
                    // Process each chunk through our handler
                    const handler = await handlerPromise;
                    handler(chunk);
                    // Pass through the original data
                    controller.enqueue(chunk);
                }
            }))
            .pipeTo(new WritableStream({
                write: () => { },
                abort: (error) => {
                    console.error('Error in WebSocket pipeline:', error);
                }
            }))
            .catch((error) => {
                // Guard the floating pipeTo promise so a stream error cannot
                // surface as an unhandled promise rejection.
                console.error('Error in WebSocket pipeline stream:', error);
            });
        // Attach the streams to the connection object for convenience
        const enhancedConnection = connection;
        enhancedConnection.readableStream = readableStream;
        enhancedConnection.writableStream = writableStream;
        return enhancedConnection;
    }
}
415
// Create and export a default instance of the sequential pipeline.
// This is a shared module-level singleton; construct SequentialPipeline
// directly when an isolated instance (e.g. with its own BrainyData) is needed.
export const sequentialPipeline = new SequentialPipeline();
417
- //# sourceMappingURL=sequentialPipeline.js.map
@@ -1,12 +0,0 @@
1
/**
 * Smart Model Loader - Zero Configuration ML Models
 * Downloads models on-demand, caches intelligently
 */
export declare class SmartModelLoader {
    // Ordered list of candidate locations (local dirs, user cache, CDNs)
    // probed by loadModel until one yields the model.
    private static readonly MODEL_SOURCES;
    // Resolves with the raw model bytes; the implementation falls back to
    // generated embeddings when no source is reachable.
    static loadModel(modelName: string): Promise<ArrayBuffer>;
    private static tryLoadFrom;
    private static cacheLocally;
    private static generateFallbackModel;
}
// NOTE(review): declared to return Promise<Float32Array>, but the shipped
// implementation is a stub that resolves to undefined - confirm before relying on it.
export declare function getEmbedding(text: string): Promise<Float32Array>;
@@ -1,88 +0,0 @@
1
/**
 * Smart Model Loader - Zero Configuration ML Models
 * Downloads models on-demand, caches intelligently
 */
export class SmartModelLoader {
    /**
     * Load a model by name, trying each entry of MODEL_SOURCES in order and
     * caching the first hit. Falls back to deterministic pseudo-random
     * embeddings when no source is reachable.
     *
     * @param {string} modelName - Model file name (e.g. 'encoder.onnx')
     * @returns {Promise<ArrayBuffer>} Raw model bytes
     */
    static async loadModel(modelName) {
        // Try each source in order
        for (const source of this.MODEL_SOURCES) {
            try {
                const model = await this.tryLoadFrom(source, modelName);
                if (model) {
                    await this.cacheLocally(model, modelName);
                    return model;
                }
            }
            catch {
                continue; // Source unreachable - try next source
            }
        }
        // Fallback: Generate lightweight deterministic embeddings
        console.warn('Using fallback embeddings (reduced accuracy)');
        return this.generateFallbackModel(modelName);
    }
    /**
     * Attempt to read a model from a single source (CDN URL or local dir).
     *
     * @param {string} source - Base URL or directory path
     * @param {string} model - Model file name
     * @returns {Promise<ArrayBuffer|null>} Model bytes, or null on miss
     */
    static async tryLoadFrom(source, model) {
        if (source.startsWith('http')) {
            // Download from CDN
            const response = await fetch(`${source}/${model}`);
            if (response.ok) {
                return await response.arrayBuffer();
            }
            return null;
        }
        // Check local filesystem
        try {
            const fs = await import('fs');
            const os = await import('os');
            // Expand a leading '~': Node's fs does not understand the shell
            // home shorthand, so '~/.brainy/models' would otherwise never hit.
            const resolved = source.startsWith('~')
                ? `${os.homedir()}${source.slice(1)}`
                : source;
            const buffer = fs.readFileSync(`${resolved}/${model}`);
            // Convert the Node Buffer to a standalone ArrayBuffer so the
            // declared Promise<ArrayBuffer> contract holds in Node too.
            return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
        }
        catch {
            return null;
        }
    }
    /**
     * Cache model bytes in the best available location
     * (browser Cache API, or ~/.brainy/models on Node).
     *
     * @param {ArrayBuffer} model - Model bytes to cache
     * @param {string} name - Cache key / file name
     */
    static async cacheLocally(model, name) {
        if (typeof window !== 'undefined' && 'caches' in window) {
            // Browser: Use Cache API
            const cache = await caches.open('brainy-models');
            await cache.put(name, new Response(model));
        }
        else if (typeof process !== 'undefined') {
            // Node: Use filesystem cache under the user's home directory
            const fs = await import('fs');
            const path = await import('path');
            const os = await import('os');
            // os.homedir() covers platforms where HOME is unset (e.g. Windows)
            const cacheDir = path.join(process.env.HOME || os.homedir() || '', '.brainy', 'models');
            fs.mkdirSync(cacheDir, { recursive: true });
            fs.writeFileSync(path.join(cacheDir, name), Buffer.from(model));
        }
    }
    /**
     * Deterministic "random" embeddings derived from the model name.
     * Good enough for development/testing when no real model is available.
     *
     * @param {string} name - Model name used to seed the values
     * @returns {ArrayBuffer} 384 float32 values in [-0.1, 0.1]
     */
    static generateFallbackModel(name) {
        const seed = name.split('').reduce((a, b) => a + b.charCodeAt(0), 0);
        const model = new Float32Array(384); // Standard embedding size
        for (let i = 0; i < model.length; i++) {
            model[i] = Math.sin(seed * (i + 1)) * 0.1;
        }
        return model.buffer;
    }
}
71
// Ordered list of locations loadModel probes; earlier entries win.
SmartModelLoader.MODEL_SOURCES = [
    // 1. Check if bundled locally
    './models',
    '../models',
    // 2. Check user's cache
    // NOTE(review): a literal '~' is not expanded by Node's fs - verify the loader expands it
    '~/.brainy/models',
    // 3. Check CDN (fast, free)
    'https://cdn.jsdelivr.net/npm/@brainy/models@latest',
    'https://unpkg.com/@brainy/models',
    // 4. Check Hugging Face (original source)
    'https://huggingface.co/Xenova/all-MiniLM-L6-v2/resolve/main'
];
83
// Usage - Zero configuration required!
/**
 * Convenience helper that loads the default encoder model.
 *
 * NOTE(review): incomplete stub - it loads the model but never computes or
 * returns an embedding, so it resolves to undefined despite the declared
 * Promise<Float32Array> return type. TODO: implement inference over `model`.
 */
export async function getEmbedding(text) {
    const model = await SmartModelLoader.loadModel('encoder.onnx');
    // ... use model
}
88
- //# sourceMappingURL=modelLoader.js.map