@soulcraft/brainy 3.4.0 → 3.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api/DataAPI.js +1 -1
- package/dist/api/UniversalImportAPI.js +1 -1
- package/dist/augmentations/apiServerAugmentation.js +1 -1
- package/dist/augmentations/auditLogAugmentation.js +1 -1
- package/dist/augmentations/configResolver.js +5 -5
- package/dist/augmentations/discovery/localDiscovery.js +2 -2
- package/dist/config/storageAutoConfig.js +2 -2
- package/dist/cortex/backupRestore.js +7 -7
- package/dist/critical/model-guardian.js +5 -5
- package/dist/distributed/cacheSync.d.ts +1 -1
- package/dist/distributed/cacheSync.js +1 -1
- package/dist/distributed/coordinator.d.ts +1 -1
- package/dist/distributed/coordinator.js +2 -2
- package/dist/distributed/httpTransport.d.ts +2 -2
- package/dist/distributed/httpTransport.js +5 -5
- package/dist/distributed/networkTransport.d.ts +1 -1
- package/dist/distributed/networkTransport.js +3 -3
- package/dist/distributed/readWriteSeparation.d.ts +1 -1
- package/dist/distributed/readWriteSeparation.js +1 -1
- package/dist/distributed/shardManager.d.ts +1 -1
- package/dist/distributed/shardManager.js +2 -2
- package/dist/distributed/shardMigration.d.ts +1 -1
- package/dist/distributed/shardMigration.js +1 -1
- package/dist/distributed/storageDiscovery.d.ts +1 -1
- package/dist/distributed/storageDiscovery.js +2 -2
- package/dist/embeddings/EmbeddingManager.js +2 -2
- package/dist/storage/adapters/fileSystemStorage.js +2 -2
- package/dist/storage/cacheManager.js +1 -1
- package/dist/storage/enhancedClearOperations.js +1 -1
- package/dist/universal/crypto.js +2 -1
- package/dist/universal/events.js +1 -1
- package/dist/universal/fs.js +2 -1
- package/dist/universal/path.js +2 -1
- package/dist/utils/adaptiveSocketManager.js +1 -1
- package/dist/utils/autoConfiguration.js +1 -1
- package/dist/utils/embedding.js +2 -2
- package/dist/utils/paramValidation.js +1 -1
- package/dist/utils/structuredLogger.js +2 -2
- package/dist/utils/version.js +3 -3
- package/package.json +1 -1
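
Every code change in this release follows one pattern: imports of Node.js built-in modules (fs, path, os, crypto, zlib, util, http, https, net, url, events, child_process) switch from bare specifiers to the node: protocol, and package.json is bumped to 3.5.0. A minimal sketch of the pattern, illustrative only and not code taken from the package:

```js
// Before: bare specifiers, which bundlers may silently replace with browser polyfills
import { join } from 'path';
const { gzipSync } = await import('zlib');

// After: the node: protocol marks these unambiguously as Node.js built-ins
import { join } from 'node:path';
const { gzipSync } = await import('node:zlib');
```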
package/dist/api/DataAPI.js
CHANGED
@@ -61,7 +61,7 @@ export class DataAPI {
         // Compress if requested
         if (compress) {
             // Import zlib for compression
-            const { gzipSync } = await import('zlib');
+            const { gzipSync } = await import('node:zlib');
             const jsonString = JSON.stringify(backupData);
             const compressed = gzipSync(Buffer.from(jsonString));
             return {
package/dist/api/UniversalImportAPI.js
CHANGED
@@ -78,7 +78,7 @@ export class UniversalImportAPI {
      */
     async importFromFile(filePath) {
         // Read the actual file content
-        const { readFileSync } = await import('fs');
+        const { readFileSync } = await import('node:fs');
         const ext = filePath.split('.').pop()?.toLowerCase() || 'txt';
         try {
             const fileContent = readFileSync(filePath, 'utf-8');
package/dist/augmentations/apiServerAugmentation.js
CHANGED
@@ -72,7 +72,7 @@ export class APIServerAugmentation extends BaseAugmentation {
         const express = await import('express').catch(() => null);
         const cors = await import('cors').catch(() => null);
         const ws = await import('ws').catch(() => null);
-        const { createServer } = await import('http');
+        const { createServer } = await import('node:http');
         if (!express || !cors || !ws) {
             this.log('Express, cors, or ws not available. Install with: npm install express cors ws', 'error');
             return;
package/dist/augmentations/configResolver.js
CHANGED
@@ -7,9 +7,9 @@
  * - Runtime updates
  * - Default values from schema
  */
-import { existsSync, readFileSync } from 'fs';
-import { join } from 'path';
-import { homedir } from 'os';
+import { existsSync, readFileSync } from 'node:fs';
+import { join } from 'node:path';
+import { homedir } from 'node:os';
 /**
  * Configuration source priority (highest to lowest)
  */
@@ -379,8 +379,8 @@ export class AugmentationConfigResolver {
         if (typeof process === 'undefined' || typeof window !== 'undefined') {
             throw new Error('Cannot save configuration files in browser environment');
         }
-        const fs = await import('fs');
-        const path = await import('path');
+        const fs = await import('node:fs');
+        const path = await import('node:path');
         const configPath = filepath || this.options.configPaths?.[0] || '.brainyrc';
         const augId = this.options.augmentationId;
         // Load existing config if it exists
package/dist/augmentations/discovery/localDiscovery.js
CHANGED
@@ -4,8 +4,8 @@
  * Discovers augmentations installed locally in node_modules
  * and built-in augmentations that ship with Brainy
  */
-import { existsSync, readdirSync, readFileSync } from 'fs';
-import { join } from 'path';
+import { existsSync, readdirSync, readFileSync } from 'node:fs';
+import { join } from 'node:path';
 /**
  * Discovers augmentations installed locally
  */
package/dist/config/storageAutoConfig.js
CHANGED
@@ -300,8 +300,8 @@ async function isWritable(dirPath) {
         return false;
     try {
         // Dynamic import fs for Node.js
-        const { promises: fs } = await import('fs');
-        const path = await import('path');
+        const { promises: fs } = await import('node:fs');
+        const path = await import('node:path');
         // Try to create directory if it doesn't exist
         await fs.mkdir(dirPath, { recursive: true });
         // Try to write a test file
package/dist/cortex/backupRestore.js
CHANGED
@@ -257,16 +257,16 @@ export class BackupRestore {
     }
     async compressData(data) {
         // Use zlib gzip compression
-        const { gzip } = await import('zlib');
-        const { promisify } = await import('util');
+        const { gzip } = await import('node:zlib');
+        const { promisify } = await import('node:util');
         const gzipAsync = promisify(gzip);
         const compressed = await gzipAsync(Buffer.from(data, 'utf-8'));
         return compressed.toString('base64');
     }
     async decompressData(data) {
         // Use zlib gunzip decompression
-        const { gunzip } = await import('zlib');
-        const { promisify } = await import('util');
+        const { gunzip } = await import('node:zlib');
+        const { promisify } = await import('node:util');
         const gunzipAsync = promisify(gunzip);
         const compressed = Buffer.from(data, 'base64');
         const decompressed = await gunzipAsync(compressed);
@@ -274,7 +274,7 @@ export class BackupRestore {
     }
     async encryptData(data, password) {
         // Use crypto module for AES-256 encryption
-        const crypto = await import('crypto');
+        const crypto = await import('node:crypto');
         // Generate key from password
         const key = crypto.createHash('sha256').update(password).digest();
         const iv = crypto.randomBytes(16);
@@ -286,7 +286,7 @@ export class BackupRestore {
     }
     async decryptData(data, password) {
         // Use crypto module for AES-256 decryption
-        const crypto = await import('crypto');
+        const crypto = await import('node:crypto');
         // Split IV and encrypted data
         const [ivString, encrypted] = data.split(':');
         const iv = Buffer.from(ivString, 'base64');
@@ -367,7 +367,7 @@ export class BackupRestore {
     }
     async calculateChecksum(data) {
         // Use crypto module for SHA-256 checksum
-        const crypto = await import('crypto');
+        const crypto = await import('node:crypto');
         return crypto.createHash('sha256').update(data).digest('hex');
     }
     formatFileSize(bytes) {
package/dist/critical/model-guardian.js
CHANGED
@@ -10,9 +10,9 @@
  * 3. Model MUST produce consistent 384-dim embeddings
  * 4. System MUST fail fast if model unavailable in production
  */
-import { existsSync } from 'fs';
-import { stat } from 'fs/promises';
-import { join } from 'path';
+import { existsSync } from 'node:fs';
+import { stat } from 'node:fs/promises';
+import { join } from 'node:path';
 import { env } from '@huggingface/transformers';
 // CRITICAL: These values MUST NEVER CHANGE
 const CRITICAL_MODEL_CONFIG = {
@@ -172,8 +172,8 @@ export class ModelGuardian {
      */
     async computeFileHash(filePath) {
         try {
-            const { readFile } = await import('fs/promises');
-            const { createHash } = await import('crypto');
+            const { readFile } = await import('node:fs/promises');
+            const { createHash } = await import('node:crypto');
             const fileBuffer = await readFile(filePath);
             const hash = createHash('sha256').update(fileBuffer).digest('hex');
             return hash;
package/dist/distributed/cacheSync.d.ts
CHANGED
@@ -2,7 +2,7 @@
  * Distributed Cache Synchronization
  * Provides cache coherence across multiple Brainy instances
  */
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
 export interface CacheSyncConfig {
     nodeId: string;
     syncInterval?: number;
package/dist/distributed/coordinator.d.ts
CHANGED
@@ -2,7 +2,7 @@
  * Distributed Coordinator for Brainy 3.0
  * Provides leader election, consensus, and coordination for distributed instances
  */
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
 import { NetworkTransport } from './networkTransport.js';
 export interface NodeInfo {
     id: string;
package/dist/distributed/coordinator.js
CHANGED
@@ -2,8 +2,8 @@
  * Distributed Coordinator for Brainy 3.0
  * Provides leader election, consensus, and coordination for distributed instances
  */
-import { EventEmitter } from 'events';
-import { createHash } from 'crypto';
+import { EventEmitter } from 'node:events';
+import { createHash } from 'node:crypto';
 /**
  * Distributed Coordinator implementing Raft-like consensus
  */
package/dist/distributed/httpTransport.d.ts
CHANGED
@@ -3,8 +3,8 @@
  * Simple, reliable, works everywhere - no WebSocket complexity!
  * REAL PRODUCTION CODE - Handles millions of operations
  */
-import * as http from 'http';
-import { EventEmitter } from 'events';
+import * as http from 'node:http';
+import { EventEmitter } from 'node:events';
 export interface TransportMessage {
     id: string;
     method: string;
package/dist/distributed/httpTransport.js
CHANGED
@@ -3,11 +3,11 @@
  * Simple, reliable, works everywhere - no WebSocket complexity!
  * REAL PRODUCTION CODE - Handles millions of operations
  */
-import * as http from 'http';
-import * as https from 'https';
-import { EventEmitter } from 'events';
-import * as net from 'net';
-import { URL } from 'url';
+import * as http from 'node:http';
+import * as https from 'node:https';
+import { EventEmitter } from 'node:events';
+import * as net from 'node:net';
+import { URL } from 'node:url';
 export class HTTPTransport extends EventEmitter {
     constructor(nodeId) {
         super();
package/dist/distributed/networkTransport.js
CHANGED
@@ -2,8 +2,8 @@
  * Network Transport Layer for Distributed Brainy
  * Uses WebSocket + HTTP for maximum compatibility
  */
-import * as http from 'http';
-import { EventEmitter } from 'events';
+import * as http from 'node:http';
+import { EventEmitter } from 'node:events';
 import { WebSocket } from 'ws';
 // Use dynamic imports for Node.js specific modules
 let WebSocketServer;
@@ -311,7 +311,7 @@ export class NetworkTransport extends EventEmitter {
         const token = process.env.KUBERNETES_TOKEN || '';
         try {
             // Query Kubernetes API for pod endpoints
-            const https = await import('https');
+            const https = await import('node:https');
             const response = await new Promise((resolve, reject) => {
                 https.get(`${apiServer}/api/v1/namespaces/${namespace}/endpoints/${serviceName}`, {
                     headers: {
@@ -2,7 +2,7 @@
  * Read/Write Separation for Distributed Scaling
  * Implements primary-replica architecture for scalable reads
  */
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
 import { DistributedCoordinator } from './coordinator.js';
 import { ShardManager } from './shardManager.js';
 import { CacheSync } from './cacheSync.js';
package/dist/distributed/shardManager.d.ts
CHANGED
@@ -2,7 +2,7 @@
  * Shard Manager for Horizontal Scaling
  * Implements consistent hashing for data distribution across shards
  */
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
 export interface ShardConfig {
     shardCount?: number;
     replicationFactor?: number;
package/dist/distributed/shardManager.js
CHANGED
@@ -2,8 +2,8 @@
  * Shard Manager for Horizontal Scaling
  * Implements consistent hashing for data distribution across shards
  */
-import { createHash } from 'crypto';
-import { EventEmitter } from 'events';
+import { createHash } from 'node:crypto';
+import { EventEmitter } from 'node:events';
 /**
  * Consistent Hash Ring for shard distribution
  */
package/dist/distributed/shardMigration.d.ts
CHANGED
@@ -4,7 +4,7 @@
  * Handles zero-downtime migration of data between nodes
  * Uses streaming for efficient transfer of large datasets
  */
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
 import type { StorageAdapter } from '../coreTypes.js';
 import type { ShardManager } from './shardManager.js';
 import type { HTTPTransport } from './httpTransport.js';
package/dist/distributed/shardMigration.js
CHANGED
@@ -4,7 +4,7 @@
  * Handles zero-downtime migration of data between nodes
  * Uses streaming for efficient transfer of large datasets
  */
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
 export class ShardMigrationManager extends EventEmitter {
     constructor(nodeId, storage, shardManager, transport, coordinator) {
         super();
package/dist/distributed/storageDiscovery.d.ts
CHANGED
@@ -3,7 +3,7 @@
  * Uses shared storage (S3/GCS/R2) as coordination point
  * REAL PRODUCTION CODE - No mocks, no stubs!
  */
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
 import { StorageAdapter } from '../coreTypes.js';
 export interface NodeInfo {
     id: string;
package/dist/distributed/storageDiscovery.js
CHANGED
@@ -3,8 +3,8 @@
  * Uses shared storage (S3/GCS/R2) as coordination point
  * REAL PRODUCTION CODE - No mocks, no stubs!
  */
-import { EventEmitter } from 'events';
-import * as os from 'os';
+import { EventEmitter } from 'node:events';
+import * as os from 'node:os';
 export class StorageDiscovery extends EventEmitter {
     constructor(storage, nodeId) {
         super();
package/dist/embeddings/EmbeddingManager.js
CHANGED
@@ -16,8 +16,8 @@
  * hybridModelManager, universalMemoryManager, and more.
  */
 import { pipeline, env } from '@huggingface/transformers';
-import { existsSync } from 'fs';
-import { join } from 'path';
+import { existsSync } from 'node:fs';
+import { join } from 'node:path';
 // Global state for true singleton across entire process
 let globalInstance = null;
 let globalInitPromise = null;
package/dist/storage/adapters/fileSystemStorage.js
CHANGED
@@ -11,8 +11,8 @@ let moduleLoadingPromise = null;
 // Try to load Node.js modules
 try {
     // Using dynamic imports to avoid issues in browser environments
-    const fsPromise = import('fs');
-    const pathPromise = import('path');
+    const fsPromise = import('node:fs');
+    const pathPromise = import('node:path');
     moduleLoadingPromise = Promise.all([fsPromise, pathPromise])
         .then(([fsModule, pathModule]) => {
         fs = fsModule;
package/dist/storage/cacheManager.js
CHANGED
@@ -279,7 +279,7 @@ export class CacheManager {
         if (this.environment === Environment.NODE) {
             try {
                 // Use dynamic import for OS module
-                const os = await import('os');
+                const os = await import('node:os');
                 // Get actual system memory information
                 const totalMemory = os.totalmem();
                 const freeMemory = os.freemem();
package/dist/storage/enhancedClearOperations.js
CHANGED
@@ -139,7 +139,7 @@ export class EnhancedFileSystemClear {
         const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
         const backupDir = `${this.rootDir}-backup-${timestamp}`;
         // Use cp -r for efficient directory copying
-        const { spawn } = await import('child_process');
+        const { spawn } = await import('node:child_process');
         return new Promise((resolve, reject) => {
             const cp = spawn('cp', ['-r', this.rootDir, backupDir]);
             cp.on('close', (code) => {
package/dist/universal/crypto.js
CHANGED
@@ -8,7 +8,8 @@ let nodeCrypto = null;
 // Dynamic import for Node.js crypto (only in Node.js environment)
 if (isNode()) {
     try {
-        nodeCrypto = await import('crypto');
+        // Use node: protocol to prevent bundler polyfilling (requires Node 22+)
+        nodeCrypto = await import('node:crypto');
     }
     catch {
         // Ignore import errors in non-Node environments
package/dist/universal/events.js
CHANGED
@@ -8,7 +8,7 @@ let nodeEvents = null;
 // Dynamic import for Node.js events (only in Node.js environment)
 if (isNode()) {
     try {
-        nodeEvents = await import('events');
+        nodeEvents = await import('node:events');
     }
     catch {
         // Ignore import errors in non-Node environments
package/dist/universal/fs.js
CHANGED
@@ -8,7 +8,8 @@ let nodeFs = null;
 // Dynamic import for Node.js fs (only in Node.js environment)
 if (isNode()) {
     try {
-        nodeFs = await import('fs/promises');
+        // Use node: protocol to prevent bundler polyfilling (requires Node 22+)
+        nodeFs = await import('node:fs/promises');
     }
     catch {
         // Ignore import errors in non-Node environments
package/dist/universal/path.js
CHANGED
@@ -8,7 +8,8 @@ let nodePath = null;
 // Dynamic import for Node.js path (only in Node.js environment)
 if (isNode()) {
     try {
-        nodePath = await import('path');
+        // Use node: protocol to prevent bundler polyfilling (requires Node 22+)
+        nodePath = await import('node:path');
     }
     catch {
         // Ignore import errors in non-Node environments
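
The four universal/* shims (crypto.js, events.js, fs.js, path.js) share the same guarded top-level dynamic import, now with node: specifiers. A condensed sketch of that pattern follows; the isNode() check here is a simplified stand-in for the package's own environment detection, not its actual implementation:

```js
// Simplified environment check (assumption; the package ships its own helper)
const isNode = () => typeof process !== 'undefined' && !!process.versions?.node;

let nodeCrypto = null;
if (isNode()) {
    try {
        // node: protocol keeps bundlers from substituting a browser polyfill
        nodeCrypto = await import('node:crypto');
    }
    catch {
        // Ignore import errors in non-Node environments
    }
}
```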
package/dist/utils/adaptiveSocketManager.js
CHANGED
@@ -3,7 +3,7 @@
  * Automatically manages socket pools and connection settings based on load patterns
  * Zero-configuration approach that learns and adapts to workload characteristics
  */
-import { Agent as HttpsAgent } from 'https';
+import { Agent as HttpsAgent } from 'node:https';
 import { NodeHttpHandler } from '@smithy/node-http-handler';
 import { createModuleLogger } from './logger.js';
 /**
package/dist/utils/autoConfiguration.js
CHANGED
@@ -159,7 +159,7 @@ export class AutoConfiguration {
         // Node.js memory detection
         if (isNode()) {
             try {
-                const os = await import('os');
+                const os = await import('node:os');
                 availableMemory = os.totalmem() * 0.7; // Use 70% of total memory
                 cpuCores = os.cpus().length;
             }
package/dist/utils/embedding.js
CHANGED
@@ -3,8 +3,8 @@
  * Complete rewrite to eliminate TensorFlow.js and use ONNX-based models
  */
 import { isBrowser } from './environment.js';
-import { join } from 'path';
-import { existsSync } from 'fs';
+import { join } from 'node:path';
+import { existsSync } from 'node:fs';
 // @ts-ignore - Transformers.js is now the primary embedding library
 import { pipeline, env } from '@huggingface/transformers';
 // CRITICAL: Disable ONNX memory arena to prevent 4-8GB allocation
package/dist/utils/paramValidation.js
CHANGED
@@ -5,7 +5,7 @@
  * Only enforces universal truths, learns everything else
  */
 import { NounType, VerbType } from '../types/graphTypes.js';
-import * as os from 'os';
+import * as os from 'node:os';
 /**
  * Auto-configured limits based on system resources
  * These adapt to available memory and observed performance
package/dist/utils/structuredLogger.js
CHANGED
@@ -4,8 +4,8 @@
  * performance tracking, and multiple transport support
  */
 import { performance } from 'perf_hooks';
-import { hostname } from 'os';
-import { randomUUID } from 'crypto';
+import { hostname } from 'node:os';
+import { randomUUID } from 'node:crypto';
 export var LogLevel;
 (function (LogLevel) {
     LogLevel[LogLevel["SILENT"] = -1] = "SILENT";
package/dist/utils/version.js
CHANGED
@@ -1,9 +1,9 @@
 /**
  * Version utilities for Brainy
  */
-import { readFileSync } from 'fs';
-import { join, dirname } from 'path';
-import { fileURLToPath } from 'url';
+import { readFileSync } from 'node:fs';
+import { join, dirname } from 'node:path';
+import { fileURLToPath } from 'node:url';
 // Get package.json path relative to this file
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@soulcraft/brainy",
-  "version": "3.4.0",
+  "version": "3.5.0",
   "description": "Universal Knowledge Protocol™ - World's first Triple Intelligence database unifying vector, graph, and document search in one API. 31 nouns × 40 verbs for infinite expressiveness.",
   "main": "dist/index.js",
   "module": "dist/index.js",
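
Because the shims now rely on node:-prefixed specifiers, a quick runtime check can confirm whether a given environment resolves them before upgrading. A hypothetical snippet, run as an ES module, not part of the package:

```js
// Succeeds on runtimes that resolve node: specifiers; logs a failure elsewhere
import('node:crypto')
    .then(() => console.log('node: specifiers supported'))
    .catch(() => console.error('node: specifiers not supported in this runtime'));
```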