@aleph-ai/tinyaleph 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +278 -0
  3. package/backends/cryptographic/index.js +196 -0
  4. package/backends/index.js +15 -0
  5. package/backends/interface.js +89 -0
  6. package/backends/scientific/index.js +272 -0
  7. package/backends/semantic/index.js +527 -0
  8. package/backends/semantic/surface.js +393 -0
  9. package/backends/semantic/two-layer.js +375 -0
  10. package/core/fano.js +127 -0
  11. package/core/hilbert.js +564 -0
  12. package/core/hypercomplex.js +141 -0
  13. package/core/index.js +133 -0
  14. package/core/llm.js +132 -0
  15. package/core/prime.js +184 -0
  16. package/core/resonance.js +695 -0
  17. package/core/rformer-tf.js +1086 -0
  18. package/core/rformer.js +806 -0
  19. package/core/sieve.js +350 -0
  20. package/data.json +8163 -0
  21. package/docs/EXAMPLES_PLAN.md +293 -0
  22. package/docs/README.md +159 -0
  23. package/docs/design/ALEPH_CHAT_ARCHITECTURE.md +499 -0
  24. package/docs/guide/01-quickstart.md +298 -0
  25. package/docs/guide/02-semantic-computing.md +409 -0
  26. package/docs/guide/03-cryptographic.md +420 -0
  27. package/docs/guide/04-scientific.md +494 -0
  28. package/docs/guide/05-llm-integration.md +568 -0
  29. package/docs/guide/06-advanced.md +996 -0
  30. package/docs/guide/README.md +188 -0
  31. package/docs/reference/01-core.md +695 -0
  32. package/docs/reference/02-physics.md +601 -0
  33. package/docs/reference/03-backends.md +892 -0
  34. package/docs/reference/04-engine.md +632 -0
  35. package/docs/reference/README.md +252 -0
  36. package/docs/theory/01-prime-semantics.md +327 -0
  37. package/docs/theory/02-hypercomplex-algebra.md +421 -0
  38. package/docs/theory/03-phase-synchronization.md +364 -0
  39. package/docs/theory/04-entropy-reasoning.md +348 -0
  40. package/docs/theory/05-non-commutativity.md +402 -0
  41. package/docs/theory/06-two-layer-meaning.md +414 -0
  42. package/docs/theory/07-resonant-field-interface.md +419 -0
  43. package/docs/theory/08-semantic-sieve.md +520 -0
  44. package/docs/theory/09-temporal-emergence.md +298 -0
  45. package/docs/theory/10-quaternionic-memory.md +415 -0
  46. package/docs/theory/README.md +162 -0
  47. package/engine/aleph.js +418 -0
  48. package/engine/index.js +7 -0
  49. package/index.js +23 -0
  50. package/modular.js +254 -0
  51. package/package.json +99 -0
  52. package/physics/collapse.js +95 -0
  53. package/physics/entropy.js +88 -0
  54. package/physics/index.js +65 -0
  55. package/physics/kuramoto.js +91 -0
  56. package/physics/lyapunov.js +80 -0
  57. package/physics/oscillator.js +95 -0
  58. package/types/index.d.ts +575 -0
@@ -0,0 +1,527 @@
1
+ /**
2
+ * Semantic Backend - Natural language understanding and concept mapping
3
+ *
4
+ * IMPORTANT: Concepts are non-commutative. "dog bites man" ≠ "man bites dog"
5
+ * We use sequential hypercomplex multiplication to preserve order.
6
+ *
7
+ * DNA-INSPIRED PROCESSING:
8
+ * - Bidirectional (boustrophedon): Forward AND backward states combined
9
+ * - Codon chunking: Triplet groupings for emergent meaning
10
+ * - Reading frames: 6 frames (3 forward + 3 reverse offsets)
11
+ * - Sense/Antisense: Dual representations via conjugation
12
+ */
13
+ const { Backend } = require('../interface');
14
+ const { Hypercomplex } = require('../../core/hypercomplex');
15
+ const { primeToFrequency, primeToAngle, DEFAULT_PRIMES, nthPrime } = require('../../core/prime');
16
+
17
class SemanticBackend extends Backend {
  /**
   * @param {object} config - Backend configuration.
   * @param {number[]} [config.primes] - Prime pool for hashing unknown words (defaults to DEFAULT_PRIMES).
   * @param {Object<string, number[]>} [config.vocabulary] - word → prime-list mapping.
   * @param {Object<number, string>} [config.ontology] - prime → meaning string (e.g. "existence/being").
   * @param {Array} [config.transforms] - Prime-rewrite rules ({ q: queryPrimes, r: replacementPrimes }).
   * @param {object} [config.axes] - axisIndex → prime-list mapping.
   * @param {number[]} [config.corePrimes] - Primes that transforms must never rewrite.
   * @param {string[]} [config.stopWords] - Words excluded from filtered tokenization.
   */
  constructor(config) {
    super(config);
    this.config.primes = config.primes || DEFAULT_PRIMES;
    this.vocabulary = new Map(Object.entries(config.vocabulary || {}));
    this.ontology = config.ontology || {};
    this.transforms = config.transforms || [];
    this.axes = config.axes || {};
    this.corePrimes = new Set(config.corePrimes || [
      2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47,
      53, 59, 61, 67, 71, 73, 79, 83, 89, 97
    ]);
    this.stopWords = new Set(config.stopWords || [
      'a', 'an', 'the', 'is', 'are', 'was', 'were', 'be', 'of', 'in', 'to',
      'for', 'with', 'on', 'at', 'by', 'from', 'and', 'or', 'but', 'if',
      'it', 'its', 'this', 'that', 'what', 'which', 'who', 'whom', 'whose',
      'how', 'when', 'where', 'why', 'can', 'could', 'would', 'should', 'will'
    ]);
  }

  /**
   * Split text into ordered tokens with prime signatures.
   * Each token records its sequence position so downstream ordered
   * (non-commutative) encoders can preserve word order.
   *
   * @param {string} text - Input text.
   * @param {boolean} [filterStopWords=false] - Drop stop words when true.
   * @returns {Array<{word: string, primes: number[], known: boolean, isStop: boolean, position: number}>}
   */
  tokenize(text, filterStopWords = false) {
    const words = text.toLowerCase().split(/\s+/).filter(Boolean);
    const tokens = [];
    let position = 0;
    for (const word of words) {
      // Strip punctuation; keep word characters only.
      const clean = word.replace(/[^\w]/g, '');
      if (!clean) continue;
      const isStop = this.stopWords.has(clean);
      if (filterStopWords && isStop) continue;
      const primes = this.vocabulary.get(clean) || this.wordToPrimes(clean);
      tokens.push({
        word: clean,
        primes,
        known: this.vocabulary.has(clean),
        isStop,
        position: position++ // Track position for order preservation
      });
    }
    return tokens;
  }

  /**
   * Hash an out-of-vocabulary word to primes via its character codes.
   * Deterministic: the same word always maps to the same prime list.
   *
   * @param {string} word - Cleaned lowercase word.
   * @returns {number[]} One prime per character.
   */
  wordToPrimes(word) {
    const primes = this.config.primes;
    return [...word].map(c => primes[c.charCodeAt(0) % primes.length]);
  }

  /**
   * Encode text to a flat prime list, filtering stop words.
   * @param {string} text
   * @returns {number[]}
   */
  encode(text) {
    const tokens = this.tokenize(text, true);
    return tokens.flatMap(t => t.primes);
  }

  /**
   * Encode text to primes WITHOUT filtering stop words.
   * Use this when you need complete semantic content (e.g., for training/comparison).
   * @param {string} text
   * @returns {number[]}
   */
  encodeAll(text) {
    const tokens = this.tokenize(text, false);
    return tokens.flatMap(t => t.primes);
  }

  /**
   * Decode primes to text using a greedy covering algorithm.
   *
   * The key insight from TWO_LAYER_MEANING.md:
   * - Layer 1 (primes) = the actual meaning
   * - Layer 2 (words) = surface manifestation of that meaning
   *
   * We pick words that COVER the input primes with minimal noise,
   * ensuring different prime signatures produce different outputs.
   *
   * Fixes vs. previous version:
   * - Only primes belonging to the target set are marked covered; previously
   *   a selected word's noise primes inflated `covered.size`, terminating the
   *   loop before all target primes were considered.
   * - Skipped large primes (> 100, i.e. hashes of unknown words) no longer
   *   consume output-word slots in the ontology fallback.
   *
   * @param {number[]} primes - Prime signature to verbalize.
   * @returns {string} Space-joined words, or a raw prime/ontology description.
   */
  decode(primes) {
    const primeSet = new Set(primes);
    const covered = new Set();
    const selected = [];
    const maxWords = 5;

    // Greedy covering: pick words that cover most NEW primes with least noise.
    while (covered.size < primeSet.size && selected.length < maxWords) {
      let best = { word: null, wordPrimes: [], score: -Infinity };

      for (const [word, wordPrimes] of this.vocabulary) {
        if (this.stopWords.has(word)) continue;
        if (selected.includes(word)) continue;

        // Count primes this word would newly cover.
        const newCoverage = wordPrimes.filter(p => primeSet.has(p) && !covered.has(p)).length;

        // Penalize words that introduce primes NOT in our target set (noise).
        const noise = wordPrimes.filter(p => !primeSet.has(p)).length;

        // Reward exact matches more highly.
        const exactMatch = wordPrimes.every(p => primeSet.has(p)) ? 2 : 0;

        // Score: new coverage + exact match bonus - noise penalty.
        const score = newCoverage + exactMatch - (noise * 0.4);

        if (score > best.score && newCoverage > 0) {
          best = { word, wordPrimes, score };
        }
      }

      if (best.word && best.score > 0) {
        selected.push(best.word);
        // BUGFIX: only mark TARGET primes as covered. Adding noise primes
        // inflated covered.size past primeSet.size and ended the loop early.
        for (const p of best.wordPrimes) {
          if (primeSet.has(p)) covered.add(p);
        }
      } else {
        break; // No more useful words found
      }
    }

    // Fallback: describe remaining uncovered primes via ontology.
    const uncovered = [...primeSet].filter(p => !covered.has(p));
    for (const p of uncovered) {
      if (selected.length >= maxWords) break;
      const meaning = this.ontology[p];
      if (meaning) {
        selected.push(meaning.split('/')[0]); // Take first part of "existence/being"
      } else if (p <= 100) {
        selected.push(`P${p}`);
      }
      // Primes > 100 are hashes of unknown words — skip them without
      // consuming an output slot (previously they burned a slice slot).
    }

    return selected.join(' ') || this.primesToMeaning(primes);
  }

  /**
   * Render primes directly via the ontology, one entry per distinct prime.
   * @param {number[]} primes
   * @returns {string} '·'-joined meanings (or `P<n>` placeholders).
   */
  primesToMeaning(primes) {
    return [...new Set(primes)].map(p => this.ontology[p] || `P${p}`).join('·');
  }

  /**
   * DEPRECATED: Use orderedPrimesToState for proper non-commutative encoding.
   * This method treats primes as an unordered set (loses word order).
   *
   * @param {number[]} primes
   * @returns {Hypercomplex} Normalized state.
   */
  primesToState(primes) {
    const state = Hypercomplex.zero(this.dimension);
    for (const p of primes) {
      const angle = primeToAngle(p);
      for (let i = 0; i < this.dimension; i++) {
        state.c[i] += Math.cos(angle * (i + 1)) / Math.sqrt(primes.length || 1);
      }
    }
    return state.normalize();
  }

  /**
   * Encode ordered tokens to state using sequential multiplication (NON-COMMUTATIVE).
   * "dog bites man" will produce a DIFFERENT state than "man bites dog".
   *
   * @param {Array<number[] | {primes: number[]}>} orderedTokens - Tokens (or raw prime arrays) in order.
   * @returns {Hypercomplex} Normalized state.
   */
  orderedPrimesToState(orderedTokens) {
    // Start with identity element (1 + 0i + 0j + ...).
    let state = Hypercomplex.basis(this.dimension, 0, 1);

    for (let i = 0; i < orderedTokens.length; i++) {
      const token = orderedTokens[i];
      const primes = Array.isArray(token) ? token : token.primes;

      // Convert primes to hypercomplex rotation.
      const tokenH = this.primesToHypercomplex(primes);

      // Apply position-dependent phase shift (breaks commutativity further).
      const positioned = this.applyPositionPhase(tokenH, i);

      // Sequential MULTIPLICATION (non-commutative!)
      // This is the key: mul(A,B) ≠ mul(B,A) for hypercomplex.
      state = state.mul(positioned);
    }

    return state.normalize();
  }

  /**
   * Convert primes to a hypercomplex number (rotation in high-D space).
   * Each prime selects an imaginary axis and contributes a rotation
   * cos(θ) + sin(θ)·e_axis, accumulated by multiplication.
   *
   * @param {number[]} primes
   * @returns {Hypercomplex} Normalized product of rotations.
   */
  primesToHypercomplex(primes) {
    let h = Hypercomplex.basis(this.dimension, 0, 1); // Start with 1

    for (const p of primes) {
      const angle = primeToAngle(p);
      const axis = (p % (this.dimension - 1)) + 1; // Use prime to select axis (1 to dim-1)

      // Create rotation: cos(θ) + sin(θ)·e_axis.
      const rot = Hypercomplex.zero(this.dimension);
      rot.c[0] = Math.cos(angle);
      rot.c[axis] = Math.sin(angle);

      // Accumulate by multiplication (previously this copied coefficients
      // back into a const element-by-element; plain reassignment is equivalent).
      h = h.mul(rot);
    }

    return h.normalize();
  }

  /**
   * Apply a position-dependent phase rotation so that position in the
   * sequence affects the final state.
   *
   * @param {Hypercomplex} h - State to rotate.
   * @param {number} position - Zero-based token position.
   * @returns {Hypercomplex} Rotated state.
   */
  applyPositionPhase(h, position) {
    // Use the position-th prime for the phase shift.
    const posPrime = nthPrime(position + 1);
    const angle = primeToAngle(posPrime) * 0.5; // Half angle for subtler effect

    // Rotate in the position-dependent plane. Guard the modulus so a
    // dimension of 2 doesn't produce `position % 0` → NaN axis index.
    const posAxis = (position % Math.max(1, this.dimension - 2)) + 1;

    const rot = Hypercomplex.zero(this.dimension);
    rot.c[0] = Math.cos(angle);
    rot.c[posAxis] = Math.sin(angle);

    return h.mul(rot);
  }

  /**
   * Ordered encode: returns tokens with position information (stop words filtered).
   * @param {string} text
   * @returns {Array} Ordered token objects from tokenize().
   */
  encodeOrdered(text) {
    return this.tokenize(text, true);
  }

  /**
   * Full ordered processing: text → ordered tokens → non-commutative state.
   * @param {string} text
   * @returns {Hypercomplex}
   */
  textToOrderedState(text) {
    const tokens = this.encodeOrdered(text);
    return this.orderedPrimesToState(tokens);
  }

  /**
   * Map primes to their characteristic frequencies.
   * @param {number[]} primes
   * @returns {number[]}
   */
  primesToFrequencies(primes) {
    return primes.map(p => primeToFrequency(p));
  }

  /**
   * Apply a prime-rewrite transform { q: queryPrimes, r: replacementPrimes }.
   * Core primes are never rewritten; a transform that targets any core prime
   * is skipped entirely. Returns the input unchanged when the transform does
   * not match.
   *
   * @param {number[]} inputPrimes
   * @param {{q?: number[], r?: number[]}} transform
   * @returns {number[]} Deduplicated result primes.
   */
  applyTransform(inputPrimes, transform) {
    const inputSet = new Set(inputPrimes);
    // Check if transform query primes are present.
    if (!transform.q || !transform.q.some(p => inputSet.has(p))) return inputPrimes;
    // Don't transform core primes.
    if (transform.q.some(p => this.corePrimes.has(p))) return inputPrimes;
    // Apply replacement: keep core primes and anything not queried, add replacements.
    const kept = inputPrimes.filter(p => this.corePrimes.has(p) || !transform.q.includes(p));
    return [...new Set([...kept, ...(transform.r || [])])];
  }

  /**
   * Add or overwrite a vocabulary entry.
   * @param {string} word
   * @param {number[]} primes
   * @param {number} [confidence=0.5] - Echoed back; not stored.
   * @returns {{word: string, primes: number[], confidence: number}}
   */
  learn(word, primes, confidence = 0.5) {
    this.vocabulary.set(word.toLowerCase().trim(), primes);
    return { word, primes, confidence };
  }

  /** @returns {number} Number of vocabulary entries. */
  getVocabularySize() {
    return this.vocabulary.size;
  }

  /**
   * @param {string} word
   * @returns {boolean} Whether the (normalized) word is in the vocabulary.
   */
  hasWord(word) {
    return this.vocabulary.has(word.toLowerCase().trim());
  }

  /**
   * @param {string} word
   * @returns {number[]|undefined} Primes for the word, if known.
   */
  getWordPrimes(word) {
    return this.vocabulary.get(word.toLowerCase().trim());
  }

  /**
   * @param {number} prime
   * @returns {string|undefined} Ontology meaning for the prime.
   */
  getOntologyMeaning(prime) {
    return this.ontology[prime];
  }

  /**
   * @param {number|string} axisIndex
   * @returns {number[]|undefined} Primes associated with the axis.
   */
  getAxisPrimes(axisIndex) {
    return this.axes[axisIndex];
  }

  // ============================================
  // DNA-INSPIRED SEMANTIC PROCESSING
  // ============================================

  /**
   * BIDIRECTIONAL PROCESSING (Enochian Boustrophedon).
   * Like Dee & Kelley's method: read forward AND backward, combine results.
   *
   * The forward and backward states capture different semantic perspectives.
   * Combined via multiplication with conjugation for anti-symmetric blending.
   *
   * @param {Array} tokens - Ordered tokens.
   * @returns {Hypercomplex} Normalized combined state (identity for empty input).
   */
  bidirectionalState(tokens) {
    if (!tokens || tokens.length === 0) {
      return Hypercomplex.basis(this.dimension, 0, 1);
    }

    // Forward state (normal left-to-right).
    const stateF = this.orderedPrimesToState(tokens);

    // Backward state (reversed order — reveals hidden patterns).
    const tokensB = [...tokens].reverse();
    const stateB = this.orderedPrimesToState(tokensB);

    // Combine: forward ⊗ conjugate(backward).
    // The conjugate creates an antisymmetric relationship.
    const combined = stateF.mul(stateB.conjugate());

    return combined.normalize();
  }

  /**
   * CODON-STYLE CHUNKING (DNA Triplets).
   * Group tokens into triplets — meaning emerges from 3-unit groups.
   *
   * In DNA, codons (3 nucleotides) encode amino acids.
   * Similarly, semantic "codons" may carry emergent meaning.
   *
   * @param {Array} tokens - Ordered tokens (or raw prime arrays).
   * @param {number} [codonSize=3]
   * @returns {Array<{tokens: Array, primes: number[], position: number}>}
   */
  tokensToCodons(tokens, codonSize = 3) {
    const codons = [];
    for (let i = 0; i < tokens.length; i += codonSize) {
      const chunk = tokens.slice(i, i + codonSize);
      // Merge primes from all tokens in the codon.
      const codonPrimes = chunk.flatMap(t => Array.isArray(t) ? t : t.primes);
      codons.push({
        tokens: chunk,
        primes: codonPrimes,
        position: Math.floor(i / codonSize)
      });
    }
    return codons;
  }

  /**
   * Process text using codon chunking: each codon becomes one unit of the
   * ordered (non-commutative) encoding.
   *
   * @param {string} text
   * @param {number} [codonSize=3]
   * @returns {Hypercomplex}
   */
  codonState(text, codonSize = 3) {
    const tokens = this.tokenize(text, true);
    const codons = this.tokensToCodons(tokens, codonSize);
    return this.orderedPrimesToState(codons);
  }

  /**
   * READING FRAME SHIFTS (DNA 6-Frame Translation).
   * DNA has 6 reading frames: 3 forward offsets + 3 reverse offsets.
   *
   * Each frame yields a different interpretation of the same sequence.
   * Combined, they provide a richer semantic representation.
   *
   * @param {Array} tokens - Ordered tokens.
   * @param {number} [numFrames=3] - Offsets per direction.
   * @returns {Array<{direction: string, offset: number, state: Hypercomplex, tokens: Array}>}
   */
  readingFrameStates(tokens, numFrames = 3) {
    const frames = [];

    // Forward frames (offset 0, 1, 2).
    for (let offset = 0; offset < numFrames && offset < tokens.length; offset++) {
      const frameTokens = tokens.slice(offset);
      const state = this.orderedPrimesToState(frameTokens);
      frames.push({
        direction: 'forward',
        offset,
        state,
        tokens: frameTokens
      });
    }

    // Reverse frames (reversed sequence with offsets).
    const reversed = [...tokens].reverse();
    for (let offset = 0; offset < numFrames && offset < reversed.length; offset++) {
      const frameTokens = reversed.slice(offset);
      const state = this.orderedPrimesToState(frameTokens);
      frames.push({
        direction: 'reverse',
        offset,
        state,
        tokens: frameTokens
      });
    }

    return frames;
  }

  /**
   * Combine all 6 reading frames into a unified state by sequential
   * multiplication of the per-frame states.
   *
   * @param {string} text
   * @returns {Hypercomplex}
   */
  sixFrameState(text) {
    const tokens = this.tokenize(text, true);
    const frames = this.readingFrameStates(tokens, 3);

    // Start with identity, then multiply all frame states together.
    let combined = Hypercomplex.basis(this.dimension, 0, 1);
    for (const frame of frames) {
      combined = combined.mul(frame.state);
    }

    return combined.normalize();
  }

  /**
   * SENSE/ANTISENSE DUALITY (DNA Double Helix).
   * Like DNA's complementary strands, maintain dual representations:
   * - Sense: the primary interpretation (primes as-is)
   * - Antisense: the complementary interpretation (conjugate)
   *
   * @param {Array} tokens - Ordered tokens.
   * @returns {{sense: Hypercomplex, antisense: Hypercomplex, magnitude: number, coherence: number}}
   */
  dualRepresentation(tokens) {
    const state = this.orderedPrimesToState(tokens);

    return {
      sense: state,
      antisense: state.conjugate(),
      // The product of sense and antisense = |state|² scalar (real number).
      // This represents the "strength" of the semantic encoding.
      magnitude: state.norm(),
      // Coherence between sense and antisense (1.0 if Hypercomplex lacks coherence()).
      coherence: state.coherence ? state.coherence(state.conjugate()) : 1.0
    };
  }

  /**
   * FULL DNA-INSPIRED ENCODING.
   * Combines all four methods for maximum semantic richness:
   * 1. Tokenize to codons (triplet chunking)
   * 2. Apply 6-frame processing
   * 3. Compute bidirectional state
   * 4. Return sense/antisense duality
   *
   * @param {string} text
   * @returns {object} tokens, codons, frames, bidirectional, sixFrame,
   *   sense, antisense, magnitude, coherence.
   */
  dnaEncode(text) {
    const tokens = this.tokenize(text, true);

    if (tokens.length === 0) {
      // Degenerate case: identity state everywhere.
      const identity = Hypercomplex.basis(this.dimension, 0, 1);
      return {
        tokens: [],
        codons: [],
        frames: [],
        bidirectional: identity,
        sixFrame: identity,
        sense: identity,
        antisense: identity.conjugate(),
        magnitude: 1,
        coherence: 1
      };
    }

    // Codon chunking.
    const codons = this.tokensToCodons(tokens, 3);

    // 6-frame processing.
    const frames = this.readingFrameStates(tokens, 3);

    // Bidirectional (boustrophedon).
    const bidirectional = this.bidirectionalState(tokens);

    // Six-frame combined.
    let sixFrame = Hypercomplex.basis(this.dimension, 0, 1);
    for (const frame of frames) {
      sixFrame = sixFrame.mul(frame.state);
    }
    sixFrame = sixFrame.normalize();

    // Final state combines bidirectional and six-frame.
    const finalState = bidirectional.mul(sixFrame).normalize();

    // Sense/Antisense duality.
    return {
      tokens,
      codons,
      frames,
      bidirectional,
      sixFrame,
      sense: finalState,
      antisense: finalState.conjugate(),
      magnitude: finalState.norm(),
      coherence: finalState.coherence ? finalState.coherence(finalState.conjugate()) : 1.0
    };
  }

  /**
   * DNA-inspired text comparison: compare two texts using their DNA encodings.
   *
   * @param {string} text1
   * @param {string} text2
   * @returns {{senseCoherence: number, crossCoherence: number, combinedScore: number}}
   */
  dnaCompare(text1, text2) {
    const enc1 = this.dnaEncode(text1);
    const enc2 = this.dnaEncode(text2);

    // Coherence between the two sense states.
    const senseCoherence = enc1.sense.coherence
      ? enc1.sense.coherence(enc2.sense)
      : this.fallbackCoherence(enc1.sense, enc2.sense);

    // Cross-coherence: sense1 with antisense2 (complementary match).
    const crossCoherence = enc1.sense.coherence
      ? enc1.sense.coherence(enc2.antisense)
      : this.fallbackCoherence(enc1.sense, enc2.antisense);

    return {
      senseCoherence,
      crossCoherence,
      // Average of direct and complementary matching.
      combinedScore: (senseCoherence + Math.abs(crossCoherence)) / 2
    };
  }

  /**
   * Fallback coherence (normalized dot product) if coherence() is not
   * defined on Hypercomplex. Guards against zero norms with `|| 1`.
   *
   * @param {Hypercomplex} h1
   * @param {Hypercomplex} h2
   * @returns {number}
   */
  fallbackCoherence(h1, h2) {
    let dot = 0;
    for (let i = 0; i < this.dimension; i++) {
      dot += h1.c[i] * h2.c[i];
    }
    return dot / (h1.norm() * h2.norm() || 1);
  }
}

module.exports = { SemanticBackend };