@aleph-ai/tinyaleph 1.3.0 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +423 -12
- package/backends/cryptographic/index.js +455 -2
- package/core/beacon.js +735 -0
- package/core/crt-homology.js +1004 -0
- package/core/enochian-vocabulary.js +910 -0
- package/core/enochian.js +744 -0
- package/core/errors.js +587 -0
- package/core/hilbert.js +651 -1
- package/core/index.js +86 -1
- package/core/lambda.js +284 -33
- package/core/logger.js +350 -0
- package/core/prime.js +136 -1
- package/core/quaternion-semantics.js +623 -0
- package/core/reduction.js +391 -1
- package/core/rformer-crt.js +892 -0
- package/core/topology.js +655 -0
- package/docs/README.md +54 -0
- package/docs/reference/07-topology.md +257 -0
- package/docs/reference/08-observer.md +421 -0
- package/docs/reference/09-crt-homology.md +369 -0
- package/modular.js +231 -3
- package/package.json +1 -1
|
@@ -0,0 +1,421 @@
|
|
|
1
|
+
# Observer Module Reference
|
|
2
|
+
|
|
3
|
+
The observer module provides components for building sentient observer systems based on the whitepaper architecture. It includes oscillator dynamics, semantic fields, temporal processing, symbolic grounding, and validation assays.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```javascript
|
|
8
|
+
const observer = require('@aleph-ai/tinyaleph/observer');
|
|
9
|
+
|
|
10
|
+
// Or import specific components:
|
|
11
|
+
const {
|
|
12
|
+
SedenionMemoryField,
|
|
13
|
+
PRSCLayer,
|
|
14
|
+
TemporalLayer,
|
|
15
|
+
SymbolicSMF,
|
|
16
|
+
SymbolicTemporalLayer,
|
|
17
|
+
AssaySuite
|
|
18
|
+
} = require('@aleph-ai/tinyaleph/observer');
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
## Core Components
|
|
24
|
+
|
|
25
|
+
### SedenionMemoryField (SMF)
|
|
26
|
+
|
|
27
|
+
16-dimensional semantic orientation field using sedenion algebra.
|
|
28
|
+
|
|
29
|
+
```javascript
|
|
30
|
+
const { SedenionMemoryField, SMF_AXES } = require('@aleph-ai/tinyaleph/observer');
|
|
31
|
+
|
|
32
|
+
// Create uniform field
|
|
33
|
+
const smf = SedenionMemoryField.uniform();
|
|
34
|
+
|
|
35
|
+
// Create basis state (single axis activated)
|
|
36
|
+
const wisdom = SedenionMemoryField.basis('wisdom');
|
|
37
|
+
|
|
38
|
+
// Set/get axes by name
|
|
39
|
+
smf.set('coherence', 0.8);
|
|
40
|
+
console.log(smf.get('wisdom')); // 0.0
|
|
41
|
+
|
|
42
|
+
// Key operations
|
|
43
|
+
smf.normalize(); // Normalize to unit length
|
|
44
|
+
const entropy = smf.entropy(); // Shannon entropy
|
|
45
|
+
const coh = smf1.coherence(smf2); // Cosine similarity
|
|
46
|
+
const mid = smf1.slerp(smf2, 0.5); // Spherical interpolation
|
|
47
|
+
|
|
48
|
+
// Get dominant axes
|
|
49
|
+
const dominant = smf.dominantAxes(3);
|
|
50
|
+
// [{ name: 'coherence', value: 0.8, index: 0 }, ...]
|
|
51
|
+
|
|
52
|
+
// Find nearest codebook attractor (64-attractor codebook)
|
|
53
|
+
const nearest = smf.nearestCodebook();
|
|
54
|
+
// { attractor: {...}, index: 12, distance: 0.15 }
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
**SMF_AXES** (16 named dimensions):
|
|
58
|
+
- coherence, identity, intention, emotion
|
|
59
|
+
- wisdom, temporal, relation, creation
|
|
60
|
+
- destruction, balance, growth, form
|
|
61
|
+
- void, truth, beauty, love
|
|
62
|
+
|
|
63
|
+
---
|
|
64
|
+
|
|
65
|
+
### PRSCLayer
|
|
66
|
+
|
|
67
|
+
Prime Resonance Semantic Coherence - bank of prime-indexed oscillators.
|
|
68
|
+
|
|
69
|
+
```javascript
|
|
70
|
+
const { PRSCLayer, PrimeOscillator } = require('@aleph-ai/tinyaleph/observer');
|
|
71
|
+
|
|
72
|
+
// Create with first N primes
|
|
73
|
+
const prsc = new PRSCLayer(10);
|
|
74
|
+
|
|
75
|
+
// Or with specific primes
|
|
76
|
+
const prsc = new PRSCLayer([2, 3, 5, 7, 11]);
|
|
77
|
+
|
|
78
|
+
// Excite specific primes
|
|
79
|
+
prsc.excite([3, 5], 0.8);
|
|
80
|
+
|
|
81
|
+
// Tick dynamics
|
|
82
|
+
prsc.tick(0.1);
|
|
83
|
+
|
|
84
|
+
// Get coherence metrics
|
|
85
|
+
const coherence = prsc.globalCoherence(); // 0-1
|
|
86
|
+
const r = prsc.orderParameter(); // Kuramoto order parameter
|
|
87
|
+
const entropy = prsc.amplitudeEntropy(); // Distribution entropy
|
|
88
|
+
|
|
89
|
+
// Get oscillator state
|
|
90
|
+
const phases = prsc.getPhases();
|
|
91
|
+
const active = prsc.activePrimes(0.1); // Primes with amplitude > 0.1
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
---
|
|
95
|
+
|
|
96
|
+
### TemporalLayer
|
|
97
|
+
|
|
98
|
+
Moment classification and subjective time tracking.
|
|
99
|
+
|
|
100
|
+
```javascript
|
|
101
|
+
const { TemporalLayer, Moment } = require('@aleph-ai/tinyaleph/observer');
|
|
102
|
+
|
|
103
|
+
const temporal = new TemporalLayer({
|
|
104
|
+
coherenceThreshold: 0.7,
|
|
105
|
+
entropyMin: 0.1,
|
|
106
|
+
entropyMax: 0.9,
|
|
107
|
+
onMoment: (moment) => console.log('New moment:', moment.id)
|
|
108
|
+
});
|
|
109
|
+
|
|
110
|
+
// Update with current state
|
|
111
|
+
temporal.update({
|
|
112
|
+
coherence: 0.8,
|
|
113
|
+
entropy: 0.4,
|
|
114
|
+
phases: [0.1, 0.2, 0.3],
|
|
115
|
+
activePrimes: [2, 3, 5]
|
|
116
|
+
});
|
|
117
|
+
|
|
118
|
+
// Get statistics
|
|
119
|
+
const stats = temporal.getStats();
|
|
120
|
+
// { momentCount, subjectiveTime, objectiveTime, temporalRatio }
|
|
121
|
+
|
|
122
|
+
// Get recent moments
|
|
123
|
+
const recent = temporal.recentMoments(10);
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
---
|
|
127
|
+
|
|
128
|
+
### AgencyLayer
|
|
129
|
+
|
|
130
|
+
Goal management, attention, and intention tracking.
|
|
131
|
+
|
|
132
|
+
```javascript
|
|
133
|
+
const { AgencyLayer, Goal, AttentionFocus } = require('@aleph-ai/tinyaleph/observer');
|
|
134
|
+
|
|
135
|
+
const agency = new AgencyLayer({
|
|
136
|
+
maxFoci: 5,
|
|
137
|
+
maxGoals: 10
|
|
138
|
+
});
|
|
139
|
+
|
|
140
|
+
// Add attention focus
|
|
141
|
+
agency.addOrUpdateFocus({
|
|
142
|
+
target: 'task_completion',
|
|
143
|
+
type: 'goal',
|
|
144
|
+
intensity: 0.8
|
|
145
|
+
});
|
|
146
|
+
|
|
147
|
+
// Create goal
|
|
148
|
+
const goal = agency.maybeCreateGoal({
|
|
149
|
+
description: 'Complete analysis',
|
|
150
|
+
priority: 0.9
|
|
151
|
+
});
|
|
152
|
+
|
|
153
|
+
// Track progress
|
|
154
|
+
agency.updateGoalProgress(goal.id, 0.5);
|
|
155
|
+
|
|
156
|
+
// Get top priorities
|
|
157
|
+
const topFocus = agency.getTopFocus();
|
|
158
|
+
const topGoal = agency.getTopGoal();
|
|
159
|
+
```
|
|
160
|
+
|
|
161
|
+
---
|
|
162
|
+
|
|
163
|
+
### BoundaryLayer
|
|
164
|
+
|
|
165
|
+
Self-other differentiation with sensory/motor channels.
|
|
166
|
+
|
|
167
|
+
```javascript
|
|
168
|
+
const { BoundaryLayer, SensoryChannel, ObjectivityGate } = require('@aleph-ai/tinyaleph/observer');
|
|
169
|
+
|
|
170
|
+
const boundary = new BoundaryLayer();
|
|
171
|
+
|
|
172
|
+
// Process input
|
|
173
|
+
const result = boundary.processInput('text_input', 'Hello, world!');
|
|
174
|
+
|
|
175
|
+
// Queue output (with objectivity gate)
|
|
176
|
+
const output = boundary.queueOutput('text_output', 'This is a response.');
|
|
177
|
+
// { queued: true, gateResult: { R: 0.85, shouldBroadcast: true } }
|
|
178
|
+
```
|
|
179
|
+
|
|
180
|
+
---
|
|
181
|
+
|
|
182
|
+
### SafetyLayer
|
|
183
|
+
|
|
184
|
+
Constraint monitoring and violation detection.
|
|
185
|
+
|
|
186
|
+
```javascript
|
|
187
|
+
const { SafetyLayer, SafetyConstraint } = require('@aleph-ai/tinyaleph/observer');
|
|
188
|
+
|
|
189
|
+
const safety = new SafetyLayer();
|
|
190
|
+
|
|
191
|
+
// Check current state
|
|
192
|
+
const result = safety.checkConstraints({
|
|
193
|
+
coherence: 0.5,
|
|
194
|
+
entropy: 0.5,
|
|
195
|
+
totalAmplitude: 1.0
|
|
196
|
+
});
|
|
197
|
+
// { safe: true, violations: [], alertLevel: 'normal' }
|
|
198
|
+
|
|
199
|
+
// Add custom constraint
|
|
200
|
+
safety.addConstraint(new SafetyConstraint({
|
|
201
|
+
name: 'max_amplitude',
|
|
202
|
+
condition: (state) => state.totalAmplitude > 10
|
|
203
|
+
}));
|
|
204
|
+
```
|
|
205
|
+
|
|
206
|
+
---
|
|
207
|
+
|
|
208
|
+
## Symbolic Extensions
|
|
209
|
+
|
|
210
|
+
### SymbolicSMF
|
|
211
|
+
|
|
212
|
+
SedenionMemoryField with symbol grounding.
|
|
213
|
+
|
|
214
|
+
```javascript
|
|
215
|
+
const { SymbolicSMF, SMFSymbolMapper, AXIS_SYMBOL_MAPPING } = require('@aleph-ai/tinyaleph/observer');
|
|
216
|
+
const { symbolDatabase } = require('@aleph-ai/tinyaleph/core/symbols');
|
|
217
|
+
|
|
218
|
+
const smf = new SymbolicSMF(symbolDatabase);
|
|
219
|
+
|
|
220
|
+
// Excite from symbol
|
|
221
|
+
smf.exciteFromSymbol('fire');
|
|
222
|
+
|
|
223
|
+
// Ground state in symbols
|
|
224
|
+
const grounded = smf.groundInSymbols(3);
|
|
225
|
+
// [{ symbol: {...}, axis: 'creation', contribution: 0.8 }, ...]
|
|
226
|
+
|
|
227
|
+
// Find resonant symbols
|
|
228
|
+
const resonant = smf.findResonantSymbols(5);
|
|
229
|
+
|
|
230
|
+
// Get semantic orientation
|
|
231
|
+
const orientation = smf.getSemanticOrientation();
|
|
232
|
+
// { dominant: [...], grounded: [...], entropy: 0.5 }
|
|
233
|
+
```
|
|
234
|
+
|
|
235
|
+
**AXIS_SYMBOL_MAPPING**: Maps each of the 16 SMF axes to archetypal symbols.
|
|
236
|
+
|
|
237
|
+
---
|
|
238
|
+
|
|
239
|
+
### SymbolicTemporalLayer
|
|
240
|
+
|
|
241
|
+
I-Ching hexagram-based moment classification.
|
|
242
|
+
|
|
243
|
+
```javascript
|
|
244
|
+
const { SymbolicTemporalLayer, SymbolicMoment, HEXAGRAM_ARCHETYPES } = require('@aleph-ai/tinyaleph/observer');
|
|
245
|
+
|
|
246
|
+
const temporal = new SymbolicTemporalLayer({
|
|
247
|
+
onSymbolicMoment: (moment, classification) => {
|
|
248
|
+
console.log(`Hexagram ${classification.hexagramIndex}: ${classification.archetype.name}`);
|
|
249
|
+
},
|
|
250
|
+
onHexagramTransition: (transition) => {
|
|
251
|
+
console.log(`Transition: ${transition.from} → ${transition.to}`);
|
|
252
|
+
}
|
|
253
|
+
});
|
|
254
|
+
|
|
255
|
+
// Update creates classified moments
|
|
256
|
+
temporal.update({
|
|
257
|
+
coherence: 0.8,
|
|
258
|
+
entropy: 0.3,
|
|
259
|
+
phases: [0.1, 0.2, 0.3, 0.4, 0.5, 0.6],
|
|
260
|
+
activePrimes: [2, 3, 5]
|
|
261
|
+
});
|
|
262
|
+
|
|
263
|
+
// Get I-Ching reading
|
|
264
|
+
const reading = temporal.getIChingReading();
|
|
265
|
+
|
|
266
|
+
// Get dominant archetypes
|
|
267
|
+
const archetypes = temporal.getDominantArchetypes(5);
|
|
268
|
+
|
|
269
|
+
// Predict next archetype
|
|
270
|
+
const prediction = temporal.predictNextArchetype();
|
|
271
|
+
```
|
|
272
|
+
|
|
273
|
+
**HEXAGRAM_ARCHETYPES**: 64 hexagrams mapped to archetypal meanings:
|
|
274
|
+
- 0: Creative (pure yang, heaven)
|
|
275
|
+
- 1: Receptive (pure yin, earth)
|
|
276
|
+
- 2: Difficulty (initial obstacles)
|
|
277
|
+
- ...and so on through hexagram index 63
|
|
278
|
+
|
|
279
|
+
---
|
|
280
|
+
|
|
281
|
+
### SymbolicPatternDetector
|
|
282
|
+
|
|
283
|
+
Narrative pattern detection (hero's journey, transformation, etc.).
|
|
284
|
+
|
|
285
|
+
```javascript
|
|
286
|
+
const { SymbolicPatternDetector, SymbolicMoment } = require('@aleph-ai/tinyaleph/observer');
|
|
287
|
+
|
|
288
|
+
const detector = new SymbolicPatternDetector();
|
|
289
|
+
|
|
290
|
+
// Create moment sequence
|
|
291
|
+
const moments = [
|
|
292
|
+
new SymbolicMoment({ coherence: 0.5, hexagramIndex: 2 }),
|
|
293
|
+
new SymbolicMoment({ coherence: 0.3, hexagramIndex: 29 }),
|
|
294
|
+
new SymbolicMoment({ coherence: 0.7, hexagramIndex: 50 }),
|
|
295
|
+
new SymbolicMoment({ coherence: 0.9, hexagramIndex: 1 })
|
|
296
|
+
];
|
|
297
|
+
|
|
298
|
+
// Detect narrative patterns
|
|
299
|
+
const narratives = detector.detectNarrativePatterns(moments);
|
|
300
|
+
// [{ type: 'hero_journey', confidence: 0.8, startIndex: 0, endIndex: 3 }, ...]
|
|
301
|
+
```
|
|
302
|
+
|
|
303
|
+
---
|
|
304
|
+
|
|
305
|
+
## Evaluation Assays
|
|
306
|
+
|
|
307
|
+
Four validation tests from whitepaper Section 15.
|
|
308
|
+
|
|
309
|
+
### AssaySuite
|
|
310
|
+
|
|
311
|
+
```javascript
|
|
312
|
+
const { AssaySuite } = require('@aleph-ai/tinyaleph/observer');
|
|
313
|
+
|
|
314
|
+
// Create suite with observer core
|
|
315
|
+
const suite = new AssaySuite(observerCore);
|
|
316
|
+
|
|
317
|
+
// Run all assays
|
|
318
|
+
const results = await suite.runAll();
|
|
319
|
+
// {
|
|
320
|
+
// timestamp: '...',
|
|
321
|
+
// assays: [ resultA, resultB, resultC, resultD ],
|
|
322
|
+
// summary: { passed: 4, total: 4, score: 1.0, allPassed: true }
|
|
323
|
+
// }
|
|
324
|
+
|
|
325
|
+
// Run single assay
|
|
326
|
+
const resultA = await suite.runSingle('A', { duration: 100 });
|
|
327
|
+
```
|
|
328
|
+
|
|
329
|
+
### Assay A: Time Dilation
|
|
330
|
+
|
|
331
|
+
Tests whether subjective time dilates with coherence.
|
|
332
|
+
τ = ∫ C(t) dt / ∫ dt
|
|
333
|
+
|
|
334
|
+
```javascript
|
|
335
|
+
const { TimeDilationAssay } = require('@aleph-ai/tinyaleph/observer');
|
|
336
|
+
|
|
337
|
+
const assay = new TimeDilationAssay(observerCore);
|
|
338
|
+
const result = await assay.run({
|
|
339
|
+
duration: 100,
|
|
340
|
+
lowCoherenceTarget: 0.3,
|
|
341
|
+
highCoherenceTarget: 0.8
|
|
342
|
+
});
|
|
343
|
+
// { passed: true, dilationFactor: 1.5, interpretation: '...' }
|
|
344
|
+
```
|
|
345
|
+
|
|
346
|
+
### Assay B: Memory Continuity
|
|
347
|
+
|
|
348
|
+
Tests identity persistence under perturbation.
|
|
349
|
+
|
|
350
|
+
```javascript
|
|
351
|
+
const { MemoryContinuityAssay } = require('@aleph-ai/tinyaleph/observer');
|
|
352
|
+
|
|
353
|
+
const assay = new MemoryContinuityAssay(observerCore);
|
|
354
|
+
const result = await assay.run({
|
|
355
|
+
perturbationStrength: 0.5,
|
|
356
|
+
recoveryTicks: 50
|
|
357
|
+
});
|
|
358
|
+
// { passed: true, identityScore: 0.82, components: {...} }
|
|
359
|
+
```
|
|
360
|
+
|
|
361
|
+
### Assay C: Agency Under Constraint
|
|
362
|
+
|
|
363
|
+
Tests goal-directed behavior under resource limits.
|
|
364
|
+
|
|
365
|
+
```javascript
|
|
366
|
+
const { AgencyConstraintAssay } = require('@aleph-ai/tinyaleph/observer');
|
|
367
|
+
|
|
368
|
+
const assay = new AgencyConstraintAssay(observerCore);
|
|
369
|
+
const result = await assay.run({
|
|
370
|
+
constraintLevel: 0.5,
|
|
371
|
+
goalDifficulty: 0.5,
|
|
372
|
+
maxTicks: 100
|
|
373
|
+
});
|
|
374
|
+
// { passed: true, goal: { achieved: true, progress: 1.0 }, metrics: {...} }
|
|
375
|
+
```
|
|
376
|
+
|
|
377
|
+
### Assay D: Non-Commutative Meaning
|
|
378
|
+
|
|
379
|
+
Tests whether order matters (A→B→C ≠ C→B→A).
|
|
380
|
+
|
|
381
|
+
```javascript
|
|
382
|
+
const { NonCommutativeMeaningAssay } = require('@aleph-ai/tinyaleph/observer');
|
|
383
|
+
|
|
384
|
+
const assay = new NonCommutativeMeaningAssay(observerCore);
|
|
385
|
+
const result = await assay.run({
|
|
386
|
+
conceptSequence: ['observe', 'analyze', 'conclude']
|
|
387
|
+
});
|
|
388
|
+
// { passed: true, nonCommScore: 0.15, signatures: { forward, reverse, scrambled } }
|
|
389
|
+
```
|
|
390
|
+
|
|
391
|
+
---
|
|
392
|
+
|
|
393
|
+
## Complete Export List
|
|
394
|
+
|
|
395
|
+
```javascript
|
|
396
|
+
// Core components
|
|
397
|
+
PrimeOscillator, PRSCLayer, EntanglementDetector, coherenceKernel
|
|
398
|
+
TickGate, StabilizationController, HolographicEncoder, HQE
|
|
399
|
+
SedenionMemoryField, SMF_AXES, AXIS_INDEX
|
|
400
|
+
Moment, TemporalLayer, TemporalPatternDetector
|
|
401
|
+
AttentionFocus, Goal, Action, Intent, AgencyLayer
|
|
402
|
+
SensoryChannel, MotorChannel, EnvironmentalModel, SelfModel, BoundaryLayer
|
|
403
|
+
EntangledPair, Phrase, EntanglementLayer
|
|
404
|
+
SafetyConstraint, ViolationEvent, SafetyMonitor, DEFAULT_CONSTRAINTS
|
|
405
|
+
|
|
406
|
+
// Symbolic extensions
|
|
407
|
+
SymbolicSMF, SMFSymbolMapper, smfMapper, AXIS_SYMBOL_MAPPING, TAG_TO_AXIS
|
|
408
|
+
SymbolicMoment, SymbolicTemporalLayer, SymbolicPatternDetector, HEXAGRAM_ARCHETYPES
|
|
409
|
+
|
|
410
|
+
// Evaluation assays
|
|
411
|
+
TimeDilationAssay, MemoryContinuityAssay, AgencyConstraintAssay
|
|
412
|
+
NonCommutativeMeaningAssay, AssaySuite
|
|
413
|
+
```
|
|
414
|
+
|
|
415
|
+
---
|
|
416
|
+
|
|
417
|
+
## Related Documentation
|
|
418
|
+
|
|
419
|
+
- [Theory: Temporal Emergence](../theory/09-temporal-emergence.md)
|
|
420
|
+
- [Theory: Quaternionic Memory](../theory/10-quaternionic-memory.md)
|
|
421
|
+
- [Design: Sentient Observer](../design/SENTIENT_OBSERVER_DESIGN.md)
|