@aleph-ai/tinyaleph 1.5.7 → 1.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/alexander-module.js +1469 -0
- package/core/arithmetic-link-kernel.js +1338 -0
- package/core/index.js +95 -2
- package/examples/01-hello-world.js +69 -0
- package/examples/02-basic-hash.js +90 -0
- package/examples/02-observer-stack.js +385 -0
- package/examples/03-quantum-coin.js +136 -0
- package/examples/05-symbolic-resonance.js +146 -0
- package/examples/06-symbol-database.js +150 -0
- package/examples/07-semantic-inference.js +223 -0
- package/examples/08-compound-symbols.js +219 -0
- package/examples/README.md +170 -0
- package/examples/ai/01-embeddings.js +155 -0
- package/examples/ai/02-semantic-memory.js +243 -0
- package/examples/ai/03-reasoning.js +243 -0
- package/examples/ai/04-knowledge-graph.js +279 -0
- package/examples/ai/05-llm-integration.js +333 -0
- package/examples/ai/06-agent.js +294 -0
- package/examples/ai/07-hybrid-ai.js +223 -0
- package/examples/ai/08-entropy-reasoning.js +259 -0
- package/examples/ai/09-concept-learning.js +271 -0
- package/examples/ai/10-prompt-primes.js +312 -0
- package/examples/ai/11-rag.js +332 -0
- package/examples/ai/12-neuro-symbolic.js +321 -0
- package/examples/ai/README.md +80 -0
- package/examples/arithmetic-topology/01-legendre-symbol.js +78 -0
- package/examples/arithmetic-topology/02-redei-symbol.js +126 -0
- package/examples/arithmetic-topology/03-alk-kuramoto.js +138 -0
- package/examples/arithmetic-topology/04-alexander-module.js +117 -0
- package/examples/arithmetic-topology/05-signature-memory.js +118 -0
- package/examples/arithmetic-topology/README.md +291 -0
- package/examples/bioinformatics/01-dna-encoding.js +108 -0
- package/examples/bioinformatics/02-central-dogma.js +162 -0
- package/examples/bioinformatics/03-protein-folding.js +206 -0
- package/examples/bioinformatics/04-dna-computing.js +192 -0
- package/examples/bioinformatics/05-molecular-binding.js +209 -0
- package/examples/chat.js +105 -0
- package/examples/crt-homology/01-residue-encoding.js +87 -0
- package/examples/crt-homology/02-birkhoff-attention.js +100 -0
- package/examples/crt-homology/03-homology-loss.js +132 -0
- package/examples/crt-homology/04-crt-resoformer.js +132 -0
- package/examples/crt-homology/README.md +67 -0
- package/examples/crypto/01-password-hash.js +210 -0
- package/examples/crypto/02-key-derivation.js +210 -0
- package/examples/crypto/03-hmac.js +229 -0
- package/examples/crypto/04-file-integrity.js +263 -0
- package/examples/crypto/05-content-hash.js +263 -0
- package/examples/crypto/README.md +99 -0
- package/examples/demo-modular.js +223 -0
- package/examples/demo-two-layer.js +196 -0
- package/examples/discrete/01-integer-sine-table.js +120 -0
- package/examples/discrete/02-codebook-tunneling.js +118 -0
- package/examples/discrete/03-canonical-fusion.js +135 -0
- package/examples/discrete/04-tick-gate.js +139 -0
- package/examples/discrete/README.md +142 -0
- package/examples/formal-semantics/01-typed-terms.js +156 -0
- package/examples/formal-semantics/02-reduction.js +202 -0
- package/examples/formal-semantics/03-lambda-translation.js +206 -0
- package/examples/formal-semantics/04-enochian-language.js +257 -0
- package/examples/formal-semantics/README.md +98 -0
- package/examples/math/01-quaternions.js +237 -0
- package/examples/math/02-octonions.js +192 -0
- package/examples/math/03-prime-factorization.js +215 -0
- package/examples/math/04-vector-spaces.js +210 -0
- package/examples/math/05-gaussian-primes.js +234 -0
- package/examples/math/README.md +93 -0
- package/examples/physics/01-oscillator.js +177 -0
- package/examples/physics/02-lyapunov.js +201 -0
- package/examples/physics/03-collapse.js +183 -0
- package/examples/physics/04-kuramoto.js +212 -0
- package/examples/physics/05-entropy.js +226 -0
- package/examples/physics/05-sync-models.js +298 -0
- package/examples/physics/06-primeon-ladder.js +233 -0
- package/examples/physics/07-kuramoto-coupled-ladder.js +298 -0
- package/examples/physics/README.md +126 -0
- package/examples/resonance/01-prime-hilbert-space.js +140 -0
- package/examples/resonance/02-prime-resonance-network.js +221 -0
- package/examples/resonance/03-resoformer.js +349 -0
- package/examples/resonance/04-resoformer-training.js +329 -0
- package/examples/resonance/05-language-model.js +484 -0
- package/examples/resonance/README.md +238 -0
- package/examples/run-examples.js +417 -0
- package/examples/scientific/01-single-qubit.js +185 -0
- package/examples/scientific/02-two-qubit.js +209 -0
- package/examples/scientific/03-quantum-circuits.js +270 -0
- package/examples/scientific/04-measurement.js +229 -0
- package/examples/scientific/05-algorithms.js +245 -0
- package/examples/scientific/06-random.js +225 -0
- package/examples/scientific/07-wavefunction.js +192 -0
- package/examples/scientific/README.md +118 -0
- package/examples/semantic/01-vocabulary.js +186 -0
- package/examples/semantic/02-similarity.js +263 -0
- package/examples/semantic/03-word-algebra.js +295 -0
- package/examples/semantic/04-clustering.js +348 -0
- package/examples/semantic/05-classification.js +386 -0
- package/examples/semantic/06-dna-encoding.js +228 -0
- package/examples/semantic/07-search.js +304 -0
- package/examples/semantic/08-qa-system.js +278 -0
- package/examples/semantic/README.md +116 -0
- package/examples/topology/01-108-invariant.js +81 -0
- package/examples/topology/02-trefoil-constants.js +112 -0
- package/examples/topology/03-gauge-symmetry.js +112 -0
- package/examples/topology/04-free-energy-dynamics.js +124 -0
- package/examples/topology/README.md +129 -0
- package/index.js +32 -0
- package/modular.js +63 -2
- package/package.json +8 -3
- package/physics/alk-kuramoto.js +817 -0
- package/physics/index.js +23 -2
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @example Semantic Similarity
|
|
3
|
+
* @description Compute similarity between texts using hypercomplex embeddings
|
|
4
|
+
*
|
|
5
|
+
* TinyAleph computes semantic similarity using:
|
|
6
|
+
* - Hypercomplex inner products (generalized dot product)
|
|
7
|
+
* - Geometric distance in high-dimensional space
|
|
8
|
+
* - Structural alignment of prime representations
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const { SemanticBackend, Hypercomplex } = require('../../modular');
|
|
12
|
+
|
|
13
|
+
// ===========================================
|
|
14
|
+
// SETUP
|
|
15
|
+
// ===========================================
|
|
16
|
+
|
|
17
|
+
// Shared backend for every demo below: a 16-dimensional hypercomplex
// embedding space (see SemanticBackend in ../../modular).
const backend = new SemanticBackend({ dimension: 16 });

console.log('TinyAleph Semantic Similarity Example');
console.log('=====================================\n');
|
|
21
|
+
|
|
22
|
+
// ===========================================
|
|
23
|
+
// HELPER FUNCTIONS
|
|
24
|
+
// ===========================================
|
|
25
|
+
|
|
26
|
+
// Cosine similarity between two hypercomplex states
|
|
27
|
+
/**
 * Cosine similarity between two hypercomplex states.
 *
 * @param {{c: number[]}} state1 - First state (component array `c`).
 * @param {{c: number[]}} state2 - Second state.
 * @returns {number} Similarity in [-1, 1]; 0 when either state is the
 *   zero vector (the `|| 1` guard turns a 0/0 into 0/1).
 */
function cosineSimilarity(state1, state2) {
  // Iterate over the shared prefix so a dimension mismatch produces a
  // well-defined number instead of NaN from an undefined component.
  const len = Math.min(state1.c.length, state2.c.length);
  let dot = 0;
  let mag1 = 0;
  let mag2 = 0;
  for (let i = 0; i < len; i++) {
    dot += state1.c[i] * state2.c[i];
    mag1 += state1.c[i] * state1.c[i];
    mag2 += state2.c[i] * state2.c[i];
  }
  return dot / (Math.sqrt(mag1) * Math.sqrt(mag2) || 1);
}
|
|
36
|
+
|
|
37
|
+
// Euclidean distance
|
|
38
|
+
/**
 * Euclidean (L2) distance between two hypercomplex states.
 *
 * @param {{c: number[]}} state1 - First state (component array `c`).
 * @param {{c: number[]}} state2 - Second state.
 * @returns {number} Non-negative straight-line distance.
 */
function euclideanDistance(state1, state2) {
  const squared = state1.c.reduce((acc, value, idx) => {
    const delta = value - state2.c[idx];
    return acc + delta * delta;
  }, 0);
  return Math.sqrt(squared);
}
|
|
46
|
+
|
|
47
|
+
// Angular distance (arc cosine of similarity)
|
|
48
|
+
/**
 * Angular distance: arc cosine of the cosine similarity, normalized to
 * [0, 1] (0 = parallel, 1 = antiparallel).
 *
 * @param {{c: number[]}} state1 - First state.
 * @param {{c: number[]}} state2 - Second state.
 * @returns {number} Normalized angle between the two states.
 */
function angularDistance(state1, state2) {
  // Clamp before acos: floating-point similarity may leak past ±1.
  const clamped = Math.max(-1, Math.min(1, cosineSimilarity(state1, state2)));
  return Math.acos(clamped) / Math.PI;
}
|
|
52
|
+
|
|
53
|
+
// ===========================================
|
|
54
|
+
// BASIC SIMILARITY
|
|
55
|
+
// ===========================================
|
|
56
|
+
|
|
57
|
+
console.log('Basic Similarity Comparison:');
console.log('─'.repeat(50) + '\n');

// Word pairs ranging from closely related to unrelated.
const pairs = [
  ['cat', 'dog'],
  ['cat', 'kitten'],
  ['cat', 'automobile'],
  ['happy', 'joyful'],
  ['happy', 'sad'],
  ['king', 'queen'],
  ['computer', 'laptop']
];

for (const [text1, text2] of pairs) {
  // Embed each word, then compare with both metrics.
  const state1 = backend.textToOrderedState(text1);
  const state2 = backend.textToOrderedState(text2);

  const sim = cosineSimilarity(state1, state2);
  const dist = euclideanDistance(state1, state2);

  console.log(`"${text1}" vs "${text2}"`);
  console.log(`  Similarity: ${(sim * 100).toFixed(1)}%`);
  console.log(`  Distance: ${dist.toFixed(4)}\n`);
}

// ===========================================
// SENTENCE SIMILARITY
// ===========================================

console.log('═'.repeat(50));
console.log('Sentence Similarity:');
console.log('═'.repeat(50) + '\n');

// Sentence pairs: paraphrase, related topic, unrelated topic.
const sentencePairs = [
  ['The cat sat on the mat', 'A cat was sitting on the mat'],
  ['The cat sat on the mat', 'The dog slept on the rug'],
  ['The cat sat on the mat', 'Financial markets are volatile'],
  ['I love programming', 'I enjoy coding'],
  ['The weather is nice today', 'It is a beautiful day']
];

for (const [sent1, sent2] of sentencePairs) {
  const state1 = backend.textToOrderedState(sent1);
  const state2 = backend.textToOrderedState(sent2);

  const sim = cosineSimilarity(state1, state2);

  console.log(`"${sent1}"`);
  console.log(`"${sent2}"`);
  console.log(`  Similarity: ${(sim * 100).toFixed(1)}%\n`);
}

// ===========================================
// SIMILARITY MATRIX
// ===========================================

console.log('═'.repeat(50));
console.log('Similarity Matrix:');
console.log('═'.repeat(50) + '\n');

const concepts = ['cat', 'dog', 'bird', 'car', 'bus', 'computer'];
// Embed each concept once; reused for every matrix cell.
const states = concepts.map(c => backend.textToOrderedState(c));

// Print header — indent matches the padEnd(9) row labels below.
// NOTE(review): exact space count reconstructed from alignment; the diff
// renderer collapsed the original whitespace — verify against output.
process.stdout.write('         ');
for (const c of concepts) {
  process.stdout.write(c.padStart(8));
}
console.log();

// Print matrix (symmetric; diagonal is 100%).
for (let i = 0; i < concepts.length; i++) {
  process.stdout.write(concepts[i].padEnd(9));
  for (let j = 0; j < concepts.length; j++) {
    const sim = cosineSimilarity(states[i], states[j]);
    process.stdout.write((sim * 100).toFixed(0).padStart(7) + '%');
  }
  console.log();
}

// ===========================================
// NEAREST NEIGHBORS
// ===========================================

console.log('\n' + '═'.repeat(50));
console.log('Nearest Neighbor Search:');
console.log('═'.repeat(50) + '\n');

// Small thematic vocabulary: animals, vehicles, devices, emotions.
const vocabulary = [
  'cat', 'kitten', 'dog', 'puppy', 'bird', 'parrot',
  'car', 'automobile', 'vehicle', 'bus', 'truck',
  'computer', 'laptop', 'desktop', 'phone', 'tablet',
  'happy', 'joyful', 'sad', 'angry', 'calm'
];

// Pre-compute embeddings once so each query only embeds the query text.
const vocabStates = vocabulary.map(word => ({
  word,
  state: backend.textToOrderedState(word)
}));
|
|
156
|
+
|
|
157
|
+
/**
 * Return the k vocabulary entries most similar to the query text.
 *
 * @param {string} query - Free text to embed and compare.
 * @param {number} [k=5] - Number of neighbors to return.
 * @returns {{word: string, similarity: number}[]} Top-k matches, best first.
 */
function findNeighbors(query, k = 5) {
  const queryState = backend.textToOrderedState(query);

  // Score every precomputed vocabulary state against the query, then
  // rank descending and keep the top k.
  return vocabStates
    .map(({ word, state }) => ({
      word,
      similarity: cosineSimilarity(queryState, state)
    }))
    .sort((a, b) => b.similarity - a.similarity)
    .slice(0, k);
}
|
|
169
|
+
|
|
170
|
+
// Query terms that do NOT appear in the vocabulary, to show the search
// is semantic rather than exact-match.
const queries = ['feline', 'automobile', 'emotions', 'technology'];

for (const query of queries) {
  console.log(`Query: "${query}"`);
  const neighbors = findNeighbors(query);
  for (let i = 0; i < neighbors.length; i++) {
    console.log(`  ${i + 1}. ${neighbors[i].word} (${(neighbors[i].similarity * 100).toFixed(1)}%)`);
  }
  console.log();
}

// ===========================================
// DOCUMENT SIMILARITY
// ===========================================

console.log('═'.repeat(50));
console.log('Document Similarity:');
console.log('═'.repeat(50) + '\n');

// Five short documents: three AI-related, one finance, one quantum.
const documents = [
  { id: 'doc1', text: 'Machine learning is a subset of artificial intelligence that enables systems to learn from data.' },
  { id: 'doc2', text: 'Deep learning uses neural networks with many layers to process complex patterns.' },
  { id: 'doc3', text: 'Natural language processing helps computers understand human language.' },
  { id: 'doc4', text: 'The stock market experienced high volatility due to economic uncertainty.' },
  { id: 'doc5', text: 'Quantum computing uses quantum mechanics to perform computations.' }
];

const docStates = documents.map(d => ({
  ...d,
  state: backend.textToOrderedState(d.text)
}));

// Find most similar document pairs
console.log('Most similar document pairs:');
const docPairs = [];

// Compare every unordered pair (j > i avoids duplicates and self-pairs).
for (let i = 0; i < docStates.length; i++) {
  for (let j = i + 1; j < docStates.length; j++) {
    const sim = cosineSimilarity(docStates[i].state, docStates[j].state);
    docPairs.push({
      doc1: docStates[i].id,
      doc2: docStates[j].id,
      similarity: sim
    });
  }
}

docPairs.sort((a, b) => b.similarity - a.similarity);

for (let i = 0; i < Math.min(5, docPairs.length); i++) {
  const p = docPairs[i];
  console.log(`  ${p.doc1} ↔ ${p.doc2}: ${(p.similarity * 100).toFixed(1)}%`);
}

// ===========================================
// MULTIPLE METRICS
// ===========================================

console.log('\n' + '═'.repeat(50));
console.log('Comparing Different Metrics:');
console.log('═'.repeat(50) + '\n');

const metricTests = [
  ['machine learning', 'deep learning'],
  ['machine learning', 'quantum physics']
];

console.log('Metric comparison:');
// NOTE(review): header column spacing reconstructed to line up with the
// padEnd(40)/padStart widths below; the diff renderer collapsed the
// original whitespace — verify against actual output.
console.log('  Pair                                     Cosine  Euclidean  Angular');
console.log('─'.repeat(70));

for (const [t1, t2] of metricTests) {
  const s1 = backend.textToOrderedState(t1);
  const s2 = backend.textToOrderedState(t2);

  const cos = cosineSimilarity(s1, s2);
  const euc = euclideanDistance(s1, s2);
  const ang = angularDistance(s1, s2);

  const pairStr = `"${t1}" vs "${t2}"`;
  console.log(`${pairStr.padEnd(40)} ${(cos * 100).toFixed(1).padStart(6)}% ${euc.toFixed(4).padStart(8)} ${(ang * 100).toFixed(1).padStart(6)}%`);
}

// ===========================================
// KEY TAKEAWAYS
// ===========================================

console.log('\n' + '═'.repeat(50));
console.log('KEY TAKEAWAYS:');
console.log('1. Cosine similarity measures angular alignment');
console.log('2. Euclidean distance measures absolute difference');
console.log('3. Similar concepts cluster in hypercomplex space');
console.log('4. Similarity is structural, not just lexical');
console.log('5. Use similarity matrices for global analysis');
|
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @example Word Algebra
|
|
3
|
+
* @description Perform algebraic operations on word embeddings
|
|
4
|
+
*
|
|
5
|
+
* Just like word2vec's famous "king - man + woman = queen",
|
|
6
|
+
* TinyAleph supports algebraic operations on hypercomplex embeddings:
|
|
7
|
+
* - Addition: combine meanings
|
|
8
|
+
* - Subtraction: remove aspects
|
|
9
|
+
* - Composition: create complex concepts
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
const { SemanticBackend, Hypercomplex } = require('../../modular');
|
|
13
|
+
|
|
14
|
+
// ===========================================
|
|
15
|
+
// SETUP
|
|
16
|
+
// ===========================================
|
|
17
|
+
|
|
18
|
+
// Shared backend: 16-dimensional hypercomplex embedding space used by
// every algebraic demo below.
const backend = new SemanticBackend({ dimension: 16 });

console.log('TinyAleph Word Algebra Example');
console.log('===============================\n');
|
|
22
|
+
|
|
23
|
+
// ===========================================
|
|
24
|
+
// HELPER FUNCTIONS
|
|
25
|
+
// ===========================================
|
|
26
|
+
|
|
27
|
+
// Add two hypercomplex states
|
|
28
|
+
/**
 * Component-wise sum of two states, renormalized to unit magnitude so
 * repeated additions stay on the unit sphere.
 *
 * @param {{c: number[]}} state1 - First operand.
 * @param {{c: number[]}} state2 - Second operand (same dimension assumed).
 * @returns {Hypercomplex} Normalized sum.
 */
function add(state1, state2) {
  const sum = Hypercomplex.zero(state1.c.length);
  state1.c.forEach((value, idx) => {
    sum.c[idx] = value + state2.c[idx];
  });
  return sum.normalize();
}
|
|
35
|
+
|
|
36
|
+
// Subtract state2 from state1
|
|
37
|
+
/**
 * Component-wise difference (state1 - state2), renormalized to unit
 * magnitude.
 *
 * @param {{c: number[]}} state1 - Minuend.
 * @param {{c: number[]}} state2 - Subtrahend (same dimension assumed).
 * @returns {Hypercomplex} Normalized difference.
 */
function subtract(state1, state2) {
  const diff = Hypercomplex.zero(state1.c.length);
  state1.c.forEach((value, idx) => {
    diff.c[idx] = value - state2.c[idx];
  });
  return diff.normalize();
}
|
|
44
|
+
|
|
45
|
+
// Scale a state
|
|
46
|
+
/**
 * Multiply every component by a scalar. Deliberately NOT normalized, so
 * the result can be used in weighted blends before a final add().
 *
 * @param {{c: number[]}} state - State to scale.
 * @param {number} factor - Scalar weight.
 * @returns {Hypercomplex} Scaled (unnormalized) state.
 */
function scale(state, factor) {
  const scaled = Hypercomplex.zero(state.c.length);
  state.c.forEach((value, idx) => {
    scaled.c[idx] = value * factor;
  });
  return scaled;
}
|
|
53
|
+
|
|
54
|
+
// Cosine similarity
|
|
55
|
+
/**
 * Cosine similarity between two states' component arrays.
 *
 * @param {{c: number[]}} state1 - First state.
 * @param {{c: number[]}} state2 - Second state.
 * @returns {number} Similarity in [-1, 1]; 0 for zero-magnitude states
 *   (the `|| 1` guard avoids dividing by zero).
 */
function similarity(state1, state2) {
  let dot = 0;
  let normSq1 = 0;
  let normSq2 = 0;
  state1.c.forEach((value, idx) => {
    const other = state2.c[idx];
    dot += value * other;
    normSq1 += value * value;
    normSq2 += other * other;
  });
  return dot / (Math.sqrt(normSq1) * Math.sqrt(normSq2) || 1);
}
|
|
64
|
+
|
|
65
|
+
// Find nearest word to a state
|
|
66
|
+
/**
 * Scan a vocabulary and return the word whose embedding is most similar
 * to the target state.
 *
 * @param {{c: number[]}} targetState - State to match against.
 * @param {string[]} vocabulary - Candidate words (embedded on the fly).
 * @returns {{word: ?string, similarity: number}} Best match; word is
 *   null and similarity is -1 when the vocabulary is empty.
 */
function findNearest(targetState, vocabulary) {
  let bestWord = null;
  let bestScore = -1;

  for (const candidate of vocabulary) {
    const candidateState = backend.textToOrderedState(candidate);
    const score = similarity(targetState, candidateState);
    if (score > bestScore) {
      bestScore = score;
      bestWord = candidate;
    }
  }

  return { word: bestWord, similarity: bestScore };
}
|
|
81
|
+
|
|
82
|
+
// ===========================================
|
|
83
|
+
// CLASSIC WORD ANALOGIES
|
|
84
|
+
// ===========================================
|
|
85
|
+
|
|
86
|
+
console.log('Classic Word Analogies:');
console.log('─'.repeat(50) + '\n');

// Build a vocabulary for analogy testing: gender/royalty pairs,
// capital-country pairs, comparatives, and verb tenses.
const vocabulary = [
  'king', 'queen', 'man', 'woman', 'prince', 'princess',
  'boy', 'girl', 'father', 'mother', 'son', 'daughter',
  'Paris', 'France', 'Berlin', 'Germany', 'Tokyo', 'Japan', 'London', 'England',
  'big', 'bigger', 'biggest', 'small', 'smaller', 'smallest',
  'walk', 'walked', 'walking', 'run', 'ran', 'running'
];
|
|
97
|
+
|
|
98
|
+
// a:b :: c:?
|
|
99
|
+
/**
 * Solve the analogy a : b :: c : ? via vector arithmetic (b - a + c),
 * then return the nearest vocabulary word, excluding the three inputs.
 *
 * @param {string} a - Source of the relation.
 * @param {string} b - Target of the relation.
 * @param {string} c - Word to transport the relation onto.
 * @returns {{word: ?string, similarity: number}} Best candidate answer.
 */
function analogy(a, b, c) {
  const [stateA, stateB, stateC] = [a, b, c].map(
    word => backend.textToOrderedState(word)
  );

  // b - a captures the relation; adding c applies it to the new word.
  const target = add(subtract(stateB, stateA), stateC);

  // Exclude the inputs so the answer must be a genuinely new word.
  const candidates = vocabulary.filter(w => w !== a && w !== b && w !== c);
  return findNearest(target, candidates);
}
|
|
112
|
+
|
|
113
|
+
// Each triple [a, b, c] asks: a : b :: c : ?  (expected answer noted).
const analogies = [
  ['king', 'queen', 'man'],      // man:woman
  ['man', 'woman', 'king'],      // king:queen
  ['Paris', 'France', 'Berlin'], // Berlin:Germany
  ['walk', 'walked', 'run']      // run:ran
];

for (const [a, b, c] of analogies) {
  const result = analogy(a, b, c);
  console.log(`${a} : ${b} :: ${c} : ?`);
  console.log(`  → ${result.word} (${(result.similarity * 100).toFixed(1)}%)\n`);
}

// ===========================================
// CONCEPT ADDITION
// ===========================================

console.log('═'.repeat(50));
console.log('Concept Addition (A + B):');
console.log('═'.repeat(50) + '\n');

// Word pairs whose sum should land near a compound concept.
const additions = [
  ['fast', 'car'],
  ['smart', 'phone'],
  ['ice', 'cream'],
  ['machine', 'learning'],
  ['artificial', 'intelligence']
];

const additionVocab = [
  'sports car', 'racing', 'smartphone', 'mobile', 'dessert',
  'frozen', 'AI', 'algorithm', 'data science', 'automation'
];

for (const [word1, word2] of additions) {
  const state1 = backend.textToOrderedState(word1);
  const state2 = backend.textToOrderedState(word2);
  const combined = add(state1, state2);

  const nearest = findNearest(combined, additionVocab);

  console.log(`"${word1}" + "${word2}"`);
  console.log(`  → Nearest: "${nearest.word}" (${(nearest.similarity * 100).toFixed(1)}%)\n`);
}

// ===========================================
// CONCEPT SUBTRACTION
// ===========================================

console.log('═'.repeat(50));
console.log('Concept Subtraction (A - B):');
console.log('═'.repeat(50) + '\n');

// Pairs where subtracting the second word should strip an aspect.
const subtractions = [
  ['king', 'royalty'],
  ['smartphone', 'phone'],
  ['airplane', 'air'],
  ['breakfast', 'morning']
];

const subtractionVocab = [
  'man', 'leader', 'smart', 'computer', 'plane', 'vehicle',
  'food', 'meal', 'lunch', 'dinner'
];

for (const [word1, word2] of subtractions) {
  const state1 = backend.textToOrderedState(word1);
  const state2 = backend.textToOrderedState(word2);
  const diff = subtract(state1, state2);

  const nearest = findNearest(diff, subtractionVocab);

  console.log(`"${word1}" - "${word2}"`);
  console.log(`  → Nearest: "${nearest.word}" (${(nearest.similarity * 100).toFixed(1)}%)\n`);
}

// ===========================================
// WEIGHTED COMBINATIONS
// ===========================================

console.log('═'.repeat(50));
console.log('Weighted Combinations (αA + βB):');
console.log('═'.repeat(50) + '\n');

const baseState1 = backend.textToOrderedState('happy');
const baseState2 = backend.textToOrderedState('sad');

const emotionVocab = ['joyful', 'content', 'neutral', 'melancholy', 'depressed', 'mixed', 'bittersweet'];

console.log('Blending "happy" and "sad" with different weights:\n');

// [weight of happy, weight of sad], sweeping from pure happy to pure sad.
const weights = [
  [1.0, 0.0],
  [0.8, 0.2],
  [0.6, 0.4],
  [0.5, 0.5],
  [0.4, 0.6],
  [0.2, 0.8],
  [0.0, 1.0]
];

for (const [w1, w2] of weights) {
  // scale() leaves magnitude unnormalized; add() renormalizes the blend.
  const scaled1 = scale(baseState1, w1);
  const scaled2 = scale(baseState2, w2);
  const combined = add(scaled1, scaled2);

  const nearest = findNearest(combined, emotionVocab);

  console.log(`  ${w1.toFixed(1)} × happy + ${w2.toFixed(1)} × sad → "${nearest.word}"`);
}

// ===========================================
// VECTOR ARITHMETIC PROPERTIES
// ===========================================

console.log('\n' + '═'.repeat(50));
console.log('Vector Arithmetic Properties:');
console.log('═'.repeat(50) + '\n');

const A = backend.textToOrderedState('alpha');
const B = backend.textToOrderedState('beta');
const C = backend.textToOrderedState('gamma');

// Commutativity: A + B = B + A
const AB = add(A, B);
const BA = add(B, A);
const commutative = similarity(AB, BA);
console.log(`Commutativity (A + B = B + A): ${(commutative * 100).toFixed(1)}%`);

// Associativity: (A + B) + C = A + (B + C)
// NOTE(review): add() normalizes after every step, so associativity holds
// only approximately — which is why similarity is measured, not equality.
const AB_C = add(add(A, B), C);
const A_BC = add(A, add(B, C));
const associative = similarity(AB_C, A_BC);
console.log(`Associativity ((A+B)+C = A+(B+C)): ${(associative * 100).toFixed(1)}%`);

// Zero element: A + 0 = A
const zero = Hypercomplex.zero(16);
const A_zero = add(A, zero);
const identity = similarity(A, A_zero);
console.log(`Identity (A + 0 = A): ${(identity * 100).toFixed(1)}%`);

// ===========================================
// COMPLEX ALGEBRAIC EXPRESSIONS
// ===========================================

console.log('\n' + '═'.repeat(50));
console.log('Complex Algebraic Expressions:');
console.log('═'.repeat(50) + '\n');

// (technology + science) - (math)
const tech = backend.textToOrderedState('technology');
const science = backend.textToOrderedState('science');
const math = backend.textToOrderedState('math');

const techScience = add(tech, science);
const appliedScience = subtract(techScience, math);

const scienceVocab = ['engineering', 'physics', 'chemistry', 'biology', 'invention', 'research'];
const result1 = findNearest(appliedScience, scienceVocab);
console.log(`(technology + science) - math = "${result1.word}"`);

// (animal + water) - (land)
const animal = backend.textToOrderedState('animal');
const water = backend.textToOrderedState('water');
const land = backend.textToOrderedState('land');

const aquaticCalc = subtract(add(animal, water), land);
const animalVocab = ['fish', 'whale', 'dolphin', 'shark', 'mammal', 'bird'];
const result2 = findNearest(aquaticCalc, animalVocab);
console.log(`(animal + water) - land = "${result2.word}"`);

// ===========================================
// KEY TAKEAWAYS
// ===========================================

console.log('\n' + '═'.repeat(50));
console.log('KEY TAKEAWAYS:');
console.log('1. Embeddings support algebraic operations');
console.log('2. Analogies work via: b - a + c = d');
console.log('3. Addition combines semantic features');
console.log('4. Subtraction removes semantic aspects');
console.log('5. Weighted combinations blend meanings');
console.log('6. Vector space properties mostly hold');
|