@sparkleideas/plugins 3.0.0-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. package/README.md +401 -0
  2. package/__tests__/collection-manager.test.ts +332 -0
  3. package/__tests__/dependency-graph.test.ts +434 -0
  4. package/__tests__/enhanced-plugin-registry.test.ts +488 -0
  5. package/__tests__/plugin-registry.test.ts +368 -0
  6. package/__tests__/ruvector-bridge.test.ts +2429 -0
  7. package/__tests__/ruvector-integration.test.ts +1602 -0
  8. package/__tests__/ruvector-migrations.test.ts +1099 -0
  9. package/__tests__/ruvector-quantization.test.ts +846 -0
  10. package/__tests__/ruvector-streaming.test.ts +1088 -0
  11. package/__tests__/sdk.test.ts +325 -0
  12. package/__tests__/security.test.ts +348 -0
  13. package/__tests__/utils/ruvector-test-utils.ts +860 -0
  14. package/examples/plugin-creator/index.ts +636 -0
  15. package/examples/plugin-creator/plugin-creator.test.ts +312 -0
  16. package/examples/ruvector/README.md +288 -0
  17. package/examples/ruvector/attention-patterns.ts +394 -0
  18. package/examples/ruvector/basic-usage.ts +288 -0
  19. package/examples/ruvector/docker-compose.yml +75 -0
  20. package/examples/ruvector/gnn-analysis.ts +501 -0
  21. package/examples/ruvector/hyperbolic-hierarchies.ts +557 -0
  22. package/examples/ruvector/init-db.sql +119 -0
  23. package/examples/ruvector/quantization.ts +680 -0
  24. package/examples/ruvector/self-learning.ts +447 -0
  25. package/examples/ruvector/semantic-search.ts +576 -0
  26. package/examples/ruvector/streaming-large-data.ts +507 -0
  27. package/examples/ruvector/transactions.ts +594 -0
  28. package/examples/ruvector-plugins/hook-pattern-library.ts +486 -0
  29. package/examples/ruvector-plugins/index.ts +79 -0
  30. package/examples/ruvector-plugins/intent-router.ts +354 -0
  31. package/examples/ruvector-plugins/mcp-tool-optimizer.ts +424 -0
  32. package/examples/ruvector-plugins/reasoning-bank.ts +657 -0
  33. package/examples/ruvector-plugins/ruvector-plugins.test.ts +518 -0
  34. package/examples/ruvector-plugins/semantic-code-search.ts +498 -0
  35. package/examples/ruvector-plugins/shared/index.ts +20 -0
  36. package/examples/ruvector-plugins/shared/vector-utils.ts +257 -0
  37. package/examples/ruvector-plugins/sona-learning.ts +445 -0
  38. package/package.json +97 -0
  39. package/src/collections/collection-manager.ts +661 -0
  40. package/src/collections/index.ts +56 -0
  41. package/src/collections/official/index.ts +1040 -0
  42. package/src/core/base-plugin.ts +416 -0
  43. package/src/core/plugin-interface.ts +215 -0
  44. package/src/hooks/index.ts +685 -0
  45. package/src/index.ts +378 -0
  46. package/src/integrations/agentic-flow.ts +743 -0
  47. package/src/integrations/index.ts +88 -0
  48. package/src/integrations/ruvector/ARCHITECTURE.md +1245 -0
  49. package/src/integrations/ruvector/attention-advanced.ts +1040 -0
  50. package/src/integrations/ruvector/attention-executor.ts +782 -0
  51. package/src/integrations/ruvector/attention-mechanisms.ts +757 -0
  52. package/src/integrations/ruvector/attention.ts +1063 -0
  53. package/src/integrations/ruvector/gnn.ts +3050 -0
  54. package/src/integrations/ruvector/hyperbolic.ts +1948 -0
  55. package/src/integrations/ruvector/index.ts +394 -0
  56. package/src/integrations/ruvector/migrations/001_create_extension.sql +135 -0
  57. package/src/integrations/ruvector/migrations/002_create_vector_tables.sql +259 -0
  58. package/src/integrations/ruvector/migrations/003_create_indices.sql +328 -0
  59. package/src/integrations/ruvector/migrations/004_create_functions.sql +598 -0
  60. package/src/integrations/ruvector/migrations/005_create_attention_functions.sql +654 -0
  61. package/src/integrations/ruvector/migrations/006_create_gnn_functions.sql +728 -0
  62. package/src/integrations/ruvector/migrations/007_create_hyperbolic_functions.sql +762 -0
  63. package/src/integrations/ruvector/migrations/index.ts +35 -0
  64. package/src/integrations/ruvector/migrations/migrations.ts +647 -0
  65. package/src/integrations/ruvector/quantization.ts +2036 -0
  66. package/src/integrations/ruvector/ruvector-bridge.ts +2000 -0
  67. package/src/integrations/ruvector/self-learning.ts +2376 -0
  68. package/src/integrations/ruvector/streaming.ts +1737 -0
  69. package/src/integrations/ruvector/types.ts +1945 -0
  70. package/src/providers/index.ts +643 -0
  71. package/src/registry/dependency-graph.ts +568 -0
  72. package/src/registry/enhanced-plugin-registry.ts +994 -0
  73. package/src/registry/plugin-registry.ts +604 -0
  74. package/src/sdk/index.ts +563 -0
  75. package/src/security/index.ts +594 -0
  76. package/src/types/index.ts +446 -0
  77. package/src/workers/index.ts +700 -0
  78. package/tmp.json +0 -0
  79. package/tsconfig.json +25 -0
  80. package/vitest.config.ts +23 -0
@@ -0,0 +1,654 @@
1
+ -- ============================================================================
2
+ -- Migration 005: Create Attention Mechanism Functions
3
+ -- RuVector PostgreSQL Bridge - Claude Flow V3
4
+ --
5
+ -- Creates SQL functions for attention mechanisms including multi-head attention,
6
+ -- flash attention, and sparse attention patterns.
7
+ -- Compatible with PostgreSQL 14+ and pgvector 0.5+
8
+ -- ============================================================================
9
+
10
+ BEGIN;
11
+
12
-- ----------------------------------------------------------------------------
-- Softmax Function (used by attention mechanisms)
--
-- Numerically stable softmax: the maximum score is subtracted before
-- exponentiating so large scores cannot overflow exp().
-- NULL input returns NULL (STRICT); an empty array returns an empty array.
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION claude_flow.softmax(
    scores REAL[]
) RETURNS REAL[] AS $$
DECLARE
    max_score REAL;
    sum_exp REAL := 0;
    i INTEGER;
    result REAL[] := ARRAY[]::REAL[];
BEGIN
    -- Guard: array_length() returns NULL for an empty array, which would make
    -- the FOR-loop bounds below NULL and raise
    -- "lower bound of FOR loop cannot be null". Return an empty array instead.
    IF array_length(scores, 1) IS NULL THEN
        RETURN result;
    END IF;

    -- Find max for numerical stability
    max_score := scores[1];
    FOR i IN 2..array_length(scores, 1) LOOP
        IF scores[i] > max_score THEN
            max_score := scores[i];
        END IF;
    END LOOP;

    -- Compute sum of exponentials
    FOR i IN 1..array_length(scores, 1) LOOP
        sum_exp := sum_exp + exp(scores[i] - max_score);
    END LOOP;

    -- Normalize: each weight is exp(score - max) / sum
    FOR i IN 1..array_length(scores, 1) LOOP
        result := array_append(result, exp(scores[i] - max_score) / sum_exp);
    END LOOP;

    RETURN result;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE;
47
+
48
-- ----------------------------------------------------------------------------
-- Scaled Dot-Product Attention
-- attention(Q, K, V) = softmax(QK^T / sqrt(d_k)) * V
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION claude_flow.scaled_dot_product_attention(
    query REAL[],            -- Query vector [d_k]
    keys REAL[][],           -- Key matrix [seq_len, d_k]
    values REAL[][],         -- Value matrix [seq_len, d_v]
    scale REAL DEFAULT NULL  -- Optional scaling factor (default: 1/sqrt(d_k))
) RETURNS REAL[] AS $$
DECLARE
    key_dim INTEGER := array_length(query, 1);
    n_keys INTEGER := array_length(keys, 1);
    effective_scale REAL;
    raw_scores REAL[] := ARRAY[]::REAL[];
    weights REAL[];
    result REAL[] := ARRAY[]::REAL[];
    pos INTEGER;
    dim INTEGER;
    acc REAL;
BEGIN
    -- Default to the standard transformer scaling of 1/sqrt(d_k)
    effective_scale := COALESCE(scale, 1.0 / sqrt(key_dim::REAL));

    -- Raw scores: dot product of the query with every key row
    FOR pos IN 1..n_keys LOOP
        acc := 0;
        FOR dim IN 1..key_dim LOOP
            acc := acc + query[dim] * keys[pos][dim];
        END LOOP;
        raw_scores := array_append(raw_scores, acc * effective_scale);
    END LOOP;

    -- Turn scores into a probability distribution
    weights := claude_flow.softmax(raw_scores);

    -- Output: attention-weighted sum of the value rows
    FOR dim IN 1..array_length(values[1], 1) LOOP
        acc := 0;
        FOR pos IN 1..n_keys LOOP
            acc := acc + weights[pos] * values[pos][dim];
        END LOOP;
        result := array_append(result, acc);
    END LOOP;

    RETURN result;
END;
$$ LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE;
103
+
104
-- ----------------------------------------------------------------------------
-- Multi-Head Attention
-- MultiHead(Q, K, V) = Concat(head_1, ..., head_h) * W_O
-- NOTE(review): no learned W_O projection is applied here — head outputs are
-- simply concatenated (identity output projection).
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION claude_flow.multi_head_attention(
    query REAL[],              -- Query vector [d_model]
    keys REAL[][],             -- Key matrix [seq_len, d_model]
    values REAL[][],           -- Value matrix [seq_len, d_model]
    num_heads INTEGER DEFAULT 8
) RETURNS TABLE (
    output REAL[],             -- Concatenated head outputs [d_model]
    attention_weights REAL[][] -- Per-head softmax weights [num_heads, seq_len]
) AS $$
DECLARE
    d_model INTEGER;
    d_k INTEGER;
    seq_len INTEGER;
    scale REAL;
    head_outputs REAL[][];
    head_weights REAL[][];
    h INTEGER;
    i INTEGER;
    j INTEGER;
    start_idx INTEGER;
    head_query REAL[];
    head_keys REAL[][];
    head_values REAL[][];
    scores REAL[];
    weights REAL[];
    head_output REAL[];
    score REAL;
    concat_output REAL[];
BEGIN
    -- Get dimensions
    d_model := array_length(query, 1);
    seq_len := array_length(keys, 1);

    -- Validate dimensions
    IF d_model % num_heads != 0 THEN
        RAISE EXCEPTION 'd_model (%) must be divisible by num_heads (%)', d_model, num_heads;
    END IF;

    d_k := d_model / num_heads;
    scale := 1.0 / sqrt(d_k::REAL);

    -- Initialize outputs
    head_outputs := ARRAY[]::REAL[][];
    head_weights := ARRAY[]::REAL[][];

    -- Process each head on its own d_k-wide slice of the model dimension
    FOR h IN 1..num_heads LOOP
        start_idx := (h - 1) * d_k + 1;

        -- Extract head-specific query slice
        head_query := ARRAY[]::REAL[];
        FOR i IN start_idx..(start_idx + d_k - 1) LOOP
            head_query := array_append(head_query, query[i]);
        END LOOP;

        -- Extract head-specific key and value slices
        head_keys := ARRAY[]::REAL[][];
        head_values := ARRAY[]::REAL[][];
        FOR i IN 1..seq_len LOOP
            DECLARE
                k_row REAL[] := ARRAY[]::REAL[];
                v_row REAL[] := ARRAY[]::REAL[];
            BEGIN
                FOR j IN start_idx..(start_idx + d_k - 1) LOOP
                    k_row := array_append(k_row, keys[i][j]);
                    v_row := array_append(v_row, values[i][j]);
                END LOOP;
                head_keys := array_cat(head_keys, ARRAY[k_row]);
                head_values := array_cat(head_values, ARRAY[v_row]);
            END;
        END LOOP;

        -- Scaled dot-product attention, inlined so the softmax weights can be
        -- captured. BUG FIX: the previous version delegated to
        -- scaled_dot_product_attention (which does not expose its weights) and
        -- never appended to head_weights, so the attention_weights column was
        -- always an empty array. The output values are unchanged.
        scores := ARRAY[]::REAL[];
        FOR i IN 1..seq_len LOOP
            score := 0;
            FOR j IN 1..d_k LOOP
                score := score + head_query[j] * head_keys[i][j];
            END LOOP;
            scores := array_append(scores, score * scale);
        END LOOP;

        weights := claude_flow.softmax(scores);
        head_weights := array_cat(head_weights, ARRAY[weights]);

        -- Weighted sum of this head's value rows
        head_output := ARRAY[]::REAL[];
        FOR j IN 1..d_k LOOP
            score := 0;
            FOR i IN 1..seq_len LOOP
                score := score + weights[i] * head_values[i][j];
            END LOOP;
            head_output := array_append(head_output, score);
        END LOOP;

        head_outputs := array_cat(head_outputs, ARRAY[head_output]);
    END LOOP;

    -- Concatenate head outputs back into a d_model-wide vector
    concat_output := ARRAY[]::REAL[];
    FOR h IN 1..num_heads LOOP
        FOR i IN 1..d_k LOOP
            concat_output := array_append(concat_output, head_outputs[h][i]);
        END LOOP;
    END LOOP;

    output := concat_output;
    attention_weights := head_weights;
    RETURN NEXT;
END;
$$ LANGUAGE plpgsql STABLE;
194
+
195
-- ----------------------------------------------------------------------------
-- Flash Attention (Memory-Efficient, Exact)
-- Processes attention in blocks to bound memory usage, using online-softmax
-- rescaling so the result equals full softmax attention regardless of
-- block_size.
--
-- BUG FIX: the previous implementation normalized each block's softmax
-- independently and then averaged the block outputs by 1/num_blocks. That made
-- the result depend on the block partitioning (changing block_size changed the
-- answer) and gave every block equal total weight no matter how much attention
-- mass it actually carried. The online-softmax formulation below keeps a
-- running max and running denominator and rescales the accumulator whenever a
-- larger max is seen, producing the exact softmax-attention output with the
-- same O(block) working memory.
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION claude_flow.flash_attention(
    query REAL[],               -- Query vector
    keys REAL[][],              -- Key matrix
    values REAL[][],            -- Value matrix
    block_size INTEGER DEFAULT 64 -- Block size for chunked processing
) RETURNS TABLE (
    output REAL[],              -- Attention output (exact softmax attention)
    max_attention REAL,         -- Maximum scaled score seen (numerical-stability anchor)
    computation_blocks INTEGER  -- Number of blocks processed
) AS $$
DECLARE
    d_k INTEGER;
    d_v INTEGER;
    seq_len INTEGER;
    num_blocks INTEGER;
    block_start INTEGER;
    block_end INTEGER;
    block_scores REAL[];
    block_max REAL;
    new_max REAL;
    correction REAL;
    running_max REAL := -1e9;   -- max score seen so far
    running_denom REAL := 0;    -- softmax denominator, relative to running_max
    acc REAL[];                 -- unnormalized weighted-value accumulator
    e REAL;
    score REAL;
    scale REAL;
    i INTEGER;
    j INTEGER;
    b INTEGER;
BEGIN
    -- Get dimensions
    d_k := array_length(query, 1);
    seq_len := array_length(keys, 1);
    d_v := array_length(values[1], 1);
    scale := 1.0 / sqrt(d_k::REAL);

    -- Calculate number of blocks
    num_blocks := ceil(seq_len::REAL / block_size)::INTEGER;

    -- Zero the accumulator
    acc := ARRAY[]::REAL[];
    FOR j IN 1..d_v LOOP
        acc := array_append(acc, 0.0);
    END LOOP;

    -- Process blocks
    FOR b IN 1..num_blocks LOOP
        block_start := (b - 1) * block_size + 1;
        block_end := LEAST(b * block_size, seq_len);

        -- Compute this block's scaled scores and their max
        block_scores := ARRAY[]::REAL[];
        block_max := -1e9;
        FOR i IN block_start..block_end LOOP
            score := 0;
            FOR j IN 1..d_k LOOP
                score := score + query[j] * keys[i][j];
            END LOOP;
            score := score * scale;
            block_scores := array_append(block_scores, score);
            IF score > block_max THEN
                block_max := score;
            END IF;
        END LOOP;

        -- Online-softmax rescaling: when a larger max appears, re-express the
        -- running denominator and accumulator relative to the new max.
        new_max := GREATEST(running_max, block_max);
        correction := exp(running_max - new_max);
        running_denom := running_denom * correction;
        FOR j IN 1..d_v LOOP
            acc[j] := acc[j] * correction;
        END LOOP;
        running_max := new_max;

        -- Accumulate this block's contribution (unnormalized)
        FOR i IN 1..array_length(block_scores, 1) LOOP
            e := exp(block_scores[i] - running_max);
            running_denom := running_denom + e;
            FOR j IN 1..d_v LOOP
                acc[j] := acc[j] + e * values[block_start + i - 1][j];
            END LOOP;
        END LOOP;
    END LOOP;

    -- Final normalization: exact softmax attention, independent of block_size
    output := ARRAY[]::REAL[];
    FOR j IN 1..d_v LOOP
        output := array_append(output, acc[j] / running_denom);
    END LOOP;

    max_attention := running_max;
    computation_blocks := num_blocks;
    RETURN NEXT;
END;
$$ LANGUAGE plpgsql STABLE;
300
+
301
-- ----------------------------------------------------------------------------
-- Sparse Attention (with configurable patterns)
-- Supports: local, strided, fixed, and custom patterns
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION claude_flow.sparse_attention(
    query REAL[],                   -- Query vector
    keys REAL[][],                  -- Key matrix
    values REAL[][],                -- Value matrix
    pattern TEXT DEFAULT 'local',   -- Attention pattern: local, strided, fixed
    window_size INTEGER DEFAULT 64, -- For local attention
    stride INTEGER DEFAULT 32       -- For strided attention
) RETURNS TABLE (
    output REAL[],                  -- Attention output over selected positions
    active_positions INTEGER[],     -- Key positions that participated
    sparsity_ratio REAL             -- Fraction of positions skipped
) AS $$
DECLARE
    key_dim INTEGER := array_length(query, 1);
    n_positions INTEGER := array_length(keys, 1);
    scaling REAL;
    selected INTEGER[] := ARRAY[]::INTEGER[];
    raw_scores REAL[] := ARRAY[]::REAL[];
    weights REAL[];
    result REAL[] := ARRAY[]::REAL[];
    pos INTEGER;
    dim INTEGER;
    acc REAL;
    n_selected INTEGER;
    keep BOOLEAN;
BEGIN
    scaling := 1.0 / sqrt(key_dim::REAL);

    -- Decide, position by position, which keys participate in attention.
    FOR pos IN 1..n_positions LOOP
        IF pattern = 'local' THEN
            -- 'local': only the first window_size positions.
            -- NOTE(review): with a single query vector there is no query
            -- position, so this is prefix attention rather than a sliding
            -- window — confirm this matches callers' expectations.
            keep := pos <= window_size;
        ELSIF pattern = 'strided' THEN
            -- 'strided': every stride-th position, starting at 1
            keep := (pos - 1) % stride = 0;
        ELSIF pattern = 'fixed' THEN
            -- 'fixed': union of the prefix window and the strided positions
            keep := pos <= window_size OR (pos - 1) % stride = 0;
        ELSE
            -- Unknown pattern: fall back to dense attention
            keep := TRUE;
        END IF;

        IF keep THEN
            selected := array_append(selected, pos);
        END IF;
    END LOOP;

    n_selected := array_length(selected, 1);

    -- Scaled dot-product scores over the selected positions only
    FOR pos IN 1..n_selected LOOP
        acc := 0;
        FOR dim IN 1..key_dim LOOP
            acc := acc + query[dim] * keys[selected[pos]][dim];
        END LOOP;
        raw_scores := array_append(raw_scores, acc * scaling);
    END LOOP;

    -- Normalize into attention weights
    weights := claude_flow.softmax(raw_scores);

    -- Weighted sum of the selected value rows
    FOR dim IN 1..array_length(values[1], 1) LOOP
        acc := 0;
        FOR pos IN 1..n_selected LOOP
            acc := acc + weights[pos] * values[selected[pos]][dim];
        END LOOP;
        result := array_append(result, acc);
    END LOOP;

    output := result;
    active_positions := selected;
    sparsity_ratio := 1.0 - (n_selected::REAL / n_positions::REAL);
    RETURN NEXT;
END;
$$ LANGUAGE plpgsql STABLE;
402
+
403
-- ----------------------------------------------------------------------------
-- Linear Attention (O(n) complexity approximation)
-- Uses kernel approximation for efficient attention:
--   output = sum_i (phi(q)·phi(k_i)) v_i / sum_i (phi(q)·phi(k_i))
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION claude_flow.linear_attention(
    query REAL[],               -- Query vector
    keys REAL[][],              -- Key matrix
    values REAL[][],            -- Value matrix
    feature_map TEXT DEFAULT 'elu' -- Feature map: elu, relu, softmax
) RETURNS REAL[] AS $$
DECLARE
    key_dim INTEGER := array_length(query, 1);
    n_keys INTEGER := array_length(keys, 1);
    value_dim INTEGER;
    mapped_query REAL[] := ARRAY[]::REAL[];
    weighted_values REAL[] := ARRAY[]::REAL[];
    weight_total REAL := 0;
    similarity REAL;
    mapped REAL;
    result REAL[];
    d INTEGER;
    pos INTEGER;
    v INTEGER;
BEGIN
    value_dim := array_length(values[1], 1);

    -- phi(query): elu+1 for 'elu', max(0, x) for 'relu', exp(x) otherwise
    FOR d IN 1..key_dim LOOP
        IF feature_map = 'elu' THEN
            IF query[d] >= 0 THEN
                mapped := query[d] + 1.0;
            ELSE
                mapped := exp(query[d]);
            END IF;
        ELSIF feature_map = 'relu' THEN
            mapped := GREATEST(0, query[d]);
        ELSE
            mapped := exp(query[d]);
        END IF;
        mapped_query := array_append(mapped_query, mapped);
    END LOOP;

    -- Zero the weighted-value accumulator
    FOR v IN 1..value_dim LOOP
        weighted_values := array_append(weighted_values, 0.0);
    END LOOP;

    -- Single pass over keys: similarity = phi(q) · phi(k_i); accumulate the
    -- similarity mass and the similarity-weighted value rows.
    FOR pos IN 1..n_keys LOOP
        similarity := 0;
        FOR d IN 1..key_dim LOOP
            IF feature_map = 'elu' THEN
                IF keys[pos][d] >= 0 THEN
                    mapped := keys[pos][d] + 1.0;
                ELSE
                    mapped := exp(keys[pos][d]);
                END IF;
            ELSIF feature_map = 'relu' THEN
                mapped := GREATEST(0, keys[pos][d]);
            ELSE
                mapped := exp(keys[pos][d]);
            END IF;
            similarity := similarity + mapped_query[d] * mapped;
        END LOOP;

        weight_total := weight_total + similarity;
        FOR v IN 1..value_dim LOOP
            weighted_values[v] := weighted_values[v] + similarity * values[pos][v];
        END LOOP;
    END LOOP;

    -- Normalize by the total similarity mass; if the mass is not positive
    -- (e.g. relu map zeroed everything) return the unnormalized zeros as-is.
    IF weight_total > 0 THEN
        result := ARRAY[]::REAL[];
        FOR v IN 1..array_length(weighted_values, 1) LOOP
            result := array_append(result, weighted_values[v] / weight_total);
        END LOOP;
    ELSE
        result := weighted_values;
    END IF;

    RETURN result;
END;
$$ LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE;
499
+
500
-- ----------------------------------------------------------------------------
-- Cross Attention (for encoder-decoder architectures)
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION claude_flow.cross_attention(
    decoder_query REAL[],   -- Query from decoder
    encoder_keys REAL[][],  -- Keys from encoder
    encoder_values REAL[][],-- Values from encoder
    num_heads INTEGER DEFAULT 8
) RETURNS REAL[] AS $$
DECLARE
    mha RECORD;
BEGIN
    -- Cross attention is multi-head attention with the query taken from the
    -- decoder and keys/values taken from the encoder; only the combined
    -- output vector is surfaced (per-head weights are discarded).
    SELECT mh.*
      INTO mha
      FROM claude_flow.multi_head_attention(
               decoder_query,
               encoder_keys,
               encoder_values,
               num_heads
           ) AS mh;

    RETURN mha.output;
END;
$$ LANGUAGE plpgsql STABLE;
523
+
524
-- ----------------------------------------------------------------------------
-- Attention Cache Management
-- ----------------------------------------------------------------------------

-- Store an attention result in the cache, upserting on the derived cache key.
-- Returns the id of the inserted/updated cache row.
CREATE OR REPLACE FUNCTION claude_flow.cache_attention_result(
    p_query_hash TEXT,
    p_keys_hash TEXT,
    p_values_hash TEXT,
    p_num_heads INTEGER,
    p_attention_type TEXT,
    p_attention_weights REAL[],
    p_attention_output REAL[],
    p_output_dimensions INTEGER[],
    p_computation_time_ms REAL DEFAULT NULL,
    p_ttl_hours INTEGER DEFAULT 24
) RETURNS UUID AS $$
DECLARE
    v_key TEXT;
    v_row_id UUID;
BEGIN
    -- Cache key: md5 over the identifying fields. Must stay in sync with
    -- get_cached_attention, which derives the key the same way.
    -- NOTE(review): the fields are concatenated without a delimiter, so
    -- distinct field combinations could in principle produce the same
    -- concatenation — consider a separator if the hash inputs are ever
    -- variable-width.
    v_key := md5(p_query_hash || p_keys_hash || p_values_hash || p_num_heads::TEXT || p_attention_type);

    INSERT INTO claude_flow.attention_cache (
        cache_key,
        query_hash,
        keys_hash,
        values_hash,
        num_heads,
        attention_type,
        attention_weights,
        attention_output,
        output_dimensions,
        computation_time_ms,
        expires_at
    ) VALUES (
        v_key,
        p_query_hash,
        p_keys_hash,
        p_values_hash,
        p_num_heads,
        p_attention_type,
        p_attention_weights,
        p_attention_output,
        p_output_dimensions,
        p_computation_time_ms,
        NOW() + (p_ttl_hours || ' hours')::INTERVAL
    )
    ON CONFLICT (cache_key) DO UPDATE
       -- Re-storing an existing key refreshes the payload and its expiry;
       -- the hit_count bump on a re-store mirrors the existing behavior of
       -- counting an upsert as an access.
       SET attention_weights = EXCLUDED.attention_weights,
           attention_output  = EXCLUDED.attention_output,
           hit_count         = claude_flow.attention_cache.hit_count + 1,
           last_accessed_at  = NOW(),
           expires_at        = EXCLUDED.expires_at
    RETURNING id INTO v_row_id;

    RETURN v_row_id;
END;
$$ LANGUAGE plpgsql;
583
+
584
-- Look up a cached attention result by its identifying fields.
-- Always returns exactly one row; cache_hit = FALSE (with NULL payload
-- columns) when no live entry exists. A hit also bumps the entry's access
-- statistics.
CREATE OR REPLACE FUNCTION claude_flow.get_cached_attention(
    p_query_hash TEXT,
    p_keys_hash TEXT,
    p_values_hash TEXT,
    p_num_heads INTEGER,
    p_attention_type TEXT
) RETURNS TABLE (
    attention_weights REAL[],
    attention_output REAL[],
    output_dimensions INTEGER[],
    cache_hit BOOLEAN
) AS $$
DECLARE
    v_key TEXT;
    v_hit RECORD;
BEGIN
    -- Key derivation must match cache_attention_result exactly.
    v_key := md5(p_query_hash || p_keys_hash || p_values_hash || p_num_heads::TEXT || p_attention_type);

    SELECT ac.attention_weights, ac.attention_output, ac.output_dimensions
      INTO v_hit
      FROM claude_flow.attention_cache ac
     WHERE ac.cache_key = v_key
       AND (ac.expires_at IS NULL OR ac.expires_at > NOW());

    -- Miss: report it and stop early.
    IF NOT FOUND THEN
        attention_weights := NULL;
        attention_output := NULL;
        output_dimensions := NULL;
        cache_hit := FALSE;
        RETURN NEXT;
        RETURN;
    END IF;

    -- Hit: record the access before returning the cached payload.
    UPDATE claude_flow.attention_cache
       SET hit_count = hit_count + 1,
           last_accessed_at = NOW()
     WHERE cache_key = v_key;

    attention_weights := v_hit.attention_weights;
    attention_output := v_hit.attention_output;
    output_dimensions := v_hit.output_dimensions;
    cache_hit := TRUE;
    RETURN NEXT;
END;
$$ LANGUAGE plpgsql;
630
+
631
-- ----------------------------------------------------------------------------
-- Record migration
-- ----------------------------------------------------------------------------
-- NOTE(review): the checksum hashes the migration NAME, not the file contents,
-- so it cannot detect later edits to this file — presumably consistent with
-- the other migrations in this set; confirm against migrations.ts.
INSERT INTO claude_flow.migrations (name, checksum)
VALUES ('005_create_attention_functions', md5('005_create_attention_functions'))
ON CONFLICT (name) DO NOTHING;

COMMIT;
639
+
640
+ -- ============================================================================
641
+ -- Rollback Script
642
+ -- ============================================================================
643
+ -- BEGIN;
644
+ -- DROP FUNCTION IF EXISTS claude_flow.get_cached_attention(TEXT, TEXT, TEXT, INTEGER, TEXT);
645
+ -- DROP FUNCTION IF EXISTS claude_flow.cache_attention_result(TEXT, TEXT, TEXT, INTEGER, TEXT, REAL[], REAL[], INTEGER[], REAL, INTEGER);
646
+ -- DROP FUNCTION IF EXISTS claude_flow.cross_attention(REAL[], REAL[][], REAL[][], INTEGER);
647
+ -- DROP FUNCTION IF EXISTS claude_flow.linear_attention(REAL[], REAL[][], REAL[][], TEXT);
648
+ -- DROP FUNCTION IF EXISTS claude_flow.sparse_attention(REAL[], REAL[][], REAL[][], TEXT, INTEGER, INTEGER);
649
+ -- DROP FUNCTION IF EXISTS claude_flow.flash_attention(REAL[], REAL[][], REAL[][], INTEGER);
650
+ -- DROP FUNCTION IF EXISTS claude_flow.multi_head_attention(REAL[], REAL[][], REAL[][], INTEGER);
651
+ -- DROP FUNCTION IF EXISTS claude_flow.scaled_dot_product_attention(REAL[], REAL[][], REAL[][], REAL);
652
+ -- DROP FUNCTION IF EXISTS claude_flow.softmax(REAL[]);
653
+ -- DELETE FROM claude_flow.migrations WHERE name = '005_create_attention_functions';
654
+ -- COMMIT;