kbs 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/deploy-github-pages.yml +52 -0
  3. data/CHANGELOG.md +68 -2
  4. data/README.md +235 -334
  5. data/docs/DOCUMENTATION_STATUS.md +158 -0
  6. data/docs/advanced/custom-persistence.md +775 -0
  7. data/docs/advanced/debugging.md +726 -0
  8. data/docs/advanced/index.md +8 -0
  9. data/docs/advanced/performance.md +832 -0
  10. data/docs/advanced/testing.md +691 -0
  11. data/docs/api/blackboard.md +1157 -0
  12. data/docs/api/engine.md +978 -0
  13. data/docs/api/facts.md +1212 -0
  14. data/docs/api/index.md +12 -0
  15. data/docs/api/rules.md +1034 -0
  16. data/docs/architecture/blackboard.md +553 -0
  17. data/docs/architecture/index.md +277 -0
  18. data/docs/architecture/network-structure.md +343 -0
  19. data/docs/architecture/rete-algorithm.md +737 -0
  20. data/docs/assets/css/custom.css +83 -0
  21. data/docs/assets/images/blackboard-architecture.svg +136 -0
  22. data/docs/assets/images/compiled-network.svg +101 -0
  23. data/docs/assets/images/fact-assertion-flow.svg +117 -0
  24. data/docs/assets/images/kbs.jpg +0 -0
  25. data/docs/assets/images/pattern-matching-trace.svg +136 -0
  26. data/docs/assets/images/rete-network-layers.svg +96 -0
  27. data/docs/assets/images/system-layers.svg +69 -0
  28. data/docs/assets/images/trading-signal-network.svg +139 -0
  29. data/docs/assets/js/mathjax.js +17 -0
  30. data/docs/examples/expert-systems.md +1031 -0
  31. data/docs/examples/index.md +9 -0
  32. data/docs/examples/multi-agent.md +1335 -0
  33. data/docs/examples/stock-trading.md +488 -0
  34. data/docs/guides/blackboard-memory.md +558 -0
  35. data/docs/guides/dsl.md +1321 -0
  36. data/docs/guides/facts.md +652 -0
  37. data/docs/guides/getting-started.md +383 -0
  38. data/docs/guides/index.md +23 -0
  39. data/docs/guides/negation.md +529 -0
  40. data/docs/guides/pattern-matching.md +561 -0
  41. data/docs/guides/persistence.md +451 -0
  42. data/docs/guides/variable-binding.md +491 -0
  43. data/docs/guides/writing-rules.md +755 -0
  44. data/docs/index.md +157 -0
  45. data/docs/installation.md +156 -0
  46. data/docs/quick-start.md +228 -0
  47. data/examples/README.md +2 -2
  48. data/examples/advanced_example.rb +2 -2
  49. data/examples/advanced_example_dsl.rb +224 -0
  50. data/examples/ai_enhanced_kbs.rb +1 -1
  51. data/examples/ai_enhanced_kbs_dsl.rb +538 -0
  52. data/examples/blackboard_demo_dsl.rb +50 -0
  53. data/examples/car_diagnostic.rb +1 -1
  54. data/examples/car_diagnostic_dsl.rb +54 -0
  55. data/examples/concurrent_inference_demo.rb +5 -5
  56. data/examples/concurrent_inference_demo_dsl.rb +363 -0
  57. data/examples/csv_trading_system.rb +1 -1
  58. data/examples/csv_trading_system_dsl.rb +525 -0
  59. data/examples/knowledge_base.db +0 -0
  60. data/examples/portfolio_rebalancing_system.rb +2 -2
  61. data/examples/portfolio_rebalancing_system_dsl.rb +613 -0
  62. data/examples/redis_trading_demo_dsl.rb +177 -0
  63. data/examples/run_all.rb +50 -0
  64. data/examples/run_all_dsl.rb +49 -0
  65. data/examples/stock_trading_advanced.rb +1 -1
  66. data/examples/stock_trading_advanced_dsl.rb +404 -0
  67. data/examples/temp.txt +7693 -0
  68. data/examples/temp_dsl.txt +8447 -0
  69. data/examples/timestamped_trading.rb +1 -1
  70. data/examples/timestamped_trading_dsl.rb +258 -0
  71. data/examples/trading_demo.rb +1 -1
  72. data/examples/trading_demo_dsl.rb +322 -0
  73. data/examples/working_demo.rb +1 -1
  74. data/examples/working_demo_dsl.rb +160 -0
  75. data/lib/kbs/blackboard/engine.rb +3 -3
  76. data/lib/kbs/blackboard/fact.rb +1 -1
  77. data/lib/kbs/condition.rb +1 -1
  78. data/lib/kbs/dsl/knowledge_base.rb +1 -1
  79. data/lib/kbs/dsl/variable.rb +1 -1
  80. data/lib/kbs/{rete_engine.rb → engine.rb} +1 -1
  81. data/lib/kbs/fact.rb +1 -1
  82. data/lib/kbs/version.rb +1 -1
  83. data/lib/kbs.rb +2 -2
  84. data/mkdocs.yml +181 -0
  85. metadata +66 -6
  86. data/examples/stock_trading_system.rb.bak +0 -563
@@ -0,0 +1,775 @@
# Custom Persistence

Implement custom storage backends for KBS by extending the `Store` interface. This guide covers implementing, testing, and optimizing custom persistence layers for PostgreSQL, MongoDB, or other databases.

## Store Interface

Custom stores must implement the `KBS::Blackboard::Persistence::Store` interface:

```ruby
module KBS
  module Blackboard
    module Persistence
      class Store
        # Fact Operations
        def save_fact(fact)
          raise NotImplementedError
        end

        def load_facts(type = nil)
          raise NotImplementedError
        end

        def update_fact(fact_id, attributes)
          raise NotImplementedError
        end

        def delete_fact(fact_id)
          raise NotImplementedError
        end

        # Message Queue Operations
        def send_message(topic, content, priority:)
          raise NotImplementedError
        end

        def pop_message(topic)
          raise NotImplementedError
        end

        # Audit Operations
        def log_fact_change(operation, fact, attributes = {})
          raise NotImplementedError
        end

        def fact_history(fact_id)
          raise NotImplementedError
        end

        def log_rule_firing(rule_name, fact_ids, bindings)
          raise NotImplementedError
        end

        def rule_firings(rule_name: nil, limit: 100)
          raise NotImplementedError
        end

        # Transaction Operations (optional)
        def transaction
          yield
        end

        def close
          # Cleanup resources
        end
      end
    end
  end
end
```
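
Before writing a real backend, it can help to see the whole contract exercised in one place. Below is a minimal, non-durable in-memory sketch — the `MemoryStore` name and its internal layout are illustrative, not part of KBS — which assumes the `KBS::Blackboard::Fact.new(type, attributes, id:, created_at:)` constructor used elsewhere in this guide.

```ruby
# Hypothetical MemoryStore: a single-process, non-durable reference
# implementation of the Store contract. Useful mainly as a test double.
class MemoryStore < KBS::Blackboard::Persistence::Store
  def initialize
    @facts    = {}                               # fact_id => { type:, attributes:, created_at: }
    @messages = Hash.new { |h, k| h[k] = [] }    # topic   => [ { content:, priority:, at: } ]
    @audit    = Hash.new { |h, k| h[k] = [] }    # fact_id => [ { operation:, attributes:, timestamp: } ]
    @firings  = []
  end

  def save_fact(fact)
    @facts[fact.id] = { type: fact.type, attributes: fact.attributes.dup, created_at: Time.now }
    log_fact_change('add', fact, fact.attributes)
    fact
  end

  def load_facts(type = nil)
    rows = @facts.select { |_, row| type.nil? || row[:type] == type.to_sym }
    rows.map do |id, row|
      KBS::Blackboard::Fact.new(row[:type], row[:attributes], id: id, created_at: row[:created_at])
    end
  end

  def update_fact(fact_id, attributes)
    row = @facts[fact_id]
    return unless row
    row[:attributes] = attributes.dup
    log_fact_change('update', fact_id, attributes)
  end

  def delete_fact(fact_id)
    row = @facts.delete(fact_id)
    log_fact_change('delete', fact_id, row[:attributes]) if row
  end

  def send_message(topic, content, priority:)
    @messages[topic.to_s] << { content: content, priority: priority, at: Time.now }
  end

  def pop_message(topic)
    queue = @messages[topic.to_s]
    msg = queue.min_by { |m| [-m[:priority], m[:at]] }   # highest priority, oldest first
    return nil unless msg
    queue.delete(msg)
    { content: msg[:content], priority: msg[:priority] }
  end

  def log_fact_change(operation, fact_or_id, attributes)
    fact_id = fact_or_id.is_a?(String) ? fact_or_id : fact_or_id.id
    @audit[fact_id] << { operation: operation, attributes: attributes, timestamp: Time.now }
  end

  def fact_history(fact_id)
    @audit[fact_id]
  end

  def log_rule_firing(rule_name, fact_ids, bindings)
    @firings << { rule_name: rule_name, fact_ids: fact_ids, bindings: bindings, timestamp: Time.now }
  end

  def rule_firings(rule_name: nil, limit: 100)
    scope = rule_name ? @firings.select { |f| f[:rule_name] == rule_name } : @firings
    scope.sort_by { |f| f[:timestamp] }.reverse.first(limit)
  end

  def close; end
end
```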

## PostgreSQL Store

### Implementation

```ruby
require 'pg'
require 'json'
require 'time'

class PostgresStore < KBS::Blackboard::Persistence::Store
  def initialize(connection_string:)
    @conn = PG.connect(connection_string)
    setup_tables
  end

  def setup_tables
    @conn.exec <<~SQL
      CREATE TABLE IF NOT EXISTS facts (
        id UUID PRIMARY KEY,
        fact_type VARCHAR(255) NOT NULL,
        attributes JSONB NOT NULL,
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW()
      );

      CREATE INDEX IF NOT EXISTS idx_facts_type ON facts(fact_type);
      CREATE INDEX IF NOT EXISTS idx_facts_attributes ON facts USING gin(attributes);

      CREATE TABLE IF NOT EXISTS messages (
        id SERIAL PRIMARY KEY,
        topic VARCHAR(255) NOT NULL,
        content JSONB NOT NULL,
        priority INTEGER NOT NULL,
        created_at TIMESTAMP DEFAULT NOW()
      );

      CREATE INDEX IF NOT EXISTS idx_messages_topic_priority
        ON messages(topic, priority DESC);

      CREATE TABLE IF NOT EXISTS audit_log (
        id SERIAL PRIMARY KEY,
        fact_id UUID NOT NULL,
        operation VARCHAR(50) NOT NULL,
        attributes JSONB,
        timestamp TIMESTAMP DEFAULT NOW()
      );

      CREATE INDEX IF NOT EXISTS idx_audit_fact_id ON audit_log(fact_id);

      CREATE TABLE IF NOT EXISTS rule_firings (
        id SERIAL PRIMARY KEY,
        rule_name VARCHAR(255) NOT NULL,
        fact_ids UUID[] NOT NULL,
        bindings JSONB NOT NULL,
        timestamp TIMESTAMP DEFAULT NOW()
      );

      CREATE INDEX IF NOT EXISTS idx_rule_firings_name ON rule_firings(rule_name);
    SQL
  end

  # Fact Operations

  def save_fact(fact)
    @conn.exec_params(
      "INSERT INTO facts (id, fact_type, attributes) VALUES ($1, $2, $3)",
      [fact.id, fact.type.to_s, fact.attributes.to_json]
    )

    log_fact_change('add', fact, fact.attributes)
    fact
  end

  def load_facts(type = nil)
    query = if type
      @conn.exec_params(
        "SELECT id, fact_type, attributes, created_at FROM facts WHERE fact_type = $1",
        [type.to_s]
      )
    else
      @conn.exec("SELECT id, fact_type, attributes, created_at FROM facts")
    end

    query.map do |row|
      KBS::Blackboard::Fact.new(
        row['fact_type'].to_sym,
        JSON.parse(row['attributes'], symbolize_names: true),
        id: row['id'],
        created_at: Time.parse(row['created_at'])
      )
    end
  end

  def update_fact(fact_id, attributes)
    @conn.exec_params(
      "UPDATE facts SET attributes = $1, updated_at = NOW() WHERE id = $2",
      [attributes.to_json, fact_id]
    )

    log_fact_change('update', fact_id, attributes)
  end

  def delete_fact(fact_id)
    result = @conn.exec_params(
      "DELETE FROM facts WHERE id = $1 RETURNING attributes",
      [fact_id]
    )

    if result.ntuples > 0
      attrs = JSON.parse(result[0]['attributes'], symbolize_names: true)
      log_fact_change('delete', fact_id, attrs)
    end
  end

  # Message Queue Operations

  def send_message(topic, content, priority:)
    @conn.exec_params(
      "INSERT INTO messages (topic, content, priority) VALUES ($1, $2, $3)",
      [topic.to_s, content.to_json, priority]
    )
  end

  def pop_message(topic)
    # Atomic pop using DELETE RETURNING
    result = @conn.exec_params(<<~SQL, [topic.to_s])
      DELETE FROM messages
      WHERE id = (
        SELECT id FROM messages
        WHERE topic = $1
        ORDER BY priority DESC, created_at ASC
        LIMIT 1
        FOR UPDATE SKIP LOCKED
      )
      RETURNING content, priority
    SQL

    return nil if result.ntuples == 0

    {
      content: JSON.parse(result[0]['content'], symbolize_names: true),
      priority: result[0]['priority'].to_i
    }
  end

  # Audit Operations

  def log_fact_change(operation, fact_or_id, attributes)
    fact_id = fact_or_id.is_a?(String) ? fact_or_id : fact_or_id.id

    @conn.exec_params(
      "INSERT INTO audit_log (fact_id, operation, attributes) VALUES ($1, $2, $3)",
      [fact_id, operation, attributes.to_json]
    )
  end

  def fact_history(fact_id)
    result = @conn.exec_params(
      "SELECT operation, attributes, timestamp FROM audit_log WHERE fact_id = $1 ORDER BY timestamp",
      [fact_id]
    )

    result.map do |row|
      {
        operation: row['operation'],
        attributes: JSON.parse(row['attributes'], symbolize_names: true),
        timestamp: Time.parse(row['timestamp'])
      }
    end
  end

  def log_rule_firing(rule_name, fact_ids, bindings)
    @conn.exec_params(
      "INSERT INTO rule_firings (rule_name, fact_ids, bindings) VALUES ($1, $2, $3)",
      [rule_name, "{#{fact_ids.join(',')}}", bindings.to_json]
    )
  end

  def rule_firings(rule_name: nil, limit: 100)
    query = if rule_name
      @conn.exec_params(
        "SELECT rule_name, fact_ids, bindings, timestamp FROM rule_firings WHERE rule_name = $1 ORDER BY timestamp DESC LIMIT $2",
        [rule_name, limit]
      )
    else
      @conn.exec_params(
        "SELECT rule_name, fact_ids, bindings, timestamp FROM rule_firings ORDER BY timestamp DESC LIMIT $1",
        [limit]
      )
    end

    query.map do |row|
      {
        rule_name: row['rule_name'],
        fact_ids: row['fact_ids'].gsub(/[{}]/, '').split(','),
        bindings: JSON.parse(row['bindings'], symbolize_names: true),
        timestamp: Time.parse(row['timestamp'])
      }
    end
  end

  # Transaction Support

  def transaction
    @conn.exec("BEGIN")
    yield
    @conn.exec("COMMIT")
  rescue => e
    @conn.exec("ROLLBACK")
    raise e
  end

  def close
    @conn.close if @conn
  end
end

# Usage
store = PostgresStore.new(
  connection_string: "postgresql://localhost/kbs_production"
)

engine = KBS::Blackboard::Engine.new(store: store)
```

## MongoDB Store

### Implementation

```ruby
require 'mongo'
require 'securerandom'

class MongoStore < KBS::Blackboard::Persistence::Store
  def initialize(url:, database: 'kbs')
    @client = Mongo::Client.new(url)
    @db = @client.use(database)
    setup_collections
  end

  def setup_collections
    # Facts collection
    @facts = @db[:facts]
    @facts.indexes.create_one({ fact_type: 1 })
    @facts.indexes.create_one({ created_at: 1 })

    # Messages collection
    @messages = @db[:messages]
    @messages.indexes.create_one({ topic: 1, priority: -1, created_at: 1 })

    # Audit log
    @audit = @db[:audit_log]
    @audit.indexes.create_one({ fact_id: 1, timestamp: 1 })

    # Rule firings
    @rule_firings = @db[:rule_firings]
    @rule_firings.indexes.create_one({ rule_name: 1, timestamp: -1 })
  end

  # Fact Operations

  def save_fact(fact)
    doc = {
      _id: fact.id,
      fact_type: fact.type.to_s,
      attributes: fact.attributes,
      created_at: Time.now,
      updated_at: Time.now
    }

    @facts.insert_one(doc)

    log_fact_change('add', fact, fact.attributes)
    fact
  end

  def load_facts(type = nil)
    query = type ? { fact_type: type.to_s } : {}

    @facts.find(query).map do |doc|
      KBS::Blackboard::Fact.new(
        doc['fact_type'].to_sym,
        doc['attributes'].transform_keys(&:to_sym),
        id: doc['_id'],
        created_at: doc['created_at']
      )
    end
  end

  def update_fact(fact_id, attributes)
    @facts.update_one(
      { _id: fact_id },
      { '$set' => { attributes: attributes, updated_at: Time.now } }
    )

    log_fact_change('update', fact_id, attributes)
  end

  def delete_fact(fact_id)
    doc = @facts.find_one_and_delete({ _id: fact_id })

    if doc
      log_fact_change('delete', fact_id, doc['attributes'])
    end
  end

  # Message Queue Operations

  def send_message(topic, content, priority:)
    @messages.insert_one({
      topic: topic.to_s,
      content: content,
      priority: priority,
      created_at: Time.now
    })
  end

  def pop_message(topic)
    # Find highest priority message
    doc = @messages.find_one_and_delete(
      { topic: topic.to_s },
      sort: { priority: -1, created_at: 1 }
    )

    return nil unless doc

    {
      content: doc['content'].transform_keys(&:to_sym),
      priority: doc['priority']
    }
  end

  # Audit Operations

  def log_fact_change(operation, fact_or_id, attributes)
    fact_id = fact_or_id.is_a?(String) ? fact_or_id : fact_or_id.id

    @audit.insert_one({
      fact_id: fact_id,
      operation: operation,
      attributes: attributes,
      timestamp: Time.now
    })
  end

  def fact_history(fact_id)
    @audit.find({ fact_id: fact_id })
          .sort(timestamp: 1)
          .map do |doc|
      {
        operation: doc['operation'],
        attributes: doc['attributes'].transform_keys(&:to_sym),
        timestamp: doc['timestamp']
      }
    end
  end

  def log_rule_firing(rule_name, fact_ids, bindings)
    @rule_firings.insert_one({
      rule_name: rule_name,
      fact_ids: fact_ids,
      bindings: bindings,
      timestamp: Time.now
    })
  end

  def rule_firings(rule_name: nil, limit: 100)
    query = rule_name ? { rule_name: rule_name } : {}

    @rule_firings.find(query)
                 .sort(timestamp: -1)
                 .limit(limit)
                 .map do |doc|
      {
        rule_name: doc['rule_name'],
        fact_ids: doc['fact_ids'],
        bindings: doc['bindings'].transform_keys(&:to_sym),
        timestamp: doc['timestamp']
      }
    end
  end

  # Transaction Support (MongoDB 4.0+)

  def transaction
    session = @client.start_session

    session.with_transaction do
      # Note: operations only join this transaction if they are passed the
      # session (e.g. insert_one(doc, session: session)).
      yield
    end
  ensure
    session.end_session if session
  end

  def close
    @client.close if @client
  end
end

# Usage
store = MongoStore.new(
  url: 'mongodb://localhost:27017',
  database: 'kbs_production'
)

engine = KBS::Blackboard::Engine.new(store: store)
```

## Testing Custom Stores

### Test Suite

```ruby
require 'minitest/autorun'

class TestCustomStore < Minitest::Test
  def setup
    @store = MyCustomStore.new
  end

  def teardown
    @store.close
  end

  def test_save_and_load_facts
    fact = KBS::Blackboard::Fact.new(:test, { value: 42 })

    @store.save_fact(fact)
    loaded = @store.load_facts(:test)

    assert_equal 1, loaded.size
    assert_equal 42, loaded.first[:value]
  end

  def test_load_facts_by_type
    @store.save_fact(KBS::Blackboard::Fact.new(:type_a, { value: 1 }))
    @store.save_fact(KBS::Blackboard::Fact.new(:type_b, { value: 2 }))

    type_a_facts = @store.load_facts(:type_a)

    assert_equal 1, type_a_facts.size
    assert_equal :type_a, type_a_facts.first.type
  end

  def test_update_fact
    fact = KBS::Blackboard::Fact.new(:test, { value: 1 })
    @store.save_fact(fact)

    @store.update_fact(fact.id, { value: 2 })

    loaded = @store.load_facts(:test)
    assert_equal 2, loaded.first[:value]
  end

  def test_delete_fact
    fact = KBS::Blackboard::Fact.new(:test, { value: 1 })
    @store.save_fact(fact)

    @store.delete_fact(fact.id)

    loaded = @store.load_facts(:test)
    assert_empty loaded
  end

  def test_message_queue
    @store.send_message(:alerts, { text: "High priority" }, priority: 100)
    @store.send_message(:alerts, { text: "Low priority" }, priority: 10)

    # Pop should return highest priority
    msg = @store.pop_message(:alerts)

    assert_equal "High priority", msg[:content][:text]
    assert_equal 100, msg[:priority]

    # Next pop gets lower priority
    msg = @store.pop_message(:alerts)
    assert_equal "Low priority", msg[:content][:text]
  end

  def test_message_queue_empty
    msg = @store.pop_message(:nonexistent)
    assert_nil msg
  end

  def test_fact_audit_trail
    fact = KBS::Blackboard::Fact.new(:test, { value: 1 })

    @store.save_fact(fact)
    @store.update_fact(fact.id, { value: 2 })
    @store.delete_fact(fact.id)

    history = @store.fact_history(fact.id)

    assert_equal 3, history.size
    assert_equal "add", history[0][:operation]
    assert_equal "update", history[1][:operation]
    assert_equal "delete", history[2][:operation]
  end

  def test_rule_firing_log
    @store.log_rule_firing("test_rule", ["fact1", "fact2"], { var: :value })

    firings = @store.rule_firings(rule_name: "test_rule")

    assert_equal 1, firings.size
    assert_equal "test_rule", firings.first[:rule_name]
    assert_equal ["fact1", "fact2"], firings.first[:fact_ids]
  end

  def test_transactions
    fact1 = KBS::Blackboard::Fact.new(:test, { value: 1 })
    fact2 = KBS::Blackboard::Fact.new(:test, { value: 2 })

    # Successful transaction
    @store.transaction do
      @store.save_fact(fact1)
      @store.save_fact(fact2)
    end

    assert_equal 2, @store.load_facts(:test).size

    # Failed transaction
    begin
      @store.transaction do
        @store.save_fact(KBS::Blackboard::Fact.new(:test, { value: 3 }))
        raise "Rollback!"
      end
    rescue
      # Expected
    end

    # Should still be 2 facts (transaction rolled back)
    assert_equal 2, @store.load_facts(:test).size
  end
end
```
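
Because every backend must satisfy the same contract, the suite above can be shared rather than copied. One common Minitest approach — sketched here with placeholder class names and environment variables — is to move the assertions into a module and include it once per store:

```ruby
# Hypothetical sketch: run one contract suite against several stores.
module StoreContractTests
  def test_save_and_load_facts
    fact = KBS::Blackboard::Fact.new(:test, { value: 42 })
    @store.save_fact(fact)

    assert_equal 1, @store.load_facts(:test).size
  end

  # ... move the remaining TestCustomStore tests into this module ...
end

class TestPostgresStoreContract < Minitest::Test
  include StoreContractTests

  def setup
    # KBS_TEST_PG_URL is a placeholder; point it at a disposable database.
    @store = PostgresStore.new(connection_string: ENV.fetch("KBS_TEST_PG_URL"))
  end

  def teardown
    @store.close
  end
end

class TestMongoStoreContract < Minitest::Test
  include StoreContractTests

  def setup
    # KBS_TEST_MONGO_URL is a placeholder; use a throwaway test database.
    @store = MongoStore.new(url: ENV.fetch("KBS_TEST_MONGO_URL"), database: "kbs_test")
  end

  def teardown
    @store.close
  end
end
```

Each class supplies only `setup`/`teardown`; the shared module keeps the behavioral expectations identical across backends.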

## Performance Considerations

### 1. Connection Pooling

```ruby
require 'connection_pool'

class PooledPostgresStore < PostgresStore
  def initialize(connection_string:, pool_size: 10)
    @pool = ConnectionPool.new(size: pool_size) do
      PG.connect(connection_string)
    end

    # Setup using one connection
    @pool.with { |conn| setup_tables_with_conn(conn) }
  end

  def save_fact(fact)
    @pool.with do |conn|
      conn.exec_params(
        "INSERT INTO facts (id, fact_type, attributes) VALUES ($1, $2, $3)",
        [fact.id, fact.type.to_s, fact.attributes.to_json]
      )
    end

    fact
  end

  # ... other methods using @pool.with { |conn| ... }
end
```

### 2. Batch Operations

```ruby
def save_facts(facts)
  @conn.exec("BEGIN")

  facts.each do |fact|
    save_fact(fact)
  end

  @conn.exec("COMMIT")
rescue => e
  @conn.exec("ROLLBACK")
  raise e
end
```
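
Wrapping per-row saves in a single transaction removes per-commit overhead, but each row is still its own round trip. When inserting many facts at once, a single multi-row `INSERT` cuts round trips further. The helper below is an illustrative sketch against the `facts` table defined earlier (the `bulk_insert_facts` name is not part of KBS):

```ruby
# Illustrative PostgresStore helper: insert many facts in one statement.
def bulk_insert_facts(facts)
  return if facts.empty?

  placeholders = []
  params = []

  facts.each_with_index do |fact, i|
    base = i * 3
    placeholders << "($#{base + 1}, $#{base + 2}, $#{base + 3})"
    params.concat([fact.id, fact.type.to_s, fact.attributes.to_json])
  end

  @conn.exec_params(
    "INSERT INTO facts (id, fact_type, attributes) VALUES #{placeholders.join(', ')}",
    params
  )
end
```

Note that, like `COPY`-based bulk loading, this skips the per-fact audit logging that `save_fact` performs; log separately if the audit trail matters for bulk loads.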

### 3. Indexing

```ruby
def optimize_indexes
  # Add indexes for common queries. CREATE INDEX CONCURRENTLY cannot run
  # inside a transaction block, so issue each statement separately instead
  # of one multi-statement exec.
  @conn.exec(<<~SQL)
    CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_facts_created
      ON facts(created_at DESC)
  SQL

  @conn.exec(<<~SQL)
    CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_messages_topic_priority
      ON messages(topic, priority DESC)
      WHERE topic IN ('alerts', 'critical')
  SQL

  # JSONB expression index for attribute queries
  @conn.exec(<<~SQL)
    CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_facts_value
      ON facts((attributes->>'value'))
  SQL
end
```

### 4. Caching

```ruby
class CachedStore < KBS::Blackboard::Persistence::Store
  def initialize(underlying_store, cache_ttl: 300)
    @store = underlying_store
    @cache = {}
    @cache_ttl = cache_ttl
  end

  def load_facts(type = nil)
    cache_key = "facts:#{type}"

    if cached = @cache[cache_key]
      return cached[:data] if Time.now - cached[:timestamp] < @cache_ttl
    end

    facts = @store.load_facts(type)

    @cache[cache_key] = {
      data: facts,
      timestamp: Time.now
    }

    facts
  end

  def save_fact(fact)
    result = @store.save_fact(fact)

    # Invalidate cache
    @cache.delete("facts:#{fact.type}")
    @cache.delete("facts:")

    result
  end

  # Delegate other methods
  def method_missing(method, *args, &block)
    @store.send(method, *args, &block)
  end

  def respond_to_missing?(method, include_private = false)
    @store.respond_to?(method, include_private) || super
  end
end
```
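
Usage follows the decorator shape — wrap any concrete store and hand the wrapper to the engine (a short sketch; the connection string and TTL are illustrative):

```ruby
# Wrap a concrete store with the read-through cache above.
underlying = PostgresStore.new(connection_string: "postgresql://localhost/kbs_production")
store = CachedStore.new(underlying, cache_ttl: 60)

engine = KBS::Blackboard::Engine.new(store: store)
```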

## Best Practices

### 1. Handle Errors Gracefully

```ruby
def save_fact(fact)
  retries = 0

  begin
    @conn.exec_params(
      "INSERT INTO facts (id, fact_type, attributes) VALUES ($1, $2, $3)",
      [fact.id, fact.type.to_s, fact.attributes.to_json]
    )
  rescue PG::ConnectionBad => e
    retries += 1

    if retries < 3
      @conn.reset  # re-establish the connection before retrying
      retry
    else
      raise e
    end
  end
end
```

### 2. Use Prepared Statements

```ruby
def initialize(connection_string:)
  super
  @conn.prepare('save_fact',
    "INSERT INTO facts (id, fact_type, attributes) VALUES ($1, $2, $3)")
end

def save_fact(fact)
  @conn.exec_prepared('save_fact', [fact.id, fact.type.to_s, fact.attributes.to_json])
end
```

### 3. Implement Health Checks

```ruby
def healthy?
  @conn.exec("SELECT 1")
  true
rescue => e
  false
end
```

## Next Steps

- **[Persistence Guide](../guides/persistence.md)** - Choosing backends
- **[Testing Guide](testing.md)** - Testing custom stores
- **[Performance Guide](performance.md)** - Optimizing queries
- **[API Reference](../api/blackboard.md)** - Complete API documentation

---

*Custom stores enable KBS to work with any database. Implement the Store interface and test thoroughly.*