agentf 0.3.0 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/agentf/memory.rb CHANGED
@@ -10,6 +10,8 @@ module Agentf
10
10
  module Memory
11
11
  # Redis-backed memory system for agent learning
12
12
  class RedisMemory
13
+ EDGE_INDEX = "edge:links"
14
+
13
15
  attr_reader :project
14
16
 
15
17
  def initialize(redis_url: nil, project: nil)
@@ -21,7 +23,7 @@ module Agentf
21
23
  ensure_indexes if @search_supported
22
24
  end
23
25
 
24
- def store_task(content:, embedding: [], language: nil, task_type: nil, success: true, agent: "ARCHITECT")
26
+ def store_task(content:, embedding: [], language: nil, task_type: nil, success: true, agent: Agentf::AgentRoles::PLANNER)
25
27
  task_id = "task_#{SecureRandom.hex(4)}"
26
28
 
27
29
  data = {
@@ -42,9 +44,19 @@ module Agentf
42
44
  task_id
43
45
  end
44
46
 
45
- def store_episode(type:, title:, description:, context: "", code_snippet: "", tags: [], agent: "SPECIALIST", related_task_id: nil,
46
- metadata: {})
47
+ def store_episode(type:, title:, description:, context: "", code_snippet: "", tags: [], agent: Agentf::AgentRoles::ENGINEER,
48
+ related_task_id: nil, metadata: {}, entity_ids: [], relationships: [], parent_episode_id: nil, causal_from: nil)
47
49
  episode_id = "episode_#{SecureRandom.hex(4)}"
50
+ normalized_metadata = enrich_metadata(
51
+ metadata: metadata,
52
+ agent: agent,
53
+ type: type,
54
+ tags: tags,
55
+ entity_ids: entity_ids,
56
+ relationships: relationships,
57
+ parent_episode_id: parent_episode_id,
58
+ causal_from: causal_from
59
+ )
48
60
 
49
61
  data = {
50
62
  "id" => episode_id,
@@ -58,7 +70,11 @@ module Agentf
58
70
  "created_at" => Time.now.to_i,
59
71
  "agent" => agent,
60
72
  "related_task_id" => related_task_id || "",
61
- "metadata" => metadata
73
+ "entity_ids" => entity_ids,
74
+ "relationships" => relationships,
75
+ "parent_episode_id" => parent_episode_id.to_s,
76
+ "causal_from" => causal_from.to_s,
77
+ "metadata" => normalized_metadata
62
78
  }
63
79
 
64
80
  key = "episodic:#{episode_id}"
@@ -79,10 +95,19 @@ module Agentf
79
95
  @client.set(key, payload)
80
96
  end
81
97
 
98
+ persist_relationship_edges(
99
+ episode_id: episode_id,
100
+ related_task_id: related_task_id,
101
+ relationships: relationships,
102
+ metadata: normalized_metadata,
103
+ tags: tags,
104
+ agent: agent
105
+ )
106
+
82
107
  episode_id
83
108
  end
84
109
 
85
- def store_success(title:, description:, context: "", code_snippet: "", tags: [], agent: "SPECIALIST")
110
+ def store_success(title:, description:, context: "", code_snippet: "", tags: [], agent: Agentf::AgentRoles::ENGINEER)
86
111
  store_episode(
87
112
  type: "success",
88
113
  title: title,
@@ -94,7 +119,7 @@ module Agentf
94
119
  )
95
120
  end
96
121
 
97
- def store_pitfall(title:, description:, context: "", code_snippet: "", tags: [], agent: "SPECIALIST")
122
+ def store_pitfall(title:, description:, context: "", code_snippet: "", tags: [], agent: Agentf::AgentRoles::ENGINEER)
98
123
  store_episode(
99
124
  type: "pitfall",
100
125
  title: title,
@@ -106,7 +131,7 @@ module Agentf
106
131
  )
107
132
  end
108
133
 
109
- def store_lesson(title:, description:, context: "", code_snippet: "", tags: [], agent: "SPECIALIST")
134
+ def store_lesson(title:, description:, context: "", code_snippet: "", tags: [], agent: Agentf::AgentRoles::ENGINEER)
110
135
  store_episode(
111
136
  type: "lesson",
112
137
  title: title,
@@ -118,7 +143,7 @@ module Agentf
118
143
  )
119
144
  end
120
145
 
121
- def store_business_intent(title:, description:, constraints: [], tags: [], agent: "WORKFLOW_ENGINE", priority: 1)
146
+ def store_business_intent(title:, description:, constraints: [], tags: [], agent: Agentf::AgentRoles::ORCHESTRATOR, priority: 1)
122
147
  context = constraints.any? ? "Constraints: #{constraints.join('; ')}" : ""
123
148
 
124
149
  store_episode(
@@ -136,7 +161,8 @@ module Agentf
136
161
  )
137
162
  end
138
163
 
139
- def store_feature_intent(title:, description:, acceptance_criteria: [], non_goals: [], tags: [], agent: "ARCHITECT", related_task_id: nil)
164
+ def store_feature_intent(title:, description:, acceptance_criteria: [], non_goals: [], tags: [], agent: Agentf::AgentRoles::PLANNER,
165
+ related_task_id: nil)
140
166
  context_parts = []
141
167
  context_parts << "Acceptance: #{acceptance_criteria.join('; ')}" if acceptance_criteria.any?
142
168
  context_parts << "Non-goals: #{non_goals.join('; ')}" if non_goals.any?
@@ -157,7 +183,8 @@ module Agentf
157
183
  )
158
184
  end
159
185
 
160
- def store_incident(title:, description:, root_cause: "", resolution: "", tags: [], agent: "DEBUGGER", business_capability: nil)
186
+ def store_incident(title:, description:, root_cause: "", resolution: "", tags: [], agent: Agentf::AgentRoles::INCIDENT_RESPONDER,
187
+ business_capability: nil)
161
188
  store_episode(
162
189
  type: "incident",
163
190
  title: title,
@@ -174,7 +201,7 @@ module Agentf
174
201
  )
175
202
  end
176
203
 
177
- def store_playbook(title:, description:, steps: [], tags: [], agent: "ARCHITECT", feature_area: nil)
204
+ def store_playbook(title:, description:, steps: [], tags: [], agent: Agentf::AgentRoles::PLANNER, feature_area: nil)
178
205
  store_episode(
179
206
  type: "playbook",
180
207
  title: title,
@@ -226,7 +253,7 @@ module Agentf
226
253
  query = "@type:#{type} @project:{#{@project}}"
227
254
  search_episodic(query: query, limit: limit)
228
255
  else
229
- fetch_memories_without_search(limit: [limit * 4, 100].min).select { |mem| mem["type"] == type }.first(limit)
256
+ fetch_memories_without_search(limit: 100).select { |mem| mem["type"] == type }.first(limit)
230
257
  end
231
258
  end
232
259
 
@@ -285,6 +312,112 @@ module Agentf
285
312
  all_tags.to_a
286
313
  end
287
314
 
315
    # Deletes a single episodic memory, identified by id ("episode_xxxx" or the
    # full "episodic:episode_xxxx" key), together with any edge records that
    # reference it as source or target.
    #
    # scope:   "project" (default) refuses to delete memories belonging to a
    #          different project; "all" deletes regardless of project.
    # dry_run: when true nothing is deleted; the result lists planned keys.
    #
    # Returns a result hash (shape from delete_keys/delete_result). A missing or
    # out-of-scope id is reported via the "error" field — never raised.
    def delete_memory_by_id(id:, scope: "project", dry_run: false)
      normalized_scope = normalize_scope(scope)
      episode_id = normalize_episode_id(id)
      episode_key = "episodic:#{episode_id}"
      memory = load_episode(episode_key)

      return delete_result(mode: "id", scope: normalized_scope, dry_run: dry_run, error: "Memory not found: #{id}") unless memory
      if normalized_scope == "project" && memory["project"].to_s != @project.to_s
        return delete_result(mode: "id", scope: normalized_scope, dry_run: dry_run, error: "Memory not in current project")
      end

      # Edges are deleted in the same batch so the graph holds no dangling
      # references to the removed episode.
      keys = [episode_key]
      keys.concat(collect_related_edge_keys(episode_ids: [episode_id], scope: normalized_scope))
      result = delete_keys(keys.uniq, dry_run: dry_run)
      result.merge(
        "mode" => "id",
        "scope" => normalized_scope,
        "deleted_ids" => [episode_id],
        "filters" => {}
      )
    end
336
+
337
    # Deletes the most recently created episodic memories (sorted by
    # "created_at", newest first), optionally filtered by type and/or agent.
    # Edge records referencing the deleted episodes are removed as well.
    #
    # limit is clamped to >= 0; a limit of 0 is a no-op that still returns a
    # well-formed result hash.
    def delete_recent(limit: 10, scope: "project", type: nil, agent: nil, dry_run: false)
      normalized_scope = normalize_scope(scope)
      count = [limit.to_i, 0].max
      return delete_result(mode: "last", scope: normalized_scope, dry_run: dry_run, deleted_ids: [], filters: { "type" => type, "agent" => agent }) if count.zero?

      episodes = collect_episode_records(scope: normalized_scope, type: type, agent: agent)
      # Negated key sorts newest-first; missing timestamps sort as oldest (0).
      selected = episodes.sort_by { |mem| -(mem["created_at"] || 0) }.first(count)
      episode_ids = selected.map { |mem| mem["id"].to_s }
      keys = selected.map { |mem| "episodic:#{mem['id']}" }
      keys.concat(collect_related_edge_keys(episode_ids: episode_ids, scope: normalized_scope))
      result = delete_keys(keys.uniq, dry_run: dry_run)
      result.merge(
        "mode" => "last",
        "scope" => normalized_scope,
        "deleted_ids" => episode_ids,
        "filters" => { "type" => type, "agent" => agent }
      )
    end
355
+
356
    # Deletes every episodic memory matching the scope/type/agent filters, plus
    # the edges that reference those episodes.
    #
    # Only when NO type/agent filter is given does this also wipe ALL edge:*
    # and semantic:* keys in scope — a filtered delete must not take unrelated
    # graph edges or task records with it.
    def delete_all(scope: "project", type: nil, agent: nil, dry_run: false)
      normalized_scope = normalize_scope(scope)
      episodic_records = collect_episode_records(scope: normalized_scope, type: type, agent: agent)
      episode_ids = episodic_records.map { |mem| mem["id"].to_s }
      keys = episodic_records.map { |mem| "episodic:#{mem['id']}" }
      keys.concat(collect_related_edge_keys(episode_ids: episode_ids, scope: normalized_scope))

      if type.to_s.empty? && agent.to_s.empty?
        keys.concat(collect_edge_keys(scope: normalized_scope))
        keys.concat(collect_semantic_keys(scope: normalized_scope))
      end

      result = delete_keys(keys.uniq, dry_run: dry_run)
      result.merge(
        "mode" => "all",
        "scope" => normalized_scope,
        "deleted_ids" => episode_ids,
        "filters" => { "type" => type, "agent" => agent }
      )
    end
376
+
377
    # Persists a directed graph edge ("source_id" -> "target_id") between two
    # memory nodes under an "edge:<id>" key and returns the generated edge id.
    #
    # Storage mirrors the episode path: JSON.SET when the RedisJSON module is
    # available, with a one-way downgrade to plain SET the first time the
    # module is found missing (@json_supported flips to false permanently).
    def store_edge(source_id:, target_id:, relation:, weight: 1.0, tags: [], agent: Agentf::AgentRoles::ORCHESTRATOR, metadata: {})
      edge_id = "edge_#{SecureRandom.hex(5)}"
      data = {
        "id" => edge_id,
        "source_id" => source_id,
        "target_id" => target_id,
        "relation" => relation,
        "weight" => weight.to_f,
        "tags" => tags,
        "project" => @project,
        "agent" => agent,
        "metadata" => metadata,
        "created_at" => Time.now.to_i
      }

      key = "edge:#{edge_id}"
      payload = JSON.generate(data)

      if @json_supported
        begin
          @client.call("JSON.SET", key, ".", payload)
        rescue Redis::CommandError => e
          if missing_json_module?(e)
            # RedisJSON not loaded on the server: fall back to a string value.
            @json_supported = false
            @client.set(key, payload)
          else
            raise
          end
        end
      else
        @client.set(key, payload)
      end

      edge_id
    end
412
+
413
+ def neighbors(node_id:, relation: nil, depth: 1, limit: 50)
414
+ traverse_edges(seed_ids: [node_id], relation_filters: relation ? [relation] : nil, depth: depth, limit: limit)
415
+ end
416
+
417
+ def subgraph(seed_ids:, depth: 2, relation_filters: nil, limit: 200)
418
+ traverse_edges(seed_ids: seed_ids, relation_filters: relation_filters, depth: depth, limit: limit)
419
+ end
420
+
288
421
  def close
289
422
  @client.close
290
423
  end
@@ -295,8 +428,9 @@ module Agentf
295
428
  return unless @search_supported
296
429
 
297
430
  create_episodic_index
431
+ create_edge_index
298
432
  rescue Redis::CommandError => e
299
- raise Redis::CommandError, "Failed to create episodic index: #{e.message}. Ensure Redis Stack with RediSearch is available." unless index_already_exists?(e)
433
+ raise Redis::CommandError, "Failed to create indexes: #{e.message}. Ensure Redis Stack with RediSearch is available." unless index_already_exists?(e)
300
434
  end
301
435
 
302
436
  def create_episodic_index
@@ -320,7 +454,31 @@ module Agentf
320
454
  "$.metadata.priority", "AS", "priority", "NUMERIC",
321
455
  "$.metadata.confidence", "AS", "confidence", "NUMERIC",
322
456
  "$.metadata.business_capability", "AS", "business_capability", "TAG",
323
- "$.metadata.feature_area", "AS", "feature_area", "TAG"
457
+ "$.metadata.feature_area", "AS", "feature_area", "TAG",
458
+ "$.metadata.agent_role", "AS", "agent_role", "TAG",
459
+ "$.metadata.division", "AS", "division", "TAG",
460
+ "$.metadata.specialty", "AS", "specialty", "TAG",
461
+ "$.entity_ids[*]", "AS", "entity_ids", "TAG",
462
+ "$.parent_episode_id", "AS", "parent_episode_id", "TEXT",
463
+ "$.causal_from", "AS", "causal_from", "TEXT"
464
+ )
465
+ end
466
+
467
+ def create_edge_index
468
+ @client.call(
469
+ "FT.CREATE", EDGE_INDEX,
470
+ "ON", "JSON",
471
+ "PREFIX", "1", "edge:",
472
+ "SCHEMA",
473
+ "$.id", "AS", "id", "TEXT",
474
+ "$.source_id", "AS", "source_id", "TAG",
475
+ "$.target_id", "AS", "target_id", "TAG",
476
+ "$.relation", "AS", "relation", "TAG",
477
+ "$.project", "AS", "project", "TAG",
478
+ "$.agent", "AS", "agent", "TAG",
479
+ "$.weight", "AS", "weight", "NUMERIC",
480
+ "$.created_at", "AS", "created_at", "NUMERIC",
481
+ "$.tags", "AS", "tags", "TAG"
324
482
  )
325
483
  end
326
484
 
@@ -432,15 +590,15 @@ module Agentf
432
590
 
433
591
  def context_profile(agent)
434
592
  case agent.to_s.upcase
435
- when "ARCHITECT"
593
+ when Agentf::AgentRoles::PLANNER
436
594
  { "preferred_types" => %w[business_intent feature_intent lesson playbook pitfall], "pitfall_penalty" => 0.1 }
437
- when "SPECIALIST"
595
+ when Agentf::AgentRoles::ENGINEER
438
596
  { "preferred_types" => %w[playbook success lesson pitfall], "pitfall_penalty" => 0.05 }
439
- when "TESTER"
597
+ when Agentf::AgentRoles::QA_TESTER
440
598
  { "preferred_types" => %w[lesson pitfall incident success], "pitfall_penalty" => 0.0 }
441
- when "DEBUGGER"
599
+ when Agentf::AgentRoles::INCIDENT_RESPONDER
442
600
  { "preferred_types" => %w[incident pitfall lesson], "pitfall_penalty" => 0.0 }
443
- when "SECURITY"
601
+ when Agentf::AgentRoles::SECURITY_REVIEWER
444
602
  { "preferred_types" => %w[pitfall lesson incident], "pitfall_penalty" => 0.0 }
445
603
  else
446
604
  { "preferred_types" => %w[lesson pitfall success business_intent feature_intent], "pitfall_penalty" => 0.05 }
@@ -461,7 +619,7 @@ module Agentf
461
619
  confidence = 1.0 if confidence > 1.0
462
620
 
463
621
  type_score = preferred_types.include?(type) ? 1.0 : 0.25
464
- agent_score = (memory["agent"] == agent || memory["agent"] == "WORKFLOW_ENGINE") ? 1.0 : 0.2
622
+ agent_score = (memory["agent"] == agent || memory["agent"] == Agentf::AgentRoles::ORCHESTRATOR) ? 1.0 : 0.2
465
623
  age_seconds = [now - memory.fetch("created_at", now).to_i, 0].max
466
624
  recency_score = 1.0 / (1.0 + (age_seconds / 86_400.0))
467
625
 
@@ -520,6 +678,363 @@ module Agentf
520
678
  def client_options
521
679
  { url: @redis_url }
522
680
  end
681
+
682
+ def normalize_scope(scope)
683
+ value = scope.to_s.strip.downcase
684
+ return "all" if value == "all"
685
+
686
+ "project"
687
+ end
688
+
689
+ def normalize_episode_id(id)
690
+ value = id.to_s.strip
691
+ value = value.sub("episodic:", "") if value.start_with?("episodic:")
692
+ value
693
+ end
694
+
695
    # Scans every episodic:* key and returns the parsed records matching the
    # given filters.
    #
    # scope: "project" keeps only records whose "project" equals @project;
    #        "all" keeps everything.
    # type / agent: nil or "" disables that filter (note the to_s.empty? test).
    def collect_episode_records(scope:, type: nil, agent: nil)
      memories = []
      cursor = "0"
      loop do
        # Cursor-based SCAN (non-blocking); COUNT 100 is only a batching hint.
        cursor, batch = @client.scan(cursor, match: "episodic:*", count: 100)
        batch.each do |key|
          mem = load_episode(key)
          next unless mem.is_a?(Hash)
          next if scope == "project" && mem["project"].to_s != @project.to_s
          next unless type.to_s.empty? || mem["type"].to_s == type.to_s
          next unless agent.to_s.empty? || mem["agent"].to_s == agent.to_s

          memories << mem
        end
        break if cursor == "0"
      end
      memories
    end
713
+
714
    # Returns the Redis keys of every edge touching any of the given episode
    # ids (as source OR target), honoring project scope. Used to cascade edge
    # deletion when episodes are removed.
    #
    # NOTE(review): relies on Set (#to_set) — assumes "set" is required
    # elsewhere in the file (stdlib, autoloaded on Ruby >= 3.2); confirm.
    def collect_related_edge_keys(episode_ids:, scope:)
      ids = episode_ids.map(&:to_s).reject(&:empty?).to_set
      return [] if ids.empty?

      keys = []
      cursor = "0"
      loop do
        cursor, batch = @client.scan(cursor, match: "edge:*", count: 100)
        batch.each do |key|
          # load_episode is reused here as a generic JSON-record loader.
          edge = load_episode(key)
          next unless edge.is_a?(Hash)
          next if scope == "project" && edge["project"].to_s != @project.to_s

          source = edge["source_id"].to_s
          target = edge["target_id"].to_s
          keys << key if ids.include?(source) || ids.include?(target)
        end
        break if cursor == "0"
      end
      keys
    end
735
+
736
    # Returns all edge:* keys in scope. For scope "all" the keys are collected
    # without parsing the values; for project scope each edge is loaded so its
    # "project" field can be checked.
    def collect_edge_keys(scope:)
      keys = []
      cursor = "0"
      loop do
        cursor, batch = @client.scan(cursor, match: "edge:*", count: 100)
        batch.each do |key|
          if scope == "all"
            keys << key
            next
          end

          edge = load_episode(key)
          keys << key if edge.is_a?(Hash) && edge["project"].to_s == @project.to_s
        end
        break if cursor == "0"
      end
      keys
    end
754
+
755
    # Returns all semantic:* keys in scope. Unlike edges/episodes, semantic
    # task records are read with HGETALL — they are stored as Redis hashes
    # (presumably written by store_task; confirm against that path).
    def collect_semantic_keys(scope:)
      keys = []
      cursor = "0"
      loop do
        cursor, batch = @client.scan(cursor, match: "semantic:*", count: 100)
        batch.each do |key|
          if scope == "all"
            keys << key
            next
          end

          task = @client.hgetall(key)
          keys << key if task.is_a?(Hash) && task["project"].to_s == @project.to_s
        end
        break if cursor == "0"
      end
      keys
    end
773
+
774
+ def delete_keys(keys, dry_run:)
775
+ if dry_run
776
+ {
777
+ "dry_run" => true,
778
+ "candidate_count" => keys.length,
779
+ "deleted_count" => 0,
780
+ "deleted_keys" => [],
781
+ "planned_keys" => keys
782
+ }
783
+ else
784
+ deleted = keys.empty? ? 0 : @client.del(*keys)
785
+ {
786
+ "dry_run" => false,
787
+ "candidate_count" => keys.length,
788
+ "deleted_count" => deleted,
789
+ "deleted_keys" => keys,
790
+ "planned_keys" => []
791
+ }
792
+ end
793
+ end
794
+
795
+ def delete_result(mode:, scope:, dry_run:, deleted_ids: [], filters: {}, error: nil)
796
+ {
797
+ "mode" => mode,
798
+ "scope" => scope,
799
+ "dry_run" => dry_run,
800
+ "candidate_count" => 0,
801
+ "deleted_count" => 0,
802
+ "deleted_keys" => [],
803
+ "planned_keys" => [],
804
+ "deleted_ids" => deleted_ids,
805
+ "filters" => filters,
806
+ "error" => error
807
+ }
808
+ end
809
+
810
    # Best-effort creation of the graph edges implied by a freshly stored
    # episode: a "relates_to" edge to its task, one edge per explicit
    # relationship hash ({to:, type:, weight:} with string or symbol keys),
    # and "child_of"/"caused_by" edges taken from the ENRICHED metadata hash
    # (not from the raw keyword args — enrich_metadata only sets those keys
    # when non-empty).
    #
    # The trailing rescue deliberately swallows StandardError so that edge
    # bookkeeping can never fail the primary episode write.
    # NOTE(review): failures are silently discarded — consider logging.
    def persist_relationship_edges(episode_id:, related_task_id:, relationships:, metadata:, tags:, agent:)
      if related_task_id && !related_task_id.to_s.strip.empty?
        store_edge(source_id: episode_id, target_id: related_task_id, relation: "relates_to", tags: tags, agent: agent)
      end

      Array(relationships).each do |relation|
        next unless relation.is_a?(Hash)

        target = relation["to"] || relation[:to]
        relation_type = relation["type"] || relation[:type] || "related"
        next if target.to_s.strip.empty?

        store_edge(
          source_id: episode_id,
          target_id: target,
          relation: relation_type,
          weight: (relation["weight"] || relation[:weight] || 1.0).to_f,
          tags: tags,
          agent: agent,
          metadata: { "source_metadata" => extract_metadata_slice(metadata, %w[intent_kind agent_role division]) }
        )
      end

      parent = metadata["parent_episode_id"].to_s
      unless parent.empty?
        store_edge(source_id: episode_id, target_id: parent, relation: "child_of", tags: tags, agent: agent)
      end

      causal_from = metadata["causal_from"].to_s
      unless causal_from.empty?
        store_edge(source_id: episode_id, target_id: causal_from, relation: "caused_by", tags: tags, agent: agent)
      end
    rescue StandardError
      nil
    end
845
+
846
+ def enrich_metadata(metadata:, agent:, type:, tags:, entity_ids:, relationships:, parent_episode_id:, causal_from:)
847
+ base = metadata.is_a?(Hash) ? metadata.dup : {}
848
+ base["agent_role"] = agent
849
+ base["division"] = infer_division(agent)
850
+ base["specialty"] = infer_specialty(agent)
851
+ base["capabilities"] = infer_capabilities(agent)
852
+ base["episode_type"] = type
853
+ base["tag_count"] = Array(tags).length
854
+ base["relationship_count"] = Array(relationships).length
855
+ base["entity_ids"] = Array(entity_ids)
856
+ base["parent_episode_id"] = parent_episode_id.to_s unless parent_episode_id.to_s.empty?
857
+ base["causal_from"] = causal_from.to_s unless causal_from.to_s.empty?
858
+ base
859
+ end
860
+
861
+ def infer_division(agent)
862
+ case agent
863
+ when Agentf::AgentRoles::PLANNER, Agentf::AgentRoles::ORCHESTRATOR, Agentf::AgentRoles::KNOWLEDGE_MANAGER
864
+ "strategy"
865
+ when Agentf::AgentRoles::ENGINEER, Agentf::AgentRoles::RESEARCHER, Agentf::AgentRoles::UI_ENGINEER
866
+ "engineering"
867
+ when Agentf::AgentRoles::QA_TESTER, Agentf::AgentRoles::REVIEWER, Agentf::AgentRoles::SECURITY_REVIEWER
868
+ "quality"
869
+ when Agentf::AgentRoles::INCIDENT_RESPONDER
870
+ "operations"
871
+ else
872
+ "general"
873
+ end
874
+ end
875
+
876
+ def infer_specialty(agent)
877
+ case agent
878
+ when Agentf::AgentRoles::PLANNER
879
+ "planning"
880
+ when Agentf::AgentRoles::ENGINEER
881
+ "implementation"
882
+ when Agentf::AgentRoles::RESEARCHER
883
+ "discovery"
884
+ when Agentf::AgentRoles::QA_TESTER
885
+ "testing"
886
+ when Agentf::AgentRoles::INCIDENT_RESPONDER
887
+ "debugging"
888
+ when Agentf::AgentRoles::UI_ENGINEER
889
+ "design-implementation"
890
+ when Agentf::AgentRoles::SECURITY_REVIEWER
891
+ "security"
892
+ when Agentf::AgentRoles::KNOWLEDGE_MANAGER
893
+ "documentation"
894
+ when Agentf::AgentRoles::REVIEWER
895
+ "review"
896
+ when Agentf::AgentRoles::ORCHESTRATOR
897
+ "orchestration"
898
+ else
899
+ "general"
900
+ end
901
+ end
902
+
903
+ def infer_capabilities(agent)
904
+ case agent
905
+ when Agentf::AgentRoles::PLANNER
906
+ %w[decompose prioritize plan]
907
+ when Agentf::AgentRoles::ENGINEER
908
+ %w[implement execute modify]
909
+ when Agentf::AgentRoles::RESEARCHER
910
+ %w[search map discover]
911
+ when Agentf::AgentRoles::QA_TESTER
912
+ %w[test validate report]
913
+ when Agentf::AgentRoles::INCIDENT_RESPONDER
914
+ %w[triage diagnose remediate]
915
+ when Agentf::AgentRoles::UI_ENGINEER
916
+ %w[design implement-ui validate-ui]
917
+ when Agentf::AgentRoles::SECURITY_REVIEWER
918
+ %w[scan assess harden]
919
+ when Agentf::AgentRoles::KNOWLEDGE_MANAGER
920
+ %w[summarize document synthesize]
921
+ when Agentf::AgentRoles::REVIEWER
922
+ %w[review approve reject]
923
+ else
924
+ %w[coordinate]
925
+ end
926
+ end
927
+
928
    # Breadth-first traversal over OUTGOING edges starting from seed_ids.
    # Returns { "seed_ids", "nodes", "edges", "layers", "count" } where
    # "layers" records how many new edges each hop contributed.
    #
    # - Each edge id is visited at most once; nodes are deduplicated via Set.
    # - Stops after `depth` hops, when the frontier empties, or once `limit`
    #   edges have accumulated. NOTE: the limit check runs per-hop AND is
    #   passed to each per-node fetch, so slightly more than `limit` edges may
    #   be gathered before the final edges.first(limit) trim.
    def traverse_edges(seed_ids:, relation_filters:, depth:, limit:)
      current = Array(seed_ids).compact.map(&:to_s).reject(&:empty?).uniq
      visited_nodes = Set.new(current)
      visited_edges = Set.new
      layers = []
      edges = []

      depth.to_i.times do |hop|
        break if current.empty?

        next_nodes = []
        layer_edges = []
        current.each do |node_id|
          fetch_edges_for(node_id: node_id, relation_filters: relation_filters, limit: limit).each do |edge|
            edge_id = edge["id"].to_s
            next if edge_id.empty? || visited_edges.include?(edge_id)

            visited_edges << edge_id
            layer_edges << edge
            target = edge["target_id"].to_s
            next if target.empty? || visited_nodes.include?(target)

            visited_nodes << target
            next_nodes << target
          end
        end
        layers << { "depth" => hop + 1, "count" => layer_edges.length }
        edges.concat(layer_edges)
        current = next_nodes.uniq
        break if edges.length >= limit
      end

      {
        "seed_ids" => seed_ids,
        "nodes" => visited_nodes.to_a,
        "edges" => edges.first(limit),
        "layers" => layers,
        "count" => [edges.length, limit].min
      }
    end
968
+
969
    # Fetches the outgoing edges of one node, scoped to the current project.
    # With RediSearch available, builds a TAG query (values escaped via
    # escape_tag; multiple relations OR-ed with "|"); otherwise falls back to
    # a full SCAN of edge:* keys.
    def fetch_edges_for(node_id:, relation_filters:, limit:)
      if @search_supported
        query = ["@source_id:{#{escape_tag(node_id)}}", "@project:{#{escape_tag(@project)}}"]
        if relation_filters && relation_filters.any?
          relations = relation_filters.map { |item| escape_tag(item.to_s) }.join("|")
          query << "@relation:{#{relations}}"
        end
        search_json_index(index: EDGE_INDEX, query: query.join(" "), limit: limit)
      else
        fetch_edges_without_search(node_id: node_id, relation_filters: relation_filters, limit: limit)
      end
    end
981
+
982
    # Executes FT.SEARCH on a JSON-backed index and returns the parsed
    # documents (newest first).
    #
    # Reply shape (RESP2): [total, key1, fields1, key2, fields2, ...] where
    # each fieldsN for a JSON document is ["$", "<json string>"] — hence the
    # step-2 walk starting at index 2 and the "$" marker scan. Documents that
    # fail to parse are silently skipped.
    # NOTE(review): SORTBY requires the "created_at" field to be SORTABLE in
    # the target index — verify the index definitions match.
    def search_json_index(index:, query:, limit:)
      results = @client.call(
        "FT.SEARCH", index,
        query,
        "SORTBY", "created_at", "DESC",
        "LIMIT", "0", limit.to_s
      )
      return [] unless results && results[0] > 0

      records = []
      (2...results.length).step(2) do |i|
        item = results[i]
        next unless item.is_a?(Array)

        item.each_with_index do |part, j|
          next unless part == "$" && j + 1 < item.length

          begin
            records << JSON.parse(item[j + 1])
          rescue JSON::ParserError
            nil
          end
        end
      end
      records
    end
1008
+
1009
    # SCAN-based fallback for fetch_edges_for when RediSearch is unavailable:
    # walks every edge:* key and keeps project-scoped edges whose source is
    # node_id, honoring the optional relation filter.
    #
    # The `return` inside the batch loop exits the WHOLE method as soon as
    # `limit` edges are collected, aborting the scan early.
    def fetch_edges_without_search(node_id:, relation_filters:, limit:)
      edges = []
      cursor = "0"
      loop do
        cursor, batch = @client.scan(cursor, match: "edge:*", count: 100)
        batch.each do |key|
          edge = load_episode(key)
          next unless edge.is_a?(Hash)
          next unless edge["source_id"].to_s == node_id.to_s
          next unless edge["project"].to_s == @project.to_s
          next if relation_filters && relation_filters.any? && !relation_filters.include?(edge["relation"])

          edges << edge
          return edges.first(limit) if edges.length >= limit
        end
        break if cursor == "0"
      end
      edges
    end
1028
+
1029
+ def escape_tag(value)
1030
+ value.to_s.gsub(/[\-{}\[\]|\\]/) { |m| "\\#{m}" }
1031
+ end
1032
+
1033
+ def extract_metadata_slice(metadata, keys)
1034
+ keys.each_with_object({}) do |key, acc|
1035
+ acc[key] = metadata[key] if metadata.key?(key)
1036
+ end
1037
+ end
523
1038
  end
524
1039
 
525
1040
  # Convenience method