swarm_sdk 2.0.3 → 2.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/swarm_sdk/agent/builder.rb +41 -0
- data/lib/swarm_sdk/agent/chat/logging_helpers.rb +22 -5
- data/lib/swarm_sdk/agent/definition.rb +52 -6
- data/lib/swarm_sdk/configuration.rb +3 -1
- data/lib/swarm_sdk/prompts/memory.md.erb +480 -0
- data/lib/swarm_sdk/swarm/agent_initializer.rb +16 -3
- data/lib/swarm_sdk/swarm/builder.rb +9 -1
- data/lib/swarm_sdk/swarm/tool_configurator.rb +73 -23
- data/lib/swarm_sdk/swarm.rb +51 -7
- data/lib/swarm_sdk/tools/document_converters/html_converter.rb +101 -0
- data/lib/swarm_sdk/tools/memory/memory_delete.rb +64 -0
- data/lib/swarm_sdk/tools/memory/memory_edit.rb +145 -0
- data/lib/swarm_sdk/tools/memory/memory_glob.rb +94 -0
- data/lib/swarm_sdk/tools/memory/memory_grep.rb +147 -0
- data/lib/swarm_sdk/tools/memory/memory_multi_edit.rb +228 -0
- data/lib/swarm_sdk/tools/memory/memory_read.rb +82 -0
- data/lib/swarm_sdk/tools/memory/memory_write.rb +90 -0
- data/lib/swarm_sdk/tools/registry.rb +11 -3
- data/lib/swarm_sdk/tools/scratchpad/scratchpad_list.rb +96 -0
- data/lib/swarm_sdk/tools/scratchpad/scratchpad_read.rb +76 -0
- data/lib/swarm_sdk/tools/scratchpad/scratchpad_write.rb +91 -0
- data/lib/swarm_sdk/tools/stores/memory_storage.rb +300 -0
- data/lib/swarm_sdk/tools/stores/scratchpad_storage.rb +224 -0
- data/lib/swarm_sdk/tools/stores/storage.rb +148 -0
- data/lib/swarm_sdk/tools/stores/storage_read_tracker.rb +61 -0
- data/lib/swarm_sdk/tools/web_fetch.rb +261 -0
- data/lib/swarm_sdk/version.rb +1 -1
- data/lib/swarm_sdk.rb +39 -0
- metadata +18 -5
- data/lib/swarm_sdk/tools/scratchpad_list.rb +0 -88
- data/lib/swarm_sdk/tools/scratchpad_read.rb +0 -59
- data/lib/swarm_sdk/tools/scratchpad_write.rb +0 -88
- data/lib/swarm_sdk/tools/stores/scratchpad.rb +0 -153
data/lib/swarm_sdk/tools/stores/memory_storage.rb
@@ -0,0 +1,300 @@
+# frozen_string_literal: true
+
+module SwarmSDK
+  module Tools
+    module Stores
+      # MemoryStorage provides persistent, per-agent storage
+      #
+      # Features:
+      # - Per-agent: Each agent has its own isolated storage
+      # - Persistent: ALWAYS saves to JSON file
+      # - Path-based: Hierarchical organization using file-path-like addresses
+      # - Metadata-rich: Stores content + title + timestamp + size
+      # - Thread-safe: Mutex-protected operations
+      class MemoryStorage < Storage
+        # Initialize memory storage with required persistence
+        #
+        # @param persist_to [String] Path to JSON file for persistence (REQUIRED)
+        # @raise [ArgumentError] If persist_to is not provided
+        def initialize(persist_to:)
+          super() # Initialize parent Storage class
+          raise ArgumentError, "persist_to is required for MemoryStorage" if persist_to.nil? || persist_to.to_s.strip.empty?
+
+          @entries = {}
+          @total_size = 0
+          @persist_to = persist_to
+          @mutex = Mutex.new
+
+          # Load existing data if file exists
+          load_from_file if File.exist?(@persist_to)
+        end
+
+        # Write content to memory storage
+        #
+        # @param file_path [String] Path to store content
+        # @param content [String] Content to store
+        # @param title [String] Brief title describing the content
+        # @raise [ArgumentError] If size limits are exceeded
+        # @return [Entry] The created entry
+        def write(file_path:, content:, title:)
+          @mutex.synchronize do
+            raise ArgumentError, "file_path is required" if file_path.nil? || file_path.to_s.strip.empty?
+            raise ArgumentError, "content is required" if content.nil?
+            raise ArgumentError, "title is required" if title.nil? || title.to_s.strip.empty?
+
+            content_size = content.bytesize
+
+            # Check entry size limit
+            if content_size > MAX_ENTRY_SIZE
+              raise ArgumentError, "Content exceeds maximum size (#{format_bytes(MAX_ENTRY_SIZE)}). " \
+                "Current: #{format_bytes(content_size)}"
+            end
+
+            # Calculate new total size
+            existing_entry = @entries[file_path]
+            existing_size = existing_entry ? existing_entry.size : 0
+            new_total_size = @total_size - existing_size + content_size
+
+            # Check total size limit
+            if new_total_size > MAX_TOTAL_SIZE
+              raise ArgumentError, "Memory storage full (#{format_bytes(MAX_TOTAL_SIZE)} limit). " \
+                "Current: #{format_bytes(@total_size)}, " \
+                "Would be: #{format_bytes(new_total_size)}. " \
+                "Clear old entries or use smaller content."
+            end
+
+            # Create entry
+            entry = Entry.new(
+              content: content,
+              title: title,
+              updated_at: Time.now,
+              size: content_size,
+            )
+
+            # Update storage
+            @entries[file_path] = entry
+            @total_size = new_total_size
+
+            # Always persist to file
+            save_to_file
+
+            entry
+          end
+        end
+
+        # Read content from memory storage
+        #
+        # @param file_path [String] Path to read from
+        # @raise [ArgumentError] If path not found
+        # @return [String] Content at the path
+        def read(file_path:)
+          raise ArgumentError, "file_path is required" if file_path.nil? || file_path.to_s.strip.empty?
+
+          entry = @entries[file_path]
+          raise ArgumentError, "memory://#{file_path} not found" unless entry
+
+          entry.content
+        end
+
+        # Delete a specific entry
+        #
+        # @param file_path [String] Path to delete
+        # @raise [ArgumentError] If path not found
+        # @return [void]
+        def delete(file_path:)
+          @mutex.synchronize do
+            raise ArgumentError, "file_path is required" if file_path.nil? || file_path.to_s.strip.empty?
+
+            entry = @entries[file_path]
+            raise ArgumentError, "memory://#{file_path} not found" unless entry
+
+            # Update total size
+            @total_size -= entry.size
+
+            # Remove entry
+            @entries.delete(file_path)
+
+            # Always persist to file
+            save_to_file
+          end
+        end
+
+        # List memory storage entries, optionally filtered by prefix
+        #
+        # @param prefix [String, nil] Filter by path prefix
+        # @return [Array<Hash>] Array of entry metadata (path, title, size, updated_at)
+        def list(prefix: nil)
+          entries = @entries
+
+          # Filter by prefix if provided
+          if prefix && !prefix.empty?
+            entries = entries.select { |path, _| path.start_with?(prefix) }
+          end
+
+          # Return metadata sorted by path
+          entries.map do |path, entry|
+            {
+              path: path,
+              title: entry.title,
+              size: entry.size,
+              updated_at: entry.updated_at,
+            }
+          end.sort_by { |e| e[:path] }
+        end
+
+        # Search entries by glob pattern
+        #
+        # @param pattern [String] Glob pattern (e.g., "**/*.txt", "parallel/*/task_*")
+        # @return [Array<Hash>] Array of matching entry metadata, sorted by most recent first
+        def glob(pattern:)
+          raise ArgumentError, "pattern is required" if pattern.nil? || pattern.to_s.strip.empty?
+
+          # Convert glob pattern to regex
+          regex = glob_to_regex(pattern)
+
+          # Filter entries by pattern
+          matching_entries = @entries.select { |path, _| regex.match?(path) }
+
+          # Return metadata sorted by most recent first
+          matching_entries.map do |path, entry|
+            {
+              path: path,
+              title: entry.title,
+              size: entry.size,
+              updated_at: entry.updated_at,
+            }
+          end.sort_by { |e| -e[:updated_at].to_f }
+        end
+
+        # Search entry content by pattern
+        #
+        # @param pattern [String] Regular expression pattern to search for
+        # @param case_insensitive [Boolean] Whether to perform case-insensitive search
+        # @param output_mode [String] Output mode: "files_with_matches" (default), "content", or "count"
+        # @return [Array<Hash>, String] Results based on output_mode
+        def grep(pattern:, case_insensitive: false, output_mode: "files_with_matches")
+          raise ArgumentError, "pattern is required" if pattern.nil? || pattern.to_s.strip.empty?
+
+          # Create regex from pattern
+          flags = case_insensitive ? Regexp::IGNORECASE : 0
+          regex = Regexp.new(pattern, flags)
+
+          case output_mode
+          when "files_with_matches"
+            # Return just the paths that match
+            matching_paths = @entries.select { |_path, entry| regex.match?(entry.content) }
+              .map { |path, _| path }
+              .sort
+            matching_paths
+          when "content"
+            # Return paths with matching lines, sorted by most recent first
+            results = []
+            @entries.each do |path, entry|
+              matching_lines = []
+              entry.content.each_line.with_index(1) do |line, line_num|
+                matching_lines << { line_number: line_num, content: line.chomp } if regex.match?(line)
+              end
+              results << { path: path, matches: matching_lines, updated_at: entry.updated_at } unless matching_lines.empty?
+            end
+            results.sort_by { |r| -r[:updated_at].to_f }.map { |r| r.except(:updated_at) }
+          when "count"
+            # Return paths with match counts, sorted by most recent first
+            results = []
+            @entries.each do |path, entry|
+              count = entry.content.scan(regex).size
+              results << { path: path, count: count, updated_at: entry.updated_at } if count > 0
+            end
+            results.sort_by { |r| -r[:updated_at].to_f }.map { |r| r.except(:updated_at) }
+          else
+            raise ArgumentError, "Invalid output_mode: #{output_mode}. Must be 'files_with_matches', 'content', or 'count'"
+          end
+        end
+
+        # Clear all entries
+        #
+        # @return [void]
+        def clear
+          @mutex.synchronize do
+            @entries.clear
+            @total_size = 0
+            save_to_file
+          end
+        end
+
+        # Get current total size
+        #
+        # @return [Integer] Total size in bytes
+        attr_reader :total_size
+
+        # Get number of entries
+        #
+        # @return [Integer] Number of entries
+        def size
+          @entries.size
+        end
+
+        private
+
+        # Save memory storage data to JSON file
+        #
+        # @return [void]
+        def save_to_file
+          # Convert entries to serializable format
+          data = {
+            version: 1,
+            total_size: @total_size,
+            entries: @entries.transform_values do |entry|
+              {
+                content: entry.content,
+                title: entry.title,
+                updated_at: entry.updated_at.iso8601,
+                size: entry.size,
+              }
+            end,
+          }
+
+          # Ensure directory exists
+          dir = File.dirname(@persist_to)
+          FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+
+          # Write to file atomically (write to temp file, then rename)
+          temp_file = "#{@persist_to}.tmp"
+          File.write(temp_file, JSON.pretty_generate(data))
+          File.rename(temp_file, @persist_to)
+        end
+
+        # Load memory storage data from JSON file
+        #
+        # @return [void]
+        def load_from_file
+          return unless File.exist?(@persist_to)
+
+          data = JSON.parse(File.read(@persist_to))
+
+          # Restore entries
+          @entries = data["entries"].transform_values do |entry_data|
+            Entry.new(
+              content: entry_data["content"],
+              title: entry_data["title"],
+              updated_at: Time.parse(entry_data["updated_at"]),
+              size: entry_data["size"],
+            )
+          end
+
+          # Restore total size
+          @total_size = data["total_size"]
+        rescue JSON::ParserError => e
+          # If file is corrupted, log warning and start fresh
+          warn("Warning: Failed to load memory storage from #{@persist_to}: #{e.message}. Starting with empty storage.")
+          @entries = {}
+          @total_size = 0
+        rescue StandardError => e
+          # If any other error occurs, log warning and start fresh
+          warn("Warning: Failed to load memory storage from #{@persist_to}: #{e.message}. Starting with empty storage.")
+          @entries = {}
+          @total_size = 0
+        end
+      end
+    end
+  end
+end
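For orientation, here is a minimal usage sketch of the MemoryStorage class added above. The require line, file paths, and contents are illustrative assumptions rather than part of the diff; the method signatures come from the code itself.

require "swarm_sdk"

# Hypothetical persistence location - MemoryStorage refuses to start without one.
memory = SwarmSDK::Tools::Stores::MemoryStorage.new(persist_to: "tmp/agent_memory.json")

# Every write is validated against the 1MB-per-entry / 100MB-total limits and
# immediately persisted to the JSON file via an atomic temp-file rename.
memory.write(
  file_path: "notes/rate_limits.md",
  content: "The upstream API allows 100 requests per minute.",
  title: "API rate limit notes",
)

memory.read(file_path: "notes/rate_limits.md")          # => "The upstream API allows 100 requests per minute."
memory.glob(pattern: "notes/*.md")                      # => [{ path: "notes/rate_limits.md", title: ..., size: ..., updated_at: ... }]
memory.grep(pattern: "requests", output_mode: "count")  # => [{ path: "notes/rate_limits.md", count: 1 }]
memory.total_size                                       # running byte count, restored from the JSON file on the next load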
data/lib/swarm_sdk/tools/stores/scratchpad_storage.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+module SwarmSDK
+  module Tools
+    module Stores
+      # ScratchpadStorage provides volatile, shared storage
+      #
+      # Features:
+      # - Shared: All agents share the same scratchpad
+      # - Volatile: NEVER persists - all data lost when process ends
+      # - Path-based: Hierarchical organization using file-path-like addresses
+      # - Metadata-rich: Stores content + title + timestamp + size
+      # - Thread-safe: Mutex-protected operations
+      #
+      # Use for temporary, cross-agent communication within a single session.
+      class ScratchpadStorage < Storage
+        # Initialize scratchpad storage (always volatile)
+        def initialize
+          super() # Initialize parent Storage class
+          @entries = {}
+          @total_size = 0
+          @mutex = Mutex.new
+        end
+
+        # Write content to scratchpad
+        #
+        # @param file_path [String] Path to store content
+        # @param content [String] Content to store
+        # @param title [String] Brief title describing the content
+        # @raise [ArgumentError] If size limits are exceeded
+        # @return [Entry] The created entry
+        def write(file_path:, content:, title:)
+          @mutex.synchronize do
+            raise ArgumentError, "file_path is required" if file_path.nil? || file_path.to_s.strip.empty?
+            raise ArgumentError, "content is required" if content.nil?
+            raise ArgumentError, "title is required" if title.nil? || title.to_s.strip.empty?
+
+            content_size = content.bytesize
+
+            # Check entry size limit
+            if content_size > MAX_ENTRY_SIZE
+              raise ArgumentError, "Content exceeds maximum size (#{format_bytes(MAX_ENTRY_SIZE)}). " \
+                "Current: #{format_bytes(content_size)}"
+            end
+
+            # Calculate new total size
+            existing_entry = @entries[file_path]
+            existing_size = existing_entry ? existing_entry.size : 0
+            new_total_size = @total_size - existing_size + content_size
+
+            # Check total size limit
+            if new_total_size > MAX_TOTAL_SIZE
+              raise ArgumentError, "Scratchpad full (#{format_bytes(MAX_TOTAL_SIZE)} limit). " \
+                "Current: #{format_bytes(@total_size)}, " \
+                "Would be: #{format_bytes(new_total_size)}. " \
+                "Clear old entries or use smaller content."
+            end
+
+            # Create entry
+            entry = Entry.new(
+              content: content,
+              title: title,
+              updated_at: Time.now,
+              size: content_size,
+            )
+
+            # Update storage
+            @entries[file_path] = entry
+            @total_size = new_total_size
+
+            entry
+          end
+        end
+
+        # Read content from scratchpad
+        #
+        # @param file_path [String] Path to read from
+        # @raise [ArgumentError] If path not found
+        # @return [String] Content at the path
+        def read(file_path:)
+          raise ArgumentError, "file_path is required" if file_path.nil? || file_path.to_s.strip.empty?
+
+          entry = @entries[file_path]
+          raise ArgumentError, "scratchpad://#{file_path} not found" unless entry
+
+          entry.content
+        end
+
+        # Delete a specific entry
+        #
+        # @param file_path [String] Path to delete
+        # @raise [ArgumentError] If path not found
+        # @return [void]
+        def delete(file_path:)
+          @mutex.synchronize do
+            raise ArgumentError, "file_path is required" if file_path.nil? || file_path.to_s.strip.empty?
+
+            entry = @entries[file_path]
+            raise ArgumentError, "scratchpad://#{file_path} not found" unless entry
+
+            # Update total size
+            @total_size -= entry.size
+
+            # Remove entry
+            @entries.delete(file_path)
+          end
+        end
+
+        # List scratchpad entries, optionally filtered by prefix
+        #
+        # @param prefix [String, nil] Filter by path prefix
+        # @return [Array<Hash>] Array of entry metadata (path, title, size, updated_at)
+        def list(prefix: nil)
+          entries = @entries
+
+          # Filter by prefix if provided
+          if prefix && !prefix.empty?
+            entries = entries.select { |path, _| path.start_with?(prefix) }
+          end
+
+          # Return metadata sorted by path
+          entries.map do |path, entry|
+            {
+              path: path,
+              title: entry.title,
+              size: entry.size,
+              updated_at: entry.updated_at,
+            }
+          end.sort_by { |e| e[:path] }
+        end
+
+        # Search entries by glob pattern
+        #
+        # @param pattern [String] Glob pattern (e.g., "**/*.txt", "parallel/*/task_*")
+        # @return [Array<Hash>] Array of matching entry metadata, sorted by most recent first
+        def glob(pattern:)
+          raise ArgumentError, "pattern is required" if pattern.nil? || pattern.to_s.strip.empty?
+
+          # Convert glob pattern to regex
+          regex = glob_to_regex(pattern)
+
+          # Filter entries by pattern
+          matching_entries = @entries.select { |path, _| regex.match?(path) }
+
+          # Return metadata sorted by most recent first
+          matching_entries.map do |path, entry|
+            {
+              path: path,
+              title: entry.title,
+              size: entry.size,
+              updated_at: entry.updated_at,
+            }
+          end.sort_by { |e| -e[:updated_at].to_f }
+        end
+
+        # Search entry content by pattern
+        #
+        # @param pattern [String] Regular expression pattern to search for
+        # @param case_insensitive [Boolean] Whether to perform case-insensitive search
+        # @param output_mode [String] Output mode: "files_with_matches" (default), "content", or "count"
+        # @return [Array<Hash>, String] Results based on output_mode
+        def grep(pattern:, case_insensitive: false, output_mode: "files_with_matches")
+          raise ArgumentError, "pattern is required" if pattern.nil? || pattern.to_s.strip.empty?
+
+          # Create regex from pattern
+          flags = case_insensitive ? Regexp::IGNORECASE : 0
+          regex = Regexp.new(pattern, flags)
+
+          case output_mode
+          when "files_with_matches"
+            # Return just the paths that match
+            matching_paths = @entries.select { |_path, entry| regex.match?(entry.content) }
+              .map { |path, _| path }
+              .sort
+            matching_paths
+          when "content"
+            # Return paths with matching lines, sorted by most recent first
+            results = []
+            @entries.each do |path, entry|
+              matching_lines = []
+              entry.content.each_line.with_index(1) do |line, line_num|
+                matching_lines << { line_number: line_num, content: line.chomp } if regex.match?(line)
+              end
+              results << { path: path, matches: matching_lines, updated_at: entry.updated_at } unless matching_lines.empty?
+            end
+            results.sort_by { |r| -r[:updated_at].to_f }.map { |r| r.except(:updated_at) }
+          when "count"
+            # Return paths with match counts, sorted by most recent first
+            results = []
+            @entries.each do |path, entry|
+              count = entry.content.scan(regex).size
+              results << { path: path, count: count, updated_at: entry.updated_at } if count > 0
+            end
+            results.sort_by { |r| -r[:updated_at].to_f }.map { |r| r.except(:updated_at) }
+          else
+            raise ArgumentError, "Invalid output_mode: #{output_mode}. Must be 'files_with_matches', 'content', or 'count'"
+          end
+        end
+
+        # Clear all entries
+        #
+        # @return [void]
+        def clear
+          @mutex.synchronize do
+            @entries.clear
+            @total_size = 0
+          end
+        end
+
+        # Get current total size
+        #
+        # @return [Integer] Total size in bytes
+        attr_reader :total_size
+
+        # Get number of entries
+        #
+        # @return [Integer] Number of entries
+        def size
+          @entries.size
+        end
+      end
+    end
+  end
+end
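Likewise, a minimal sketch of the ScratchpadStorage class added above. The paths and contents are illustrative; the API is the one shown in the diff.

scratchpad = SwarmSDK::Tools::Stores::ScratchpadStorage.new

# Same size limits and validation as MemoryStorage, but nothing ever touches disk.
scratchpad.write(
  file_path: "parallel/task_1/result",
  content: "Task 1 finished: 42 records migrated.",
  title: "Task 1 result",
)

scratchpad.list(prefix: "parallel/")                    # => [{ path: "parallel/task_1/result", title: ..., size: ..., updated_at: ... }]
scratchpad.read(file_path: "parallel/task_1/result")    # => "Task 1 finished: 42 records migrated."
scratchpad.delete(file_path: "parallel/task_1/result")
# All remaining entries are lost when the process exits - there is no save/load path.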
data/lib/swarm_sdk/tools/stores/storage.rb
@@ -0,0 +1,148 @@
+# frozen_string_literal: true
+
+module SwarmSDK
+  module Tools
+    module Stores
+      # Abstract base class for hierarchical key-value storage with metadata
+      #
+      # Provides session-scoped storage for agents with path-based organization.
+      # Subclasses implement persistence behavior (volatile vs persistent).
+      #
+      # Features:
+      # - Path-based: Hierarchical organization using file-path-like addresses
+      # - Metadata-rich: Stores content + title + timestamp + size
+      # - Search capabilities: Glob patterns and grep-style content search
+      # - Thread-safe: Mutex-protected operations
+      class Storage
+        # Maximum size per entry (1MB)
+        MAX_ENTRY_SIZE = 1_000_000
+
+        # Maximum total storage size (100MB)
+        MAX_TOTAL_SIZE = 100_000_000
+
+        # Represents a single storage entry with metadata
+        Entry = Struct.new(:content, :title, :updated_at, :size, keyword_init: true)
+
+        # Initialize storage
+        #
+        # Subclasses should call super() in their initialize method.
+        # This base implementation does nothing - it exists only to satisfy RuboCop.
+        def initialize
+          # Base class initialization - subclasses implement their own logic
+        end
+
+        # Write content to storage
+        #
+        # @param file_path [String] Path to store content
+        # @param content [String] Content to store
+        # @param title [String] Brief title describing the content
+        # @raise [ArgumentError] If size limits are exceeded
+        # @return [Entry] The created entry
+        def write(file_path:, content:, title:)
+          raise NotImplementedError, "Subclass must implement #write"
+        end
+
+        # Read content from storage
+        #
+        # @param file_path [String] Path to read from
+        # @raise [ArgumentError] If path not found
+        # @return [String] Content at the path
+        def read(file_path:)
+          raise NotImplementedError, "Subclass must implement #read"
+        end
+
+        # Delete a specific entry
+        #
+        # @param file_path [String] Path to delete
+        # @raise [ArgumentError] If path not found
+        # @return [void]
+        def delete(file_path:)
+          raise NotImplementedError, "Subclass must implement #delete"
+        end
+
+        # List entries, optionally filtered by prefix
+        #
+        # @param prefix [String, nil] Filter by path prefix
+        # @return [Array<Hash>] Array of entry metadata (path, title, size, updated_at)
+        def list(prefix: nil)
+          raise NotImplementedError, "Subclass must implement #list"
+        end
+
+        # Search entries by glob pattern
+        #
+        # @param pattern [String] Glob pattern (e.g., "**/*.txt", "parallel/*/task_*")
+        # @return [Array<Hash>] Array of matching entry metadata, sorted by most recent first
+        def glob(pattern:)
+          raise NotImplementedError, "Subclass must implement #glob"
+        end
+
+        # Search entry content by pattern
+        #
+        # @param pattern [String] Regular expression pattern to search for
+        # @param case_insensitive [Boolean] Whether to perform case-insensitive search
+        # @param output_mode [String] Output mode: "files_with_matches" (default), "content", or "count"
+        # @return [Array<Hash>, String] Results based on output_mode
+        def grep(pattern:, case_insensitive: false, output_mode: "files_with_matches")
+          raise NotImplementedError, "Subclass must implement #grep"
+        end
+
+        # Clear all entries
+        #
+        # @return [void]
+        def clear
+          raise NotImplementedError, "Subclass must implement #clear"
+        end
+
+        # Get current total size
+        #
+        # @return [Integer] Total size in bytes
+        def total_size
+          raise NotImplementedError, "Subclass must implement #total_size"
+        end
+
+        # Get number of entries
+        #
+        # @return [Integer] Number of entries
+        def size
+          raise NotImplementedError, "Subclass must implement #size"
+        end
+
+        protected
+
+        # Format bytes to human-readable size
+        #
+        # @param bytes [Integer] Number of bytes
+        # @return [String] Formatted size (e.g., "1.5MB", "500.0KB")
+        def format_bytes(bytes)
+          if bytes >= 1_000_000
+            "#{(bytes.to_f / 1_000_000).round(1)}MB"
+          elsif bytes >= 1_000
+            "#{(bytes.to_f / 1_000).round(1)}KB"
+          else
+            "#{bytes}B"
+          end
+        end
+
+        # Convert glob pattern to regex
+        #
+        # @param pattern [String] Glob pattern
+        # @return [Regexp] Regular expression
+        def glob_to_regex(pattern)
+          # Escape special regex characters except glob wildcards
+          escaped = Regexp.escape(pattern)
+
+          # Convert glob wildcards to regex
+          # ** matches any number of directories (including zero)
+          escaped = escaped.gsub('\*\*', ".*")
+          # * matches anything except directory separator
+          escaped = escaped.gsub('\*', "[^/]*")
+          # ? matches single character except directory separator
+          escaped = escaped.gsub('\?', "[^/]")
+
+          # Anchor to start and end
+          Regexp.new("\\A#{escaped}\\z")
+        end
+      end
+    end
+  end
+end
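Finally, a short sketch of what the Storage base class above provides to both subclasses. The constants and return values follow directly from the code shown in the diff; the sample arguments are illustrative.

SwarmSDK::Tools::Stores::Storage::MAX_ENTRY_SIZE  # => 1_000_000   (1MB cap per entry)
SwarmSDK::Tools::Stores::Storage::MAX_TOTAL_SIZE  # => 100_000_000 (100MB cap per store)

# The protected helpers, as seen from a subclass:
#   format_bytes(1_500_000)       # => "1.5MB"
#   format_bytes(500)             # => "500B"
#   glob_to_regex("reports/*.md") # => /\Areports\/[^\/]*\.md\z/ - "*" stops at "/", "**" spans directories
#
# Every public method (write, read, delete, list, glob, grep, clear, total_size, size)
# raises NotImplementedError here; MemoryStorage and ScratchpadStorage supply the behavior.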