fractor 0.1.3 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop-https---raw-githubusercontent-com-riboseinc-oss-guides-main-ci-rubocop-yml +552 -0
- data/.rubocop.yml +14 -8
- data/.rubocop_todo.yml +154 -48
- data/README.adoc +1371 -317
- data/examples/auto_detection/README.adoc +52 -0
- data/examples/auto_detection/auto_detection.rb +170 -0
- data/examples/continuous_chat_common/message_protocol.rb +53 -0
- data/examples/continuous_chat_fractor/README.adoc +217 -0
- data/examples/continuous_chat_fractor/chat_client.rb +303 -0
- data/examples/continuous_chat_fractor/chat_common.rb +83 -0
- data/examples/continuous_chat_fractor/chat_server.rb +167 -0
- data/examples/continuous_chat_fractor/simulate.rb +345 -0
- data/examples/continuous_chat_server/README.adoc +135 -0
- data/examples/continuous_chat_server/chat_client.rb +303 -0
- data/examples/continuous_chat_server/chat_server.rb +359 -0
- data/examples/continuous_chat_server/simulate.rb +343 -0
- data/examples/hierarchical_hasher/hierarchical_hasher.rb +12 -8
- data/examples/multi_work_type/multi_work_type.rb +30 -29
- data/examples/pipeline_processing/pipeline_processing.rb +15 -15
- data/examples/producer_subscriber/producer_subscriber.rb +20 -16
- data/examples/scatter_gather/scatter_gather.rb +29 -28
- data/examples/simple/sample.rb +38 -6
- data/examples/specialized_workers/specialized_workers.rb +44 -37
- data/lib/fractor/continuous_server.rb +188 -0
- data/lib/fractor/result_aggregator.rb +1 -1
- data/lib/fractor/supervisor.rb +291 -108
- data/lib/fractor/version.rb +1 -1
- data/lib/fractor/work_queue.rb +68 -0
- data/lib/fractor/work_result.rb +1 -1
- data/lib/fractor/worker.rb +2 -1
- data/lib/fractor/wrapped_ractor.rb +12 -2
- data/lib/fractor.rb +2 -0
- metadata +17 -2
data/examples/continuous_chat_server/simulate.rb
@@ -0,0 +1,343 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require "fileutils"
+require "optparse"
+require "json"
+
+module ContinuousChat
+  # Simulation controller that manages the server and clients
+  class Simulation
+    attr_reader :server_port, :log_dir
+
+    def initialize(server_port = 3000, duration = 10, log_dir = "logs")
+      @server_port = server_port
+      @duration = duration
+      @log_dir = log_dir
+      @server_pid = nil
+      @client_pids = {}
+      @running = false
+
+      # Create log directory if it doesn't exist
+      FileUtils.mkdir_p(@log_dir)
+    end
+
+    # Start the simulation
+    def start
+      puts "Starting chat simulation on port #{@server_port}"
+      puts "Logs will be saved to #{@log_dir}"
+
+      # Start the server
+      start_server
+
+      # Give the server time to initialize
+      puts "Waiting for server to initialize..."
+      sleep(2)
+
+      # Start the clients
+      start_clients
+
+      @running = true
+      puts "Chat simulation started"
+
+      # Wait for the specified duration
+      puts "Simulation will run for #{@duration} seconds"
+
+      # Give clients time to connect
+      sleep(2)
+      puts "Clients should be connecting now..."
+
+      # Wait for messages to be processed
+      remaining_time = @duration - 4
+      if remaining_time.positive?
+        puts "Waiting #{remaining_time} more seconds for processing..."
+        sleep(remaining_time)
+      end
+
+      puts "Simulation time complete, stopping..."
+
+      # Stop the simulation
+      stop
+
+      # Analyze the logs
+      analyze_logs
+
+      true
+    rescue StandardError => e
+      puts "Failed to start simulation: #{e.message}"
+      stop
+      false
+    end
+
+    # Stop the simulation
+    def stop
+      puts "Stopping chat simulation..."
+
+      # Stop all clients
+      stop_clients
+
+      # Stop the server
+      stop_server
+
+      @running = false
+      puts "Chat simulation stopped"
+    end
+
+    private
+
+    # Start the server process
+    def start_server
+      server_log_file = File.join(@log_dir, "server_messages.log")
+
+      # Get the directory where this script is located
+      script_dir = File.dirname(__FILE__)
+      server_script = File.join(script_dir, "chat_server.rb")
+
+      server_cmd = "ruby #{server_script} #{@server_port} #{server_log_file}"
+
+      puts "Starting server: #{server_cmd}"
+
+      # Start the server process as a fork
+      @server_pid = fork do
+        exec(server_cmd)
+      end
+
+      puts "Server started with PID #{@server_pid}"
+    end
+
+    # Stop the server process
+    def stop_server
+      return unless @server_pid
+
+      puts "Stopping server (PID #{@server_pid})..."
+
+      # Send SIGINT to the server process
+      begin
+        Process.kill("INT", @server_pid)
+        # Give it a moment to shut down gracefully
+        sleep(1)
+
+        # Force kill if still running
+        Process.kill("KILL", @server_pid) if process_running?(@server_pid)
+      rescue Errno::ESRCH
+        # Process already gone
+      end
+
+      @server_pid = nil
+      puts "Server stopped"
+    end
+
+    # Start client processes
+    def start_clients
+      # Define the client usernames and their messages
+      clients = {
+        "alice" => [
+          { content: "Hello everyone!", recipient: "all" },
+          { content: "I'm working on a Ruby project using sockets",
+            recipient: "all" },
+          { content: "It's a simple chat server and client", recipient: "all" },
+        ],
+        "bob" => [
+          { content: "Hi Alice!", recipient: "alice" },
+          { content: "That sounds interesting. What kind of project?",
+            recipient: "alice" },
+          { content: "Cool! I love Ruby's socket features",
+            recipient: "alice" },
+        ],
+        "charlie" => [
+          { content: "How's everyone doing today?", recipient: "all" },
+          { content: "Are you using any specific libraries?",
+            recipient: "alice" },
+          { content: "Non-blocking IO in chat clients is efficient",
+            recipient: "all" },
+        ],
+      }
+
+      puts "Starting #{clients.size} clients: #{clients.keys.join(', ')}"
+
+      # Start each client in a separate process
+      clients.each do |username, messages|
+        start_client(username, messages)
+      end
+    end
+
+    # Start a single client process
+    def start_client(username, messages)
+      client_log_file = File.join(@log_dir, "client_#{username}_messages.log")
+      messages_file = File.join(@log_dir,
+                                "client_#{username}_send_messages.json")
+
+      # Write the messages to a JSON file
+      File.write(messages_file, JSON.generate(messages))
+
+      # Get the directory where this script is located
+      script_dir = File.dirname(__FILE__)
+      client_script = File.join(script_dir, "chat_client.rb")
+
+      # Build the client command
+      client_cmd = "ruby #{client_script} #{username} #{@server_port} #{client_log_file}"
+
+      puts "Starting client #{username}"
+
+      # Start the client process as a fork
+      @client_pids[username] = fork do
+        exec(client_cmd)
+      end
+
+      puts "Client #{username} started with PID #{@client_pids[username]}"
+    end
+
+    # Stop all client processes
+    def stop_clients
+      return if @client_pids.empty?
+
+      puts "Stopping #{@client_pids.size} clients..."
+
+      @client_pids.each do |username, pid|
+        # Try to gracefully terminate the process
+        begin
+          Process.kill("INT", pid)
+          # Give it a moment to shut down
+          sleep(0.5)
+
+          # Force kill if still running
+          Process.kill("KILL", pid) if process_running?(pid)
+        rescue Errno::ESRCH
+          # Process already gone
+        end
+
+        puts "Client #{username} stopped"
+      rescue StandardError => e
+        puts "Error stopping client #{username}: #{e.message}"
+      end
+
+      @client_pids.clear
+    end
+
+    # Check if a process is still running
+    def process_running?(pid)
+      Process.getpgid(pid)
+      true
+    rescue Errno::ESRCH
+      false
+    end
+
+    # Analyze the log files after the simulation
+    def analyze_logs
+      puts "\nSimulation Results"
+      puts "================="
+
+      # Analyze server log
+      server_log_file = File.join(@log_dir, "server_messages.log")
+      if File.exist?(server_log_file)
+        server_log = File.readlines(server_log_file)
+        puts "Server processed #{server_log.size} log entries"
+
+        # Count message types
+        message_count = server_log.count do |line|
+          line.include?("Received from")
+        end
+        broadcast_count = server_log.count do |line|
+          line.include?('Broadcasting: {:type=>"broadcast"')
+        end
+        direct_count = server_log.count do |line|
+          line =~ /Received from \w+:.*"recipient":"(?!all)/
+        end
+
+        puts " - #{message_count} messages received from clients"
+        puts " - #{broadcast_count} broadcast messages sent"
+        puts " - #{direct_count} direct messages sent"
+      else
+        puts "Server log file not found"
+      end
+
+      puts "\nClient Activity:"
+      # Analyze each client log
+      @client_pids.each_key do |username|
+        client_log_file = File.join(@log_dir, "client_#{username}_messages.log")
+        if File.exist?(client_log_file)
+          client_log = File.readlines(client_log_file)
+          sent_count = client_log.count { |line| line.include?("Sent message") }
+          received_count = client_log.count do |line|
+            line.include?("Received:")
+          end
+
+          puts " #{username}: Sent #{sent_count} messages, Received #{received_count} messages"
+        else
+          puts " #{username}: Log file not found"
+        end
+      end
+
+      puts "\nLog files are available in the #{@log_dir} directory for detailed analysis."
+    end
+  end
+end
+
+# When run directly, start the simulation
+if __FILE__ == $PROGRAM_NAME
+  options = {
+    port: 3000,
+    duration: 10,
+    log_dir: "logs",
+  }
+
+  # Parse command line options
+  OptionParser.new do |opts|
+    opts.banner = "Usage: ruby simulate.rb [options]"
+
+    opts.on("-p", "--port PORT", Integer,
+            "Server port (default: 3000)") do |port|
+      options[:port] = port
+    end
+
+    opts.on("-d", "--duration SECONDS", Integer,
+            "Simulation duration in seconds (default: 10)") do |duration|
+      options[:duration] = duration
+    end
+
+    opts.on("-l", "--log-dir DIR",
+            "Directory for log files (default: logs)") do |dir|
+      options[:log_dir] = dir
+    end
+
+    opts.on("-h", "--help", "Show this help message") do
+      puts opts
+      exit
+    end
+  end.parse!
+
+  puts "Starting Chat Simulation"
+  puts "======================"
+  puts "This simulation runs a chat server and multiple clients as separate processes"
+  puts "to demonstrate a basic chat application with socket communication."
+  puts
+
+  # Create and run the simulation
+  simulation = ContinuousChat::Simulation.new(
+    options[:port],
+    options[:duration],
+    options[:log_dir],
+  )
+
+  # Set up signal handlers to properly clean up child processes
+  Signal.trap("INT") do
+    puts "\nSimulation interrupted"
+    simulation.stop
+    exit
+  end
+
+  Signal.trap("TERM") do
+    puts "\nSimulation terminated"
+    simulation.stop
+    exit
+  end
+
+  begin
+    simulation.start
+  rescue Interrupt
+    puts "\nSimulation interrupted"
+    simulation.stop
+  end
+
+  puts "Simulation completed"
+end
data/examples/hierarchical_hasher/hierarchical_hasher.rb
@@ -10,7 +10,7 @@ module HierarchicalHasher
       super({
         data: data,
         start: start,
-        length: length || data.bytesize
+        length: length || data.bytesize,
       })
     end

@@ -46,15 +46,15 @@ module HierarchicalHasher
         result: {
           start: work.start,
           length: work.length,
-          hash: hash
+          hash: hash,
         },
-        work: work
+        work: work,
       )
     rescue StandardError => e
       # Return error result if something goes wrong
       Fractor::WorkResult.new(
         error: "Failed to hash chunk: #{e.message}",
-        work: work
+        work: work,
       )
     end
   end
@@ -74,8 +74,8 @@ module HierarchicalHasher
      # Create the supervisor with our worker class in a worker pool
      supervisor = Fractor::Supervisor.new(
        worker_pools: [
-          { worker_class: HashWorker, num_workers: @worker_count }
-        ]
+          { worker_class: HashWorker, num_workers: @worker_count },
+        ],
      )

      # Load the file and create work chunks
@@ -111,10 +111,14 @@ module HierarchicalHasher
      return nil if results_aggregator.results.empty?

      # Sort results by start position
-      sorted_results = results_aggregator.results.sort_by
+      sorted_results = results_aggregator.results.sort_by do |result|
+        result.result[:start]
+      end

      # Concatenate all hashes with newlines
-      combined_hash_string = sorted_results.map
+      combined_hash_string = sorted_results.map do |result|
+        result.result[:hash]
+      end.join("\n")

      # Calculate final SHA-256 hash (instead of SHA3)
      Digest::SHA256.hexdigest(combined_hash_string)
data/examples/multi_work_type/multi_work_type.rb
@@ -45,7 +45,7 @@ module MultiWorkType
     end

     def to_s
-      "ImageWork: dimensions=#{dimensions.join(
+      "ImageWork: dimensions=#{dimensions.join('x')}, format=#{format}"
     end
   end

@@ -62,7 +62,7 @@ module MultiWorkType
         error = TypeError.new("Unsupported work type: #{work.class}")
         Fractor::WorkResult.new(
           error: error,
-          work: work
+          work: work,
         )
       end
     end
@@ -74,7 +74,8 @@ module MultiWorkType
       sleep(rand(0.01..0.05)) # Simulate processing time

       processed_text = case work.format
-                       when :markdown then process_markdown(work.data,
+                       when :markdown then process_markdown(work.data,
+                                                            work.options)
                        when :html then process_html(work.data, work.options)
                        when :json then process_json(work.data, work.options)
                        else work.data.upcase # Simple transformation for plain text
@@ -87,10 +88,10 @@ module MultiWorkType
           transformed_data: processed_text,
           metadata: {
             word_count: processed_text.split(/\s+/).size,
-            char_count: processed_text.length
-          }
+            char_count: processed_text.length,
+          },
         },
-        work: work
+        work: work,
       )
     end

@@ -110,13 +111,13 @@ module MultiWorkType
           applied_filters: %i[sharpen contrast],
           processing_metadata: {
             original_size: input_size,
-            processed_size: (input_size * 0.8).to_i # Simulate compression
-          }
+            processed_size: (input_size * 0.8).to_i, # Simulate compression
+          },
         }

       Fractor::WorkResult.new(
         result: simulated_result,
-        work: work
+        work: work,
       )
     end

@@ -127,7 +128,7 @@ module MultiWorkType
       links = text.scan(/\[(.+?)\]\((.+?)\)/)

       "Processed Markdown: #{text.length} chars, #{headers.size} headers, #{links.size} links\n" \
-        "Headers: #{headers.join(
+        "Headers: #{headers.join(', ')}\n" \
        "#{text.gsub(/^#+\s+(.+)$/, '💫 \1 💫')}"
     end

@@ -136,7 +137,7 @@ module MultiWorkType
       tags = text.scan(/<(\w+)[^>]*>/).flatten

       "Processed HTML: #{text.length} chars, #{tags.size} tags\n" \
-        "Tags: #{tags.uniq.join(
+        "Tags: #{tags.uniq.join(', ')}\n" \
        "#{text.gsub(%r{<(\w+)[^>]*>(.+?)</\1>}, '✨\2✨')}"
     end

@@ -147,7 +148,7 @@ module MultiWorkType
       keys = data.keys

       "Processed JSON: #{keys.size} top-level keys\n" \
-        "Keys: #{keys.join(
+        "Keys: #{keys.join(', ')}\n" \
        "Pretty-printed: #{data}"
     rescue StandardError => e
       "Invalid JSON: #{e.message}"
@@ -162,14 +163,14 @@ module MultiWorkType
       # Create supervisor with a MultiFormatWorker pool
       @supervisor = Fractor::Supervisor.new(
         worker_pools: [
-          { worker_class: MultiFormatWorker, num_workers: worker_count }
-        ]
+          { worker_class: MultiFormatWorker, num_workers: worker_count },
+        ],
       )

       @results = {
         text: [],
         image: [],
-        errors: []
+        errors: [],
       }
     end

@@ -197,10 +198,10 @@ module MultiWorkType
         total_items: text_items.size + image_items.size,
         processed: {
           text: @results[:text].size,
-          image: @results[:image].size
+          image: @results[:image].size,
         },
         errors: @results[:errors].size,
-        results: @results
+        results: @results,
       }
     end

@@ -220,7 +221,7 @@ module MultiWorkType
       results_aggregator.errors.each do |error_result|
         @results[:errors] << {
           error: error_result.error,
-          work_type: error_result.work.class.name
+          work_type: error_result.work.class.name,
         }
       end

@@ -244,21 +245,21 @@ if __FILE__ == $PROGRAM_NAME
   text_items = [
     {
       data: "This is a plain text document. It has no special formatting.",
-      format: :plain
+      format: :plain,
     },
     {
      data: "# Markdown Document\n\nThis is a **bold** statement. Here's a [link](https://example.com).",
-      format: :markdown
+      format: :markdown,
     },
     {
      data: "<html><body><h1>HTML Document</h1><p>This is a paragraph.</p></body></html>",
-      format: :html
+      format: :html,
     },
     {
      data: "{name: 'Product', price: 29.99, tags: ['electronics', 'gadget']}",
      format: :json,
-      options: { pretty: true }
-    }
+      options: { pretty: true },
+    },
   ]

   # Sample image items (simulated)
@@ -266,18 +267,18 @@ if __FILE__ == $PROGRAM_NAME
     {
      data: "simulated_jpeg_data_1",
      dimensions: [800, 600],
-      format: :jpeg
+      format: :jpeg,
     },
     {
      data: "simulated_png_data_1",
      dimensions: [1024, 768],
-      format: :png
+      format: :png,
     },
     {
      data: "simulated_gif_data_1",
      dimensions: [320, 240],
-      format: :gif
-    }
+      format: :gif,
+    },
   ]

   worker_count = 4
@@ -309,8 +310,8 @@ if __FILE__ == $PROGRAM_NAME
   puts "Image Processing Results:"
   result[:results][:image].each_with_index do |image_result, index|
     puts "Image Item #{index + 1} (#{image_result[:format]}):"
-    puts " Dimensions: #{image_result[:dimensions].join(
-    puts " Applied filters: #{image_result[:applied_filters].join(
+    puts " Dimensions: #{image_result[:dimensions].join('x')}"
+    puts " Applied filters: #{image_result[:applied_filters].join(', ')}"
     puts " Compression: #{(1 - image_result[:processing_metadata][:processed_size].to_f / image_result[:processing_metadata][:original_size]).round(2) * 100}%"
     puts
   end
data/examples/pipeline_processing/pipeline_processing.rb
@@ -9,7 +9,7 @@ module PipelineProcessing
       super({
         data: data,
         stage: stage,
-        metadata: metadata
+        metadata: metadata,
       })
     end

@@ -29,7 +29,7 @@ module PipelineProcessing
       "MediaWork: stage=#{stage}, metadata=#{metadata}, data_size=#{begin
         data.bytesize
       rescue StandardError
-
+        'unknown'
       end}"
     end
   end
@@ -46,7 +46,7 @@ module PipelineProcessing
       else
         return Fractor::WorkResult.new(
           error: "Unknown stage: #{work.stage}",
-          work: work
+          work: work,
         )
       end

@@ -58,7 +58,7 @@ module PipelineProcessing
       # Update metadata with processing information
       updated_metadata = work.metadata.merge(
         "#{work.stage}_completed" => true,
-        "#{work.stage}_time" => Time.now.to_s
+        "#{work.stage}_time" => Time.now.to_s,
       )

       # Return the result with next stage information
@@ -67,9 +67,9 @@ module PipelineProcessing
           processed_data: result,
           current_stage: work.stage,
           next_stage: next_stage,
-          metadata: updated_metadata
+          metadata: updated_metadata,
         },
-        work: work
+        work: work,
       )
     end

@@ -95,7 +95,7 @@ module PipelineProcessing
       sleep(rand(0.01..0.05)) # Simulate processing time
       tags = %w[landscape portrait nature urban abstract]
       selected_tags = tags.sample(rand(1..3))
-      "Tagged image: #{work.data} (tags: #{selected_tags.join(
+      "Tagged image: #{work.data} (tags: #{selected_tags.join(', ')})"
     end
   end

@@ -106,8 +106,8 @@ module PipelineProcessing
     def initialize(worker_count = 4)
       @supervisor = Fractor::Supervisor.new(
         worker_pools: [
-          { worker_class: PipelineWorker, num_workers: worker_count }
-        ]
+          { worker_class: PipelineWorker, num_workers: worker_count },
+        ],
       )

       # Register callback to handle pipeline stage transitions
@@ -119,7 +119,7 @@ module PipelineProcessing
         new_work = MediaWork.new(
           result.result[:processed_data],
           next_stage,
-          result.result[:metadata]
+          result.result[:metadata],
         )
         @supervisor.add_work_item(new_work)
       end
@@ -127,7 +127,7 @@ module PipelineProcessing

       @results = {
         completed: [],
-        in_progress: []
+        in_progress: [],
       }
     end

@@ -137,7 +137,7 @@ module PipelineProcessing
         MediaWork.new(
           image,
           :resize,
-          { original_filename: image, started_at: Time.now.to_s }
+          { original_filename: image, started_at: Time.now.to_s },
         )
       end

@@ -159,7 +159,7 @@ module PipelineProcessing
         total_images: images.size,
         completed: @results[:completed].size,
         in_progress: @results[:in_progress].size,
-        results: @results[:completed]
+        results: @results[:completed],
       }
     end
   end
@@ -182,7 +182,7 @@ if __FILE__ == $PROGRAM_NAME
     "mountains.png",
     "beach.jpg",
     "city_skyline.jpg",
-    "forest.png"
+    "forest.png",
   ]

   worker_count = 4
@@ -205,7 +205,7 @@ if __FILE__ == $PROGRAM_NAME
     puts "Image #{index + 1}: #{image_result[:processed_data]}"
     puts " Processing path:"
     image_result[:metadata].each do |key, value|
-      next unless key.to_s.end_with?("_completed"
+      next unless key.to_s.end_with?("_completed", "_time")

       puts " #{key}: #{value}"
     end