simple_flow 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. checksums.yaml +7 -0
  2. data/.envrc +1 -0
  3. data/.github/workflows/deploy-github-pages.yml +52 -0
  4. data/.rubocop.yml +57 -0
  5. data/CHANGELOG.md +4 -0
  6. data/COMMITS.md +196 -0
  7. data/LICENSE +21 -0
  8. data/README.md +481 -0
  9. data/Rakefile +15 -0
  10. data/benchmarks/parallel_vs_sequential.rb +98 -0
  11. data/benchmarks/pipeline_overhead.rb +130 -0
  12. data/docs/api/middleware.md +468 -0
  13. data/docs/api/parallel-step.md +363 -0
  14. data/docs/api/pipeline.md +382 -0
  15. data/docs/api/result.md +375 -0
  16. data/docs/concurrent/best-practices.md +687 -0
  17. data/docs/concurrent/introduction.md +246 -0
  18. data/docs/concurrent/parallel-steps.md +418 -0
  19. data/docs/concurrent/performance.md +481 -0
  20. data/docs/core-concepts/flow-control.md +452 -0
  21. data/docs/core-concepts/middleware.md +389 -0
  22. data/docs/core-concepts/overview.md +219 -0
  23. data/docs/core-concepts/pipeline.md +315 -0
  24. data/docs/core-concepts/result.md +168 -0
  25. data/docs/core-concepts/steps.md +391 -0
  26. data/docs/development/benchmarking.md +443 -0
  27. data/docs/development/contributing.md +380 -0
  28. data/docs/development/dagwood-concepts.md +435 -0
  29. data/docs/development/testing.md +514 -0
  30. data/docs/getting-started/examples.md +197 -0
  31. data/docs/getting-started/installation.md +62 -0
  32. data/docs/getting-started/quick-start.md +218 -0
  33. data/docs/guides/choosing-concurrency-model.md +441 -0
  34. data/docs/guides/complex-workflows.md +440 -0
  35. data/docs/guides/data-fetching.md +478 -0
  36. data/docs/guides/error-handling.md +635 -0
  37. data/docs/guides/file-processing.md +505 -0
  38. data/docs/guides/validation-patterns.md +496 -0
  39. data/docs/index.md +169 -0
  40. data/examples/.gitignore +3 -0
  41. data/examples/01_basic_pipeline.rb +112 -0
  42. data/examples/02_error_handling.rb +178 -0
  43. data/examples/03_middleware.rb +186 -0
  44. data/examples/04_parallel_automatic.rb +221 -0
  45. data/examples/05_parallel_explicit.rb +279 -0
  46. data/examples/06_real_world_ecommerce.rb +288 -0
  47. data/examples/07_real_world_etl.rb +277 -0
  48. data/examples/08_graph_visualization.rb +246 -0
  49. data/examples/09_pipeline_visualization.rb +266 -0
  50. data/examples/10_concurrency_control.rb +235 -0
  51. data/examples/11_sequential_dependencies.rb +243 -0
  52. data/examples/12_none_constant.rb +161 -0
  53. data/examples/README.md +374 -0
  54. data/examples/regression_test/01_basic_pipeline.txt +38 -0
  55. data/examples/regression_test/02_error_handling.txt +92 -0
  56. data/examples/regression_test/03_middleware.txt +61 -0
  57. data/examples/regression_test/04_parallel_automatic.txt +86 -0
  58. data/examples/regression_test/05_parallel_explicit.txt +80 -0
  59. data/examples/regression_test/06_real_world_ecommerce.txt +53 -0
  60. data/examples/regression_test/07_real_world_etl.txt +58 -0
  61. data/examples/regression_test/08_graph_visualization.txt +429 -0
  62. data/examples/regression_test/09_pipeline_visualization.txt +305 -0
  63. data/examples/regression_test/10_concurrency_control.txt +96 -0
  64. data/examples/regression_test/11_sequential_dependencies.txt +86 -0
  65. data/examples/regression_test/12_none_constant.txt +64 -0
  66. data/examples/regression_test.rb +105 -0
  67. data/lib/simple_flow/dependency_graph.rb +120 -0
  68. data/lib/simple_flow/dependency_graph_visualizer.rb +326 -0
  69. data/lib/simple_flow/middleware.rb +36 -0
  70. data/lib/simple_flow/parallel_executor.rb +80 -0
  71. data/lib/simple_flow/pipeline.rb +405 -0
  72. data/lib/simple_flow/result.rb +88 -0
  73. data/lib/simple_flow/step_tracker.rb +58 -0
  74. data/lib/simple_flow/version.rb +5 -0
  75. data/lib/simple_flow.rb +41 -0
  76. data/mkdocs.yml +146 -0
  77. data/pipeline_graph.dot +51 -0
  78. data/pipeline_graph.html +60 -0
  79. data/pipeline_graph.mmd +19 -0
  80. metadata +127 -0
data/examples/07_real_world_etl.rb
@@ -0,0 +1,277 @@
+ #!/usr/bin/env ruby
+ # frozen_string_literal: true
+
+ require_relative '../lib/simple_flow'
+ require 'json'
+ require 'timecop'
+ Timecop.travel(Time.local(2001, 9, 11, 7, 0, 0))
+
+ # Real-world example: Data ETL (Extract, Transform, Load) pipeline
+
+ puts "=" * 60
+ puts "Real-World Example: Data ETL Pipeline"
+ puts "=" * 60
+ puts
+
+ # Simulate data sources
+ class DataSource
+   def self.fetch_users_csv
+     sleep 0.1
+     [
+       { id: 1, name: "Alice Johnson", email: "alice@example.com", signup_date: "2023-01-15" },
+       { id: 2, name: "Bob Smith", email: "bob@example.com", signup_date: "2023-02-20" },
+       { id: 3, name: "Charlie Brown", email: "CHARLIE@EXAMPLE.COM", signup_date: "2023-03-10" }
+     ]
+   end
+
+   def self.fetch_orders_json
+     sleep 0.1
+     [
+       { order_id: 101, user_id: 1, amount: 150.00, status: "completed" },
+       { order_id: 102, user_id: 2, amount: 75.50, status: "pending" },
+       { order_id: 103, user_id: 1, amount: 200.00, status: "completed" },
+       { order_id: 104, user_id: 3, amount: 50.00, status: "cancelled" }
+     ]
+   end
+
+   def self.fetch_products_api
+     sleep 0.1
+     [
+       { product_id: "A1", name: "Widget", category: "tools" },
+       { product_id: "B2", name: "Gadget", category: "electronics" },
+       { product_id: "C3", name: "Doohickey", category: "tools" }
+     ]
+   end
+ end
+
+ # Build the ETL pipeline
+ etl_pipeline = SimpleFlow::Pipeline.new do
+   # Extract Phase: Load data from multiple sources in parallel
+   step :extract_users, ->(result) {
+     puts " 📥 Extracting users from CSV..."
+     users = DataSource.fetch_users_csv
+     result.with_context(:raw_users, users).continue(result.value)
+   }, depends_on: :none
+
+   step :extract_orders, ->(result) {
+     puts " 📥 Extracting orders from JSON..."
+     orders = DataSource.fetch_orders_json
+     result.with_context(:raw_orders, orders).continue(result.value)
+   }, depends_on: :none
+
+   step :extract_products, ->(result) {
+     puts " 📥 Extracting products from API..."
+     products = DataSource.fetch_products_api
+     result.with_context(:raw_products, products).continue(result.value)
+   }, depends_on: :none
+
+   # Transform Phase: Clean and normalize data in parallel
+   step :transform_users, ->(result) {
+     puts " 🔄 Transforming user data..."
+     raw_users = result.context[:raw_users]
+
+     transformed = raw_users.map do |user|
+       {
+         id: user[:id],
+         name: user[:name].downcase.split.map(&:capitalize).join(' '),
+         email: user[:email].downcase,
+         signup_year: user[:signup_date].split('-').first.to_i,
+         created_at: Time.now
+       }
+     end
+
+     result.with_context(:users, transformed).continue(result.value)
+   }, depends_on: [:extract_users]
+
+   step :transform_orders, ->(result) {
+     puts " 🔄 Transforming order data..."
+     raw_orders = result.context[:raw_orders]
+
+     # Filter out cancelled orders and add computed fields
+     transformed = raw_orders
+       .reject { |o| o[:status] == "cancelled" }
+       .map do |order|
+         {
+           id: order[:order_id],
+           user_id: order[:user_id],
+           amount: order[:amount],
+           status: order[:status].to_sym,
+           tax: (order[:amount] * 0.08).round(2),
+           total: (order[:amount] * 1.08).round(2)
+         }
+       end
+
+     result.with_context(:orders, transformed).continue(result.value)
+   }, depends_on: [:extract_orders]
+
+   step :transform_products, ->(result) {
+     puts " 🔄 Transforming product data..."
+     raw_products = result.context[:raw_products]
+
+     # Normalize and categorize
+     transformed = raw_products.map do |product|
+       {
+         id: product[:product_id],
+         name: product[:name],
+         category: product[:category].to_sym,
+         normalized_name: product[:name].downcase.gsub(/[^a-z0-9]/, '_')
+       }
+     end
+
+     result.with_context(:products, transformed).continue(result.value)
+   }, depends_on: [:extract_products]
+
+   # Aggregate Phase: Join and compute analytics
+   step :aggregate_user_stats, ->(result) {
+     puts " 📊 Aggregating user statistics..."
+     users = result.context[:users]
+     orders = result.context[:orders]
+
+     user_stats = users.map do |user|
+       user_orders = orders.select { |o| o[:user_id] == user[:id] }
+       {
+         user_id: user[:id],
+         name: user[:name],
+         email: user[:email],
+         total_orders: user_orders.size,
+         total_spent: user_orders.sum { |o| o[:total] },
+         avg_order_value: user_orders.size > 0 ? (user_orders.sum { |o| o[:total] } / user_orders.size).round(2) : 0
+       }
+     end
+
+     result.with_context(:user_stats, user_stats).continue(result.value)
+   }, depends_on: [:transform_users, :transform_orders]
+
+   step :aggregate_category_stats, ->(result) {
+     puts " 📊 Aggregating category statistics..."
+     products = result.context[:products]
+
+     category_stats = products
+       .group_by { |p| p[:category] }
+       .transform_values { |prods| { count: prods.size, products: prods.map { |p| p[:name] } } }
+
+     result.with_context(:category_stats, category_stats).continue(result.value)
+   }, depends_on: [:transform_products]
+
+   # Validation Phase: Check data quality
+   step :validate_data, ->(result) {
+     puts " ✅ Validating data quality..."
+     users = result.context[:users]
+     orders = result.context[:orders]
+
+     issues = []
+
+     # Check for duplicate emails
+     emails = users.map { |u| u[:email] }
+     duplicates = emails.select { |e| emails.count(e) > 1 }.uniq
+     issues << "Duplicate emails found: #{duplicates.join(', ')}" if duplicates.any?
+
+     # Check for orphaned orders
+     user_ids = users.map { |u| u[:id] }
+     orphaned = orders.reject { |o| user_ids.include?(o[:user_id]) }
+     issues << "#{orphaned.size} orphaned orders found" if orphaned.any?
+
+     if issues.any?
+       result.with_context(:validation_warnings, issues).continue(result.value)
+     else
+       result.with_context(:validation_warnings, []).continue(result.value)
+     end
+   }, depends_on: [:aggregate_user_stats]
+
+   # Load Phase: Prepare final output
+   step :prepare_output, ->(result) {
+     puts " 💾 Preparing output..."
+
+     output = {
+       metadata: {
+         processed_at: Time.now,
+         pipeline_version: "1.0",
+         warnings: result.context[:validation_warnings]
+       },
+       users: result.context[:users],
+       orders: result.context[:orders],
+       products: result.context[:products],
+       analytics: {
+         user_stats: result.context[:user_stats],
+         category_stats: result.context[:category_stats],
+         summary: {
+           total_users: result.context[:users].size,
+           total_orders: result.context[:orders].size,
+           total_products: result.context[:products].size,
+           total_revenue: result.context[:orders].sum { |o| o[:total] }.round(2)
+         }
+       }
+     }
+
+     result.continue(output)
+   }, depends_on: [:validate_data, :aggregate_category_stats]
+ end
+
+ puts "\nStarting ETL pipeline..."
+ puts "=" * 60
+ puts
+
+ start_time = Time.now
+ result = etl_pipeline.call_parallel(SimpleFlow::Result.new({}))
+ elapsed = Time.now - start_time
+
+ puts "\n" + "=" * 60
+ if result.continue?
+   puts "✅ ETL Pipeline completed successfully!"
+   puts "=" * 60
+
+   output = result.value
+
+   puts "\nMetadata:"
+   puts " Processed at: #{output[:metadata][:processed_at]}"
+   puts " Pipeline version: #{output[:metadata][:pipeline_version]}"
+   if output[:metadata][:warnings].any?
+     puts " Warnings: #{output[:metadata][:warnings].join('; ')}"
+   end
+
+   puts "\nData Summary:"
+   puts " Users processed: #{output[:analytics][:summary][:total_users]}"
+   puts " Orders processed: #{output[:analytics][:summary][:total_orders]}"
+   puts " Products processed: #{output[:analytics][:summary][:total_products]}"
+   puts " Total revenue: $#{output[:analytics][:summary][:total_revenue]}"
+
+   puts "\nUser Statistics:"
+   output[:analytics][:user_stats].each do |stat|
+     puts " #{stat[:name]} (#{stat[:email]})"
+     puts " Orders: #{stat[:total_orders]}, Spent: $#{stat[:total_spent]}, Avg: $#{stat[:avg_order_value]}"
+   end
+
+   puts "\nCategory Statistics:"
+   output[:analytics][:category_stats].each do |category, stats|
+     puts " #{category}: #{stats[:count]} products (#{stats[:products].join(', ')})"
+   end
+
+   puts "\nProcessing time: #{(elapsed * 1000).round(2)}ms"
+
+   # Show dependency graph execution
+   puts "\nExecution Flow:"
+   puts " Phase 1 (Extract): users, orders, products (parallel)"
+   puts " Phase 2 (Transform): transform_users, transform_orders, transform_products (parallel)"
+   puts " Phase 3 (Aggregate): user_stats, category_stats (parallel after transforms)"
+   puts " Phase 4 (Validate): data validation"
+   puts " Phase 5 (Load): prepare output"
+
+   # Optionally save to file
+   if ARGV.include?("--save")
+     filename = "etl_output_#{Time.now.to_i}.json"
+     File.write(filename, JSON.pretty_generate(output))
+     puts "\n✅ Output saved to #{filename}"
+   end
+ else
+   puts "❌ ETL Pipeline failed"
+   puts "=" * 60
+   puts "\nErrors:"
+   result.errors.each do |category, messages|
+     puts " #{category}: #{messages.join(', ')}"
+   end
+ end
+
+ puts "\n" + "=" * 60
+ puts "ETL example completed!"
+ puts "Run with --save flag to save output to JSON file"
+ puts "=" * 60
data/examples/08_graph_visualization.rb
@@ -0,0 +1,246 @@
+ #!/usr/bin/env ruby
+ # frozen_string_literal: true
+
+ require_relative '../lib/simple_flow'
+ require 'timecop'
+ Timecop.travel(Time.local(2001, 9, 11, 7, 0, 0))
+
+ # Graph visualization examples
+
+ puts "=" * 60
+ puts "Dependency Graph Visualization"
+ puts "=" * 60
+ puts
+
+ # Example 1: Simple graph
+ puts "Example 1: Simple Dependency Graph"
+ puts "-" * 60
+ puts
+
+ simple_graph = SimpleFlow::DependencyGraph.new(
+   step_a: [],
+   step_b: [:step_a],
+   step_c: [:step_a],
+   step_d: [:step_b, :step_c]
+ )
+
+ visualizer = SimpleFlow::DependencyGraphVisualizer.new(simple_graph)
+
+ puts visualizer.to_ascii
+ puts
+
+ # Example 2: Complex real-world graph
+ puts "\n" + "=" * 60
+ puts "Example 2: E-commerce Order Processing Graph"
+ puts "=" * 60
+ puts
+
+ ecommerce_graph = SimpleFlow::DependencyGraph.new(
+   validate_order: [],
+   check_inventory: [:validate_order],
+   calculate_shipping: [:validate_order],
+   calculate_totals: [:check_inventory, :calculate_shipping],
+   process_payment: [:calculate_totals],
+   reserve_inventory: [:process_payment],
+   create_shipment: [:reserve_inventory],
+   send_email: [:create_shipment],
+   send_sms: [:create_shipment],
+   finalize_order: [:send_email, :send_sms]
+ )
+
+ ecommerce_visualizer = SimpleFlow::DependencyGraphVisualizer.new(ecommerce_graph)
+
+ puts ecommerce_visualizer.to_ascii
+ puts
+
+ # Example 3: Execution plan
+ puts "\n" + "=" * 60
+ puts "Example 3: Execution Plan"
+ puts "=" * 60
+ puts
+
+ puts ecommerce_visualizer.to_execution_plan
+ puts
+
+ # Example 4: ETL Pipeline graph
+ puts "\n" + "=" * 60
+ puts "Example 4: ETL Pipeline Graph"
+ puts "=" * 60
+ puts
+
+ etl_graph = SimpleFlow::DependencyGraph.new(
+   extract_users: [],
+   extract_orders: [],
+   extract_products: [],
+   transform_users: [:extract_users],
+   transform_orders: [:extract_orders],
+   transform_products: [:extract_products],
+   aggregate_user_stats: [:transform_users, :transform_orders],
+   aggregate_category_stats: [:transform_products],
+   validate_data: [:aggregate_user_stats],
+   prepare_output: [:validate_data, :aggregate_category_stats]
+ )
+
+ etl_visualizer = SimpleFlow::DependencyGraphVisualizer.new(etl_graph)
+
+ puts etl_visualizer.to_execution_plan
+ puts
+
+ # Example 5: Export formats
+ puts "\n" + "=" * 60
+ puts "Example 5: Exporting to Different Formats"
+ puts "=" * 60
+ puts
+
+ # Export to Graphviz DOT format
+ dot_output = ecommerce_visualizer.to_dot(include_groups: true, orientation: 'TB')
+ File.write('ecommerce_graph.dot', dot_output)
+ puts "✓ Exported to Graphviz DOT format: ecommerce_graph.dot"
+ puts " To generate PNG: dot -Tpng ecommerce_graph.dot -o ecommerce_graph.png"
+ puts " To generate SVG: dot -Tsvg ecommerce_graph.dot -o ecommerce_graph.svg"
+ puts
+
+ # Export to Mermaid format
+ mermaid_output = ecommerce_visualizer.to_mermaid
+ File.write('ecommerce_graph.mmd', mermaid_output)
+ puts "✓ Exported to Mermaid format: ecommerce_graph.mmd"
+ puts " View at: https://mermaid.live/"
+ puts
+
+ # Export to HTML
+ html_output = ecommerce_visualizer.to_html(title: "E-commerce Order Processing Graph")
+ File.write('ecommerce_graph.html', html_output)
+ puts "✓ Exported to interactive HTML: ecommerce_graph.html"
+ puts " Open in browser to view interactive graph"
+ puts
+
+ # Show the DOT format
+ puts "\nGraphviz DOT Format Preview:"
+ puts "-" * 60
+ puts dot_output.lines.take(20).join
+ puts "... (truncated)"
+ puts
+
+ # Show the Mermaid format
+ puts "\nMermaid Format Preview:"
+ puts "-" * 60
+ puts mermaid_output.lines.take(15).join
+ puts "... (truncated)"
+ puts
+
+ # Example 6: Visualizing a Pipeline (Direct Method - RECOMMENDED)
+ puts "\n" + "=" * 60
+ puts "Example 6: Visualizing a Pipeline Directly"
+ puts "=" * 60
+ puts
+
+ pipeline = SimpleFlow::Pipeline.new do
+   step :fetch_config, ->(result) {
+     result.with_context(:config, {}).continue(result.value)
+   }, depends_on: :none
+
+   step :load_data, ->(result) {
+     result.with_context(:data, []).continue(result.value)
+   }, depends_on: [:fetch_config]
+
+   step :validate_schema, ->(result) {
+     result.continue(result.value)
+   }, depends_on: [:load_data]
+
+   step :enrich_data, ->(result) {
+     result.continue(result.value)
+   }, depends_on: [:load_data]
+
+   step :save_results, ->(result) {
+     result.continue(result.value)
+   }, depends_on: [:validate_schema, :enrich_data]
+ end
+
+ # RECOMMENDED: Visualize directly from the pipeline
+ puts "Pipeline Dependency Graph:"
+ puts
+ puts pipeline.visualize_ascii
+ puts
+
+ # Alternative (manual approach - not recommended):
+ # pipeline_graph = SimpleFlow::DependencyGraph.new(pipeline.step_dependencies)
+ # pipeline_visualizer = SimpleFlow::DependencyGraphVisualizer.new(pipeline_graph)
+ # puts pipeline_visualizer.to_ascii
+
+ # Example 7: Comparing different graph structures
+ puts "\n" + "=" * 60
+ puts "Example 7: Graph Structure Comparison"
+ puts "=" * 60
+ puts
+
+ # Linear pipeline (no parallelism)
+ linear_graph = SimpleFlow::DependencyGraph.new(
+   step1: [],
+   step2: [:step1],
+   step3: [:step2],
+   step4: [:step3],
+   step5: [:step4]
+ )
+
+ # Fan-out/fan-in (maximum parallelism)
+ fanout_graph = SimpleFlow::DependencyGraph.new(
+   start: [],
+   task1: [:start],
+   task2: [:start],
+   task3: [:start],
+   task4: [:start],
+   end: [:task1, :task2, :task3, :task4]
+ )
+
+ puts "Linear Pipeline (Sequential):"
+ puts SimpleFlow::DependencyGraphVisualizer.new(linear_graph).to_execution_plan
+ puts
+
+ puts "\nFan-out/Fan-in Pipeline (Parallel):"
+ puts SimpleFlow::DependencyGraphVisualizer.new(fanout_graph).to_execution_plan
+ puts
+
+ # Example 8: Graph statistics
+ puts "\n" + "=" * 60
+ puts "Example 8: Graph Analytics"
+ puts "=" * 60
+ puts
+
+ def analyze_graph(graph, name)
+   parallel_groups = graph.parallel_order
+   total_steps = graph.dependencies.size
+   max_parallel = parallel_groups.map(&:size).max
+
+   puts "#{name}:"
+   puts " Total steps: #{total_steps}"
+   puts " Execution phases: #{parallel_groups.size}"
+   puts " Max parallel steps: #{max_parallel}"
+   puts " Theoretical speedup: #{(total_steps.to_f / parallel_groups.size).round(2)}x"
+   puts " Parallelization ratio: #{((max_parallel.to_f / total_steps) * 100).round(1)}%"
+   puts
+ end
+
+ analyze_graph(simple_graph, "Simple Graph")
+ analyze_graph(ecommerce_graph, "E-commerce Graph")
+ analyze_graph(etl_graph, "ETL Graph")
+ analyze_graph(linear_graph, "Linear Graph")
+ analyze_graph(fanout_graph, "Fan-out/Fan-in Graph")
+
+ puts "=" * 60
+ puts "Graph visualization examples completed!"
+ puts
+ puts "Generated files:"
+ puts " - ecommerce_graph.dot (Graphviz format)"
+ puts " - ecommerce_graph.mmd (Mermaid format)"
+ puts " - ecommerce_graph.html (Interactive HTML)"
+ puts
+ puts "To generate images with Graphviz:"
+ puts " $ dot -Tpng ecommerce_graph.dot -o ecommerce_graph.png"
+ puts " $ dot -Tsvg ecommerce_graph.dot -o ecommerce_graph.svg"
+ puts " $ dot -Tpdf ecommerce_graph.dot -o ecommerce_graph.pdf"
+ puts
+ puts "To view Mermaid diagram:"
+ puts " 1. Visit https://mermaid.live/"
+ puts " 2. Paste contents of ecommerce_graph.mmd"
+ puts " 3. Or use Mermaid CLI: mmdc -i ecommerce_graph.mmd -o graph.png"
+ puts "=" * 60