query_guard 0.4.2 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +89 -1
  3. data/DESIGN.md +420 -0
  4. data/INDEX.md +309 -0
  5. data/README.md +579 -30
  6. data/exe/queryguard +23 -0
  7. data/lib/query_guard/action_controller_subscriber.rb +27 -0
  8. data/lib/query_guard/analysis/query_risk_classifier.rb +124 -0
  9. data/lib/query_guard/analysis/risk_detectors.rb +258 -0
  10. data/lib/query_guard/analysis/risk_level.rb +35 -0
  11. data/lib/query_guard/analyzers/base.rb +30 -0
  12. data/lib/query_guard/analyzers/query_count_analyzer.rb +31 -0
  13. data/lib/query_guard/analyzers/query_risk_analyzer.rb +146 -0
  14. data/lib/query_guard/analyzers/registry.rb +57 -0
  15. data/lib/query_guard/analyzers/select_star_analyzer.rb +42 -0
  16. data/lib/query_guard/analyzers/slow_query_analyzer.rb +39 -0
  17. data/lib/query_guard/budget.rb +148 -0
  18. data/lib/query_guard/cli/batch_report_formatter.rb +129 -0
  19. data/lib/query_guard/cli/command.rb +93 -0
  20. data/lib/query_guard/cli/commands/analyze.rb +52 -0
  21. data/lib/query_guard/cli/commands/check.rb +58 -0
  22. data/lib/query_guard/cli/formatter.rb +278 -0
  23. data/lib/query_guard/cli/json_reporter.rb +247 -0
  24. data/lib/query_guard/cli/paged_report_formatter.rb +137 -0
  25. data/lib/query_guard/cli/source_metadata_collector.rb +297 -0
  26. data/lib/query_guard/cli.rb +197 -0
  27. data/lib/query_guard/client.rb +4 -6
  28. data/lib/query_guard/config.rb +145 -6
  29. data/lib/query_guard/core/context.rb +80 -0
  30. data/lib/query_guard/core/finding.rb +162 -0
  31. data/lib/query_guard/core/finding_builders.rb +152 -0
  32. data/lib/query_guard/core/query.rb +40 -0
  33. data/lib/query_guard/explain/adapter_interface.rb +89 -0
  34. data/lib/query_guard/explain/explain_enricher.rb +367 -0
  35. data/lib/query_guard/explain/plan_signals.rb +385 -0
  36. data/lib/query_guard/explain/postgresql_adapter.rb +208 -0
  37. data/lib/query_guard/exporter.rb +124 -0
  38. data/lib/query_guard/fingerprint.rb +96 -0
  39. data/lib/query_guard/middleware.rb +101 -15
  40. data/lib/query_guard/migrations/database_adapter.rb +88 -0
  41. data/lib/query_guard/migrations/migration_analyzer.rb +100 -0
  42. data/lib/query_guard/migrations/migration_risk_detectors.rb +390 -0
  43. data/lib/query_guard/migrations/postgresql_adapter.rb +157 -0
  44. data/lib/query_guard/migrations/table_risk_analyzer.rb +154 -0
  45. data/lib/query_guard/migrations/table_size_resolver.rb +152 -0
  46. data/lib/query_guard/publish.rb +38 -0
  47. data/lib/query_guard/rspec.rb +119 -0
  48. data/lib/query_guard/security.rb +99 -0
  49. data/lib/query_guard/store.rb +38 -0
  50. data/lib/query_guard/subscriber.rb +46 -15
  51. data/lib/query_guard/suggest/index_suggester.rb +176 -0
  52. data/lib/query_guard/suggest/pattern_extractors.rb +137 -0
  53. data/lib/query_guard/trace.rb +106 -0
  54. data/lib/query_guard/uploader/http_uploader.rb +166 -0
  55. data/lib/query_guard/uploader/interface.rb +79 -0
  56. data/lib/query_guard/uploader/no_op_uploader.rb +46 -0
  57. data/lib/query_guard/uploader/registry.rb +37 -0
  58. data/lib/query_guard/uploader/upload_service.rb +80 -0
  59. data/lib/query_guard/version.rb +1 -1
  60. data/lib/query_guard.rb +54 -7
  61. metadata +78 -10
  62. data/.rspec +0 -3
  63. data/Rakefile +0 -21
  64. data/config/initializers/query_guard.rb +0 -9
@@ -0,0 +1,278 @@
1
# frozen_string_literal: true

module QueryGuard
  class CLI
    # Formats findings for terminal output or JSON.
    #
    # Provides clean, developer-friendly output with:
    # - Severity and type grouping
    # - Clear recommendations and action items
    # - Index suggestions and migration steps
    # - Summary with detailed counts
    class Formatter
      # Ranking used both for section ordering and the summary breakdown
      # (previously duplicated as literals in print_text and print_summary).
      SEVERITY_ORDER = %i[critical error warn info].freeze

      # @param options [Hash] supported keys:
      #   :json    [Boolean] emit JSON via JsonReporter instead of text
      #   :format  [String]  'json' behaves like :json => true
      #   :verbose [Boolean] dump raw finding metadata in text output
      def initialize(options = {})
        @options = options
        # `options[:format] == 'json'` already yields a boolean, so the
        # original trailing `|| false` was redundant.
        @json = options[:json] || options[:format] == 'json'
        @verbose = options[:verbose] || false
      end

      # Entry point: renders findings as JSON or grouped text.
      #
      # @param findings [Array<Hash>] finding hashes (:severity, :title, ...)
      # @param title    [String] heading for text output
      # @param command  [String] forwarded to the JSON reporter ('analyze'/'check')
      # @param path     [String] forwarded to the JSON reporter
      def print_findings(findings, title = "Analysis Results", command = 'analyze', path = '.')
        if @json
          print_json(findings, command, path)
        else
          print_text(findings, title)
        end
      end

      # Renders the full text report: banner, per-severity sections, summary.
      def print_text(findings, title)
        puts "\n" + "=" * 60
        puts title
        puts "=" * 60

        if findings.empty?
          puts "\n✅ No issues found!\n"
          return
        end

        by_severity = findings.group_by { |f| f[:severity] }

        # Most severe first.
        SEVERITY_ORDER.each do |severity|
          next unless by_severity[severity]

          print_severity_section(severity, by_severity[severity])
        end

        print_summary(findings)
      end

      # One section per severity level, findings grouped by "analyzer:rule".
      def print_severity_section(severity, findings)
        icon = severity_icon(severity)
        color = severity_color(severity)
        label = severity.to_s.upcase

        puts "\n#{icon} #{color}#{label}#{reset_color} (#{findings.length})"
        puts "-" * 60

        by_type = findings.group_by { |f| "#{f[:analyzer_name]}:#{f[:rule_name]}" }
        by_type.each do |type_key, type_findings|
          print_type_group(type_key, type_findings)
        end
      end

      # Prints a group header only when the same type occurs more than once.
      def print_type_group(type_key, findings)
        if findings.length == 1
          print_finding(findings.first)
        else
          puts "\n  [#{type_key}] (#{findings.length} findings)"
          findings.each { |finding| print_finding(finding, indent: "    ") }
        end
      end

      # Renders a single finding: title, location, description, context,
      # recommendations, index/migration guidance and (verbose) raw metadata.
      def print_finding(finding, indent: "  ")
        puts "\n#{indent}#{finding[:title]}"

        if finding[:file_path]
          # to_s instead of the original needless interpolation of a string.
          location = finding[:file_path].to_s
          location += ":#{finding[:line_number]}" if finding[:line_number]
          puts "#{indent}   📄 #{location}"
        end

        puts "#{indent}   #{finding[:description]}" if finding[:description]

        print_finding_context(finding, indent)
        print_recommendations(finding, indent)
        print_index_suggestions(finding, indent)
        print_migration_steps(finding, indent)

        if @verbose && finding[:metadata] && !finding[:metadata].empty?
          puts "#{indent}   [Debug] #{finding[:metadata].inspect}"
        end
      end

      # Table / operation / escalation / risk-level context from metadata.
      def print_finding_context(finding, indent)
        metadata = finding[:metadata] || {}

        if metadata[:table_name]
          rows = metadata[:estimated_table_rows]
          rows_str = rows ? " (#{format_row_count(rows)} rows)" : ""
          puts "#{indent}   🗂️ Table: #{metadata[:table_name]}#{rows_str}"
        end

        puts "#{indent}   ⚙️ Operation: #{metadata[:operation]}" if metadata[:operation]

        if metadata[:severity_escalated]
          original = metadata[:original_severity]
          reason = metadata[:escalation_reason]
          puts "#{indent}   ⬆️ Escalated from #{original}: #{reason}"
        end

        puts "#{indent}   Risk Level: #{metadata[:risk_level]}" if metadata[:risk_level]
      end

      # Action items from the finding's :recommendation (string or array).
      def print_recommendations(finding, indent)
        recommendations = Array(finding[:recommendation])
        return if recommendations.empty?

        puts "#{indent}   ✅ Recommended Actions:"
        recommendations.each { |rec| puts "#{indent}     - #{rec}" }
      end

      # Single (:index_sql) or multiple (:suggested_indexes) suggestions.
      def print_index_suggestions(finding, indent)
        metadata = finding[:metadata] || {}

        if metadata[:index_sql]
          puts "#{indent}   🔧 Suggested Index:"
          puts "#{indent}     #{metadata[:index_sql]}"
        elsif metadata[:suggested_indexes]
          puts "#{indent}   🔧 Suggested Indexes:"
          Array(metadata[:suggested_indexes]).each do |idx_sql|
            puts "#{indent}     #{idx_sql}"
          end
        end
      end

      # Step-by-step migration guidance and safe rollout strategy, if present.
      def print_migration_steps(finding, indent)
        metadata = finding[:metadata] || {}

        if metadata[:migration_steps]
          puts "#{indent}   📋 Migration Steps:"
          Array(metadata[:migration_steps]).each_with_index do |step, i|
            puts "#{indent}     #{i + 1}. #{step}"
          end
        end

        if metadata[:safe_rollout_strategy]
          puts "#{indent}   🛡️ Safe Rollout Strategy:"
          puts "#{indent}     #{metadata[:safe_rollout_strategy]}"
        end
      end

      # Delegates JSON rendering to JsonReporter (stable, versioned schema).
      def print_json(findings, command = 'analyze', path = '.')
        reporter = JsonReporter.new(
          findings: findings,
          command: command,
          path: path,
          options: @options
        )
        puts reporter.generate
      end

      private

      # Totals by severity and by type, plus a file count, printed last.
      def print_summary(findings)
        severity_counts = findings.group_by { |f| f[:severity] }.transform_values(&:length)
        type_counts = findings.group_by { |f| "#{f[:analyzer_name]}:#{f[:rule_name]}" }
                              .transform_values(&:length)

        puts "\n" + "=" * 60
        puts "SUMMARY"
        puts "=" * 60

        puts "\n  Total Findings: #{findings.length}"

        puts "\n  By Severity:"
        SEVERITY_ORDER.each do |severity|
          count = severity_counts[severity] || 0
          next if count.zero?

          puts "    #{severity_icon(severity)} #{severity.to_s.upcase}: #{count}"
        end

        if type_counts.length > 1
          puts "\n  By Type:"
          type_counts.sort_by { |_k, v| -v }.each do |type_key, count|
            puts "    • #{type_key}: #{count}"
          end
        end

        puts "\n  Files Analyzed:"
        # map/compact/uniq is the idiom for counting distinct non-nil values
        # (the original went through group_by(...).keys.compact).
        files_with_findings = findings.map { |f| f[:file_path] }.compact.uniq.length
        puts "    • #{files_with_findings} file#{files_with_findings == 1 ? '' : 's'} with findings"

        puts "\n" + "=" * 60 + "\n"
      end

      # Emoji marker for a severity (trailing spaces align narrow glyphs).
      def severity_icon(severity)
        case severity
        when :critical then "🚨"
        when :error then "❌"
        when :warn then "⚠️ "
        when :info then "ℹ️ "
        else "•"
        end
      end

      # ANSI color escape for a severity ("" when unknown).
      def severity_color(severity)
        case severity
        when :critical, :error then "\e[31m" # Red
        when :warn then "\e[33m"             # Yellow
        when :info then "\e[34m"             # Blue
        else ""
        end
      end

      def reset_color
        "\e[0m"
      end

      # Humanizes a row count: 999 → "999", 1_500 → "1.5K", 2_000_000 → "2.0M".
      # NOTE(review): negative counts fall through to the "B" branch; row
      # counts are assumed non-negative upstream — confirm if that changes.
      def format_row_count(count)
        return "unknown" if count.nil?

        case count
        when 0..999 then count.to_s
        when 1_000..999_999 then "#{(count / 1_000.0).round(1)}K"
        when 1_000_000..999_999_999 then "#{(count / 1_000_000.0).round(1)}M"
        else "#{(count / 1_000_000_000.0).round(1)}B"
        end
      end
    end
  end
end
@@ -0,0 +1,247 @@
1
# frozen_string_literal: true

require 'json'
require 'time'
# Hoisted from method bodies: these are always needed, so lazy requires
# inside generate_* only obscured the dependency.
require 'digest'
require 'securerandom'

module QueryGuard
  class CLI
    # Generates standardized JSON reports for QueryGuard analysis and check results.
    #
    # Provides a stable, versioned JSON schema suitable for:
    # - SaaS platform ingestion
    # - CI/CD integration
    # - External tool consumption
    # - Compliance and audit archival
    #
    # Schema version: 1.0 (documented in docs/JSON_REPORT_SCHEMA.md)
    #
    # Example:
    #   reporter = QueryGuard::CLI::JsonReporter.new(
    #     findings: findings,
    #     command: 'analyze',
    #     path: 'db/migrate',
    #     options: { threshold: 'error' }
    #   )
    #   json_output = reporter.generate
    class JsonReporter
      SCHEMA_VERSION = '1.0'
      TOOL_NAME = 'queryguard'

      # @param findings [Array<Hash>] finding hashes produced by the analyzers
      # @param command  [String] 'analyze' or 'check'
      # @param path     [String] path that was analyzed
      # @param options  [Hash] may override :request_id, :trace_id, :span_id,
      #   :parent_span_id, :source_metadata_collector; :threshold is echoed
      #   into the source block for 'check' runs
      def initialize(findings:, command:, path: '.', options: {})
        @findings = findings
        @command = command
        @path = path
        @options = options
        @started_at = Time.now

        # Tracing context (for distributed systems), overridable via options.
        @request_id = options[:request_id] || generate_request_id
        @trace_id = options[:trace_id] || generate_trace_id
        @span_id = options[:span_id] || generate_span_id
        @parent_span_id = options[:parent_span_id]

        # Source metadata collector (git/CI info), overridable via options.
        @source_metadata_collector = options[:source_metadata_collector] || SourceMetadataCollector.new
      end

      # Generate the complete JSON report.
      # @return [String] pretty-printed JSON
      def generate
        JSON.pretty_generate(build_report)
      end

      # Build the report as a Hash (useful for testing).
      def build_report
        {
          report_version: SCHEMA_VERSION,
          report_type: @command,
          # Time#iso8601 on a UTC time already ends in 'Z', so the original
          # gsub('+00:00', 'Z') was dead code.
          timestamp: Time.now.utc.iso8601,
          tool: build_tool,
          source: build_source,
          summary: build_summary,
          findings: build_findings,
          metadata: build_metadata,
          tracing: build_tracing
        }
      end

      private

      # Tool identification block.
      def build_tool
        # Bug fix: referencing an undefined constant raises NameError, so the
        # previous `QueryGuard::VERSION || 'unknown'` fallback was unreachable.
        version = defined?(QueryGuard::VERSION) ? QueryGuard::VERSION : 'unknown'
        { name: TOOL_NAME, version: version }
      end

      # What was analyzed: absolute path, command, optional threshold and
      # source metadata (git/CI) from the collector.
      def build_source
        source = {
          path: File.expand_path(@path),
          command: @command
        }

        source[:threshold] = @options[:threshold] if @command == 'check' && @options[:threshold]

        source_metadata = @source_metadata_collector.collect
        source[:metadata] = source_metadata if source_metadata

        source
      end

      # Summary statistics: counts per severity plus file tallies.
      def build_summary
        by_severity = @findings.group_by { |f| f[:severity] || :info }

        {
          total_findings: @findings.length,
          by_severity: {
            critical: (by_severity[:critical] || []).length,
            error: (by_severity[:error] || []).length,
            warn: (by_severity[:warn] || []).length,
            info: (by_severity[:info] || []).length
          },
          files_analyzed: unique_files_count,
          files_with_findings: unique_files_with_findings_count
        }
      end

      # Normalized finding objects; keys with nil values are dropped.
      def build_findings
        @findings.map do |finding|
          {
            id: finding[:id] || generate_finding_id(finding),
            analyzer: finding[:analyzer_name] || finding[:analyzer] || 'unknown',
            rule: finding[:rule_name] || finding[:rule] || 'unknown',
            severity: (finding[:severity] || :info).to_s,
            title: finding[:title] || finding[:message] || '(no title)',
            description: finding[:description] || '',
            file_path: finding[:file_path],
            line_number: finding[:line_number],
            recommendation: clean_recommendations(finding[:recommendation]),
            metadata: clean_metadata(finding[:metadata])
          }.compact
        end
      end

      # Additional metadata about the analysis run.
      def build_metadata
        {
          total_files_checked: unique_files_count,
          has_index_suggestions: findings_have_index_suggestions?,
          has_migration_steps: findings_have_migration_steps?,
          execution_time_ms: ((Time.now - @started_at) * 1000).round(2)
        }
      end

      # Tracing context for distributed systems (OpenTelemetry compatible).
      def build_tracing
        tracing = {
          request_id: @request_id,
          trace_id: @trace_id,
          span_id: @span_id
        }
        tracing[:parent_span_id] = @parent_span_id if @parent_span_id
        tracing
      end

      # Distinct non-nil file paths among the findings.
      def unique_files_count
        @findings.map { |f| f[:file_path] }.compact.uniq.length
      end

      # NOTE(review): intentionally identical to unique_files_count — only
      # files that produced findings are visible here, so a true "files
      # analyzed" total cannot be derived from @findings alone. Delegating
      # makes the duplication explicit instead of copy-pasting the expression.
      def unique_files_with_findings_count
        unique_files_count
      end

      # True when any finding carries an index suggestion.
      # (`[].any?` is already false, so no separate empty guard is needed.)
      def findings_have_index_suggestions?
        @findings.any? do |f|
          metadata = f[:metadata] || {}
          metadata[:index_sql] || metadata[:suggested_indexes]
        end
      end

      # True when any finding carries migration steps.
      def findings_have_migration_steps?
        @findings.any? do |f|
          metadata = f[:metadata] || {}
          metadata[:migration_steps]
        end
      end

      # Recommendations normalized to an array of strings (nil when absent).
      def clean_recommendations(recommendations)
        return nil if recommendations.nil? || recommendations.empty?

        Array(recommendations).map(&:to_s)
      end

      # Metadata normalized for JSON serialization (nil when absent).
      def clean_metadata(metadata)
        return nil if metadata.nil? || metadata.empty?

        clean_hash(metadata)
      end

      # Recursively coerce values to JSON-safe types; unknown objects become
      # their string representation so serialization can never raise.
      def clean_hash(hash)
        hash.transform_values do |value|
          case value
          when Hash
            clean_hash(value)
          when Array
            value.map { |v| v.is_a?(Hash) ? clean_hash(v) : v.to_s }
          when String, Numeric, TrueClass, FalseClass, NilClass
            value
          else
            value.to_s
          end
        end
      end

      # Deterministic 16-hex-char ID derived from the finding's coordinates.
      def generate_finding_id(finding)
        content = "#{finding[:analyzer_name]}:#{finding[:rule_name]}:#{finding[:file_path]}:#{finding[:line_number]}"
        Digest::SHA256.hexdigest(content)[0, 16]
      end

      # Random request ID (16 hex chars).
      def generate_request_id
        SecureRandom.hex(8)
      end

      # Random trace ID (16 hex chars, 64-bit).
      def generate_trace_id
        SecureRandom.hex(8)
      end

      # Random span ID (8 hex chars, 32-bit).
      def generate_span_id
        SecureRandom.hex(4)
      end
    end
  end
end
@@ -0,0 +1,137 @@
1
# frozen_string_literal: true

require 'json'
require 'time'
# Hoisted from method bodies: token encoding/decoding always needs it.
require 'base64'

module QueryGuard
  class CLI
    # Formats QueryGuard reports with pagination support for large finding sets.
    #
    # When a report has many findings (e.g., thousands), pagination allows:
    # - Chunking findings into pages for API transmission
    # - Cursor-based pagination for large datasets
    # - Memory-efficient processing on receiving end
    #
    # Example:
    #   # Paginate findings into pages of 100 each
    #   paged = PagedReportFormatter.new(
    #     report: full_report,
    #     page_size: 100,
    #     page: 1
    #   )
    #   json = paged.generate # Returns report with findings[0..99]
    #
    # Example with cursor:
    #   paged = PagedReportFormatter.new(
    #     report: full_report,
    #     continuation_token: 'abc123...'
    #   )
    #   json = paged.generate # Returns next batch of findings
    class PagedReportFormatter
      DEFAULT_PAGE_SIZE = 100
      MAX_PAGE_SIZE = 1000

      # @param report             [Hash] full report; not mutated
      # @param page_size          [Integer] clamped to 1..MAX_PAGE_SIZE
      # @param page               [Integer] 1-based page number (min 1)
      # @param continuation_token [String, nil] opaque cursor from a previous page
      def initialize(report:, page_size: DEFAULT_PAGE_SIZE, page: 1, continuation_token: nil)
        @report = report.dup # Don't mutate original

        # Bug fix: clamp to 1..MAX — a zero/negative page size previously
        # produced empty pages with has_more stuck at true (infinite loop
        # for any paginating client).
        @page_size = page_size.to_i.clamp(1, MAX_PAGE_SIZE)
        @page = [page.to_i, 1].max
        @continuation_token = continuation_token

        # Bug fix: the continuation token was accepted and documented but
        # never used (decode_page_token was dead code). Honor it by resuming
        # from the encoded cursor position; a malformed token falls back to
        # page 1 via decode_page_token's rescue.
        if continuation_token
          cursor = decode_page_token(continuation_token)
          @page = [cursor[:page].to_i, 1].max
          @page_size = cursor[:page_size].to_i.clamp(1, MAX_PAGE_SIZE)
        end

        @all_findings = @report.delete(:findings) || []
      end

      # Generate the paginated report.
      # @return [String] pretty-printed JSON
      def generate
        JSON.pretty_generate(build_paged_report)
      end

      # Build the paginated report as a Hash.
      def build_paged_report
        start_index = (@page - 1) * @page_size
        end_index = start_index + @page_size

        # Bug fix: slicing past the end of the array returns nil, not [] —
        # a page number beyond the data previously produced findings: nil
        # and crashed update_summary.
        paginated_findings = @all_findings[start_index...end_index] || []
        has_more = end_index < @all_findings.length

        report = @report.dup
        report[:findings] = paginated_findings

        # Deriving findings_on_page from the actual slice also fixes the
        # old formula going negative for out-of-range pages.
        report[:pagination] = build_pagination(has_more, paginated_findings.length)

        # Update summary to match paginated findings only
        report[:summary] = update_summary(paginated_findings) if report[:summary]

        report
      end

      private

      # Pagination metadata, including cursor tokens for adjacent pages.
      def build_pagination(has_more, findings_on_page)
        pagination = {
          page: @page,
          page_size: @page_size,
          total_findings: @all_findings.length,
          has_more: has_more,
          findings_on_page: findings_on_page
        }

        pagination[:next_page_token] = generate_page_token(@page + 1) if has_more
        pagination[:prev_page_token] = generate_page_token(@page - 1) if @page > 1

        pagination
      end

      # Rewrites severity counts to reflect only the current page.
      # File counts from the full analysis are intentionally kept.
      def update_summary(paginated_findings)
        summary = @report[:summary].dup
        by_severity = paginated_findings.group_by { |f| f[:severity] || :info }

        summary[:total_findings] = paginated_findings.length
        summary[:by_severity] = {
          critical: (by_severity[:critical] || []).length,
          error: (by_severity[:error] || []).length,
          warn: (by_severity[:warn] || []).length,
          info: (by_severity[:info] || []).length
        }

        summary
      end

      # Opaque, base64-encoded cursor pointing at a page.
      # generated_at is informational only; it is not validated on decode.
      def generate_page_token(page_number)
        data = {
          page: page_number,
          page_size: @page_size,
          generated_at: Time.now.to_i
        }
        Base64.strict_encode64(JSON.dump(data))
      end

      # Decode a cursor; malformed input falls back to page 1 with the
      # currently configured page size.
      def decode_page_token(token)
        data = JSON.parse(Base64.strict_decode64(token))
        {
          page: data['page'],
          page_size: data['page_size']
        }
      rescue StandardError
        { page: 1, page_size: @page_size }
      end
    end
  end
end