logstruct 0.0.1 → 0.0.2.pre.rc1

This diff compares the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
Files changed (82)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +26 -2
  3. data/LICENSE +21 -0
  4. data/README.md +67 -0
  5. data/lib/log_struct/concerns/configuration.rb +93 -0
  6. data/lib/log_struct/concerns/error_handling.rb +94 -0
  7. data/lib/log_struct/concerns/logging.rb +45 -0
  8. data/lib/log_struct/config_struct/error_handling_modes.rb +25 -0
  9. data/lib/log_struct/config_struct/filters.rb +80 -0
  10. data/lib/log_struct/config_struct/integrations.rb +89 -0
  11. data/lib/log_struct/configuration.rb +59 -0
  12. data/lib/log_struct/enums/error_handling_mode.rb +22 -0
  13. data/lib/log_struct/enums/error_reporter.rb +14 -0
  14. data/lib/log_struct/enums/event.rb +48 -0
  15. data/lib/log_struct/enums/level.rb +66 -0
  16. data/lib/log_struct/enums/source.rb +26 -0
  17. data/lib/log_struct/enums.rb +9 -0
  18. data/lib/log_struct/formatter.rb +224 -0
  19. data/lib/log_struct/handlers.rb +27 -0
  20. data/lib/log_struct/hash_utils.rb +21 -0
  21. data/lib/log_struct/integrations/action_mailer/callbacks.rb +100 -0
  22. data/lib/log_struct/integrations/action_mailer/error_handling.rb +173 -0
  23. data/lib/log_struct/integrations/action_mailer/event_logging.rb +90 -0
  24. data/lib/log_struct/integrations/action_mailer/metadata_collection.rb +78 -0
  25. data/lib/log_struct/integrations/action_mailer.rb +50 -0
  26. data/lib/log_struct/integrations/active_job/log_subscriber.rb +104 -0
  27. data/lib/log_struct/integrations/active_job.rb +38 -0
  28. data/lib/log_struct/integrations/active_record.rb +258 -0
  29. data/lib/log_struct/integrations/active_storage.rb +94 -0
  30. data/lib/log_struct/integrations/carrierwave.rb +111 -0
  31. data/lib/log_struct/integrations/good_job/log_subscriber.rb +228 -0
  32. data/lib/log_struct/integrations/good_job/logger.rb +73 -0
  33. data/lib/log_struct/integrations/good_job.rb +111 -0
  34. data/lib/log_struct/integrations/host_authorization.rb +81 -0
  35. data/lib/log_struct/integrations/integration_interface.rb +21 -0
  36. data/lib/log_struct/integrations/lograge.rb +114 -0
  37. data/lib/log_struct/integrations/rack.rb +31 -0
  38. data/lib/log_struct/integrations/rack_error_handler/middleware.rb +146 -0
  39. data/lib/log_struct/integrations/rack_error_handler.rb +32 -0
  40. data/lib/log_struct/integrations/shrine.rb +75 -0
  41. data/lib/log_struct/integrations/sidekiq/logger.rb +43 -0
  42. data/lib/log_struct/integrations/sidekiq.rb +39 -0
  43. data/lib/log_struct/integrations/sorbet.rb +49 -0
  44. data/lib/log_struct/integrations.rb +41 -0
  45. data/lib/log_struct/log/action_mailer.rb +55 -0
  46. data/lib/log_struct/log/active_job.rb +64 -0
  47. data/lib/log_struct/log/active_storage.rb +78 -0
  48. data/lib/log_struct/log/carrierwave.rb +82 -0
  49. data/lib/log_struct/log/error.rb +76 -0
  50. data/lib/log_struct/log/good_job.rb +151 -0
  51. data/lib/log_struct/log/interfaces/additional_data_field.rb +20 -0
  52. data/lib/log_struct/log/interfaces/common_fields.rb +42 -0
  53. data/lib/log_struct/log/interfaces/message_field.rb +20 -0
  54. data/lib/log_struct/log/interfaces/request_fields.rb +36 -0
  55. data/lib/log_struct/log/plain.rb +53 -0
  56. data/lib/log_struct/log/request.rb +76 -0
  57. data/lib/log_struct/log/security.rb +80 -0
  58. data/lib/log_struct/log/shared/add_request_fields.rb +29 -0
  59. data/lib/log_struct/log/shared/merge_additional_data_fields.rb +28 -0
  60. data/lib/log_struct/log/shared/serialize_common.rb +36 -0
  61. data/lib/log_struct/log/shrine.rb +70 -0
  62. data/lib/log_struct/log/sidekiq.rb +50 -0
  63. data/lib/log_struct/log/sql.rb +126 -0
  64. data/lib/log_struct/log.rb +43 -0
  65. data/lib/log_struct/log_keys.rb +102 -0
  66. data/lib/log_struct/monkey_patches/active_support/tagged_logging/formatter.rb +36 -0
  67. data/lib/log_struct/multi_error_reporter.rb +149 -0
  68. data/lib/log_struct/param_filters.rb +89 -0
  69. data/lib/log_struct/railtie.rb +31 -0
  70. data/lib/log_struct/semantic_logger/color_formatter.rb +209 -0
  71. data/lib/log_struct/semantic_logger/formatter.rb +94 -0
  72. data/lib/log_struct/semantic_logger/logger.rb +129 -0
  73. data/lib/log_struct/semantic_logger/setup.rb +219 -0
  74. data/lib/log_struct/sorbet/serialize_symbol_keys.rb +23 -0
  75. data/lib/log_struct/sorbet.rb +13 -0
  76. data/lib/log_struct/string_scrubber.rb +84 -0
  77. data/lib/log_struct/version.rb +6 -0
  78. data/lib/log_struct.rb +37 -0
  79. data/lib/logstruct.rb +2 -6
  80. data/logstruct.gemspec +52 -0
  81. metadata +221 -5
  82. data/Rakefile +0 -5
data/lib/log_struct/integrations/active_storage.rb
@@ -0,0 +1,94 @@
+# typed: strict
+# frozen_string_literal: true
+
+require_relative "../enums/source"
+require_relative "../enums/event"
+require_relative "../log/active_storage"
+
+module LogStruct
+  module Integrations
+    # Integration for ActiveStorage structured logging
+    module ActiveStorage
+      extend T::Sig
+      extend IntegrationInterface
+
+      # Set up ActiveStorage structured logging
+      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
+      def self.setup(config)
+        return nil unless defined?(::ActiveStorage)
+        return nil unless config.enabled
+        return nil unless config.integrations.enable_activestorage
+
+        # Subscribe to all ActiveStorage service events
+        ::ActiveSupport::Notifications.subscribe(/service_.*\.active_storage/) do |*args|
+          process_active_storage_event(::ActiveSupport::Notifications::Event.new(*args), config)
+        end
+
+        true
+      end
+
+      private_class_method
+
+      # Process ActiveStorage events and create structured logs
+      sig { params(event: ActiveSupport::Notifications::Event, config: LogStruct::Configuration).void }
+      def self.process_active_storage_event(event, config)
+        return unless config.enabled
+        return unless config.integrations.enable_activestorage
+
+        # Extract key information from the event
+        event_name = event.name.sub(/\.active_storage$/, "")
+        service_name = event.payload[:service]
+        duration = event.duration
+
+        # Map service events to log event types
+        event_type = case event_name
+        when "service_upload"
+          Event::Upload
+        when "service_download"
+          Event::Download
+        when "service_delete"
+          Event::Delete
+        when "service_delete_prefixed"
+          Event::Delete
+        when "service_exist"
+          Event::Exist
+        when "service_url"
+          Event::Url
+        when "service_download_chunk"
+          Event::Download
+        when "service_stream"
+          Event::Stream
+        when "service_update_metadata"
+          Event::Metadata
+        else
+          Event::Unknown
+        end
+
+        # Map the event name to an operation
+        operation = event_name.sub(/^service_/, "").to_sym
+
+        # Create structured log event specific to ActiveStorage
+        log_data = Log::ActiveStorage.new(
+          event: event_type,
+          operation: operation,
+          storage: service_name.to_s,
+          file_id: event.payload[:key].to_s,
+          checksum: event.payload[:checksum].to_s,
+          duration: duration,
+          # Add other fields where available
+          metadata: event.payload[:metadata],
+          exist: event.payload[:exist],
+          url: event.payload[:url],
+          filename: event.payload[:filename],
+          mime_type: event.payload[:content_type],
+          size: event.payload[:byte_size],
+          prefix: event.payload[:prefix],
+          range: event.payload[:range]
+        )
+
+        # Log structured data
+        LogStruct.info(log_data)
+      end
+    end
+  end
+end
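Note: the integration above relies on ActiveSupport::Notifications regex subscriptions to receive every `service_*.active_storage` event emitted by ActiveStorage's service layer. Here is a minimal, self-contained sketch of that mechanism; the event payload keys are illustrative, not LogStruct's own:

```ruby
require "active_support/notifications"

# Subscribe with a regex, exactly as the integration does above.
ActiveSupport::Notifications.subscribe(/service_.*\.active_storage/) do |*args|
  event = ActiveSupport::Notifications::Event.new(*args)
  puts "#{event.name}: key=#{event.payload[:key]} (#{event.duration.round(2)}ms)"
end

# Instrumenting a matching event fires the subscriber once the block finishes.
ActiveSupport::Notifications.instrument("service_upload.active_storage", key: "abc123", service: "Disk") do
  sleep 0.01 # stand-in for the actual upload work
end
```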
data/lib/log_struct/integrations/carrierwave.rb
@@ -0,0 +1,111 @@
+# typed: strict
+# frozen_string_literal: true
+
+begin
+  require "carrierwave"
+rescue LoadError
+  # CarrierWave gem is not available, integration will be skipped
+end
+
+module LogStruct
+  module Integrations
+    # CarrierWave integration for structured logging
+    module CarrierWave
+      extend T::Sig
+      extend IntegrationInterface
+
+      # Set up CarrierWave structured logging
+      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
+      def self.setup(config)
+        return nil unless defined?(::CarrierWave)
+        return nil unless config.enabled
+        return nil unless config.integrations.enable_carrierwave
+
+        # Patch CarrierWave to add logging
+        ::CarrierWave::Uploader::Base.prepend(LoggingMethods)
+
+        true
+      end
+
+      # Methods to add logging to CarrierWave operations
+      module LoggingMethods
+        extend T::Sig
+        extend T::Helpers
+        requires_ancestor { ::CarrierWave::Uploader::Base }
+
+        # Log file storage operations
+        sig { params(args: T.untyped).returns(T.untyped) }
+        def store!(*args)
+          start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
+          result = super
+          duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time
+
+          # Extract file information
+          file_size = file.size if file.respond_to?(:size)
+          {
+            identifier: identifier,
+            filename: file.filename,
+            content_type: file.content_type,
+            size: file_size,
+            store_path: store_path,
+            extension: file.extension
+          }
+
+          # Log the store operation with structured data
+          log_data = Log::CarrierWave.new(
+            source: Source::CarrierWave,
+            event: Event::Upload,
+            duration: duration * 1000.0, # Convert to ms
+            model: model.class.name,
+            uploader: self.class.name,
+            storage: storage.class.name,
+            mount_point: mounted_as.to_s,
+            filename: file.filename,
+            mime_type: file.content_type,
+            size: file_size,
+            file_id: identifier,
+            additional_data: {
+              version: version_name.to_s,
+              store_path: store_path,
+              extension: file.extension
+            }
+          )
+
+          ::Rails.logger.info(log_data)
+          result
+        end
+
+        # Log file retrieve operations
+        sig { params(identifier: T.untyped, args: T.untyped).returns(T.untyped) }
+        def retrieve_from_store!(identifier, *args)
+          start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
+          result = super
+          duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time
+
+          # Extract file information if available
+          file_size = file.size if file&.respond_to?(:size)
+
+          # Log the retrieve operation with structured data
+          log_data = Log::CarrierWave.new(
+            source: Source::CarrierWave,
+            event: Event::Download,
+            duration: duration * 1000.0, # Convert to ms
+            uploader: self.class.name,
+            storage: storage.class.name,
+            mount_point: mounted_as.to_s,
+            file_id: identifier,
+            filename: file&.filename,
+            mime_type: file&.content_type,
+            size: file_size,
+            additional_data: {
+              version: version_name.to_s
+            }
+          )
+
+          ::Rails.logger.info(log_data)
+          result
+        end
+      end
+    end
+  end
+end
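Note: the CarrierWave hook above works by prepending a module, so `store!` and `retrieve_from_store!` can be timed while `super` still performs the real work. A stripped-down sketch of that prepend-and-time pattern follows; the class and method bodies are illustrative:

```ruby
# Wraps an existing method via Module#prepend: time the call, keep the return value.
module TimedStore
  def store!(*args)
    started = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    result = super # runs the original implementation
    elapsed_ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - started) * 1000.0
    puts format("store! finished in %.1fms", elapsed_ms)
    result
  end
end

class Uploader
  def store!(*args)
    sleep 0.01 # stand-in for the real upload
    :stored
  end
end

Uploader.prepend(TimedStore)
Uploader.new.store! # => :stored, after printing the timing line
```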
data/lib/log_struct/integrations/good_job/log_subscriber.rb
@@ -0,0 +1,228 @@
+# typed: strict
+# frozen_string_literal: true
+
+begin
+  require "active_support/log_subscriber"
+rescue LoadError
+  # ActiveSupport is not available, log subscriber will be skipped
+end
+
+require_relative "../../log/good_job"
+require_relative "../../enums/event"
+require_relative "../../enums/level"
+
+module LogStruct
+  module Integrations
+    module GoodJob
+      # LogSubscriber for GoodJob ActiveSupport notifications
+      #
+      # This subscriber captures GoodJob's ActiveSupport notifications and converts
+      # them into structured LogStruct::Log::GoodJob entries. It provides detailed
+      # logging for job lifecycle events, performance metrics, and error tracking.
+      #
+      # ## Supported Events:
+      # - job.enqueue - Job queued for execution
+      # - job.start - Job execution started
+      # - job.finish - Job completed successfully
+      # - job.error - Job failed with error
+      # - job.retry - Job retry initiated
+      # - job.schedule - Job scheduled for future execution
+      #
+      # ## Event Data Captured:
+      # - Job identification (ID, class, queue)
+      # - Execution context (arguments, priority, scheduled time)
+      # - Performance metrics (execution time, wait time)
+      # - Error information (class, message, backtrace)
+      # - Process and thread information
+      class LogSubscriber < ::ActiveSupport::LogSubscriber
+        extend T::Sig
+
+        # Job enqueued event
+        sig { params(event: T.untyped).void }
+        def enqueue(event)
+          job_data = extract_job_data(event)
+
+          log_entry = LogStruct::Log::GoodJob.new(
+            event: Event::Enqueue,
+            level: Level::Info,
+            job_id: job_data[:job_id],
+            job_class: job_data[:job_class],
+            queue_name: job_data[:queue_name],
+            arguments: job_data[:arguments],
+            scheduled_at: job_data[:scheduled_at],
+            priority: job_data[:priority],
+            execution_time: event.duration,
+            additional_data: {
+              enqueue_caller: job_data[:caller_location]
+            }
+          )
+
+          logger.info(log_entry)
+        end
+
+        # Job execution started event
+        sig { params(event: T.untyped).void }
+        def start(event)
+          job_data = extract_job_data(event)
+
+          log_entry = LogStruct::Log::GoodJob.new(
+            event: Event::Start,
+            level: Level::Info,
+            job_id: job_data[:job_id],
+            job_class: job_data[:job_class],
+            queue_name: job_data[:queue_name],
+            arguments: job_data[:arguments],
+            executions: job_data[:executions],
+            wait_time: job_data[:wait_time],
+            scheduled_at: job_data[:scheduled_at],
+            process_id: ::Process.pid,
+            thread_id: Thread.current.object_id.to_s(36)
+          )
+
+          logger.info(log_entry)
+        end
+
+        # Job completed successfully event
+        sig { params(event: T.untyped).void }
+        def finish(event)
+          job_data = extract_job_data(event)
+
+          log_entry = LogStruct::Log::GoodJob.new(
+            event: Event::Finish,
+            level: Level::Info,
+            job_id: job_data[:job_id],
+            job_class: job_data[:job_class],
+            queue_name: job_data[:queue_name],
+            executions: job_data[:executions],
+            run_time: event.duration,
+            finished_at: Time.now,
+            process_id: ::Process.pid,
+            thread_id: Thread.current.object_id.to_s(36),
+            additional_data: {
+              result: job_data[:result]
+            }
+          )
+
+          logger.info(log_entry)
+        end
+
+        # Job failed with error event
+        sig { params(event: T.untyped).void }
+        def error(event)
+          job_data = extract_job_data(event)
+
+          log_entry = LogStruct::Log::GoodJob.new(
+            event: Event::Error,
+            level: Level::Error,
+            job_id: job_data[:job_id],
+            job_class: job_data[:job_class],
+            queue_name: job_data[:queue_name],
+            executions: job_data[:executions],
+            exception_executions: job_data[:exception_executions],
+            error_class: job_data[:error_class],
+            error_message: job_data[:error_message],
+            error_backtrace: job_data[:error_backtrace],
+            run_time: event.duration,
+            process_id: ::Process.pid,
+            thread_id: Thread.current.object_id.to_s(36)
+          )
+
+          logger.error(log_entry)
+        end
+
+        # Job scheduled for future execution event
+        sig { params(event: T.untyped).void }
+        def schedule(event)
+          job_data = extract_job_data(event)
+
+          log_entry = LogStruct::Log::GoodJob.new(
+            event: Event::Schedule,
+            level: Level::Info,
+            job_id: job_data[:job_id],
+            job_class: job_data[:job_class],
+            queue_name: job_data[:queue_name],
+            arguments: job_data[:arguments],
+            scheduled_at: job_data[:scheduled_at],
+            priority: job_data[:priority],
+            cron_key: job_data[:cron_key],
+            execution_time: event.duration
+          )
+
+          logger.info(log_entry)
+        end
+
+        private
+
+        # Extract job data from ActiveSupport event payload
+        sig { params(event: T.untyped).returns(T::Hash[Symbol, T.untyped]) }
+        def extract_job_data(event)
+          payload = event.payload || {}
+          job = payload[:job]
+          execution = payload[:execution] || payload[:good_job_execution]
+          exception = payload[:exception] || payload[:error]
+
+          data = {}
+
+          # Basic job information
+          if job
+            data[:job_id] = job.job_id if job.respond_to?(:job_id)
+            data[:job_class] = job.job_class if job.respond_to?(:job_class)
+            data[:queue_name] = job.queue_name if job.respond_to?(:queue_name)
+            data[:arguments] = job.arguments if job.respond_to?(:arguments)
+            data[:priority] = job.priority if job.respond_to?(:priority)
+            data[:scheduled_at] = job.scheduled_at if job.respond_to?(:scheduled_at)
+            data[:cron_key] = job.cron_key if job.respond_to?(:cron_key)
+            data[:caller_location] = job.enqueue_caller_location if job.respond_to?(:enqueue_caller_location)
+          end
+
+          # Execution-specific information
+          if execution
+            data[:executions] = execution.executions if execution.respond_to?(:executions)
+            data[:exception_executions] = execution.exception_executions if execution.respond_to?(:exception_executions)
+            # Use existing wait_time if available, otherwise calculate it
+            if execution.respond_to?(:wait_time) && execution.wait_time
+              data[:wait_time] = execution.wait_time
+            elsif execution.respond_to?(:created_at)
+              data[:wait_time] = calculate_wait_time(execution)
+            end
+            data[:batch_id] = execution.batch_id if execution.respond_to?(:batch_id)
+            data[:cron_key] ||= execution.cron_key if execution.respond_to?(:cron_key)
+          end
+
+          # Error information
+          if exception
+            data[:error_class] = exception.class.name
+            data[:error_message] = exception.message
+            data[:error_backtrace] = exception.backtrace&.first(20) # Limit backtrace size
+          end
+
+          # Result information
+          data[:result] = payload[:result] if payload.key?(:result)
+
+          data
+        end
+
+        # Calculate wait time from job creation to execution start
+        sig { params(execution: T.untyped).returns(T.nilable(Float)) }
+        def calculate_wait_time(execution)
+          return nil unless execution.respond_to?(:created_at)
+          return nil unless execution.respond_to?(:performed_at)
+          return nil unless execution.created_at && execution.performed_at
+
+          (execution.performed_at - execution.created_at).to_f
+        rescue
+          # Return nil if calculation fails
+          nil
+        end
+
+        # Get the appropriate logger for GoodJob events
+        sig { returns(T.untyped) }
+        def logger
+          # Always use Rails.logger - in production it will be configured by the integration setup,
+          # in tests it will be set up by the test harness
+          Rails.logger
+        end
+      end
+    end
+  end
+end
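Note: the subscriber above follows the standard ActiveSupport::LogSubscriber convention, where `attach_to :good_job` routes a notification named `enqueue.good_job` to the `#enqueue` method, `error.good_job` to `#error`, and so on. A minimal sketch of that dispatch, runnable outside Rails (the event name and payload are illustrative):

```ruby
require "active_support/log_subscriber"
require "logger"

class DemoSubscriber < ActiveSupport::LogSubscriber
  def enqueue(event)
    puts "enqueue.good_job took #{event.duration.round(2)}ms, payload keys: #{event.payload.keys.inspect}"
  end
end

# LogSubscriber only dispatches when a logger is present; outside Rails, set one explicitly.
ActiveSupport::LogSubscriber.logger = Logger.new($stdout)
DemoSubscriber.attach_to :good_job

ActiveSupport::Notifications.instrument("enqueue.good_job", job_id: "demo-123") do
  # enqueue work would happen here
end
```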
data/lib/log_struct/integrations/good_job/logger.rb
@@ -0,0 +1,73 @@
+# typed: strict
+# frozen_string_literal: true
+
+require_relative "../../semantic_logger/logger"
+require_relative "../../log/good_job"
+require_relative "../../enums/source"
+
+module LogStruct
+  module Integrations
+    module GoodJob
+      # Custom Logger for GoodJob that creates LogStruct::Log::GoodJob entries
+      #
+      # This logger extends LogStruct's SemanticLogger to provide optimal logging
+      # performance while creating structured log entries specifically for GoodJob
+      # operations and events.
+      #
+      # ## Benefits:
+      # - High-performance logging with SemanticLogger backend
+      # - Structured GoodJob-specific log entries
+      # - Automatic job context capture
+      # - Thread and process information
+      # - Performance metrics and timing data
+      #
+      # ## Usage:
+      # This logger is automatically configured when the GoodJob integration
+      # is enabled. It replaces GoodJob.logger to provide structured logging
+      # for all GoodJob operations.
+      class Logger < LogStruct::SemanticLogger::Logger
+        extend T::Sig
+
+        # Override log methods to create GoodJob-specific log structs
+        %i[debug info warn error fatal].each do |level|
+          define_method(level) do |message = nil, payload = nil, &block|
+            # Extract basic job context from thread-local variables
+            job_context = {}
+
+            if Thread.current[:good_job_execution]
+              execution = Thread.current[:good_job_execution]
+              if execution.respond_to?(:job_id)
+                job_context[:job_id] = execution.job_id
+                job_context[:job_class] = execution.job_class if execution.respond_to?(:job_class)
+                job_context[:queue_name] = execution.queue_name if execution.respond_to?(:queue_name)
+                job_context[:executions] = execution.executions if execution.respond_to?(:executions)
+                job_context[:scheduled_at] = execution.scheduled_at if execution.respond_to?(:scheduled_at)
+                job_context[:priority] = execution.priority if execution.respond_to?(:priority)
+              end
+            end
+
+            # Create a GoodJob log struct with the context
+            log_struct = Log::GoodJob.new(
+              event: Event::Log,
+              level: LogStruct::Level.from_severity(level.to_s.upcase),
+              process_id: ::Process.pid,
+              thread_id: Thread.current.object_id.to_s(36),
+              job_id: job_context[:job_id],
+              job_class: job_context[:job_class],
+              queue_name: job_context[:queue_name],
+              executions: job_context[:executions],
+              scheduled_at: job_context[:scheduled_at],
+              priority: job_context[:priority],
+              additional_data: {
+                message: message || (block ? block.call : "")
+              }
+            )
+
+            # Pass the struct to SemanticLogger
+            super(log_struct, payload, &nil)
+          end
+        end
+      end
+    end
+  end
+end
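Note: the logger above uses `define_method` in a loop so every severity funnels through the same struct-building path before handing off to `super`. A generic sketch of that pattern on a plain `::Logger` subclass (the JSON payload shape is illustrative, not LogStruct's):

```ruby
require "logger"
require "json"

class StructuredLogger < Logger
  %i[debug info warn error fatal].each do |level|
    define_method(level) do |message = nil, &block|
      # Build one structured payload regardless of severity, then defer to Logger.
      payload = { level: level, message: message || block&.call }
      super(payload.to_json, &nil)
    end
  end
end

StructuredLogger.new($stdout).info("job finished")
# logs {"level":"info","message":"job finished"} in the default Logger line format
```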
data/lib/log_struct/integrations/good_job.rb
@@ -0,0 +1,111 @@
+# typed: strict
+# frozen_string_literal: true
+
+begin
+  require "good_job"
+rescue LoadError
+  # GoodJob gem is not available, integration will be skipped
+end
+
+require_relative "good_job/logger" if defined?(::GoodJob)
+require_relative "good_job/log_subscriber" if defined?(::GoodJob)
+
+module LogStruct
+  module Integrations
+    # GoodJob integration for structured logging
+    #
+    # GoodJob is a PostgreSQL-based ActiveJob backend that provides reliable,
+    # scalable job processing for Rails applications. This integration provides
+    # structured logging for all GoodJob operations.
+    #
+    # ## Features:
+    # - Structured logging for job execution lifecycle
+    # - Error tracking and retry logging
+    # - Performance metrics and timing data
+    # - Database operation logging
+    # - Thread and process tracking
+    # - Custom GoodJob logger with LogStruct formatting
+    #
+    # ## Integration Points:
+    # - Replaces GoodJob.logger with LogStruct-compatible logger
+    # - Subscribes to GoodJob's ActiveSupport notifications
+    # - Captures job execution events, errors, and performance metrics
+    # - Logs database operations and connection information
+    #
+    # ## Configuration:
+    # The integration is automatically enabled when GoodJob is detected and
+    # LogStruct configuration allows it. It can be disabled by setting:
+    #
+    # ```ruby
+    # config.integrations.enable_goodjob = false
+    # ```
+    module GoodJob
+      extend T::Sig
+      extend IntegrationInterface
+
+      # Set up GoodJob structured logging
+      #
+      # This method configures GoodJob to use LogStruct's structured logging
+      # by replacing the default logger and subscribing to job events.
+      #
+      # @param config [LogStruct::Configuration] The LogStruct configuration
+      # @return [Boolean, nil] Returns true if setup was successful, nil if skipped
+      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
+      def self.setup(config)
+        return nil unless defined?(::GoodJob)
+        return nil unless config.enabled
+        return nil unless config.integrations.enable_goodjob
+
+        # Replace GoodJob's logger with our structured logger
+        configure_logger
+
+        # Subscribe to GoodJob's ActiveSupport notifications
+        subscribe_to_notifications
+
+        true
+      end
+
+      # Configure GoodJob to use LogStruct's structured logger
+      sig { void }
+      def self.configure_logger
+        return unless defined?(::GoodJob)
+
+        # Use direct reference to avoid const_get - GoodJob is guaranteed to be defined here
+        goodjob_module = T.unsafe(GoodJob)
+
+        # Replace GoodJob.logger with our structured logger if GoodJob is available
+        if goodjob_module.respond_to?(:logger=)
+          goodjob_module.logger = LogStruct::Integrations::GoodJob::Logger.new("GoodJob")
+        end
+
+        # Configure error handling for thread errors if GoodJob supports it
+        if goodjob_module.respond_to?(:on_thread_error=)
+          goodjob_module.on_thread_error = ->(exception) do
+            # Log the error using our structured format
+            log_entry = LogStruct::Log::GoodJob.new(
+              event: Event::Error,
+              level: Level::Error,
+              error_class: exception.class.name,
+              error_message: exception.message,
+              error_backtrace: exception.backtrace
+            )
+
+            goodjob_module.logger.error(log_entry)
+          end
+        end
+      end
+
+      # Subscribe to GoodJob's ActiveSupport notifications
+      sig { void }
+      def self.subscribe_to_notifications
+        return unless defined?(::GoodJob)
+
+        # Subscribe to our custom log subscriber for GoodJob events
+        LogStruct::Integrations::GoodJob::LogSubscriber.attach_to :good_job
+      end
+
+      private_class_method :configure_logger
+      private_class_method :subscribe_to_notifications
+    end
+  end
+end
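Note: as the module documentation above states, the integration only runs when `config.enabled` and `config.integrations.enable_goodjob` are both set. A hypothetical initializer illustrating that toggle; the `LogStruct.configure` block form is an assumption based on the Configuration concern listed in this release, not a confirmed API:

```ruby
# config/initializers/log_struct.rb (hypothetical example)
LogStruct.configure do |config|               # assumed configure entry point
  config.integrations.enable_goodjob = false  # flag shown in the module docs above
  config.integrations.enable_carrierwave = true
end
```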
data/lib/log_struct/integrations/host_authorization.rb
@@ -0,0 +1,81 @@
+# typed: strict
+# frozen_string_literal: true
+
+require "action_dispatch/middleware/host_authorization"
+require_relative "../enums/event"
+
+module LogStruct
+  module Integrations
+    # Host Authorization integration for structured logging of blocked hosts
+    module HostAuthorization
+      extend T::Sig
+      extend IntegrationInterface
+
+      RESPONSE_HTML = T.let(
+        "<html><head><title>Blocked Host</title></head><body>" \
+        "<h1>Blocked Host</h1>" \
+        "<p>This host is not permitted to access this application.</p>" \
+        "<p>If you are the administrator, check your configuration.</p>" \
+        "</body></html>",
+        String
+      )
+      RESPONSE_HEADERS = T.let(
+        {
+          "Content-Type" => "text/html",
+          "Content-Length" => RESPONSE_HTML.bytesize.to_s
+        }.freeze,
+        T::Hash[String, String]
+      )
+      FORBIDDEN_STATUS = T.let(403, Integer)
+
+      # Set up host authorization logging
+      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
+      def self.setup(config)
+        return nil unless config.enabled
+        return nil unless config.integrations.enable_host_authorization
+
+        # Define the response app as a separate variable to fix block alignment
+        response_app = lambda do |env|
+          request = ::ActionDispatch::Request.new(env)
+          # Include the blocked hosts app configuration in the log entry
+          # This can be helpful later when reviewing logs.
+          blocked_hosts = env["action_dispatch.blocked_hosts"]
+
+          # Create a security error to be handled
+          blocked_host_error = ::ActionController::BadRequest.new(
+            "Blocked host detected: #{request.host}"
+          )
+
+          # Create request context hash
+          context = {
+            blocked_host: request.host,
+            client_ip: request.ip,
+            x_forwarded_for: request.x_forwarded_for,
+            http_method: request.method,
+            path: request.path,
+            user_agent: request.user_agent,
+            allowed_hosts: blocked_hosts.allowed_hosts,
+            allow_ip_hosts: blocked_hosts.allow_ip_hosts
+          }
+
+          # Handle error according to configured mode (log, report, raise)
+          LogStruct.handle_exception(
+            blocked_host_error,
+            source: Source::Security,
+            context: context
+          )
+
+          # Use pre-defined headers and response if we are only logging or reporting
+          [FORBIDDEN_STATUS, RESPONSE_HEADERS, [RESPONSE_HTML]]
+        end
+
+        # Replace the default HostAuthorization app with our custom app for logging
+        Rails.application.config.host_authorization = {
+          response_app: response_app
+        }
+
+        true
+      end
+    end
+  end
+end
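Note: `response_app` is the hook Rails itself exposes for customizing the blocked-host response (it is passed through to ActionDispatch::HostAuthorization); the integration above supplies a structured-logging version of it. For comparison, a plain-Rails sketch of the same hook, with illustrative host name and body:

```ruby
# config/environments/production.rb (illustrative)
Rails.application.configure do
  config.hosts << "www.example.com"

  # Called instead of the default 403 page when a request's host is not allowed.
  config.host_authorization = {
    response_app: ->(env) {
      [403, { "Content-Type" => "text/plain" }, ["Blocked host: #{ActionDispatch::Request.new(env).host}"]]
    }
  }
end
```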