logstruct 0.1.10 → 0.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b138e5ba6e0739ee31d032606f4d7508463deeee00b64e3a870d34d4316e7567
- data.tar.gz: f36bc4a9cb5ee2ab42b77a581efd1b28c7c23a75191429ac5b5926023d33a594
+ metadata.gz: 90d690080da4286690ca86f0d9eb5b9514483f2324898f07912d66015ec4269d
+ data.tar.gz: df38c5f2434cc5afae25606f77ab2d9a532c9d77c58f81bd558824907ed66418
  SHA512:
- metadata.gz: c06589605729de5ac9ea44694e82712d2d175201adab6052b6eff1a2ae2cc8cc6b476ed7200d1fa6bf6a79db6eb280847b4ca34559a501bc327129db4574acfe
- data.tar.gz: 992fc803c47c57367ca906179bf9b356618e42bf840ac6a3de4b7296d90be0db8d513177c57643e1564967624f444d0af00f2582844e4d68825a03f8e911ac37
+ metadata.gz: f654b8b6b7ecf8114c08d4c2b3f9d0f97160bfbd44f60b89ffb08001fed4b186bf7c9697ea8ca1b9ac2d25071f481637e4b050ab4be01efac286d793c12cb7ff
+ data.tar.gz: 88f8fcf0c87b5e2ccd96152d8abb3de94aaeb952879a37c4cb4fdb84fa9fa858d7fb230f50a8e4d440e30e3a6746cb6bb7b0dc3abdd1dc5f7d95ddfbfed18f0b
data/CHANGELOG.md CHANGED
@@ -9,6 +9,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
  ### Changed
 
+ ## [0.1.11] - 2026-01-28
+
+ ### Changed
+
+ - **Dev**: Added jscpd task/config and refactored repeated helpers in tests and codegen
+
+ ### Fixed
+
+ - **Fix**: Restore request logs in Puma cluster mode by reopening SemanticLogger in forked workers
+ - **Fix**: Normalize ActiveJob and GoodJob timestamps to real wall clock time
+ - **Fix**: Tagged logging now emits `msg` instead of `message` for AMS-style logs
+
  ## [0.1.10] - 2026-01-23
 
  ### Added
@@ -190,6 +190,10 @@ module LogStruct
  data[:ts] ||= time.iso8601(3)
  data[:lvl] = level_enum # Set level from severity parameter
  data[:prog] = progname if progname.present?
+ request_id = Thread.current[:logstruct_request_id]
+ if request_id.is_a?(String) && !request_id.empty? && !data.key?(:req_id)
+ data[:req_id] = request_id
+ end
 
  generate_json(data)
  end
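Note: the req_id fallback added here reads a thread-local that the request-context middleware (updated later in this diff) populates for the duration of each request, so payloads that never set req_id themselves still come out tagged with the current request. A rough illustration, with made-up values and field names taken from this hunk:

    # While a request is being served, the middleware has set:
    Thread.current[:logstruct_request_id] = "1a2b3c4d"
    # Any line emitted through this formatter now includes "req_id":"1a2b3c4d"
    # alongside ts/lvl/prog, unless the payload already provided its own req_id.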
@@ -150,38 +150,8 @@ module LogStruct
 
  # Report to error reporting service if requested
  if report
- # Get message if available
- mailer_message = respond_to?(:message) ? message : nil
-
- # Prepare universal mailer fields
- message_data = {}
- MetadataCollection.add_message_metadata(self, message_data)
-
- # Prepare app-specific context data
  context_data = {recipients: recipients(error)}
- MetadataCollection.add_context_metadata(self, context_data)
-
- # Extract email fields
- to = mailer_message&.to
- from = mailer_message&.from&.first
- subject = mailer_message&.subject
- message_id = extract_message_id_from_mailer(self)
-
- # Create ActionMailer-specific error struct
- exception_data = Log::ActionMailer::Error.new(
- to: to,
- from: from,
- subject: subject,
- message_id: message_id,
- mailer_class: self.class.to_s,
- mailer_action: respond_to?(:action_name) ? action_name&.to_s : nil,
- attachment_count: message_data[:attachment_count],
- error_class: error.class,
- message: error.message,
- backtrace: error.backtrace,
- additional_data: context_data.presence,
- timestamp: Time.now
- )
+ exception_data = build_exception_data(error, Level::Error, context_data)
 
  # Log the exception with structured data
  LogStruct.error(exception_data)
@@ -202,29 +172,32 @@ module LogStruct
  # Log a notification event that can be picked up by external systems
  sig { params(error: StandardError).void }
  def log_notification_event(error)
- # Get message if available
- mailer_message = respond_to?(:message) ? message : nil
-
- # Prepare universal mailer fields
- message_data = {}
- MetadataCollection.add_message_metadata(self, message_data)
-
- # Prepare app-specific context data
  context_data = {
  mailer: self.class.to_s,
  action: action_name&.to_s,
  recipients: recipients(error)
  }
+ exception_data = build_exception_data(error, Level::Info, context_data)
+
+ # Log the error at info level since it's not a critical error
+ LogStruct.info(exception_data)
+ end
+
+ sig { params(error: StandardError, level: Level, context_data: T::Hash[Symbol, T.untyped]).returns(Log::ActionMailer::Error) }
+ def build_exception_data(error, level, context_data)
+ mailer_message = respond_to?(:message) ? message : nil
+
+ message_data = {}
+ MetadataCollection.add_message_metadata(self, message_data)
+
  MetadataCollection.add_context_metadata(self, context_data)
 
- # Extract email fields
  to = mailer_message&.to
  from = mailer_message&.from&.first
  subject = mailer_message&.subject
  message_id = extract_message_id_from_mailer(self)
 
- # Create ActionMailer-specific error struct
- exception_data = Log::ActionMailer::Error.new(
+ Log::ActionMailer::Error.new(
  to: to,
  from: from,
  subject: subject,
@@ -237,11 +210,8 @@ module LogStruct
  backtrace: error.backtrace,
  additional_data: context_data.presence,
  timestamp: Time.now,
- level: Level::Info
+ level: level
  )
-
- # Log the error at info level since it's not a critical error
- LogStruct.info(exception_data)
  end
 
  sig { params(error: StandardError).returns(String) }
@@ -6,6 +6,7 @@ require_relative "../../enums/source"
  require_relative "../../enums/event"
  require_relative "../../log/active_job"
  require_relative "../../log/error"
+ require_relative "../event_time"
 
  module LogStruct
  module Integrations
@@ -17,7 +18,7 @@ module LogStruct
  sig { params(event: ::ActiveSupport::Notifications::Event).void }
  def enqueue(event)
  job = T.cast(event.payload[:job], ::ActiveJob::Base)
- ts = event.time ? Time.at(event.time) : Time.now
+ ts = EventTime.coerce_event_time(event.time)
  base_fields = build_base_fields(job)
  logger.info(Log::ActiveJob::Enqueue.new(
  **base_fields.to_kwargs,
@@ -28,7 +29,7 @@ module LogStruct
  sig { params(event: ::ActiveSupport::Notifications::Event).void }
  def enqueue_at(event)
  job = T.cast(event.payload[:job], ::ActiveJob::Base)
- ts = event.time ? Time.at(event.time) : Time.now
+ ts = EventTime.coerce_event_time(event.time)
  base_fields = build_base_fields(job)
  logger.info(Log::ActiveJob::Schedule.new(
  **base_fields.to_kwargs,
@@ -46,10 +47,8 @@ module LogStruct
  # Log the exception with the job context
  log_exception(exception, job, event)
  else
- start_float = event.time
- end_float = event.end
- ts = start_float ? Time.at(start_float) : Time.now
- finished_at = end_float ? Time.at(end_float) : Time.now
+ ts = EventTime.coerce_event_time(event.time)
+ finished_at = EventTime.coerce_event_time(event.end)
  base_fields = build_base_fields(job)
  logger.info(Log::ActiveJob::Finish.new(
  **base_fields.to_kwargs,
@@ -63,7 +62,7 @@ module LogStruct
  sig { params(event: ::ActiveSupport::Notifications::Event).void }
  def perform_start(event)
  job = T.cast(event.payload[:job], ::ActiveJob::Base)
- ts = event.time ? Time.at(event.time) : Time.now
+ ts = EventTime.coerce_event_time(event.time)
  started_at = ts
  attempt = job.executions
  base_fields = build_base_fields(job)
@@ -34,6 +34,20 @@ module LogStruct
  class << self
  extend T::Sig
 
+ sig { params(env: T.untyped).returns(String) }
+ def relative_env_path(env)
+ abs = env.filename
+ begin
+ if defined?(::Rails) && ::Rails.respond_to?(:root) && ::Rails.root
+ Pathname.new(abs).relative_path_from(Pathname.new(::Rails.root.to_s)).to_s
+ else
+ abs
+ end
+ rescue
+ abs
+ end
+ end
+
  sig { void }
  def subscribe!
  # Guard against double subscription
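Note: relative_env_path is the same logic that was previously inlined at both dotenv call sites below; it shortens the dotenv file path relative to Rails.root when Rails is available and falls back to the absolute path otherwise. A quick sketch of the underlying Pathname call, with illustrative paths:

    require "pathname"
    Pathname.new("/srv/app/.env.development")
            .relative_path_from(Pathname.new("/srv/app")).to_s
    # => ".env.development"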
@@ -47,16 +61,7 @@ module LogStruct
  LogStruct::Log::Dotenv.new
  event = ::ActiveSupport::Notifications::Event.new(*args)
  env = event.payload[:env]
- abs = env.filename
- file = begin
- if defined?(::Rails) && ::Rails.respond_to?(:root) && ::Rails.root
- Pathname.new(abs).relative_path_from(Pathname.new(::Rails.root.to_s)).to_s
- else
- abs
- end
- rescue
- abs
- end
+ file = relative_env_path(env)
 
  ts = event.time ? Time.at(event.time) : Time.now
  LogStruct.info(Log::Dotenv::Load.new(file: file, timestamp: ts))
@@ -134,16 +139,7 @@ module LogStruct
  instrumenter.subscribe("load.dotenv") do |*args|
  event = ::ActiveSupport::Notifications::Event.new(*args)
  env = event.payload[:env]
- abs = env.filename
- file = begin
- if defined?(::Rails) && ::Rails.respond_to?(:root) && ::Rails.root
- Pathname.new(abs).relative_path_from(Pathname.new(::Rails.root.to_s)).to_s
- else
- abs
- end
- rescue
- abs
- end
+ file = relative_env_path(env)
  ts = event.time ? Time.at(event.time) : Time.now
  LogStruct::BootBuffer.add(Log::Dotenv::Load.new(file: file, timestamp: ts))
  rescue => e
@@ -0,0 +1,19 @@
+ # typed: strict
+ # frozen_string_literal: true
+
+ module LogStruct
+ module Integrations
+ module EventTime
+ extend T::Sig
+
+ sig { params(value: T.untyped).returns(Time) }
+ def self.coerce_event_time(value)
+ return value if value.is_a?(Time)
+ return Time.now unless value.is_a?(Numeric)
+
+ monotonic_now = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+ Time.now - (monotonic_now - value)
+ end
+ end
+ end
+ end
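Note: this new helper backs the "normalize timestamps to real wall clock time" changelog entry. On Rails versions where ActiveSupport::Notifications events report start/end times as monotonic-clock floats rather than Time objects, the previous `Time.at(event.time)` calls produced timestamps unrelated to wall-clock time (a monotonic reading is not a Unix epoch value). The helper re-anchors such floats against the current monotonic reading; Time values pass through, and anything else falls back to Time.now. An illustrative call, assuming the float is in CLOCK_MONOTONIC seconds as the implementation above expects:

    value = Process.clock_gettime(Process::CLOCK_MONOTONIC) - 2.0  # "2 seconds ago"
    LogStruct::Integrations::EventTime.coerce_event_time(value)
    # => a Time roughly 2 seconds before Time.now
    LogStruct::Integrations::EventTime.coerce_event_time(Time.now)
    # => passed through unchanged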
@@ -10,6 +10,7 @@ end
  require_relative "../../log/good_job"
  require_relative "../../enums/event"
  require_relative "../../enums/level"
+ require_relative "../event_time"
 
  module LogStruct
  module Integrations
@@ -43,7 +44,7 @@ module LogStruct
  payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
  job = payload[:job]
  base_fields = build_base_fields(job, payload)
- ts = event.time ? Time.at(event.time) : Time.now
+ ts = EventTime.coerce_event_time(event.time)
 
  logger.info(Log::GoodJob::Enqueue.new(
  **base_fields.to_kwargs,
@@ -61,7 +62,7 @@ module LogStruct
  job = payload[:job]
  execution = payload[:execution] || payload[:good_job_execution]
  base_fields = build_base_fields(job, payload)
- ts = event.time ? Time.at(event.time) : Time.now
+ ts = EventTime.coerce_event_time(event.time)
 
  logger.info(Log::GoodJob::Start.new(
  **base_fields.to_kwargs,
@@ -82,8 +83,8 @@ module LogStruct
  payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
  job = payload[:job]
  base_fields = build_base_fields(job, payload)
- start_ts = event.time ? Time.at(event.time) : Time.now
- end_ts = event.end ? Time.at(event.end) : Time.now
+ start_ts = EventTime.coerce_event_time(event.time)
+ end_ts = EventTime.coerce_event_time(event.end)
 
  logger.info(Log::GoodJob::Finish.new(
  **base_fields.to_kwargs,
@@ -103,7 +104,7 @@ module LogStruct
  job = payload[:job]
  execution = payload[:execution] || payload[:good_job_execution]
  exception = payload[:exception] || payload[:error]
- ts = event.time ? Time.at(event.time) : Time.now
+ ts = EventTime.coerce_event_time(event.time)
  base_fields = build_base_fields(job, payload)
 
  logger.error(Log::GoodJob::Error.new(
@@ -125,7 +126,7 @@ module LogStruct
  payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
  job = payload[:job]
  base_fields = build_base_fields(job, payload)
- ts = event.time ? Time.at(event.time) : Time.now
+ ts = EventTime.coerce_event_time(event.time)
 
  logger.info(Log::GoodJob::Schedule.new(
  **base_fields.to_kwargs,
@@ -50,16 +50,7 @@ module LogStruct
  if ARGV.include?("server")
  # Emit deterministic boot/started events based on CLI args
  begin
- port = T.let(nil, T.nilable(String))
- ARGV.each_with_index do |arg, idx|
- if arg == "-p" || arg == "--port"
- port = ARGV[idx + 1]
- break
- elsif arg.start_with?("--port=")
- port = arg.split("=", 2)[1]
- break
- end
- end
+ port = port_from_argv(ARGV)
  si = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])
  si[:pid] ||= Process.pid
  si[:environment] ||= ((defined?(::Rails) && ::Rails.respond_to?(:env)) ? ::Rails.env : nil)
@@ -116,6 +107,13 @@ module LogStruct
  ev = T.unsafe(::Object.const_get("Puma::Events"))
  ev.prepend(EventsPatch)
  end
+ if puma_mod&.const_defined?(:Cluster)
+ cluster_mod = T.cast(puma_mod.const_get(:Cluster), Module)
+ if cluster_mod.const_defined?(:Worker)
+ worker_class = T.cast(cluster_mod.const_get(:Worker), T::Class[T.anything])
+ worker_class.prepend(ClusterWorkerPatch)
+ end
+ end
  # Patch Rack::Handler::Puma.run to emit lifecycle logs using options
  if ::Object.const_defined?(:Rack)
  rack_mod = T.unsafe(::Object.const_get(:Rack))
@@ -168,6 +166,18 @@ module LogStruct
  }
  end
 
+ sig { params(argv: T::Array[String]).returns(T.nilable(String)) }
+ def port_from_argv(argv)
+ argv.each_with_index do |arg, idx|
+ if arg == "-p" || arg == "--port"
+ return argv[idx + 1]
+ elsif arg.start_with?("--port=")
+ return arg.split("=", 2)[1]
+ end
+ end
+ nil
+ end
+
  sig { params(line: String).returns(T::Boolean) }
  def process_line(line)
  l = line.to_s.strip
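Note: port_from_argv centralizes the ARGV scan that this diff removes from three call sites (two in this file, one in the Puma plugin further down). It handles both the separated and the inline flag form:

    LogStruct::Integrations::Puma.port_from_argv(["server", "-p", "3000"])   # => "3000"
    LogStruct::Integrations::Puma.port_from_argv(["server", "--port=4000"])  # => "4000"
    LogStruct::Integrations::Puma.port_from_argv(["console"])                # => nil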
@@ -236,16 +246,7 @@ module LogStruct
  # Fallback: if no listening address captured yet, infer from ARGV
  if T.cast(si[:listening], T::Array[T.untyped]).empty?
  begin
- port = T.let(nil, T.untyped)
- ARGV.each_with_index do |arg, idx|
- if arg == "-p" || arg == "--port"
- port = ARGV[idx + 1]
- break
- elsif arg.start_with?("--port=")
- port = arg.split("=", 2)[1]
- break
- end
- end
+ port = port_from_argv(ARGV)
  if port
  si[:listening] << "tcp://127.0.0.1:#{port}"
  end
@@ -389,6 +390,20 @@ module LogStruct
  end
  end
 
+ module ClusterWorkerPatch
+ extend T::Sig
+
+ sig { returns(T.untyped) }
+ def run
+ begin
+ ::SemanticLogger.reopen
+ rescue => e
+ ::LogStruct::Integrations::Puma.handle_integration_error(e)
+ end
+ super
+ end
+ end
+
  # Hook Rack::Handler::Puma.run to emit structured started/shutdown
  module RackHandlerPatch
  extend T::Sig
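Note: this is the Puma cluster-mode fix from the changelog. When Puma forks workers, SemanticLogger's background appender thread does not survive the fork, so request logs written inside workers were dropped; prepending ClusterWorkerPatch onto Puma::Cluster::Worker#run reopens the appenders in each forked worker before it starts serving. A minimal sketch of the conventional manual workaround (not part of this gem), for apps configuring Puma directly:

    # config/puma.rb
    on_worker_boot do
      # Restart SemanticLogger's appender thread in the forked worker process
      SemanticLogger.reopen
    end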
@@ -4,6 +4,7 @@
  require "rack"
  require "action_dispatch/middleware/show_exceptions"
  require_relative "rack/error_handling_middleware"
+ require_relative "rack_setup"
 
  module LogStruct
  module Integrations
@@ -15,12 +16,11 @@ module LogStruct
  # Set up Rack middleware for structured error logging
  sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  def self.setup(config)
- return nil unless config.enabled
- return nil unless config.integrations.enable_rack_error_handler
+ return nil unless RackSetup.enabled?(config)
 
  # Add structured logging middleware for security violations and errors
  # Need to insert after ShowExceptions to catch IP spoofing errors
- ::Rails.application.middleware.insert_after(
+ RackSetup.insert_after(
  ::ActionDispatch::ShowExceptions,
  Integrations::RackErrorHandler::Middleware
  )
@@ -4,6 +4,7 @@
  require "rack"
  require "action_dispatch/middleware/show_exceptions"
  require_relative "rack_error_handler/middleware"
+ require_relative "rack_setup"
 
  module LogStruct
  module Integrations
@@ -15,12 +16,11 @@ module LogStruct
  # Set up Rack middleware for structured error logging
  sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  def self.setup(config)
- return nil unless config.enabled
- return nil unless config.integrations.enable_rack_error_handler
+ return nil unless RackSetup.enabled?(config)
 
  # Add structured logging middleware for security violations and errors
  # Need to insert before RemoteIp to catch IP spoofing errors it raises
- ::Rails.application.middleware.insert_before(
+ RackSetup.insert_before(
  ::ActionDispatch::RemoteIp,
  Integrations::RackErrorHandler::Middleware
  )
@@ -0,0 +1,28 @@
+ # typed: strict
+ # frozen_string_literal: true
+
+ module LogStruct
+ module Integrations
+ module RackSetup
+ extend T::Sig
+
+ sig { params(config: LogStruct::Configuration).returns(T::Boolean) }
+ def self.enabled?(config)
+ return false unless config.enabled
+ return false unless config.integrations.enable_rack_error_handler
+
+ true
+ end
+
+ sig { params(anchor: T.untyped, middleware: T.untyped).void }
+ def self.insert_after(anchor, middleware)
+ ::Rails.application.middleware.insert_after(anchor, middleware)
+ end
+
+ sig { params(anchor: T.untyped, middleware: T.untyped).void }
+ def self.insert_before(anchor, middleware)
+ ::Rails.application.middleware.insert_before(anchor, middleware)
+ end
+ end
+ end
+ end
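Note: the new RackSetup module collapses the enable check and the Rails middleware insertion that the two Rack integrations above previously duplicated, in line with the jscpd/deduplication entry in the changelog; a thin wrapper like this is also straightforward to stub when the integrations are tested without a full Rails middleware stack. A hypothetical RSpec stub (not from the gem's test suite) to show the idea:

    allow(LogStruct::Integrations::RackSetup).to receive(:enabled?).and_return(true)
    allow(LogStruct::Integrations::RackSetup).to receive(:insert_before)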
@@ -17,10 +17,13 @@ module LogStruct
  sig { params(env: T.untyped).returns(T.untyped) }
  def call(env)
  request = ::ActionDispatch::Request.new(env)
- ::SemanticLogger.push_named_tags(request_id: request.request_id)
+ request_id = request.request_id
+ Thread.current[:logstruct_request_id] = request_id
+ ::SemanticLogger.push_named_tags(request_id: request_id)
  @app.call(env)
  ensure
  ::SemanticLogger.pop_named_tags
+ Thread.current[:logstruct_request_id] = nil
  end
  end
  end
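Note: the middleware now records the request id twice: as a SemanticLogger named tag (as before) and in Thread.current[:logstruct_request_id], which the formatter change near the top of this diff reads as a fallback for payloads that bypass named tags. The ensure block clears the thread-local because app servers reuse threads, so a stale id would otherwise bleed into logs emitted by the next request handled on the same thread. The same set-then-always-clear shape, distilled (illustrative, not the gem's code):

    class ThreadLocalRequestId
      def initialize(app)
        @app = app
      end

      def call(env)
        Thread.current[:logstruct_request_id] = ActionDispatch::Request.new(env).request_id
        @app.call(env)
      ensure
        Thread.current[:logstruct_request_id] = nil  # don't leak ids across pooled threads
      end
    end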
@@ -41,7 +41,8 @@ module ActiveSupport
  hash = T.unsafe(data).serialize
  tags.present? ? hash.merge(tags: tags) : hash
  else
- tags.present? ? {message: data.to_s, tags: tags} : {message: data.to_s}
+ base = {LogStruct::LogField::Message.serialize => data.to_s}
+ tags.present? ? base.merge(tags: tags) : base
  end
 
  # Delegate to LogStruct::Formatter for JSON serialization with filtering
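Note: this is the `msg` vs `message` fix from the changelog. Plain-string payloads flowing through the tagged-logging formatter now take their key from LogStruct::LogField::Message.serialize instead of a hard-coded :message, so they match the key used by structured LogStruct payloads. Assuming that field serializes to :msg, as the changelog states, the resulting hash changes like so:

    base = {msg: "Started GET /"}        # previously {message: "Started GET /"}
    base.merge(tags: ["req-1a2b3c"])     # => {msg: "Started GET /", tags: ["req-1a2b3c"]}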
@@ -46,16 +46,7 @@ module LogStruct
  next unless is_server
  begin
  require "log_struct/log/puma"
- port = T.let(nil, T.nilable(String))
- ARGV.each_with_index do |arg, idx|
- if arg == "-p" || arg == "--port"
- port = ARGV[idx + 1]
- break
- elsif arg.start_with?("--port=")
- port = arg.split("=", 2)[1]
- break
- end
- end
+ port = LogStruct::Integrations::Puma.port_from_argv(ARGV)
  started = LogStruct::Log::Puma::Start.new(
  mode: "single",
  environment: (defined?(::Rails) && ::Rails.respond_to?(:env)) ? ::Rails.env : nil,
@@ -62,44 +62,11 @@ module LogStruct
  def call(log, logger)
  # Handle LogStruct types specially with colorization
  if log.payload.is_a?(LogStruct::Log::Interfaces::CommonFields)
- # Get the LogStruct formatted JSON
- logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
-
- # Parse and colorize it
- begin
- parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
- colorized_json = colorize_json(parsed_data)
-
- # Use SemanticLogger's prefix formatting but with our colorized content
- prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
- time: format_time(log.time),
- level: format_level(log.level),
- process: log.process_info,
- name: format_name(log.name))
-
- "#{prefix}#{colorized_json}\n"
- rescue JSON::ParserError
- # Fallback to standard formatting
- super
- end
+ formatted = format_logstruct_payload(log)
+ formatted if formatted
  elsif log.payload.is_a?(Hash) || log.payload.is_a?(T::Struct)
- # Process hashes through our formatter then colorize
- begin
- logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
- parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
- colorized_json = colorize_json(parsed_data)
-
- prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
- time: format_time(log.time),
- level: format_level(log.level),
- process: log.process_info,
- name: format_name(log.name))
-
- "#{prefix}#{colorized_json}\n"
- rescue JSON::ParserError
- # Fallback to standard formatting
- super
- end
+ formatted = format_logstruct_payload(log)
+ formatted if formatted
  else
  # For plain messages, use SemanticLogger's default colorization
  super
@@ -139,6 +106,24 @@ module LogStruct
  .gsub(": null", ": " + colorize_text("null", :nil))
  end
 
+ sig { params(log: ::SemanticLogger::Log).returns(T.nilable(String)) }
+ def format_logstruct_payload(log)
+ logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
+
+ parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
+ colorized_json = colorize_json(parsed_data)
+
+ prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
+ time: format_time(log.time),
+ level: format_level(log.level),
+ process: log.process_info,
+ name: format_name(log.name))
+
+ "#{prefix}#{colorized_json}\n"
+ rescue JSON::ParserError
+ nil
+ end
+
  # Add ANSI color codes to text
  sig { params(text: String, color_type: Symbol).returns(String) }
  def colorize_text(text, color_type)
@@ -0,0 +1,29 @@
+ # typed: strict
+ # frozen_string_literal: true
+
+ require_relative "../../enums/level"
+
+ module LogStruct
+ module Log
+ module Interfaces
+ module CommonFieldBase
+ extend T::Sig
+ extend T::Helpers
+
+ interface!
+
+ sig { abstract.returns(Level) }
+ def level
+ end
+
+ sig { abstract.returns(Time) }
+ def timestamp
+ end
+
+ sig { abstract.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
+ def serialize(strict = true)
+ end
+ end
+ end
+ end
+ end
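Note: CommonFieldBase hoists the level/timestamp/serialize declarations that were duplicated verbatim in CommonFields and PublicCommonFields (removed in the hunks that follow). Sorbet allows an interface module to include another interface, so implementers of either interface still see the same abstract methods. A generic, self-contained sketch of that composition pattern (module names are illustrative, not the gem's):

    require "sorbet-runtime"

    module Base
      extend T::Sig
      extend T::Helpers
      interface!

      sig { abstract.returns(Time) }
      def timestamp; end
    end

    module Extended
      extend T::Sig
      extend T::Helpers
      interface!
      include Base  # Extended now also requires #timestamp from implementers

      sig { abstract.returns(Symbol) }
      def event; end
    end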
@@ -4,6 +4,7 @@
  require_relative "../../enums/source"
  require_relative "../../enums/event"
  require_relative "../../enums/level"
+ require_relative "common_field_base"
 
  module LogStruct
  module Log
@@ -14,6 +15,8 @@ module LogStruct
 
  interface!
 
+ include CommonFieldBase
+
  sig { abstract.returns(Source) }
  def source
  end
@@ -21,18 +24,6 @@ module LogStruct
  sig { abstract.returns(Event) }
  def event
  end
-
- sig { abstract.returns(Level) }
- def level
- end
-
- sig { abstract.returns(Time) }
- def timestamp
- end
-
- sig { abstract.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
- def serialize(strict = true)
- end
  end
  end
  end
@@ -2,6 +2,7 @@
  # frozen_string_literal: true
 
  require_relative "../../enums/level"
+ require_relative "common_field_base"
 
  module LogStruct
  module Log
@@ -12,17 +13,7 @@ module LogStruct
 
  interface!
 
- sig { abstract.returns(Level) }
- def level
- end
-
- sig { abstract.returns(Time) }
- def timestamp
- end
-
- sig { abstract.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
- def serialize(strict = true)
- end
+ include CommonFieldBase
  end
  end
  end
@@ -2,5 +2,5 @@
  # frozen_string_literal: true
 
  module LogStruct
- VERSION = "0.1.10"
+ VERSION = "0.1.11"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: logstruct
  version: !ruby/object:Gem::Version
- version: 0.1.10
+ version: 0.1.11
  platform: ruby
  authors:
  - DocSpring
@@ -271,6 +271,7 @@ files:
  - lib/log_struct/integrations/ahoy.rb
  - lib/log_struct/integrations/carrierwave.rb
  - lib/log_struct/integrations/dotenv.rb
+ - lib/log_struct/integrations/event_time.rb
  - lib/log_struct/integrations/good_job.rb
  - lib/log_struct/integrations/good_job/log_subscriber.rb
  - lib/log_struct/integrations/good_job/logger.rb
@@ -281,6 +282,7 @@ files:
  - lib/log_struct/integrations/rack.rb
  - lib/log_struct/integrations/rack_error_handler.rb
  - lib/log_struct/integrations/rack_error_handler/middleware.rb
+ - lib/log_struct/integrations/rack_setup.rb
  - lib/log_struct/integrations/request_context.rb
  - lib/log_struct/integrations/request_context/middleware.rb
  - lib/log_struct/integrations/shrine.rb
@@ -357,6 +359,7 @@ files:
  - lib/log_struct/semantic_logger/setup.rb
  - lib/log_struct/shared/add_request_fields.rb
  - lib/log_struct/shared/interfaces/additional_data_field.rb
+ - lib/log_struct/shared/interfaces/common_field_base.rb
  - lib/log_struct/shared/interfaces/common_fields.rb
  - lib/log_struct/shared/interfaces/public_common_fields.rb
  - lib/log_struct/shared/interfaces/request_fields.rb