logstruct 0.0.2 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -22
- data/README.md +25 -2
- data/lib/log_struct/boot_buffer.rb +28 -0
- data/lib/log_struct/builders/active_job.rb +84 -0
- data/lib/log_struct/concerns/configuration.rb +126 -13
- data/lib/log_struct/concerns/error_handling.rb +3 -7
- data/lib/log_struct/concerns/logging.rb +5 -5
- data/lib/log_struct/config_struct/filters.rb +18 -0
- data/lib/log_struct/config_struct/integrations.rb +16 -12
- data/lib/log_struct/configuration.rb +13 -0
- data/lib/log_struct/enums/event.rb +13 -0
- data/lib/log_struct/enums/log_field.rb +154 -0
- data/lib/log_struct/enums/source.rb +4 -1
- data/lib/log_struct/formatter.rb +29 -17
- data/lib/log_struct/integrations/action_mailer/error_handling.rb +3 -11
- data/lib/log_struct/integrations/action_mailer/event_logging.rb +22 -12
- data/lib/log_struct/integrations/active_job/log_subscriber.rb +52 -48
- data/lib/log_struct/integrations/active_model_serializers.rb +49 -0
- data/lib/log_struct/integrations/active_record.rb +35 -5
- data/lib/log_struct/integrations/active_storage.rb +59 -20
- data/lib/log_struct/integrations/ahoy.rb +54 -0
- data/lib/log_struct/integrations/carrierwave.rb +13 -16
- data/lib/log_struct/integrations/dotenv.rb +278 -0
- data/lib/log_struct/integrations/good_job/log_subscriber.rb +86 -136
- data/lib/log_struct/integrations/good_job/logger.rb +8 -10
- data/lib/log_struct/integrations/good_job.rb +5 -7
- data/lib/log_struct/integrations/host_authorization.rb +25 -4
- data/lib/log_struct/integrations/lograge.rb +20 -14
- data/lib/log_struct/integrations/puma.rb +482 -0
- data/lib/log_struct/integrations/rack_error_handler/middleware.rb +11 -18
- data/lib/log_struct/integrations/shrine.rb +44 -19
- data/lib/log_struct/integrations/sorbet.rb +48 -0
- data/lib/log_struct/integrations.rb +25 -0
- data/lib/log_struct/log/action_mailer/delivered.rb +99 -0
- data/lib/log_struct/log/action_mailer/delivery.rb +99 -0
- data/lib/log_struct/log/action_mailer.rb +30 -45
- data/lib/log_struct/log/active_job/enqueue.rb +125 -0
- data/lib/log_struct/log/active_job/finish.rb +130 -0
- data/lib/log_struct/log/active_job/schedule.rb +125 -0
- data/lib/log_struct/log/active_job/start.rb +130 -0
- data/lib/log_struct/log/active_job.rb +41 -54
- data/lib/log_struct/log/active_model_serializers.rb +94 -0
- data/lib/log_struct/log/active_storage/delete.rb +87 -0
- data/lib/log_struct/log/active_storage/download.rb +103 -0
- data/lib/log_struct/log/active_storage/exist.rb +93 -0
- data/lib/log_struct/log/active_storage/metadata.rb +93 -0
- data/lib/log_struct/log/active_storage/stream.rb +93 -0
- data/lib/log_struct/log/active_storage/upload.rb +118 -0
- data/lib/log_struct/log/active_storage/url.rb +93 -0
- data/lib/log_struct/log/active_storage.rb +32 -68
- data/lib/log_struct/log/ahoy.rb +88 -0
- data/lib/log_struct/log/carrierwave/delete.rb +115 -0
- data/lib/log_struct/log/carrierwave/download.rb +131 -0
- data/lib/log_struct/log/carrierwave/upload.rb +141 -0
- data/lib/log_struct/log/carrierwave.rb +37 -72
- data/lib/log_struct/log/dotenv/load.rb +76 -0
- data/lib/log_struct/log/dotenv/restore.rb +76 -0
- data/lib/log_struct/log/dotenv/save.rb +76 -0
- data/lib/log_struct/log/dotenv/update.rb +76 -0
- data/lib/log_struct/log/dotenv.rb +12 -0
- data/lib/log_struct/log/error.rb +58 -46
- data/lib/log_struct/log/good_job/enqueue.rb +126 -0
- data/lib/log_struct/log/good_job/error.rb +151 -0
- data/lib/log_struct/log/good_job/finish.rb +136 -0
- data/lib/log_struct/log/good_job/log.rb +131 -0
- data/lib/log_struct/log/good_job/schedule.rb +136 -0
- data/lib/log_struct/log/good_job/start.rb +136 -0
- data/lib/log_struct/log/good_job.rb +40 -141
- data/lib/log_struct/log/interfaces/additional_data_field.rb +1 -17
- data/lib/log_struct/log/interfaces/common_fields.rb +1 -39
- data/lib/log_struct/log/interfaces/public_common_fields.rb +4 -0
- data/lib/log_struct/log/interfaces/request_fields.rb +1 -33
- data/lib/log_struct/log/plain.rb +59 -34
- data/lib/log_struct/log/puma/shutdown.rb +80 -0
- data/lib/log_struct/log/puma/start.rb +120 -0
- data/lib/log_struct/log/puma.rb +10 -0
- data/lib/log_struct/log/request.rb +132 -48
- data/lib/log_struct/log/security/blocked_host.rb +141 -0
- data/lib/log_struct/log/security/csrf_violation.rb +131 -0
- data/lib/log_struct/log/security/ip_spoof.rb +141 -0
- data/lib/log_struct/log/security.rb +40 -70
- data/lib/log_struct/log/shared/add_request_fields.rb +1 -26
- data/lib/log_struct/log/shared/merge_additional_data_fields.rb +1 -25
- data/lib/log_struct/log/shared/serialize_common.rb +1 -33
- data/lib/log_struct/log/shared/serialize_common_public.rb +44 -0
- data/lib/log_struct/log/shrine/delete.rb +85 -0
- data/lib/log_struct/log/shrine/download.rb +90 -0
- data/lib/log_struct/log/shrine/exist.rb +90 -0
- data/lib/log_struct/log/shrine/metadata.rb +90 -0
- data/lib/log_struct/log/shrine/upload.rb +105 -0
- data/lib/log_struct/log/shrine.rb +10 -67
- data/lib/log_struct/log/sidekiq.rb +65 -26
- data/lib/log_struct/log/sql.rb +113 -106
- data/lib/log_struct/log.rb +31 -32
- data/lib/log_struct/multi_error_reporter.rb +80 -22
- data/lib/log_struct/param_filters.rb +50 -7
- data/lib/log_struct/rails_boot_banner_silencer.rb +123 -0
- data/lib/log_struct/railtie.rb +71 -0
- data/lib/log_struct/semantic_logger/formatter.rb +4 -2
- data/lib/log_struct/semantic_logger/setup.rb +34 -18
- data/lib/log_struct/shared/interfaces/additional_data_field.rb +22 -0
- data/lib/log_struct/shared/interfaces/common_fields.rb +39 -0
- data/lib/log_struct/shared/interfaces/public_common_fields.rb +29 -0
- data/lib/log_struct/shared/interfaces/request_fields.rb +39 -0
- data/lib/log_struct/shared/shared/add_request_fields.rb +28 -0
- data/lib/log_struct/shared/shared/merge_additional_data_fields.rb +27 -0
- data/lib/log_struct/shared/shared/serialize_common.rb +58 -0
- data/lib/log_struct/version.rb +1 -1
- data/lib/log_struct.rb +22 -4
- data/logstruct.gemspec +3 -0
- metadata +108 -5
- data/lib/log_struct/log/interfaces/message_field.rb +0 -20
- data/lib/log_struct/log_keys.rb +0 -102

data/lib/log_struct/enums/log_field.rb
ADDED
@@ -0,0 +1,154 @@
+# typed: strict
+# frozen_string_literal: true
+
+# NOTE:
+# - This enum defines human-readable field names (constants) that map to compact
+#   JSON key symbols via `serialize` (e.g., Database => :db).
+# - The enum constant names are code-generated into
+#   `schemas/meta/log-fields.json` by `scripts/generate_structs.rb` and
+#   referenced from `schemas/meta/log-source-schema.json` to strictly validate
+#   field keys in `schemas/log_sources/*`.
+# - When adding or renaming fields here, run the generator so schema validation
+#   stays in sync.
+#
+# Use human-readable field names as the enum values and short field names for the JSON properties
+
+module LogStruct
+  class LogField < T::Enum
+    enums do
+      # Shared fields
+      Source = new(:src)
+      Event = new(:evt)
+      Timestamp = new(:ts)
+      Level = new(:lvl)
+
+      # Common fields
+      Message = new(:msg)
+      Data = new(:data)
+
+      # Request-related fields
+      Path = new(:path)
+      HttpMethod = new(:method) # property name was http_method
+      SourceIp = new(:source_ip)
+      UserAgent = new(:user_agent)
+      Referer = new(:referer)
+      RequestId = new(:request_id)
+
+      # HTTP-specific fields
+      Format = new(:format)
+      Controller = new(:controller)
+      Action = new(:action)
+      Status = new(:status)
+      # DurationMs already defined below for general metrics
+      View = new(:view)
+      Database = new(:db)
+      Params = new(:params)
+
+      # Security-specific fields
+      BlockedHost = new(:blocked_host)
+      BlockedHosts = new(:blocked_hosts)
+      ClientIp = new(:client_ip)
+      XForwardedFor = new(:x_forwarded_for)
+
+      # Email-specific fields
+      To = new(:to)
+      From = new(:from)
+      Subject = new(:subject)
+
+      # Error fields
+      ErrClass = new(:err_class)
+      Backtrace = new(:backtrace)
+
+      # Job-specific fields
+      JobId = new(:job_id)
+      JobClass = new(:job_class)
+      QueueName = new(:queue_name)
+      Arguments = new(:arguments)
+      RetryCount = new(:retry_count)
+      Retries = new(:retries)
+      Attempt = new(:attempt)
+      Executions = new(:executions)
+      ExceptionExecutions = new(:exception_executions)
+      ProviderJobId = new(:provider_job_id)
+      ScheduledAt = new(:scheduled_at)
+      StartedAt = new(:started_at)
+      FinishedAt = new(:finished_at)
+      DurationMs = new(:duration_ms)
+      WaitMs = new(:wait_ms)
+      # Deprecated: ExecutionTime/WaitTime/RunTime
+      ExecutionTime = new(:execution_time)
+      WaitTime = new(:wait_time)
+      RunTime = new(:run_time)
+      Priority = new(:priority)
+      CronKey = new(:cron_key)
+      ErrorMessage = new(:error_message)
+
+      # Dotenv fields
+      File = new(:file)
+      Vars = new(:vars)
+      Snapshot = new(:snapshot)
+
+      # Sidekiq-specific fields
+      ProcessId = new(:pid)
+      ThreadId = new(:tid)
+      Context = new(:ctx)
+
+      # Storage-specific fields (ActiveStorage)
+      Checksum = new(:checksum)
+      Exist = new(:exist)
+      Url = new(:url)
+      Prefix = new(:prefix)
+      Range = new(:range)
+
+      # Storage-specific fields (Shrine)
+      Storage = new(:storage)
+      Operation = new(:op)
+      FileId = new(:file_id)
+      Filename = new(:filename)
+      MimeType = new(:mime_type)
+      Size = new(:size)
+      Metadata = new(:metadata)
+      Location = new(:location)
+      UploadOptions = new(:upload_opts)
+      DownloadOptions = new(:download_opts)
+      Options = new(:opts)
+      Uploader = new(:uploader)
+
+      # CarrierWave-specific fields
+      Model = new(:model)
+      MountPoint = new(:mount_point)
+
+      # SQL-specific fields
+      Sql = new(:sql)
+      Name = new(:name)
+      RowCount = new(:row_count)
+      # Use Adapter for both AMS and SQL adapter name
+      BindParams = new(:bind_params)
+      DatabaseName = new(:db_name)
+      ConnectionPoolSize = new(:pool_size)
+      ActiveConnections = new(:active_count)
+      OperationType = new(:op_type)
+      TableNames = new(:table_names)
+
+      # ActiveModelSerializers fields
+      Serializer = new(:serializer)
+      Adapter = new(:adapter)
+      ResourceClass = new(:resource_class)
+
+      # Ahoy-specific fields
+      AhoyEvent = new(:ahoy_event)
+      Properties = new(:properties)
+
+      # Puma / server lifecycle fields
+      Mode = new(:mode)
+      PumaVersion = new(:puma_version)
+      PumaCodename = new(:puma_codename)
+      RubyVersion = new(:ruby_version)
+      MinThreads = new(:min_threads)
+      MaxThreads = new(:max_threads)
+      Environment = new(:environment)
+      ListeningAddresses = new(:listening_addresses)
+      Address = new(:addr)
+    end
+  end
+end
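
The constant-to-symbol mapping above is standard sorbet-runtime `T::Enum` behaviour: `serialize` returns the compact symbol passed to `new`, and `deserialize` resolves it back to the constant. A minimal sketch of how that reads in practice, assuming the gem is loaded through its `lib/log_struct.rb` entry point:

    require "log_struct"

    LogStruct::LogField::Database.serialize    # => :db
    LogStruct::LogField::DurationMs.serialize  # => :duration_ms
    LogStruct::LogField.deserialize(:db)       # => the Database enum value
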

data/lib/log_struct/enums/source.rb
CHANGED
@@ -7,8 +7,9 @@ module LogStruct
     enums do
       # Error sources
       TypeChecking = new(:type_checking) # For type checking errors (Sorbet)
-      LogStruct = new(:logstruct) # Errors from LogStruct itself
       Security = new(:security) # Security-related events
+      # Errors from LogStruct. (Cannot use LogStruct here because it confuses tapioca.)
+      Internal = new(:logstruct)
 
       # Application sources
       Rails = new(:rails) # For request-related logs/errors
@@ -21,6 +22,8 @@ module LogStruct
       Shrine = new(:shrine)
       CarrierWave = new(:carrierwave)
       Sidekiq = new(:sidekiq)
+      Dotenv = new(:dotenv)
+      Puma = new(:puma)
     end
   end
 end
data/lib/log_struct/formatter.rb
CHANGED
@@ -62,7 +62,7 @@ module LogStruct
         # Process each key-value pair
         arg.each do |key, value|
           # Check if this key should be filtered at any depth
-          result[key] = if ParamFilters.should_filter_key?(key)
+          result[key] = if ParamFilters.should_filter_key?(key, value)
             # Filter the value
             {_filtered: ParamFilters.summarize_json_attribute(key, value)}
           else
@@ -73,13 +73,7 @@ module LogStruct
 
         result
       when Array
-
-
-        # Filter large arrays, but don't truncate backtraces (arrays of strings that look like file:line)
-        if result.size > 10 && !looks_like_backtrace?(result)
-          result = result.take(10) + ["... and #{result.size - 10} more items"]
-        end
-        result
+        process_array(arg, recursion_depth: recursion_depth)
       when GlobalID::Identification
         begin
           arg.to_global_id
@@ -91,10 +85,10 @@ module LogStruct
           else
            # For non-ActiveRecord objects that failed to_global_id, try to get a string representation
            # If this also fails, we want to catch it and return the error placeholder
-            T.
+            String(T.cast(arg, Object))
          end
        rescue => e
-          LogStruct.handle_exception(e, source: Source::
+          LogStruct.handle_exception(e, source: Source::Internal)
          "[GLOBALID_ERROR]"
        end
      end
@@ -115,7 +109,7 @@ module LogStruct
        value_type: arg.class.name,
        recursion_depth: recursion_depth
      }
-      LogStruct.handle_exception(e, source: Source::
+      LogStruct.handle_exception(e, source: Source::Internal, context: context)
      arg
    end
 
@@ -166,7 +160,7 @@ module LogStruct
        object_class: log_value.class.name,
        object_inspect: log_value.inspect.truncate(100)
      }
-      LogStruct.handle_exception(e, source: Source::
+      LogStruct.handle_exception(e, source: Source::Internal, context: context)
 
      # Fall back to the string representation to ensure we continue processing
      log_value.to_s
@@ -207,17 +201,35 @@ module LogStruct
      "#{data.to_json}\n"
    end
 
+    sig { params(array: T::Array[T.untyped], recursion_depth: Integer).returns(T::Array[T.untyped]) }
+    def process_array(array, recursion_depth:)
+      return [] if array.empty?
+
+      if looks_like_backtrace_array?(array)
+        array.map { |value| process_values(value, recursion_depth: recursion_depth + 1) }
+      else
+        processed = []
+        array.each_with_index do |value, index|
+          break if index >= 10
+
+          processed << process_values(value, recursion_depth: recursion_depth + 1)
+        end
+
+        if array.size > 10
+          processed << "... and #{array.size - 10} more items"
+        end
+
+        processed
+      end
+    end
+
    # Check if an array looks like a backtrace (array of strings with file:line pattern)
    sig { params(array: T::Array[T.untyped]).returns(T::Boolean) }
-    def
-      return false if array.empty?
-
-      # Check if most elements look like backtrace lines (file.rb:123 or similar patterns)
+    def looks_like_backtrace_array?(array)
      backtrace_like_count = array.first(5).count do |element|
        element.is_a?(String) && element.match?(/\A[^:\s]+:\d+/)
      end
 
-      # If at least 3 out of the first 5 elements look like backtrace lines, treat as backtrace
      backtrace_like_count >= 3
    end
  end
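
The extracted `process_array` keeps the rule the old inline code applied: arrays longer than 10 elements are capped with a "... and N more items" marker, unless at least 3 of the first 5 elements match a `file:line` pattern, in which case the array is treated as a backtrace and kept whole. A standalone sketch of just that rule (the helper names here are illustrative, and the real formatter also recursively processes each element):

    def backtrace_like?(array)
      array.first(5).count { |e| e.is_a?(String) && e.match?(/\A[^:\s]+:\d+/) } >= 3
    end

    def truncate_for_logging(array)
      return array if array.size <= 10 || backtrace_like?(array)
      array.take(10) + ["... and #{array.size - 10} more items"]
    end

    truncate_for_logging((1..15).to_a)
    # => [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, "... and 5 more items"]

    truncate_for_logging(Array.new(12) { |i| "app/models/user.rb:#{i + 1}" })
    # => all 12 lines kept, because the array looks like a backtrace
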

data/lib/log_struct/integrations/action_mailer/error_handling.rb
CHANGED
@@ -75,11 +75,7 @@ module LogStruct
          }
 
          # Create the structured exception log
-          exception_data = Log
-            Source::Mailer,
-            error,
-            context
-          )
+          exception_data = Log.from_exception(Source::Mailer, error, context)
 
          # Log the structured error
          LogStruct.error(exception_data)
@@ -123,11 +119,7 @@ module LogStruct
          }
 
          # Create an exception log for structured logging
-          exception_data = Log
-            Source::Mailer,
-            error,
-            context
-          )
+          exception_data = Log.from_exception(Source::Mailer, error, context)
 
          # Log the exception with structured data
          LogStruct.error(exception_data)
@@ -144,7 +136,7 @@ module LogStruct
        sig { params(error: StandardError).void }
        def log_notification_event(error)
          # Create an error log data object
-          exception_data = Log
+          exception_data = Log.from_exception(
            Source::Mailer,
            error,
            {

data/lib/log_struct/integrations/action_mailer/event_logging.rb
CHANGED
@@ -36,11 +36,7 @@ module LogStruct
        private
 
        # Log a mailer event with the given event type
-        sig
-          params(event_type: Log::ActionMailer::ActionMailerEvent,
-            level: Symbol,
-            additional_data: T::Hash[Symbol, T.untyped]).returns(T.untyped)
-        end
+        sig { params(event_type: LogStruct::Event, level: Symbol, additional_data: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
        def log_mailer_event(event_type, level = :info, additional_data = {})
          # Get message (self refers to the mailer instance)
          mailer_message = message if respond_to?(:message)
@@ -62,16 +58,30 @@ module LogStruct
          from = mailer_message&.from&.first
          subject = mailer_message&.subject
 
-
-          log_data = Log::ActionMailer.new(
-            event: event_type,
+          base_fields = Log::ActionMailer::BaseFields.new(
            to: to,
            from: from,
-            subject: subject
-            additional_data: data
+            subject: subject
          )
-
-
+
+          log = case event_type
+          when Event::Delivery
+            Log::ActionMailer::Delivery.new(
+              **base_fields.to_kwargs,
+              additional_data: data,
+              timestamp: Time.now
+            )
+          when Event::Delivered
+            Log::ActionMailer::Delivered.new(
+              **base_fields.to_kwargs,
+              additional_data: data,
+              timestamp: Time.now
+            )
+          else
+            return
+          end
+          LogStruct.info(log)
+          log
        end
 
        # Extract message ID from the mailer

data/lib/log_struct/integrations/active_job/log_subscriber.rb
CHANGED
@@ -13,84 +13,88 @@ module LogStruct
      class LogSubscriber < ::ActiveJob::LogSubscriber
        extend T::Sig
 
-        sig { params(event:
+        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def enqueue(event)
-          job = event.payload[:job]
-
+          job = T.cast(event.payload[:job], ::ActiveJob::Base)
+          ts = event.time ? Time.at(event.time) : Time.now
+          base_fields = build_base_fields(job)
+          logger.info(Log::ActiveJob::Enqueue.new(
+            **base_fields.to_kwargs,
+            timestamp: ts
+          ))
        end
 
-        sig { params(event:
+        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def enqueue_at(event)
-          job = event.payload[:job]
-
+          job = T.cast(event.payload[:job], ::ActiveJob::Base)
+          ts = event.time ? Time.at(event.time) : Time.now
+          base_fields = build_base_fields(job)
+          logger.info(Log::ActiveJob::Schedule.new(
+            **base_fields.to_kwargs,
+            scheduled_at: job.scheduled_at,
+            timestamp: ts
+          ))
        end
 
-        sig { params(event:
+        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def perform(event)
-          job = event.payload[:job]
+          job = T.cast(event.payload[:job], ::ActiveJob::Base)
          exception = event.payload[:exception_object]
 
          if exception
            # Log the exception with the job context
            log_exception(exception, job, event)
          else
-
+            start_float = event.time
+            end_float = event.end
+            ts = start_float ? Time.at(start_float) : Time.now
+            finished_at = end_float ? Time.at(end_float) : Time.now
+            base_fields = build_base_fields(job)
+            logger.info(Log::ActiveJob::Finish.new(
+              **base_fields.to_kwargs,
+              duration_ms: event.duration.to_f,
+              finished_at: finished_at,
+              timestamp: ts
+            ))
          end
        end
 
-        sig { params(event:
+        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def perform_start(event)
-          job = event.payload[:job]
-
+          job = T.cast(event.payload[:job], ::ActiveJob::Base)
+          ts = event.time ? Time.at(event.time) : Time.now
+          started_at = ts
+          attempt = job.executions
+          base_fields = build_base_fields(job)
+          logger.info(Log::ActiveJob::Start.new(
+            **base_fields.to_kwargs,
+            started_at: started_at,
+            attempt: attempt,
+            timestamp: ts
+          ))
        end
 
        private
 
-        sig { params(
-        def
-
-          log_data = Log::ActiveJob.new(
-            event: event_type,
+        sig { params(job: ::ActiveJob::Base).returns(Log::ActiveJob::BaseFields) }
+        def build_base_fields(job)
+          Log::ActiveJob::BaseFields.new(
            job_id: job.job_id,
            job_class: job.class.to_s,
            queue_name: job.queue_name,
-
-
-            arguments: job.class.log_arguments? ? job.arguments : nil
-            # Store additional data in the data hash
-            additional_data: {
-              executions: job.executions,
-              scheduled_at: additional_data[:scheduled_at],
-              provider_job_id: job.provider_job_id
-            }.compact
+            executions: job.executions,
+            provider_job_id: job.provider_job_id,
+            arguments: ((job.class.respond_to?(:log_arguments?) && job.class.log_arguments?) ? job.arguments : nil)
          )
-
-          # Use Rails logger with our structured formatter
-          logger.info(log_data)
        end
 
-        sig { params(exception: StandardError, job:
+        sig { params(exception: StandardError, job: ::ActiveJob::Base, _event: ::ActiveSupport::Notifications::Event).void }
        def log_exception(exception, job, _event)
-
-          job_context =
-            job_id: job.job_id,
-            job_class: job.class.to_s,
-            queue_name: job.queue_name,
-            executions: job.executions,
-            provider_job_id: job.provider_job_id
-          }
+          base_fields = build_base_fields(job)
+          job_context = base_fields.to_kwargs
 
-
-          job_context[:arguments] = job.arguments if job.class.log_arguments?
-
-          # Create exception log with job source and context
-          log_data = Log::Error.from_exception(
-            Source::Job,
-            exception,
-            job_context
-          )
+          log_data = Log.from_exception(Source::Job, exception, job_context)
 
-          # Use Rails logger with our structured formatter
          logger.error(log_data)
        end
 
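
For context, subscribers like this are wired up through the standard ActiveSupport::Subscriber API; the actual hookup happens elsewhere in the gem (railtie/integration setup, not shown in this diff). A rough sketch of the mechanism, not the gem's exact code:

    require "active_job"
    require "log_struct"

    # Detach Rails' default ActiveJob log subscriber so job events are not logged twice
    # (assumption: LogStruct's own setup does something equivalent).
    ActiveJob::LogSubscriber.detach_from(:active_job)

    # Attach the structured subscriber shown above to the :active_job namespace.
    LogStruct::Integrations::ActiveJob::LogSubscriber.attach_to(:active_job)
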

data/lib/log_struct/integrations/active_model_serializers.rb
ADDED
@@ -0,0 +1,49 @@
+# typed: strict
+# frozen_string_literal: true
+
+require "active_support/notifications"
+
+module LogStruct
+  module Integrations
+    # ActiveModelSerializers integration. Subscribes to AMS notifications and
+    # emits structured logs with serializer/adapter/duration details.
+    module ActiveModelSerializers
+      extend T::Sig
+
+      sig { params(config: LogStruct::Configuration).returns(T.nilable(TrueClass)) }
+      def self.setup(config)
+        return nil unless defined?(::ActiveSupport::Notifications)
+
+        # Only activate if AMS appears to be present
+        return nil unless defined?(::ActiveModelSerializers)
+
+        # Subscribe to common AMS notification names; keep broad but specific
+        pattern = /\.active_model_serializers\z/
+
+        ::ActiveSupport::Notifications.subscribe(pattern) do |_name, started, finished, _unique_id, payload|
+          # started/finished are Time; convert to ms
+          duration_ms = ((finished - started) * 1000.0).round(3)
+
+          serializer = payload[:serializer] || payload[:serializer_class]
+          adapter = payload[:adapter]
+          resource = payload[:resource] || payload[:object]
+
+          LogStruct.info(
+            LogStruct::Log::ActiveModelSerializers.new(
+              message: "ams.render",
+              serializer: serializer&.to_s,
+              adapter: adapter&.to_s,
+              resource_class: resource&.class&.name,
+              duration_ms: duration_ms,
+              timestamp: started
+            )
+          )
+        rescue => e
+          LogStruct.handle_exception(e, source: LogStruct::Source::Rails, context: {integration: :active_model_serializers})
+        end
+
+        true
+      end
+    end
+  end
+end
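
The pattern above matches any notification whose name ends in `.active_model_serializers`, which includes AMS's own `render.active_model_serializers` instrumentation. With LogStruct loaded and this integration set up, a manual instrumentation like the following sketch (payload keys chosen to match what the block reads) would emit one structured log line:

    require "active_support/notifications"

    Post = Struct.new(:id)

    ActiveSupport::Notifications.instrument(
      "render.active_model_serializers",
      serializer: "PostSerializer",  # logged as serializer
      adapter: "json",               # logged as adapter
      resource: Post.new(1)          # its class name is logged as resource_class
    ) do
      # rendering work happens here
    end
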

data/lib/log_struct/integrations/active_record.rb
CHANGED
@@ -40,12 +40,33 @@ module LogStruct
      extend T::Sig
      extend IntegrationInterface
 
+      # Track subscription state keyed to the current Notifications.notifier instance
+      State = ::Struct.new(:subscribed, :notifier_id)
+      STATE = T.let(State.new(false, nil), State)
+
      # Set up SQL query logging integration
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless config.integrations.enable_sql_logging
        return nil unless defined?(::ActiveRecord::Base)
 
+        # Detach Rails' default ActiveRecord log subscriber to prevent
+        # duplicate/unstructured SQL debug output when LogStruct SQL logging
+        # is enabled. We still receive notifications via ActiveSupport.
+        if defined?(::ActiveRecord::LogSubscriber)
+          begin
+            ::ActiveRecord::LogSubscriber.detach_from(:active_record)
+          rescue => e
+            LogStruct.handle_exception(e, source: LogStruct::Source::Internal)
+          end
+        end
+
+        # Disable verbose query logs ("↳ caller") since LogStruct provides
+        # structured context and these lines are noisy/unstructured.
+        if ::ActiveRecord::Base.respond_to?(:verbose_query_logs=)
+          T.unsafe(::ActiveRecord::Base).verbose_query_logs = false
+        end
+
        subscribe_to_sql_notifications
        true
      end
@@ -55,11 +76,20 @@ module LogStruct
      # Subscribe to ActiveRecord's sql.active_record notifications
      sig { void }
      def self.subscribe_to_sql_notifications
+        # Avoid duplicate subscriptions; re-subscribe if the notifier was reset
+        notifier = ::ActiveSupport::Notifications.notifier
+        current_id = notifier&.object_id
+        if STATE.subscribed && STATE.notifier_id == current_id
+          return
+        end
+
        ::ActiveSupport::Notifications.subscribe("sql.active_record") do |name, start, finish, id, payload|
          handle_sql_event(name, start, finish, id, payload)
        rescue => error
-          LogStruct.handle_exception(error, source: LogStruct::Source::
+          LogStruct.handle_exception(error, source: LogStruct::Source::Internal)
        end
+        STATE.subscribed = true
+        STATE.notifier_id = current_id
      end
 
      # Process SQL notification event and create structured log
@@ -68,12 +98,12 @@ module LogStruct
        # Skip schema queries and Rails internal queries
        return if skip_query?(payload)
 
-
+        duration_ms = ((finish - start) * 1000.0).round(2)
 
        # Skip fast queries if threshold is configured
        config = LogStruct.config
        if config.integrations.sql_slow_query_threshold&.positive?
-          return if
+          return if duration_ms < config.integrations.sql_slow_query_threshold
        end
 
        sql_log = Log::SQL.new(
@@ -82,9 +112,9 @@ module LogStruct
          event: Event::Database,
          sql: payload[:sql]&.strip || "",
          name: payload[:name] || "SQL Query",
-
+          duration_ms: duration_ms,
          row_count: extract_row_count(payload),
-
+          adapter: extract_adapter_name(payload),
          bind_params: extract_and_filter_binds(payload),
          database_name: extract_database_name(payload),
          connection_pool_size: extract_pool_size(payload),