solid_log-core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/MIT-LICENSE +20 -0
- data/README.md +176 -0
- data/Rakefile +11 -0
- data/db/log_migrate/20251222000001_create_solid_log_raw.rb +15 -0
- data/db/log_migrate/20251222000002_create_solid_log_entries.rb +29 -0
- data/db/log_migrate/20251222000004_create_solid_log_fields.rb +17 -0
- data/db/log_migrate/20251222000005_create_solid_log_tokens.rb +13 -0
- data/db/log_migrate/20251222000006_create_solid_log_facet_cache.rb +13 -0
- data/db/log_migrate/20251222000007_create_solid_log_fts_triggers.rb +41 -0
- data/db/log_structure_mysql.sql +96 -0
- data/db/log_structure_postgresql.sql +118 -0
- data/db/log_structure_sqlite.sql +123 -0
- data/lib/generators/solid_log/install/install_generator.rb +134 -0
- data/lib/generators/solid_log/install/templates/solid_log.rb.tt +133 -0
- data/lib/solid_log/adapters/adapter_factory.rb +34 -0
- data/lib/solid_log/adapters/base_adapter.rb +88 -0
- data/lib/solid_log/adapters/mysql_adapter.rb +163 -0
- data/lib/solid_log/adapters/postgresql_adapter.rb +141 -0
- data/lib/solid_log/adapters/sqlite_adapter.rb +149 -0
- data/lib/solid_log/core/client/buffer.rb +112 -0
- data/lib/solid_log/core/client/configuration.rb +31 -0
- data/lib/solid_log/core/client/http.rb +89 -0
- data/lib/solid_log/core/client/lograge_formatter.rb +99 -0
- data/lib/solid_log/core/client/retry_handler.rb +48 -0
- data/lib/solid_log/core/client.rb +138 -0
- data/lib/solid_log/core/configuration.rb +60 -0
- data/lib/solid_log/core/services/correlation_service.rb +74 -0
- data/lib/solid_log/core/services/field_analyzer.rb +108 -0
- data/lib/solid_log/core/services/health_service.rb +151 -0
- data/lib/solid_log/core/services/retention_service.rb +72 -0
- data/lib/solid_log/core/services/search_service.rb +269 -0
- data/lib/solid_log/core/version.rb +5 -0
- data/lib/solid_log/core.rb +106 -0
- data/lib/solid_log/direct_logger.rb +197 -0
- data/lib/solid_log/models/entry.rb +185 -0
- data/lib/solid_log/models/facet_cache.rb +58 -0
- data/lib/solid_log/models/field.rb +100 -0
- data/lib/solid_log/models/raw_entry.rb +33 -0
- data/lib/solid_log/models/record.rb +5 -0
- data/lib/solid_log/models/token.rb +61 -0
- data/lib/solid_log/parser.rb +179 -0
- data/lib/solid_log/silence_middleware.rb +34 -0
- data/lib/solid_log-core.rb +2 -0
- metadata +244 -0
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
module SolidLog
  # A parsed, queryable log line. Rows are produced from RawEntry payloads
  # and carry both promoted columns (level, app, env, controller, ...) and a
  # JSON blob of dynamic attributes in `extra_fields`.
  class Entry < Record
    self.table_name = "solid_log_entries"

    belongs_to :raw_entry, foreign_key: :raw_id, optional: true

    # Override timestamp getter to ensure datetime casting for SQLite
    # (SQLite stores as string, column name doesn't end in _at)
    def timestamp
      value = super
      value.is_a?(String) ? Time.zone.parse(value) : value
    end

    validates :level, presence: true
    validates :timestamp, presence: true
    validates :created_at, presence: true

    # Ordered from least to most severe; used for facet sorting below.
    LOG_LEVELS = %w[debug info warn error fatal unknown].freeze

    # Scopes for filtering (support both single values and arrays for multi-select)
    scope :by_level, ->(level) { where(level: level) if level.present? }
    scope :by_app, ->(app) {
      return all if app.blank?
      app.is_a?(Array) ? where(app: Array(app).flatten.reject(&:blank?)) : where(app: app)
    }
    scope :by_env, ->(env) {
      return all if env.blank?
      env.is_a?(Array) ? where(env: Array(env).flatten.reject(&:blank?)) : where(env: env)
    }
    scope :by_controller, ->(controller) {
      return all if controller.blank?
      controller.is_a?(Array) ? where(controller: Array(controller).flatten.reject(&:blank?)) : where(controller: controller)
    }
    scope :by_action, ->(action) {
      return all if action.blank?
      action.is_a?(Array) ? where(action: Array(action).flatten.reject(&:blank?)) : where(action: action)
    }
    scope :by_path, ->(path) {
      return all if path.blank?
      path.is_a?(Array) ? where(path: Array(path).flatten.reject(&:blank?)) : where(path: path)
    }
    scope :by_method, ->(method) {
      return all if method.blank?
      method.is_a?(Array) ? where(method: Array(method).flatten.reject(&:blank?)) : where(method: method)
    }
    scope :by_status_code, ->(status_code) {
      return all if status_code.blank?
      status_code.is_a?(Array) ? where(status_code: Array(status_code).flatten.reject(&:blank?)) : where(status_code: status_code)
    }
    scope :by_request_id, ->(request_id) { where(request_id: request_id) if request_id.present? }
    scope :by_job_id, ->(job_id) { where(job_id: job_id) if job_id.present? }
    # Inclusive bounds; either endpoint may be omitted.
    scope :by_time_range, ->(start_time, end_time) {
      scope = all
      scope = scope.where("timestamp >= ?", start_time) if start_time.present?
      scope = scope.where("timestamp <= ?", end_time) if end_time.present?
      scope
    }
    scope :by_duration_range, ->(min_duration, max_duration) {
      scope = all
      scope = scope.where("duration >= ?", min_duration) if min_duration.present?
      scope = scope.where("duration <= ?", max_duration) if max_duration.present?
      scope
    }
    scope :recent, -> { order(timestamp: :asc) } # Ascending for terminal-style view (newest at bottom)
    scope :errors, -> { where(level: %w[error fatal]) }

    # Full-text search combined with LIKE for partial matching
    # Delegates to the database adapter to build the appropriate query
    # On adapter failure, falls back to a case-insensitive LIKE on `message`.
    def self.search_fts(query)
      return all if query.blank?

      SolidLog.adapter.search(query, all)
    rescue => e
      Rails.logger.error("Full-text search error: #{e.message}")
      # Fall back to simple LIKE
      adapter = SolidLog.adapter
      sanitized_query = sanitize_sql_like(query)
      like_condition = adapter.case_insensitive_like("message", "%#{sanitized_query}%")
      where(like_condition)
    end

    # Filter by a dynamic field in extra_fields JSON (database-agnostic)
    # NOTE(review): `json_extract` is interpolated into SQL; this assumes
    # adapter.extract_json_field sanitizes field_name -- confirm in adapters.
    def self.filter_by_field(field_name, field_value)
      return all if field_name.blank?

      adapter = SolidLog.adapter
      json_extract = adapter.extract_json_field("extra_fields", field_name)

      # SQLite json_extract returns values with their JSON types
      # For numeric values, we need to handle both string and number comparisons
      where("#{json_extract} = ? OR #{json_extract} = ?", field_value.to_s, field_value)
    rescue => e
      Rails.logger.error("Field filter error: #{e.message}")
      all
    end

    # Get correlation timeline for a request
    def self.correlation_timeline_for_request(request_id)
      by_request_id(request_id).recent
    end

    # Get correlation timeline for a job
    def self.correlation_timeline_for_job(job_id)
      by_job_id(job_id).recent
    end

    # Get available facets for a field: distinct non-nil column values,
    # sorted (levels by severity, everything else natural sort), capped at
    # `limit`. Returns [] for unknown columns.
    def self.facets_for(field, limit: 100)
      return [] unless column_names.include?(field.to_s)

      # Get distinct values
      values = distinct.pluck(field).compact

      # Sort and limit
      sorted_values = case field.to_s
      when "level"
        # Sort by severity
        values.sort_by { |l| LOG_LEVELS.index(l) || 999 }
      when "status_code"
        # Sort numerically
        values.sort
      when "controller", "action", "path"
        # Sort these potentially large lists
        values.sort
      else
        # Default: sort alphabetically
        values.sort
      end

      # Apply limit to all fields
      sorted_values.take(limit)
    end

    # Parse extra_fields JSON
    # Memoized on success; invalid JSON yields {} (and is re-parsed next call).
    def extra_fields_hash
      return {} if extra_fields.blank?
      @extra_fields_hash ||= JSON.parse(extra_fields)
    rescue JSON::ParserError
      {}
    end

    # Format log level with color class
    def level_badge_class
      case level
      when "debug"
        "badge-gray"
      when "info"
        "badge-blue"
      when "warn"
        "badge-yellow"
      when "error"
        "badge-red"
      when "fatal"
        "badge-dark-red"
      else
        "badge-gray"
      end
    end

    # Check if this entry has correlation data
    def correlated?
      request_id.present? || job_id.present?
    end

    # Prevent recursive logging
    # NOTE(review): around_create/around_update fire in addition to
    # around_save on normal saves, so the wrapper nests -- harmless but
    # redundant; confirm before simplifying.
    around_save :without_logging_wrapper
    around_create :without_logging_wrapper
    around_update :without_logging_wrapper
    around_destroy :without_logging_wrapper

    # Bulk deletes also bypass per-instance callbacks, so silence explicitly.
    def self.destroy_all
      SolidLog.without_logging { super }
    end

    def self.delete_all
      SolidLog.without_logging { super }
    end

    private

    # Runs the wrapped callback chain with SolidLog's own logging disabled.
    def without_logging_wrapper
      SolidLog.without_logging { yield }
    end
  end
end
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
module SolidLog
  # Database-backed cache for facet computations. Values are serialized as
  # JSON in `cache_value`; a nullable `expires_at` provides TTL expiry.
  class FacetCache < Record
    self.table_name = "solid_log_facet_cache"

    validates :key_name, presence: true, uniqueness: true
    validates :cache_value, presence: true

    scope :expired, -> { where("expires_at < ?", Time.current) }
    scope :valid, -> { where("expires_at IS NULL OR expires_at >= ?", Time.current) }

    # Fetch from cache or compute and store (thread-safe with database locking).
    #
    # Always returns JSON-decoded data. The freshly computed value is
    # round-tripped through JSON before being returned so the computing
    # caller sees the same representation (string keys, JSON scalar types)
    # as every later cache-hit caller.
    def self.fetch(key, ttl: 5.minutes, &block)
      # First attempt: check for existing valid cache (no lock for read-heavy workloads)
      cached = valid.find_by(key_name: key)
      return JSON.parse(cached.cache_value) if cached

      # Use database-level row locking to narrow the compute race. When no
      # row exists yet there is nothing to lock, so two concurrent misses may
      # still both compute; the upsert in `store` keeps that safe.
      transaction do
        # Double-check after acquiring lock (ensures only one thread computes)
        cached = valid.lock.find_by(key_name: key)
        return JSON.parse(cached.cache_value) if cached

        # No valid cache exists, compute value
        value = block.call
        store(key, value, ttl: ttl)
        # Bug fix: previously the raw Ruby value was returned here, so the
        # first caller could observe symbol keys / Ruby types while cache
        # hits observed JSON types. Normalize for consistency.
        JSON.parse(value.to_json)
      end
    end

    # Store a value in the cache; pass ttl: nil for a non-expiring entry.
    # Returns the original (unserialized) value.
    # NOTE: upsert bypasses validations, so the presence checks above do not
    # apply on this path.
    def self.store(key, value, ttl: 5.minutes)
      expires_at = ttl ? Time.current + ttl : nil

      upsert(
        { key_name: key, cache_value: value.to_json, expires_at: expires_at, updated_at: Time.current },
        unique_by: :key_name
      )

      value
    end

    # Invalidate a specific cache key (no-op when absent).
    def self.invalidate(key)
      find_by(key_name: key)&.destroy
    end

    # Clear all expired cache entries
    def self.cleanup_expired!
      expired.delete_all
    end

    # Clear all cache entries
    def self.clear_all!
      delete_all
    end
  end
end
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
module SolidLog
  # Registry of dynamic log fields seen in extra_fields payloads, tracking
  # usage counts, inferred types, and whether a field has been promoted to
  # its own column.
  class Field < Record
    self.table_name = "solid_log_fields"

    FIELD_TYPES = %w[string number boolean datetime array object].freeze
    FILTER_TYPES = %w[multiselect range exact contains tokens].freeze

    # High-cardinality fields that should default to tokens
    HIGH_CARDINALITY_PATTERNS = %w[user_id session_id ip_address uuid transaction_id].freeze

    validates :name, presence: true, uniqueness: true
    validates :field_type, presence: true, inclusion: { in: FIELD_TYPES }
    validates :filter_type, presence: true, inclusion: { in: FILTER_TYPES }
    validates :usage_count, numericality: { greater_than_or_equal_to: 0 }

    scope :hot_fields, ->(threshold = 1000) { where("usage_count >= ?", threshold).order(usage_count: :desc) }
    scope :promoted, -> { where(promoted: true) }
    scope :unpromoted, -> { where(promoted: false) }
    scope :recently_seen, ->(days = 7) { where("last_seen_at >= ?", days.days.ago) }

    # Increment usage count and update last_seen_at (two separate writes).
    def increment_usage!
      increment!(:usage_count)
      touch(:last_seen_at)
    end

    # Mark field as promoted (has its own column)
    def promote!
      update!(promoted: true)
    end

    # Mark field as unpromoted (stored in JSON)
    def demote!
      update!(promoted: false)
    end

    # Check if field is promotable (high usage and not already promoted)
    def promotable?(threshold: 1000)
      !promoted? && usage_count >= threshold
    end

    # Track a field occurrence: find-or-create the registry row, infer type
    # and filter type on first sight, then bump usage. Returns the Field.
    def self.track(name, value)
      field = find_or_initialize_by(name: name)
      field.field_type ||= infer_type(value)
      field.filter_type ||= infer_filter_type(field.field_type, name)

      # Save if new record before calling increment_usage!
      field.save! if field.new_record?

      field.increment_usage!
      field
    end

    # Infer field type from a sample value.
    # NOTE(review): any Time.parse-able string (e.g. "tuesday") is classified
    # as datetime -- possibly over-eager; confirm this is intended.
    def self.infer_type(value)
      case value
      when Time, DateTime, Date
        "datetime"
      when TrueClass, FalseClass
        "boolean"
      when Numeric
        "number"
      when Array
        "array"
      when Hash
        "object"
      when String
        # Try to parse as datetime
        begin
          Time.parse(value)
          "datetime"
        rescue ArgumentError
          "string"
        end
      else
        "string"
      end
    end

    # Infer filter type from field type and name.
    def self.infer_filter_type(field_type, field_name = nil)
      # Check if field name suggests high cardinality
      if field_name && HIGH_CARDINALITY_PATTERNS.any? { |pattern| field_name.to_s.include?(pattern) }
        return "tokens"
      end

      case field_type
      when "number", "datetime"
        "range"
      when "boolean"
        "exact"
      else
        "multiselect"
      end
    end

    # Bug fix: the original used a bare `private` keyword, which has no
    # effect on methods defined with `def self.` -- the inference helpers
    # were accidentally public. private_class_method makes them genuinely
    # private; `track` still reaches them via the implicit receiver.
    private_class_method :infer_type, :infer_filter_type
  end
end
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
module SolidLog
  # An unprocessed log payload exactly as received from a client; parsed
  # asynchronously into an Entry record.
  class RawEntry < Record
    self.table_name = "solid_log_raw"

    belongs_to :token, foreign_key: :token_id, optional: true
    has_one :entry, foreign_key: :raw_id, dependent: :destroy

    validates :payload, presence: true

    scope :recent, -> { order(received_at: :desc) }
    scope :parsed, -> { where(parsed: true) }
    scope :unparsed, -> { where(parsed: false) }
    scope :stale_unparsed, ->(threshold = 1.hour.ago) { unparsed.where("received_at < ?", threshold) }

    # The payload decoded from JSON. Memoized on success; an invalid payload
    # yields {} (and is re-attempted on the next call).
    def payload_hash
      @payload_hash ||= JSON.parse(payload)
    rescue JSON::ParserError => e
      Rails.logger.error "SolidLog: Failed to parse payload for RawEntry #{id}: #{e.message}"
      {}
    end

    # Claim a batch of unparsed entries for processing, delegating to the
    # configured adapter (locking strategy is database-specific).
    # Returns an array of RawEntry records; [] when claiming fails.
    def self.claim_batch(batch_size: 100)
      SolidLog.adapter.claim_batch(batch_size)
    rescue StandardError => e
      Rails.logger.error "SolidLog: Failed to claim batch: #{e.message}"
      []
    end
  end
end
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
require "openssl"
|
|
2
|
+
require "active_support/security_utils"
|
|
3
|
+
|
|
4
|
+
module SolidLog
  # API token for log ingestion. The plaintext token is shown exactly once
  # (at generation time); only an HMAC-SHA256 digest is persisted.
  class Token < Record
    self.table_name = "solid_log_tokens"

    has_many :raw_entries, foreign_key: :token_id, dependent: :nullify

    validates :name, presence: true
    validates :token_hash, presence: true, uniqueness: true

    # Generate a new token and return it (only time it's visible).
    # Returns a Hash with :id, :name, :token (plaintext) and :created_at.
    def self.generate!(name)
      plaintext = "slk_" + SecureRandom.hex(32)
      token = new(name: name)
      token.token_hash = hash_token(plaintext)
      token.save!

      {
        id: token.id,
        name: token.name,
        token: plaintext,
        created_at: token.created_at
      }
    end

    # Authenticate a plaintext token - O(1) database lookup.
    # Returns the matching Token, or nil.
    def self.authenticate(plaintext)
      return nil if plaintext.blank?

      hashed = hash_token(plaintext)
      find_by(token_hash: hashed)
    end

    # Authenticate a plaintext token against this token's hash.
    def authenticate(plaintext)
      return false if plaintext.blank? || token_hash.blank?

      # Use constant-time comparison to prevent timing attacks.
      # `send` is required because hash_token is (now genuinely) private and
      # is being called here with an explicit receiver.
      ActiveSupport::SecurityUtils.secure_compare(
        self.class.send(:hash_token, plaintext),
        token_hash
      )
    end

    # Touch last_used_at timestamp (skips validations and callbacks).
    def touch_last_used!
      update_column(:last_used_at, Time.current)
    end

    # Generate deterministic hash using HMAC-SHA256 keyed on
    # secret_key_base. Deterministic hashing allows O(1) database lookups
    # while keeping the plaintext unrecoverable.
    def self.hash_token(plaintext)
      secret_key = Rails.application.secret_key_base || raise("secret_key_base not configured")
      OpenSSL::HMAC.hexdigest("SHA256", secret_key, plaintext)
    end

    # Bug fix: the original used a bare `private` keyword, which does NOT
    # apply to `def self.` methods -- hash_token was accidentally public.
    # private_class_method makes it genuinely private; class-level callers
    # (generate!/authenticate) still reach it via the implicit receiver.
    private_class_method :hash_token
  end
end
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
module SolidLog
  # Turns a raw JSON log line into a hash of normalized standard fields plus
  # an :extra_fields hash of everything the parser did not recognize.
  class Parser
    STANDARD_FIELDS = %w[
      timestamp time occurred_at created_at
      level severity
      message msg text
      request_id
      job_id
      duration duration_ms
      status status_code
      controller
      action
      path
      method http_method
      app application
      env environment
    ].freeze

    VALID_LEVELS = %w[debug info warn error fatal unknown].freeze

    # Parse a raw JSON string into a field hash.
    # Returns nil for blank input, invalid JSON, or an empty payload.
    def parse(raw_json)
      return nil if raw_json.blank?

      payload = JSON.parse(raw_json)
      payload.blank? ? nil : extract_fields(payload)
    rescue JSON::ParserError
      nil
    end

    # Convenience class-level entry point.
    def self.parse(raw_json)
      new.parse(raw_json)
    end

    private

    # Merge normalized standard fields with the leftover dynamic fields.
    # extra_fields stays a Hash here; the caller serializes it to JSON.
    def extract_fields(payload)
      standard = extract_standard_fields(payload)
      dynamic = extract_dynamic_fields(payload, standard.keys)
      standard.merge(extra_fields: dynamic.empty? ? {} : dynamic)
    end

    # Normalize the well-known fields, dropping any that are absent.
    # timestamp is always present (falls back to now); created_at is left to
    # Rails when the entry is saved.
    def extract_standard_fields(payload)
      {
        timestamp: extract_timestamp(payload),
        level: normalize_level(payload["level"] || payload["severity"] || "info"),
        message: payload["message"] || payload["msg"] || payload["text"],
        app: payload["app"] || payload["application"],
        env: payload["env"] || payload["environment"],
        request_id: payload["request_id"],
        job_id: payload["job_id"],
        controller: payload["controller"],
        action: payload["action"],
        path: payload["path"],
        method: payload["method"] || payload["http_method"],
        duration: extract_duration(payload),
        status_code: extract_status_code(payload)
      }.compact
    end

    # Everything not recognized as a standard field is a dynamic field.
    def extract_dynamic_fields(payload, standard_keys)
      leftovers = payload.dup
      (STANDARD_FIELDS + standard_keys.map(&:to_s)).each { |key| leftovers.delete(key) }
      leftovers
    end

    # Clamp the level to the known set; anything unrecognized becomes "info".
    def normalize_level(level)
      normalized = level.to_s.downcase
      VALID_LEVELS.include?(normalized) ? normalized : "info"
    end

    # When the log event occurred: the first parseable of
    # timestamp/time/occurred_at/created_at, falling back to the current
    # time. Numeric values may be epoch seconds or epoch milliseconds.
    def extract_timestamp(payload)
      %w[timestamp time occurred_at created_at].each do |key|
        raw = payload[key]
        next if raw.blank?

        begin
          case raw
          when String
            return Time.parse(raw)
          when Numeric
            # Heuristic: epoch values past ~year 2286 in seconds are
            # treated as milliseconds.
            return raw > 10_000_000_000 ? Time.at(raw / 1000.0) : Time.at(raw)
          when Time, DateTime
            return raw
          end
        rescue ArgumentError
          next
        end
      end

      # Fallback to current time
      Time.current
    end

    # Duration in milliseconds, or nil when absent.
    def extract_duration(payload)
      raw = payload["duration"] || payload["duration_ms"]
      raw.blank? ? nil : raw.to_f
    end

    # HTTP status code as an integer, or nil when absent.
    def extract_status_code(payload)
      raw = payload["status"] || payload["status_code"]
      raw.blank? ? nil : raw.to_i
    end

    # Record each dynamic field in the Field registry (best-effort; failures
    # are only logged at debug level).
    # NOTE(review): not invoked from #parse in this file -- presumably
    # called by a background job; confirm before removing.
    def track_fields(payload)
      payload.each do |key, value|
        next if STANDARD_FIELDS.include?(key)

        SolidLog.without_logging do
          field = Field.find_or_initialize_by(name: key)
          field.field_type ||= infer_type(value)
          field.usage_count ||= 0
          field.usage_count += 1
          field.last_seen_at = Time.current
          field.save if field.changed?
        end
      end
    rescue => e
      # Silently fail if field tracking has issues
      Rails.logger.debug("SolidLog: Failed to track fields: #{e.message}") if defined?(Rails)
    end

    # Best-effort type inference for dynamic field values.
    def infer_type(value)
      case value
      when TrueClass, FalseClass then "boolean"
      when Integer, Float then "number"
      when Time, DateTime, Date then "datetime"
      else "string"
      end
    end
  end
end
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
module SolidLog
  # Rack middleware that flags SolidLog's own HTTP traffic (UI and ingest
  # API) via a thread-local so the logger can skip it and avoid recursively
  # logging its own requests.
  class SilenceMiddleware
    def initialize(app)
      @app = app
    end

    # Sets Thread.current[:solid_log_silenced] for the duration of any
    # request targeting SolidLog itself; all other requests pass through
    # untouched.
    def call(env)
      return @app.call(env) unless solid_log_request?(Rack::Request.new(env))

      Thread.current[:solid_log_silenced] = true
      begin
        @app.call(env)
      ensure
        Thread.current[:solid_log_silenced] = nil
      end
    end

    private

    # True for UI routes (/admin/logs, any path containing "solid_log") and
    # the ingest API (/api/v1/ingest).
    def solid_log_request?(request)
      path = request.path
      path.start_with?("/admin/logs", "/api/v1/ingest") || path.include?("solid_log")
    end
  end
end
|