solid_log-core 0.1.0 → 0.2.1

This diff shows the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -0,0 +1,248 @@
+# frozen_string_literal: true
+
+module SolidLog
+  module Core
+    module Services
+      # BatchParsingService processes batches of unparsed raw log entries.
+      #
+      # This service is the single source of truth for parsing logic, used by:
+      # - Puma plugin (inline processing)
+      # - ParseJob (Solid Queue / ActiveJob)
+      # - solid_log-service (dedicated service process)
+      #
+      # @example Basic usage
+      #   stats = BatchParsingService.process_batch(batch_size: 200)
+      #   # => { processed: 150, inserted: 145, errors: 5 }
+      #
+      # @example With custom callback
+      #   callback = ->(entry_ids) { puts "Processed #{entry_ids.size} entries" }
+      #   BatchParsingService.process_batch(broadcast_callback: callback)
+      class BatchParsingService
+        # Process a batch of unparsed raw entries
+        #
+        # @param batch_size [Integer] Number of entries to process (default: config value)
+        # @param logger [Logger] Logger instance (default: SolidLog.logger)
+        # @param broadcast_callback [Proc] Callback for broadcasting new entries
+        # @return [Hash] Statistics hash with :processed, :inserted, :errors keys
+        def self.process_batch(batch_size: nil, logger: nil, broadcast_callback: nil)
+          batch_size ||= SolidLog.configuration.parser_batch_size
+          logger ||= SolidLog.logger
+
+          stats = { processed: 0, inserted: 0, errors: 0 }
+
+          SolidLog.without_logging do
+            # Claim a batch of unparsed entries
+            raw_entries = RawEntry.claim_batch(batch_size: batch_size)
+
+            return stats if raw_entries.empty?
+
+            # Log to STDERR to avoid recursion (only in debug mode)
+            $stderr.puts "[SolidLog::BatchParsingService] Processing #{raw_entries.size} raw entries" if ENV["SOLIDLOG_DEBUG"]
+
+            # Get promoted fields cache (for performance)
+            promoted_fields = get_promoted_fields_cache
+
+            # Process each entry
+            entries_to_insert = []
+            fields_to_track = {}
+
+            raw_entries.each do |raw_entry|
+              begin
+                # Parse the raw payload
+                parsed = Parser.parse(raw_entry.payload)
+
+                # Extract dynamic fields for field registry
+                extra_fields = parsed.delete(:extra_fields) || {}
+                track_fields(fields_to_track, extra_fields)
+
+                # Prepare entry for insertion
+                entry_data = {
+                  raw_id: raw_entry.id,
+                  timestamp: parsed[:timestamp],
+                  created_at: Time.current, # When entry was parsed/created
+                  level: parsed[:level],
+                  app: parsed[:app],
+                  env: parsed[:env],
+                  message: parsed[:message],
+                  request_id: parsed[:request_id],
+                  job_id: parsed[:job_id],
+                  duration: parsed[:duration],
+                  status_code: parsed[:status_code],
+                  controller: parsed[:controller],
+                  action: parsed[:action],
+                  path: parsed[:path],
+                  method: parsed[:method],
+                  extra_fields: extra_fields.to_json
+                }
+
+                # Populate promoted field columns
+                populate_promoted_fields(entry_data, extra_fields, promoted_fields)
+
+                entries_to_insert << entry_data
+                stats[:processed] += 1
+              rescue StandardError => e
+                logger.error "SolidLog::BatchParsingService: Failed to parse entry #{raw_entry.id}: #{e.message}"
+                logger.error e.backtrace.first(5).join("\n") if e.backtrace
+                stats[:errors] += 1
+                # Entry remains unparsed so it can be retried or investigated
+              end
+            end
+
+            # Bulk insert parsed entries
+            if entries_to_insert.any?
+              raw_ids = entries_to_insert.map { |e| e[:raw_id] }
+
+              Entry.insert_all(entries_to_insert)
+              stats[:inserted] = entries_to_insert.size
+              # Log to STDERR to avoid recursion (only in debug mode)
+              $stderr.puts "[SolidLog::BatchParsingService] Inserted #{entries_to_insert.size} entries" if ENV["SOLIDLOG_DEBUG"]
+
+              # Broadcast new entry IDs for live tail
+              new_entry_ids = Entry.where(raw_id: raw_ids).pluck(:id)
+              broadcast_entries(new_entry_ids, broadcast_callback, logger)
+            end
+
+            # Update field registry
+            update_field_registry(fields_to_track, logger)
+          end
+
+          stats
+        end
+
+        private
+
+        # Track field occurrences for the registry
+        def self.track_fields(fields_hash, extra_fields)
+          extra_fields.each do |key, value|
+            fields_hash[key] ||= { values: [], count: 0 }
+            fields_hash[key][:count] += 1
+            fields_hash[key][:type] ||= infer_field_type(value)
+          end
+        end
+
+        # Update the field registry with tracked fields
+        def self.update_field_registry(fields_hash, logger)
+          fields_hash.each do |name, data|
+            field = Field.find_or_initialize_by(name: name)
+            field.field_type ||= data[:type]
+            field.usage_count = field.usage_count.to_i + data[:count] # to_i: usage_count may be nil on a newly initialized Field
+            field.last_seen_at = Time.current
+            field.save!
+          end
+        rescue StandardError => e
+          logger.error "SolidLog::BatchParsingService: Failed to update field registry: #{e.message}"
+        end
+
+        # Infer field type from value
+        def self.infer_field_type(value)
+          case value
+          when String
+            "string"
+          when Numeric
+            "number"
+          when TrueClass, FalseClass
+            "boolean"
+          when Time, DateTime, Date
+            "datetime"
+          when Array
+            "array"
+          when Hash
+            "object"
+          else
+            "string"
+          end
+        end
+
+        # Get cache of promoted fields and their types
+        # Returns hash: { "field_name" => "field_type", ... }
+        def self.get_promoted_fields_cache
+          @promoted_fields_cache ||= {}
+          @promoted_fields_cache_time ||= Time.at(0)
+
+          # Refresh cache every 60 seconds
+          if Time.current - @promoted_fields_cache_time > 60
+            promoted = Field.promoted.pluck(:name, :field_type).to_h
+            available_columns = Entry.column_names
+
+            # Only include fields that have actual columns in the table
+            @promoted_fields_cache = promoted.select { |name, _type| available_columns.include?(name) }
+            @promoted_fields_cache_time = Time.current
+          end
+
+          @promoted_fields_cache
+        rescue StandardError
+          # If Field table doesn't exist yet or there's an error, return empty hash
+          {}
+        end
+
+        # Populate promoted field columns in entry_data
+        # Extracts values from extra_fields and adds them to entry_data
+        # Note: values remain in extra_fields JSON for backward compatibility
+        def self.populate_promoted_fields(entry_data, extra_fields, promoted_fields)
+          promoted_fields.each do |field_name, field_type|
+            next unless extra_fields.key?(field_name)
+
+            value = extra_fields[field_name]
+            next if value.nil?
+
+            # Type coercion for promoted field
+            begin
+              typed_value = coerce_field_value(value, field_type)
+              entry_data[field_name.to_sym] = typed_value
+            rescue StandardError
+              # If type coercion fails, skip this field
+              # Field remains in extra_fields JSON
+            end
+          end
+        end
+
+        # Coerce a value to the specified field type
+        def self.coerce_field_value(value, field_type)
+          case field_type
+          when "number"
+            value.is_a?(Numeric) ? value : value.to_f
+          when "boolean"
+            [true, "true", "1", 1].include?(value)
+          when "datetime"
+            value.is_a?(Time) ? value : Time.parse(value.to_s)
+          when "string"
+            value.to_s
+          when "array", "object"
+            # Arrays and objects stored as JSON text
+            value.is_a?(String) ? value : value.to_json
+          else
+            value
+          end
+        end
+
+        # Broadcast new entries via callback or ActionCable
+        def self.broadcast_entries(entry_ids, callback, logger)
+          return if entry_ids.empty?
+
+          # Try user-configured callback first
+          if callback&.respond_to?(:call)
+            callback.call(entry_ids)
+            return
+          end
+
+          # Try configuration callback
+          if SolidLog.configuration.after_entries_inserted&.respond_to?(:call)
+            SolidLog.configuration.after_entries_inserted.call(entry_ids)
+            return
+          end
+
+          # Fallback to ActionCable if available
+          if defined?(ActionCable) && ActionCable.server
+            ActionCable.server.broadcast(
+              "solid_log_new_entries",
+              { entry_ids: entry_ids }
+            )
+          end
+        rescue StandardError => e
+          # Silent failure - broadcasting is optional (log to STDERR to avoid recursion)
+          $stderr.puts "[SolidLog::BatchParsingService] Broadcast failed: #{e.message}" if ENV["SOLIDLOG_DEBUG"]
+        end
+      end
+    end
+  end
+end
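
The stats hash returned by process_batch makes it straightforward to drive a drain loop from any of the three consumers listed in the class comment. A hedged sketch only — the job class name, queue, and loop policy below are illustrative, not part of the gem:

    # Hypothetical ActiveJob wrapper around BatchParsingService.
    # Everything here except the process_batch call is an assumption.
    class DrainLogsJob < ActiveJob::Base
      queue_as :default

      def perform
        loop do
          stats = SolidLog::Core::Services::BatchParsingService.process_batch(batch_size: 500)
          # Stop once a claimed batch yields nothing (queue drained, or every
          # entry errored and was left unparsed for later inspection).
          break if stats[:processed].zero?
        end
      end
    end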
@@ -36,7 +36,7 @@ module SolidLog
 
         # Only auto-promote fields with high priority
         if candidate[:priority] >= 80
-          Rails.logger.info "FieldAnalyzer: Auto-promoting field '#{field.name}' (usage: #{field.usage_count}, priority: #{candidate[:priority]})"
+          SolidLog.logger.info "FieldAnalyzer: Auto-promoting field '#{field.name}' (usage: #{field.usage_count}, priority: #{candidate[:priority]})"
 
           field.promote!
           promoted_count += 1
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+module SolidLog
+  module Core
+    # Generates ActiveRecord migrations for field promotions
+    # When a field is promoted, this creates a timestamped migration file that:
+    # 1. Adds a new column to solid_log_entries
+    # 2. Creates an index on the new column
+    # 3. Backfills existing data from the extra_fields JSON
+    class MigrationGenerator
+      class << self
+        # Generate a migration file for promoting a field
+        # @param field [Field] The field to promote
+        # @return [String, nil] Path to generated migration file, or nil if error
+        def generate_promotion_migration(field)
+          return nil unless field.is_a?(Field)
+
+          timestamp = Time.current.utc.strftime("%Y%m%d%H%M%S")
+          class_name = "AddPromoted#{field.name.camelize}ToEntries"
+          filename = "#{timestamp}_#{class_name.underscore}.rb"
+
+          # Determine where to write the migration
+          migration_path = determine_migration_path
+          FileUtils.mkdir_p(migration_path) unless Dir.exist?(migration_path)
+
+          file_path = File.join(migration_path, filename)
+
+          # Generate migration content
+          content = generate_migration_content(field, class_name)
+
+          # Write file
+          File.write(file_path, content)
+
+          SolidLog.logger&.info("Generated promotion migration: #{file_path}")
+          file_path
+        rescue => e
+          SolidLog.logger&.error("Failed to generate migration for field #{field.name}: #{e.message}")
+          nil
+        end
+
+        private
+
+        # Determine where to write the migration file
+        # Tries multiple locations in order of preference
+        def determine_migration_path
+          candidates = []
+
+          # Rails app log_migrate directory
+          candidates << Rails.root.join("db", "log_migrate") if defined?(Rails)
+
+          # Standalone service directory
+          candidates << File.expand_path("../../../../../solid_log-service/db/log_migrate", __FILE__)
+
+          # Core gem directory (fallback)
+          candidates << File.expand_path("../../../../db/log_migrate", __FILE__)
+
+          # Use first existing directory, or first candidate if none exist
+          candidates.find { |path| Dir.exist?(path) } || candidates.first
+        end
+
+        # Generate the Ruby migration code
+        def generate_migration_content(field, class_name)
+          <<~RUBY
+            # frozen_string_literal: true
+
+            # Migration auto-generated by SolidLog field promotion
+            # Promotes field '#{field.name}' from JSON to dedicated column
+            class #{class_name} < ActiveRecord::Migration[8.0]
+              def up
+                # Add column for promoted field
+                add_column :solid_log_entries, :#{field.name}, :#{sql_type(field.field_type)}
+
+                # Add index for efficient querying
+                add_index :solid_log_entries, :#{field.name}, name: "idx_entries_#{field.name}"
+
+                # Backfill existing data from extra_fields JSON
+                # Process in batches to avoid memory issues
+                say_with_time "Backfilling #{field.name} from extra_fields JSON" do
+                  batch_size = 1000
+                  offset = 0
+
+                  loop do
+                    entries = SolidLog::Entry
+                      .where.not(extra_fields: nil)
+                      .limit(batch_size)
+                      .offset(offset)
+
+                    break if entries.empty?
+
+                    entries.each do |entry|
+                      begin
+                        extra = JSON.parse(entry.extra_fields)
+                        value = extra['#{field.name}']
+
+                        if value.present?
+                          # Type coercion
+                          typed_value = coerce_value(value, '#{field.field_type}')
+                          entry.update_column(:#{field.name}, typed_value)
+                        end
+                      rescue JSON::ParserError, TypeError => e
+                        # Skip entries with invalid JSON or type conversion errors
+                        next
+                      end
+                    end
+
+                    offset += batch_size
+                  end
+                end
+              end
+
+              def down
+                remove_index :solid_log_entries, name: "idx_entries_#{field.name}"
+                remove_column :solid_log_entries, :#{field.name}
+              end
+
+              private
+
+              def coerce_value(value, field_type)
+                case field_type
+                when 'number'
+                  value.is_a?(Numeric) ? value : value.to_f
+                when 'boolean'
+                  [true, 'true', '1', 1].include?(value)
+                when 'datetime'
+                  value.is_a?(Time) ? value : Time.parse(value.to_s)
+                when 'string'
+                  value.to_s
+                else
+                  value
+                end
+              rescue => e
+                nil # Return nil for unconvertible values
+              end
+            end
+          RUBY
+        end
+
+        # Map field types to SQL column types
+        def sql_type(field_type)
+          case field_type
+          when "number"
+            :float
+          when "boolean"
+            :boolean
+          when "datetime"
+            :datetime
+          when "string"
+            :string
+          when "array"
+            :text # Store as JSON text
+          when "object"
+            :text # Store as JSON text
+          else
+            :string # Default to string
+          end
+        end
+      end
+    end
+  end
+end
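
Because generate_promotion_migration only writes a file and never touches the schema itself, promotion is a two-step workflow: generate, then migrate. A hedged sketch, assuming a Field record named "tenant_id" exists (the model lookup scope is a guess; the MigrationGenerator and MigrationRunner constant aliases come from a later hunk in this diff):

    # Illustrative only: write a promotion migration for one field.
    field = SolidLog::Field.find_by!(name: "tenant_id")  # model scope assumed
    path  = SolidLog::MigrationGenerator.generate_promotion_migration(field)

    if path
      puts "Migration written to #{path}; apply it with MigrationRunner"
    else
      warn "Generation failed; details were sent to SolidLog.logger"
    end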
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+module SolidLog
+  module Core
+    # Runs pending ActiveRecord migrations for the log database
+    # Used by standalone service to auto-migrate on startup
+    class MigrationRunner
+      class << self
+        # Run all pending migrations
+        # @param migration_paths [Array<String>] Paths to search for migrations (optional)
+        # @return [Boolean] true if migrations ran successfully, false otherwise
+        def run_pending_migrations(migration_paths: nil)
+          paths = migration_paths || default_migration_paths
+          paths = Array(paths).select { |p| Dir.exist?(p) }
+
+          if paths.empty?
+            SolidLog.logger&.warn("No migration directories found, skipping migrations")
+            return true
+          end
+
+          SolidLog.logger&.info("Checking for pending migrations in: #{paths.join(', ')}")
+
+          begin
+            # Create schema_migrations table if it doesn't exist
+            ensure_schema_migrations_table!
+
+            # Get currently applied migrations
+            applied_versions = get_applied_versions
+
+            # Find all migration files
+            migration_files = find_migration_files(paths)
+
+            # Filter to pending migrations
+            pending = migration_files.reject { |file| applied_versions.include?(version_from_file(file)) }
+
+            if pending.empty?
+              SolidLog.logger&.info("No pending migrations found")
+              return true
+            end
+
+            SolidLog.logger&.info("Found #{pending.size} pending migration(s)")
+
+            # Run each pending migration
+            pending.sort.each do |migration_file|
+              run_migration_file(migration_file)
+            end
+
+            SolidLog.logger&.info("Successfully ran #{pending.size} migration(s)")
+            true
+          rescue => e
+            SolidLog.logger&.error("Migration failed: #{e.message}")
+            SolidLog.logger&.error(e.backtrace.join("\n"))
+            false
+          end
+        end
+
+        # Check if there are pending migrations
+        # @param migration_paths [Array<String>] Paths to search for migrations (optional)
+        # @return [Boolean] true if pending migrations exist
+        def pending_migrations?(migration_paths: nil)
+          paths = migration_paths || default_migration_paths
+          paths = Array(paths).select { |p| Dir.exist?(p) }
+
+          return false if paths.empty?
+
+          applied_versions = get_applied_versions
+          migration_files = find_migration_files(paths)
+
+          migration_files.any? { |file| !applied_versions.include?(version_from_file(file)) }
+        end
+
+        private
+
+        # Default paths to search for migrations
+        def default_migration_paths
+          paths = []
+
+          # Rails app log_migrate directory
+          paths << Rails.root.join("db", "log_migrate") if defined?(Rails)
+
+          # Service directory
+          paths << File.expand_path("../../../../../solid_log-service/db/log_migrate", __FILE__)
+
+          # Core gem directory
+          paths << File.expand_path("../../../../db/log_migrate", __FILE__)
+
+          paths
+        end
+
+        # Ensure schema_migrations table exists
+        def ensure_schema_migrations_table!
+          ActiveRecord::Base.connection.execute(<<~SQL)
+            CREATE TABLE IF NOT EXISTS schema_migrations (
+              version VARCHAR(255) NOT NULL PRIMARY KEY
+            )
+          SQL
+        end
+
+        # Get list of applied migration versions
+        def get_applied_versions
+          ActiveRecord::Base.connection
+            .execute("SELECT version FROM schema_migrations")
+            .map { |row| row.is_a?(Hash) ? row["version"] : row[0] }
+            .to_set
+        end
+
+        # Find all migration files in given paths
+        def find_migration_files(paths)
+          paths.flat_map do |path|
+            Dir[File.join(path, "*.rb")]
+          end.uniq
+        end
+
+        # Extract version (timestamp) from migration filename
+        def version_from_file(filepath)
+          File.basename(filepath).match(/^(\d+)_/)[1]
+        end
+
+        # Run a single migration file
+        def run_migration_file(filepath)
+          version = version_from_file(filepath)
+          filename = File.basename(filepath, ".rb")
+
+          SolidLog.logger&.info("Running migration: #{filename}")
+
+          # Load and instantiate migration
+          load filepath
+          migration_name = filename.split("_", 2).last.camelize
+          migration_class = migration_name.constantize
+
+          # Run migration
+          migration = migration_class.new
+          ActiveRecord::Base.connection.transaction do
+            migration.migrate(:up)
+
+            # Record migration as applied
+            ActiveRecord::Base.connection.execute(
+              "INSERT INTO schema_migrations (version) VALUES ('#{version}')"
+            )
+          end
+
+          SolidLog.logger&.info("Completed migration: #{filename}")
+        end
+      end
+    end
+  end
+end
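
This is the auto-migrate hook the class comment mentions: a standalone process can check for and apply pending migrations before it starts consuming logs. A minimal boot sketch, assuming the host establishes its own ActiveRecord connection to the log database (the env var name is illustrative):

    require "active_record"

    ActiveRecord::Base.establish_connection(ENV.fetch("SOLID_LOG_DATABASE_URL"))

    # Apply anything pending in the default migration paths; bail out on failure.
    if SolidLog::MigrationRunner.pending_migrations?
      abort("log database migration failed") unless SolidLog::MigrationRunner.run_pending_migrations
    end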
@@ -2,7 +2,7 @@ module SolidLog
   module Core
     class RetentionService
       # Cleanup old entries based on retention policies
-      def self.cleanup(retention_days:, error_retention_days:)
+      def self.cleanup(retention_days:, error_retention_days:, max_entries: nil)
         stats = {
           entries_deleted: 0,
           raw_deleted: 0,
@@ -25,6 +25,17 @@ module SolidLog
           .where(level: %w[error fatal])
           .delete_all
 
+        # Enforce max entries limit (delete oldest entries beyond limit)
+        if max_entries && max_entries > 0
+          current_count = Entry.count
+          if current_count > max_entries
+            delete_count = current_count - max_entries
+            oldest_ids = Entry.order(:timestamp).limit(delete_count).pluck(:id)
+            deleted = Entry.where(id: oldest_ids).delete_all
+            stats[:entries_deleted] += deleted
+          end
+        end
+
         # Delete corresponding raw entries (keep unparsed for investigation)
         raw_ids = Entry.pluck(:raw_id).compact
         stats[:raw_deleted] = RawEntry
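
The new max_entries: cap composes with the age-based policies: age pruning runs first, then the oldest rows beyond the cap are removed, and both counts accumulate in stats[:entries_deleted]. An invocation sketch (the numbers and scheduling are up to the host app):

    # e.g. from a nightly retention task: keep 30 days of logs, 90 days
    # of errors, and never more than one million parsed entries.
    stats = SolidLog::Core::RetentionService.cleanup(
      retention_days: 30,
      error_retention_days: 90,
      max_entries: 1_000_000
    )
    SolidLog.logger.info("Retention pass: #{stats.inspect}")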
@@ -1,5 +1,5 @@
 module SolidLog
   module Core
-    VERSION = "0.1.0"
+    VERSION = "0.2.1"
   end
 end
@@ -2,6 +2,10 @@ require "solid_log/core/version"
 require "solid_log/core/configuration"
 require "solid_log/core/client"
 require "solid_log/silence_middleware"
+require "logger"
+
+# Load Railtie if Rails is available
+require "solid_log/railtie" if defined?(Rails::Railtie)
 
 # Database adapters
 require "solid_log/adapters/base_adapter"
@@ -22,6 +26,15 @@ require "solid_log/core/services/field_analyzer"
 require "solid_log/core/services/search_service"
 require "solid_log/core/services/correlation_service"
 require "solid_log/core/services/health_service"
+require "solid_log/core/services/migration_generator"
+require "solid_log/core/services/migration_runner"
+require "solid_log/core/services/batch_parsing_service"
+
+# Jobs
+require "solid_log/core/jobs/parse_job"
+require "solid_log/core/jobs/retention_job"
+require "solid_log/core/jobs/cache_cleanup_job"
+require "solid_log/core/jobs/field_analysis_job"
 
 # Models (explicit requires - no engine, no app/ directory)
 require "solid_log/models/record"
@@ -69,6 +82,19 @@ module SolidLog
       def silenced?
         Thread.current[:solid_log_silenced] == true
       end
+
+      # Logger singleton - works in Rails and non-Rails contexts
+      def logger
+        @logger ||= if defined?(Rails) && Rails.respond_to?(:logger) && Rails.logger
+          Rails.logger
+        else
+          Logger.new(STDOUT).tap { |log| log.level = Logger::INFO }
+        end
+      end
+
+      def logger=(logger)
+        @logger = logger
+      end
     end
   end
 end
@@ -95,6 +121,14 @@ module SolidLog
     def silenced?
       Core.silenced?
     end
+
+    def logger
+      Core.logger
+    end
+
+    def logger=(logger)
+      Core.logger = logger
+    end
   end
 
   # Alias service classes for easier access
@@ -103,4 +137,6 @@ module SolidLog
   SearchService = Core::SearchService
   CorrelationService = Core::CorrelationService
   HealthService = Core::HealthService
+  MigrationGenerator = Core::MigrationGenerator
+  MigrationRunner = Core::MigrationRunner
 end
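
With the Core singleton plus the top-level delegators above, SolidLog.logger resolves to Rails.logger inside a Rails app and to a STDOUT logger elsewhere, and the writer lets a host app redirect everything. A small usage sketch (the log path is an example, not a gem default):

    # Route SolidLog's own diagnostics away from the main app log.
    SolidLog.logger = Logger.new("log/solid_log.log", level: Logger::WARN)

    SolidLog.logger.warn("only warnings and above reach log/solid_log.log")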
@@ -40,6 +40,14 @@ module SolidLog
     # EXCEPT for error/fatal logs which flush immediately to prevent data loss on crash
     def write(message)
       return if @closed
+      return if SolidLog.silenced? # Skip if logging is silenced (anti-recursion)
+
+      # Skip SolidLog's own view rendering logs
+      if message.is_a?(String)
+        # Strip whitespace before checking pattern
+        trimmed = message.strip
+        return if trimmed.match?(/^Rendering (layout |partial )?.*solid_log/i)
+      end
 
       log_entry = parse_message(message)
       return unless log_entry # Skip if parsing failed
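
The silenced? check closes the recursion loop: BatchParsingService wraps its database work in SolidLog.without_logging, so the SQL logging that work generates is dropped here instead of being re-captured as new raw entries. without_logging itself is not shown in this diff; based on the thread-local flag that silenced? reads, a hypothetical reconstruction might look like:

    # Hypothetical reconstruction; the real helper ships in the gem.
    def without_logging
      previous = Thread.current[:solid_log_silenced]
      Thread.current[:solid_log_silenced] = true
      yield
    ensure
      Thread.current[:solid_log_silenced] = previous
    end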