solid_log-core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +7 -0
  2. data/MIT-LICENSE +20 -0
  3. data/README.md +176 -0
  4. data/Rakefile +11 -0
  5. data/db/log_migrate/20251222000001_create_solid_log_raw.rb +15 -0
  6. data/db/log_migrate/20251222000002_create_solid_log_entries.rb +29 -0
  7. data/db/log_migrate/20251222000004_create_solid_log_fields.rb +17 -0
  8. data/db/log_migrate/20251222000005_create_solid_log_tokens.rb +13 -0
  9. data/db/log_migrate/20251222000006_create_solid_log_facet_cache.rb +13 -0
  10. data/db/log_migrate/20251222000007_create_solid_log_fts_triggers.rb +41 -0
  11. data/db/log_structure_mysql.sql +96 -0
  12. data/db/log_structure_postgresql.sql +118 -0
  13. data/db/log_structure_sqlite.sql +123 -0
  14. data/lib/generators/solid_log/install/install_generator.rb +134 -0
  15. data/lib/generators/solid_log/install/templates/solid_log.rb.tt +133 -0
  16. data/lib/solid_log/adapters/adapter_factory.rb +34 -0
  17. data/lib/solid_log/adapters/base_adapter.rb +88 -0
  18. data/lib/solid_log/adapters/mysql_adapter.rb +163 -0
  19. data/lib/solid_log/adapters/postgresql_adapter.rb +141 -0
  20. data/lib/solid_log/adapters/sqlite_adapter.rb +149 -0
  21. data/lib/solid_log/core/client/buffer.rb +112 -0
  22. data/lib/solid_log/core/client/configuration.rb +31 -0
  23. data/lib/solid_log/core/client/http.rb +89 -0
  24. data/lib/solid_log/core/client/lograge_formatter.rb +99 -0
  25. data/lib/solid_log/core/client/retry_handler.rb +48 -0
  26. data/lib/solid_log/core/client.rb +138 -0
  27. data/lib/solid_log/core/configuration.rb +60 -0
  28. data/lib/solid_log/core/services/correlation_service.rb +74 -0
  29. data/lib/solid_log/core/services/field_analyzer.rb +108 -0
  30. data/lib/solid_log/core/services/health_service.rb +151 -0
  31. data/lib/solid_log/core/services/retention_service.rb +72 -0
  32. data/lib/solid_log/core/services/search_service.rb +269 -0
  33. data/lib/solid_log/core/version.rb +5 -0
  34. data/lib/solid_log/core.rb +106 -0
  35. data/lib/solid_log/direct_logger.rb +197 -0
  36. data/lib/solid_log/models/entry.rb +185 -0
  37. data/lib/solid_log/models/facet_cache.rb +58 -0
  38. data/lib/solid_log/models/field.rb +100 -0
  39. data/lib/solid_log/models/raw_entry.rb +33 -0
  40. data/lib/solid_log/models/record.rb +5 -0
  41. data/lib/solid_log/models/token.rb +61 -0
  42. data/lib/solid_log/parser.rb +179 -0
  43. data/lib/solid_log/silence_middleware.rb +34 -0
  44. data/lib/solid_log-core.rb +2 -0
  45. metadata +244 -0
@@ -0,0 +1,149 @@
1
module SolidLog
  module Adapters
    # SQLite-specific adapter: FTS5 + LIKE search, batch claiming without
    # SKIP LOCKED, and SQLite tuning PRAGMAs.
    class SqliteAdapter < BaseAdapter
      # Search entries matching +query+ via both FTS5 (token matching) and
      # LIKE (substring matching), de-duplicated with UNION.
      # Returns an Entry relation built over the combined result set.
      # NOTE(review): +base_scope+ is accepted for interface parity with the
      # other adapters but is not applied here — confirm intended.
      def search(query, base_scope = Entry)
        sanitized_query = Entry.sanitize_sql_like(query)
        table_name = Entry.table_name

        # LIKE query for partial (substring) matching; pattern is quoted
        # inside case_insensitive_like.
        like_condition = case_insensitive_like("#{table_name}.message", "%#{sanitized_query}%")
        like_sql = "SELECT #{table_name}.* FROM #{table_name} WHERE #{like_condition}"

        # FTS query for full-text matching.
        fts_fragment = fts_search(query)
        fts_sql = "SELECT #{table_name}.* FROM #{table_name} #{fts_fragment}"

        # UNION (not UNION ALL) removes duplicate rows matched by both arms.
        union_sql = "#{fts_sql} UNION #{like_sql}"
        Entry.from("(#{union_sql}) AS #{table_name}")
      end

      # SQL fragment joining the FTS5 shadow table and applying MATCH.
      # The raw query is quoted to prevent SQL injection.
      def fts_search(query)
        sanitized_query = connection.quote(query)
        <<~SQL.squish
          JOIN solid_log_entries_fts
          ON solid_log_entries.id = solid_log_entries_fts.rowid
          WHERE solid_log_entries_fts MATCH #{sanitized_query}
        SQL
      end

      # SQLite LIKE: COLLATE NOCASE gives case-insensitive matching.
      def case_insensitive_like(column, pattern)
        "#{column} LIKE #{connection.quote(pattern)} COLLATE NOCASE"
      end

      # Claim up to +batch_size+ unparsed raw entries.
      # SQLite has no SKIP LOCKED, so lock-select the IDs, mark them parsed,
      # then return the claimed rows — all inside one transaction.
      def claim_batch(batch_size)
        RawEntry.transaction do
          ids = RawEntry.where(parsed: false)
                        .order(received_at: :asc)
                        .limit(batch_size)
                        .lock
                        .pluck(:id)

          # `next` (not `return`) so the transaction block completes and
          # commits normally.
          next [] if ids.empty?

          # Mark as parsed immediately so concurrent workers skip these rows.
          RawEntry.where(id: ids).update_all(parsed: true, parsed_at: Time.current)

          RawEntry.where(id: ids).to_a
        end
      end

      # SQL expression extracting +field_name+ from a JSON text column.
      def extract_json_field(column, field_name)
        "json_extract(#{column}, '$.#{field_name}')"
      end

      # SQL returning the distinct non-NULL values of +column+, sorted.
      def facet_values(column)
        "SELECT DISTINCT #{column} FROM solid_log_entries WHERE #{column} IS NOT NULL ORDER BY #{column}"
      end

      # Run SQLite maintenance: query-planner stats and WAL truncation.
      def optimize!
        execute("PRAGMA optimize")
        execute("PRAGMA wal_checkpoint(TRUNCATE)")
      end

      # Size in bytes of the SQLite database file; 0 when the path is
      # unknown, in-memory, or unreadable.
      def database_size
        config = connection.instance_variable_get(:@config) || {}
        db_path = config[:database].to_s
        return 0 if db_path.empty? || !File.file?(db_path)

        File.size(db_path)
      rescue SystemCallError
        0
      end

      def supports_skip_locked?
        false
      end

      def supports_native_json?
        false
      end

      def supports_full_text_search?
        true # FTS5
      end

      # Apply recommended PRAGMAs for a write-heavy log store.
      def configure!
        execute("PRAGMA journal_mode=WAL")
        execute("PRAGMA synchronous=NORMAL")
        execute("PRAGMA busy_timeout=5000")
        execute("PRAGMA cache_size=-64000") # 64MB
        execute("PRAGMA temp_store=MEMORY")
        execute("PRAGMA mmap_size=268435456") # 256MB
      rescue ActiveRecord::StatementInvalid => e
        # Some pragmas may not be supported in all SQLite versions
        Rails.logger.warn("SQLite configuration warning: #{e.message}")
      end

      # SQL expression converting a timestamp column to a Unix epoch.
      def timestamp_to_epoch_sql(column)
        "strftime('%s', #{column})"
      end

      # External-content FTS5 table mirroring solid_log_entries.
      def create_fts_table_sql
        <<~SQL
          CREATE VIRTUAL TABLE IF NOT EXISTS solid_log_entries_fts USING fts5(
            message,
            extra_text,
            content='solid_log_entries',
            content_rowid='id'
          )
        SQL
      end

      # Triggers keeping the FTS index in sync with the base table.
      # External-content FTS5 tables must NOT be UPDATEd/DELETEd directly;
      # stale index entries are removed with the special 'delete' INSERT
      # (SQLite FTS5 docs, "External Content Tables").
      def create_fts_triggers_sql
        [
          # Insert trigger
          <<~SQL,
            CREATE TRIGGER IF NOT EXISTS solid_log_entries_fts_insert
            AFTER INSERT ON solid_log_entries
            BEGIN
              INSERT INTO solid_log_entries_fts(rowid, message, extra_text)
              VALUES (new.id, new.message, new.extra_fields);
            END
          SQL

          # Update trigger: drop the old index entry, then add the new one.
          <<~SQL,
            CREATE TRIGGER IF NOT EXISTS solid_log_entries_fts_update
            AFTER UPDATE ON solid_log_entries
            BEGIN
              INSERT INTO solid_log_entries_fts(solid_log_entries_fts, rowid, message, extra_text)
              VALUES ('delete', old.id, old.message, old.extra_fields);
              INSERT INTO solid_log_entries_fts(rowid, message, extra_text)
              VALUES (new.id, new.message, new.extra_fields);
            END
          SQL

          # Delete trigger: drop the index entry for the deleted row.
          <<~SQL
            CREATE TRIGGER IF NOT EXISTS solid_log_entries_fts_delete
            AFTER DELETE ON solid_log_entries
            BEGIN
              INSERT INTO solid_log_entries_fts(solid_log_entries_fts, rowid, message, extra_text)
              VALUES ('delete', old.id, old.message, old.extra_fields);
            END
          SQL
        ]
      end
    end
  end
end
@@ -0,0 +1,112 @@
1
+ require "thread"
2
+
3
module SolidLog
  module Core
    # Thread-safe in-memory buffer that batches log entries and hands them
    # to an HTTP sender, either when batch_size is reached or on a periodic
    # background flush.
    class Buffer
      attr_reader :queue, :mutex, :flush_thread

      def initialize(http_sender:, batch_size: 100, flush_interval: 5, max_queue_size: 10_000)
        @http_sender = http_sender
        @batch_size = batch_size
        @flush_interval = flush_interval
        @max_queue_size = max_queue_size

        @queue = []
        @mutex = Mutex.new
        @flush_thread = nil
        @running = false
      end

      # Add an entry to the buffer, dropping the oldest entry when full.
      # Triggers a flush once batch_size entries are queued.
      def add(entry)
        batch_ready = false

        @mutex.synchronize do
          # Drop oldest entry if queue is full (bounded memory).
          @queue.shift if @queue.size >= @max_queue_size

          @queue << normalize_entry(entry)
          batch_ready = @queue.size >= @batch_size
        end

        # BUG FIX: flush OUTSIDE the critical section. Ruby's Mutex is
        # non-reentrant, so calling flush (which also synchronizes on
        # @mutex) from inside the synchronize block raised
        # ThreadError ("deadlock; recursive locking").
        flush if batch_ready
      end

      # Flush up to batch_size pending entries.
      def flush
        entries_to_send = []

        @mutex.synchronize do
          return if @queue.empty?

          entries_to_send = @queue.shift(@batch_size)
        end

        # Network I/O happens outside the lock so producers are not blocked.
        send_entries(entries_to_send) if entries_to_send.any?
      end

      # Start automatic flushing in a background thread (idempotent).
      def start_auto_flush
        return if @running

        @running = true
        @flush_thread = Thread.new do
          loop do
            sleep @flush_interval
            break unless @running

            begin
              flush
            rescue => e
              Rails.logger.error "SolidLog::Client: Auto-flush error: #{e.message}" if defined?(Rails)
            end
          end
        end
      end

      # Stop automatic flushing; wakes the sleeping thread so shutdown is
      # prompt rather than waiting out the full flush_interval.
      def stop_auto_flush
        @running = false
        thread = @flush_thread
        if thread
          begin
            thread.wakeup
          rescue ThreadError
            # Thread already finished — nothing to wake.
          end
          thread.join
        end
        @flush_thread = nil
      end

      # Current number of buffered entries.
      def size
        @mutex.synchronize { @queue.size }
      end

      private

      # Deliver a batch; on failure, re-queue the entries for retry and
      # trim the queue back under max_queue_size.
      def send_entries(entries)
        @http_sender.send_batch(entries)
      rescue => e
        Rails.logger.error "SolidLog::Client: Failed to send batch: #{e.message}" if defined?(Rails)

        @mutex.synchronize do
          @queue.unshift(*entries)

          # Trim queue if it exceeds max size
          @queue.shift while @queue.size > @max_queue_size
        end
      end

      # Coerce the entry to a Hash and stamp it.
      def normalize_entry(entry)
        # BUG FIX: work on a copy — Hash#to_h returns self for plain hashes,
        # so the original code mutated the caller's hash.
        entry = entry.respond_to?(:to_h) ? entry.to_h.dup : entry

        # Add timestamp if missing.
        entry[:timestamp] ||= Time.current.iso8601

        entry
      end
    end
  end
end
@@ -0,0 +1,31 @@
1
module SolidLog
  module Core
    # Client-side settings for shipping logs to a SolidLog service.
    class ClientConfiguration
      # Connection settings (both required for a valid configuration).
      attr_accessor :service_url, :token

      # Default metadata attached to every entry.
      attr_accessor :app_name, :environment

      # Buffering / delivery tuning.
      attr_accessor :batch_size, :flush_interval, :max_queue_size, :retry_max_attempts

      # Master switch for the client.
      attr_accessor :enabled

      def initialize
        @service_url = nil
        @token = nil
        @app_name = "app"
        @environment = "production"
        @batch_size = 100
        @flush_interval = 5 # seconds
        @max_queue_size = 10_000
        @retry_max_attempts = 3
        @enabled = true
      end

      # Usable only when both a target URL and an auth token are set.
      def valid?
        service_url.present? && token.present?
      end
    end
  end
end
@@ -0,0 +1,89 @@
1
+ require "net/http"
2
+ require "json"
3
+ require "uri"
4
+
5
module SolidLog
  module Core
    # Posts batches of log entries to the SolidLog ingestion endpoint,
    # delegating retry policy to the injected retry handler.
    class HttpSender
      def initialize(url:, token:, retry_handler:)
        @url = url
        @token = token
        @retry_handler = retry_handler
      end

      # Send a batch of entries to the ingestion endpoint.
      # No-op for an empty batch; retried per the retry handler's policy.
      def send_batch(entries)
        return if entries.empty?

        @retry_handler.with_retry do
          perform_request(entries)
        end
      end

      private

      # Build and execute one HTTP POST with conservative timeouts.
      def perform_request(entries)
        uri = URI.parse(@url)
        http = Net::HTTP.new(uri.host, uri.port)
        http.use_ssl = (uri.scheme == "https")
        http.open_timeout = 5
        http.read_timeout = 10

        # BUG FIX: uri.path is "" for URLs like "http://host", which
        # Net::HTTP rejects, and a bare path drops any query string.
        # request_uri returns "/" for an empty path and preserves the query.
        request = build_request(uri.request_uri, entries)
        response = http.request(request)

        handle_response(response, entries)
      end

      # Single entries go as plain JSON; batches as newline-delimited JSON.
      def build_request(path, entries)
        request = Net::HTTP::Post.new(path)
        request["Authorization"] = "Bearer #{@token}"
        request["Content-Type"] = if entries.size == 1
          "application/json"
        else
          "application/x-ndjson"
        end

        request.body = if entries.size == 1
          JSON.generate(entries.first)
        else
          entries.map { |e| JSON.generate(e) }.join("\n")
        end

        request
      end

      # Map the HTTP status to success / non-retryable / retryable outcomes.
      def handle_response(response, entries)
        case response.code.to_i
        when 200..299
          # Success
          log_debug "Successfully sent #{entries.size} entries"
        when 400..499
          # Client error - don't retry
          log_error "Client error (#{response.code}): #{response.body}"
          raise ClientError, "HTTP #{response.code}: #{response.body}"
        when 500..599
          # Server error - retry
          log_error "Server error (#{response.code}): #{response.body}"
          raise ServerError, "HTTP #{response.code}: #{response.body}"
        else
          log_error "Unexpected response (#{response.code}): #{response.body}"
          raise UnexpectedError, "HTTP #{response.code}: #{response.body}"
        end
      end

      def log_debug(message)
        Rails.logger.debug "SolidLog::Client: #{message}" if defined?(Rails)
      end

      def log_error(message)
        Rails.logger.error "SolidLog::Client: #{message}" if defined?(Rails)
      end

      # Error classes — ClientError is non-retryable; the others retryable.
      class ClientError < StandardError; end
      class ServerError < StandardError; end
      class UnexpectedError < StandardError; end
    end
  end
end
@@ -0,0 +1,99 @@
1
module SolidLog
  module Core
    # Adapts Lograge's structured request data into SolidLog entries:
    # infers a level, builds a summary message, forwards to the client,
    # and returns JSON for any file-based Rails logger.
    class LogrageFormatter
      # Format one Lograge event hash.
      def call(data)
        payload = {
          timestamp: Time.current.iso8601,
          level: infer_level(data),
          message: build_message(data),
          app: Client.configuration.app_name,
          env: Client.configuration.environment
        }.merge(extract_lograge_fields(data))

        # Ship the structured entry to the client buffer.
        Client.log(payload)

        # Return JSON for Rails logger (if also logging to file).
        JSON.generate(payload)
      end

      private

      # Derive a log level from the response status / exception presence.
      def infer_level(data)
        status = data[:status].to_i
        return "error" if status >= 500
        return "warn" if status >= 400
        return "error" if data[:exception].present?

        "info"
      end

      # Human-readable one-line summary, e.g. "GET /users (200) 12ms Users#index".
      def build_message(data)
        segments = []
        segments << "#{data[:method]} #{data[:path]}" if data[:method] && data[:path]
        segments << "(#{data[:status]})" if data[:status]
        segments << "#{data[:duration]}ms" if data[:duration]
        segments << "#{data[:controller]}##{data[:action]}" if data[:controller] && data[:action]
        segments.join(" ")
      end

      # Copy the well-known Lograge keys (renaming a few), then pass any
      # remaining custom fields through untouched.
      def extract_lograge_fields(data)
        renames = {
          method: :method,
          path: :path,
          controller: :controller,
          action: :action,
          status: :status_code,
          duration: :duration,
          request_id: :request_id, # correlation id
          view: :view_runtime,     # view rendering time
          db: :db_runtime,
          params: :params,         # be careful with sensitive data
          remote_ip: :ip,
          user_agent: :user_agent
        }

        fields = {}
        renames.each do |source_key, target_key|
          value = data[source_key]
          fields[target_key] = value if value
        end

        # Any other custom fields (nil values skipped).
        data.except(*renames.keys).each do |key, value|
          fields[key] = value unless value.nil?
        end

        fields
      end
    end
  end
end
@@ -0,0 +1,48 @@
1
module SolidLog
  module Core
    # Retries a block with exponential backoff, except for non-recoverable
    # client (4xx) errors, which are re-raised immediately.
    class RetryHandler
      def initialize(max_attempts: 3)
        @max_attempts = max_attempts
      end

      # Execute the block with exponential-backoff retry.
      # Raises the last error once max_attempts is exhausted.
      def with_retry(&block)
        attempts = 0

        begin
          attempts += 1
          yield
        rescue HttpSender::ClientError => e
          # BUG FIX: this rescue must precede the StandardError rescue.
          # ClientError IS a StandardError, so the original ordering made
          # this branch unreachable and wrongly retried 4xx responses
          # that can never succeed.
          log_error "Client error, not retrying: #{e.message}"
          raise
        rescue StandardError => e
          if attempts < @max_attempts
            sleep_time = exponential_backoff(attempts)
            log_retry(attempts, sleep_time, e)
            sleep(sleep_time)
            retry
          else
            log_error "Max retry attempts (#{@max_attempts}) exceeded: #{e.message}"
            raise
          end
        end
      end

      private

      # 1s, 2s, 4s, 8s, ... capped at 30 seconds.
      def exponential_backoff(attempt)
        [2**(attempt - 1), 30].min
      end

      def log_retry(attempt, sleep_time, error)
        Rails.logger.warn "SolidLog::Client: Retry attempt #{attempt}/#{@max_attempts} after #{sleep_time}s (#{error.class}: #{error.message})" if defined?(Rails)
      end

      def log_error(message)
        Rails.logger.error "SolidLog::Client: #{message}" if defined?(Rails)
      end
    end
  end
end
@@ -0,0 +1,138 @@
1
+ require_relative "client/configuration"
2
+ require_relative "client/buffer"
3
+ require_relative "client/http"
4
+ require_relative "client/retry_handler"
5
+ require_relative "client/lograge_formatter"
6
+
7
module SolidLog
  module Core
    # Singleton-style client facade: holds configuration and memoized
    # collaborators (buffer, HTTP sender, retry handler) and exposes the
    # logging lifecycle (log / flush / start / stop).
    class Client
      class << self
        attr_writer :configuration

        def configuration
          @configuration ||= ClientConfiguration.new
        end

        # Yield the configuration for mutation, then reset memoized
        # collaborators so the new settings take effect.
        def configure
          yield(configuration)
          initialize_client
        end

        # Logger instance for Lograge integration.
        def logger
          @logger ||= BufferedLogger.new
        end

        # Queue a single entry; no-op when the client is disabled.
        def log(entry)
          return unless configuration.enabled

          buffer.add(entry)
        end

        # Flush pending logs immediately (one batch).
        def flush
          buffer.flush
        end

        # Start background flushing (if not already started).
        def start
          buffer.start_auto_flush
        end

        # Stop background flushing and flush pending logs.
        # NOTE(review): flush drains at most one batch; entries beyond
        # batch_size may remain queued at shutdown — confirm intended.
        def stop
          buffer.stop_auto_flush
          flush
        end

        private

        def buffer
          @buffer ||= Buffer.new(
            http_sender: http_sender,
            batch_size: configuration.batch_size,
            flush_interval: configuration.flush_interval,
            max_queue_size: configuration.max_queue_size
          )
        end

        def http_sender
          @http_sender ||= HttpSender.new(
            url: configuration.service_url,
            token: configuration.token,
            retry_handler: retry_handler
          )
        end

        def retry_handler
          @retry_handler ||= RetryHandler.new(
            max_attempts: configuration.retry_max_attempts
          )
        end

        # Drop memoized collaborators when configuration changes.
        def initialize_client
          @buffer = nil
          @http_sender = nil
          @retry_handler = nil
          @logger = nil
        end
      end
    end

    # Minimal Logger-compatible shim for Lograge integration: accepts
    # strings (JSON or plain text) or hashes and forwards them to Client.log.
    class BufferedLogger
      def initialize
        @client = Client
      end

      def info(message)
        log_message(message, level: "info")
      end

      def debug(message)
        log_message(message, level: "debug")
      end

      def warn(message)
        log_message(message, level: "warn")
      end

      def error(message)
        log_message(message, level: "error")
      end

      def fatal(message)
        log_message(message, level: "fatal")
      end

      def <<(message)
        log_message(message, level: "info")
      end

      private

      def log_message(message, level:)
        entry = coerce_entry(message, level)

        # Add default fields
        entry[:timestamp] ||= Time.current.iso8601
        entry[:app] ||= Client.configuration.app_name
        entry[:env] ||= Client.configuration.environment

        @client.log(entry)
      end

      # Convert the incoming message into a Hash entry.
      # BUG FIX: JSON.parse can return any JSON value (e.g. "123" parses to
      # an Integer), and the original code called #merge on it, raising
      # NoMethodError. Only merge when we actually parsed an object;
      # otherwise wrap the raw string as the message.
      def coerce_entry(message, level)
        return message.merge(level: level) unless message.is_a?(String)

        parsed = begin
          JSON.parse(message)
        rescue JSON::ParserError
          nil
        end

        if parsed.is_a?(Hash)
          parsed.merge(level: level)
        else
          { message: message, level: level }
        end
      end
    end
  end
end