solid_log-core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +7 -0
  2. data/MIT-LICENSE +20 -0
  3. data/README.md +176 -0
  4. data/Rakefile +11 -0
  5. data/db/log_migrate/20251222000001_create_solid_log_raw.rb +15 -0
  6. data/db/log_migrate/20251222000002_create_solid_log_entries.rb +29 -0
  7. data/db/log_migrate/20251222000004_create_solid_log_fields.rb +17 -0
  8. data/db/log_migrate/20251222000005_create_solid_log_tokens.rb +13 -0
  9. data/db/log_migrate/20251222000006_create_solid_log_facet_cache.rb +13 -0
  10. data/db/log_migrate/20251222000007_create_solid_log_fts_triggers.rb +41 -0
  11. data/db/log_structure_mysql.sql +96 -0
  12. data/db/log_structure_postgresql.sql +118 -0
  13. data/db/log_structure_sqlite.sql +123 -0
  14. data/lib/generators/solid_log/install/install_generator.rb +134 -0
  15. data/lib/generators/solid_log/install/templates/solid_log.rb.tt +133 -0
  16. data/lib/solid_log/adapters/adapter_factory.rb +34 -0
  17. data/lib/solid_log/adapters/base_adapter.rb +88 -0
  18. data/lib/solid_log/adapters/mysql_adapter.rb +163 -0
  19. data/lib/solid_log/adapters/postgresql_adapter.rb +141 -0
  20. data/lib/solid_log/adapters/sqlite_adapter.rb +149 -0
  21. data/lib/solid_log/core/client/buffer.rb +112 -0
  22. data/lib/solid_log/core/client/configuration.rb +31 -0
  23. data/lib/solid_log/core/client/http.rb +89 -0
  24. data/lib/solid_log/core/client/lograge_formatter.rb +99 -0
  25. data/lib/solid_log/core/client/retry_handler.rb +48 -0
  26. data/lib/solid_log/core/client.rb +138 -0
  27. data/lib/solid_log/core/configuration.rb +60 -0
  28. data/lib/solid_log/core/services/correlation_service.rb +74 -0
  29. data/lib/solid_log/core/services/field_analyzer.rb +108 -0
  30. data/lib/solid_log/core/services/health_service.rb +151 -0
  31. data/lib/solid_log/core/services/retention_service.rb +72 -0
  32. data/lib/solid_log/core/services/search_service.rb +269 -0
  33. data/lib/solid_log/core/version.rb +5 -0
  34. data/lib/solid_log/core.rb +106 -0
  35. data/lib/solid_log/direct_logger.rb +197 -0
  36. data/lib/solid_log/models/entry.rb +185 -0
  37. data/lib/solid_log/models/facet_cache.rb +58 -0
  38. data/lib/solid_log/models/field.rb +100 -0
  39. data/lib/solid_log/models/raw_entry.rb +33 -0
  40. data/lib/solid_log/models/record.rb +5 -0
  41. data/lib/solid_log/models/token.rb +61 -0
  42. data/lib/solid_log/parser.rb +179 -0
  43. data/lib/solid_log/silence_middleware.rb +34 -0
  44. data/lib/solid_log-core.rb +2 -0
  45. metadata +244 -0
data/lib/generators/solid_log/install/install_generator.rb
@@ -0,0 +1,134 @@
+ require "rails/generators/base"
+
+ module SolidLog
+   module Generators
+     class InstallGenerator < Rails::Generators::Base
+       source_root File.expand_path("templates", __dir__)
+
+       class_option :database, type: :string, default: nil, desc: "Database adapter (sqlite, postgresql, mysql)"
+
+       desc "Install SolidLog configuration, migrations, and database structure"
+
+       def copy_migrations
+         say "Copying SolidLog migrations...", :green
+
+         # Create log_migrate directory
+         empty_directory "db/log_migrate"
+
+         # Copy all migrations from solid_log-core
+         migrations_path = File.expand_path("../../../../db/log_migrate", __dir__)
+         Dir[File.join(migrations_path, "*.rb")].each do |file|
+           copy_file file, "db/log_migrate/#{File.basename(file)}"
+         end
+       end
+
+       def copy_structure_file
+         database = options[:database] || detect_database_adapter
+
+         say "Copying #{database} database structure file...", :green
+
+         structure_file = case database
+         when "sqlite", "sqlite3"
+           "log_structure_sqlite.sql"
+         when "postgresql", "postgres", "pg"
+           "log_structure_postgresql.sql"
+         when "mysql", "mysql2", "trilogy"
+           "log_structure_mysql.sql"
+         else
+           say "Unknown database: #{database}, skipping structure file", :yellow
+           return
+         end
+
+         source_path = File.expand_path("../../../../db/#{structure_file}", __dir__)
+         if File.exist?(source_path)
+           copy_file source_path, "db/log_structure.sql"
+         else
+           say "Structure file not found: #{structure_file}", :red
+         end
+       end
+
+       def create_initializer
+         say "Creating initializer...", :green
+         template "solid_log.rb.tt", "config/initializers/solid_log.rb"
+       end
+
+       def show_instructions
+         say "\n" + "="*80, :green
+         say "SolidLog Installation Complete! 🎉", :green
+         say "="*80 + "\n", :green
+
+         say "Next steps:", :yellow
+         say ""
+         say "1. Configure your database in config/database.yml:", :cyan
+         say "   Add a :log database connection. Example:"
+         say ""
+         say "   production:"
+         say "     primary:"
+         say "       adapter: sqlite3"
+         say "       database: storage/production.sqlite3"
+         say "     log:"
+         say "       adapter: sqlite3"
+         say "       database: storage/production_log.sqlite3"
+         say "       migrations_paths: db/log_migrate"
+         say ""
+         say "2. Create databases and run migrations:", :cyan
+         say "   rails db:create"
+         say "   rails db:migrate"
+         say ""
+         say "3. (Alternative) Load structure file:", :cyan
+         say "   rails db:create"
+         say "   rails db:schema:load SCHEMA=db/log_structure.sql DATABASE=log"
+         say ""
+         say "4. Enable WAL mode for SQLite (HIGHLY RECOMMENDED):", :cyan
+         say "   Already configured in config/initializers/solid_log.rb"
+         say "   Provides 3.4x faster performance for crash-safe logging!"
+         say ""
+         say "5. Configure DirectLogger for your app (recommended):", :cyan
+         say "   See config/initializers/solid_log.rb for examples"
+         say "   DirectLogger is 9x faster than individual inserts"
+         say ""
+         say "6. Create an API token:", :cyan
+         say "   rails solid_log:create_token[\"Production API\"]"
+         say ""
+         say "7. Mount the UI (if using solid_log-ui):", :cyan
+         say "   Add to config/routes.rb:"
+         say "     mount SolidLog::UI::Engine => '/admin/logs'"
+         say ""
+
+         say "Useful commands:", :yellow
+         say "  rails solid_log:stats          # View log statistics"
+         say "  rails solid_log:health         # Check system health"
+         say "  rails solid_log:list_tokens    # List API tokens"
+         say "  rails solid_log:parse_logs     # Parse raw logs"
+         say "  rails solid_log:retention[30]  # Clean up old logs"
+         say ""
+         say "Documentation:", :green
+         say "  README:     README.md"
+         say "  Quickstart: QUICKSTART.md"
+         say "  Benchmarks: solid_log-core/BENCHMARK_RESULTS.md"
+         say "="*80 + "\n", :green
+       end
+
+       private
+
+       def detect_database_adapter
+         if defined?(ActiveRecord::Base)
+           adapter = ActiveRecord::Base.connection_db_config.adapter rescue nil
+           return adapter if adapter
+         end
+
+         # Try to read from database.yml
+         if File.exist?("config/database.yml")
+           require "yaml"
+           db_config = YAML.load_file("config/database.yml")
+           env = ENV["RAILS_ENV"] || "development"
+           adapter = db_config.dig(env, "adapter") || db_config.dig(env, "primary", "adapter")
+           return adapter if adapter
+         end
+
+         # Default to sqlite
+         "sqlite3"
+       end
+     end
+   end
+ end
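The generator above is normally run from the command line, but it can also be invoked programmatically (for example in a setup script or test). A minimal sketch, assuming the gem is loaded in a Rails app; under Rails' usual naming conventions SolidLog::Generators::InstallGenerator should resolve to the "solid_log:install" namespace, and the "--database" argument maps to the class_option defined above:

    # Hedged sketch: programmatic equivalent of
    #   bin/rails generate solid_log:install --database=postgresql
    require "rails/generators"

    Rails::Generators.invoke("solid_log:install", ["--database=postgresql"])

This copies the migrations into db/log_migrate, drops the matching db/log_structure.sql, and writes config/initializers/solid_log.rb from the template shown in the next hunk.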
data/lib/generators/solid_log/install/templates/solid_log.rb.tt
@@ -0,0 +1,133 @@
+ # SolidLog Configuration
+ SolidLog::Core.configure do |config|
+   # Retention policies
+   config.retention_days = 30        # Keep regular logs for 30 days
+   config.error_retention_days = 90  # Keep errors for 90 days
+
+   # Parsing & ingestion
+   config.max_batch_size = 100       # Logs per batch insert
+   config.parser_batch_size = 200    # Parser job batch size
+
+   # Performance
+   config.facet_cache_ttl = 5.minutes # Cache filter options for 5 min
+ end
+
+ # Enable WAL mode for SQLite (HIGHLY RECOMMENDED)
+ # Provides 3.4x faster eager flush performance (16,882 vs 4,923 logs/sec)
+ # Skip this if using PostgreSQL or MySQL
+ if defined?(ActiveRecord::Base)
+   begin
+     ActiveRecord::Base.connected_to(role: :log) do
+       adapter = ActiveRecord::Base.connection_db_config.adapter
+       if adapter == "sqlite3"
+         ActiveRecord::Base.connection.execute("PRAGMA journal_mode=WAL")
+         ActiveRecord::Base.connection.execute("PRAGMA synchronous=NORMAL")
+         Rails.logger.info "SolidLog: WAL mode enabled for SQLite"
+       end
+     end
+   rescue => e
+     Rails.logger.warn "SolidLog: Could not enable WAL mode: #{e.message}"
+   end
+ end
+
+ # ============================================================================
+ # DirectLogger Configuration (RECOMMENDED for parent app logging)
+ # ============================================================================
+ #
+ # DirectLogger writes logs directly to the database for maximum performance.
+ # It's 9x faster than individual inserts and 67x faster than HTTP logging.
+ #
+ # Performance with WAL mode:
+ #   - With crash safety:    16,882 logs/sec
+ #   - Without crash safety: 56,660 logs/sec
+ #
+ # See: solid_log-core/BENCHMARK_RESULTS.md for detailed benchmarks
+
+ Rails.application.configure do
+   # Option 1: Use DirectLogger with Lograge (recommended)
+   #
+   # config.lograge.enabled = true
+   # config.lograge.formatter = Lograge::Formatters::Json.new
+   #
+   # config.lograge.logger = ActiveSupport::Logger.new(
+   #   SolidLog::DirectLogger.new(
+   #     batch_size: 100,                      # Flush after 100 logs
+   #     flush_interval: 5,                    # Or after 5 seconds
+   #     eager_flush_levels: [:error, :fatal]  # Flush errors immediately (crash safety)
+   #   )
+   # )
+
+   # Option 2: Use DirectLogger with standard Rails logger
+   #
+   # config.logger = ActiveSupport::Logger.new(
+   #   SolidLog::DirectLogger.new
+   # )
+
+   # Option 3: Use HTTP Logger (for external services)
+   #
+   # For services without direct database access, use HTTP:
+   #
+   # config.lograge.enabled = true
+   # config.lograge.formatter = Lograge::Formatters::Json.new
+   # config.lograge.logger = ActiveSupport::Logger.new(
+   #   SolidLog::HttpLogger.new(
+   #     url: ENV['SOLIDLOG_URL'] || "http://localhost:3000/admin/logs/api/v1/ingest",
+   #     token: ENV['SOLIDLOG_TOKEN']
+   #   )
+   # )
+ end
+
+ # ============================================================================
+ # DirectLogger Crash Safety
+ # ============================================================================
+ #
+ # By default, DirectLogger flushes error/fatal logs IMMEDIATELY to prevent
+ # losing the logs that explain WHY your app crashed.
+ #
+ # How it works:
+ #   - info/debug/warn logs: Buffered (fast)
+ #   - error/fatal logs: Flushed immediately (safe)
+ #   - When error occurs: All buffered logs + error log flushed together
+ #
+ # Example:
+ #   10:00:00 - info: Request started (buffered)
+ #   10:00:01 - info: Processing params (buffered)
+ #   10:00:02 - ERROR: Database connection lost (FLUSHES ALL 3 LOGS IMMEDIATELY)
+ #   [App crashes - but logs are safe!]
+ #
+ # Configuration options:
+ #
+ #   # Maximum safety (flush everything)
+ #   SolidLog::DirectLogger.new(
+ #     batch_size: 10,
+ #     flush_interval: 1,
+ #     eager_flush_levels: [:debug, :info, :warn, :error, :fatal]
+ #   )
+ #
+ #   # Maximum performance (risky - may lose crash logs!)
+ #   SolidLog::DirectLogger.new(
+ #     batch_size: 500,
+ #     flush_interval: 30,
+ #     eager_flush_levels: []  # Disable eager flush
+ #   )
+ #
+ #   # Recommended balance (default)
+ #   SolidLog::DirectLogger.new(
+ #     batch_size: 100,
+ #     flush_interval: 5,
+ #     eager_flush_levels: [:error, :fatal]  # Default
+ #   )
+
+ # ============================================================================
+ # Token Configuration (Optional)
+ # ============================================================================
+ #
+ # DirectLogger's token_id is optional. Use it only for audit trail tracking.
+ #
+ # Priority order:
+ #   1. Explicit token_id parameter
+ #   2. ENV["SOLIDLOG_TOKEN_ID"]
+ #   3. nil (default - no token required)
+ #
+ # Example:
+ #   ENV["SOLIDLOG_TOKEN_ID"] = "123"  # Optional
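The crash-safety comments above describe one rule: buffer low-severity lines until a size or time threshold, but flush the whole buffer the moment an eager-flush level arrives. The sketch below is not the gem's DirectLogger (that lives in data/lib/solid_log/direct_logger.rb); it is a standalone illustration of that buffering rule, with a hypothetical writer block standing in for the batched database insert:

    # Illustrative only: buffer until the batch fills, flush immediately on
    # error/fatal so the lines explaining a crash are never lost.
    class BufferedSink
      def initialize(batch_size: 100, eager_flush_levels: [:error, :fatal], &writer)
        @batch_size = batch_size
        @eager_flush_levels = eager_flush_levels
        @writer = writer            # e.g. a bulk insert into the log database
        @buffer = []
      end

      def add(level, message)
        @buffer << { level: level, message: message, logged_at: Time.now }
        flush if @eager_flush_levels.include?(level) || @buffer.size >= @batch_size
      end

      def flush
        return if @buffer.empty?
        @writer.call(@buffer)       # one batched write instead of N single inserts
        @buffer.clear
      end
    end

    sink = BufferedSink.new(batch_size: 100) { |lines| puts "flushing #{lines.size} lines" }
    sink.add(:info, "Request started")            # buffered
    sink.add(:info, "Processing params")          # buffered
    sink.add(:error, "Database connection lost")  # flushes all 3 immediately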
data/lib/solid_log/adapters/adapter_factory.rb
@@ -0,0 +1,34 @@
+ module SolidLog
+   module Adapters
+     class AdapterFactory
+       class << self
+         def adapter
+           @adapter ||= build_adapter
+         end
+
+         def reset!
+           @adapter = nil
+         end
+
+         private
+
+         def build_adapter
+           connection = ActiveRecord::Base.connection
+           adapter_name = connection.adapter_name.downcase
+
+           case adapter_name
+           when "sqlite"
+             SqliteAdapter.new(connection)
+           when "postgresql"
+             PostgresqlAdapter.new(connection)
+           when "mysql2", "trilogy"
+             MysqlAdapter.new(connection)
+           else
+             raise "Unsupported database adapter: #{adapter_name}. " \
+                   "SolidLog supports SQLite, PostgreSQL, and MySQL."
+           end
+         end
+       end
+     end
+   end
+ end
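A hedged usage sketch: the factory memoizes a single adapter per process based on the current ActiveRecord connection, and reset! exists for tests or reconnects. The method names used below come from BaseAdapter in the next hunk:

    # Sketch: resolve the adapter once, then go through its common interface.
    adapter = SolidLog::Adapters::AdapterFactory.adapter

    adapter.supports_full_text_search?    # true on the MySQL/PostgreSQL adapters shown here
    adapter.search("connection refused")  # returns an ActiveRecord relation over Entry
    adapter.optimize!                     # ANALYZE/OPTIMIZE, adapter-specific

    SolidLog::Adapters::AdapterFactory.reset!  # e.g. in test teardown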
data/lib/solid_log/adapters/base_adapter.rb
@@ -0,0 +1,88 @@
+ module SolidLog
+   module Adapters
+     class BaseAdapter
+       def initialize(connection)
+         @connection = connection
+       end
+
+       # Search entries - combines FTS with LIKE for comprehensive results
+       def search(query, base_scope = Entry)
+         raise NotImplementedError, "#{self.class} must implement #search"
+       end
+
+       # Full-text search (legacy - used internally by search)
+       def fts_search(query)
+         raise NotImplementedError, "#{self.class} must implement #fts_search"
+       end
+
+       # Case-insensitive LIKE (database-agnostic)
+       def case_insensitive_like(column, pattern)
+         raise NotImplementedError, "#{self.class} must implement #case_insensitive_like"
+       end
+
+       # Claim unparsed entries with lock
+       def claim_batch(batch_size)
+         raise NotImplementedError, "#{self.class} must implement #claim_batch"
+       end
+
+       # JSON field extraction
+       def extract_json_field(column, field_name)
+         raise NotImplementedError, "#{self.class} must implement #extract_json_field"
+       end
+
+       # Facet query (distinct values)
+       def facet_values(column)
+         raise NotImplementedError, "#{self.class} must implement #facet_values"
+       end
+
+       # Optimize database
+       def optimize!
+         # Default: no-op
+       end
+
+       # Database size in bytes
+       def database_size
+         raise NotImplementedError, "#{self.class} must implement #database_size"
+       end
+
+       # Supports feature?
+       def supports_skip_locked?
+         false
+       end
+
+       def supports_native_json?
+         false
+       end
+
+       def supports_full_text_search?
+         false
+       end
+
+       # Database-specific configurations
+       def configure!
+         # Default: no-op
+       end
+
+       # Database-specific epoch conversion (must be implemented by subclasses)
+       def timestamp_to_epoch_sql(column)
+         raise NotImplementedError, "#{self.class} must implement #timestamp_to_epoch_sql"
+       end
+
+       protected
+
+       attr_reader :connection
+
+       def execute(sql)
+         connection.execute(sql)
+       end
+
+       def select_all(sql)
+         connection.select_all(sql)
+       end
+
+       def select_value(sql)
+         raise NotImplementedError if sql.nil?
+         connection.select_value(sql)
+       end
+     end
+   end
+ end
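BaseAdapter is a template-method interface: subclasses override the query-building and locking hooks, while the feature flags and optimize!/configure! fall back to safe defaults. A hedged, hypothetical sketch of the contract (not one of the gem's adapters; any hook left out keeps the base behaviour, including the NotImplementedError guards):

    # Hypothetical adapter sketch showing which hooks a subclass supplies.
    module SolidLog
      module Adapters
        class ExampleAdapter < BaseAdapter
          def case_insensitive_like(column, pattern)
            "LOWER(#{column}) LIKE LOWER(#{connection.quote(pattern)})"
          end

          def extract_json_field(column, field_name)
            # portable fallback when there is no native JSON type
            "json_extract(#{column}, '$.#{field_name}')"
          end

          def timestamp_to_epoch_sql(column)
            "CAST(strftime('%s', #{column}) AS INTEGER)"
          end

          def database_size
            0 # stub: report whatever the engine exposes
          end
        end
      end
    end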
data/lib/solid_log/adapters/mysql_adapter.rb
@@ -0,0 +1,163 @@
+ module SolidLog
+   module Adapters
+     class MysqlAdapter < BaseAdapter
+       def search(query, base_scope = Entry)
+         sanitized_query = Entry.sanitize_sql_like(query)
+         table_name = Entry.table_name
+
+         # Build LIKE query for partial matching
+         like_condition = case_insensitive_like("#{table_name}.message", "%#{sanitized_query}%")
+         like_sql = "SELECT #{table_name}.* FROM #{table_name} WHERE #{like_condition}"
+
+         # Build FTS query for full-text matching
+         fts_fragment = fts_search(query)
+         fts_sql = "SELECT #{table_name}.* FROM #{table_name} #{fts_fragment}"
+
+         # UNION both queries (DISTINCT removes duplicates)
+         union_sql = "#{fts_sql} UNION #{like_sql}"
+         Entry.from("(#{union_sql}) AS #{table_name}")
+       end
+
+       def fts_search(query)
+         # MySQL FULLTEXT search
+         sanitized_query = connection.quote(query)
+         <<~SQL.squish
+           WHERE MATCH(message, extra_fields) AGAINST(#{sanitized_query} IN NATURAL LANGUAGE MODE)
+         SQL
+       end
+
+       def case_insensitive_like(column, pattern)
+         # MySQL LIKE is case-insensitive by default with default collation
+         "#{column} LIKE #{connection.quote(pattern)}"
+       end
+
+       def claim_batch(batch_size)
+         # MySQL 8.0+ supports SKIP LOCKED
+         RawEntry.transaction do
+           entries = RawEntry.where(parsed: false)
+                             .order(received_at: :asc)
+                             .limit(batch_size)
+                             .lock("FOR UPDATE SKIP LOCKED")
+                             .to_a
+
+           return [] if entries.empty?
+
+           # Mark as parsed
+           entry_ids = entries.map(&:id)
+           RawEntry.where(id: entry_ids).update_all(parsed: true, parsed_at: Time.current)
+
+           entries
+         end
+       rescue ActiveRecord::StatementInvalid => e
+         # Fallback for MySQL < 8.0 without SKIP LOCKED
+         if e.message.include?("syntax error")
+           claim_batch_legacy(batch_size)
+         else
+           raise
+         end
+       end
+
+       def extract_json_field(column, field_name)
+         # MySQL JSON_EXTRACT
+         "JSON_UNQUOTE(JSON_EXTRACT(#{column}, '$.#{field_name}'))"
+       end
+
+       def facet_values(column)
+         "SELECT DISTINCT #{column} FROM solid_log_entries WHERE #{column} IS NOT NULL ORDER BY #{column}"
+       end
+
+       def optimize!
+         execute("OPTIMIZE TABLE solid_log_entries")
+         execute("OPTIMIZE TABLE solid_log_raw")
+         execute("ANALYZE TABLE solid_log_entries")
+         execute("ANALYZE TABLE solid_log_raw")
+       rescue ActiveRecord::StatementInvalid => e
+         Rails.logger.warn("MySQL optimize warning: #{e.message}")
+       end
+
+       def database_size
+         result = select_value(<<~SQL)
+           SELECT SUM(data_length + index_length)
+           FROM information_schema.TABLES
+           WHERE table_schema = DATABASE()
+             AND table_name LIKE 'solid_log_%'
+         SQL
+         result.to_i
+       end
+
+       def supports_skip_locked?
+         # MySQL 8.0+
+         version = connection.get_database_version
+         version >= "8.0"
+       end
+
+       def supports_native_json?
+         true # MySQL 5.7+
+       end
+
+       def supports_full_text_search?
+         true # FULLTEXT indexes
+       end
+
+       def configure!
+         # Set optimal MySQL settings
+         execute("SET SESSION sql_mode = 'TRADITIONAL'") rescue nil
+         execute("SET SESSION innodb_lock_wait_timeout = 50") rescue nil
+       end
+
+       def timestamp_to_epoch_sql(column)
+         "UNIX_TIMESTAMP(#{column})"
+       end
+
+       # MySQL-specific methods
+       def create_fts_index_sql
+         <<~SQL
+           CREATE FULLTEXT INDEX idx_entries_fts
+           ON solid_log_entries(message, extra_fields)
+         SQL
+       end
+
+       def create_json_indexes_sql
+         # MySQL doesn't have native JSON indexes, use generated columns
+         [
+           # Example for common fields - these would be created via field promotion
+           # "ALTER TABLE solid_log_entries ADD COLUMN user_id_virtual INT GENERATED ALWAYS AS (JSON_UNQUOTE(JSON_EXTRACT(extra_fields, '$.user_id'))) STORED",
+           # "CREATE INDEX idx_entries_user_id ON solid_log_entries(user_id_virtual)"
+         ]
+       end
+
+       # Bulk insert optimization for MySQL
+       def bulk_insert(table_name, records)
+         return if records.empty?
+
+         columns = records.first.keys
+         values_sql = records.map do |record|
+           "(#{columns.map { |col| connection.quote(record[col]) }.join(', ')})"
+         end.join(", ")
+
+         execute(<<~SQL)
+           INSERT INTO #{table_name} (#{columns.join(', ')})
+           VALUES #{values_sql}
+         SQL
+       end
+
+       private
+
+       def claim_batch_legacy(batch_size)
+         # Fallback for MySQL < 8.0 without SKIP LOCKED
+         RawEntry.transaction do
+           ids = RawEntry.where(parsed: false)
+                         .order(received_at: :asc)
+                         .limit(batch_size)
+                         .lock
+                         .pluck(:id)
+
+           return [] if ids.empty?
+
+           RawEntry.where(id: ids).update_all(parsed: true, parsed_at: Time.current)
+           RawEntry.where(id: ids).to_a
+         end
+       end
+     end
+   end
+ end
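claim_batch marks rows parsed inside the same transaction that locks them, so several parser processes can drain the raw queue without double-processing (SKIP LOCKED on MySQL 8+, plain FOR UPDATE on older servers). A hedged sketch of the consumer loop; the gem's real consumer is data/lib/solid_log/parser.rb, and handle here is a placeholder for that work:

    # Sketch: drain unparsed raw entries in batches until the backlog is empty.
    adapter = SolidLog::Adapters::AdapterFactory.adapter

    loop do
      batch = adapter.claim_batch(200)   # already marked parsed when returned
      break if batch.empty?

      batch.each do |raw_entry|
        handle(raw_entry)                # placeholder for SolidLog::Parser-style work
      end
    end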
data/lib/solid_log/adapters/postgresql_adapter.rb
@@ -0,0 +1,141 @@
+ module SolidLog
+   module Adapters
+     class PostgresqlAdapter < BaseAdapter
+       def search(query, base_scope = Entry)
+         sanitized_query = Entry.sanitize_sql_like(query)
+         table_name = Entry.table_name
+
+         # Build LIKE query for partial matching
+         like_condition = case_insensitive_like("#{table_name}.message", "%#{sanitized_query}%")
+         like_sql = "SELECT #{table_name}.* FROM #{table_name} WHERE #{like_condition}"
+
+         # Build FTS query for full-text matching
+         fts_fragment = fts_search(query)
+         fts_sql = "SELECT #{table_name}.* FROM #{table_name} #{fts_fragment}"
+
+         # UNION both queries (DISTINCT removes duplicates)
+         union_sql = "#{fts_sql} UNION #{like_sql}"
+         Entry.from("(#{union_sql}) AS #{table_name}")
+       end
+
+       def fts_search(query)
+         # PostgreSQL full-text search using tsvector
+         sanitized_query = connection.quote(query.gsub(/[^\w\s]/, " "))
+         <<~SQL.squish
+           WHERE to_tsvector('english', COALESCE(message, '') || ' ' || COALESCE(extra_fields::text, ''))
+             @@ plainto_tsquery('english', #{sanitized_query})
+         SQL
+       end
+
+       def case_insensitive_like(column, pattern)
+         # PostgreSQL uses ILIKE for case-insensitive matching
+         "#{column} ILIKE #{connection.quote(pattern)}"
+       end
+
+       def claim_batch(batch_size)
+         # PostgreSQL supports SKIP LOCKED
+         RawEntry.transaction do
+           entries = RawEntry.where(parsed: false)
+                             .order(received_at: :asc)
+                             .limit(batch_size)
+                             .lock("FOR UPDATE SKIP LOCKED")
+                             .to_a
+
+           return [] if entries.empty?
+
+           # Mark as parsed
+           entry_ids = entries.map(&:id)
+           RawEntry.where(id: entry_ids).update_all(parsed: true, parsed_at: Time.current)
+
+           entries
+         end
+       end
+
+       def extract_json_field(column, field_name)
+         # PostgreSQL JSONB operator
+         "#{column}->>'#{field_name}'"
+       end
+
+       def facet_values(column)
+         "SELECT DISTINCT #{column} FROM solid_log_entries WHERE #{column} IS NOT NULL ORDER BY #{column}"
+       end
+
+       def optimize!
+         # Analyze tables for query planning
+         execute("ANALYZE solid_log_entries")
+         execute("ANALYZE solid_log_raw")
+         execute("ANALYZE solid_log_fields")
+
+         # Vacuum if needed (non-blocking)
+         execute("VACUUM ANALYZE solid_log_entries")
+       rescue ActiveRecord::StatementInvalid => e
+         # VACUUM might fail if already running
+         Rails.logger.warn("PostgreSQL vacuum warning: #{e.message}")
+       end
+
+       def database_size
+         result = select_value(<<~SQL)
+           SELECT pg_database_size(current_database())
+         SQL
+         result.to_i
+       end
+
+       def supports_skip_locked?
+         true
+       end
+
+       def supports_native_json?
+         true # JSONB
+       end
+
+       def supports_full_text_search?
+         true # tsvector/tsquery
+       end
+
+       def configure!
+         # Set optimal PostgreSQL settings for logging workload
+         execute("SET work_mem = '64MB'") rescue nil
+         execute("SET maintenance_work_mem = '256MB'") rescue nil
+       end
+
+       def timestamp_to_epoch_sql(column)
+         "EXTRACT(EPOCH FROM #{column})::bigint"
+       end
+
+       # PostgreSQL-specific methods
+       def create_fts_index_sql
+         <<~SQL
+           CREATE INDEX IF NOT EXISTS idx_entries_fts
+           ON solid_log_entries
+           USING GIN (to_tsvector('english', COALESCE(message, '') || ' ' || COALESCE(extra_fields::text, '')))
+         SQL
+       end
+
+       def create_json_indexes_sql
+         # GIN index for JSONB queries
+         [
+           <<~SQL
+             CREATE INDEX IF NOT EXISTS idx_entries_extra_fields
+             ON solid_log_entries
+             USING GIN (extra_fields jsonb_path_ops)
+           SQL
+         ]
+       end
+
+       # Bulk insert optimization for PostgreSQL
+       def bulk_insert(table_name, records)
+         return if records.empty?
+
+         columns = records.first.keys
+         values_sql = records.map do |record|
+           "(#{columns.map { |col| connection.quote(record[col]) }.join(', ')})"
+         end.join(", ")
+
+         execute(<<~SQL)
+           INSERT INTO #{table_name} (#{columns.join(', ')})
+           VALUES #{values_sql}
+         SQL
+       end
+     end
+   end
+ end
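create_fts_index_sql and create_json_indexes_sql return raw DDL rather than executing it. A hedged sketch of one place that DDL could be applied, a migration against the log database; the migration class name and the [7.1] version tag are illustrative, and the DROP statements simply mirror the index names defined above:

    # Illustrative migration: apply the adapter-provided FTS and JSONB index DDL.
    class AddSolidLogSearchIndexes < ActiveRecord::Migration[7.1]
      def up
        adapter = SolidLog::Adapters::PostgresqlAdapter.new(connection)
        execute adapter.create_fts_index_sql
        adapter.create_json_indexes_sql.each { |sql| execute sql }
      end

      def down
        execute "DROP INDEX IF EXISTS idx_entries_fts"
        execute "DROP INDEX IF EXISTS idx_entries_extra_fields"
      end
    end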