heathrow 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +58 -0
- data/README.md +205 -0
- data/bin/heathrow +42 -0
- data/bin/heathrowd +283 -0
- data/docs/ARCHITECTURE.md +1172 -0
- data/docs/DATABASE_SCHEMA.md +685 -0
- data/docs/DEVELOPMENT_WORKFLOW.md +867 -0
- data/docs/DISCORD_SETUP.md +142 -0
- data/docs/GMAIL_OAUTH_SETUP.md +120 -0
- data/docs/PLUGIN_SYSTEM.md +1370 -0
- data/docs/PROJECT_PLAN.md +1022 -0
- data/docs/README.md +417 -0
- data/docs/REDDIT_SETUP.md +174 -0
- data/docs/REPLY_FORWARD.md +182 -0
- data/docs/WHATSAPP_TELEGRAM_SETUP.md +306 -0
- data/heathrow.gemspec +34 -0
- data/heathrowd.service +21 -0
- data/img/heathrow.svg +95 -0
- data/img/rss_threaded.png +0 -0
- data/img/sources.png +0 -0
- data/lib/heathrow/address_book.rb +42 -0
- data/lib/heathrow/config.rb +332 -0
- data/lib/heathrow/database.rb +731 -0
- data/lib/heathrow/database_new.rb +392 -0
- data/lib/heathrow/event_bus.rb +175 -0
- data/lib/heathrow/logger.rb +122 -0
- data/lib/heathrow/message.rb +176 -0
- data/lib/heathrow/message_composer.rb +399 -0
- data/lib/heathrow/message_organizer.rb +774 -0
- data/lib/heathrow/migrations/001_initial_schema.rb +248 -0
- data/lib/heathrow/notmuch.rb +45 -0
- data/lib/heathrow/oauth2_smtp.rb +254 -0
- data/lib/heathrow/plugin/base.rb +212 -0
- data/lib/heathrow/plugin_manager.rb +141 -0
- data/lib/heathrow/poller.rb +93 -0
- data/lib/heathrow/smtp_sender.rb +204 -0
- data/lib/heathrow/source.rb +39 -0
- data/lib/heathrow/sources/base.rb +74 -0
- data/lib/heathrow/sources/discord.rb +357 -0
- data/lib/heathrow/sources/gmail.rb +294 -0
- data/lib/heathrow/sources/imap.rb +198 -0
- data/lib/heathrow/sources/instagram.rb +307 -0
- data/lib/heathrow/sources/instagram_fetch.py +101 -0
- data/lib/heathrow/sources/instagram_send.py +55 -0
- data/lib/heathrow/sources/instagram_send_marionette.py +104 -0
- data/lib/heathrow/sources/maildir.rb +606 -0
- data/lib/heathrow/sources/messenger.rb +212 -0
- data/lib/heathrow/sources/messenger_fetch.js +297 -0
- data/lib/heathrow/sources/messenger_fetch_marionette.py +138 -0
- data/lib/heathrow/sources/messenger_send.js +32 -0
- data/lib/heathrow/sources/messenger_send.py +100 -0
- data/lib/heathrow/sources/reddit.rb +461 -0
- data/lib/heathrow/sources/rss.rb +299 -0
- data/lib/heathrow/sources/slack.rb +375 -0
- data/lib/heathrow/sources/source_manager.rb +328 -0
- data/lib/heathrow/sources/telegram.rb +498 -0
- data/lib/heathrow/sources/webpage.rb +207 -0
- data/lib/heathrow/sources/weechat.rb +479 -0
- data/lib/heathrow/sources/whatsapp.rb +474 -0
- data/lib/heathrow/ui/application.rb +8098 -0
- data/lib/heathrow/ui/navigation.rb +8 -0
- data/lib/heathrow/ui/panes.rb +8 -0
- data/lib/heathrow/ui/source_wizard.rb +567 -0
- data/lib/heathrow/ui/threaded_view.rb +780 -0
- data/lib/heathrow/ui/views.rb +8 -0
- data/lib/heathrow/version.rb +3 -0
- data/lib/heathrow/wizards/discord_wizard.rb +193 -0
- data/lib/heathrow/wizards/slack_wizard.rb +140 -0
- data/lib/heathrow.rb +55 -0
- metadata +147 -0
|
@@ -0,0 +1,392 @@
|
|
|
1
|
+
require 'sqlite3'
|
|
2
|
+
require 'time'
|
|
3
|
+
require 'thread'
|
|
4
|
+
|
|
5
|
+
module Heathrow
|
|
6
|
+
class Database
|
|
7
|
+
attr_reader :db_path
|
|
8
|
+
|
|
9
|
+
def initialize(db_path = HEATHROW_DB)
|
|
10
|
+
@db_path = db_path
|
|
11
|
+
@db = nil
|
|
12
|
+
@mutex = Mutex.new
|
|
13
|
+
connect
|
|
14
|
+
migrate_to_latest
|
|
15
|
+
end
|
|
16
|
+
|
|
17
|
+
# Execute a SQL statement (INSERT, UPDATE, DELETE)
|
|
18
|
+
# @param sql [String] SQL statement
|
|
19
|
+
# @param params [Array] Parameters for prepared statement
|
|
20
|
+
# @return [Integer] Number of rows affected
|
|
21
|
+
def exec(sql, params = [])
|
|
22
|
+
@mutex.synchronize do
|
|
23
|
+
@db.execute(sql, params)
|
|
24
|
+
@db.changes
|
|
25
|
+
end
|
|
26
|
+
rescue SQLite3::Exception => e
|
|
27
|
+
raise DatabaseError, "SQL execution failed: #{e.message}"
|
|
28
|
+
end
|
|
29
|
+
|
|
30
|
+
# Query data (SELECT)
|
|
31
|
+
# @param sql [String] SQL query
|
|
32
|
+
# @param params [Array] Parameters for prepared statement
|
|
33
|
+
# @return [Array<Hash>] Array of result hashes
|
|
34
|
+
def query(sql, params = [])
|
|
35
|
+
@mutex.synchronize do
|
|
36
|
+
@db.execute(sql, params)
|
|
37
|
+
end
|
|
38
|
+
rescue SQLite3::Exception => e
|
|
39
|
+
raise DatabaseError, "SQL query failed: #{e.message}"
|
|
40
|
+
end
|
|
41
|
+
|
|
42
|
+
# Get first row of query result
|
|
43
|
+
# @param sql [String] SQL query
|
|
44
|
+
# @param params [Array] Parameters
|
|
45
|
+
# @return [Hash, nil] First result or nil
|
|
46
|
+
def query_one(sql, params = [])
|
|
47
|
+
query(sql, params).first
|
|
48
|
+
end
|
|
49
|
+
|
|
50
|
+
# Get single value from query
|
|
51
|
+
# @param sql [String] SQL query
|
|
52
|
+
# @param params [Array] Parameters
|
|
53
|
+
# @return [Object, nil] Single value or nil
|
|
54
|
+
def query_value(sql, params = [])
|
|
55
|
+
result = query_one(sql, params)
|
|
56
|
+
result&.values&.first
|
|
57
|
+
end
|
|
58
|
+
|
|
59
|
+
# Execute block in a transaction
|
|
60
|
+
# @yield Block to execute within transaction
|
|
61
|
+
# @return [Object] Return value of block
|
|
62
|
+
def transaction
|
|
63
|
+
@mutex.synchronize do
|
|
64
|
+
@db.transaction do
|
|
65
|
+
yield
|
|
66
|
+
end
|
|
67
|
+
end
|
|
68
|
+
rescue SQLite3::Exception => e
|
|
69
|
+
raise DatabaseError, "Transaction failed: #{e.message}"
|
|
70
|
+
end
|
|
71
|
+
|
|
72
|
+
# Migrate to latest schema version
|
|
73
|
+
def migrate_to_latest
|
|
74
|
+
# Create schema_version table if it doesn't exist
|
|
75
|
+
exec <<-SQL
|
|
76
|
+
CREATE TABLE IF NOT EXISTS schema_version (
|
|
77
|
+
version INTEGER PRIMARY KEY,
|
|
78
|
+
applied_at INTEGER NOT NULL
|
|
79
|
+
);
|
|
80
|
+
SQL
|
|
81
|
+
|
|
82
|
+
current = query_value("SELECT MAX(version) FROM schema_version") || 0
|
|
83
|
+
|
|
84
|
+
# Load and run migrations
|
|
85
|
+
Dir[File.join(__dir__, 'migrations', '*.rb')].sort.each do |file|
|
|
86
|
+
require file
|
|
87
|
+
migration_class = extract_migration_class(file)
|
|
88
|
+
next unless migration_class
|
|
89
|
+
next if migration_class::VERSION <= current
|
|
90
|
+
|
|
91
|
+
puts "Applying migration #{migration_class::VERSION}..."
|
|
92
|
+
transaction do
|
|
93
|
+
migration_class.up(self)
|
|
94
|
+
end
|
|
95
|
+
puts "Migration #{migration_class::VERSION} applied successfully"
|
|
96
|
+
end
|
|
97
|
+
end
|
|
98
|
+
|
|
99
|
+
# Backup database to file
|
|
100
|
+
# @param backup_path [String] Path to backup file
|
|
101
|
+
def backup(backup_path = nil)
|
|
102
|
+
backup_path ||= "#{@db_path}.backup.#{Time.now.to_i}"
|
|
103
|
+
|
|
104
|
+
@mutex.synchronize do
|
|
105
|
+
backup_db = SQLite3::Database.new(backup_path)
|
|
106
|
+
@db.backup('main', backup_db, 'main')
|
|
107
|
+
backup_db.close
|
|
108
|
+
end
|
|
109
|
+
|
|
110
|
+
backup_path
|
|
111
|
+
end
|
|
112
|
+
|
|
113
|
+
# Close database connection
|
|
114
|
+
def close
|
|
115
|
+
@mutex.synchronize do
|
|
116
|
+
@db.close if @db
|
|
117
|
+
@db = nil
|
|
118
|
+
end
|
|
119
|
+
end
|
|
120
|
+
|
|
121
|
+
# Reconnect to database
|
|
122
|
+
def reconnect
|
|
123
|
+
close
|
|
124
|
+
connect
|
|
125
|
+
end
|
|
126
|
+
|
|
127
|
+
# Get database statistics
|
|
128
|
+
# @return [Hash] Statistics about database
|
|
129
|
+
def stats
|
|
130
|
+
{
|
|
131
|
+
total_messages: query_value("SELECT COUNT(*) FROM messages") || 0,
|
|
132
|
+
unread_messages: query_value("SELECT COUNT(*) FROM messages WHERE read = 0") || 0,
|
|
133
|
+
starred_messages: query_value("SELECT COUNT(*) FROM messages WHERE starred = 1") || 0,
|
|
134
|
+
total_sources: query_value("SELECT COUNT(*) FROM sources") || 0,
|
|
135
|
+
active_sources: query_value("SELECT COUNT(*) FROM sources WHERE enabled = 1") || 0,
|
|
136
|
+
total_views: query_value("SELECT COUNT(*) FROM views") || 0,
|
|
137
|
+
db_size: File.size(@db_path)
|
|
138
|
+
}
|
|
139
|
+
end
|
|
140
|
+
|
|
141
|
+
# Optimize database (VACUUM)
|
|
142
|
+
def optimize
|
|
143
|
+
@mutex.synchronize do
|
|
144
|
+
@db.execute("VACUUM")
|
|
145
|
+
end
|
|
146
|
+
end
|
|
147
|
+
|
|
148
|
+
# === LEGACY COMPATIBILITY METHODS ===
|
|
149
|
+
# These maintain compatibility with existing code
|
|
150
|
+
|
|
151
|
+
def get_messages(filters = {}, limit = nil, offset = 0)
|
|
152
|
+
query_sql = "SELECT * FROM messages WHERE 1=1"
|
|
153
|
+
params = []
|
|
154
|
+
|
|
155
|
+
# Source filters
|
|
156
|
+
if filters[:source_id]
|
|
157
|
+
query_sql += " AND source_id = ?"
|
|
158
|
+
params << filters[:source_id]
|
|
159
|
+
end
|
|
160
|
+
|
|
161
|
+
if filters[:source_type]
|
|
162
|
+
# Map old source_type to plugin_type via sources table
|
|
163
|
+
source_ids = query("SELECT id FROM sources WHERE plugin_type = ?", [filters[:source_type]])
|
|
164
|
+
if source_ids.any?
|
|
165
|
+
placeholders = source_ids.map { '?' }.join(',')
|
|
166
|
+
query_sql += " AND source_id IN (#{placeholders})"
|
|
167
|
+
params += source_ids.map { |s| s['id'] }
|
|
168
|
+
end
|
|
169
|
+
end
|
|
170
|
+
|
|
171
|
+
if filters[:source_types] && filters[:source_types].is_a?(Array)
|
|
172
|
+
source_ids = query("SELECT id FROM sources WHERE plugin_type IN (#{filters[:source_types].map{'?'}.join(',')})",
|
|
173
|
+
filters[:source_types])
|
|
174
|
+
if source_ids.any?
|
|
175
|
+
placeholders = source_ids.map { '?' }.join(',')
|
|
176
|
+
query_sql += " AND source_id IN (#{placeholders})"
|
|
177
|
+
params += source_ids.map { |s| s['id'] }
|
|
178
|
+
end
|
|
179
|
+
end
|
|
180
|
+
|
|
181
|
+
# Sender filters
|
|
182
|
+
if filters[:sender_pattern]
|
|
183
|
+
patterns = filters[:sender_pattern].split('|')
|
|
184
|
+
conditions = patterns.map { "sender LIKE ?" }.join(' OR ')
|
|
185
|
+
query_sql += " AND (#{conditions})"
|
|
186
|
+
params += patterns.map { |p| "%#{p}%" }
|
|
187
|
+
end
|
|
188
|
+
|
|
189
|
+
# Subject filters
|
|
190
|
+
if filters[:subject_pattern]
|
|
191
|
+
patterns = filters[:subject_pattern].split('|')
|
|
192
|
+
conditions = patterns.map { "subject LIKE ?" }.join(' OR ')
|
|
193
|
+
query_sql += " AND (#{conditions})"
|
|
194
|
+
params += patterns.map { |p| "%#{p}%" }
|
|
195
|
+
end
|
|
196
|
+
|
|
197
|
+
# Content filters
|
|
198
|
+
if filters[:content_patterns]
|
|
199
|
+
filters[:content_patterns].each do |pattern_group|
|
|
200
|
+
if pattern_group.include?('|')
|
|
201
|
+
or_patterns = pattern_group.split('|').map(&:strip)
|
|
202
|
+
conditions = or_patterns.map { "content LIKE ?" }.join(' OR ')
|
|
203
|
+
query_sql += " AND (#{conditions})"
|
|
204
|
+
params += or_patterns.map { |p| "%#{p}%" }
|
|
205
|
+
else
|
|
206
|
+
query_sql += " AND content LIKE ?"
|
|
207
|
+
params << "%#{pattern_group}%"
|
|
208
|
+
end
|
|
209
|
+
end
|
|
210
|
+
end
|
|
211
|
+
|
|
212
|
+
# Search filter
|
|
213
|
+
if filters[:search]
|
|
214
|
+
query_sql += " AND (sender LIKE ? OR subject LIKE ? OR content LIKE ?)"
|
|
215
|
+
search_term = "%#{filters[:search]}%"
|
|
216
|
+
params += [search_term, search_term, search_term]
|
|
217
|
+
end
|
|
218
|
+
|
|
219
|
+
# Read status
|
|
220
|
+
if filters[:is_read] != nil
|
|
221
|
+
query_sql += " AND read = ?"
|
|
222
|
+
params << (filters[:is_read] ? 1 : 0)
|
|
223
|
+
end
|
|
224
|
+
|
|
225
|
+
# Sorting
|
|
226
|
+
query_sql += " ORDER BY timestamp DESC"
|
|
227
|
+
|
|
228
|
+
# Pagination
|
|
229
|
+
if limit
|
|
230
|
+
query_sql += " LIMIT ? OFFSET ?"
|
|
231
|
+
params += [limit, offset]
|
|
232
|
+
end
|
|
233
|
+
|
|
234
|
+
query(query_sql, params)
|
|
235
|
+
end
|
|
236
|
+
|
|
237
|
+
def insert_message(message_data)
|
|
238
|
+
# Legacy method - convert array format to hash and use new method
|
|
239
|
+
exec <<-SQL, message_data
|
|
240
|
+
INSERT OR REPLACE INTO messages
|
|
241
|
+
(source_id, source_type, external_id, sender, recipient, subject, content,
|
|
242
|
+
raw_data, attachments, timestamp, read)
|
|
243
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
244
|
+
SQL
|
|
245
|
+
end
|
|
246
|
+
|
|
247
|
+
def mark_as_read(message_id)
|
|
248
|
+
exec("UPDATE messages SET read = 1 WHERE id = ?", [message_id]) > 0
|
|
249
|
+
end
|
|
250
|
+
|
|
251
|
+
def mark_as_unread(message_id)
|
|
252
|
+
exec("UPDATE messages SET read = 0 WHERE id = ?", [message_id]) > 0
|
|
253
|
+
end
|
|
254
|
+
|
|
255
|
+
def toggle_star(message_id)
|
|
256
|
+
exec("UPDATE messages SET starred = NOT starred WHERE id = ?", [message_id])
|
|
257
|
+
end
|
|
258
|
+
|
|
259
|
+
def delete_message(message_id)
|
|
260
|
+
exec("DELETE FROM messages WHERE id = ?", [message_id])
|
|
261
|
+
end
|
|
262
|
+
|
|
263
|
+
def add_source(id, type, name, config, polling_interval, color = 15, enabled = true)
|
|
264
|
+
config_json = config.is_a?(Hash) ? config.to_json : config
|
|
265
|
+
now = Time.now.to_i
|
|
266
|
+
|
|
267
|
+
# Check if source exists
|
|
268
|
+
existing = query_one("SELECT id FROM sources WHERE name = ?", [name])
|
|
269
|
+
|
|
270
|
+
if existing
|
|
271
|
+
# Update existing
|
|
272
|
+
exec <<-SQL, [type, config_json, enabled ? 1 : 0, now, existing['id']]
|
|
273
|
+
UPDATE sources
|
|
274
|
+
SET plugin_type = ?, config = ?, enabled = ?, updated_at = ?
|
|
275
|
+
WHERE id = ?
|
|
276
|
+
SQL
|
|
277
|
+
else
|
|
278
|
+
# Insert new
|
|
279
|
+
exec <<-SQL, [name, type, config_json, '["read"]', enabled ? 1 : 0, now, now]
|
|
280
|
+
INSERT INTO sources (name, plugin_type, config, capabilities, enabled, created_at, updated_at)
|
|
281
|
+
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
282
|
+
SQL
|
|
283
|
+
end
|
|
284
|
+
end
|
|
285
|
+
|
|
286
|
+
def get_sources(enabled_only = true)
|
|
287
|
+
sql = "SELECT * FROM sources"
|
|
288
|
+
sql += " WHERE enabled = 1" if enabled_only
|
|
289
|
+
|
|
290
|
+
sources = query(sql)
|
|
291
|
+
sources.each do |source|
|
|
292
|
+
source['config'] = JSON.parse(source['config']) if source['config']
|
|
293
|
+
source['capabilities'] = JSON.parse(source['capabilities']) if source['capabilities']
|
|
294
|
+
end
|
|
295
|
+
sources
|
|
296
|
+
end
|
|
297
|
+
|
|
298
|
+
def get_all_sources
|
|
299
|
+
get_sources(false)
|
|
300
|
+
end
|
|
301
|
+
|
|
302
|
+
def get_source_by_name(name)
|
|
303
|
+
source = query_one("SELECT * FROM sources WHERE name = ? LIMIT 1", [name])
|
|
304
|
+
if source
|
|
305
|
+
source['config'] = JSON.parse(source['config']) if source['config']
|
|
306
|
+
source['capabilities'] = JSON.parse(source['capabilities']) if source['capabilities']
|
|
307
|
+
end
|
|
308
|
+
source
|
|
309
|
+
end
|
|
310
|
+
|
|
311
|
+
def get_source_by_id(id)
|
|
312
|
+
source = query_one("SELECT * FROM sources WHERE id = ? LIMIT 1", [id])
|
|
313
|
+
if source
|
|
314
|
+
source['config'] = JSON.parse(source['config']) if source['config']
|
|
315
|
+
source['capabilities'] = JSON.parse(source['capabilities']) if source['capabilities']
|
|
316
|
+
end
|
|
317
|
+
source
|
|
318
|
+
end
|
|
319
|
+
|
|
320
|
+
def update_source(source_id, updates = {})
|
|
321
|
+
if updates[:config]
|
|
322
|
+
exec("UPDATE sources SET config = ?, updated_at = ? WHERE id = ?",
|
|
323
|
+
[updates[:config], Time.now.to_i, source_id])
|
|
324
|
+
end
|
|
325
|
+
end
|
|
326
|
+
|
|
327
|
+
def update_source_poll_time(source_id)
|
|
328
|
+
exec("UPDATE sources SET last_sync = ? WHERE id = ?", [Time.now.to_i, source_id])
|
|
329
|
+
end
|
|
330
|
+
|
|
331
|
+
def save_view(view_id, view_data)
|
|
332
|
+
now = Time.now.to_i
|
|
333
|
+
exec <<-SQL, [view_data[:name], view_data[:filters].to_json, view_data[:sort_order],
|
|
334
|
+
view_data.fetch(:key_binding, nil), now, view_id]
|
|
335
|
+
INSERT OR REPLACE INTO views (name, filters, sort_order, key_binding, updated_at, id)
|
|
336
|
+
VALUES (?, ?, ?, ?, ?, ?)
|
|
337
|
+
SQL
|
|
338
|
+
end
|
|
339
|
+
|
|
340
|
+
def delete_view(view_id)
|
|
341
|
+
exec("DELETE FROM views WHERE id = ?", [view_id])
|
|
342
|
+
end
|
|
343
|
+
|
|
344
|
+
def get_view(view_id)
|
|
345
|
+
view = query_one("SELECT * FROM views WHERE id = ?", [view_id])
|
|
346
|
+
if view
|
|
347
|
+
view['filters'] = JSON.parse(view['filters']) if view['filters']
|
|
348
|
+
end
|
|
349
|
+
view
|
|
350
|
+
end
|
|
351
|
+
|
|
352
|
+
def get_all_views
|
|
353
|
+
views = query("SELECT * FROM views ORDER BY id")
|
|
354
|
+
views.each do |view|
|
|
355
|
+
view['filters'] = JSON.parse(view['filters']) if view['filters']
|
|
356
|
+
end
|
|
357
|
+
views
|
|
358
|
+
end
|
|
359
|
+
|
|
360
|
+
def get_stats
|
|
361
|
+
stats
|
|
362
|
+
end
|
|
363
|
+
|
|
364
|
+
def execute(query, *params)
|
|
365
|
+
exec(query, params)
|
|
366
|
+
end
|
|
367
|
+
|
|
368
|
+
private
|
|
369
|
+
|
|
370
|
+
def connect
|
|
371
|
+
@db = SQLite3::Database.new(@db_path)
|
|
372
|
+
@db.results_as_hash = true
|
|
373
|
+
@db.busy_timeout = 5000 # Wait up to 5 seconds if database is locked
|
|
374
|
+
end
|
|
375
|
+
|
|
376
|
+
def extract_migration_class(file)
|
|
377
|
+
# Extract migration class from filename
|
|
378
|
+
# e.g., "001_initial_schema.rb" -> Heathrow::Migrations::InitialSchema
|
|
379
|
+
basename = File.basename(file, '.rb')
|
|
380
|
+
class_name = basename.split('_')[1..-1].map(&:capitalize).join
|
|
381
|
+
|
|
382
|
+
begin
|
|
383
|
+
Heathrow::Migrations.const_get(class_name)
|
|
384
|
+
rescue NameError
|
|
385
|
+
nil
|
|
386
|
+
end
|
|
387
|
+
end
|
|
388
|
+
end
|
|
389
|
+
|
|
390
|
+
# Custom error class
|
|
391
|
+
class DatabaseError < StandardError; end
|
|
392
|
+
end
|
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
require 'thread'

module Heathrow
  # EventBus - Simple pub/sub system for inter-component communication.
  #
  # Handlers are registered per event name and invoked synchronously by
  # #publish (or on a background thread by #publish_async). Handler execution
  # happens outside the internal mutex so handlers may safely call back into
  # the bus. The last 100 published events are kept in an in-memory log.
  #
  #   bus = EventBus.instance
  #   handler_id = bus.subscribe('message.new') { |data| ... }
  #   bus.publish('message.new', message_data)
  #   bus.unsubscribe('message.new', handler_id)
  #
  class EventBus
    attr_reader :subscribers, :event_log

    def initialize(logger = nil)
      @logger = logger
      @mutex = Mutex.new
      @next_id = 0
      @event_log = []
      # event name => { handler_id => block }
      @subscribers = Hash.new { |hash, event| hash[event] = {} }
    end

    # Register +block+ for +event_name+.
    # @return [String] handler id usable with #unsubscribe
    def subscribe(event_name, &block)
      @mutex.synchronize do
        id = generate_handler_id
        @subscribers[event_name][id] = block
        @logger&.debug("EventBus: Subscribed to '#{event_name}' (handler #{id})")
        id
      end
    end

    # Remove one handler.
    # @return [Boolean] true if the handler existed and was removed
    def unsubscribe(event_name, handler_id)
      @mutex.synchronize do
        removed = @subscribers[event_name].delete(handler_id)
        if removed
          @logger&.debug("EventBus: Unsubscribed from '#{event_name}' (handler #{handler_id})")
        end
        !removed.nil?
      end
    end

    # Remove every handler registered for +event_name+.
    # @return [Integer] number of handlers removed
    def unsubscribe_all(event_name)
      @mutex.synchronize do
        removed = @subscribers.delete(event_name)
        count = removed ? removed.size : 0
        @logger&.debug("EventBus: Unsubscribed all #{count} handlers from '#{event_name}'")
        count
      end
    end

    # Deliver +data+ to every handler of +event_name+, synchronously.
    # Handler exceptions are logged and do not stop delivery.
    # @return [Integer] number of handlers invoked
    def publish(event_name, data = nil)
      # Snapshot the handler list and record the event while holding the lock;
      # handlers themselves run unlocked to avoid deadlocks.
      snapshot = @mutex.synchronize do
        log_event(event_name, data)
        @subscribers[event_name].values.dup
      end

      @logger&.debug("EventBus: Publishing '#{event_name}' to #{snapshot.size} handler(s)")

      snapshot.each do |handler|
        handler.call(data)
      rescue => e
        @logger&.error("EventBus: Error in handler for '#{event_name}': #{e.message}")
        @logger&.error(e.backtrace.join("\n")) if @logger
      end

      snapshot.size
    end

    # Publish on a background thread and return immediately.
    # @return [Thread]
    def publish_async(event_name, data = nil)
      Thread.new do
        publish(event_name, data)
      rescue => e
        @logger&.error("EventBus: Error in async publish of '#{event_name}': #{e.message}")
      end
    end

    # @return [Array<String>] handler ids registered for +event_name+
    def subscribers_for(event_name)
      @mutex.synchronize { @subscribers[event_name].keys }
    end

    # @return [Array<String>] event names that currently have handlers
    def event_names
      @mutex.synchronize do
        @subscribers.reject { |_name, handlers| handlers.empty? }.keys
      end
    end

    # @return [Integer] handler count for +event_name+
    def subscriber_count(event_name)
      @mutex.synchronize { @subscribers[event_name].size }
    end

    # Drop every subscription (useful for testing).
    def clear
      @mutex.synchronize do
        @subscribers.clear
        @logger&.debug("EventBus: Cleared all subscribers")
      end
    end

    # @return [Array<Hash>] the last +count+ logged events
    def recent_events(count = 10)
      @mutex.synchronize { @event_log.last(count) }
    end

    # Enable/disable event logging (enabled by default).
    def log_events=(enabled)
      @log_events = enabled
    end

    def log_events?
      @log_events != false # Default to true
    end

    private

    # Monotonically increasing handler id, e.g. "handler_1".
    def generate_handler_id
      @next_id += 1
      "handler_#{@next_id}"
    end

    # Append the event to the bounded log (caller must hold the mutex).
    def log_event(event_name, data)
      return unless log_events?

      @event_log << {
        name: event_name,
        data: data,
        timestamp: Time.now.to_i,
        subscriber_count: @subscribers[event_name].size
      }

      # Keep only last 100 events
      @event_log.shift if @event_log.size > 100
    end

    # Singleton pattern (optional, can also instantiate directly)
    class << self
      def instance
        @instance ||= new
      end

      def reset_instance!
        @instance = nil
      end
    end
  end
end
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
require 'logger'
require 'fileutils'

module Heathrow
  # Logger - Structured logging for Heathrow
  #
  # Usage:
  #   log = Heathrow::Logger.instance
  #   log.info("Application started")
  #   log.error("Failed to connect", error: e, source_id: 123)
  #   log.debug("Processing message", message_id: 456)
  #
  # Log Levels: DEBUG < INFO < WARN < ERROR < FATAL
  #
  # Output goes to a daily-rotated file (default ~/.heathrow/heathrow.log).
  # A context hash passed to any log method is appended to the message; an
  # :error key holding an Exception is expanded into class/message/backtrace.
  class Logger
    # Symbolic level name => stdlib ::Logger severity constant
    LEVELS = {
      debug: ::Logger::DEBUG,
      info: ::Logger::INFO,
      warn: ::Logger::WARN,
      error: ::Logger::ERROR,
      fatal: ::Logger::FATAL
    }.freeze

    attr_reader :logger, :log_file

    # @param log_file [String, nil] log file path; nil uses the default
    # @param level [Symbol] one of LEVELS' keys (unknown symbols fall back to :info)
    def initialize(log_file = nil, level: :info)
      @log_file = log_file || default_log_file
      ensure_log_directory
      @logger = ::Logger.new(@log_file, 'daily') # daily file rotation
      @logger.level = LEVELS[level] || ::Logger::INFO
      @logger.formatter = method(:format_message)
    end

    # Level-specific helpers; each accepts an optional context hash.
    def debug(message, context = {})
      log(:debug, message, context)
    end

    def info(message, context = {})
      log(:info, message, context)
    end

    def warn(message, context = {})
      log(:warn, message, context)
    end

    def error(message, context = {})
      log(:error, message, context)
    end

    def fatal(message, context = {})
      log(:fatal, message, context)
    end

    # Generic log method.
    # @param level [Symbol] severity (see LEVELS)
    # @param message [String] log message
    # @param context [Hash] extra data appended to the line; an :error key
    #   holding an Exception is expanded into error_class/error_message/backtrace
    def log(level, message, context = {})
      return unless @logger

      # BUG FIX: operate on a copy — the original deleted :error from (and
      # added keys to) the caller's hash, mutating the argument in place.
      context = context.dup

      # Extract error if present
      if context[:error].is_a?(Exception)
        error = context.delete(:error)
        context[:error_class] = error.class.name
        context[:error_message] = error.message
        context[:backtrace] = error.backtrace&.first(5)
      end

      @logger.send(level, message) do
        context.empty? ? message : "#{message} #{context.inspect}"
      end
    end

    # Change log level (unknown symbols fall back to INFO)
    def level=(level)
      @logger.level = LEVELS[level] || ::Logger::INFO
    end

    # Get current log level as symbol
    def level
      LEVELS.key(@logger.level) || :info
    end

    # Close logger (safe to call when already closed)
    def close
      @logger.close if @logger
    end

    private

    # Default location: ~/.heathrow/heathrow.log
    def default_log_file
      home = Dir.home
      File.join(home, '.heathrow', 'heathrow.log')
    end

    # Create the log directory if missing so ::Logger.new can open the file.
    def ensure_log_directory
      dir = File.dirname(@log_file)
      FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
    end

    # Formatter: "[YYYY-mm-dd HH:MM:SS] LEVEL message\n"
    def format_message(severity, datetime, progname, msg)
      timestamp = datetime.strftime('%Y-%m-%d %H:%M:%S')
      "[#{timestamp}] #{severity.ljust(5)} #{msg}\n"
    end

    # Singleton pattern
    class << self
      def instance
        @instance ||= new
      end

      def reset_instance!
        @instance&.close
        @instance = nil
      end

      # Configure the singleton instance (replaces any existing one)
      def configure(log_file: nil, level: :info)
        reset_instance!
        @instance = new(log_file, level: level)
      end
    end
  end
end
|