tsikol 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +22 -0
- data/CONTRIBUTING.md +84 -0
- data/LICENSE +21 -0
- data/README.md +579 -0
- data/Rakefile +12 -0
- data/docs/README.md +69 -0
- data/docs/api/middleware.md +721 -0
- data/docs/api/prompt.md +858 -0
- data/docs/api/resource.md +651 -0
- data/docs/api/server.md +509 -0
- data/docs/api/test-helpers.md +591 -0
- data/docs/api/tool.md +527 -0
- data/docs/cookbook/authentication.md +651 -0
- data/docs/cookbook/caching.md +877 -0
- data/docs/cookbook/dynamic-tools.md +970 -0
- data/docs/cookbook/error-handling.md +887 -0
- data/docs/cookbook/logging.md +1044 -0
- data/docs/cookbook/rate-limiting.md +717 -0
- data/docs/examples/code-assistant.md +922 -0
- data/docs/examples/complete-server.md +726 -0
- data/docs/examples/database-manager.md +1198 -0
- data/docs/examples/devops-tools.md +1382 -0
- data/docs/examples/echo-server.md +501 -0
- data/docs/examples/weather-service.md +822 -0
- data/docs/guides/completion.md +472 -0
- data/docs/guides/getting-started.md +462 -0
- data/docs/guides/middleware.md +823 -0
- data/docs/guides/project-structure.md +434 -0
- data/docs/guides/prompts.md +920 -0
- data/docs/guides/resources.md +720 -0
- data/docs/guides/sampling.md +804 -0
- data/docs/guides/testing.md +863 -0
- data/docs/guides/tools.md +627 -0
- data/examples/README.md +92 -0
- data/examples/advanced_features.rb +129 -0
- data/examples/basic-migrated/app/prompts/weather_chat.rb +44 -0
- data/examples/basic-migrated/app/resources/weather_alerts.rb +18 -0
- data/examples/basic-migrated/app/tools/get_current_weather.rb +34 -0
- data/examples/basic-migrated/app/tools/get_forecast.rb +30 -0
- data/examples/basic-migrated/app/tools/get_weather_by_coords.rb +48 -0
- data/examples/basic-migrated/server.rb +25 -0
- data/examples/basic.rb +73 -0
- data/examples/full_featured.rb +175 -0
- data/examples/middleware_example.rb +112 -0
- data/examples/sampling_example.rb +104 -0
- data/examples/weather-service/app/prompts/weather/chat.rb +90 -0
- data/examples/weather-service/app/resources/weather/alerts.rb +59 -0
- data/examples/weather-service/app/tools/weather/get_current.rb +82 -0
- data/examples/weather-service/app/tools/weather/get_forecast.rb +90 -0
- data/examples/weather-service/server.rb +28 -0
- data/exe/tsikol +6 -0
- data/lib/tsikol/cli/templates/Gemfile.erb +10 -0
- data/lib/tsikol/cli/templates/README.md.erb +38 -0
- data/lib/tsikol/cli/templates/gitignore.erb +49 -0
- data/lib/tsikol/cli/templates/prompt.rb.erb +53 -0
- data/lib/tsikol/cli/templates/resource.rb.erb +29 -0
- data/lib/tsikol/cli/templates/server.rb.erb +24 -0
- data/lib/tsikol/cli/templates/tool.rb.erb +60 -0
- data/lib/tsikol/cli.rb +203 -0
- data/lib/tsikol/error_handler.rb +141 -0
- data/lib/tsikol/health.rb +198 -0
- data/lib/tsikol/http_transport.rb +72 -0
- data/lib/tsikol/lifecycle.rb +149 -0
- data/lib/tsikol/middleware.rb +168 -0
- data/lib/tsikol/prompt.rb +101 -0
- data/lib/tsikol/resource.rb +53 -0
- data/lib/tsikol/router.rb +190 -0
- data/lib/tsikol/server.rb +660 -0
- data/lib/tsikol/stdio_transport.rb +108 -0
- data/lib/tsikol/test_helpers.rb +261 -0
- data/lib/tsikol/tool.rb +111 -0
- data/lib/tsikol/version.rb +5 -0
- data/lib/tsikol.rb +72 -0
- metadata +219 -0
@@ -0,0 +1,1198 @@
|
|
1
|
+
# Database Manager Example
|
2
|
+
|
3
|
+
A comprehensive MCP server for database operations including queries, migrations, backups, and monitoring across multiple database systems.
|
4
|
+
|
5
|
+
## Overview
|
6
|
+
|
7
|
+
This example demonstrates:
|
8
|
+
- Multi-database support (PostgreSQL, MySQL, SQLite)
|
9
|
+
- Query execution and optimization
|
10
|
+
- Database migrations
|
11
|
+
- Backup and restore operations
|
12
|
+
- Performance monitoring
|
13
|
+
- Connection pooling
|
14
|
+
- Transaction management
|
15
|
+
|
16
|
+
## Implementation
|
17
|
+
|
18
|
+
### server.rb
|
19
|
+
|
20
|
+
```ruby
|
21
|
+
#!/usr/bin/env ruby
|
22
|
+
|
23
|
+
require 'tsikol'
|
24
|
+
require 'sequel'
|
25
|
+
require 'pg'
|
26
|
+
require 'mysql2'
|
27
|
+
require 'sqlite3'
|
28
|
+
|
29
|
+
# Database connection manager
|
30
|
+
class DatabaseManager
|
31
|
+
attr_reader :connections
|
32
|
+
|
33
|
+
def initialize
|
34
|
+
@connections = {}
|
35
|
+
@pools = {}
|
36
|
+
end
|
37
|
+
|
38
|
+
def add_connection(name, config)
|
39
|
+
adapter = config[:adapter] || 'postgres'
|
40
|
+
|
41
|
+
connection_string = case adapter
|
42
|
+
when 'postgres', 'postgresql'
|
43
|
+
"postgres://#{config[:user]}:#{config[:password]}@#{config[:host]}:#{config[:port]}/#{config[:database]}"
|
44
|
+
when 'mysql', 'mysql2'
|
45
|
+
"mysql2://#{config[:user]}:#{config[:password]}@#{config[:host]}:#{config[:port]}/#{config[:database]}"
|
46
|
+
when 'sqlite'
|
47
|
+
"sqlite://#{config[:database]}"
|
48
|
+
end
|
49
|
+
|
50
|
+
@connections[name] = Sequel.connect(
|
51
|
+
connection_string,
|
52
|
+
max_connections: config[:pool_size] || 5,
|
53
|
+
pool_timeout: config[:pool_timeout] || 5
|
54
|
+
)
|
55
|
+
|
56
|
+
# Test connection
|
57
|
+
@connections[name].test_connection
|
58
|
+
|
59
|
+
log :info, "Database connected", name: name, adapter: adapter
|
60
|
+
end
|
61
|
+
|
62
|
+
def get_connection(name)
|
63
|
+
@connections[name] || raise("Unknown database: #{name}")
|
64
|
+
end
|
65
|
+
|
66
|
+
def close_all
|
67
|
+
@connections.each do |name, db|
|
68
|
+
db.disconnect
|
69
|
+
log :info, "Database disconnected", name: name
|
70
|
+
end
|
71
|
+
@connections.clear
|
72
|
+
end
|
73
|
+
end
|
74
|
+
|
75
|
+
# Query execution tool
|
76
|
+
class QueryTool < Tsikol::Tool
|
77
|
+
name "execute_query"
|
78
|
+
description "Execute SQL queries with safety checks"
|
79
|
+
|
80
|
+
parameter :database do
|
81
|
+
type :string
|
82
|
+
required
|
83
|
+
description "Database connection name"
|
84
|
+
|
85
|
+
complete do |partial|
|
86
|
+
@database_manager.connections.keys.select { |k| k.start_with?(partial) }
|
87
|
+
end
|
88
|
+
end
|
89
|
+
|
90
|
+
parameter :query do
|
91
|
+
type :string
|
92
|
+
required
|
93
|
+
description "SQL query to execute"
|
94
|
+
end
|
95
|
+
|
96
|
+
parameter :params do
|
97
|
+
type :array
|
98
|
+
optional
|
99
|
+
default []
|
100
|
+
description "Query parameters for prepared statements"
|
101
|
+
end
|
102
|
+
|
103
|
+
parameter :limit do
|
104
|
+
type :number
|
105
|
+
optional
|
106
|
+
default 1000
|
107
|
+
description "Maximum number of rows to return"
|
108
|
+
end
|
109
|
+
|
110
|
+
def execute(database:, query:, params: [], limit: 1000)
|
111
|
+
db = @database_manager.get_connection(database)
|
112
|
+
|
113
|
+
# Safety checks
|
114
|
+
validate_query!(query)
|
115
|
+
|
116
|
+
# Add limit if SELECT and no limit specified
|
117
|
+
query = add_limit_if_needed(query, limit)
|
118
|
+
|
119
|
+
start_time = Time.now
|
120
|
+
|
121
|
+
begin
|
122
|
+
if write_query?(query)
|
123
|
+
# Execute write query in transaction
|
124
|
+
result = db.transaction do
|
125
|
+
if params.any?
|
126
|
+
db[query, *params].all
|
127
|
+
else
|
128
|
+
db.run(query)
|
129
|
+
end
|
130
|
+
end
|
131
|
+
|
132
|
+
rows_affected = result.is_a?(Integer) ? result : 0
|
133
|
+
format_write_result(query, rows_affected, Time.now - start_time)
|
134
|
+
else
|
135
|
+
# Execute read query
|
136
|
+
dataset = params.any? ? db[query, *params] : db[query]
|
137
|
+
rows = dataset.limit(limit).all
|
138
|
+
|
139
|
+
format_query_result(rows, Time.now - start_time)
|
140
|
+
end
|
141
|
+
rescue Sequel::DatabaseError => e
|
142
|
+
log :error, "Query failed", error: e.message, query: query
|
143
|
+
"Query failed: #{e.message}"
|
144
|
+
end
|
145
|
+
end
|
146
|
+
|
147
|
+
private
|
148
|
+
|
149
|
+
def validate_query!(query)
|
150
|
+
# Basic SQL injection prevention
|
151
|
+
dangerous_patterns = [
|
152
|
+
/;\s*DROP\s+/i,
|
153
|
+
/;\s*DELETE\s+FROM\s+/i,
|
154
|
+
/;\s*TRUNCATE\s+/i,
|
155
|
+
/;\s*ALTER\s+/i
|
156
|
+
]
|
157
|
+
|
158
|
+
dangerous_patterns.each do |pattern|
|
159
|
+
if query.match?(pattern)
|
160
|
+
raise "Potentially dangerous query pattern detected"
|
161
|
+
end
|
162
|
+
end
|
163
|
+
end
|
164
|
+
|
165
|
+
def write_query?(query)
|
166
|
+
query.match?(/^\s*(INSERT|UPDATE|DELETE|CREATE|DROP|ALTER|TRUNCATE)/i)
|
167
|
+
end
|
168
|
+
|
169
|
+
def add_limit_if_needed(query, limit)
|
170
|
+
if query.match?(/^\s*SELECT/i) && !query.match?(/LIMIT\s+\d+/i)
|
171
|
+
"#{query.strip} LIMIT #{limit}"
|
172
|
+
else
|
173
|
+
query
|
174
|
+
end
|
175
|
+
end
|
176
|
+
|
177
|
+
def format_query_result(rows, duration)
|
178
|
+
return "No results found. (#{(duration * 1000).round(2)}ms)" if rows.empty?
|
179
|
+
|
180
|
+
# Get column names
|
181
|
+
columns = rows.first.keys
|
182
|
+
|
183
|
+
# Calculate column widths
|
184
|
+
widths = columns.map do |col|
|
185
|
+
[col.to_s.length, *rows.map { |row| row[col].to_s.length }].max
|
186
|
+
end
|
187
|
+
|
188
|
+
# Build table
|
189
|
+
lines = []
|
190
|
+
|
191
|
+
# Header
|
192
|
+
header = columns.zip(widths).map { |col, width| col.to_s.ljust(width) }.join(" | ")
|
193
|
+
lines << header
|
194
|
+
lines << "-" * header.length
|
195
|
+
|
196
|
+
# Rows
|
197
|
+
rows.each do |row|
|
198
|
+
line = columns.zip(widths).map { |col, width| row[col].to_s.ljust(width) }.join(" | ")
|
199
|
+
lines << line
|
200
|
+
end
|
201
|
+
|
202
|
+
lines << ""
|
203
|
+
lines << "#{rows.count} rows returned (#{(duration * 1000).round(2)}ms)"
|
204
|
+
|
205
|
+
lines.join("\n")
|
206
|
+
end
|
207
|
+
|
208
|
+
def format_write_result(query, rows_affected, duration)
|
209
|
+
operation = query.split.first.upcase
|
210
|
+
"#{operation} completed: #{rows_affected} rows affected (#{(duration * 1000).round(2)}ms)"
|
211
|
+
end
|
212
|
+
end
|
213
|
+
|
214
|
+
# Schema information tool
|
215
|
+
class SchemaTool < Tsikol::Tool
|
216
|
+
name "show_schema"
|
217
|
+
description "Display database schema information"
|
218
|
+
|
219
|
+
parameter :database do
|
220
|
+
type :string
|
221
|
+
required
|
222
|
+
description "Database connection name"
|
223
|
+
end
|
224
|
+
|
225
|
+
parameter :table do
|
226
|
+
type :string
|
227
|
+
optional
|
228
|
+
description "Specific table to inspect"
|
229
|
+
|
230
|
+
complete do |partial|
|
231
|
+
db = @database_manager.get_connection(params[:database])
|
232
|
+
db.tables.select { |t| t.to_s.start_with?(partial) }.map(&:to_s)
|
233
|
+
end
|
234
|
+
end
|
235
|
+
|
236
|
+
def execute(database:, table: nil)
|
237
|
+
db = @database_manager.get_connection(database)
|
238
|
+
|
239
|
+
if table
|
240
|
+
show_table_schema(db, table.to_sym)
|
241
|
+
else
|
242
|
+
show_database_schema(db)
|
243
|
+
end
|
244
|
+
end
|
245
|
+
|
246
|
+
private
|
247
|
+
|
248
|
+
def show_database_schema(db)
|
249
|
+
tables = db.tables
|
250
|
+
|
251
|
+
lines = ["Database Schema", "=" * 40, ""]
|
252
|
+
lines << "Tables (#{tables.count}):"
|
253
|
+
|
254
|
+
tables.each do |table|
|
255
|
+
row_count = db[table].count
|
256
|
+
lines << " #{table} (#{row_count} rows)"
|
257
|
+
end
|
258
|
+
|
259
|
+
# Show views if any
|
260
|
+
if db.respond_to?(:views)
|
261
|
+
views = db.views
|
262
|
+
if views.any?
|
263
|
+
lines << ""
|
264
|
+
lines << "Views (#{views.count}):"
|
265
|
+
views.each { |view| lines << " #{view}" }
|
266
|
+
end
|
267
|
+
end
|
268
|
+
|
269
|
+
lines.join("\n")
|
270
|
+
end
|
271
|
+
|
272
|
+
def show_table_schema(db, table)
|
273
|
+
unless db.table_exists?(table)
|
274
|
+
return "Table '#{table}' does not exist"
|
275
|
+
end
|
276
|
+
|
277
|
+
schema = db.schema(table)
|
278
|
+
indexes = db.indexes(table)
|
279
|
+
foreign_keys = db.foreign_key_list(table)
|
280
|
+
|
281
|
+
lines = ["Table: #{table}", "=" * 40, ""]
|
282
|
+
|
283
|
+
# Columns
|
284
|
+
lines << "Columns:"
|
285
|
+
schema.each do |column, info|
|
286
|
+
type = info[:db_type]
|
287
|
+
nullable = info[:allow_null] ? "NULL" : "NOT NULL"
|
288
|
+
default = info[:default] ? " DEFAULT #{info[:default]}" : ""
|
289
|
+
primary = info[:primary_key] ? " PRIMARY KEY" : ""
|
290
|
+
|
291
|
+
lines << " #{column}: #{type} #{nullable}#{default}#{primary}"
|
292
|
+
end
|
293
|
+
|
294
|
+
# Indexes
|
295
|
+
if indexes.any?
|
296
|
+
lines << ""
|
297
|
+
lines << "Indexes:"
|
298
|
+
indexes.each do |name, info|
|
299
|
+
columns = info[:columns].join(", ")
|
300
|
+
unique = info[:unique] ? "UNIQUE " : ""
|
301
|
+
lines << " #{name}: #{unique}(#{columns})"
|
302
|
+
end
|
303
|
+
end
|
304
|
+
|
305
|
+
# Foreign Keys
|
306
|
+
if foreign_keys.any?
|
307
|
+
lines << ""
|
308
|
+
lines << "Foreign Keys:"
|
309
|
+
foreign_keys.each do |fk|
|
310
|
+
lines << " #{fk[:columns].join(", ")} -> #{fk[:table]}.#{fk[:key].join(", ")}"
|
311
|
+
end
|
312
|
+
end
|
313
|
+
|
314
|
+
lines.join("\n")
|
315
|
+
end
|
316
|
+
end
|
317
|
+
|
318
|
+
# Migration tool
|
319
|
+
class MigrationTool < Tsikol::Tool
|
320
|
+
name "run_migration"
|
321
|
+
description "Run database migrations"
|
322
|
+
|
323
|
+
parameter :database do
|
324
|
+
type :string
|
325
|
+
required
|
326
|
+
description "Database connection name"
|
327
|
+
end
|
328
|
+
|
329
|
+
parameter :direction do
|
330
|
+
type :string
|
331
|
+
required
|
332
|
+
enum ["up", "down"]
|
333
|
+
description "Migration direction"
|
334
|
+
end
|
335
|
+
|
336
|
+
parameter :version do
|
337
|
+
type :number
|
338
|
+
optional
|
339
|
+
description "Specific version to migrate to"
|
340
|
+
end
|
341
|
+
|
342
|
+
parameter :migration_dir do
|
343
|
+
type :string
|
344
|
+
optional
|
345
|
+
default "./migrations"
|
346
|
+
description "Directory containing migration files"
|
347
|
+
end
|
348
|
+
|
349
|
+
def execute(database:, direction:, version: nil, migration_dir: "./migrations")
|
350
|
+
db = @database_manager.get_connection(database)
|
351
|
+
|
352
|
+
# Create migrations table if needed
|
353
|
+
create_migrations_table(db)
|
354
|
+
|
355
|
+
# Get migration files
|
356
|
+
migrations = load_migrations(migration_dir)
|
357
|
+
|
358
|
+
if direction == "up"
|
359
|
+
run_migrations_up(db, migrations, version)
|
360
|
+
else
|
361
|
+
run_migrations_down(db, migrations, version)
|
362
|
+
end
|
363
|
+
end
|
364
|
+
|
365
|
+
private
|
366
|
+
|
367
|
+
def create_migrations_table(db)
|
368
|
+
db.create_table?(:schema_migrations) do
|
369
|
+
String :version, primary_key: true
|
370
|
+
DateTime :applied_at
|
371
|
+
end
|
372
|
+
end
|
373
|
+
|
374
|
+
def load_migrations(dir)
|
375
|
+
return [] unless Dir.exist?(dir)
|
376
|
+
|
377
|
+
Dir.glob("#{dir}/*.rb").map do |file|
|
378
|
+
version = File.basename(file).split('_').first.to_i
|
379
|
+
{
|
380
|
+
version: version,
|
381
|
+
file: file,
|
382
|
+
name: File.basename(file, '.rb')
|
383
|
+
}
|
384
|
+
end.sort_by { |m| m[:version] }
|
385
|
+
end
|
386
|
+
|
387
|
+
def run_migrations_up(db, migrations, target_version)
|
388
|
+
applied = db[:schema_migrations].select_map(:version).map(&:to_i)
|
389
|
+
|
390
|
+
to_apply = migrations.reject { |m| applied.include?(m[:version]) }
|
391
|
+
to_apply = to_apply.select { |m| m[:version] <= target_version } if target_version
|
392
|
+
|
393
|
+
if to_apply.empty?
|
394
|
+
return "No migrations to apply"
|
395
|
+
end
|
396
|
+
|
397
|
+
results = []
|
398
|
+
|
399
|
+
to_apply.each do |migration|
|
400
|
+
begin
|
401
|
+
# Load and run migration
|
402
|
+
load migration[:file]
|
403
|
+
|
404
|
+
migration_class = Object.const_get("Migration#{migration[:version]}")
|
405
|
+
migration_instance = migration_class.new(db)
|
406
|
+
|
407
|
+
db.transaction do
|
408
|
+
migration_instance.up
|
409
|
+
db[:schema_migrations].insert(
|
410
|
+
version: migration[:version].to_s,
|
411
|
+
applied_at: Time.now
|
412
|
+
)
|
413
|
+
end
|
414
|
+
|
415
|
+
results << "Applied: #{migration[:name]}"
|
416
|
+
rescue => e
|
417
|
+
results << "Failed: #{migration[:name]} - #{e.message}"
|
418
|
+
break
|
419
|
+
end
|
420
|
+
end
|
421
|
+
|
422
|
+
results.join("\n")
|
423
|
+
end
|
424
|
+
end
|
425
|
+
|
426
|
+
# Backup tool
|
427
|
+
class BackupTool < Tsikol::Tool
|
428
|
+
name "backup_database"
|
429
|
+
description "Create database backups"
|
430
|
+
|
431
|
+
parameter :database do
|
432
|
+
type :string
|
433
|
+
required
|
434
|
+
description "Database to backup"
|
435
|
+
end
|
436
|
+
|
437
|
+
parameter :format do
|
438
|
+
type :string
|
439
|
+
optional
|
440
|
+
default "sql"
|
441
|
+
enum ["sql", "csv", "json"]
|
442
|
+
description "Backup format"
|
443
|
+
end
|
444
|
+
|
445
|
+
parameter :tables do
|
446
|
+
type :array
|
447
|
+
optional
|
448
|
+
description "Specific tables to backup (all if not specified)"
|
449
|
+
end
|
450
|
+
|
451
|
+
parameter :output_dir do
|
452
|
+
type :string
|
453
|
+
optional
|
454
|
+
default "./backups"
|
455
|
+
description "Output directory for backup files"
|
456
|
+
end
|
457
|
+
|
458
|
+
def execute(database:, format: "sql", tables: nil, output_dir: "./backups")
|
459
|
+
db = @database_manager.get_connection(database)
|
460
|
+
|
461
|
+
# Create backup directory
|
462
|
+
FileUtils.mkdir_p(output_dir)
|
463
|
+
|
464
|
+
# Generate backup filename
|
465
|
+
timestamp = Time.now.strftime("%Y%m%d_%H%M%S")
|
466
|
+
filename = "#{database}_#{timestamp}.#{format}"
|
467
|
+
filepath = File.join(output_dir, filename)
|
468
|
+
|
469
|
+
# Get tables to backup
|
470
|
+
tables_to_backup = tables || db.tables.map(&:to_s)
|
471
|
+
|
472
|
+
case format
|
473
|
+
when "sql"
|
474
|
+
backup_to_sql(db, filepath, tables_to_backup)
|
475
|
+
when "csv"
|
476
|
+
backup_to_csv(db, output_dir, timestamp, tables_to_backup)
|
477
|
+
when "json"
|
478
|
+
backup_to_json(db, filepath, tables_to_backup)
|
479
|
+
end
|
480
|
+
end
|
481
|
+
|
482
|
+
private
|
483
|
+
|
484
|
+
def backup_to_sql(db, filepath, tables)
|
485
|
+
File.open(filepath, 'w') do |file|
|
486
|
+
file.puts "-- Database backup generated at #{Time.now}"
|
487
|
+
file.puts "-- Tables: #{tables.join(', ')}"
|
488
|
+
file.puts
|
489
|
+
|
490
|
+
tables.each do |table|
|
491
|
+
table_sym = table.to_sym
|
492
|
+
|
493
|
+
# Table structure
|
494
|
+
file.puts "-- Table: #{table}"
|
495
|
+
file.puts generate_create_table(db, table_sym)
|
496
|
+
file.puts
|
497
|
+
|
498
|
+
# Table data
|
499
|
+
db[table_sym].each do |row|
|
500
|
+
columns = row.keys.join(", ")
|
501
|
+
values = row.values.map { |v| quote_value(v) }.join(", ")
|
502
|
+
file.puts "INSERT INTO #{table} (#{columns}) VALUES (#{values});"
|
503
|
+
end
|
504
|
+
file.puts
|
505
|
+
end
|
506
|
+
end
|
507
|
+
|
508
|
+
"Backup created: #{filepath}"
|
509
|
+
end
|
510
|
+
|
511
|
+
def quote_value(value)
|
512
|
+
case value
|
513
|
+
when nil then "NULL"
|
514
|
+
when String then "'#{value.gsub("'", "''")}'"
|
515
|
+
when Time, DateTime then "'#{value.iso8601}'"
|
516
|
+
else value.to_s
|
517
|
+
end
|
518
|
+
end
|
519
|
+
end
|
520
|
+
|
521
|
+
# Performance monitoring resource
|
522
|
+
class PerformanceResource < Tsikol::Resource
|
523
|
+
uri "database/:name/performance"
|
524
|
+
description "Database performance metrics"
|
525
|
+
|
526
|
+
def read
|
527
|
+
database_name = params[:name]
|
528
|
+
db = @database_manager.get_connection(database_name)
|
529
|
+
|
530
|
+
metrics = {
|
531
|
+
database: database_name,
|
532
|
+
timestamp: Time.now.iso8601,
|
533
|
+
connection_pool: get_pool_stats(db),
|
534
|
+
query_stats: get_query_stats(db),
|
535
|
+
table_sizes: get_table_sizes(db),
|
536
|
+
slow_queries: get_slow_queries(db)
|
537
|
+
}
|
538
|
+
|
539
|
+
metrics.to_json
|
540
|
+
end
|
541
|
+
|
542
|
+
private
|
543
|
+
|
544
|
+
def get_pool_stats(db)
|
545
|
+
pool = db.pool
|
546
|
+
{
|
547
|
+
size: pool.size,
|
548
|
+
available: pool.available_connections,
|
549
|
+
allocated: pool.allocated,
|
550
|
+
waiting: pool.instance_variable_get(:@waiter)&.length || 0
|
551
|
+
}
|
552
|
+
end
|
553
|
+
|
554
|
+
def get_query_stats(db)
|
555
|
+
# This would need database-specific implementation
|
556
|
+
{
|
557
|
+
total_queries: @query_count || 0,
|
558
|
+
average_duration_ms: @average_duration || 0,
|
559
|
+
cache_hit_rate: 0.85 # Mock value
|
560
|
+
}
|
561
|
+
end
|
562
|
+
|
563
|
+
def get_table_sizes(db)
|
564
|
+
db.tables.map do |table|
|
565
|
+
{
|
566
|
+
name: table,
|
567
|
+
row_count: db[table].count,
|
568
|
+
estimated_size: estimate_table_size(db, table)
|
569
|
+
}
|
570
|
+
end.sort_by { |t| -t[:row_count] }.first(10)
|
571
|
+
end
|
572
|
+
|
573
|
+
def estimate_table_size(db, table)
|
574
|
+
# Simple estimation based on row count and column count
|
575
|
+
row_count = db[table].count
|
576
|
+
col_count = db.schema(table).count
|
577
|
+
|
578
|
+
# Assume average 100 bytes per cell
|
579
|
+
bytes = row_count * col_count * 100
|
580
|
+
|
581
|
+
if bytes < 1024
|
582
|
+
"#{bytes} B"
|
583
|
+
elsif bytes < 1024 * 1024
|
584
|
+
"#{(bytes / 1024.0).round(2)} KB"
|
585
|
+
elsif bytes < 1024 * 1024 * 1024
|
586
|
+
"#{(bytes / (1024.0 * 1024)).round(2)} MB"
|
587
|
+
else
|
588
|
+
"#{(bytes / (1024.0 * 1024 * 1024)).round(2)} GB"
|
589
|
+
end
|
590
|
+
end
|
591
|
+
end
|
592
|
+
|
593
|
+
# Database health check resource
|
594
|
+
class HealthCheckResource < Tsikol::Resource
|
595
|
+
uri "database/:name/health"
|
596
|
+
description "Database health status"
|
597
|
+
|
598
|
+
def read
|
599
|
+
database_name = params[:name]
|
600
|
+
|
601
|
+
health = {
|
602
|
+
database: database_name,
|
603
|
+
status: "unknown",
|
604
|
+
checks: {}
|
605
|
+
}
|
606
|
+
|
607
|
+
begin
|
608
|
+
db = @database_manager.get_connection(database_name)
|
609
|
+
|
610
|
+
# Connection check
|
611
|
+
health[:checks][:connection] = check_connection(db)
|
612
|
+
|
613
|
+
# Disk space check
|
614
|
+
health[:checks][:disk_space] = check_disk_space
|
615
|
+
|
616
|
+
# Replication check (if applicable)
|
617
|
+
health[:checks][:replication] = check_replication(db)
|
618
|
+
|
619
|
+
# Overall status
|
620
|
+
health[:status] = determine_overall_status(health[:checks])
|
621
|
+
|
622
|
+
rescue => e
|
623
|
+
health[:status] = "error"
|
624
|
+
health[:error] = e.message
|
625
|
+
end
|
626
|
+
|
627
|
+
health.to_json
|
628
|
+
end
|
629
|
+
|
630
|
+
private
|
631
|
+
|
632
|
+
def check_connection(db)
|
633
|
+
start = Time.now
|
634
|
+
db.test_connection
|
635
|
+
latency = ((Time.now - start) * 1000).round(2)
|
636
|
+
|
637
|
+
{
|
638
|
+
status: latency < 100 ? "healthy" : "degraded",
|
639
|
+
latency_ms: latency
|
640
|
+
}
|
641
|
+
end
|
642
|
+
|
643
|
+
def check_disk_space
|
644
|
+
# Mock implementation
|
645
|
+
{
|
646
|
+
status: "healthy",
|
647
|
+
free_space: "42.5 GB",
|
648
|
+
usage_percent: 58
|
649
|
+
}
|
650
|
+
end
|
651
|
+
|
652
|
+
def check_replication(db)
|
653
|
+
# Database-specific implementation needed
|
654
|
+
{
|
655
|
+
status: "healthy",
|
656
|
+
lag_seconds: 0.2
|
657
|
+
}
|
658
|
+
end
|
659
|
+
|
660
|
+
def determine_overall_status(checks)
|
661
|
+
statuses = checks.values.map { |c| c[:status] }
|
662
|
+
|
663
|
+
return "error" if statuses.include?("error")
|
664
|
+
return "degraded" if statuses.include?("degraded")
|
665
|
+
"healthy"
|
666
|
+
end
|
667
|
+
end
|
668
|
+
|
669
|
+
# Query optimizer prompt
|
670
|
+
class QueryOptimizerPrompt < Tsikol::Prompt
|
671
|
+
name "query_optimizer"
|
672
|
+
description "SQL query optimization assistant"
|
673
|
+
|
674
|
+
argument :database_type do
|
675
|
+
type :string
|
676
|
+
required
|
677
|
+
enum ["postgresql", "mysql", "sqlite"]
|
678
|
+
description "Type of database"
|
679
|
+
end
|
680
|
+
|
681
|
+
argument :focus do
|
682
|
+
type :string
|
683
|
+
optional
|
684
|
+
default "performance"
|
685
|
+
enum ["performance", "readability", "maintainability"]
|
686
|
+
description "Optimization focus"
|
687
|
+
end
|
688
|
+
|
689
|
+
def get_messages(database_type:, focus: "performance")
|
690
|
+
[
|
691
|
+
{
|
692
|
+
role: "system",
|
693
|
+
content: {
|
694
|
+
type: "text",
|
695
|
+
text: build_system_prompt(database_type, focus)
|
696
|
+
}
|
697
|
+
},
|
698
|
+
{
|
699
|
+
role: "user",
|
700
|
+
content: {
|
701
|
+
type: "text",
|
702
|
+
text: "Initialize query optimizer"
|
703
|
+
}
|
704
|
+
},
|
705
|
+
{
|
706
|
+
role: "assistant",
|
707
|
+
content: {
|
708
|
+
type: "text",
|
709
|
+
text: "I'm ready to help optimize your #{database_type} queries with a focus on #{focus}. I can analyze query execution plans, suggest indexes, rewrite queries for better performance, and explain optimization strategies. What query would you like me to help with?"
|
710
|
+
}
|
711
|
+
}
|
712
|
+
]
|
713
|
+
end
|
714
|
+
|
715
|
+
private
|
716
|
+
|
717
|
+
def build_system_prompt(database_type, focus)
|
718
|
+
<<~PROMPT
|
719
|
+
You are an expert SQL query optimizer specializing in #{database_type}.
|
720
|
+
|
721
|
+
Focus: #{focus}
|
722
|
+
|
723
|
+
Key capabilities:
|
724
|
+
1. Analyze and explain query execution plans
|
725
|
+
2. Suggest optimal indexes
|
726
|
+
3. Rewrite queries for better performance
|
727
|
+
4. Identify and fix common performance issues
|
728
|
+
5. Provide database-specific optimizations
|
729
|
+
|
730
|
+
Guidelines for #{database_type}:
|
731
|
+
#{database_specific_guidelines(database_type)}
|
732
|
+
|
733
|
+
Always consider:
|
734
|
+
- Index usage and selectivity
|
735
|
+
- Join order and methods
|
736
|
+
- Subquery optimization
|
737
|
+
- Proper use of database-specific features
|
738
|
+
- Query caching opportunities
|
739
|
+
- Lock contention and concurrency
|
740
|
+
PROMPT
|
741
|
+
end
|
742
|
+
|
743
|
+
def database_specific_guidelines(type)
|
744
|
+
case type
|
745
|
+
when "postgresql"
|
746
|
+
"- Use EXPLAIN ANALYZE for accurate timings\n- Consider partial indexes\n- Leverage JSONB for semi-structured data\n- Use CTEs wisely (materialized vs not)\n- Consider parallel query execution"
|
747
|
+
when "mysql"
|
748
|
+
"- Check storage engine implications\n- Use covering indexes\n- Be aware of query cache\n- Optimize for InnoDB\n- Consider partitioning for large tables"
|
749
|
+
when "sqlite"
|
750
|
+
"- Remember single-writer limitation\n- Use appropriate journal modes\n- Consider memory vs disk trade-offs\n- Optimize for embedded use cases\n- Use EXPLAIN QUERY PLAN"
|
751
|
+
end
|
752
|
+
end
|
753
|
+
end
|
754
|
+
|
755
|
+
# Initialize and start server
|
756
|
+
database_manager = DatabaseManager.new
|
757
|
+
|
758
|
+
# Configure databases from environment or config
|
759
|
+
if ENV['DATABASE_URL']
|
760
|
+
database_manager.add_connection('default', {
|
761
|
+
adapter: 'postgres',
|
762
|
+
url: ENV['DATABASE_URL']
|
763
|
+
})
|
764
|
+
end
|
765
|
+
|
766
|
+
Tsikol.start(
|
767
|
+
name: "database-manager",
|
768
|
+
version: "1.0.0",
|
769
|
+
description: "Comprehensive database management MCP server"
|
770
|
+
) do
|
771
|
+
# Enable capabilities
|
772
|
+
logging true
|
773
|
+
prompts true
|
774
|
+
completion true
|
775
|
+
|
776
|
+
# Share database manager
|
777
|
+
before_start do
|
778
|
+
@database_manager = database_manager
|
779
|
+
end
|
780
|
+
|
781
|
+
# Cleanup on stop
|
782
|
+
before_stop do
|
783
|
+
@database_manager.close_all
|
784
|
+
end
|
785
|
+
|
786
|
+
# Middleware
|
787
|
+
use Tsikol::LoggingMiddleware, level: :info
|
788
|
+
use Tsikol::AuthenticationMiddleware,
|
789
|
+
type: :bearer,
|
790
|
+
validator: ->(token) { validate_db_token(token) }
|
791
|
+
|
792
|
+
# Tools
|
793
|
+
tool QueryTool
|
794
|
+
tool SchemaTool
|
795
|
+
tool MigrationTool
|
796
|
+
tool BackupTool
|
797
|
+
|
798
|
+
# Resources
|
799
|
+
resource PerformanceResource
|
800
|
+
resource HealthCheckResource
|
801
|
+
|
802
|
+
# Prompts
|
803
|
+
prompt QueryOptimizerPrompt
|
804
|
+
|
805
|
+
# Add dynamic database configuration
|
806
|
+
tool "add_database" do
|
807
|
+
parameter :name do
|
808
|
+
type :string
|
809
|
+
required
|
810
|
+
description "Connection name"
|
811
|
+
end
|
812
|
+
|
813
|
+
parameter :config do
|
814
|
+
type :object
|
815
|
+
required
|
816
|
+
description "Database configuration"
|
817
|
+
end
|
818
|
+
|
819
|
+
def execute(name:, config:)
|
820
|
+
@database_manager.add_connection(name, config)
|
821
|
+
"Database '#{name}' connected successfully"
|
822
|
+
rescue => e
|
823
|
+
"Failed to connect: #{e.message}"
|
824
|
+
end
|
825
|
+
end
|
826
|
+
|
827
|
+
# List databases
|
828
|
+
resource "databases" do
|
829
|
+
description "List configured databases"
|
830
|
+
|
831
|
+
def read
|
832
|
+
databases = @database_manager.connections.map do |name, db|
|
833
|
+
{
|
834
|
+
name: name,
|
835
|
+
connected: db.test_connection rescue false,
|
836
|
+
tables: db.tables.count rescue 0
|
837
|
+
}
|
838
|
+
end
|
839
|
+
|
840
|
+
{ databases: databases }.to_json
|
841
|
+
end
|
842
|
+
end
|
843
|
+
|
844
|
+
def validate_db_token(token)
|
845
|
+
# In production, validate against user database
|
846
|
+
return { user_id: "admin", permissions: ["read", "write"] } if token == "admin-token"
|
847
|
+
return { user_id: "readonly", permissions: ["read"] } if token == "readonly-token"
|
848
|
+
nil
|
849
|
+
end
|
850
|
+
end
|
851
|
+
```
|
852
|
+
|
853
|
+
### Migration Example
|
854
|
+
|
855
|
+
```ruby
|
856
|
+
# migrations/001_create_users.rb
|
857
|
+
class Migration001 < Sequel::Migration
|
858
|
+
def up
|
859
|
+
create_table :users do
|
860
|
+
primary_key :id
|
861
|
+
String :email, null: false, unique: true
|
862
|
+
String :name
|
863
|
+
String :password_digest, null: false
|
864
|
+
DateTime :created_at, null: false
|
865
|
+
DateTime :updated_at, null: false
|
866
|
+
|
867
|
+
index :email
|
868
|
+
index :created_at
|
869
|
+
end
|
870
|
+
end
|
871
|
+
|
872
|
+
def down
|
873
|
+
drop_table :users
|
874
|
+
end
|
875
|
+
end
|
876
|
+
|
877
|
+
# migrations/002_create_posts.rb
|
878
|
+
class Migration002 < Sequel::Migration
|
879
|
+
def up
|
880
|
+
create_table :posts do
|
881
|
+
primary_key :id
|
882
|
+
foreign_key :user_id, :users, null: false, on_delete: :cascade
|
883
|
+
String :title, null: false
|
884
|
+
Text :content
|
885
|
+
String :status, default: 'draft'
|
886
|
+
DateTime :published_at
|
887
|
+
DateTime :created_at, null: false
|
888
|
+
DateTime :updated_at, null: false
|
889
|
+
|
890
|
+
index :user_id
|
891
|
+
index :status
|
892
|
+
index :published_at
|
893
|
+
index [:user_id, :status]
|
894
|
+
end
|
895
|
+
end
|
896
|
+
|
897
|
+
def down
|
898
|
+
drop_table :posts
|
899
|
+
end
|
900
|
+
end
|
901
|
+
```
|
902
|
+
|
903
|
+
### Configuration
|
904
|
+
|
905
|
+
```yaml
|
906
|
+
# config/databases.yml
|
907
|
+
development:
|
908
|
+
default:
|
909
|
+
adapter: postgresql
|
910
|
+
host: localhost
|
911
|
+
port: 5432
|
912
|
+
database: myapp_development
|
913
|
+
user: postgres
|
914
|
+
password: postgres
|
915
|
+
pool_size: 5
|
916
|
+
|
917
|
+
analytics:
|
918
|
+
adapter: postgresql
|
919
|
+
host: localhost
|
920
|
+
port: 5432
|
921
|
+
database: myapp_analytics
|
922
|
+
user: postgres
|
923
|
+
password: postgres
|
924
|
+
pool_size: 3
|
925
|
+
|
926
|
+
cache:
|
927
|
+
adapter: sqlite
|
928
|
+
database: ./db/cache.db
|
929
|
+
|
930
|
+
production:
|
931
|
+
default:
|
932
|
+
adapter: postgresql
|
933
|
+
url: ${DATABASE_URL}
|
934
|
+
pool_size: ${DB_POOL_SIZE:20}
|
935
|
+
|
936
|
+
read_replica:
|
937
|
+
adapter: postgresql
|
938
|
+
url: ${READ_REPLICA_URL}
|
939
|
+
pool_size: ${DB_POOL_SIZE:20}
|
940
|
+
```
|
941
|
+
|
942
|
+
### Testing
|
943
|
+
|
944
|
+
```ruby
|
945
|
+
require 'minitest/autorun'
|
946
|
+
require 'tsikol/test_helpers'
|
947
|
+
|
948
|
+
class DatabaseManagerTest < Minitest::Test
|
949
|
+
include Tsikol::TestHelpers
|
950
|
+
|
951
|
+
def setup
|
952
|
+
@server = create_test_server
|
953
|
+
@client = TestClient.new(@server)
|
954
|
+
setup_test_database
|
955
|
+
end
|
956
|
+
|
957
|
+
def teardown
|
958
|
+
cleanup_test_database
|
959
|
+
end
|
960
|
+
|
961
|
+
def test_query_execution
|
962
|
+
# Insert test data
|
963
|
+
@client.call_tool("execute_query", {
|
964
|
+
"database" => "test",
|
965
|
+
"query" => "INSERT INTO users (name, email) VALUES (?, ?)",
|
966
|
+
"params" => ["John Doe", "john@example.com"]
|
967
|
+
})
|
968
|
+
|
969
|
+
# Query data
|
970
|
+
response = @client.call_tool("execute_query", {
|
971
|
+
"database" => "test",
|
972
|
+
"query" => "SELECT * FROM users WHERE email = ?",
|
973
|
+
"params" => ["john@example.com"]
|
974
|
+
})
|
975
|
+
|
976
|
+
assert_successful_response(response)
|
977
|
+
result = response.dig(:result, :content, 0, :text)
|
978
|
+
assert_match(/John Doe/, result)
|
979
|
+
end
|
980
|
+
|
981
|
+
def test_schema_information
|
982
|
+
response = @client.call_tool("show_schema", {
|
983
|
+
"database" => "test",
|
984
|
+
"table" => "users"
|
985
|
+
})
|
986
|
+
|
987
|
+
assert_successful_response(response)
|
988
|
+
result = response.dig(:result, :content, 0, :text)
|
989
|
+
assert_match(/name: text/, result)
|
990
|
+
assert_match(/email: text/, result)
|
991
|
+
end
|
992
|
+
|
993
|
+
def test_backup_creation
|
994
|
+
response = @client.call_tool("backup_database", {
|
995
|
+
"database" => "test",
|
996
|
+
"format" => "sql"
|
997
|
+
})
|
998
|
+
|
999
|
+
assert_successful_response(response)
|
1000
|
+
result = response.dig(:result, :content, 0, :text)
|
1001
|
+
assert_match(/Backup created/, result)
|
1002
|
+
|
1003
|
+
# Check backup file exists
|
1004
|
+
backup_files = Dir.glob("./backups/test_*.sql")
|
1005
|
+
assert backup_files.any?
|
1006
|
+
end
|
1007
|
+
|
1008
|
+
def test_query_safety
|
1009
|
+
# Test dangerous query detection
|
1010
|
+
response = @client.call_tool("execute_query", {
|
1011
|
+
"database" => "test",
|
1012
|
+
"query" => "SELECT * FROM users; DROP TABLE users;"
|
1013
|
+
})
|
1014
|
+
|
1015
|
+
assert_successful_response(response)
|
1016
|
+
result = response.dig(:result, :content, 0, :text)
|
1017
|
+
assert_match(/dangerous query pattern/, result)
|
1018
|
+
end
|
1019
|
+
|
1020
|
+
def test_performance_metrics
|
1021
|
+
response = @client.read_resource("database/test/performance")
|
1022
|
+
|
1023
|
+
assert_successful_response(response)
|
1024
|
+
metrics = JSON.parse(response.dig(:result, :contents, 0, :text))
|
1025
|
+
|
1026
|
+
assert metrics["connection_pool"]
|
1027
|
+
assert metrics["table_sizes"]
|
1028
|
+
end
|
1029
|
+
|
1030
|
+
private
|
1031
|
+
|
1032
|
+
def create_test_server
|
1033
|
+
manager = DatabaseManager.new
|
1034
|
+
manager.add_connection("test", {
|
1035
|
+
adapter: "sqlite",
|
1036
|
+
database: ":memory:"
|
1037
|
+
})
|
1038
|
+
|
1039
|
+
Tsikol::Server.new(name: "test-db-manager") do
|
1040
|
+
before_start { @database_manager = manager }
|
1041
|
+
|
1042
|
+
tool QueryTool
|
1043
|
+
tool SchemaTool
|
1044
|
+
tool BackupTool
|
1045
|
+
resource PerformanceResource
|
1046
|
+
end
|
1047
|
+
end
|
1048
|
+
|
1049
|
+
def setup_test_database
|
1050
|
+
db = @server.instance_variable_get(:@database_manager).get_connection("test")
|
1051
|
+
|
1052
|
+
db.create_table :users do
|
1053
|
+
primary_key :id
|
1054
|
+
String :name
|
1055
|
+
String :email
|
1056
|
+
end
|
1057
|
+
|
1058
|
+
db.create_table :posts do
|
1059
|
+
primary_key :id
|
1060
|
+
foreign_key :user_id, :users
|
1061
|
+
String :title
|
1062
|
+
Text :content
|
1063
|
+
end
|
1064
|
+
end
|
1065
|
+
|
1066
|
+
def cleanup_test_database
|
1067
|
+
FileUtils.rm_rf("./backups")
|
1068
|
+
end
|
1069
|
+
end
|
1070
|
+
```
|
1071
|
+
|
1072
|
+
## Advanced Features
|
1073
|
+
|
1074
|
+
### Query Analysis and Optimization
|
1075
|
+
|
1076
|
+
```ruby
|
1077
|
+
class QueryAnalyzer
|
1078
|
+
def analyze(query, database_type)
|
1079
|
+
analysis = {
|
1080
|
+
query: query,
|
1081
|
+
type: detect_query_type(query),
|
1082
|
+
tables: extract_tables(query),
|
1083
|
+
joins: analyze_joins(query),
|
1084
|
+
filters: analyze_filters(query),
|
1085
|
+
suggestions: []
|
1086
|
+
}
|
1087
|
+
|
1088
|
+
# Add optimization suggestions
|
1089
|
+
if missing_index?(analysis)
|
1090
|
+
analysis[:suggestions] << {
|
1091
|
+
type: "index",
|
1092
|
+
message: "Consider adding index on filter columns",
|
1093
|
+
columns: analysis[:filters].map { |f| f[:column] }
|
1094
|
+
}
|
1095
|
+
end
|
1096
|
+
|
1097
|
+
if inefficient_join?(analysis)
|
1098
|
+
analysis[:suggestions] << {
|
1099
|
+
type: "join",
|
1100
|
+
message: "Consider reordering joins for better performance"
|
1101
|
+
}
|
1102
|
+
end
|
1103
|
+
|
1104
|
+
analysis
|
1105
|
+
end
|
1106
|
+
|
1107
|
+
private
|
1108
|
+
|
1109
|
+
def detect_query_type(query)
|
1110
|
+
case query
|
1111
|
+
when /^\s*SELECT/i then :select
|
1112
|
+
when /^\s*INSERT/i then :insert
|
1113
|
+
when /^\s*UPDATE/i then :update
|
1114
|
+
when /^\s*DELETE/i then :delete
|
1115
|
+
else :other
|
1116
|
+
end
|
1117
|
+
end
|
1118
|
+
|
1119
|
+
def extract_tables(query)
|
1120
|
+
# Simple regex-based extraction
|
1121
|
+
query.scan(/(?:FROM|JOIN)\s+(\w+)/i).flatten.uniq
|
1122
|
+
end
|
1123
|
+
end
|
1124
|
+
```
|
1125
|
+
|
1126
|
+
### Connection Pool Monitoring
|
1127
|
+
|
1128
|
+
```ruby
|
1129
|
+
class PoolMonitor
|
1130
|
+
def initialize(database_manager)
|
1131
|
+
@database_manager = database_manager
|
1132
|
+
@metrics = Concurrent::Hash.new
|
1133
|
+
start_monitoring
|
1134
|
+
end
|
1135
|
+
|
1136
|
+
def start_monitoring
|
1137
|
+
Thread.new do
|
1138
|
+
loop do
|
1139
|
+
@database_manager.connections.each do |name, db|
|
1140
|
+
pool = db.pool
|
1141
|
+
|
1142
|
+
@metrics[name] = {
|
1143
|
+
size: pool.size,
|
1144
|
+
available: pool.available_connections,
|
1145
|
+
allocated: pool.allocated,
|
1146
|
+
max_size: pool.max_size,
|
1147
|
+
wait_timeout: pool.connection_timeout
|
1148
|
+
}
|
1149
|
+
end
|
1150
|
+
|
1151
|
+
sleep 5
|
1152
|
+
end
|
1153
|
+
end
|
1154
|
+
end
|
1155
|
+
|
1156
|
+
def get_metrics
|
1157
|
+
@metrics.to_h
|
1158
|
+
end
|
1159
|
+
end
|
1160
|
+
```
|
1161
|
+
|
1162
|
+
## Best Practices Demonstrated
|
1163
|
+
|
1164
|
+
1. **Connection Pooling** - Efficient database connection management
|
1165
|
+
2. **Prepared Statements** - SQL injection prevention
|
1166
|
+
3. **Transaction Management** - Data consistency
|
1167
|
+
4. **Query Optimization** - Performance monitoring and analysis
|
1168
|
+
5. **Backup Strategies** - Multiple format support
|
1169
|
+
6. **Migration System** - Version-controlled schema changes
|
1170
|
+
7. **Health Monitoring** - Proactive issue detection
|
1171
|
+
8. **Multi-Database Support** - Flexible architecture
|
1172
|
+
9. **Security** - Authentication and query validation
|
1173
|
+
10. **Resource Management** - Proper cleanup and connection handling
|
1174
|
+
|
1175
|
+
## Performance Optimization
|
1176
|
+
|
1177
|
+
1. **Query Caching** - Cache frequently used queries
|
1178
|
+
2. **Index Management** - Automatic index suggestions
|
1179
|
+
3. **Connection Pooling** - Reuse database connections
|
1180
|
+
4. **Batch Operations** - Process multiple operations together
|
1181
|
+
5. **Async Processing** - Non-blocking database operations
|
1182
|
+
|
1183
|
+
## Security Considerations
|
1184
|
+
|
1185
|
+
1. **SQL Injection Prevention** - Parameterized queries
|
1186
|
+
2. **Access Control** - Role-based permissions
|
1187
|
+
3. **Audit Logging** - Track all database operations
|
1188
|
+
4. **Encryption** - Secure sensitive data
|
1189
|
+
5. **Connection Security** - SSL/TLS for database connections
|
1190
|
+
|
1191
|
+
## Next Steps
|
1192
|
+
|
1193
|
+
- Add support for NoSQL databases
|
1194
|
+
- Implement query plan visualization
|
1195
|
+
- Add real-time replication monitoring
|
1196
|
+
- Create database migration UI
|
1197
|
+
- Add automatic backup scheduling
|
1198
|
+
- Implement query result caching
|