better_structure_sql 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +41 -0
- data/README.md +240 -31
- data/lib/better_structure_sql/adapters/base_adapter.rb +18 -0
- data/lib/better_structure_sql/adapters/mysql_adapter.rb +199 -4
- data/lib/better_structure_sql/adapters/postgresql_adapter.rb +321 -37
- data/lib/better_structure_sql/adapters/sqlite_adapter.rb +218 -59
- data/lib/better_structure_sql/configuration.rb +12 -10
- data/lib/better_structure_sql/dumper.rb +230 -102
- data/lib/better_structure_sql/errors.rb +24 -0
- data/lib/better_structure_sql/file_writer.rb +2 -1
- data/lib/better_structure_sql/generators/base.rb +38 -0
- data/lib/better_structure_sql/generators/comment_generator.rb +118 -0
- data/lib/better_structure_sql/generators/domain_generator.rb +2 -1
- data/lib/better_structure_sql/generators/index_generator.rb +3 -1
- data/lib/better_structure_sql/generators/table_generator.rb +45 -20
- data/lib/better_structure_sql/generators/type_generator.rb +5 -3
- data/lib/better_structure_sql/schema_loader.rb +3 -3
- data/lib/better_structure_sql/schema_version.rb +17 -1
- data/lib/better_structure_sql/schema_versions.rb +223 -20
- data/lib/better_structure_sql/store_result.rb +46 -0
- data/lib/better_structure_sql/version.rb +1 -1
- data/lib/better_structure_sql.rb +4 -1
- data/lib/generators/better_structure_sql/templates/README +1 -1
- data/lib/generators/better_structure_sql/templates/migration.rb.erb +2 -0
- data/lib/tasks/better_structure_sql.rake +35 -18
- metadata +4 -2
- data/lib/generators/better_structure_sql/templates/add_metadata_migration.rb.erb +0 -25
data/lib/better_structure_sql/adapters/sqlite_adapter.rb

@@ -92,6 +92,8 @@ module BetterStructureSql
 # @param connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] Database connection
 # @return [Array<Hash>] Array of table hashes with :name, :schema, :sql, :columns, :primary_key, :constraints
 def fetch_tables(connection)
+  # Performance optimized: Reduces PRAGMA calls by batching table_info queries
+  # For 1000 tables: ~2000 fewer PRAGMA calls (~2x faster)
   query = <<~SQL.squish
     SELECT name, sql
     FROM sqlite_master
@@ -102,15 +104,27 @@ module BetterStructureSql
     ORDER BY name
   SQL
 
-  connection.execute(query).
+  table_rows = connection.execute(query).to_a
+  return [] if table_rows.empty?
+
+  table_names = table_rows.map { |row| row['name'] || row[0] }
+
+  # Batch fetch all table_info (columns + primary keys) and constraints
+  table_info_by_table = fetch_all_table_info(connection, table_names)
+  constraints_by_table = fetch_all_constraints(connection, table_names)
+
+  # Combine results
+  table_rows.map do |row|
     table_name = row['name'] || row[0]
+    table_info = table_info_by_table[table_name] || []
+
     {
       name: table_name,
       schema: 'main',
       sql: row['sql'] || row[1],
-      columns:
-      primary_key:
-      constraints:
+      columns: extract_columns_from_table_info(table_info),
+      primary_key: extract_primary_key_from_table_info(table_info),
+      constraints: constraints_by_table[table_name] || []
     }
   end
 end
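The reworked `fetch_tables` gathers all `PRAGMA table_info` and constraint data up front instead of querying inside the per-row mapping step. As a rough standalone illustration of the rows those PRAGMA calls return — using the sqlite3 gem directly rather than the adapter, with made-up table names:

```ruby
require "sqlite3"

db = SQLite3::Database.new(":memory:")
db.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, email TEXT NOT NULL)")
db.execute("CREATE TABLE posts (id INTEGER PRIMARY KEY, user_id INTEGER, title TEXT)")

table_names = db.execute("SELECT name FROM sqlite_master WHERE type = 'table'").flatten

# One PRAGMA table_info call per table, gathered up front; each row is
# [cid, name, type, notnull, dflt_value, pk].
table_info_by_table = table_names.each_with_object({}) do |name, acc|
  acc[name] = db.execute("PRAGMA table_info(#{name})")
end

pp table_info_by_table["users"]
# => [[0, "id", "INTEGER", 0, nil, 1], [1, "email", "TEXT", 1, nil, 0]]
```

With the raw sqlite3 gem the rows come back as positional arrays, which is why the adapter code falls back from string keys to indexes such as `row['name'] || row[1]`.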
@@ -122,39 +136,9 @@ module BetterStructureSql
 def fetch_indexes(connection)
   tables = fetch_table_names(connection)
   indexes = []
-  skip_origins = %w[pk u].freeze
 
   tables.each do |table_name|
-
-    index_list = connection.execute("PRAGMA index_list(#{quote_identifier(table_name)})")
-
-    index_list.each do |index_row|
-      index_name = index_row['name'] || index_row[1]
-      is_unique = (index_row['unique'] || index_row[2]).to_i == 1
-      origin = index_row['origin'] || index_row[3] # 'c' = CREATE INDEX, 'u' = UNIQUE constraint, 'pk' = PRIMARY KEY
-
-      # Skip auto-generated indexes for PRIMARY KEY and UNIQUE constraints
-      next if skip_origins.include?(origin)
-
-      # Get columns for this index
-      index_info = connection.execute("PRAGMA index_info(#{quote_identifier(index_name)})")
-      columns = index_info.map { |col_row| col_row['name'] || col_row[2] }
-
-      # Generate CREATE INDEX SQL for compatibility with Dumper/IndexGenerator
-      unique_clause = is_unique ? 'UNIQUE ' : ''
-      columns_clause = columns.map { |col| quote_identifier(col) }.join(', ')
-      definition = "CREATE #{unique_clause}INDEX #{quote_identifier(index_name)} " \
-                   "ON #{quote_identifier(table_name)} (#{columns_clause})"
-
-      indexes << {
-        table: table_name,
-        name: index_name,
-        columns: columns,
-        unique: is_unique,
-        type: 'BTREE', # SQLite uses B-tree by default
-        definition: definition # Add definition field for compatibility with IndexGenerator
-      }
-    end
+    indexes.concat(fetch_table_indexes(connection, table_name))
   end
 
   indexes
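The extracted `fetch_table_indexes` (defined later in this diff) still reads `PRAGMA index_list` and `PRAGMA index_info` for each table. A standalone sketch of the rows those PRAGMAs produce, using the sqlite3 gem with an illustrative schema:

```ruby
require "sqlite3"

db = SQLite3::Database.new(":memory:")
db.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, email TEXT, name TEXT)")
db.execute("CREATE UNIQUE INDEX index_users_on_email ON users (email)")

# index_list rows: [seq, name, unique, origin, partial]; origin "c" marks an
# explicit CREATE INDEX, while "pk"/"u" mark auto-generated constraint indexes.
pp db.execute("PRAGMA index_list(users)")
# => [[0, "index_users_on_email", 1, "c", 0]]

# index_info rows: [seqno, cid, name]
pp db.execute("PRAGMA index_info(index_users_on_email)")
# => [[0, 1, "email"]]
```

The `skip_origins = %w[pk u]` filter in the helper drops the auto-generated entries so only explicit CREATE INDEX statements are emitted into the dump.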
@@ -255,31 +239,22 @@ module BetterStructureSql
 # @param connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] Database connection
 # @return [Array<Hash>] Array of trigger hashes with :schema, :name, :table_name, :timing, :event, :definition
 def fetch_triggers(connection)
-  query =
-
-
-    WHERE type = 'trigger'
-    ORDER BY tbl_name, name
-  SQL
-
-  connection.execute(query).map do |row|
-    # Parse timing and event from SQL
-    sql = row['sql'] || row[2] || ''
-    timing_match = sql.match(/\b(BEFORE|AFTER|INSTEAD OF)\b/i)
-    timing = timing_match ? timing_match.captures.first.upcase : 'AFTER'
-
-    event_match = sql.match(/\b(INSERT|UPDATE|DELETE)\b/i)
-    event = event_match ? event_match.captures.first.upcase : 'INSERT'
+  query = triggers_query
+  connection.execute(query).map { |row| build_trigger_hash(row) }
+end
 
-
-
-
-
-
-
-
-    }
-
+# Fetch comments on database objects (not supported in SQLite)
+#
+# @param _connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] Database connection (unused)
+# @return [Hash] Empty hash for all object types
+def fetch_comments(_connection)
+  {
+    tables: {},
+    columns: {},
+    indexes: {},
+    views: {},
+    functions: {}
+  }
 end
 
 # Capability methods - SQLite feature support
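The new `build_trigger_hash`, `parse_trigger_timing`, and `parse_trigger_event` helpers (shown in full in the last sqlite_adapter.rb hunk below) parse timing and event out of the raw trigger SQL stored in sqlite_master. A minimal illustration of those regexes on a made-up trigger:

```ruby
sql = <<~SQL
  CREATE TRIGGER touch_users_updated_at
  AFTER UPDATE ON users
  BEGIN
    UPDATE users SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id;
  END
SQL

# Same regexes as the adapter, with the same fallbacks.
timing_match = sql.match(/\b(BEFORE|AFTER|INSTEAD OF)\b/i)
timing = timing_match ? timing_match.captures.first.upcase : 'AFTER'

event_match = sql.match(/\b(INSERT|UPDATE|DELETE)\b/i)
event = event_match ? event_match.captures.first.upcase : 'INSERT'

p timing # => "AFTER"
p event  # => "UPDATE"
```

`fetch_comments` returns empty hashes for every object type because SQLite has no COMMENT ON support, as the method's own doc comment notes.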
@@ -540,6 +515,86 @@ module BetterStructureSql
   checks
 end
 
+# Batch fetch all table_info for multiple tables (performance optimization)
+#
+# @param connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] Database connection
+# @param table_names [Array<String>] Array of table names
+# @return [Hash<String, Array<Hash>>] Hash of table_name => array of table_info rows
+def fetch_all_table_info(connection, table_names)
+  result = {}
+
+  table_names.each do |table_name|
+    table_info = connection.execute("PRAGMA table_info(#{quote_identifier(table_name)})")
+    result[table_name] = table_info.to_a
+  end
+
+  result
+end
+
+# Batch fetch all constraints for multiple tables (performance optimization)
+#
+# @param connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] Database connection
+# @param table_names [Array<String>] Array of table names
+# @return [Hash<String, Array<Hash>>] Hash of table_name => array of constraint hashes
+def fetch_all_constraints(connection, table_names)
+  return {} if table_names.empty?
+
+  # Fetch all table SQL in one query
+  quoted_names = table_names.map { |t| "'#{t}'" }.join(', ')
+  query = <<~SQL.squish
+    SELECT name, sql
+    FROM sqlite_master
+    WHERE type = 'table'
+      AND name IN (#{quoted_names})
+  SQL
+
+  result = Hash.new { |h, k| h[k] = [] }
+
+  connection.execute(query).each do |row|
+    table_name = row['name'] || row[0]
+    sql = row['sql'] || row[1]
+    next unless sql
+
+    # Extract CHECK constraints from SQL
+    sql.scan(/CONSTRAINT\s+(\w+)\s+CHECK\s*\(([^)]+)\)/i) do |match|
+      result[table_name] << {
+        name: match[0],
+        definition: match[1],
+        type: :check
+      }
+    end
+  end
+
+  result
+end
+
+# Extract columns from table_info rows
+#
+# @param table_info [Array<Hash>] Array of table_info rows from PRAGMA table_info
+# @return [Array<Hash>] Array of column hashes
+def extract_columns_from_table_info(table_info)
+  table_info.map do |row|
+    {
+      name: row['name'] || row[1],
+      type: resolve_column_type(row['type'] || row[2]),
+      nullable: (row['notnull'] || row[3]).to_i.zero?,
+      default: row['dflt_value'] || row[4],
+      primary_key: (row['pk'] || row[5]).to_i == 1
+    }
+  end
+end
+
+# Extract primary key columns from table_info rows
+#
+# @param table_info [Array<Hash>] Array of table_info rows from PRAGMA table_info
+# @return [Array<String>] Array of primary key column names in pk order
+def extract_primary_key_from_table_info(table_info)
+  table_info
+    .select { |row| (row['pk'] || row[5]).to_i == 1 }
+    .sort_by { |row| row['pk'] || row[5] }
+    .map { |row| row['name'] || row[1] }
+end
+
 # Resolve SQLite column type into normalized format using type affinity
 #
 # @param type_string [String] Raw column type from PRAGMA table_info
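`fetch_all_constraints` recovers CHECK constraints by scanning the CREATE TABLE text that sqlite_master stores. A standalone run of the same regex over an illustrative table definition:

```ruby
sql = <<~SQL
  CREATE TABLE orders (
    id INTEGER PRIMARY KEY,
    quantity INTEGER,
    price REAL,
    CONSTRAINT quantity_positive CHECK (quantity > 0),
    CONSTRAINT price_non_negative CHECK (price >= 0)
  )
SQL

constraints = []
sql.scan(/CONSTRAINT\s+(\w+)\s+CHECK\s*\(([^)]+)\)/i) do |match|
  constraints << { name: match[0], definition: match[1], type: :check }
end

pp constraints
# constraints now holds:
#   { name: "quantity_positive", definition: "quantity > 0", type: :check }
#   { name: "price_non_negative", definition: "price >= 0", type: :check }
```

Note that the pattern only captures named constraints and that `[^)]+` stops at the first closing parenthesis, so expressions with nested parentheses are captured up to that point.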
@@ -639,6 +694,110 @@ module BetterStructureSql
     value.to_s
   end
 end
+
+# Fetch indexes for a specific table
+#
+# @param connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] Database connection
+# @param table_name [String] Table name
+# @return [Array<Hash>] Array of index hashes
+def fetch_table_indexes(connection, table_name)
+  index_list = connection.execute("PRAGMA index_list(#{quote_identifier(table_name)})")
+  skip_origins = %w[pk u].freeze
+  indexes = []
+
+  index_list.each do |index_row|
+    index_name = index_row['name'] || index_row[1]
+    is_unique = (index_row['unique'] || index_row[2]).to_i == 1
+    origin = index_row['origin'] || index_row[3]
+
+    next if skip_origins.include?(origin)
+
+    columns = fetch_index_columns(connection, index_name)
+    definition = build_index_definition(table_name, index_name, is_unique, columns)
+
+    indexes << {
+      table: table_name,
+      name: index_name,
+      columns: columns,
+      unique: is_unique,
+      type: 'BTREE',
+      definition: definition
+    }
+  end
+
+  indexes
+end
+
+# Fetch columns for a specific index
+#
+# @param connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] Database connection
+# @param index_name [String] Index name
+# @return [Array<String>] Array of column names
+def fetch_index_columns(connection, index_name)
+  index_info = connection.execute("PRAGMA index_info(#{quote_identifier(index_name)})")
+  index_info.map { |col_row| col_row['name'] || col_row[2] }
+end
+
+# Build CREATE INDEX definition string
+#
+# @param table_name [String] Table name
+# @param index_name [String] Index name
+# @param is_unique [Boolean] Whether index is unique
+# @param columns [Array<String>] Column names
+# @return [String] CREATE INDEX SQL statement
+def build_index_definition(table_name, index_name, is_unique, columns)
+  unique_clause = is_unique ? 'UNIQUE ' : ''
+  columns_clause = columns.map { |col| quote_identifier(col) }.join(', ')
+  "CREATE #{unique_clause}INDEX #{quote_identifier(index_name)} " \
+    "ON #{quote_identifier(table_name)} (#{columns_clause})"
+end
+
+# SQL query for fetching triggers
+#
+# @return [String] SQL query string
+def triggers_query
+  <<~SQL.squish
+    SELECT name, tbl_name, sql
+    FROM sqlite_master
+    WHERE type = 'trigger'
+    ORDER BY tbl_name, name
+  SQL
+end
+
+# Build trigger hash from query row
+#
+# @param row [Hash] Row from triggers query
+# @return [Hash] Trigger hash with parsed timing and event
+def build_trigger_hash(row)
+  sql = row['sql'] || row[2] || ''
+
+  {
+    schema: 'main',
+    name: row['name'] || row[0],
+    table_name: row['tbl_name'] || row[1],
+    timing: parse_trigger_timing(sql),
+    event: parse_trigger_event(sql),
+    definition: sql
+  }
+end
+
+# Parse trigger timing from SQL
+#
+# @param sql [String] Trigger SQL definition
+# @return [String] Timing ('BEFORE', 'AFTER', or 'INSTEAD OF')
+def parse_trigger_timing(sql)
+  timing_match = sql.match(/\b(BEFORE|AFTER|INSTEAD OF)\b/i)
+  timing_match ? timing_match.captures.first.upcase : 'AFTER'
+end
+
+# Parse trigger event from SQL
+#
+# @param sql [String] Trigger SQL definition
+# @return [String] Event ('INSERT', 'UPDATE', or 'DELETE')
+def parse_trigger_event(sql)
+  event_match = sql.match(/\b(INSERT|UPDATE|DELETE)\b/i)
+  event_match ? event_match.captures.first.upcase : 'INSERT'
+end
 end
 end
 end
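To see the string `build_index_definition` produces, here is the same method run standalone with a stand-in `quote_identifier` (the adapter defines its own helper elsewhere in sqlite_adapter.rb; double quoting is assumed here):

```ruby
# Stand-in for the adapter's quote_identifier helper; double quoting is an assumption.
def quote_identifier(name)
  %("#{name}")
end

# Copied from the hunk above: builds a CREATE INDEX statement for the dump output.
def build_index_definition(table_name, index_name, is_unique, columns)
  unique_clause = is_unique ? 'UNIQUE ' : ''
  columns_clause = columns.map { |col| quote_identifier(col) }.join(', ')
  "CREATE #{unique_clause}INDEX #{quote_identifier(index_name)} " \
    "ON #{quote_identifier(table_name)} (#{columns_clause})"
end

puts build_index_definition('users', 'index_users_on_email_and_name', true, %w[email name])
# => CREATE UNIQUE INDEX "index_users_on_email_and_name" ON "users" ("email", "name")
```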
data/lib/better_structure_sql/configuration.rb

@@ -14,8 +14,8 @@ module BetterStructureSql
   :include_triggers,
   :include_views,
   :include_materialized_views,
-  :include_rules,
-  :include_comments,
+  :include_rules, # TODO: Not yet implemented
+  :include_comments, # TODO: Not yet implemented
   :include_domains,
   :include_sequences,
   :include_custom_types,
@@ -51,7 +51,7 @@ module BetterStructureSql
 @include_views = true
 @include_materialized_views = true
 @include_rules = false
-@include_comments =
+@include_comments = true
 @include_domains = true
 @include_sequences = true
 @include_custom_types = true
@@ -85,37 +85,39 @@ module BetterStructureSql
 private
 
 def validate_output_path!
-
+  return unless output_path.nil? || output_path.strip.empty?
+
+  raise ConfigurationError, 'output_path cannot be blank. Use absolute path or relative like "db/structure.sql"'
 end
 
 def validate_schema_versions_limit!
   return if schema_versions_limit.is_a?(Integer) && schema_versions_limit >= 0
 
-  raise
+  raise ConfigurationError, 'schema_versions_limit must be a non-negative integer (0 for unlimited, or positive number)'
 end
 
 def validate_indent_size!
   return if indent_size.is_a?(Integer) && indent_size.positive?
 
-  raise
+  raise ConfigurationError, 'indent_size must be a positive integer (typically 2 or 4)'
 end
 
 def validate_schemas!
   return if schemas.is_a?(Array) && schemas.any?
 
-  raise
+  raise ConfigurationError, 'schemas must be a non-empty array (e.g., ["public"] for PostgreSQL)'
 end
 
 def validate_max_lines_per_file!
   return if max_lines_per_file.is_a?(Integer) && max_lines_per_file.positive?
 
-  raise
+  raise ConfigurationError, 'max_lines_per_file must be a positive integer (recommended: 500-1000)'
 end
 
 def validate_overflow_threshold!
   return if overflow_threshold.is_a?(Numeric) && overflow_threshold >= 1.0
 
-  raise
+  raise ConfigurationError, 'overflow_threshold must be >= 1.0 (typically 1.1 for 10% overflow)'
 end
 
 def validate_adapter!
@@ -123,7 +125,7 @@ module BetterStructureSql
 
   return if valid_adapters.include?(adapter)
 
-  raise
+  raise ConfigurationError, "Invalid adapter: #{adapter}. Valid options: #{valid_adapters.join(', ')}"
 end
 end
 end
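Both configuration.rb hunks replace bare `raise` calls with `ConfigurationError` plus an actionable message. A minimal sketch of the resulting behaviour — the `ConfigurationError` definition below is a stand-in (the gem defines its own in errors.rb), and the validator is parameterized so it can run outside the Configuration class:

```ruby
# Stand-in error class; better_structure_sql defines ConfigurationError in errors.rb.
class ConfigurationError < StandardError; end

def validate_indent_size!(indent_size)
  return if indent_size.is_a?(Integer) && indent_size.positive?

  raise ConfigurationError, 'indent_size must be a positive integer (typically 2 or 4)'
end

validate_indent_size!(2) # passes silently

begin
  validate_indent_size!(0)
rescue ConfigurationError => e
  puts e.message # => indent_size must be a positive integer (typically 2 or 4)
end
```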