sequel 2.12.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. data/CHANGELOG +62 -0
  2. data/README.rdoc +3 -3
  3. data/Rakefile +7 -0
  4. data/doc/advanced_associations.rdoc +44 -0
  5. data/doc/release_notes/3.0.0.txt +221 -0
  6. data/lib/sequel/adapters/amalgalite.rb +208 -0
  7. data/lib/sequel/adapters/db2.rb +3 -0
  8. data/lib/sequel/adapters/dbi.rb +9 -0
  9. data/lib/sequel/adapters/do.rb +0 -4
  10. data/lib/sequel/adapters/firebird.rb +16 -18
  11. data/lib/sequel/adapters/informix.rb +5 -3
  12. data/lib/sequel/adapters/jdbc.rb +24 -20
  13. data/lib/sequel/adapters/jdbc/h2.rb +15 -4
  14. data/lib/sequel/adapters/mysql.rb +4 -8
  15. data/lib/sequel/adapters/odbc.rb +0 -4
  16. data/lib/sequel/adapters/oracle.rb +0 -4
  17. data/lib/sequel/adapters/shared/mssql.rb +16 -5
  18. data/lib/sequel/adapters/shared/mysql.rb +87 -86
  19. data/lib/sequel/adapters/shared/oracle.rb +92 -3
  20. data/lib/sequel/adapters/shared/postgres.rb +85 -29
  21. data/lib/sequel/adapters/shared/progress.rb +8 -3
  22. data/lib/sequel/adapters/shared/sqlite.rb +53 -23
  23. data/lib/sequel/adapters/sqlite.rb +4 -7
  24. data/lib/sequel/adapters/utils/unsupported.rb +3 -3
  25. data/lib/sequel/connection_pool.rb +18 -25
  26. data/lib/sequel/core.rb +2 -21
  27. data/lib/sequel/database.rb +60 -44
  28. data/lib/sequel/database/schema_generator.rb +26 -31
  29. data/lib/sequel/database/schema_methods.rb +8 -3
  30. data/lib/sequel/database/schema_sql.rb +114 -28
  31. data/lib/sequel/dataset.rb +14 -41
  32. data/lib/sequel/dataset/convenience.rb +31 -54
  33. data/lib/sequel/dataset/graph.rb +7 -13
  34. data/lib/sequel/dataset/sql.rb +43 -54
  35. data/lib/sequel/extensions/inflector.rb +0 -5
  36. data/lib/sequel/extensions/schema_dumper.rb +238 -0
  37. data/lib/sequel/metaprogramming.rb +0 -20
  38. data/lib/sequel/model.rb +1 -2
  39. data/lib/sequel/model/base.rb +18 -16
  40. data/lib/sequel/model/inflections.rb +6 -9
  41. data/lib/sequel/plugins/caching.rb +0 -6
  42. data/lib/sequel/plugins/hook_class_methods.rb +1 -1
  43. data/lib/sequel/sql.rb +2 -0
  44. data/lib/sequel/version.rb +2 -2
  45. data/spec/adapters/firebird_spec.rb +35 -8
  46. data/spec/adapters/mysql_spec.rb +173 -266
  47. data/spec/adapters/oracle_spec.rb +13 -0
  48. data/spec/adapters/postgres_spec.rb +127 -227
  49. data/spec/adapters/sqlite_spec.rb +13 -171
  50. data/spec/core/connection_pool_spec.rb +15 -4
  51. data/spec/core/core_sql_spec.rb +14 -170
  52. data/spec/core/database_spec.rb +50 -132
  53. data/spec/core/dataset_spec.rb +47 -930
  54. data/spec/core/expression_filters_spec.rb +12 -0
  55. data/spec/core/schema_generator_spec.rb +37 -45
  56. data/spec/core/schema_spec.rb +26 -16
  57. data/spec/core/spec_helper.rb +0 -25
  58. data/spec/extensions/inflector_spec.rb +0 -3
  59. data/spec/extensions/schema_dumper_spec.rb +292 -0
  60. data/spec/extensions/serialization_spec.rb +9 -0
  61. data/spec/extensions/single_table_inheritance_spec.rb +6 -1
  62. data/spec/extensions/spec_helper.rb +1 -3
  63. data/spec/extensions/validation_helpers_spec.rb +4 -4
  64. data/spec/integration/database_test.rb +18 -0
  65. data/spec/integration/dataset_test.rb +112 -1
  66. data/spec/integration/eager_loader_test.rb +70 -9
  67. data/spec/integration/prepared_statement_test.rb +2 -2
  68. data/spec/integration/schema_test.rb +76 -27
  69. data/spec/integration/spec_helper.rb +0 -14
  70. data/spec/integration/transaction_test.rb +27 -0
  71. data/spec/model/associations_spec.rb +0 -36
  72. data/spec/model/base_spec.rb +18 -123
  73. data/spec/model/hooks_spec.rb +2 -235
  74. data/spec/model/inflector_spec.rb +15 -115
  75. data/spec/model/model_spec.rb +0 -120
  76. data/spec/model/plugins_spec.rb +0 -70
  77. data/spec/model/record_spec.rb +35 -93
  78. data/spec/model/spec_helper.rb +0 -27
  79. data/spec/model/validations_spec.rb +0 -931
  80. metadata +9 -14
  81. data/lib/sequel/deprecated.rb +0 -593
  82. data/lib/sequel/deprecated_migration.rb +0 -91
  83. data/lib/sequel/model/deprecated.rb +0 -204
  84. data/lib/sequel/model/deprecated_hooks.rb +0 -103
  85. data/lib/sequel/model/deprecated_inflector.rb +0 -335
  86. data/lib/sequel/model/deprecated_validations.rb +0 -388
  87. data/spec/core/core_ext_spec.rb +0 -156
  88. data/spec/core/migration_spec.rb +0 -263
  89. data/spec/core/pretty_table_spec.rb +0 -58
  90. data/spec/model/caching_spec.rb +0 -217
  91. data/spec/model/schema_spec.rb +0 -92
data/lib/sequel/dataset/graph.rb

@@ -17,8 +17,7 @@ module Sequel
  # the tables are combined in the single return hash. You can get around that by
  # using .select with correct aliases for all of the columns, but it is simpler to
  # use graph and have the result set split for you. In addition, graph respects
- # any row_proc or transform attributes of the current dataset and the datasets
- # you use with graph.
+ # any row_proc of the current dataset and the datasets you use with graph.
  #
  # If you are graphing a table and all columns for that table are nil, this
  # indicates that no matching rows existed in the table, so graph will return nil
@@ -186,7 +185,7 @@ module Sequel
  # Fetch the rows, split them into component table parts,
  # tranform and run the row_proc on each part (if applicable),
  # and yield a hash of the parts.
- def graph_each(opts=(defarg=true;nil), &block)
+ def graph_each
  # Reject tables with nil datasets, as they are excluded from
  # the result set
  datasets = @opts[:graph][:table_aliases].to_a.reject{|ta,ds| ds.nil?}
@@ -194,13 +193,11 @@ module Sequel
  table_aliases = datasets.collect{|ta,ds| ta}
  # Get an array of arrays, one for each dataset, with
  # the necessary information about each dataset, for speed
- datasets = datasets.collect do |ta, ds|
- [ta, ds, ds.instance_variable_get(:@transform), ds.row_proc]
- end
+ datasets = datasets.collect{|ta, ds| [ta, ds, ds.row_proc]}
  # Use the manually set graph aliases, if any, otherwise
  # use the ones automatically created by .graph
  column_aliases = @opts[:graph_aliases] || @opts[:graph][:column_aliases]
- fetch_rows(defarg ? select_sql : select_sql(opts)) do |r|
+ fetch_rows(select_sql) do |r|
  graph = {}
  # Create the sub hashes, one per table
  table_aliases.each{|ta| graph[ta]={}}
@@ -211,14 +208,11 @@ module Sequel
  ta, column = tc
  graph[ta][column] = r[col_alias]
  end
- # For each dataset, transform and run the row
- # row_proc if applicable
- datasets.each do |ta,ds,tr,rp|
+ # For each dataset run the row_proc if applicable
+ datasets.each do |ta,ds,rp|
  g = graph[ta]
  graph[ta] = if g.values.any?{|x| !x.nil?}
- g = ds.transform_load(g) if tr
- g = rp[g] if rp
- g
+ rp ? rp.call(g) : g
  else
  nil
  end
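
The graph hunks above drop the old transform handling, leaving row_proc as the only per-dataset hook. A minimal usage sketch of the documented behavior (the tables, columns, and data are hypothetical; assumes the sqlite3 gem for an in-memory database):

  require 'sequel'
  DB = Sequel.sqlite # in-memory SQLite database
  DB.create_table(:artists){primary_key :id; String :name}
  DB.create_table(:albums){primary_key :id; String :title; Integer :artist_id}
  DB[:artists].insert(:name=>'YJM')
  DB[:albums].insert(:title=>'RF', :artist_id=>1)

  # graph splits each row into one sub-hash per table alias; a dataset's
  # row_proc is applied only to its own slice, and an all-nil slice is
  # returned as nil rather than being passed to the row_proc.
  DB[:albums].graph(:artists, :id=>:artist_id).first
  # => {:albums=>{:id=>1, :title=>'RF', :artist_id=>1},
  #     :artists=>{:id=>1, :name=>'YJM'}}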
data/lib/sequel/dataset/sql.rb

@@ -12,7 +12,7 @@ module Sequel
  N_ARITY_OPERATORS = ::Sequel::SQL::ComplexExpression::N_ARITY_OPERATORS
  NULL = "NULL".freeze
  QUESTION_MARK = '?'.freeze
- STOCK_COUNT_OPTS = {:select => [LiteralString.new("COUNT(*)").freeze], :order => nil}.freeze
+ STOCK_COUNT_OPTS = {:select => [SQL::AliasedExpression.new(LiteralString.new("COUNT(*)").freeze, :count)], :order => nil}.freeze
  SELECT_CLAUSE_ORDER = %w'distinct columns from join where group having compounds order limit'.freeze
  TWO_ARITY_OPERATORS = ::Sequel::SQL::ComplexExpression::TWO_ARITY_OPERATORS
  WILDCARD = '*'.freeze
@@ -49,7 +49,7 @@ module Sequel

  # SQL fragment for the SQL CAST expression.
  def cast_sql(expr, type)
- "CAST(#{literal(expr)} AS #{db.send(:type_literal_base, :type=>type)})"
+ "CAST(#{literal(expr)} AS #{db.cast_type_literal(type)})"
  end

  # SQL fragment for specifying all columns in a given table.
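
Two visible effects of these sql.rb hunks, sketched below (output is approximate; DB is the connection from the sketch above): Dataset#count now selects COUNT(*) aliased as count, and CAST expressions build their type name through the public Database#cast_type_literal method.

  DB[:albums].count
  # executes roughly: SELECT COUNT(*) AS count FROM albums LIMIT 1

  DB[:albums].select(:artist_id.cast(:integer)).sql
  # => "SELECT CAST(artist_id AS integer) FROM albums"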
@@ -87,9 +87,8 @@ module Sequel
  #
  # dataset.filter{|o| o.price >= 100}.delete_sql #=>
  # "DELETE FROM items WHERE (price >= 100)"
- def delete_sql(opts = (defarg=true;nil))
- Deprecation.deprecate("Calling Dataset#delete_sql with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).delete_sql.") unless defarg
- opts = opts ? @opts.merge(opts) : @opts
+ def delete_sql
+ opts = @opts

  return static_sql(opts[:sql]) if opts[:sql]

@@ -136,7 +135,6 @@ module Sequel
  def exclude(*cond, &block)
  clause = (@opts[:having] ? :having : :where)
  cond = cond.first if cond.size == 1
- cond = SQL::BooleanExpression.from_value_pairs(cond, :OR) if Sequel.condition_specifier?(cond)
  cond = filter_expr(cond, &block)
  cond = SQL::BooleanExpression.invert(cond)
  cond = SQL::BooleanExpression.new(:AND, @opts[clause], cond) if @opts[clause]
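
As the removed deprecation message in the delete_sql hunk above spells out, the SQL-building methods no longer accept an options hash in 3.0; options are merged with clone first. A short sketch (table and filter are hypothetical; DB as above):

  ds = DB[:items]
  ds.filter{|o| o.price >= 100}.delete_sql
  # => "DELETE FROM items WHERE (price >= 100)"

  # 2.x style ds.delete_sql(:where=>{:id=>1}) becomes:
  ds.clone(:where=>{:id=>1}).delete_sql
  # => "DELETE FROM items WHERE (id = 1)"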
@@ -147,9 +145,8 @@ module Sequel
  #
  # DB.select(1).where(DB[:items].exists).sql
  # #=> "SELECT 1 WHERE EXISTS (SELECT * FROM items)"
- def exists(opts = (defarg=true;nil))
- Deprecation.deprecate("Calling Dataset#exists with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).exists.") unless defarg
- LiteralString.new("EXISTS (#{defarg ? select_sql : select_sql(opts)})")
+ def exists
+ LiteralString.new("EXISTS (#{select_sql})")
  end

  # Returns a copy of the dataset with the given conditions imposed upon it.
@@ -223,10 +220,11 @@ module Sequel

  # Returns a copy of the dataset with the source changed.
  #
+ # dataset.from # SQL: SELECT *
  # dataset.from(:blah) # SQL: SELECT * FROM blah
  # dataset.from(:blah, :foo) # SQL: SELECT * FROM blah, foo
  def from(*source)
- clone(:from => source)
+ clone(:from=>source.empty? ? nil : source)
  end

  # Returns a dataset selecting from the current dataset.
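
A sketch of the no-argument from behavior documented in the hunk above (DB as above):

  ds = DB[:items]
  ds.from(:blah, :foo).sql  # => "SELECT * FROM blah, foo"
  ds.from.sql               # => "SELECT *" -- the FROM clause is now removed entirely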
@@ -324,7 +322,6 @@ module Sequel
  when Hash
  values = @opts[:defaults].merge(values) if @opts[:defaults]
  values = values.merge(@opts[:overrides]) if @opts[:overrides]
- values = transform_save(values) if @transform
  if values.empty?
  insert_default_values_sql
  else
@@ -362,11 +359,6 @@ module Sequel
  clone(o)
  end

- # SQL fragment specifying an Irregular (cast/extract) SQL function call
- def irregular_function_sql(f)
- "#{f.f}(#{literal(f.arg1)} #{f.joiner} #{literal(f.arg2)})"
- end
-
  # SQL fragment specifying a JOIN clause without ON or USING.
  def join_clause_sql(jc)
  table = jc.table
@@ -608,6 +600,7 @@ module Sequel
  # quote the name with quoted_identifier.
  def quote_identifier(name)
  return name if name.is_a?(LiteralString)
+ name = name.value if name.is_a?(SQL::Identifier)
  name = input_identifier(name)
  name = quoted_identifier(name) if quote_identifiers?
  name
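
quote_identifier now unwraps SQL::Identifier values before quoting. A small sketch (assumes identifier quoting is enabled on the dataset; the quote character shown is the double-quote default, which varies by database):

  ds = DB[:items]
  ds.quote_identifiers = true
  ds.quote_identifier(Sequel::SQL::Identifier.new(:name))  # => "\"name\""
  ds.quote_identifier(:name)                               # => "\"name\"", as before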
@@ -683,24 +676,21 @@ module Sequel
  # Formats a SELECT statement
  #
  # dataset.select_sql # => "SELECT * FROM items"
- def select_sql(opts = (defarg=true;nil))
- Deprecation.deprecate("Calling Dataset#select_sql with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).select_sql.") unless defarg
- opts = opts ? @opts.merge(opts) : @opts
- return static_sql(opts[:sql]) if opts[:sql]
+ def select_sql
+ return static_sql(@opts[:sql]) if @opts[:sql]
  sql = 'SELECT'
- select_clause_order.each{|x| send("select_#{x}_sql", sql, opts)}
+ select_clause_order.each{|x| send(:"select_#{x}_sql", sql)}
  sql
  end

  # Same as select_sql, not aliased directly to make subclassing simpler.
- def sql(opts = (defarg=true;nil))
- Deprecation.deprecate("Calling Dataset#select_sql with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).select_sql.") unless defarg
- defarg ? select_sql : select_sql(opts)
+ def sql
+ select_sql
  end

  # SQL fragment for specifying subscripts (SQL arrays)
  def subscript_sql(s)
- "#{s.f}[#{s.sub.join(COMMA_SEPARATOR)}]"
+ "#{literal(s.f)}[#{s.sub.join(COMMA_SEPARATOR)}]"
  end

  # Returns a copy of the dataset with no filters (HAVING or WHERE clause) applied.
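
select_sql now reads only @opts and dispatches one private select_<clause>_sql method per entry in SELECT_CLAUSE_ORDER; #sql is a plain delegate. A sketch of the assembled output (DB as above; exact formatting can vary by adapter):

  ds = DB[:items].where(:id=>1).order(:name).limit(10)
  ds.select_sql  # => "SELECT * FROM items WHERE (id = 1) ORDER BY name LIMIT 10"
  ds.sql         # => same string, via select_sql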
@@ -733,9 +723,8 @@ module Sequel
  #
  # Raises an error if the dataset is grouped or includes more
  # than one table.
- def update_sql(values = {}, opts = (defarg=true;nil))
- Deprecation.deprecate("Calling Dataset#update_sql with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).update_sql.") unless defarg
- opts = opts ? @opts.merge(opts) : @opts
+ def update_sql(values = {})
+ opts = @opts

  return static_sql(opts[:sql]) if opts[:sql]

@@ -750,7 +739,6 @@ module Sequel
  values = opts[:defaults].merge(values) if opts[:defaults]
  values = values.merge(opts[:overrides]) if opts[:overrides]
  # get values from hash
- values = transform_save(values) if @transform
  values.map do |k, v|
  "#{[String, Symbol].any?{|c| k.is_a?(c)} ? quote_identifier(k) : literal(k)} = #{literal(v)}"
  end.join(COMMA_SEPARATOR)
@@ -775,7 +763,7 @@ module Sequel
  end

  # Returns a copy of the dataset with the static SQL used. This is useful if you want
- # to keep the same row_proc/transform/graph, but change the SQL used to custom SQL.
+ # to keep the same row_proc/graph, but change the SQL used to custom SQL.
  #
  # dataset.with_sql('SELECT * FROM foo') # SELECT * FROM foo
  def with_sql(sql, *args)
@@ -802,7 +790,6 @@ module Sequel
  # Internal filter method so it works on either the having or where clauses.
  def _filter(clause, *cond, &block)
  cond = cond.first if cond.size == 1
- cond = transform_save(cond) if @transform if cond.is_a?(Hash)
  cond = filter_expr(cond, &block)
  cond = SQL::BooleanExpression.new(:AND, @opts[clause], cond) if @opts[clause]
  clone(clause => cond)
@@ -851,8 +838,10 @@ module Sequel
  when Array
  if String === expr[0]
  SQL::PlaceholderLiteralString.new(expr.shift, expr, true)
- else
+ elsif Sequel.condition_specifier?(expr)
  SQL::BooleanExpression.from_value_pairs(expr)
+ else
+ SQL::BooleanExpression.new(:AND, *expr.map{|x| filter_expr(x)})
  end
  when Proc
  filter_expr(virtual_row_block_call(expr))
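
With the new elsif/else branches, an array that is not a condition specifier (i.e. not a list of two-element pairs) is combined with AND. A rough sketch (column names hypothetical; DB as above):

  # A list of pairs is still treated as a condition specifier:
  DB[:items].filter([[:x, 1], [:y, 2]]).sql
  # => "SELECT * FROM items WHERE ((x = 1) AND (y = 2))"

  # Any other array is now ANDed element by element:
  DB[:items].filter([{:x=>1}, {:y=>2}]).sql
  # => "SELECT * FROM items WHERE ((x = 1) AND (y = 2))"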
@@ -1021,13 +1010,13 @@ module Sequel
  end

  # Modify the sql to add the columns selected
- def select_columns_sql(sql, opts)
- sql << " #{column_list(opts[:select])}"
+ def select_columns_sql(sql)
+ sql << " #{column_list(@opts[:select])}"
  end

  # Modify the sql to add the DISTINCT modifier
- def select_distinct_sql(sql, opts)
- if distinct = opts[:distinct]
+ def select_distinct_sql(sql)
+ if distinct = @opts[:distinct]
  sql << " DISTINCT#{" ON (#{expression_list(distinct)})" unless distinct.empty?}"
  end
  end
@@ -1035,9 +1024,9 @@ module Sequel
  # Modify the sql to add a dataset to the via an EXCEPT, INTERSECT, or UNION clause.
  # This uses a subselect for the compound datasets used, because using parantheses doesn't
  # work on all databases. I consider this an ugly hack, but can't I think of a better default.
- def select_compounds_sql(sql, opts)
- return unless opts[:compounds]
- opts[:compounds].each do |type, dataset, all|
+ def select_compounds_sql(sql)
+ return unless @opts[:compounds]
+ @opts[:compounds].each do |type, dataset, all|
  compound_sql = subselect_sql(dataset)
  compound_sql = "SELECT * FROM (#{compound_sql})" if dataset.opts[:compounds]
  sql.replace("#{sql} #{type.to_s.upcase}#{' ALL' if all} #{compound_sql}")
@@ -1045,39 +1034,39 @@ module Sequel
  end

  # Modify the sql to add the list of tables to select FROM
- def select_from_sql(sql, opts)
- sql << " FROM #{source_list(opts[:from])}" if opts[:from]
+ def select_from_sql(sql)
+ sql << " FROM #{source_list(@opts[:from])}" if @opts[:from]
  end

  # Modify the sql to add the expressions to GROUP BY
- def select_group_sql(sql, opts)
- sql << " GROUP BY #{expression_list(opts[:group])}" if opts[:group]
+ def select_group_sql(sql)
+ sql << " GROUP BY #{expression_list(@opts[:group])}" if @opts[:group]
  end

  # Modify the sql to add the filter criteria in the HAVING clause
- def select_having_sql(sql, opts)
- sql << " HAVING #{literal(opts[:having])}" if opts[:having]
+ def select_having_sql(sql)
+ sql << " HAVING #{literal(@opts[:having])}" if @opts[:having]
  end

  # Modify the sql to add the list of tables to JOIN to
- def select_join_sql(sql, opts)
- opts[:join].each{|j| sql << literal(j)} if opts[:join]
+ def select_join_sql(sql)
+ @opts[:join].each{|j| sql << literal(j)} if @opts[:join]
  end

  # Modify the sql to limit the number of rows returned and offset
- def select_limit_sql(sql, opts)
- sql << " LIMIT #{opts[:limit]}" if opts[:limit]
- sql << " OFFSET #{opts[:offset]}" if opts[:offset]
+ def select_limit_sql(sql)
+ sql << " LIMIT #{@opts[:limit]}" if @opts[:limit]
+ sql << " OFFSET #{@opts[:offset]}" if @opts[:offset]
  end

  # Modify the sql to add the expressions to ORDER BY
- def select_order_sql(sql, opts)
- sql << " ORDER BY #{expression_list(opts[:order])}" if opts[:order]
+ def select_order_sql(sql)
+ sql << " ORDER BY #{expression_list(@opts[:order])}" if @opts[:order]
  end

  # Modify the sql to add the filter criteria in the WHERE clause
- def select_where_sql(sql, opts)
- sql << " WHERE #{literal(opts[:where])}" if opts[:where]
+ def select_where_sql(sql)
+ sql << " WHERE #{literal(@opts[:where])}" if @opts[:where]
  end

  # Converts an array of source names into into a comma separated list.
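
Because these private clause methods now take only the partial SQL string and read @opts directly, adapter datasets can override a single clause without threading an opts hash through. A purely hypothetical override, shown only to illustrate the pattern (not taken from any bundled adapter):

  require 'sequel'

  module MyAdapter # hypothetical adapter namespace
    class Dataset < Sequel::Dataset
      private

      # Use SQL-standard OFFSET/FETCH wording instead of LIMIT/OFFSET.
      def select_limit_sql(sql)
        sql << " OFFSET #{@opts[:offset]} ROWS" if @opts[:offset]
        sql << " FETCH FIRST #{@opts[:limit]} ROWS ONLY" if @opts[:limit]
      end
    end
  end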
data/lib/sequel/extensions/inflector.rb

@@ -32,7 +32,6 @@ class String
  # clear :all
  # clear :plurals
  def self.clear(scope = :all)
- Sequel::Inflections.clear(scope)
  case scope
  when :all
  @plurals, @singulars, @uncountables = [], [], []
@@ -48,7+47,6 @@ class String
  # irregular 'octopus', 'octopi'
  # irregular 'person', 'people'
  def self.irregular(singular, plural)
- Sequel::Inflections.irregular(singular, plural)
  plural(Regexp.new("(#{singular[0,1]})#{singular[1..-1]}$", "i"), '\1' + plural[1..-1])
  singular(Regexp.new("(#{plural[0,1]})#{plural[1..-1]}$", "i"), '\1' + singular[1..-1])
  end
@@ -59,7+57,6 @@ class String
  # Example:
  # plural(/(x|ch|ss|sh)$/i, '\1es')
  def self.plural(rule, replacement)
- Sequel::Inflections.plural(rule, replacement)
  @plurals.insert(0, [rule, replacement])
  end

@@ -69,7 +66,6 @@ class String
  # Example:
  # singular(/([^aeiouy]|qu)ies$/i, '\1y')
  def self.singular(rule, replacement)
- Sequel::Inflections.singular(rule, replacement)
  @singulars.insert(0, [rule, replacement])
  end

@@ -80,7 +76,6 @@ class String
  # uncountable "money", "information"
  # uncountable %w( money information rice )
  def self.uncountable(*words)
- Sequel::Inflections.uncountable(*words)
  (@uncountables << words).flatten!
  end

data/lib/sequel/extensions/schema_dumper.rb (new file)

@@ -0,0 +1,238 @@
+ module Sequel
+ class Database
+ # Dump indexes for all tables as a migration. This complements
+ # the :indexes=>false option to dump_schema_migration.
+ def dump_indexes_migration
+ ts = tables
+ <<END_MIG
+ Class.new(Sequel::Migration) do
+ def up
+ #{ts.map{|t| dump_table_indexes(t, :add_index)}.reject{|x| x == ''}.join("\n\n").gsub(/^/o, '    ')}
+ end
+
+ def down
+ #{ts.map{|t| dump_table_indexes(t, :drop_index)}.reject{|x| x == ''}.join("\n\n").gsub(/^/o, '    ')}
+ end
+ end
+ END_MIG
+ end
+
+ # Return a string that contains a Sequel::Migration subclass that when
+ # run would recreate the database structure. Options:
+ # * :same_db - Don't attempt to translate database types to ruby types.
+ # If this isn't set to true, all database types will be translated to
+ # ruby types, but there is no guarantee that the migration generated
+ # will yield the same type. Without this set, types that aren't
+ # recognized will be translated to a string-like type.
+ # * :indexes - If set to false, don't dump indexes (they can be added
+ # later via dump_index_migration).
+ def dump_schema_migration(options={})
+ ts = tables
+ <<END_MIG
+ Class.new(Sequel::Migration) do
+ def up
+ #{ts.map{|t| dump_table_schema(t, options)}.join("\n\n").gsub(/^/o, '    ')}
+ end
+
+ def down
+ drop_table(#{ts.inspect[1...-1]})
+ end
+ end
+ END_MIG
+ end
+
+ # Return a string with a create table block that will recreate the given
+ # table's schema. Takes the same options as dump_schema_migration.
+ def dump_table_schema(table, options={})
+ s = schema(table).dup
+ pks = s.find_all{|x| x.last[:primary_key] == true}.map{|x| x.first}
+ options = options.merge(:single_pk=>true) if pks.length == 1
+ m = method(:column_schema_to_generator_opts)
+ im = method(:index_to_generator_opts)
+ indexes = indexes(table).sort_by{|k,v| k.to_s} if options[:indexes] != false and respond_to?(:indexes)
+ gen = Schema::Generator.new(self) do
+ s.each{|name, info| send(*m.call(name, info, options))}
+ primary_key(pks) if !@primary_key && pks.length > 0
+ indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts))} if indexes
+ end
+ commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join("\n\n")
+ "create_table(#{table.inspect}) do\n#{commands.gsub(/^/o, '  ')}\nend"
+ end
+
+ private
+
+ # Convert the given default, which should be a database specific string, into
+ # a ruby object. If it can't be converted, return the string with the inspect
+ # method modified so that .lit is always appended after it.
+ def column_schema_to_ruby_default(default, type)
+ case default
+ when /false/
+ false
+ when 'true'
+ true
+ when /\A\d+\z/
+ default.to_i
+ else
+ def default.inspect
+ "#{super}.lit"
+ end
+ default
+ end
+ end
+
+ # Convert the given name and parsed database schema into an array with a method
+ # name and arguments to it to pass to a Schema::Generator to recreate the column.
+ def column_schema_to_generator_opts(name, schema, options)
+ if options[:single_pk] && schema_autoincrementing_primary_key?(schema)
+ [:primary_key, name]
+ else
+ col_opts = options[:same_db] ? {:type=>schema[:db_type]} : column_schema_to_ruby_type(schema)
+ type = col_opts.delete(:type)
+ col_opts.delete(:size) if col_opts[:size].nil?
+ col_opts[:default] = column_schema_to_ruby_default(schema[:default], type) if schema[:default]
+ col_opts[:null] = false if schema[:allow_null] == false
+ [:column, name, type, col_opts]
+ end
+ end
+
+ # Convert the column schema information to a hash of column options, one of which must
+ # be :type. The other options added should modify that type (e.g. :size). If a
+ # database type is not recognized, return it as a String type.
+ def column_schema_to_ruby_type(schema)
+ case t = schema[:db_type].downcase
+ when /\A(?:medium|small)?int(?:eger)?(?:\((?:\d+)\))?\z/o
+ {:type=>Integer}
+ when /\Atinyint(?:\((?:\d+)\))?\z/o
+ {:type=>(Sequel.convert_tinyint_to_bool ? TrueClass : Integer)}
+ when /\Abigint(?:\((?:\d+)\))?\z/o
+ {:type=>Bignum}
+ when /\A(?:real|float|double(?: precision)?)\z/o
+ {:type=>Float}
+ when 'boolean'
+ {:type=>TrueClass}
+ when /\A(?:(?:tiny|medium|long)?text|clob)\z/o
+ {:type=>String, :text=>true}
+ when 'date'
+ {:type=>Date}
+ when 'datetime'
+ {:type=>DateTime}
+ when /\Atimestamp(?: with(?:out)? time zone)?\z/o
+ {:type=>DateTime}
+ when /\Atime(?: with(?:out)? time zone)?\z/o
+ {:type=>Time, :only_time=>true}
+ when /\Achar(?:acter)?(?:\((\d+)\))?\z/o
+ {:type=>String, :size=>($1.to_i if $1), :fixed=>true}
+ when /\A(?:varchar|character varying|bpchar|string)(?:\((\d+)\))?\z/o
+ s = ($1.to_i if $1)
+ {:type=>String, :size=>(s == 255 ? nil : s)}
+ when 'money'
+ {:type=>BigDecimal, :size=>[19,2]}
+ when /\A(?:decimal|numeric|number)(?:\((\d+)(?:,\s*(\d+))?\))?\z/o
+ s = [($1.to_i if $1), ($2.to_i if $2)].compact
+ {:type=>BigDecimal, :size=>(s.empty? ? nil : s)}
+ when /\A(?:bytea|(?:tiny|medium|long)?blob|(?:var)?binary)(?:\((\d+)\))?\z/o
+ {:type=>File, :size=>($1.to_i if $1)}
+ when 'year'
+ {:type=>Integer}
+ else
+ {:type=>String}
+ end
+ end
+
+ # Return a string that containing add_index/drop_index method calls for
+ # creating the index migration.
+ def dump_table_indexes(table, meth)
+ return '' unless respond_to?(:indexes)
+ im = method(:index_to_generator_opts)
+ indexes = indexes(table).sort_by{|k,v| k.to_s}
+ gen = Schema::Generator.new(self) do
+ indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts))}
+ end
+ gen.dump_indexes(meth=>table)
+ end
+
+ # Convert the parsed index information into options to the Generators index method.
+ def index_to_generator_opts(table, name, index_opts)
+ h = {}
+ h[:name] = name unless default_index_name(table, index_opts[:columns]) == name.to_s
+ h[:unique] = true if index_opts[:unique]
+ h
+ end
+ end
+
+ module Schema
+ class Generator
+ # Dump this generator's columns to a string that could be evaled inside
+ # another instance to represent the same columns
+ def dump_columns
+ strings = []
+ cols = columns.dup
+ if pkn = primary_key_name
+ cols.delete_if{|x| x[:name] == pkn}
+ pk = @primary_key.dup
+ pkname = pk.delete(:name)
+ @db.serial_primary_key_options.each{|k,v| pk.delete(k) if v == pk[k]}
+ strings << "primary_key #{pkname.inspect}#{opts_inspect(pk)}"
+ end
+ cols.each do |c|
+ c = c.dup
+ name = c.delete(:name)
+ type = c.delete(:type)
+ opts = opts_inspect(c)
+ strings << if type.is_a?(Class)
+ "#{type.name} #{name.inspect}#{opts}"
+ else
+ "column #{name.inspect}, #{type.inspect}#{opts}"
+ end
+ end
+ strings.join("\n")
+ end
+
+ # Dump this generator's constraints to a string that could be evaled inside
+ # another instance to represent the same constraints
+ def dump_constraints
+ constraints.map do |c|
+ c = c.dup
+ type = c.delete(:type)
+ case type
+ when :check
+ raise(Error, "can't dump check/constraint specified with Proc") if c[:check].is_a?(Proc)
+ name = c.delete(:name)
+ if !name and c[:check].length == 1 and c[:check].first.is_a?(Hash)
+ "check #{c[:check].first.inspect[1...-1]}"
+ else
+ "#{name ? "constraint #{name.inspect}," : 'check'} #{c[:check].map{|x| x.inspect}.join(', ')}"
+ end
+ else
+ cols = c.delete(:columns)
+ "#{type} #{cols.inspect}#{opts_inspect(c)}"
+ end
+ end.join("\n")
+ end
+
+ # Dump this generator's indexes to a string that could be evaled inside
+ # another instance to represent the same indexes. Options:
+ # * :add_index - Use add_index instead of index, so the methods
+ # can be called outside of a generator but inside a migration.
+ # The value of this option should be the table name to use.
+ # * :drop_index - Same as add_index, but create drop_index statements.
+ def dump_indexes(options={})
+ indexes.map do |c|
+ c = c.dup
+ cols = c.delete(:columns)
+ if table = options[:add_index] || options[:drop_index]
+ "#{options[:drop_index] ? 'drop' : 'add'}_index #{table.inspect}, #{cols.inspect}#{opts_inspect(c)}"
+ else
+ "index #{cols.inspect}#{opts_inspect(c)}"
+ end
+ end.join("\n")
+ end
+
+ private
+
+ def opts_inspect(opts)
+ ", #{opts.inspect[1...-1]}" if opts.length > 0
+ end
+ end
+ end
+ end
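
A usage sketch for the new extension (the require path matches the file added above; the connection URL and table name are hypothetical):

  require 'sequel'
  require 'sequel/extensions/schema_dumper'
  DB = Sequel.connect('sqlite://blog.db')

  puts DB.dump_schema_migration                  # whole schema as a Sequel::Migration subclass
  puts DB.dump_schema_migration(:same_db=>true)  # keep database-specific column types
  puts DB.dump_schema_migration(:indexes=>false) # skip indexes here...
  puts DB.dump_indexes_migration                 # ...and dump them as a separate migration
  puts DB.dump_table_schema(:posts)              # a single create_table block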