sequel 3.17.0 → 3.18.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/CHANGELOG +22 -0
- data/doc/migration.rdoc +34 -0
- data/doc/release_notes/3.18.0.txt +121 -0
- data/lib/sequel/adapters/jdbc.rb +6 -1
- data/lib/sequel/adapters/mysql.rb +11 -1
- data/lib/sequel/adapters/mysql2.rb +3 -1
- data/lib/sequel/adapters/postgres.rb +1 -1
- data/lib/sequel/adapters/shared/sqlite.rb +13 -1
- data/lib/sequel/adapters/sqlite.rb +31 -26
- data/lib/sequel/connection_pool/sharded_single.rb +5 -1
- data/lib/sequel/connection_pool/sharded_threaded.rb +5 -1
- data/lib/sequel/dataset/sql.rb +0 -5
- data/lib/sequel/extensions/migration.rb +117 -1
- data/lib/sequel/extensions/to_dot.rb +137 -0
- data/lib/sequel/model/base.rb +1 -1
- data/lib/sequel/plugins/instance_hooks.rb +14 -9
- data/lib/sequel/plugins/json_serializer.rb +3 -3
- data/lib/sequel/sql.rb +1 -1
- data/lib/sequel/version.rb +1 -1
- data/spec/adapters/sqlite_spec.rb +15 -7
- data/spec/core/connection_pool_spec.rb +17 -1
- data/spec/core/dataset_spec.rb +9 -0
- data/spec/extensions/instance_hooks_spec.rb +46 -0
- data/spec/extensions/json_serializer_spec.rb +11 -0
- data/spec/extensions/migration_spec.rb +107 -0
- data/spec/extensions/spec_helper.rb +1 -1
- data/spec/extensions/to_dot_spec.rb +152 -0
- data/spec/files/reversible_migrations/001_reversible.rb +5 -0
- data/spec/files/reversible_migrations/002_reversible.rb +5 -0
- data/spec/files/reversible_migrations/003_reversible.rb +5 -0
- data/spec/files/reversible_migrations/004_reversible.rb +5 -0
- data/spec/files/reversible_migrations/005_reversible.rb +10 -0
- data/spec/integration/migrator_test.rb +54 -0
- data/spec/model/association_reflection_spec.rb +19 -0
- metadata +13 -4
data/lib/sequel/extensions/to_dot.rb
ADDED
@@ -0,0 +1,137 @@
+# This adds a <tt>Sequel::Dataset#to_dot</tt> method. The +to_dot+ method
+# returns a string that can be processed by graphviz's +dot+ program in
+# order to get a visualization of the dataset. Basically, it shows a version
+# of the dataset's abstract syntax tree.
+
+module Sequel
+  class Dataset
+    # The option keys that should be included in the dot output.
+    TO_DOT_OPTIONS = [:with, :distinct, :select, :from, :join, :where, :group, :having, :compounds, :order, :limit, :offset, :lock].freeze
+
+    # Return a string that can be processed by the +dot+ program (included
+    # with graphviz) in order to see a visualization of the dataset's
+    # abstract syntax tree.
+    def to_dot
+      i = 0
+      dot = ["digraph G {", "#{i} [label=\"self\"];"]
+      _to_dot(dot, "", i, self, i)
+      dot << "}"
+      dot.join("\n")
+    end
+
+    private
+
+    # Internal recursive version that handles all object types understood
+    # by Sequel. Arguments:
+    # * dot :: An array of strings representing the lines in the returned
+    #          output. This function just pushes strings onto this array.
+    # * l :: The transition label from the parent node of the AST to the
+    #        current node.
+    # * c :: An integer representing the parent node of the AST.
+    # * e :: The current node of the AST.
+    # * i :: The integer representing the last created node of the AST.
+    #
+    # The basic algorithm is that the +i+ is incremented to get the current
+    # node's integer. Then the transition from the parent node to the
+    # current node is added to the +dot+ array. Finally, the current node
+    # is added to the +dot+ array, and if it is a compound node with children,
+    # its children are then added by recursively calling this method. The
+    # return value is the integer representing the last created node.
+    def _to_dot(dot, l, c, e, i)
+      i += 1
+      dot << "#{c} -> #{i} [label=\"#{l}\"];" if l
+      c = i
+      case e
+      when LiteralString
+        dot << "#{i} [label=\"#{e.inspect.gsub('"', '\\"')}.lit\"];"
+        i
+      when Symbol, Numeric, String, Class, TrueClass, FalseClass, NilClass
+        dot << "#{i} [label=\"#{e.inspect.gsub('"', '\\"')}\"];"
+        i
+      when Array
+        dot << "#{i} [label=\"Array\"];"
+        e.each_with_index do |v, j|
+          i = _to_dot(dot, j, c, v, i)
+        end
+      when Hash
+        dot << "#{i} [label=\"Hash\"];"
+        e.each do |k, v|
+          i = _to_dot(dot, k, c, v, i)
+        end
+      when SQL::ComplexExpression
+        dot << "#{i} [label=\"ComplexExpression: #{e.op}\"];"
+        e.args.each_with_index do |v, j|
+          i = _to_dot(dot, j, c, v, i)
+        end
+      when SQL::Identifier
+        dot << "#{i} [label=\"Identifier\"];"
+        i = _to_dot(dot, :value, c, e.value, i)
+      when SQL::QualifiedIdentifier
+        dot << "#{i} [label=\"QualifiedIdentifier\"];"
+        i = _to_dot(dot, :table, c, e.table, i)
+        i = _to_dot(dot, :column, c, e.column, i)
+      when SQL::OrderedExpression
+        dot << "#{i} [label=\"OrderedExpression: #{e.descending ? :DESC : :ASC}#{" NULLS #{e.nulls.to_s.upcase}" if e.nulls}\"];"
+        i = _to_dot(dot, :expression, c, e.expression, i)
+      when SQL::AliasedExpression
+        dot << "#{i} [label=\"AliasedExpression\"];"
+        i = _to_dot(dot, :expression, c, e.expression, i)
+        i = _to_dot(dot, :alias, c, e.aliaz, i)
+      when SQL::CaseExpression
+        dot << "#{i} [label=\"CaseExpression\"];"
+        i = _to_dot(dot, :expression, c, e.expression, i) if e.expression
+        i = _to_dot(dot, :conditions, c, e.conditions, i)
+        i = _to_dot(dot, :default, c, e.default, i)
+      when SQL::Cast
+        dot << "#{i} [label=\"Cast\"];"
+        i = _to_dot(dot, :expr, c, e.expr, i)
+        i = _to_dot(dot, :type, c, e.type, i)
+      when SQL::Function
+        dot << "#{i} [label=\"Function: #{e.f}\"];"
+        e.args.each_with_index do |v, j|
+          i = _to_dot(dot, j, c, v, i)
+        end
+      when SQL::Subscript
+        dot << "#{i} [label=\"Subscript: #{e.f}\"];"
+        i = _to_dot(dot, :f, c, e.f, i)
+        i = _to_dot(dot, :sub, c, e.sub, i)
+      when SQL::WindowFunction
+        dot << "#{i} [label=\"WindowFunction\"];"
+        i = _to_dot(dot, :function, c, e.function, i)
+        i = _to_dot(dot, :window, c, e.window, i)
+      when SQL::Window
+        dot << "#{i} [label=\"Window\"];"
+        i = _to_dot(dot, :opts, c, e.opts, i)
+      when SQL::PlaceholderLiteralString
+        str = e.str
+        str = "(#{str})" if e.parens
+        dot << "#{i} [label=\"PlaceholderLiteralString: #{str.inspect.gsub('"', '\\"')}\"];"
+        i = _to_dot(dot, :args, c, e.args, i)
+      when SQL::JoinClause
+        str = "#{e.join_type.to_s.upcase} JOIN"
+        if e.is_a?(SQL::JoinOnClause)
+          str << " ON"
+        elsif e.is_a?(SQL::JoinUsingClause)
+          str << " USING"
+        end
+        dot << "#{i} [label=\"#{str}\"];"
+        i = _to_dot(dot, :table, c, e.table, i)
+        i = _to_dot(dot, :alias, c, e.table_alias, i) if e.table_alias
+        if e.is_a?(SQL::JoinOnClause)
+          i = _to_dot(dot, :on, c, e.on, i)
+        elsif e.is_a?(SQL::JoinUsingClause)
+          i = _to_dot(dot, :using, c, e.using, i)
+        end
+      when Dataset
+        dot << "#{i} [label=\"Dataset\"];"
+        TO_DOT_OPTIONS.each do |k|
+          next unless e.opts[k]
+          i = _to_dot(dot, k, c, e.opts[k], i)
+        end
+      else
+        dot << "#{i} [label=\"Unhandled: #{e.inspect.gsub('"', "''")}\"];"
+      end
+      i
+    end
+  end
+end
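For orientation, a small, hypothetical usage sketch of the new extension (the DB constant, table names, and output file are illustrative assumptions, not part of the release): requiring the file adds to_dot to every dataset, and the returned string is plain dot source that graphviz can render.

    # Hypothetical sketch; DB stands in for an existing Sequel connection.
    require 'sequel'
    require 'sequel/extensions/to_dot'

    ds = DB[:albums].join(:artists, :id=>:artist_id).where(:copies_sold=>1..100)
    File.open('ast.dot', 'w'){|f| f.write(ds.to_dot)}
    # Render the abstract syntax tree with graphviz:
    #   dot -Tpng ast.dot -o ast.png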
data/lib/sequel/model/base.rb
CHANGED
@@ -553,7 +553,7 @@ module Sequel
       single_table = ds_opts[:from] && (ds_opts[:from].length == 1) \
         && !ds_opts.include?(:join) && !ds_opts.include?(:sql)
       get_columns = proc{check_non_connection_error{columns} || []}
-      if single_table && (schema_array = (db.schema(
+      if single_table && (schema_array = (db.schema(dataset.first_source_table, :reload=>reload) rescue nil))
         schema_array.each{|k,v| schema_hash[k] = v}
         if ds_opts.include?(:select)
           # We don't remove the columns from the schema_hash,
data/lib/sequel/plugins/instance_hooks.rb
CHANGED
@@ -10,7 +10,12 @@ module Sequel
     # after calling super. If any of the instance level before hook blocks return
     # false, no more instance level before hooks are called and false is returned.
     #
-    # Instance level hooks
+    # Instance level hooks for before and after are cleared after all related
+    # after level instance hooks have run. This means that if you add a before_create
+    # and before_update instance hooks to a new object, the before_create hook will
+    # be run the first time you save the object (creating it), and the before_update
+    # hook will be run the second time you save the object (updating it), and no
+    # hooks will be run the third time you save the object.
     #
     # Usage:
     #
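A brief, hypothetical sketch of the behavior documented above (the Album model and column are assumptions; the comments mark which hooks fire on each save):

    # Hypothetical model using the plugin.
    class Album < Sequel::Model
      plugin :instance_hooks
    end

    album = Album.new(:name => 'RF')
    album.before_create_hook{puts 'before create'}
    album.before_update_hook{puts 'before update'}
    album.save  # new object: runs the before_create hook, then the create/save hooks are cleared
    album.save  # existing object: runs the before_update hook
    album.save  # related hooks were cleared after the previous save, so nothing extra runs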
@@ -27,14 +32,14 @@ module Sequel
         HOOKS.each{|h| class_eval("def #{h}_hook(&block); add_instance_hook(:#{h}, &block) end", __FILE__, __LINE__)}
 
         BEFORE_HOOKS.each{|h| class_eval("def #{h}; run_before_instance_hooks(:#{h}) == false ? false : super end", __FILE__, __LINE__)}
-
-
-
-
-
-
-
-
+        AFTER_HOOKS.each{|h| class_eval(<<-END, __FILE__, __LINE__ + 1)}
+          def #{h}
+            super
+            run_after_instance_hooks(:#{h})
+            @instance_hooks.delete(:#{h})
+            @instance_hooks.delete(:#{h.to_s.sub('after', 'before')})
+          end
+        END
 
         private
 
data/lib/sequel/plugins/json_serializer.rb
CHANGED
@@ -106,10 +106,10 @@ module Sequel
         hash.each do |k, v|
           if assocs.include?(k)
             obj.associations[k.to_sym] = v
-          elsif cols.include?(k)
-            obj.values[k.to_sym] = v
           elsif meths.include?("#{k}=")
             obj.send("#{k}=", v)
+          elsif cols.include?(k)
+            obj.values[k.to_sym] = v
           else
             raise Error, "Entry in JSON hash not an association or column and no setter method exists: #{k}"
           end
@@ -171,7 +171,7 @@ module Sequel
         vals.keys - Array(opts[:except])
       end
       h = (JSON.create_id && !opts[:naked] && !opts[:root]) ? {JSON.create_id=>model.name} : {}
-      cols.each{|c| h[c.to_s] =
+      cols.each{|c| h[c.to_s] = send(c)}
       if inc = opts[:include]
         if inc.is_a?(Hash)
           inc.each do |k, v|
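A hedged sketch of what the two changes above mean in practice (the Artist model and its custom setter are assumptions): deserialization now prefers a setter method over writing the raw column value, and to_json reads values through the accessor, so custom coercions survive the JSON.parse round trip exercised by the specs.

    # Hypothetical model whose setter coerces the incoming JSON string.
    class Artist < Sequel::Model
      plugin :json_serializer
      def name=(v)
        super(Date.parse(v))
      end
    end

    artist = Artist.load(:name=>Date.today)
    json = artist.to_json     # the value comes from the reader method
    JSON.parse(json).name     # the name= setter runs during deserialization, yielding a Date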
data/lib/sequel/sql.rb
CHANGED
@@ -6,7 +6,7 @@ module Sequel
   class BasicObject
     # The instance methods to not remove from the class when removing
     # other methods.
-    KEEP_METHODS = %w"__id__ __send__ __metaclass__ instance_eval == equal? initialize"
+    KEEP_METHODS = %w"__id__ __send__ __metaclass__ instance_eval == equal? initialize method_missing"
 
     # Remove all but the most basic instance methods from the class. A separate
    # method so that it can be called again if necessary if you load libraries
data/lib/sequel/version.rb
CHANGED
@@ -3,7 +3,7 @@ module Sequel
   MAJOR = 3
   # The minor version of Sequel. Bumped for every non-patch level
   # release, generally around once a month.
-  MINOR = 17
+  MINOR = 18
   # The tiny version of Sequel. Usually 0, only bumped for bugfix
   # releases that fix regressions from previous versions.
   TINY = 0
data/spec/adapters/sqlite_spec.rb
CHANGED
@@ -101,14 +101,13 @@ context "An SQLite database" do
   cspecify "should support timestamps and datetimes and respect datetime_class", :do, :jdbc, :amalgalite do
     @db.create_table!(:time){timestamp :t; datetime :d}
     t1 = Time.at(1)
-    @db[:time] << {:t => t1, :d => t1
-    @db[:time]
-    @db[:time].map(:
-    @db[:time].map(:d).should == [t1, t1]
+    @db[:time] << {:t => t1, :d => t1}
+    @db[:time].map(:t).should == [t1]
+    @db[:time].map(:d).should == [t1]
     Sequel.datetime_class = DateTime
     t2 = Sequel.string_to_datetime(t1.iso8601)
-    @db[:time].map(:t).should == [t2
-    @db[:time].map(:d).should == [t2
+    @db[:time].map(:t).should == [t2]
+    @db[:time].map(:d).should == [t2]
   end
 
   specify "should support sequential primary keys" do
@@ -412,7 +411,7 @@ context "A SQLite database" do
 
   specify "should choose a temporary table name that isn't already used when dropping or renaming columns" do
     sqls = []
-    @db.loggers << (l=Class.new{define_method(
+    @db.loggers << (l=Class.new{%w'info error'.each{|m| define_method(m){|sql| sqls << sql}}}.new)
     @db.create_table! :test3 do
       Integer :h
       Integer :i
@@ -459,4 +458,13 @@ context "A SQLite database" do
     @db.add_index :test2, :value, :unique => true
     @db.drop_index :test2, :value
   end
+
+  specify "should keep applicable indexes when emulating schema methods" do
+    @db.create_table!(:a){Integer :a; Integer :b}
+    @db.add_index :a, :a
+    @db.add_index :a, :b
+    @db.add_index :a, [:b, :a]
+    @db.drop_column :a, :b
+    @db.indexes(:a).should == {:a_a_index=>{:unique=>false, :columns=>[:a]}}
+  end
 end
data/spec/core/connection_pool_spec.rb
CHANGED
@@ -315,7 +315,7 @@ context "Threaded Sharded Connection Pool" do
     @invoked_count = 0
     @pool = Sequel::ConnectionPool.get_pool(CONNECTION_POOL_DEFAULTS.merge(:max_connections=>5, :servers=>{})) {@invoked_count += 1}
   end
-
+
   it_should_behave_like "A threaded connection pool"
 end
 
@@ -418,6 +418,22 @@ context "A connection pool with multiple servers" do
     end
   end
 
+  specify "should support a :servers_hash option used for converting the server argument" do
+    @pool = Sequel::ConnectionPool.get_pool(CONNECTION_POOL_DEFAULTS.merge(:servers_hash=>Hash.new(:read_only), :servers=>{:read_only=>{}})){|server| "#{server}#{@invoked_counts[server] += 1}"}
+    @pool.hold(:blah) do |c1|
+      c1.should == "read_only1"
+      @pool.hold(:blah) do |c2|
+        c2.should == c1
+        @pool.hold(:blah2) do |c3|
+          c2.should == c3
+        end
+      end
+    end
+
+    @pool = Sequel::ConnectionPool.get_pool(CONNECTION_POOL_DEFAULTS.merge(:servers_hash=>Hash.new{|h,k| raise Sequel::Error}, :servers=>{:read_only=>{}})){|server| "#{server}#{@invoked_counts[server] += 1}"}
+    proc{@pool.hold(:blah){|c1|}}.should raise_error(Sequel::Error)
+  end
+
   specify "should use the requested server if server is given" do
     @pool.size(:read_only).should == 0
     @pool.hold(:read_only) do |c|
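The new spec above exercises a :servers_hash pool option. A hedged sketch of how it might be used at the Database level (connection URL, shard names, and hosts are assumptions): the hash maps requested server names to configured ones, and its default value (or default block) decides what happens for shards that were never configured.

    # Hypothetical setup: unknown shard names quietly fall back to :read_only.
    DB = Sequel.connect('postgres://primary-host/app',
      :servers=>{:read_only=>{:host=>'replica-host'}},
      :servers_hash=>Hash.new(:read_only))

    DB[:albums].server(:some_unconfigured_shard).all  # runs against :read_only

    # Alternatively, make unknown shard names raise instead of falling back:
    #   :servers_hash=>Hash.new{|h, k| raise Sequel::Error, "unknown shard: #{k}"}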
data/spec/core/dataset_spec.rb
CHANGED
@@ -1552,6 +1552,10 @@ context "Dataset#qualified_column_name" do
   specify "should not changed the qualifed column's table if given a qualified symbol" do
     @dataset.literal(@dataset.send(:qualified_column_name, :ccc__b, :items)).should == 'ccc.b'
   end
+
+  specify "should handle an aliased identifier" do
+    @dataset.literal(@dataset.send(:qualified_column_name, :ccc, :items.as(:i))).should == 'i.ccc'
+  end
 end
 
 class DummyDataset < Sequel::Dataset
@@ -1937,6 +1941,11 @@ context "Dataset#join_table" do
       'SELECT * FROM "stats" INNER JOIN "players" AS "p" ON ("p"."id" = "stats"."player_id")'
   end
 
+  specify "should support aliased tables using an implicit alias" do
+    @d.from('stats').join(:players.as(:p), {:id => :player_id}).sql.should ==
+      'SELECT * FROM "stats" INNER JOIN "players" AS "p" ON ("p"."id" = "stats"."player_id")'
+  end
+
   specify "should support using an alias for the FROM when doing the first join with unqualified condition columns" do
     ds = MockDataset.new(nil).from(:foo => :f)
     ds.quote_identifiers = true
data/spec/extensions/instance_hooks_spec.rb
CHANGED
@@ -130,4 +130,50 @@ describe "InstanceHooks plugin" do
     @o.valid?.should == false
     @r.should == [4, false]
   end
+
+  it "should clear only related hooks on successful create" do
+    @o.after_destroy_hook{r 1}
+    @o.before_destroy_hook{r 2}
+    @o.after_update_hook{r 3}
+    @o.before_update_hook{r 4}
+    @o.before_save_hook{r 5}
+    @o.after_save_hook{r 6}
+    @o.before_create_hook{r 7}
+    @o.after_create_hook{r 8}
+    @o.save.should_not == nil
+    @r.should == [5, 7, 8, 6]
+    @o.instance_variable_set(:@new, false)
+    @o.save.should_not == nil
+    @r.should == [5, 7, 8, 6, 4, 3]
+    @o.save.should_not == nil
+    @r.should == [5, 7, 8, 6, 4, 3]
+    @o.destroy
+    @r.should == [5, 7, 8, 6, 4, 3, 2, 1]
+  end
+
+  it "should clear only related hooks on successful update" do
+    @x.after_destroy_hook{r 1}
+    @x.before_destroy_hook{r 2}
+    @x.before_update_hook{r 3}
+    @x.after_update_hook{r 4}
+    @x.before_save_hook{r 5}
+    @x.after_save_hook{r 6}
+    @x.save.should_not == nil
+    @r.should == [5, 3, 4, 6]
+    @x.save.should_not == nil
+    @r.should == [5, 3, 4, 6]
+    @x.destroy
+    @r.should == [5, 3, 4, 6, 2, 1]
+  end
+
+  it "should clear only related hooks on successful destroy" do
+    @x.after_destroy_hook{r 1}
+    @x.before_destroy_hook{r 2}
+    @x.before_update_hook{r 3}
+    @x.before_save_hook{r 4}
+    @x.destroy
+    @r.should == [2, 1]
+    @x.save.should_not == nil
+    @r.should == [2, 1, 4, 3]
+  end
 end
data/spec/extensions/json_serializer_spec.rb
CHANGED
@@ -10,12 +10,14 @@ describe "Sequel::Plugins::JsonSerializer" do
     class ::Artist < Sequel::Model
       plugin :json_serializer
      columns :id, :name
+      def_column_accessor :id, :name
       one_to_many :albums
     end
     class ::Album < Sequel::Model
       attr_accessor :blah
       plugin :json_serializer
       columns :id, :name, :artist_id
+      def_column_accessor :id, :name, :artist_id
       many_to_one :artist
     end
     @artist = Artist.load(:id=>2, :name=>'YJM')
@@ -34,6 +36,15 @@ describe "Sequel::Plugins::JsonSerializer" do
     JSON.parse(@album.to_json).should == @album
   end
 
+  it "should handle ruby objects in values" do
+    class ::Artist
+      def name=(v)
+        super(Date.parse(v))
+      end
+    end
+    JSON.parse(Artist.load(:name=>Date.today).to_json).should == Artist.load(:name=>Date.today)
+  end
+
   it "should handle the :only option" do
     JSON.parse(@artist.to_json(:only=>:name)).should == Artist.load(:name=>@artist.name)
     JSON.parse(@album.to_json(:only=>[:id, :name])).should == Album.load(:id=>@album.id, :name=>@album.name)
data/spec/extensions/migration_spec.rb
CHANGED
@@ -86,6 +86,113 @@ context "SimpleMigration#apply" do
   end
 end
 
+context "Reversible Migrations with Sequel.migration{change{}}" do
+  before do
+    @c = Class.new do
+      self::AT = Class.new do
+        attr_reader :actions
+        def initialize(&block)
+          @actions = []
+          instance_eval(&block)
+        end
+        def method_missing(*args)
+          @actions << args
+        end
+        self
+      end
+      attr_reader :actions
+      def initialize
+        @actions = []
+      end
+      def method_missing(*args)
+        @actions << args
+      end
+      def alter_table(*args, &block)
+        @actions << [:alter_table, self.class::AT.new(&block).actions]
+      end
+    end
+    @db = @c.new
+    @p = Proc.new do
+      create_table(:a){Integer :a}
+      add_column :a, :b, String
+      add_index :a, :b
+      rename_column :a, :b, :c
+      rename_table :a, :b
+      alter_table(:b) do
+        add_column :d, String
+        add_constraint :blah, 'd IS NOT NULL'
+        add_foreign_key :e, :b
+        add_primary_key :f, :b
+        add_index :e, :name=>'e_n'
+        add_full_text_index :e, :name=>'e_ft'
+        add_spatial_index :e, :name=>'e_s'
+        rename_column :e, :g
+      end
+      create_view(:c, 'SELECT * FROM b')
+    end
+  end
+
+  specify "should apply up with normal actions in normal order" do
+    p = @p
+    Sequel.migration{change(&p)}.apply(@db, :up)
+    @db.actions.should == [[:create_table, :a],
+      [:add_column, :a, :b, String],
+      [:add_index, :a, :b],
+      [:rename_column, :a, :b, :c],
+      [:rename_table, :a, :b],
+      [:alter_table, [
+        [:add_column, :d, String],
+        [:add_constraint, :blah, "d IS NOT NULL"],
+        [:add_foreign_key, :e, :b],
+        [:add_primary_key, :f, :b],
+        [:add_index, :e, {:name=>"e_n"}],
+        [:add_full_text_index, :e, {:name=>"e_ft"}],
+        [:add_spatial_index, :e, {:name=>"e_s"}],
+        [:rename_column, :e, :g]]
+      ],
+      [:create_view, :c, "SELECT * FROM b"]]
+  end
+
+  specify "should execute down with reversing actions in reverse order" do
+    p = @p
+    Sequel.migration{change(&p)}.apply(@db, :down)
+    @db.actions.should == [[:drop_view, :c],
+      [:alter_table, [
+        [:rename_column, :g, :e],
+        [:drop_index, :e, {:name=>"e_s"}],
+        [:drop_index, :e, {:name=>"e_ft"}],
+        [:drop_index, :e, {:name=>"e_n"}],
+        [:drop_column, :f],
+        [:drop_column, :e],
+        [:drop_constraint, :blah],
+        [:drop_column, :d]]
+      ],
+      [:rename_table, :b, :a],
+      [:rename_column, :a, :c, :b],
+      [:drop_index, :a, :b],
+      [:drop_column, :a, :b],
+      [:drop_table, :a]]
+  end
+
+  specify "should raise in the down direction if migration uses unsupported method" do
+    m = Sequel.migration{change{run 'SQL'}}
+    proc{m.apply(@db, :up)}.should_not raise_error(Sequel::Error)
+    proc{m.apply(@db, :down)}.should raise_error(Sequel::Error)
+  end
+
+  specify "should raise in the down direction if migration uses add_primary_key with an array" do
+    m = Sequel.migration{change{alter_table(:a){add_primary_key [:b]}}}
+    proc{m.apply(@db, :up)}.should_not raise_error(Sequel::Error)
+    proc{m.apply(@db, :down)}.should raise_error(Sequel::Error)
+  end
+
+  specify "should raise in the down direction if migration uses add_foreign_key with an array" do
+    m = Sequel.migration{change{alter_table(:a){add_foreign_key [:b]}}}
+    proc{m.apply(@db, :up)}.should_not raise_error(Sequel::Error)
+    proc{m.apply(@db, :down)}.should raise_error(Sequel::Error)
+  end
+end
+
 context "Sequel::IntegerMigrator" do
   before do
     dbc = Class.new(MockDatabase) do
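The specs above cover the new Sequel.migration change block, which records the schema operations it runs and derives the down migration by reversing them. A minimal sketch of a migration written this way (table and column names are illustrative); methods without a known reversal, such as run, still raise when migrating down:

    # Reversible migration: only the up logic is written; migrating down
    # automatically drops the index and then the table.
    Sequel.migration do
      change do
        create_table(:artists) do
          primary_key :id
          String :name, :null=>false
        end
        add_index :artists, :name
      end
    end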