lhm 1.0.0.rc.1

@@ -0,0 +1,31 @@
+ #
+ # Copyright (c) 2011, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
+ # Schmidt
+ #
+
+ require File.expand_path(File.dirname(__FILE__)) + '/integration_helper'
+
+ require 'lhm'
+ require 'lhm/table'
+ require 'lhm/migration'
+
+ describe Lhm::Chunker do
+   include IntegrationHelper
+
+   before(:each) { connect! }
+
+   describe "copying" do
+     before(:each) do
+       @origin = table_create(:origin)
+       @destination = table_create(:destination)
+       @migration = Lhm::Migration.new(@origin, @destination)
+     end
+
+     it "should copy 23 rows from origin to destination" do
+       23.times { |n| execute("insert into origin set common = '#{ n }'") }
+       Lhm::Chunker.new(@migration, limit = 23, connection).run
+       count_all(@destination.name).must_equal(23)
+     end
+   end
+ end
+
@@ -0,0 +1,60 @@
+ #
+ # Copyright (c) 2011, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
+ # Schmidt
+ #
+
+ require File.expand_path(File.dirname(__FILE__)) + '/integration_helper'
+
+ require 'lhm/table'
+ require 'lhm/migration'
+ require 'lhm/entangler'
+
+ describe Lhm::Entangler do
+   include IntegrationHelper
+
+   before(:each) { connect! }
+
+   describe "entanglement" do
+     before(:each) do
+       @origin = table_create("origin")
+       @destination = table_create("destination")
+       @migration = Lhm::Migration.new(@origin, @destination)
+       @entangler = Lhm::Entangler.new(@migration, connection)
+     end
+
+     it "should replay inserts from origin into destination" do
+       @entangler.run do |entangler|
+         execute("insert into origin (common) values ('inserted')")
+       end
+
+       count(:destination, "common", "inserted").must_equal(1)
+     end
+
+     it "should replay deletes from origin into destination" do
+       execute("insert into origin (common) values ('inserted')")
+
+       @entangler.run do |entangler|
+         execute("delete from origin where common = 'inserted'")
+       end
+
+       count(:destination, "common", "inserted").must_equal(0)
+     end
+
+     it "should replay updates from origin into destination" do
+       @entangler.run do |entangler|
+         execute("insert into origin (common) values ('inserted')")
+         execute("update origin set common = 'updated'")
+       end
+
+       count(:destination, "common", "updated").must_equal(1)
+     end
+
+     it "should remove entanglement" do
+       @entangler.run {}
+
+       execute("insert into origin (common) values ('inserted')")
+       count(:destination, "common", "inserted").must_equal(0)
+     end
+   end
+ end
+
@@ -0,0 +1,74 @@
+ #
+ # Copyright (c) 2011, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
+ # Schmidt
+ #
+
+ require File.expand_path(File.dirname(__FILE__)) + "/../bootstrap"
+
+ require 'active_record'
+ require 'lhm/table'
+
+ module IntegrationHelper
+   delegate :select_one, :select_value, :execute, :to => :connection
+
+   #
+   # Connectivity
+   #
+
+   def connect!
+     ActiveRecord::Base.establish_connection(
+       :adapter => 'mysql',
+       :database => 'lhm',
+       :username => '',
+       :host => 'localhost'
+     )
+
+     ActiveRecord::Migration.verbose = !!ENV["VERBOSE"]
+   end
+
+   def connection
+     ActiveRecord::Base.connection
+   end
+
+   #
+   # Test Data
+   #
+
+   def fixture(name)
+     File.read($fixtures.join("#{ name }.ddl"))
+   end
+
+   def table_create(fixture_name)
+     execute "drop table if exists `#{ fixture_name }`"
+     execute fixture(fixture_name)
+     table_read(fixture_name)
+   end
+
+   def table_read(fixture_name)
+     Lhm::Table.parse(fixture_name, connection)
+   end
+
+   def table_exists?(table)
+     connection.table_exists?(table.name)
+   end
+
+   #
+   # Database Helpers
+   #
+
+   def count(table, column, value)
+     query = "select count(*) from #{ table } where #{ column } = '#{ value }'"
+     select_value(query).to_i
+   end
+
+   def count_all(table)
+     query = "select count(*) from #{ table }"
+     select_value(query).to_i
+   end
+
+   def key?(table, cols)
+     query = "show indexes in #{ table.name } where key_name = '#{ table.idx_name(cols) }'"
+     !!select_value(query)
+   end
+ end
+
@@ -0,0 +1,118 @@
+ #
+ # Copyright (c) 2011, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
+ # Schmidt
+ #
+
+ require File.expand_path(File.dirname(__FILE__)) + '/integration_helper'
+
+ require 'lhm'
+
+ describe Lhm do
+   include IntegrationHelper
+   include Lhm
+
+   before(:each) { connect! }
+
+   describe "changes" do
+     before(:each) do
+       table_create(:users)
+     end
+
+     it "should add a column" do
+       hadron_change_table("users") do |t|
+         t.add_column(:logins, "INT(12) DEFAULT '0'")
+       end
+
+       table_read("users").columns["logins"].must_equal({
+         :type => "int(12)",
+         :metadata => "DEFAULT '0'"
+       })
+     end
+
+     it "should copy all rows" do
+       23.times { |n| execute("insert into users set reference = '#{ n }'") }
+
+       hadron_change_table("users") do |t|
+         t.add_column(:logins, "INT(12) DEFAULT '0'")
+       end
+
+       count_all("users").must_equal(23)
+     end
+
+     it "should remove a column" do
+       hadron_change_table("users") do |t|
+         t.remove_column(:comment)
+       end
+
+       table_read("users").columns["comment"].must_equal nil
+     end
+
+     it "should add an index" do
+       hadron_change_table("users") do |t|
+         t.add_index([:comment, :created_at])
+       end
+
+       key?(table_read("users"), ["comment", "created_at"]).must_equal(true)
+     end
+
+     it "should remove an index" do
+       hadron_change_table("users") do |t|
+         t.remove_index(:username, :created_at)
+       end
+
+       key?(table_read("users"), ["username", "created_at"]).must_equal(false)
+     end
+
+     it "should apply a ddl statement" do
+       hadron_change_table("users") do |t|
+         t.ddl("alter table %s add column flag tinyint(1)" % t.name)
+       end
+
+       table_read("users").columns["flag"].must_equal({
+         :type => "tinyint(1)",
+         :metadata => "DEFAULT NULL"
+       })
+     end
+
+     describe "parallel" do
+       it "should preserve inserts during migration" do
+         50.times { |n| execute("insert into users set reference = '#{ n }'") }
+
+         insert = Thread.new do
+           10.times do |n|
+             execute("insert into users set reference = '#{ 100 + n }'")
+             sleep(0.17)
+           end
+         end
+
+         hadron_change_table("users", :stride => 10, :throttle => 97) do |t|
+           t.add_column(:parallel, "INT(10) DEFAULT '0'")
+         end
+
+         insert.join
+
+         count_all("users").must_equal(60)
+       end
+     end
+
+     it "should preserve deletes during migration" do
+       50.times { |n| execute("insert into users set reference = '#{ n }'") }
+
+       insert = Thread.new do
+         10.times do |n|
+           execute("delete from users where id = '#{ n + 1 }'")
+           sleep(0.17)
+         end
+       end
+
+       hadron_change_table("users", :stride => 10, :throttle => 97) do |t|
+         t.add_column(:parallel, "INT(10) DEFAULT '0'")
+       end
+
+       insert.join
+
+       count_all("users").must_equal(40)
+     end
+   end
+ end
+
@@ -0,0 +1,41 @@
+ #
+ # Copyright (c) 2011, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
+ # Schmidt
+ #
+
+ require File.expand_path(File.dirname(__FILE__)) + '/integration_helper'
+
+ require 'lhm/table'
+ require 'lhm/migration'
+ require 'lhm/locked_switcher'
+
+ describe Lhm::LockedSwitcher do
+   include IntegrationHelper
+
+   before(:each) { connect! }
+
+   describe "switching" do
+     before(:each) do
+       @origin = table_create("origin")
+       @destination = table_create("destination")
+       @migration = Lhm::Migration.new(@origin, @destination)
+     end
+
+     it "rename origin to archive" do
+       switcher = Lhm::LockedSwitcher.new(@migration, connection)
+       switcher.run
+
+       table_exists?(@origin).must_equal true
+       table_read(@migration.archive_name).columns.keys.must_include "origin"
+     end
+
+     it "rename destination to origin" do
+       switcher = Lhm::LockedSwitcher.new(@migration, connection)
+       switcher.run
+
+       table_exists?(@destination).must_equal false
+       table_read(@origin.name).columns.keys.must_include "destination"
+     end
+   end
+ end
+
@@ -0,0 +1,79 @@
+ #
+ # Copyright (c) 2011, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
+ # Schmidt
+ #
+
+ require File.expand_path(File.dirname(__FILE__)) + '/unit_helper'
+
+ require 'lhm/table'
+ require 'lhm/migration'
+ require 'lhm/chunker'
+
+ describe Lhm::Chunker do
+   include UnitHelper
+
+   before(:each) do
+     @origin = Lhm::Table.new("origin")
+     @destination = Lhm::Table.new("destination")
+     @migration = Lhm::Migration.new(@origin, @destination)
+     @chunker = Lhm::Chunker.new(@migration, 1, nil, { :stride => 100_000 })
+   end
+
+   describe "copy into" do
+     before(:each) do
+       @origin.columns["secret"] = { :metadata => "VARCHAR(255)" }
+       @destination.columns["secret"] = { :metadata => "VARCHAR(255)" }
+     end
+
+     it "should copy the correct range and column" do
+       @chunker.copy(from = 1, to = 100).must_equal(
+         "insert ignore into `destination` (`secret`) " +
+         "select `secret` from `origin` " +
+         "where `id` between 1 and 100"
+       )
+     end
+   end
+
+   describe "one" do
+     it "should have one chunk" do
+       @chunker.traversable_chunks_up_to(100).must_equal 1
+     end
+
+     it "should lower bound chunk on 1" do
+       @chunker.bottom(chunk = 1).must_equal 1
+     end
+
+     it "should upper bound chunk on 100" do
+       @chunker.top(chunk = 1, limit = 100).must_equal 100
+     end
+   end
+
+   describe "two" do
+     it "should have two chunks" do
+       @chunker.traversable_chunks_up_to(150_000).must_equal 2
+     end
+
+     it "should lower bound second chunk on 100_000" do
+       @chunker.bottom(chunk = 2).must_equal 100_001
+     end
+
+     it "should upper bound second chunk on 150_000" do
+       @chunker.top(chunk = 2, limit = 150_000).must_equal 150_000
+     end
+   end
+
+   describe "iterating" do
+     it "should iterate" do
+       @chunker = Lhm::Chunker.new(@migration, nil, nil, {
+         :stride => 150,
+         :throttle => 0
+       })
+
+       @chunker.up_to(limit = 100) do |bottom, top|
+         bottom.must_equal 1
+         top.must_equal 100
+       end
+     end
+   end
+ end
+
@@ -0,0 +1,79 @@
+ #
+ # Copyright (c) 2011, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
+ # Schmidt
+ #
+
+ require File.expand_path(File.dirname(__FILE__)) + '/unit_helper'
+
+ require 'lhm/table'
+ require 'lhm/migration'
+ require 'lhm/entangler'
+
+ describe Lhm::Entangler do
+   include UnitHelper
+
+   before(:each) do
+     @origin = Lhm::Table.new("origin")
+     @destination = Lhm::Table.new("destination")
+     @migration = Lhm::Migration.new(@origin, @destination)
+     @entangler = Lhm::Entangler.new(@migration)
+   end
+
+   describe "activation" do
+     before(:each) do
+       @origin.columns["info"] = { :type => "varchar(255)" }
+       @origin.columns["tags"] = { :type => "varchar(255)" }
+
+       @destination.columns["info"] = { :type => "varchar(255)" }
+       @destination.columns["tags"] = { :type => "varchar(255)" }
+     end
+
+     it "should create insert trigger to destination table" do
+       ddl = %Q{
+         create trigger `lhmt_ins_origin`
+         after insert on `origin` for each row
+         replace into `destination` (`info`, `tags`)
+         values (NEW.`info`, NEW.`tags`)
+       }
+
+       @entangler.entangle.must_include strip(ddl)
+     end
+
+     it "should create an update trigger to the destination table" do
+       ddl = %Q{
+         create trigger `lhmt_upd_origin`
+         after update on `origin` for each row
+         replace into `destination` (`info`, `tags`)
+         values (NEW.`info`, NEW.`tags`)
+       }
+
+       @entangler.entangle.must_include strip(ddl)
+     end
+
+     it "should create a delete trigger to the destination table" do
+       ddl = %Q{
+         create trigger `lhmt_del_origin`
+         after delete on `origin` for each row
+         delete ignore from `destination`
+         where `destination`.`id` = OLD.`id`
+       }
+
+       @entangler.entangle.must_include strip(ddl)
+     end
+   end
+
+   describe "removal" do
+     it "should remove insert trigger" do
+       @entangler.untangle.must_include("drop trigger if exists `lhmt_ins_origin`")
+     end
+
+     it "should remove update trigger" do
+       @entangler.untangle.must_include("drop trigger if exists `lhmt_upd_origin`")
+     end
+
+     it "should remove delete trigger" do
+       @entangler.untangle.must_include("drop trigger if exists `lhmt_del_origin`")
+     end
+   end
+ end
+