duck_duck_duck 1.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +20 -0
- data/Gemfile +3 -0
- data/LICENSE +23 -0
- data/README.md +19 -0
- data/VERSION +1 -0
- data/bin/duck_duck_duck +33 -0
- data/duck_duck_duck.gemspec +34 -0
- data/lib/duck_duck_duck.rb +165 -0
- data/node/app.js +193 -0
- data/node/drop.js +17 -0
- data/node/migrate_old +205 -0
- data/node/package.json +29 -0
- data/node/template.js +22 -0
- data/node/test.sh +76 -0
- data/node/tests.js +161 -0
- data/specs/duck_duck_duck.rb +150 -0
- data/specs/lib/helpers.rb +4 -0
- data/specs/lib/models/0010_model/migrates/0010-table.sql +13 -0
- data/specs/lib/models/0010_model/migrates/0020-insert_1.sql +11 -0
- data/specs/lib/models/0010_model/migrates/0030-insert_2.sql +11 -0
- data/specs/lib/models/0010_model/migrates/0040-insert_3.sql +11 -0
- data/specs/lib/models/0010_model/migrates/0050-insert_4.sql +11 -0
- data/specs/lib/models/0020_model/migrates/0010-table.sql +13 -0
- data/specs/lib/models/0020_model/migrates/0020-insert_1.sql +11 -0
- data/specs/lib/models/0020_model/migrates/0030-insert_2.sql +11 -0
- data/specs/lib/models/0030_model/migrates/0010-table.sql +13 -0
- data/specs/lib/models/0030_model/migrates/0020-insert_1.sql +11 -0
- data/specs/lib/models/0030_model/migrates/0030-insert_2.sql +11 -0
- data/specs/lib/user/migrates/001-user.js +3 -0
- data/specs/lib/user/migrates/002-two.js +3 -0
- metadata +188 -0
data/node/migrate_old
ADDED
@@ -0,0 +1,205 @@
|
|
1
|
+
#!/usr/bin/env node
|
2
|
+
|
3
|
+
var _ = require('underscore')
|
4
|
+
;
|
5
|
+
|
6
|
+
// Create databases: okdoki
|
7
|
+
|
8
|
+
var River = require('da_river').River
|
9
|
+
, Topogo = require('topogo').Topogo
|
10
|
+
, Customer = require('../Server/Customer/model').Customer
|
11
|
+
, Chat_Bot = require('../Server/Chat/Chat_Bot').Chat_Bot
|
12
|
+
, h = require('../test/helpers')
|
13
|
+
;
|
14
|
+
|
15
|
+
var _ = require('underscore');
|
16
|
+
|
17
|
+
// Parse the CLI command and derive the migration direction flags.
// 'reset' implies both up and down; 'reset_with_data' additionally
// seeds customer/bot test data (see create()).
var cmd = process.argv[2] || 'nothing';
var is_reset_user = cmd === 'reset_with_data';
var is_reset = cmd === 'reset' || is_reset_user;
var is_up = is_reset || cmd === 'up';
var is_down = is_reset || cmd === 'down';
|
23
|
+
|
24
|
+
// Bail out with a non-zero exit when the command maps to neither
// migration direction.
var is_known = is_up || is_down;
if (!is_known) {
  console.log('Unknown cmd: ' + process.argv[2]);
  process.exit(1);
}
|
28
|
+
|
29
|
+
// Accumulates SQL statements for later execution, expanding the
// $id_size and $trashed_at placeholders as each one is queued.
// q() is chainable.
var ok = {
  list: [],
  q: function (string) {
    var expanded = string
      .replace(/\$id_size/g, Topogo.id_size)
      .replace(/\$trashed_at/, " trashed_at bigint default null ");
    this.list.push(expanded);
    return this;
  }
};
|
39
|
+
|
40
|
+
// Drops every table in `names` that lives in the `public.` schema,
// then finishes `flow` with the list of dropped names.
//
// FIX: the original declared a local named `public`, which is a
// reserved word — a SyntaxError in strict-mode / ES-module code.
// Renamed to `public_tables`; behavior is otherwise unchanged.
function down(names, flow) {
  var public_tables = [];
  var r = River.new(arguments);

  // Not migrating down: report an empty list immediately.
  if (!is_down)
    return flow.finish(public_tables);

  _.each(names, function (n, i) {
    if (n.indexOf('public.') === 0) {
      public_tables.push(n);
      r.job('drop', n, function (j) {
        Topogo.new(n).drop(j);
      });
    }
  });

  // Final job hands the collected names back through the river.
  r.job('public tables', function (j, last) {
    return j.finish(public_tables);
  });

  r.run();
}
|
62
|
+
|
63
|
+
// Runs the "up" migration for the main database.
//
// FIX: the original body consisted entirely of commented-out ok.q(...)
// statements (pgcrypto extension, plpython JSON helpers,
// encode_pass_phrase) — dead code, removed; it remains available in
// version-control history. Behavior is unchanged: when migrating up
// this function is currently a no-op, and when not migrating up it
// finishes the flow immediately.
//
// NOTE(review): on the `up` path the original never called
// flow.finish() — preserved as-is; confirm the caller drives the flow.
function up(flow) {
  if (!is_up)
    return flow.finish();

  // Queue statements here with ok.q("..."), e.g.:
  //   ok.q("CREATE EXTENSION IF NOT EXISTS pgcrypto");
} // end func up
|
121
|
+
|
122
|
+
// Seeds test data once the main migration finishes: two customers
// ('go99' and 'dos') and four chat bots owned by them. Only runs for
// the 'reset_with_data' command; otherwise finishes the flow at once.
function create(flow) {
  console.log('Finished migrating the main db.');
  if (!is_reset_user) {
    return flow.finish();
  }

  var p = "pass phrase";
  var c_opts = {pass_phrase: p, confirm_pass_phrase: p, ip: '000.000.00'};

  var report = function (j) {
    console.log('Finished ' + j.group + ' ' + j.id);
  };

  var r = River.new(arguments);
  r.for_each_finish(report);

  // Queue a customer-creation job named after the screen name.
  var add_customer = function (name) {
    r.job('create:', name, function (j) {
      Customer.create(_.extend({screen_name: j.id}, c_opts), (j));
    });
  };

  // Queue a bot-creation job owned by a previously created customer.
  var add_bot = function (name, owner, url) {
    r.job('create bot:', name, function (j) {
      var c = j.river.reply_for('create:', owner);
      Chat_Bot.create({owner_id: c.data.id, url: url, name: j.id}, (j));
    });
  };

  // Registration order matches the original script exactly.
  add_customer('go99');
  add_customer('dos');
  add_bot('404',      'go99', "https://okdoki-bot.herokuapp.com/test/404/404");
  add_bot('ok',       'go99', "https://okdoki-bot.herokuapp.com/test/ok");
  add_bot('im',       'dos',  "https://okdoki-bot.herokuapp.com/test/im");
  add_bot('not_json', 'dos',  "https://okdoki-bot.herokuapp.com/test/not_json");

  r.run();
};
|
168
|
+
|
169
|
+
// // ==========================================================================================
|
170
|
+
|
171
|
+
// // ==========================================================================================
|
172
|
+
|
173
|
+
|
174
|
+
// ****************************************************************
|
175
|
+
// ****************** SQL Helpers *********************************
|
176
|
+
// ****************************************************************
|
177
|
+
|
178
|
+
// Reusable SQL fragments for composing CREATE TABLE statements.
// Each value keeps its surrounding spaces so fragments can be joined
// directly.
var trashed_at   = " trashed_at bigint default NULL ";
var primary_key  = " PRIMARY KEY ";
var serial       = " serial ";
var not_null     = " NOT NULL ";
var unique       = " unique ";
var bigint       = " bigint ";
var default_null = "default null";

// Column-type fragment: varchar(o) -> " varchar( o ) ".
function varchar(o) { return " varchar( " + o + " ) "; }
|
187
|
+
|
188
|
+
// Builds a "CREATE TABLE IF NOT EXISTS <name> ( ... );" statement.
// `f` receives a collector; each collector call contributes one
// column/constraint row, whose arguments are joined with spaces.
//
// FIX: replaced the underscore calls (_.toArray, _.map) with native
// equivalents — same results, one fewer third-party dependency.
function create_table(name, f) {
  var vals = [];
  var sql = function () {
    // `arguments` -> real array (was _.toArray).
    vals.push(Array.prototype.slice.call(arguments));
  };

  f(sql);

  return "CREATE TABLE IF NOT EXISTS " + name + " ( \n" + to_sql(vals) + " \n ); ";
}

// Joins each row's fragments with spaces and the rows with ", \n",
// producing the column list used by create_table (was _.map).
function to_sql(vals) {
  var lines = vals.map(function (row) { return row.join(' '); });
  return lines.join(", \n");
}
|
203
|
+
|
204
|
+
|
205
|
+
|
data/node/package.json
ADDED
@@ -0,0 +1,29 @@
|
|
1
|
+
{
|
2
|
+
"name": "duck_duck_duck",
|
3
|
+
"version": "0.4.4",
|
4
|
+
"description": "My personal way to do multi-applet migrations.",
|
5
|
+
"main": "app.js",
|
6
|
+
"directories": {
|
7
|
+
"test": "test"
|
8
|
+
},
|
9
|
+
"scripts": {
|
10
|
+
"test": "bin/test"
|
11
|
+
},
|
12
|
+
"repository": {
|
13
|
+
"type": "git",
|
14
|
+
"url": "git://github.com/da99/duck_duck_duck.git"
|
15
|
+
},
|
16
|
+
"dependencies": {
|
17
|
+
"underscore": "x.x.x",
|
18
|
+
"optimist": "x.x.x",
|
19
|
+
"da_river": "x.x.x",
|
20
|
+
"topogo": "x.x.x"
|
21
|
+
},
|
22
|
+
"keywords": [
|
23
|
+
"da99"
|
24
|
+
],
|
25
|
+
"author": "da99",
|
26
|
+
"license": "MIT",
|
27
|
+
"readmeFilename": "README.md",
|
28
|
+
"gitHead": "849c5374d165e9fb10721becc31538c6f610ef59"
|
29
|
+
}
|
data/node/template.js
ADDED
@@ -0,0 +1,22 @@
|
|
1
|
+
|
2
|
+
var Topogo = require("topogo").Topogo;
|
3
|
+
var River = require("da_river").River;
|
4
|
+
|
5
|
+
var table = "";
|
6
|
+
var m = module.exports = {};
|
7
|
+
|
8
|
+
// Template migration: on 'down' drop the table, otherwise create it.
// `table` is intentionally blank — the generated file's author fills
// it in, along with the column list.
m.migrate = function (dir, r) {

  if (dir === 'down') {
    Topogo.run('DROP TABLE IF EXISTS ' + table + ';', [], r);
    return;
  }

  var sql = 'CREATE TABLE IF NOT EXISTS ' + table + " ( \
 \
);";
  Topogo.run(sql, [], r);

};
|
data/node/test.sh
ADDED
@@ -0,0 +1,76 @@
|
|
1
|
+
#!/usr/bin/env bash
|
2
|
+
# -*- bash -*-
|
3
|
+
#
|
4
|
+
set -u -e -o pipefail

# Environment the migrations under test read.
export SCHEMA_TABLE="_test_schema"
export DUCK_TEMPLATE="../../template.js"

# Start from a clean slate, then create the marker files the JS
# migrations append +N/-N tokens to.
rm -f /tmp/duck_*
touch /tmp/duck_up /tmp/duck_down /tmp/duck_drop_it
|
13
|
+
|
14
|
+
# Reset the migrate dir to exactly 001..003 (001-one.js is the master
# copy the others are cloned from).
function init {
  local name
  for name in 002-two 003-three 004-four 005-five 006-six; do
    rm -f "migrates/${name}.js"
  done
  for name in 002-two 003-three; do
    cp "migrates/001-one.js" "migrates/${name}.js"
  done
}
|
24
|
+
|
25
|
+
# Add migrations 004..006, each a copy of 001-one.js.
function init_last_three {
  local name
  for name in 004-four 005-five 006-six; do
    cp "migrates/001-one.js" "migrates/${name}.js"
  done
}
|
30
|
+
|
31
|
+
# ==== reset
ddd="../../bin/duck_duck_duck"

node tests/helpers/drop.js

# Plain up migrations for two applets.
cd tests/user
$ddd up

cd ../raven_sword
$ddd up

# praying_mantis: migrate evens 002..006 up, then add 008/010 and run
# down — the down run must skip versions above the current one.
cd ../praying_mantis
rm -f "migrates/008-eight.js"
rm -f "migrates/010-ten.js"
cp "migrates/002-two.js" "migrates/004-four.js"
cp "migrates/002-two.js" "migrates/006-six.js"
$ddd up
cp "migrates/002-two.js" "migrates/008-eight.js"
cp "migrates/002-two.js" "migrates/010-ten.js"
$ddd down

# lone_wolf: migrate up in two batches to test incremental ups.
cd ../lone_wolf
init
$ddd up
init_last_three
$ddd up

# laughing_octopus: generate migration files from scratch via `create`.
cd ../laughing_octopus
rm -rf migrates
$ddd create one
$ddd create two
$ddd create three

# test .drop/.create
cd ../screaming_mantis
$ddd up
$ddd down

cd ../liquid
$ddd up
$ddd drop_it

# Record the schema listing, then run the mocha assertions.
cd ../..
bin/duck_duck_duck list > /tmp/duck_list
mocha tests/duck_duck_duck.js
|
74
|
+
|
75
|
+
|
76
|
+
|
data/node/tests.js
ADDED
@@ -0,0 +1,161 @@
|
|
1
|
+
|
2
|
+
var _ = require('underscore')
|
3
|
+
, Topogo = require('topogo').Topogo
|
4
|
+
, River = require('da_river').River
|
5
|
+
, assert = require('assert')
|
6
|
+
, fs = require('fs')
|
7
|
+
;
|
8
|
+
|
9
|
+
// Like mocha's `it`, but refuses any test callback that does not
// declare exactly one parameter (the `done` callback).
var does = function (name, func) {
  if (func.length === 1) {
    return it(name, func);
  }
  throw new Error('Test func requires done: ' + name);
};
|
14
|
+
|
15
|
+
// Memoized map of applet name -> version, read from _test_schema.
var applets_list = {};

// Loads the schema table once, caches it in applets_list, and passes
// the map to `func`.
//
// FIX: the cache guard was `applets_list.length`, which is always
// undefined on a plain object, so the table was re-queried on every
// call. Count the keys instead. Also renamed the inner callback's
// `r` parameter to `rec` — it shadowed the outer river `r`.
function applets(func) {
  if (_.keys(applets_list).length > 0)
    return func(applets_list);

  River.new()
  .job(function (j) {
    Topogo.run('SELECT * FROM _test_schema', [], j);
  })
  .run(function (r, recs) {
    _.each(recs, function (rec, i) {
      applets_list[rec.name] = rec.version;
    });
    func(applets_list);
  });
}
|
31
|
+
|
32
|
+
describe('Before first migrate:', function () {

  // Shared first job: fetch every row of the schema table.
  var select_schema = function (j) {
    Topogo.run('SELECT * FROM _test_schema', [], j);
  };

  it('creates schema table', function (done) {
    River.new(null)
    .job(select_schema)
    .job(function (j, last) {
      assert.equal(last.length > 0, true);
      done();
    })
    .run();
  });

  it('creates rows with: name, version', function (done) {
    River.new(null)
    .job(select_schema)
    .job(function (j, last) {
      assert.deepEqual(_.keys(last[0]), ['name', 'version']);
      done();
    })
    .run();
  });

}); // === end desc
|
59
|
+
|
60
|
+
describe('Migrate up:', function () {

  does('updates version to latest migrate', function (done) {
    River.new()
    .job(function (j) {
      Topogo.run('SELECT * FROM _test_schema', [], j);
    })
    .job(function (j, last) {
      // FIX: the original `assert(last[0].version, 3)` only
      // truth-tested the first argument (the second is the failure
      // message), so any non-zero version passed. Assert equality.
      assert.equal(last[0].version, 3);
      done();
    })
    .run();
  });

  it('migrates files higher, but not equal, of current version', function () {
    var contents = fs.readFileSync('/tmp/duck_up').toString().trim();
    assert.equal(contents, "+1+2+3+4+5+6");
  });

}); // === end desc
|
83
|
+
|
84
|
+
describe('Migrate down:', function () {

  var contents = null;

  before(function () {
    contents = fs.readFileSync('/tmp/duck_down').toString().trim();
  });

  // Shared expectation: ups append +N, downs append -N in reverse.
  var expected_markers = "+2+4+6-6-4-2";

  it('runs migrates in reverse order', function () {
    assert.equal(contents, expected_markers);
  });

  it('does not run down migrates from later versions', function () {
    // This tests is the same as "runs migrates in reverse order"
    assert.equal(contents, expected_markers);
  });

  does('update version to one less than earlier version', function (done) {
    var is_praying_mantis = function (rec) {
      return rec.name === 'praying_mantis';
    };
    River.new(null)
    .job(function (j) {
      Topogo.run('SELECT * FROM _test_schema', [], j);
    })
    .job(function (j, last) {
      var pm = _.find(last, is_praying_mantis);
      assert.equal(pm.version, 0);
      done();
    })
    .run();
  });

}); // === end desc
|
117
|
+
|
118
|
+
|
119
|
+
describe('create ...', function () {

  // Reads a generated migration and confirms it came from the
  // template (the template body declares vars).
  //
  // FIX: the original's second test redeclared `var contents` twice
  // in one function scope; this helper removes the duplication.
  var assert_template_file = function (path) {
    var contents = fs.readFileSync(path).toString();
    assert.equal(contents.indexOf('var ') > -1, true);
  };

  it('create a file', function () {
    assert_template_file("tests/laughing_octopus/migrates/001-one.js");
  });

  it('creates file in successive order', function () {
    assert_template_file("tests/laughing_octopus/migrates/002-two.js");
    assert_template_file("tests/laughing_octopus/migrates/003-three.js");
  });

}); // === end desc
|
134
|
+
|
135
|
+
|
136
|
+
|
137
|
+
describe('drop_it', function () {

  it('migrates down', function () {
    var marker = fs.readFileSync("/tmp/duck_drop_it").toString();
    assert.deepEqual(marker, "drop_it");
  });

  does('removes entry from schema', function (done) {
    applets(function (list) {
      // 'liquid' ran drop_it in test.sh, so its row must be gone.
      assert.deepEqual(list.liquid, undefined);
      done();
    });
  });

}); // === end desc
|
152
|
+
|
153
|
+
|
154
|
+
describe('list', function () {

  it('outputs schema table on each line: VER_NUM NAME', function () {
    var lines = fs.readFileSync("/tmp/duck_list").toString().split("\n");
    assert.deepEqual(!!lines[0].match(/\d user/), true);
    assert.deepEqual(!!lines[1].match(/\d raven_sword/), true);
  });

}); // === end desc
|
@@ -0,0 +1,150 @@
|
|
1
|
+
|
2
|
+
require 'sequel'
|
3
|
+
require 'Exit_0'
|
4
|
+
|
5
|
+
schema = ENV['SCHEMA_TABLE'] = '_test_schema'
|
6
|
+
DB = Sequel.connect ENV['DATABASE_URL']
|
7
|
+
MODELS = Dir.glob('*/migrates').map { |dir| File.basename File.dirname(dir) }
|
8
|
+
|
9
|
+
# === Reset tables ===========================================================
|
10
|
+
# Drops every model table plus the schema bookkeeping table so each
# spec starts against a clean database.
def reset
  (MODELS + [ENV['SCHEMA_TABLE']]).each do |t|
    DB << "DROP TABLE IF EXISTS #{t.inspect};"
  end
end
|
16
|
+
|
17
|
+
reset
|
18
|
+
|
19
|
+
# === Helpers ================================================================
|
20
|
+
# Runs a SQL query through Sequel. When the final argument is a
# Symbol it is treated as a column name and only that column's values
# are returned; otherwise the full row hashes come back.
def get *args
  field = args.last.is_a?(Symbol) ? args.pop : nil
  rows  = DB[*args].all
  return rows unless field
  rows.map { |row| row[field] }
end
|
29
|
+
|
30
|
+
# Numeric versions of a model's migration files (e.g. 0010 -> 10),
# sorted ascending. A file that doesn't match NNNN-*.sql maps to nil
# (and, as in the original, a nil entry would make sort raise).
def versions mod
  Dir.glob("#{mod}/migrates/*")
    .map { |file| file[/\/(\d{4})[^\/]+\.sql$/] && $1.to_i }
    .sort
end
|
35
|
+
|
36
|
+
# === Specs ==================================================================
|
37
|
+
|
38
|
+
describe "create" do

  # Fresh scratch directory per spec run.
  before {
    @dir = '/tmp/ddd_ver'
    Exit_0("rm -fr #{@dir}")
    Exit_0("mkdir -p #{@dir}")
  }

  it "names the file in successive file versions: 0000-....sql" do
    Dir.chdir(@dir) {
      Exit_0("duck_duck_duck create MOD table_1")
      Exit_0("duck_duck_duck create MOD table_2")

      # Hand-placed files bump the next version to the following
      # multiple of ten.
      Exit_0("touch MOD/migrates/0022-skip_zero.sql")
      Exit_0("duck_duck_duck create MOD table_3")

      Exit_0("touch MOD/migrates/0091-skip_zero.sql")
      Exit_0("duck_duck_duck create MOD table_100")

      File.should.exists('MOD/migrates/0010-table_1.sql')
      File.should.exists('MOD/migrates/0020-table_2.sql')
      File.should.exists('MOD/migrates/0030-table_3.sql')
      File.should.exists('MOD/migrates/0100-table_100.sql')
    }
  end

end # === describe create
|
66
|
+
|
67
|
+
describe 'up model' do

  before { reset }

  it 'updates version to latest migration' do
    Exit_0("duck_duck_duck up 0010_model")
    get('SELECT * FROM _test_schema').
      first[:version].should == versions('0010_model').last
  end

  it 'does not run migrations from previous versions' do
    # Seed the schema at version 20 by hand; migrating up must then
    # run only the 0030..0050 files.
    Exit_0("duck_duck_duck migrate_schema")
    DB << File.read("0010_model/migrates/0010-table.sql").split('-- DOWN').first
    DB << "INSERT INTO #{schema.inspect} VALUES ('0010_model', '20');"
    Exit_0("duck_duck_duck up 0010_model")
    get('SELECT * FROM "0010_model"', :title).
      should == ['record 30', 'record 40', 'record 50']
  end

end # === describe up model
|
87
|
+
|
88
|
+
describe 'down model' do

  before { reset }

  it 'leaves version to 0' do
    Exit_0("duck_duck_duck up 0010_model")
    Exit_0("duck_duck_duck down 0010_model")
    get(%^SELECT * FROM #{schema.inspect} WHERE name = '0010_model'^, :version).last.
      should == 0
  end

  it 'runs migrates in reverse order' do
    Exit_0("duck_duck_duck up 0020_model")
    Exit_0("duck_duck_duck down 0020_model")
    get('SELECT * FROM "0020_model"', :title).
      should == ['record 20', 'record 30', 'DROP record 30', 'DROP record 20', 'DROP 0020_model']
  end

  it 'does not run down migrates from later versions' do
    Exit_0("duck_duck_duck migrate_schema")
    DB << File.read("0020_model/migrates/0010-table.sql").split('-- DOWN').first
    DB << "INSERT INTO #{schema.inspect} VALUES ('0020_model', '20');"
    # NOTE(review): this UPDATE interpolates `schema` unquoted, unlike
    # the INSERT above (#{schema.inspect}); works for _test_schema but
    # is inconsistent — confirm before changing.
    DB << "UPDATE #{schema} SET version = '20' WHERE name = '0020_model';"
    Exit_0("duck_duck_duck down 0020_model")
    get('SELECT * FROM "0020_model"', :title).
      should == ['DROP record 20', 'DROP 0020_model']
  end

end # === describe down model
|
117
|
+
|
118
|
+
describe "up" do

  before { reset }

  it "migrates all models" do
    Exit_0("duck_duck_duck up")
    get("SELECT * FROM #{schema.inspect} ORDER BY name").should == [
      {:name=>'0010_model', :version=>50},
      {:name=>'0020_model', :version=>30},
      {:name=>'0030_model', :version=>30}
    ]
  end

end # describe up
|
133
|
+
|
134
|
+
describe 'down' do

  before { reset }

  it "migrates down all models" do
    Exit_0("duck_duck_duck up")
    Exit_0("duck_duck_duck down")
    get("SELECT * FROM #{schema.inspect} ORDER BY name").should == [
      {:name=>'0010_model', :version=>0},
      {:name=>'0020_model', :version=>0},
      {:name=>'0030_model', :version=>0}
    ]
  end

end # === describe down
|
150
|
+
|