duck_duck_duck 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +20 -0
- data/Gemfile +3 -0
- data/LICENSE +23 -0
- data/README.md +19 -0
- data/VERSION +1 -0
- data/bin/duck_duck_duck +33 -0
- data/duck_duck_duck.gemspec +34 -0
- data/lib/duck_duck_duck.rb +165 -0
- data/node/app.js +193 -0
- data/node/drop.js +17 -0
- data/node/migrate_old +205 -0
- data/node/package.json +29 -0
- data/node/template.js +22 -0
- data/node/test.sh +76 -0
- data/node/tests.js +161 -0
- data/specs/duck_duck_duck.rb +150 -0
- data/specs/lib/helpers.rb +4 -0
- data/specs/lib/models/0010_model/migrates/0010-table.sql +13 -0
- data/specs/lib/models/0010_model/migrates/0020-insert_1.sql +11 -0
- data/specs/lib/models/0010_model/migrates/0030-insert_2.sql +11 -0
- data/specs/lib/models/0010_model/migrates/0040-insert_3.sql +11 -0
- data/specs/lib/models/0010_model/migrates/0050-insert_4.sql +11 -0
- data/specs/lib/models/0020_model/migrates/0010-table.sql +13 -0
- data/specs/lib/models/0020_model/migrates/0020-insert_1.sql +11 -0
- data/specs/lib/models/0020_model/migrates/0030-insert_2.sql +11 -0
- data/specs/lib/models/0030_model/migrates/0010-table.sql +13 -0
- data/specs/lib/models/0030_model/migrates/0020-insert_1.sql +11 -0
- data/specs/lib/models/0030_model/migrates/0030-insert_2.sql +11 -0
- data/specs/lib/user/migrates/001-user.js +3 -0
- data/specs/lib/user/migrates/002-two.js +3 -0
- metadata +188 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 57a6021e6db81052a0d1ded7aadb527cbb99059e
+  data.tar.gz: 3c2522c40d726f5cab897321d87488ab693705e1
+SHA512:
+  metadata.gz: 6a037d4aaef8c580f04fcd5fd50f3f5bc5f0b3d9a67a8bcb2ecb91131e377a88829e506c708f7ce93de664087dfecf0d269a09f568ecccc2740e3e45d98c62bc
+  data.tar.gz: efda3885fc1f6303b701eafaf3f22f471018060fac6cb861d70ec6c2c98c68b6a3228deabcba05131b59d371f7dfd5067a824c81462003e371e517e94fe73ec6
data/.gitignore
ADDED
@@ -0,0 +1,20 @@
+/node_modules/
+/npm-debug.log
+npm-debug.log
+*.gem
+*.rbc
+.bundle
+.config
+.yardoc
+Gemfile.lock
+InstalledFiles
+_yardoc
+coverage
+doc/
+lib/bundler/man
+pkg
+rdoc
+spec/reports
+test/tmp
+test/version_tmp
+tmp
data/Gemfile
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,23 @@
+
+Copyright (c) 2013 da99
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,19 @@
+
+duck_duck_duck
+==============
+You won't find this useful.
+
+However, if you are still curious:
+
+* If you like to break up apps into smaller apps,
+  and you want them to use the same db, but
+  different tables, duck\_duck\_duck
+  lets you migrate those mini-apps
+  to the same db.
+
+Previously...
+=============
+
+Originally, this was a node module.
+The node module is no longer maintained. It is now
+a Ruby gem.
data/VERSION
ADDED
@@ -0,0 +1 @@
+1.0.0
data/bin/duck_duck_duck
ADDED
@@ -0,0 +1,33 @@
+#!/usr/bin/env ruby
+#
+
+case ARGV[0]
+
+when 'help'
+
+  puts "========================"
+  puts ""
+  puts "help"
+  puts "create Model table"
+  puts "create Model default_data create"
+  puts "up"
+  puts "down"
+  puts "migrate_schema"
+  puts ""
+  puts "=== ENV/options: ==="
+  puts ""
+  puts "SCHEMA_TABLE=_schema_"
+  puts "DATABASE_URL='postgres://...@...:../..'"
+  puts ""
+  puts "========================"
+
+else
+
+  require 'duck_duck_duck'
+  fail "Unknown cmd: #{ARGV[0]}" unless %w{ migrate_schema reset create up down }.include?(ARGV[0])
+  Duck_Duck_Duck.send(*ARGV)
+
+end # === case ARGV[0]
+
+
+
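For orientation: the executable forwards its arguments straight to the class via Duck_Duck_Duck.send(*ARGV), so the shell command and the Ruby call below are equivalent. This is a hedged usage sketch; the model name "my_model" and the connection URL are invented, and DATABASE_URL must be set before the require because the library connects to the database at load time (see data/lib/duck_duck_duck.rb below).

    # Shell (hypothetical values):
    #   DATABASE_URL=postgres://user:pass@localhost/app duck_duck_duck up my_model
    # Equivalent Ruby:
    ENV['DATABASE_URL'] = 'postgres://user:pass@localhost/app'  # assumed URL
    require 'duck_duck_duck'
    Duck_Duck_Duck.send('up', 'my_model')   # same as Duck_Duck_Duck.up('my_model')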
data/duck_duck_duck.gemspec
ADDED
@@ -0,0 +1,34 @@
+# coding: utf-8
+lib = File.expand_path('../lib', __FILE__)
+$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+
+Gem::Specification.new do |spec|
+  spec.name          = "duck_duck_duck"
+  spec.version       = `cat VERSION`
+  spec.authors       = ["da99"]
+  spec.email         = ["i-hate-spam-1234567@mailinator.com"]
+  spec.summary       = %q{Migrations for apps composed of mini-apps.}
+  spec.description   = %q{
+    I use it to keep track of various mini-apps
+    within a larger app.
+  }
+  spec.homepage      = "https://github.com/da99/duck_duck_duck"
+  spec.license       = "MIT"
+
+  spec.files         = `git ls-files -z`.split("\x0").reject { |file|
+    file.index('bin/') == 0 && file != "bin/#{File.basename Dir.pwd}"
+  }
+  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
+  spec.require_paths = ["lib"]
+
+  spec.add_development_dependency "pry"           , "~> 0.9"
+  spec.add_development_dependency "rake"          , "~> 10.3"
+  spec.add_development_dependency "bundler"       , "~> 1.5"
+  spec.add_development_dependency "bacon"         , "~> 1.2.0"
+  spec.add_development_dependency "Bacon_Colored" , "~> 0.1"
+
+  spec.add_development_dependency "sequel"        , "~> 4.13"
+  spec.add_development_dependency "pg"            , "~> 0.16"
+  spec.add_development_dependency "Exit_0"        , ">= 1.4.1"
+end
data/lib/duck_duck_duck.rb
ADDED
@@ -0,0 +1,165 @@
+
+require "sequel"
+
+class Duck_Duck_Duck
+
+  DB           = Sequel.connect(ENV['DATABASE_URL'])
+  SCHEMA_TABLE = ENV['SCHEMA_TABLE'] || '_schema'
+
+  class << self
+
+    def dev_only
+      fail "Not allowed on a dev machine." if ENV['IS_DEV']
+    end
+
+    def create *args
+      new(*args).create
+    end
+
+    def migrate_schema
+      DB << <<-EOF
+        CREATE TABLE IF NOT EXISTS #{SCHEMA_TABLE} (
+          name    varchar(255) NOT NULL PRIMARY KEY ,
+          version smallint     NOT NULL DEFAULT 0
+        )
+      EOF
+    end
+
+    %w{reset up down}.each { |meth|
+      eval <<-EOF, nil, __FILE__, __LINE__ + 1
+        def #{meth} name = nil
+          migrate_schema
+          names = name ? [name] : models
+          names.each { |name|
+            new(name).#{meth}
+          }
+        end
+      EOF
+    }
+
+    private # ======================================
+
+    def models
+      @models ||= Dir.glob("*/migrates").
+        map { |dir| File.basename File.dirname(dir) }
+    end
+
+  end # === class self ===
+
+  # ===============================================
+  # Instance methods:
+  # ===============================================
+
+  attr_reader :name, :action, :sub_action
+
+  def initialize *args
+    @name, @action, @sub_action = args
+    @files = Dir.glob("#{name}/migrates/*.sql")
+  end
+
+  def file_to_ver str
+    str.split('/').last[/\d{4}/].to_i
+  end
+
+  def reset
+    down
+    up
+  end
+
+  def up
+    rec = DB.fetch("SELECT version FROM #{SCHEMA_TABLE} WHERE name = :name", :name=>name).all.first
+
+    if !rec
+      ds = DB["INSERT INTO #{SCHEMA_TABLE} (name, version) VALUES (?, ?)", name, 0]
+      ds.insert
+      rec = {:version=>0}
+    end
+
+    if rec[:version] < 0
+      puts "#{name} has an invalid version: #{rec[:version]}\n"
+      exit 1
+    end
+
+    files = @files.sort.map { |f|
+      ver = file_to_ver(f)
+      if ver > rec[:version]
+        [ ver, File.read(f).split('-- DOWN').first ]
+      end
+    }.compact
+
+    files.each { |pair|
+      ver = pair.first
+      sql = pair[1]
+      DB << sql
+      DB[" UPDATE #{SCHEMA_TABLE.inspect} SET version = ? WHERE name = ? ", ver, name].update
+      puts "#{name} schema is now : #{ver}"
+    }
+
+    if files.empty?
+      puts "#{name} is already the latest: #{rec[:version]}"
+    end
+  end # === def up
+
+  def down
+    rec = DB.fetch("SELECT version FROM #{SCHEMA_TABLE} WHERE name = :name", :name=>name).all.first
+
+    if !rec
+      ds = DB["INSERT INTO #{SCHEMA_TABLE} (name, version) VALUES (?, ?)", name, 0]
+      ds.insert
+      rec = {:version=>0}
+    end
+
+    if rec[:version] == 0
+      puts "#{name} is already the latest: #{rec[:version]}\n"
+      exit 0
+    end
+
+    if rec[:version] < 0
+      puts "#{name} is at invalid version: #{rec[:version]}\n"
+      exit 1
+    end
+
+    files = @files.sort.reverse.map { |f|
+      ver = file_to_ver(f)
+      next unless ver <= rec[:version]
+      [ ver, File.read(f).split('-- DOWN').last ]
+    }.compact
+
+    if files.empty?
+      puts "#{name} is already the latest: #{rec[:version]}\n"
+    end
+
+    new_ver = nil
+
+    files.each_with_index { |pair, i|
+      prev_pair = files[i+1] || [0, nil]
+      ver = prev_pair.first.to_i
+      sql = pair[1]
+      DB << sql
+      DB[" UPDATE #{SCHEMA_TABLE} SET version = ? WHERE name = ? ", ver, name].update
+      puts "#{name} schema is now : #{ver}"
+    }
+
+  end # === def down
+
+  def create
+    `mkdir -p #{name}/migrates`
+
+    files = Dir.glob("#{name}/migrates/*.sql").grep(/\/\d{4}\-/).sort
+
+    next_ver = begin
+      (files.last || '')[/\/(\d{4})[^\/]+$/]
+      v = ($1 ? $1 : '0')
+      '%04d' % (v.to_i + (10 - v[/\d$/].to_i))
+    end
+
+    new_file = "#{name}/migrates/#{next_ver}-#{[action, sub_action].compact.join('-')}.sql"
+    File.open(new_file, 'a') do |f|
+      f.puts "\n\n\n\n-- DOWN\n\n\n\n"
+    end
+
+    puts new_file
+  end # === def create
+
+
+end # === class Duck_Duck_Duck ===
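To make the migration-file conventions above concrete: up runs each file's text before the -- DOWN marker, down runs the text after it, file_to_ver takes the first four-digit run in the file name as the version, and create names new files by rounding the last version up to the next multiple of 10 (0010, 0020, ...). The sketch below is illustrative only; the file name 0010-create_accounts.sql and the accounts table are invented, not contents of this package.

    -- 0010-create_accounts.sql (hypothetical)
    CREATE TABLE accounts (
      id   serial       PRIMARY KEY,
      nick varchar(255) NOT NULL
    );

    -- DOWN

    DROP TABLE accounts;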
data/node/app.js
ADDED
@@ -0,0 +1,193 @@
+
+var _      = require('underscore')
+  , path   = require('path')
+  , fs     = require('fs')
+  , exec   = require('child_process').exec
+  , River  = require('da_river').River
+  , Topogo = require('topogo').Topogo
+  , argv   = require('optimist').argv
+;
+
+var schema_table    = process.env.SCHEMA_TABLE || '_schema';
+var MIGRATE_PATTERN = /^\d+\-/;
+var name            = path.basename(process.cwd());
+
+// From: stackoverflow.com/questions/1267283/how-can-i-create-a-zerofilled-value-using-javascript
+function pad_it(n, p, c) {
+  var pad_char = typeof c !== 'undefined' ? c : '0';
+  var pad = new Array(1 + p).join(pad_char);
+  return (pad + n).slice(-pad.length);
+}
+
+function read_migrates() {
+  var folder = 'migrates';
+  return (fs.existsSync(folder)) ? _.select(fs.readdirSync(folder), function (file, i) {
+    return file.match(MIGRATE_PATTERN);
+  }) : []
+}
+
+if (argv._[0] === 'list') {
+
+  River.new(null)
+  .job(function (j) {
+    Topogo.run('SELECT * FROM ' + schema_table + ' ;', [], j);
+  })
+  .job(function (j, list) {
+    _.each(list, function (o) {
+      var v = o.version;
+      if (o.version < 10)
+        v = ' ' + v;
+      console.log(v, o.name);
+    });
+    j.finish(list);
+  })
+  .run(function () {
+    Topogo.close();
+  });
+
+} else if (argv._[0] === 'create') {
+
+  var template  = fs.readFileSync(process.env.DUCK_TEMPLATE).toString();
+  var file_name = _.last(process.argv);
+  exec("mkdir -p migrates", function (err, data) {
+    if (err) throw err;
+
+    var max = _.map(read_migrates(), function (f_name, i) {
+      return parseInt(f_name, 10);
+    }).sort().pop() || 0;
+
+    var final_file_name = pad_it(max + 1, 3) + "-" + file_name + '.js';
+
+    process.chdir('migrates')
+    fs.writeFile(final_file_name, template, function () {
+    });
+  });
+
+} else if (_.contains(['up','down', 'drop_it'], argv._[0])) {
+  var migrates = read_migrates();
+  var versions = _.map(migrates, function (f) {
+    return parseInt(f, 10);
+  });
+
+  var orig_dir  = argv._[0];
+  var direction = (_.contains(['down', 'drop_it'], argv._[0])) ? 'down' : 'up';
+
+
+  if (direction === 'down')
+    migrates.sort().reverse();
+  else
+    migrates.sort();
+
+  River.new(null)
+  .job(function (j) {
+    Topogo.run('CREATE TABLE IF NOT EXISTS ' + schema_table + ' (' +
+      ' name varchar(255) NOT NULL UNIQUE , ' +
+      ' version smallint NOT NULL DEFAULT 0 ' +
+    ')', [], j);
+  })
+  .job(function (j) {
+    Topogo.run('SELECT * FROM ' + schema_table + ' WHERE name = $1 ;', [name], j);
+  })
+  .job(function (j, last) {
+    j.finish(last[0]);
+  })
+  .job(function (j, last) {
+    if (last)
+      j.finish(last.version);
+    else {
+      River.new(null)
+      .job(function (j_create) {
+        Topogo.new(schema_table)
+        .create({name: name}, j_create);
+      })
+      .run(function (j_create, last) {
+        j.finish(last.version);
+      });
+    }
+  })
+  .job(function (j, last_max) {
+    var r = River.new(null);
+    var has_migrates = false;
+
+    _.each(migrates, function (f) {
+      var max = parseInt(f, 10);
+
+      // Should it run?
+      if (direction === 'up' && last_max >= max)
+        return;
+      if (direction === 'down' && last_max < max)
+        return;
+
+      has_migrates = true;
+
+      // Yes? Then run it..
+      var m = require(process.cwd() + '/migrates/' + f);
+
+      r.job(function (j) {
+
+        var PG = j.PG = Topogo;
+
+        j.drop = function () {
+          var table_name = arguments[0];
+          var name = function (str) { return str.replace(new RegExp('@_t', 'ig'), '"' + table_name + '"'); };
+          var r = River.new(j);
+
+          _.each(_.toArray(arguments).reverse(), function (t_name) {
+            r.job(function (j) {
+              PG.run("DROP TABLE IF EXISTS \"" + t_name + "\" ;", [], j);
+            });
+          });
+
+          r.run();
+        }; // ================= .drop
+
+        j.create = function () {
+          var r = River.new(j);
+          _.each(_.toArray(arguments), function (sql) {
+            r.job(function (j) {
+              PG.run(sql, [], j);
+            });
+          });
+          r.run();
+        };
+
+        m.migrate(direction, j);
+      });
+
+      r.job(function (j) {
+        var t = Topogo.new(schema_table);
+        if (direction === 'down') {
+          max = _.find(versions.slice().reverse(), function (n) {
+            return n < max;
+          }) || 0;
+        }
+        t.update_where_set({name: name}, {version: max}, j);
+      });
+
+    });
+
+    if (has_migrates) {
+      r.run(function () {
+        j.finish();
+      });
+    } else {
+      j.finish();
+    }
+
+  })
+  .job(function (j, last) {
+    if (orig_dir !== 'drop_it')
+      return j.finish(last);
+    Topogo.run("DELETE FROM \"" + schema_table + "\" WHERE name = $1;", [name], j);
+  })
+  .run(function (r, last) {
+    Topogo.close();
+  });
+
+
+} else {
+  throw new Error("Unknown argument: " + JSON.stringify(argv._));
+}
+
+
+
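For context on the retired node-era format (a sketch inferred from app.js above; the table name below is invented): each file under migrates/ exports a migrate(direction, j) function, and app.js decorates the job it passes in with the create (raw SQL statements) and drop (table names) helpers before calling it. A minimal sketch:

    // migrates/001-users.js (hypothetical)
    exports.migrate = function (direction, j) {
      if (direction === 'up')
        j.create('CREATE TABLE "users" ( id serial PRIMARY KEY );');
      else
        j.drop('users');
    };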
data/node/drop.js
ADDED
@@ -0,0 +1,17 @@
+
+
+var _      = require('underscore')
+  , path   = require('path')
+  , fs     = require('fs')
+  , River  = require('da_river').River
+  , Topogo = require('topogo').Topogo
+;
+
+
+River.new(null)
+.job(function (j) {
+  Topogo.run('DROP TABLE IF EXISTS ' + (process.env.MIGRATE_TABLE || '_test_schema')+ ';', [], j);
+})
+.run(function () {
+  Topogo.close();
+});