prick 0.17.0 → 0.20.1
- checksums.yaml +4 -4
- data/Gemfile +10 -4
- data/README.md +7 -7
- data/Rakefile +3 -1
- data/TODO +13 -11
- data/bin/console +2 -1
- data/doc/build-yml.txt +14 -0
- data/exe/prick +264 -28
- data/lib/builder/batch.rb +147 -0
- data/lib/builder/builder.rb +122 -0
- data/lib/builder/node.rb +189 -0
- data/lib/builder/node_pool.rb +105 -0
- data/lib/builder/parser.rb +120 -0
- data/lib/local/command.rb +193 -0
- data/lib/{prick → local}/git.rb +148 -22
- data/lib/local/timer.rb +98 -0
- data/lib/prick/constants.rb +54 -66
- data/lib/prick/diff.rb +28 -18
- data/lib/prick/prick_version.rb +161 -0
- data/lib/prick/state.rb +80 -165
- data/lib/prick/version.rb +2 -163
- data/lib/prick.rb +43 -27
- data/lib/share/init/.gitignore +10 -0
- data/lib/share/init/.prick-context +2 -0
- data/lib/share/init/.rspec +3 -0
- data/{share/schema/schema/public → lib/share/init/migration}/.keep +0 -0
- data/lib/share/init/prick.yml +6 -0
- data/lib/share/init/schema/.keep +0 -0
- data/lib/share/init/schema/build.yml +2 -0
- data/lib/share/init/schema/prick/.keep +0 -0
- data/lib/share/init/schema/prick/build.yml +5 -0
- data/lib/share/init/schema/prick/data.sql +6 -0
- data/{share/schema → lib/share/init}/schema/prick/tables.sql +2 -3
- data/lib/share/init/schema/public/.keep +0 -0
- data/lib/share/init/spec/prick_helper.rb +1 -0
- data/lib/share/init/spec/prick_spec.rb +6 -0
- data/lib/share/init/spec/spec_helper.rb +50 -0
- data/lib/share/migrate/migration/build.yml +4 -0
- data/lib/share/migrate/migration/diff.after-tables.sql +0 -0
- data/lib/share/migrate/migration/diff.before-tables.sql +0 -0
- data/lib/share/migrate/migration/diff.tables.sql +0 -0
- data/lib/subcommand/prick-build.rb +55 -0
- data/lib/subcommand/prick-create.rb +78 -0
- data/lib/subcommand/prick-drop.rb +25 -0
- data/lib/subcommand/prick-fox.rb +62 -0
- data/lib/subcommand/prick-init.rb +46 -0
- data/lib/subcommand/prick-make.rb +202 -0
- data/lib/subcommand/prick-migrate.rb +37 -0
- data/lib/subcommand/prick-release.rb +23 -0
- data/lib/subcommand/prick-setup.rb +20 -0
- data/lib/subcommand/prick-teardown.rb +18 -0
- data/prick.gemspec +43 -16
- metadata +161 -72
- data/.gitignore +0 -29
- data/.travis.yml +0 -7
- data/doc/create_release.txt +0 -17
- data/doc/flow.txt +0 -98
- data/doc/migra +0 -1
- data/doc/migrations.txt +0 -172
- data/doc/notes.txt +0 -116
- data/doc/prick.txt +0 -114
- data/doc/sh.prick +0 -316
- data/lib/ext/algorithm.rb +0 -14
- data/lib/ext/fileutils.rb +0 -26
- data/lib/ext/forward_method.rb +0 -18
- data/lib/ext/pg.rb +0 -18
- data/lib/ext/shortest_path.rb +0 -44
- data/lib/prick/archive.rb +0 -124
- data/lib/prick/branch.rb +0 -254
- data/lib/prick/builder.rb +0 -202
- data/lib/prick/cache.rb +0 -34
- data/lib/prick/command.rb +0 -93
- data/lib/prick/database.rb +0 -82
- data/lib/prick/dsort.rb +0 -151
- data/lib/prick/ensure.rb +0 -119
- data/lib/prick/exceptions.rb +0 -25
- data/lib/prick/head.rb +0 -183
- data/lib/prick/migration.rb +0 -70
- data/lib/prick/program.rb +0 -506
- data/lib/prick/project.rb +0 -626
- data/lib/prick/rdbms.rb +0 -137
- data/lib/prick/schema.rb +0 -27
- data/lib/prick/share.rb +0 -64
- data/libexec/strip-comments +0 -33
- data/make_releases +0 -72
- data/make_schema +0 -10
- data/share/diff/diff.after-tables.sql +0 -4
- data/share/diff/diff.before-tables.sql +0 -4
- data/share/diff/diff.tables.sql +0 -8
- data/share/features/diff.sql +0 -2
- data/share/features/feature/diff.sql +0 -2
- data/share/features/feature/migrate.sql +0 -2
- data/share/features/features.sql +0 -2
- data/share/features/features.yml +0 -2
- data/share/features/migrations.sql +0 -4
- data/share/gitignore +0 -2
- data/share/migration/diff.tables.sql +0 -8
- data/share/migration/features.yml +0 -6
- data/share/migration/migrate.sql +0 -3
- data/share/migration/migrate.yml +0 -8
- data/share/migration/tables.sql +0 -3
- data/share/schema/build.yml +0 -14
- data/share/schema/schema/build.yml +0 -3
- data/share/schema/schema/prick/build.yml +0 -14
- data/share/schema/schema/prick/data.sql +0 -7
- data/share/schema/schema/prick/schema.sql +0 -3
- data/share/schema/schema/public/build.yml +0 -13
- data/share/schema/schema.sql +0 -3
- data/test_assorted +0 -192
- data/test_feature +0 -112
- data/test_refactor +0 -34
- data/test_single_dev +0 -83
data/lib/builder/builder.rb ADDED
@@ -0,0 +1,122 @@
require 'local/command'

require 'builder/node.rb'
require 'builder/node_pool.rb'
require 'builder/batch.rb'
require 'builder/parser.rb'

include Constrain

module Prick
  module Build
    class Error < StandardError; end
    class PostgresError < Error; end

    class Builder
      # PgConn object
      attr_reader :conn

      # Root schema directory
      attr_reader :dir

      # Reflections YAML file
      attr_reader :reflections_file

      # True if database is initially clean - ie. all tables are empty
      attr_accessor :clean

      # Root build node
      attr_reader :root

      # Pool of nodes. Initialized by #load_pool
      attr_reader :pool

      forward_to :pool, :nodes, :decl_nodes, :init_nodes, :term_nodes,
                 :seed_nodes, :fox_seed_nodes, :sql_seed_nodes,
                 :schemas

      def batches() @batches ||= group end

      def initialize(conn, dir, clean = true, touched: false)
        @conn = conn
        @dir = dir
        @reflections_file = REFLECTIONS_PATH
        @clean = clean
        @pool = NodePool.new
        @root = Parser.parse(conn, dir)
        load_pool(@root) # Collect nodes into pool
        @batches = nil # Initialized by #group
      end

      # Group sources into batches
      def group
        @batches = []
        kind = nil
        batch = nil

        for node in [init_nodes, decl_nodes, fox_seed_nodes, sql_seed_nodes, term_nodes].flatten
          case node.kind
          when :module
            if batch&.kind != :module
              @batches << batch if batch
              batch = ModuleBatch.new(self)
            end
          when :exe # Exe sources always create a new batch
            @batches << batch if batch
            batch = SqlBatch.new(self)
          when batch&.kind
            ;
          when :sql || node.kind == :inline
            if batch&.kind != :exe
              @batches << batch if batch
              batch = SqlBatch.new(self)
            end
          when :inline
            @batches << batch if batch
            batch = SqlBatch.new(self)
          when :fox
            @batches << batch if batch
            batch = FoxBatch.new(self)
          when :yml
            next
          else
            raise Error, "Unexpected node kind: #{node.kind}"
          end
          batch.nodes << node
        end
        @batches << batch if batch
      end

      def execute(conn, create_schemas: schemas)
        group if batches.nil?
        conn.exec create_schemas.grep_v("public").map { |schema| "create schema #{schema}" }
        for batch in batches
          batch.execute
        end
      end

      # def setup
      # end

      # def teardown
      # end

      def dump
        batches ? batches.each(&:dump) : pool.dump
      end

      private
      def load_pool(build_node)
        pool.add(build_node.init_nodes)
        build_node.decl_nodes.each { |node|
          pool.add node
          load_pool(node) if node.kind == :yml
        }
        pool.add(build_node.seed_nodes)
        pool.add(build_node.term_nodes)
      end
    end
  end
end
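For orientation, a minimal usage sketch of the Builder class above. This is an illustration under assumptions, not code from the gem: `conn` stands for a PgConn-style connection object (the code above calls `conn.exec`, `conn.name`, and `conn.user`), and `schema` for a directory whose root contains a build.yml.

# Hypothetical usage sketch -- 'conn' is assumed to be a PgConn-style
# connection and 'schema' a directory containing a build.yml.
builder = Prick::Build::Builder.new(conn, "schema")
builder.dump           # print the parsed node pool / batches
builder.execute(conn)  # create the non-public schemas, then run each batch in order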
data/lib/builder/node.rb ADDED
@@ -0,0 +1,189 @@
module Prick
  module Build
    class Node
      attr_reader :parent
      forward_to :parent, :conn
      attr_reader :phase # :init, :decl, :seed, :term or nil (for BuildNode)
      attr_reader :kind # :sql, :exe, :fox, :yml, :inline, :module
      attr_reader :path
      attr_reader :args # only defined for :exe (String)

      def name() @name = File.basename(path) end

      def schema() @schema ||= parent&.schema || "public" end
      def schema=(s) @schema = s end

      attr_reader :source

      def source
        @source ||= read_source
      end

      def prefix_lines() 0 end

      def source_lines()
        return @source_lines if @source_lines
        source
        @source_lines
      end

      def lines() prefix_lines + source_lines end

      def initialize(parent, phase, kind, path, args = nil)
        constrain parent, BuildNode, NilClass
        constrain phase, :init, :decl, :seed, :term, nil
        constrain kind, :sql, :exe, :fox, :yml, :inline, :module
        constrain path, String, NilClass
        @parent, @phase, @kind, @path = parent, phase, kind, path
        @args = args&.empty? ? nil : args
        @schema = nil
        @source_lines = nil
      end

      def to_s() [path, args].compact.join(" ") end
      def inspect() to_s end
      def dump() puts "#{inspect} (#{schema})" end

      protected
      def read_source
        @source_lines = 0
        @source = []
      end
    end

    class SqlNode < Node
      def initialize(parent, phase, path)
        super(parent, phase, :sql, path)
      end

      protected
      def read_source
        file = File.read(path)
        @source_lines = 1 + 1 + file.count("\n")
        ["set search_path to #{schema};\n", file]
      end
    end

    class FoxNode < Node
      def initialize(parent, phase, path)
        super(parent, phase, :fox, path)
      end
    end

    # Note that #path refers to the build file
    class InlineNode < Node
      attr_reader :stmts

      def initialize(parent, phase, path, *stmts)
        super(parent, phase, :inline, path)
        @stmts = Array(stmts).flatten
      end

      def inspect() "#@path \"#{@stmts.join(";")}\"" end

      protected
      def read_source
        @source_lines = @stmts.size
        @stmts
      end
    end

    class ModuleNode < Node
      attr_reader :klass
      attr_reader :command
      def object() self.class.objects[klass] end

      def initialize(parent, phase, path, klass, command, args = nil)
        constrain klass, Symbol, String
        constrain command, Symbol, String

        super(parent, phase, :module, path, args)

        @klass = klass.to_sym
        @command = command.to_sym

        if !object
          Kernel.class_eval File.read(path)
          self.class.objects[@klass] = eval(klass.to_s).new(conn)
        end
      end

      def call()
        object.send(@command, *args)
      end

      def inspect() "#{path} #{klass}##{command}" end

      private
      # Map from klass name (a Symbol) to object
      @@objects = {}
      def self.objects() @@objects end
    end

    class ExeNode < Node
      # Using a pipe instead of just executing the command shaves off some
      # deciseconds spent starting up bash. It expects the process to read
      # database/username from standard input
      attr_reader :pipe

      def initialize(parent, phase, path, args = nil)
        super(parent, phase, :exe, path, args)
        @pipe = Command::Pipe.new(to_s, stderr: nil)
      end

      def inspect() "#{path}(#{args.join(", ")})" end

      protected
      def read_source
        pipe.puts [conn.name, conn.user]
        sql = pipe.wait
        @source_lines = 1 + 1 + sql.count("\n")
        ["set search_path to #{schema};\n", sql]
      end
    end

    class BuildNode < Node
      def nodes() @nodes ||= init_nodes + decl_nodes + seed_nodes + term_nodes end

      attr_reader :decl_nodes
      attr_reader :init_nodes
      attr_reader :term_nodes
      attr_reader :seed_nodes

      def initialize(parent, path)
        super(parent, nil, :yml, path)
        @decl_nodes = []
        @init_nodes = []
        @term_nodes = []
        @seed_nodes = []
      end

      def inspect() to_s end

      def dump
        puts "BuildNode #{path}"
        indent {
          puts "schema: #{schema}" if schema
          decl_nodes.each(&:dump)
          for kind in [:init, :term, :seed]
            kind_nodes = self.send("#{kind}_nodes".to_sym)
            if !kind_nodes.empty?
              puts "#{kind.upcase}:"
              indent { kind_nodes.each(&:dump) }
            end
          end
        }
      end
    end

    class RootBuildNode < BuildNode
      attr_reader :conn

      def initialize(conn, path)
        @conn = conn
        super(nil, path)
      end
    end
  end
end
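To make the ModuleNode contract above concrete: it class_evals the referenced Ruby file once, instantiates the named class with the connection, and #call sends the named command with the arguments from the build file. The file below is a hypothetical example (the class, method, file, and table names are invented), not a file shipped with the gem.

# seed_countries.rb -- hypothetical module file referenced from a build.yml
# entry such as:  call: seed_countries.rb SeedCountries run
class SeedCountries
  def initialize(conn)   # ModuleNode passes the build connection here
    @conn = conn
  end

  def run(*args)         # invoked via ModuleNode#call -> object.send(:run, *args)
    @conn.exec "insert into countries (code) values ('dk'), ('de')"
  end
end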
data/lib/builder/node_pool.rb ADDED
@@ -0,0 +1,105 @@
module Prick
  module Build
    class NodePool
      def schemas() @schemas.keys end
      def before_schema(s) schemas.take_while { |schema| schema != s } end
      def after_schema(s) schemas.reverse.take_while { |schema| schema != s }.reverse end

      attr_reader :nodes

      attr_reader :init_nodes
      attr_reader :decl_nodes
      attr_reader :seed_nodes
      attr_reader :term_nodes

      # attr_reader :setup_nodes
      # attr_reader :teardown_nodes

      def fox_seed_nodes() seed_nodes.select { |node| node.kind == :fox } end
      def sql_seed_nodes() seed_nodes.select { |node| node.kind == :sql } end

      def initialize()
        self.clear
      end

      def add(*nodes)
        nodes = Array(nodes).flatten
        @nodes.concat(nodes)
        nodes.each { |node|
          @schemas[node.schema] += 1
          @kind_nodes[node.phase]&.append(node)
        }
        self
      end

      def delete(*nodes)
        # puts "#delete(*nodes)"
        nodes = Array(nodes).flatten
        nodes.each { |node|
          delete_node(node)
          kind_nodes = @kind_nodes[node.phase] and kind_nodes.delete_at(kind_nodes.index(node))
        }
        nodes.last
      end

      def delete_if(phase = nil, &block)
        candidates = @kind_nodes[phase] || @nodes
        delete(candidates.select { |node| yield(node) })
      end

      def delete_schema(*schemas, exclude: [])
        schemas = Array(schemas).flatten
        delete_if { |node|
          schemas.include?(node.schema) && !exclude.include?(node.phase) && !exclude.include?(node.kind)
        }
      end

      def clear(*phases)
        phases = Array(phases).flatten
        if !phases.empty?
          for phase in phases
            nodes = @kind_nodes[phase]
            nodes.each { |node| delete_node(node) }
            @kind_nodes[phase].clear
          end
        else
          @schemas = Hash.new(0) # map from schema name to number of nodes
          @nodes = []
          @init_nodes = []
          @decl_nodes = []
          @seed_nodes = []
          @term_nodes = []
          @kind_nodes = {
            decl: @decl_nodes,
            init: @init_nodes,
            seed: @seed_nodes,
            term: @term_nodes,
            yml: nil
          }
        end
      end

      def dump
        puts "NodePool, #{nodes.size} nodes"
        indent {
          puts "init_nodes:"
          indent { @init_nodes.each &:dump }
          puts "decl_nodes:"
          indent { @decl_nodes.each &:dump }
          puts "seed_nodes:"
          indent { @seed_nodes.each &:dump }
          puts "term_nodes:"
          indent { @term_nodes.each &:dump }
        }
      end

      private
      def delete_node(node)
        # puts "#delete_node"
        @nodes.delete_at(@nodes.index(node))
        @schemas[node.schema] -= 1
        @schemas.delete(node.schema) if @schemas[node.schema] == 0
      end
    end
  end
end
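A brief, hypothetical NodePool usage sketch illustrating the API above (the schema name and the source of the nodes are invented):

pool = Prick::Build::NodePool.new
pool.add(root.init_nodes, root.decl_nodes)      # accepts nodes or arrays of nodes
pool.schemas                                    # schemas referenced by the pool, in insertion order
pool.delete_schema("audit", exclude: [:init])   # drop a schema's nodes except its :init phase
pool.clear(:seed)                               # empty a single phase; clear() resets the whole pool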
data/lib/builder/parser.rb ADDED
@@ -0,0 +1,120 @@
module Prick
  module Build
    class Parser
      def self.parse(conn, dir)
        Parser.new(conn).parse(dir).unit
        # [parser.unit, parser.schemas]
      end

      attr_reader :conn
      attr_reader :dir
      attr_reader :unit
      # attr_reader :schemas

      def initialize(conn)
        @conn = conn
      end

      def parse(dir)
        @dir = dir
        # @schemas = {}
        parse_directory(nil, dir)
        self
      end

      private

      # First build unit is a RootBuildNode, the rest are regular BuildNode objects
      def make_build_unit(parent, path)
        if @unit
          BuildNode.new(parent, path)
        else
          @unit = RootBuildNode.new(conn, path)
        end
      end

      def parse_directory(parent, dir)
        build_file = "#{dir}/build.yml".sub(/\/\//, "/")
        if File.exist? build_file
          parse_build_file(parent, dir, build_file)
        else
          raise Error, "Can't find build.yml in #{dir}"
        end
      end

      def parse_build_file(parent, dir, path)
        unit = make_build_unit(parent, path)
        entries = YAML.load(File.read(path)) || []
        entries.each { |entry|
          if entry.is_a?(Hash)
            entry.each { |key, value|
              if key == "schema"
                unit.schema = value
                # @schemas[unit.schema = value] = true
              else
                case key
                when "init"; unit.init_nodes
                when "term"; unit.term_nodes
                when "seed"; unit.seed_nodes
                else
                  raise Error, "Illegal key in #{unit.path}: #{key}"
                end.concat(Array(value).map { |value| parse_entry(unit, key.to_sym, dir, value) })
              end
            }
          else
            node = parse_entry(unit, :decl, dir, entry)
            if node.kind == :fox
              unit.seed_nodes << node
            else
              unit.decl_nodes << node
            end
          end
        }
        unit
      end

      def parse_entry(unit, phase, dir, entry)
        if entry.is_a?(Hash)
          entry.size == 1 or raise Error, "sql and module are single-line values"
          key, value = entry.first
          case key
          when "sql"; InlineNode.new(unit, phase, unit.path, value)
          when "call";
            args = value.split(/\s+/)
            args.size >= 3 or raise "Illegal number of arguments: #{value}"
            file, klass, command, args = *args
            ModuleNode.new(unit, phase, "#{dir}/#{file}", klass, command, args)
          else
            raise Error, "Illegal key: #{key}"
          end
        else
          name = entry
          name.sub!(/\/$/, "")
          if name =~ /^(\S+)\s+(.*)$/ # exe
            file = $1
            args = $2
          else
            file = name
          end
          path = "#{dir}/#{file}"
          if File.directory? path
            parse_directory(unit, path)
          elsif File.executable? path
            ExeNode.new(unit, phase, path, args)
          elsif File.file? path
            case path
            when /\.sql$/
              SqlNode.new(unit, phase, path)
            when /\.fox$/
              FoxNode.new(unit, :seed, path)
            else
              raise Error, "Unrecognized file type #{File.basename(path)} in #{unit.dir}"
            end
          else
            raise Error, "Can't find #{name} in #{dir} from #{unit}"
          end
        end
      end
    end
  end
end
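The parser above defines the build.yml format it accepts. The following is a hypothetical example consistent with that code (the schema, file, and class names are invented; it is not one of the build.yml files shipped under lib/share). It shows the entry forms: a schema setting, plain files and directories, executables with arguments, .fox seed files, and init/seed lists with inline sql and call entries.

# Hypothetical build.yml consistent with Prick::Build::Parser
- schema: accounts            # set the schema for this build unit
- tables.sql                  # .sql file       -> SqlNode (:decl phase)
- views/                      # sub-directory   -> parsed recursively via its own build.yml
- generate_types.sh --all     # executable file -> ExeNode (its output is used as SQL)
- seed.fox                    # .fox file       -> FoxNode, always placed in the :seed phase
- init:
    - sql: "create extension if not exists pgcrypto"   # inline SQL -> InlineNode
- seed:
    - call: seed_countries.rb SeedCountries run        # Ruby module -> ModuleNode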