dake 0.1.0 → 0.2.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 0bbdbd9a0326d99f69811c766b89258377e872f5fc520c99bdfdec9d9e24fcf0
4
- data.tar.gz: 78e6a370fec08445f8e7c2b16d182b6d00d5a2dc1413be1617d5d5a90cf1467f
3
+ metadata.gz: 480b83fa04055fcf5d393da94b870bafdb901c5a39788d52dbee7d89244f5f1f
4
+ data.tar.gz: a10e599b85562d3302997cd2500117357fbbacd2bae85cf7232452246afe5613
5
5
  SHA512:
6
- metadata.gz: 72eeb89e6d34de732cf390ccc81c45214f0139a76dbf63227dba93e1b805bc7cbc59fb3686ad309dee280f6b0338c931d5eef5359d19bc4441015faf934d8af7
7
- data.tar.gz: de369174d69b02799c2c815014e446de2368f96d1a00a0ac1fc5c081d997f996c0b4eaf6b05a0b663b26e400013b346cc7278f9d2d240f1138644ba0584ccf74
6
+ metadata.gz: 6c849058294a6c5daf084d19ba34d6cbecc53736863958cd064f1ab5f2f5d1cbe1a83c903fc137622ce02d2fa6547285a07882643b84d01c42b6a35b809bacb4
7
+ data.tar.gz: c665db3ce2bafab077723497299735e5c9d455654385b8c1845e944454ec0140c5410bbf5b4b67410224eb27b39ac97d7798a284b47c9d3e6d77ff2ce4394cd4
data/.gitignore CHANGED
@@ -1,3 +1,4 @@
1
+ /.idea/
1
2
  /.bundle/
2
3
  /.yardoc
3
4
  /_yardoc/
@@ -6,3 +7,4 @@
6
7
  /pkg/
7
8
  /spec/reports/
8
9
  /tmp/
10
+ Gemfile.lock
@@ -12,6 +12,7 @@ Gem::Specification.new do |spec|
12
12
  spec.summary = %q{Dake is a data workflow tool inspired by Drake.}
13
13
  spec.description = %q{Dake is a data workflow tool inspired by Drake.}
14
14
  spec.homepage = "https://github.com/minor6th/dake"
15
+ spec.license = "MIT"
15
16
 
16
17
  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
17
18
  # to allow pushing to a single host or delete this section to allow pushing to any host.
@@ -0,0 +1,40 @@
1
+ # This is an example
2
+
3
+ imdb/dataset_urls <- [ruby]
4
+ require 'net/http'
5
+ html = Net::HTTP.get(URI('https://datasets.imdbws.com/index.html'))
6
+ urls = html.each_line.grep(/href=.+tsv.gz/) { |line| line.sub(/.*<a href=(https:\/\/.+tsv.gz)>.*\n/, '\1') }
7
+ FileUtils.mkdir('imdb') unless Dir.exist? 'imdb'
8
+ file = File.open('$[OUTPUT]', 'w')
9
+ file.write(urls.join("\n"))
10
+ file.close
11
+
12
+ ^"imdb/.+\.tsv.gz" <- imdb/dataset_urls
13
+ FILE=`basename $[OUTPUT]`
14
+ URL=`grep $FILE $[INPUT]`
15
+ wget -O $[OUTPUT] $URL
16
+ touch $[OUTPUT]
17
+
18
+ ^"imdb/(?<file>.+\.tsv)" <- imdb/$[file].gz
19
+ gunzip -k $[INPUT]
20
+ touch $[OUTPUT]
21
+
22
+ imdb/title_type_rating_dist.pdf <- imdb/title.ratings.tsv, imdb/title.basics.tsv [r]
23
+ require(tidyverse)
24
+ title_ratings <- read_tsv("$[INPUT0]")
25
+ title_basics <- read_tsv("$[INPUT1]")
26
+ pd <- title_basics %>% left_join(title_ratings, by='tconst') %>%
27
+ group_by(titleType) %>%
28
+ summarise(averageRating = mean(averageRating, na.rm = T))
29
+ pdf("$[OUTPUT]", width = 5, height = 4)
30
+ ggplot(pd, aes(x = reorder(titleType, averageRating), y = averageRating)) +
31
+ geom_col() +
32
+ coord_flip() +
33
+ xlab("titleType")
34
+
35
+ imdb/rating_dist.pdf <- imdb/title.ratings.tsv [r]
36
+ require(tidyverse)
37
+ title_ratings <- read_tsv("$[INPUT0]")
38
+ pdf("$[OUTPUT]", width = 5, height = 4)
39
+ ggplot(title_ratings, aes(x = averageRating)) + geom_histogram()
40
+
data/exe/dake CHANGED
@@ -28,7 +28,7 @@ def target_opts(target)
28
28
  opts = Dake::TargetOption.new(false, false, :check, :up_tree)
29
29
  mdata = /(?<build_mode>[+-]?)(?<tree_mode>[=^]?)(?<tag>@?)(?<regex>%?)(?<target_name>.+)/.match(target)
30
30
  opts.build_mode = (mdata[:build_mode] == '+' ? :forced : :exclusion) unless mdata[:build_mode].empty?
31
- opts.tree_mode = (mdata[:tree_mode] == '=' ? 'target_only' : :down_tree) unless mdata[:tree_mode].empty?
31
+ opts.tree_mode = (mdata[:tree_mode] == '=' ? :target_only : :down_tree) unless mdata[:tree_mode].empty?
32
32
  opts.tag = true unless mdata[:tag].empty?
33
33
  opts.regex = true unless mdata[:regex].empty?
34
34
  [mdata[:target_name], opts]
@@ -77,18 +77,42 @@ command :build do |c|
77
77
  env = global_options[:var].reduce({}, :merge!)
78
78
  env.merge! 'BASE' => File.dirname(path)
79
79
  begin
80
- target_dict = {}
81
- args.each do |arg|
82
- target, opts = target_opts(arg)
83
- target_dict[target] = opts
84
- end
85
80
  tree = DakeParser.new.parse(workflow_file.read)
86
81
  workflow = DakeTransform.new.apply(tree, src_file: workflow_file.to_path)
87
82
  analyzer = DakeAnalyzer.new(workflow, [path], env).analyze
83
+ target_pairs = []
84
+ args.each do |arg|
85
+ target, opts = target_opts(arg)
86
+ if target == '...'
87
+ tag_list = analyzer.tag_target_dict.keys.map { |tag| [tag, Dake::TargetOption.new(true, false, :check, :up_tree)] }
88
+ file_list = analyzer.file_target_dict.keys.map { |file| [file, Dake::TargetOption.new(false, false, :check, :up_tree)] }
89
+ target_pairs += (tag_list + file_list)
90
+ else
91
+ if opts.regex
92
+ matched_targets =
93
+ if opts.tag
94
+ analyzer.tag_target_dict.keys.find_all { |tag_name| tag_name.match target }
95
+ else
96
+ analyzer.file_target_dict.keys.find_all { |file_name| file_name.match target }
97
+ end
98
+ matched_targets.each { |matched_target| target_pairs << [matched_target, opts] }
99
+ else
100
+ target_pairs << [target, opts]
101
+ end
102
+ end
103
+ end
104
+ if target_pairs.any? { |_, opts| opts.tree_mode == :down_tree }
105
+ tag_list = analyzer.tag_target_dict.keys.map { |tag| [tag, Dake::TargetOption.new(true, false, :check, :up_tree)] }
106
+ file_list = analyzer.file_target_dict.keys.map { |file| [file, Dake::TargetOption.new(false, false, :check, :up_tree)] }
107
+ resolve_pairs = (tag_list + file_list)
108
+ else
109
+ resolve_pairs = target_pairs
110
+ end
88
111
  resolver = DakeResolver.new(analyzer)
89
- dep_graph = resolver.resolve(target_dict)
112
+ dep_graph = resolver.resolve(resolve_pairs)
113
+ rebuild_set = resolver.target_rebuild_set(target_pairs, dep_graph)
90
114
  dake_db = DakeDB.new(workflow_file.to_path)
91
- DakeExecutor.new(analyzer, dake_db, dep_graph, options[:jobs]).execute(options[:'dry-run'], options[:log])
115
+ DakeExecutor.new(analyzer, dake_db, dep_graph, options[:jobs]).execute(rebuild_set, options[:'dry-run'], options[:log])
92
116
  rescue Parslet::ParseFailed => failure
93
117
  puts failure.cause
94
118
  exit(1)
@@ -116,11 +140,13 @@ command :list do |c|
116
140
  workflow = DakeTransform.new.apply(tree, src_file: workflow_file.to_path)
117
141
  analyzer = DakeAnalyzer.new(workflow, [path], env).analyze
118
142
  resolver = DakeResolver.new(analyzer)
119
-
120
- tag_list = analyzer.tag_target_dict.keys.map { |tag| [tag, Dake::TargetOption.new(true)] }
121
- file_list = analyzer.file_target_dict.keys.map { |file| [file, Dake::TargetOption.new(false)] }
122
-
123
- dep_graph = (options['dep'] ? resolver.resolve((tag_list + file_list).to_h, true) : nil)
143
+ dep_graph = nil
144
+ if options['dep']
145
+ tag_list = analyzer.tag_target_dict.keys.map { |tag| [tag, Dake::TargetOption.new(true, false, :check, :up_tree)] }
146
+ file_list = analyzer.file_target_dict.keys.map { |file| [file, Dake::TargetOption.new(false, false, :check, :up_tree)] }
147
+ target_pairs = (tag_list + file_list)
148
+ dep_graph = resolver.resolve(target_pairs)
149
+ end
124
150
  analyzer.tag_target_dict.each do |tag, steps|
125
151
  steps.each do |step|
126
152
  next if options['dep'] and not dep_graph.root_step.include? step
@@ -1,18 +1,6 @@
1
1
  require 'set'
2
2
  require 'open3'
3
3
 
4
- # the data struct needed by the executor
5
- # note that this is not nessesarily the complete graph,
6
- # the graph is only used to produce the given targets
7
- DepGraph = Struct.new(
8
- :succ_step, # a dict maps each step in the DepGraph to the steps depend on it
9
- :dep_step, # a dict maps each step in the DepGraph to the steps it depends on
10
- :step_list, # a list of steps represents one sequential execution order
11
- :root_step, # a set of steps that hos no dependant
12
- :leaf_step, # a set of steps that has no prerequisite
13
- :need_rebuild # a set of steps in step_list which should be executed to update their targets
14
- )
15
-
16
4
  class DakeAnalyzer
17
5
  attr_reader :workflow, :variable_dict, :method_dict, :included_files
18
6
  attr_reader :tag_target_dict, :file_target_dict
@@ -138,9 +126,9 @@ class DakeAnalyzer
138
126
  end
139
127
  else
140
128
  # Generated file list should not be used in targets
141
- if type == :targets
142
- raise "File list `#{file_name}' in #{step.src_file} at #{line}:#{column} cannot be used as targets."
143
- end
129
+ # if type == :targets
130
+ # raise "File list `#{file_name}' in #{step.src_file} at #{line}:#{column} cannot be used as targets."
131
+ # end
144
132
  end
145
133
  newfile = file.dup
146
134
  newfile.scheme = scheme
@@ -15,16 +15,17 @@ class DakeExecutor
15
15
  ) if @async
16
16
  end
17
17
 
18
- def execute(dry_run=false, log=false)
19
- if @dep_graph.need_rebuild.empty?
18
+ def execute(rebuild_set, dry_run=false, log=false)
19
+ if rebuild_set.empty?
20
20
  STDERR.puts "Nothing to be done.".colorize(:green)
21
21
  return
22
22
  end
23
23
  if @async
24
24
  dep_map = Hash.new
25
- @dep_graph.dep_step.each do |step, dep_set|
25
+ rebuild_set.each do |step|
26
+ dep_set = @dep_graph.dep_step[step]
26
27
  next if dep_set.empty?
27
- dep_map[step] = dep_set.dup
28
+ dep_map[step] = dep_set & rebuild_set
28
29
  end
29
30
 
30
31
  queue = Queue.new
@@ -42,7 +43,7 @@ class DakeExecutor
42
43
  end
43
44
 
44
45
  lock = Concurrent::ReadWriteLock.new
45
- @dep_graph.leaf_step.each { |step| queue << step }
46
+ @dep_graph.leaf_step.each { |step| queue << step if rebuild_set.include? step }
46
47
 
47
48
  while next_step = queue.deq
48
49
  @pool.post(next_step) do |step|
@@ -55,7 +56,7 @@ class DakeExecutor
55
56
  "skipped due to prerequisite step(s) error."
56
57
  error_queue << Exception.new(msg)
57
58
  else
58
- execute_step(step, dry_run, log) if @dep_graph.need_rebuild.include? step
59
+ execute_step(step, dry_run, log)
59
60
  end
60
61
  lock.acquire_write_lock
61
62
  dep_map.delete step
@@ -63,6 +64,7 @@ class DakeExecutor
63
64
  queue.close
64
65
  else
65
66
  @dep_graph.succ_step[step].each do |succ|
67
+ next unless dep_map[succ]
66
68
  dep_map[succ].delete step
67
69
  if dep_map[succ].empty?
68
70
  queue << succ
@@ -99,7 +101,7 @@ class DakeExecutor
99
101
  raise "Failed to execute some step(s)" unless error_steps.empty?
100
102
  else
101
103
  @dep_graph.step_list.each do |step|
102
- execute_step(step, dry_run, log) if @dep_graph.need_rebuild.include? step
104
+ execute_step(step, dry_run, log) if rebuild_set.include? step
103
105
  end
104
106
  end
105
107
  end
@@ -1,8 +1,115 @@
1
+ # the data struct needed by the executor
2
+ # note that this is not necessarily the complete graph,
3
+ # the graph is only used to produce the given targets
4
+ DepGraph = Struct.new(
5
+ :succ_step, # a dict maps each step in the DepGraph to the steps depend on it
6
+ :dep_step, # a dict maps each step in the DepGraph to the steps it depends on
7
+ :step_list, # a list of steps represents one sequential execution order
8
+ :root_step, # a set of steps that has no dependant
9
+ :leaf_step, # a set of steps that has no prerequisite
10
+ :step_target # a dict maps each step in the DepGraph to its output files used while resolving targets
11
+ )
12
+
1
13
  class DakeResolver
2
14
  def initialize(analyzer)
3
15
  @analyzer = analyzer
4
16
  end
5
17
 
18
+ def target_rebuild_set(target_pairs, dep_graph)
19
+ rebuild_set = Set.new
20
+ target_pairs.each do |target_name, target_opts|
21
+ if target_opts.tag
22
+ dummy_step = Step.new([], [], [], {}, nil, nil, @analyzer.variable_dict, nil, nil)
23
+ scheme = DakeScheme::Tag.new('@', target_name, dummy_step)
24
+ else
25
+ scheme = @analyzer.analyze_scheme(target_name, nil, nil, nil)
26
+ end
27
+ target_steps = find_steps(scheme, target_opts.tag).to_set
28
+ if target_steps.empty? and not scheme.exist?
29
+ raise "No step found for building file `#{target_name}'."
30
+ end
31
+
32
+ visited = Set.new
33
+ path_visited = Set.new
34
+ down_tree_steps = Set.new
35
+ up_tree_steps = Set.new
36
+ need_rebuild = Set.new
37
+ up_tree = 0
38
+ dep_step_list = {}
39
+
40
+ init_steps = (target_opts.tree_mode == :down_tree ? dep_graph.root_step : target_steps)
41
+ init_steps.each do |init_step|
42
+ stack = [init_step]
43
+
44
+ until stack.empty?
45
+ step = stack.last
46
+ visited << step
47
+ path_visited << step
48
+ up_tree_steps << step if up_tree > 0 or target_steps.include? step
49
+
50
+ dep_step_list[step] ||= dep_graph.dep_step[step].to_a
51
+ while next_step = dep_step_list[step].pop
52
+ break unless visited.include? next_step
53
+ end
54
+ if next_step
55
+ stack.push next_step
56
+ up_tree += 1 if target_steps.include? step
57
+ else
58
+ stack.pop
59
+
60
+ if dep_graph.leaf_step.include? step
61
+ step.prerequisites.each do |prereq|
62
+ if prereq.flag != '?' and not prereq.scheme.exist?
63
+ raise "No step found for building file `#{prereq.scheme.path}'."
64
+ end
65
+ end
66
+ end
67
+
68
+ if target_steps.include? step or dep_graph.dep_step[step].any? { |s| down_tree_steps.include? s }
69
+ down_tree_steps << step
70
+ end
71
+
72
+ if target_opts.build_mode == :check and (up_tree_steps.include? step or down_tree_steps.include? step)
73
+ if dep_graph.leaf_step.include? step
74
+ need_rebuild << step if need_execute?(dep_graph.step_target[step], step)
75
+ else
76
+ if dep_graph.dep_step[step].any? { |dep_step| need_rebuild.include? dep_step }
77
+ need_rebuild << step
78
+ else
79
+ need_rebuild << step if need_execute?(dep_graph.step_target[step], step)
80
+ end
81
+ end
82
+ end
83
+
84
+ up_tree -= 1 if target_steps.include? step
85
+ path_visited.delete step
86
+ end
87
+ end
88
+ end
89
+ case target_opts.build_mode
90
+ when :forced
91
+ case target_opts.tree_mode
92
+ when :up_tree then rebuild_set |= up_tree_steps
93
+ when :down_tree then rebuild_set |= down_tree_steps
94
+ when :target_only then rebuild_set |= target_steps
95
+ end
96
+ when :exclusion
97
+ case target_opts.tree_mode
98
+ when :up_tree then rebuild_set -= up_tree_steps
99
+ when :down_tree then rebuild_set -= down_tree_steps
100
+ when :target_only then rebuild_set -= target_steps
101
+ end
102
+ when :check
103
+ case target_opts.tree_mode
104
+ when :up_tree then rebuild_set |= (up_tree_steps & need_rebuild)
105
+ when :down_tree then rebuild_set |= (down_tree_steps & need_rebuild)
106
+ when :target_only then rebuild_set |= (target_steps & need_rebuild)
107
+ end
108
+ end
109
+ end
110
+ rebuild_set
111
+ end
112
+
6
113
  # check if a step needs to be executed to produce the given targets
7
114
  def need_execute?(targets, step)
8
115
  max_mtime = nil
@@ -43,7 +150,7 @@ class DakeResolver
43
150
  false
44
151
  end
45
152
 
46
- def find_steps(target_scheme, tag, optional=false)
153
+ def find_steps(target_scheme, tag)
47
154
  target_name = target_scheme.path
48
155
  target_src = target_scheme.src
49
156
  if tag
@@ -58,8 +165,8 @@ class DakeResolver
58
165
  steps = []
59
166
  template_steps.each do |template_step|
60
167
  if @analyzer.step_template_dict[template_step] and
61
- @analyzer.step_template_dict[template_step][mdata.named_captures]
62
- step = @analyzer.step_template_dict[template_step][mdata.named_captures]
168
+ @analyzer.step_template_dict[template_step][mdata]
169
+ step = @analyzer.step_template_dict[template_step][mdata]
63
170
  else
64
171
  step = template_step.dup
65
172
  step.targets = template_step.targets.dup
@@ -67,7 +174,7 @@ class DakeResolver
67
174
  step.context = template_step.context.dup
68
175
  step.context.merge! mdata.named_captures
69
176
  @analyzer.step_template_dict[template_step] ||= {}
70
- @analyzer.step_template_dict[template_step][mdata.named_captures] = step
177
+ @analyzer.step_template_dict[template_step][mdata] = step
71
178
  end
72
179
  step.targets.map! do |file|
73
180
  if file.scheme.is_a? DakeScheme::Regex and file.scheme.path.match target_name
@@ -96,8 +203,8 @@ class DakeResolver
96
203
  end
97
204
  if template_step
98
205
  if @analyzer.step_template_dict[template_step] and
99
- @analyzer.step_template_dict[template_step][mdata.named_captures]
100
- step = @step_template_dict[template_step][mdata.named_captures]
206
+ @analyzer.step_template_dict[template_step][mdata]
207
+ step = @analyzer.step_template_dict[template_step][mdata]
101
208
  else
102
209
  step = template_step.dup
103
210
  step.targets = template_step.targets.dup
@@ -105,7 +212,7 @@ class DakeResolver
105
212
  step.context = template_step.context.dup
106
213
  step.context.merge! mdata.named_captures
107
214
  @analyzer.step_template_dict[template_step] ||= {}
108
- @analyzer.step_template_dict[template_step][mdata.named_captures] = step
215
+ @analyzer.step_template_dict[template_step][mdata] = step
109
216
  end
110
217
  step.targets.map! do |file|
111
218
  if file.scheme.is_a? DakeScheme::Regex and file.scheme.path.match target_src
@@ -120,12 +227,7 @@ class DakeResolver
120
227
  @analyzer.file_target_dict[target_name] = step
121
228
  steps = [step]
122
229
  else
123
- scheme = @analyzer.analyze_scheme(target_name, nil, nil, nil)
124
- if optional or scheme.exist?
125
- steps = []
126
- else
127
- raise "No step found for building file `#{target_name}'."
128
- end
230
+ steps = []
129
231
  end
130
232
  end
131
233
  end
@@ -133,26 +235,25 @@ class DakeResolver
133
235
  end
134
236
 
135
237
  # resolve the dependency graph and generate step list for sequential execution
136
- def resolve(target_dict, no_check=false)
238
+ def resolve(target_pairs)
137
239
  step_list = []
138
240
  visited = Set.new
139
241
  path_visited = Set.new
140
242
  leaf_steps = Set.new
141
243
  target_steps = Set.new
142
- need_rebuild = Set.new
143
244
  succ_step_dict = {}
144
245
  dep_step_dict = {}
145
246
  dep_step_list = {}
146
247
  succ_target_dict = {}
147
248
 
148
- target_dict.each do |target_name, target_opts|
249
+ target_pairs.each do |target_name, target_opts|
149
250
  if target_opts.tag
150
251
  dummy_step = Step.new([], [], [], {}, nil, nil, @analyzer.variable_dict, nil, nil)
151
252
  scheme = DakeScheme::Tag.new('@', target_name, dummy_step)
152
253
  else
153
254
  scheme = @analyzer.analyze_scheme(target_name, nil, nil, nil)
154
255
  end
155
- dep_steps = find_steps(scheme, target_opts.tag, no_check)
256
+ dep_steps = find_steps(scheme, target_opts.tag)
156
257
  dep_steps.each do |dep_step|
157
258
  target = dep_step.targets.find { |target| target.scheme.path == scheme.path }
158
259
  succ_target_dict[dep_step] ||= Set.new
@@ -173,7 +274,7 @@ class DakeResolver
173
274
  dep_step_dict[step] = Set.new
174
275
  step.prerequisites.map! { |file| @analyzer.analyze_file(file, :prerequisites, step) }.flatten!
175
276
  step.prerequisites.each do |dep|
176
- dep_steps = find_steps(dep.scheme, dep.tag, (dep.flag == '?' or no_check))
277
+ dep_steps = find_steps(dep.scheme, dep.tag)
177
278
  dep_steps.each do |dep_step|
178
279
  dep_step_dict[step] << dep_step
179
280
  succ_step_dict[dep_step] ||= Set.new
@@ -205,18 +306,7 @@ class DakeResolver
205
306
  end
206
307
  end
207
308
  end
208
- step_list.each do |step|
209
- if leaf_steps.include? step
210
- need_rebuild << step if no_check or need_execute?(succ_target_dict[step], step)
211
- else
212
- if dep_step_dict[step].any? { |dep_step| need_rebuild.include? dep_step }
213
- need_rebuild << step
214
- else
215
- need_rebuild << step if no_check or need_execute?(succ_target_dict[step], step)
216
- end
217
- end
218
- end
219
309
  root_steps = target_steps.select { |step| succ_step_dict[step].empty? }.to_set
220
- DepGraph.new(succ_step_dict, dep_step_dict, step_list, root_steps, leaf_steps, need_rebuild)
310
+ DepGraph.new(succ_step_dict, dep_step_dict, step_list, root_steps, leaf_steps, succ_target_dict)
221
311
  end
222
312
  end
@@ -34,11 +34,12 @@ module DakeScheme
34
34
  class Local < Scheme
35
35
  PATTERN = ['local:']
36
36
  def initialize(scheme_part, path_part, step)
37
- @src = path_part
38
37
  if path_part.start_with? '/'
39
38
  @path = path_part
39
+ @src = Pathname.new(path_part).relative_path_from(step.context['BASE'])
40
40
  else
41
41
  @path = File.expand_path(path_part, step.context['BASE'])
42
+ @src = path_part
42
43
  end
43
44
  @step = step
44
45
  end
@@ -1,3 +1,3 @@
1
1
  module Dake
2
- VERSION = "0.1.0"
2
+ VERSION = "0.2.0"
3
3
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: dake
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.0
4
+ version: 0.2.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - minor6th
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2019-04-02 00:00:00.000000000 Z
11
+ date: 2019-04-07 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: bundler
@@ -180,6 +180,7 @@ files:
180
180
  - bin/console
181
181
  - bin/setup
182
182
  - dake.gemspec
183
+ - examples/Dakefile
183
184
  - exe/dake
184
185
  - lib/dake.rb
185
186
  - lib/dake/analyzer.rb
@@ -193,7 +194,8 @@ files:
193
194
  - vim/ftdetect/dake.vim
194
195
  - vim/syntax/dake.vim
195
196
  homepage: https://github.com/minor6th/dake
196
- licenses: []
197
+ licenses:
198
+ - MIT
197
199
  metadata: {}
198
200
  post_install_message:
199
201
  rdoc_options: []