rake-pipeline-fork 0.8.0
- checksums.yaml +15 -0
- data/.gitignore +18 -0
- data/.rspec +1 -0
- data/.travis.yml +12 -0
- data/.yardopts +2 -0
- data/GETTING_STARTED.md +268 -0
- data/Gemfile +14 -0
- data/LICENSE +20 -0
- data/README.markdown +11 -0
- data/README.yard +178 -0
- data/Rakefile +21 -0
- data/bin/rakep +4 -0
- data/examples/copying_files.md +12 -0
- data/examples/minifying_files.md +37 -0
- data/examples/modifying_pipelines.md +67 -0
- data/examples/multiple_pipelines.md +77 -0
- data/lib/generators/rake/pipeline/install/install_generator.rb +70 -0
- data/lib/rake-pipeline.rb +462 -0
- data/lib/rake-pipeline/cli.rb +56 -0
- data/lib/rake-pipeline/dsl.rb +9 -0
- data/lib/rake-pipeline/dsl/pipeline_dsl.rb +246 -0
- data/lib/rake-pipeline/dsl/project_dsl.rb +108 -0
- data/lib/rake-pipeline/dynamic_file_task.rb +194 -0
- data/lib/rake-pipeline/error.rb +17 -0
- data/lib/rake-pipeline/file_wrapper.rb +182 -0
- data/lib/rake-pipeline/filter.rb +249 -0
- data/lib/rake-pipeline/filters.rb +4 -0
- data/lib/rake-pipeline/filters/concat_filter.rb +63 -0
- data/lib/rake-pipeline/filters/gsub_filter.rb +56 -0
- data/lib/rake-pipeline/filters/ordering_concat_filter.rb +38 -0
- data/lib/rake-pipeline/filters/pipeline_finalizing_filter.rb +21 -0
- data/lib/rake-pipeline/graph.rb +178 -0
- data/lib/rake-pipeline/manifest.rb +86 -0
- data/lib/rake-pipeline/manifest_entry.rb +34 -0
- data/lib/rake-pipeline/matcher.rb +141 -0
- data/lib/rake-pipeline/middleware.rb +72 -0
- data/lib/rake-pipeline/precompile.rake +8 -0
- data/lib/rake-pipeline/project.rb +335 -0
- data/lib/rake-pipeline/rails_plugin.rb +10 -0
- data/lib/rake-pipeline/railtie.rb +34 -0
- data/lib/rake-pipeline/reject_matcher.rb +29 -0
- data/lib/rake-pipeline/server.rb +15 -0
- data/lib/rake-pipeline/sorted_pipeline.rb +19 -0
- data/lib/rake-pipeline/version.rb +6 -0
- data/rails/init.rb +2 -0
- data/rake-pipeline.gemspec +24 -0
- data/spec/cli_spec.rb +71 -0
- data/spec/concat_filter_spec.rb +37 -0
- data/spec/dsl/pipeline_dsl_spec.rb +165 -0
- data/spec/dsl/project_dsl_spec.rb +41 -0
- data/spec/dynamic_file_task_spec.rb +119 -0
- data/spec/encoding_spec.rb +106 -0
- data/spec/file_wrapper_spec.rb +132 -0
- data/spec/filter_spec.rb +332 -0
- data/spec/graph_spec.rb +56 -0
- data/spec/gsub_filter_spec.rb +87 -0
- data/spec/manifest_entry_spec.rb +46 -0
- data/spec/manifest_spec.rb +67 -0
- data/spec/matcher_spec.rb +141 -0
- data/spec/middleware_spec.rb +199 -0
- data/spec/ordering_concat_filter_spec.rb +42 -0
- data/spec/pipeline_spec.rb +232 -0
- data/spec/project_spec.rb +295 -0
- data/spec/rake_acceptance_spec.rb +738 -0
- data/spec/rake_tasks_spec.rb +21 -0
- data/spec/reject_matcher_spec.rb +31 -0
- data/spec/sorted_pipeline_spec.rb +27 -0
- data/spec/spec_helper.rb +38 -0
- data/spec/support/spec_helpers/file_utils.rb +35 -0
- data/spec/support/spec_helpers/filters.rb +37 -0
- data/spec/support/spec_helpers/input_helpers.rb +23 -0
- data/spec/support/spec_helpers/memory_file_wrapper.rb +31 -0
- data/spec/support/spec_helpers/memory_manifest.rb +19 -0
- data/tools/perfs +101 -0
- metadata +215 -0
data/lib/rake-pipeline/cli.rb
@@ -0,0 +1,56 @@
require "thor"

module Rake
  class Pipeline
    class CLI < Thor
      class_option :assetfile, :default => "Assetfile", :aliases => "-c"
      default_task :build

      desc "build", "Build the project."
      method_option :pretend, :type => :boolean, :aliases => "-p"
      method_option :clean, :type => :boolean, :aliases => "-C"
      def build
        if options[:pretend]
          project.output_files.each do |file|
            say_status :create, relative_path(file)
          end
        else
          options[:clean] ? project.clean : project.cleanup_tmpdir
          project.invoke
        end
      end

      desc "clean", "Remove the pipeline's temporary and output files."
      method_option :pretend, :type => :boolean, :aliases => "-p"
      def clean
        if options[:pretend]
          project.files_to_clean.each do |file|
            say_status :remove, relative_path(file)
          end
        else
          project.clean
        end
      end

      desc "server", "Run the Rake::Pipeline preview server."
      def server
        require "rake-pipeline/server"
        Rake::Pipeline::Server.new.start
      end

      private
      def project
        @project ||= Rake::Pipeline::Project.new(options[:assetfile])
      end

      # @param [FileWrapper|String] path
      # @return [String] The path to the file with the current
      #   directory stripped out.
      def relative_path(path)
        pathstr = path.respond_to?(:fullpath) ? path.fullpath : path
        pathstr.sub(%r|#{Dir.pwd}/|, '')
      end
    end
  end
end
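Because CLI subclasses Thor, the same commands exposed through the bundled rakep binary can also be driven programmatically via Thor's standard start method. The sketch below is not part of the gem; it assumes an Assetfile exists in the current directory (the class_option default) and that both requires resolve as shown.

require "rake-pipeline"
require "rake-pipeline/cli"

# Equivalent to `rakep build --pretend`: list the output files that
# would be generated without actually building them.
Rake::Pipeline::CLI.start(%w[build --pretend])

# Equivalent to `rakep clean`: remove temporary and output files.
Rake::Pipeline::CLI.start(%w[clean])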
data/lib/rake-pipeline/dsl/pipeline_dsl.rb
@@ -0,0 +1,246 @@
module Rake
  class Pipeline
    module DSL
      # This class is used by {ProjectDSL} to provide a convenient DSL for
      # configuring a pipeline.
      #
      # All instance methods of {PipelineDSL} are available in the context
      # of the block passed to +Rake::Pipeline.+{Pipeline.build}.
      class PipelineDSL
        # @return [Pipeline] the pipeline the DSL should configure
        attr_reader :pipeline

        # Configure a pipeline with a passed in block.
        #
        # @param [Pipeline] pipeline the pipeline that the PipelineDSL
        #   should configure.
        # @param [Proc] block the block describing the
        #   configuration. This block will be evaluated in
        #   the context of a new instance of {PipelineDSL}
        # @return [void]
        def self.evaluate(pipeline, options, &block)
          dsl = new(pipeline)

          # If any before filters, apply them to the pipeline.
          # They will be run in reverse of insertion order.
          if before_filters = options[:before_filters]
            before_filters.each do |klass, args, block|
              dsl.filter klass, *args, &block
            end
          end

          # Evaluate the block in the context of the DSL.
          dsl.instance_eval(&block)

          # If any after filters, apply them to the pipeline.
          # They will be run in insertion order.
          if after_filters = options[:after_filters]
            after_filters.each do |klass, args, block|
              dsl.filter klass, *args, &block
            end
          end

          # the FinalizingFilter should always come after all
          # user specified after filters
          pipeline.finalize
        end

        # Create a new {PipelineDSL} to configure a pipeline.
        #
        # @param [Pipeline] pipeline the pipeline that the PipelineDSL
        #   should configure.
        # @return [void]
        def initialize(pipeline)
          @pipeline = pipeline
        end

        # Add an input location and files to a pipeline.
        #
        # @example
        #   !!!ruby
        #   Rake::Pipeline::Project.build do
        #     input "app" do
        #       input "assets", "**/*.js"
        #       # ...
        #     end
        #   end
        #
        # @param [String] root the root path where the pipeline
        #   should find its input files.
        # @param [String] glob a file pattern that represents
        #   the list of files that the pipeline should
        #   process within +root+. The default is +"**/*"+.
        # @return [void]
        def input(root, glob="**/*")
          pipeline.add_input root, glob
        end

        # Add a filter to the pipeline.
        #
        # In addition to a filter class, {#filter} takes a
        # block that describes how the filter should map
        # input files to output files.
        #
        # By default, the block maps an input file into
        # an output file with the same name.
        #
        # Any additional arguments passed to {#filter} will
        # be passed on to the filter class's constructor.
        #
        # @see Filter#outputs Filter#output (for an example
        #   of how a list of input files gets mapped to
        #   its outputs)
        #
        # @param [Class] filter_class the class of the filter.
        # @param [Array] ctor_args a list of arguments to pass
        #   to the filter's constructor.
        # @param [Proc] block an output file name generator.
        # @return [void]
        def filter(filter_class, *ctor_args, &block)
          filter = filter_class.new(*ctor_args, &block)
          pipeline.add_filter(filter)
        end

        # Apply a number of filters, but only to files matching
        # a particular pattern.
        #
        # Inside the block passed to {#match match}, you may
        # specify any number of filters that should be applied
        # to files matching the pattern.
        #
        # @param [String] pattern a glob pattern to match
        # @param [Proc] block a block that supplies filters
        # @return [Matcher]
        #
        # @example
        #   !!!ruby
        #   Rake::Pipeline::Project.build do
        #     output "public"
        #
        #     input "app/assets" do
        #       # compile coffee files into JS files
        #       match "*.coffee" do
        #         coffee_script
        #       end
        #
        #       # because the previous step converted coffee
        #       # into JS, the coffee files will be included here
        #       match "*.js" do
        #         uglify
        #         concat "application.js"
        #       end
        #     end
        #   end
        def match(pattern, &block)
          matcher = pipeline.copy(Matcher, &block)
          matcher.glob = pattern
          pipeline.add_filter matcher
          matcher
        end

        # Reject files matching a pattern or block. You may specify
        # a glob or a block.
        #
        # @param [String] pattern a glob pattern to match
        # @param [Proc] block a block used to evaluate each file. Returning
        #   true will skip that file.
        # @return [RejectMatcher]
        #
        # @example
        #   !!!ruby
        #   Rake::Pipeline::Project.build do
        #     output "public"
        #
        #     input "app/assets" do
        #       # reject everything matching *.min
        #       reject "*.min"
        #     end
        #
        #     input "app/javascripts" do
        #       reject do |file|
        #         # process the file here
        #       end
        #     end
        #   end
        def reject(pattern = '', &block)
          matcher = pipeline.copy(RejectMatcher)
          matcher.glob = pattern
          matcher.block = block
          pipeline.add_filter matcher
          matcher
        end
        alias_method :exclude, :reject
        alias_method :skip, :reject

        # Apply filters in a sorted fashion. Use this when you need
        # something other than file name ordering.
        #
        # @param [Proc] block used to sort inputs
        # @return [SortedPipeline]
        #
        # @example
        #   !!!ruby
        #   Rake::Pipeline::Project.build do
        #     match "*.js" do
        #       # inputs will be sorted according to the block and
        #       # passed to concat
        #       sort do |f1, f2|
        #         # reverse the inputs
        #         f2.fullpath <=> f1.fullpath
        #       end
        #       concat "application.js"
        #     end
        #   end
        def sort(&block)
          sorter = pipeline.copy(SortedPipeline)
          sorter.comparator = block
          pipeline.add_filter sorter
          sorter
        end

        # Specify the output directory for the pipeline.
        #
        # @param [String] root the output directory.
        # @return [void]
        def output(root)
          pipeline.output_root = root
        end

        # A helper method for adding a concat filter to
        # the pipeline.
        # If the first argument is an Array, it adds a new
        # {OrderingConcatFilter}, otherwise it adds a new
        # {ConcatFilter}.
        #
        # @see OrderingConcatFilter#initialize
        # @see ConcatFilter#initialize
        def concat(*args, &block)
          if args.first.kind_of?(Array)
            filter(Rake::Pipeline::OrderingConcatFilter, *args, &block)
          else
            filter(Rake::Pipeline::ConcatFilter, *args, &block)
          end
        end
        alias_method :copy, :concat

        # A helper method for adding a gsub filter to the pipeline.
        #
        # @see GsubFilter#initialize
        def gsub(*args, &block)
          filter(Rake::Pipeline::GsubFilter, *args, &block)
        end
        alias_method :replace, :gsub

        # A helper method like gsub, but removes everything
        # specified by the matcher. The matcher is the first argument
        # passed to String#gsub
        #
        # @see String#gsub
        def strip(matcher)
          filter(Rake::Pipeline::GsubFilter, matcher, '')
        end
      end
    end
  end
end
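Taken together, the helpers above account for most of what appears in a typical Assetfile. The sketch below is illustrative only (hypothetical paths, built-in filters only) and combines reject, match, strip, sort, and concat/copy in one pipeline block.

# Assetfile
output "public"

input "app/assets" do
  # never process pre-minified sources
  reject "*.min.js"

  match "*.js" do
    # drop trailing whitespace via the gsub-based strip helper
    strip(/[ \t]+$/)

    # concatenate in reverse path order instead of the default name order
    sort do |f1, f2|
      f2.fullpath <=> f1.fullpath
    end
    concat "application.js"
  end

  # pass stylesheets through unchanged
  match "*.css" do
    copy
  end
end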
data/lib/rake-pipeline/dsl/project_dsl.rb
@@ -0,0 +1,108 @@
module Rake
  class Pipeline
    module DSL
      # This class exists purely to provide a convenient DSL for
      # configuring a project.
      #
      # All instance methods of {ProjectDSL} are available in the context
      # of the block passed to +Rake::Pipeline::Project.+{Project.build}.
      #
      # When configuring a project, you *must* provide an output root
      # and a series of files using at least one {#input} block.
      class ProjectDSL
        # @return [Project] the project the DSL should configure
        attr_reader :project

        # Configure a project with a passed in block.
        #
        # @param [Project] project the project that the ProjectDSL
        #   should configure.
        # @param [Proc] block the block describing the
        #   configuration. This block will be evaluated in
        #   the context of a new instance of {ProjectDSL}
        # @return [void]
        def self.evaluate(project, &block)
          new(project).instance_eval(&block)
        end

        # Create a new {ProjectDSL} to configure a project.
        #
        # @param [Project] project
        #   the project that the ProjectDSL should configure.
        # @return [void]
        def initialize(project)
          @project = project
          @before_filters = []
          @after_filters = []
          @project.before_filters = @before_filters
          @project.after_filters = @after_filters
        end

        # Add a filter to every input block. The parameters
        # to +before_filter+ are the same as the parameters
        # to {PipelineDSL#filter}.
        #
        # Filters will be executed before the specified
        # filters in reverse of insertion order.
        #
        # @see {PipelineDSL#filter}
        def before_filter(klass, *args, &block)
          @before_filters.unshift [klass, args, block]
        end

        # Add a filter to every input block. The parameters
        # to +after_filter+ are the same as the parameters
        # to {PipelineDSL#filter}.
        #
        # Filters will be executed after the specified
        # filters in insertion order.
        #
        # @see {PipelineDSL#filter}
        def after_filter(klass, *args, &block)
          @after_filters.push [klass, args, block]
        end

        # Specify the default output directory for the project.
        #
        # Pipelines created in this project will place their
        # outputs here unless the value is overridden in their
        # {#input} block.
        #
        # @param [String] root the output directory.
        # @return [void]
        def output(root)
          project.default_output_root = root
        end

        # Specify the location of the root temporary directory.
        #
        # Pipelines will store intermediate build artifacts
        # in a subdirectory of this directory.
        #
        # This defaults to "tmp" in the current working directory.
        #
        # @param [String] root the temporary directory
        # @return [void]
        def tmpdir(root)
          project.tmpdir = root
        end

        # Add a new pipeline with the given inputs to the project.
        #
        # @see Project.build_pipeline
        def input(*inputs, &block)
          # Allow pipelines without a specified block. This is possible
          # if before and after filters are all that are needed for a
          # given input.
          block = proc {} unless block_given?
          project.build_pipeline(*inputs, &block)
        end
        alias inputs input

        def map(path, &block)
          project.maps[path] = block
        end
      end
    end
  end
end
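A sketch of the project-level directives in an Assetfile, using only filters bundled with the gem; the paths and replacement strings are hypothetical. The before_filter runs ahead of every input block's own filters, the after_filter runs after them, and tmpdir relocates intermediate build artifacts.

# Assetfile
tmpdir "tmp/rakep"
output "public"

# normalize line endings before any per-input filters run
before_filter Rake::Pipeline::GsubFilter, "\r\n", "\n"

# bundle each pipeline's results into one file afterwards
after_filter Rake::Pipeline::ConcatFilter, "bundle.js"

# no block needed: the before/after filters are the whole pipeline here
input "app/javascripts"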
data/lib/rake-pipeline/dynamic_file_task.rb
@@ -0,0 +1,194 @@
module Rake
  class Pipeline
    # This class extends Rake's {Rake::FileTask} class to add support
    # for dynamic dependencies. Typically, Rake handles static dependencies,
    # where a file's dependencies are known before the task is invoked.
    # A {DynamicFileTask} also supports dynamic dependencies, meaning the
    # file's dependencies can be determined just before invoking the task.
    # Because calculating a file's dependencies at runtime may be an expensive
    # operation (it could involve reading the file from disk and parsing it
    # to extract dependency information, for example), the results of this
    # calculation are stored on disk in a manifest file, and reused on
    # subsequent runs if possible.
    #
    # For example, consider this file app.c:
    #
    #     #include "app.h"
    #     some_stuff();
    #
    # If we have a task that compiles app.c into app.o, it needs to
    # process app.c to look for additional dependencies specified
    # by the file itself.
    class DynamicFileTask < Rake::FileTask
      class ManifestRequired < StandardError
        def to_s
          "DynamicFileTasks cannot be invoked without a manifest."
        end
      end

      attr_accessor :manifest, :last_manifest

      # @return [Boolean] true if the task has a block to invoke
      #   for dynamic dependencies, false otherwise.
      def has_dynamic_block?
        !!@dynamic
      end

      # @return [ManifestEntry] the manifest entry from the current
      #   manifest. This is the entry that will be written to disk after
      #   the task runs.
      def manifest_entry
        manifest[name]
      end

      # Set the current manifest entry.
      #
      # @param [ManifestEntry] new_entry
      # @return [ManifestEntry]
      def manifest_entry=(new_entry)
        manifest[name] = new_entry
      end

      def last_manifest_entry
        last_manifest[name]
      end

      # Invoke this task. This method only checks to see if there
      # is a manifest, then delegates to super.
      def invoke(*args)
        raise ManifestRequired if has_dynamic_block? && !manifest
        super
      end

      # In addition to the regular FileTask check, a DynamicFileTask
      # should be invoked when any of its prerequisites are required,
      # there is no manifest, or its dependencies are out of date.
      #
      # @return [Boolean]
      def needed?
        return true if super

        return true if prerequisites_needed?

        # if we have no manifest, this file task is needed
        return true unless last_manifest_entry

        # If any of this task's dynamic dependencies have changed,
        # this file task is needed
        last_manifest_entry.deps.each do |dep, time|
          return true if File.mtime(dep).to_i > time
        end

        # Otherwise, it's not needed
        false
      end

      # Add a block that will return dynamic dependencies. This
      # block can assume that all static dependencies are up
      # to date.
      #
      # @return [DynamicFileTask] self
      def dynamic(&block)
        @dynamic = block
        self
      end

      # Invoke the task's dynamic block.
      def invoke_dynamic_block
        @dynamic.call(self)
      end

      # At runtime, we will call this to get dynamic prerequisites.
      #
      # @return [Array<String>] an array of paths to the task's
      #   dynamic dependencies.
      def dynamic_prerequisites
        @dynamic_prerequisites ||= begin
          dynamics = if has_dynamic_block?
            dynamic_prerequisites_from_manifest || invoke_dynamic_block
          else
            []
          end

          # Make sure we don't dynamically depend on ourselves, as
          # that will create a circular reference, and that makes
          # everybody sad.
          dynamics.reject { |x| x == name }
        end
      end

      # Override rake's invoke_prerequisites method to invoke
      # static prerequisites and then any dynamic prerequisites.
      def invoke_prerequisites(task_args, invocation_chain)
        super

        raise ManifestRequired if has_dynamic_block? && !manifest

        # Retrieve the dynamic prerequisites. If all goes well,
        # we will not have to invoke the dynamic block to do this.
        dynamics = dynamic_prerequisites

        # invoke dynamic prerequisites just as we would invoke
        # static prerequisites.
        dynamics.each do |prereq|
          task = lookup_prerequisite(prereq)
          prereq_args = task_args.new_scope(task.arg_names)
          task.invoke_with_call_chain(prereq_args, invocation_chain)
        end

        # Create a new manifest entry for each dynamic dependency.
        # When the pipeline finishes, these manifest entries will be written
        # to the file system.
        entry = Rake::Pipeline::ManifestEntry.new

        dynamics.each do |dynamic|
          entry.deps.merge!(dynamic => mtime_or_now(dynamic).to_i)
        end

        self.manifest_entry = entry
      end

      # After invoking a task, add the mtime of the task's output
      # to its current manifest entry.
      def invoke_with_call_chain(*)
        super

        manifest_entry.mtime = mtime_or_now(name).to_i
      end

      private
      # @return the mtime of the given file if it exists, and
      #   the current time otherwise.
      def mtime_or_now(filename)
        File.file?(filename) ? File.mtime(filename) : Time.now
      end

      # @return [Array<String>] a list of file paths that this
      #   task depends on.
      # @return [nil] if the dependencies couldn't be read
      #   from the manifest.
      def dynamic_prerequisites_from_manifest
        # Try to avoid invoking the dynamic block if this file
        # is not needed. If so, we may have all the information
        # we need in the manifest file.
        if !needed? && last_manifest_entry
          mtime = last_manifest_entry.mtime
        end

        # If the output file of this task still exists and
        # it hasn't been updated, we can simply return the
        # list of dependencies in the manifest, which
        # come from the return value of the dynamic block
        # in a previous run.
        if File.exist?(name) && mtime == File.mtime(name).to_i
          return last_manifest_entry.deps.map { |k, v| k }
        end
      end

      def prerequisites_needed?
        prerequisite_tasks.any? { |n| n.needed? }
      end
    end
  end
end
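The following sketch (not part of the gem) wires the pieces above together for the app.c example from the class comment. It assumes app.c and any headers it references exist on disk and that a C compiler named cc is on the PATH; a plain Hash stands in for the manifest because the task only needs [] and []= on it, whereas the real Rake::Pipeline::Manifest also persists entries between runs.

require "rake"
require "rake-pipeline"

# A file task that compiles app.c into app.o.
task = Rake::Pipeline::DynamicFileTask.define_task("app.o" => "app.c") do |t|
  system("cc", "-c", "-o", t.name, t.prerequisites.first)
end

# The dynamic block runs once static prerequisites are up to date and
# returns the extra dependencies discovered by reading the source itself.
task.dynamic do |t|
  File.read(t.prerequisites.first).scan(/^#include "(.+)"$/).flatten
end

# invoke raises ManifestRequired unless a manifest is attached.
task.manifest      = {}
task.last_manifest = {}
task.invoke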