rake-pipeline-fork 0.8.0
- checksums.yaml +15 -0
- data/.gitignore +18 -0
- data/.rspec +1 -0
- data/.travis.yml +12 -0
- data/.yardopts +2 -0
- data/GETTING_STARTED.md +268 -0
- data/Gemfile +14 -0
- data/LICENSE +20 -0
- data/README.markdown +11 -0
- data/README.yard +178 -0
- data/Rakefile +21 -0
- data/bin/rakep +4 -0
- data/examples/copying_files.md +12 -0
- data/examples/minifying_files.md +37 -0
- data/examples/modifying_pipelines.md +67 -0
- data/examples/multiple_pipelines.md +77 -0
- data/lib/generators/rake/pipeline/install/install_generator.rb +70 -0
- data/lib/rake-pipeline.rb +462 -0
- data/lib/rake-pipeline/cli.rb +56 -0
- data/lib/rake-pipeline/dsl.rb +9 -0
- data/lib/rake-pipeline/dsl/pipeline_dsl.rb +246 -0
- data/lib/rake-pipeline/dsl/project_dsl.rb +108 -0
- data/lib/rake-pipeline/dynamic_file_task.rb +194 -0
- data/lib/rake-pipeline/error.rb +17 -0
- data/lib/rake-pipeline/file_wrapper.rb +182 -0
- data/lib/rake-pipeline/filter.rb +249 -0
- data/lib/rake-pipeline/filters.rb +4 -0
- data/lib/rake-pipeline/filters/concat_filter.rb +63 -0
- data/lib/rake-pipeline/filters/gsub_filter.rb +56 -0
- data/lib/rake-pipeline/filters/ordering_concat_filter.rb +38 -0
- data/lib/rake-pipeline/filters/pipeline_finalizing_filter.rb +21 -0
- data/lib/rake-pipeline/graph.rb +178 -0
- data/lib/rake-pipeline/manifest.rb +86 -0
- data/lib/rake-pipeline/manifest_entry.rb +34 -0
- data/lib/rake-pipeline/matcher.rb +141 -0
- data/lib/rake-pipeline/middleware.rb +72 -0
- data/lib/rake-pipeline/precompile.rake +8 -0
- data/lib/rake-pipeline/project.rb +335 -0
- data/lib/rake-pipeline/rails_plugin.rb +10 -0
- data/lib/rake-pipeline/railtie.rb +34 -0
- data/lib/rake-pipeline/reject_matcher.rb +29 -0
- data/lib/rake-pipeline/server.rb +15 -0
- data/lib/rake-pipeline/sorted_pipeline.rb +19 -0
- data/lib/rake-pipeline/version.rb +6 -0
- data/rails/init.rb +2 -0
- data/rake-pipeline.gemspec +24 -0
- data/spec/cli_spec.rb +71 -0
- data/spec/concat_filter_spec.rb +37 -0
- data/spec/dsl/pipeline_dsl_spec.rb +165 -0
- data/spec/dsl/project_dsl_spec.rb +41 -0
- data/spec/dynamic_file_task_spec.rb +119 -0
- data/spec/encoding_spec.rb +106 -0
- data/spec/file_wrapper_spec.rb +132 -0
- data/spec/filter_spec.rb +332 -0
- data/spec/graph_spec.rb +56 -0
- data/spec/gsub_filter_spec.rb +87 -0
- data/spec/manifest_entry_spec.rb +46 -0
- data/spec/manifest_spec.rb +67 -0
- data/spec/matcher_spec.rb +141 -0
- data/spec/middleware_spec.rb +199 -0
- data/spec/ordering_concat_filter_spec.rb +42 -0
- data/spec/pipeline_spec.rb +232 -0
- data/spec/project_spec.rb +295 -0
- data/spec/rake_acceptance_spec.rb +738 -0
- data/spec/rake_tasks_spec.rb +21 -0
- data/spec/reject_matcher_spec.rb +31 -0
- data/spec/sorted_pipeline_spec.rb +27 -0
- data/spec/spec_helper.rb +38 -0
- data/spec/support/spec_helpers/file_utils.rb +35 -0
- data/spec/support/spec_helpers/filters.rb +37 -0
- data/spec/support/spec_helpers/input_helpers.rb +23 -0
- data/spec/support/spec_helpers/memory_file_wrapper.rb +31 -0
- data/spec/support/spec_helpers/memory_manifest.rb +19 -0
- data/tools/perfs +101 -0
- metadata +215 -0
data/lib/rake-pipeline/manifest_entry.rb
@@ -0,0 +1,34 @@
+module Rake
+  class Pipeline
+    # Represents a single entry in a dynamic dependency {Manifest}.
+    class ManifestEntry
+      # Create a new entry from the given hash.
+      def self.from_hash(hash)
+        entry = new
+
+        entry.mtime = hash["mtime"]
+
+        hash["deps"].each do |dep, time_string|
+          entry.deps[dep] = time_string
+        end
+
+        entry
+      end
+
+      attr_accessor :deps, :mtime
+
+      def initialize(deps={}, mtime=nil)
+        @deps, @mtime = deps, mtime
+      end
+
+      def as_json
+        { :deps => @deps, :mtime => @mtime }
+      end
+
+      def ==(other)
+        mtime == other.mtime &&
+          deps == other.deps
+      end
+    end
+  end
+end
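
For orientation, here is a minimal usage sketch of the class above. It assumes only that the file is loadable from the gem's lib path shown in the file list; the mtime values and dependency paths are made up.

    require "rake-pipeline/manifest_entry"

    # Build an entry from a hash shaped the way from_hash reads it
    # (string keys, as stored in the dependency manifest).
    entry = Rake::Pipeline::ManifestEntry.from_hash(
      "mtime" => 1360000000,
      "deps"  => { "app/assets/a.js" => 1360000000 }
    )

    entry.mtime                    # => 1360000000
    entry.deps["app/assets/a.js"]  # => 1360000000

    # as_json returns the symbol-keyed form used when the manifest is written out.
    entry.as_json                  # => { :deps => { "app/assets/a.js" => 1360000000 }, :mtime => 1360000000 }

Equality only compares mtime and deps, so two entries built from the same hash compare equal.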
data/lib/rake-pipeline/matcher.rb
@@ -0,0 +1,141 @@
+require "strscan"
+
+module Rake
+  class Pipeline
+    # A Matcher is a type of pipeline that restricts its
+    # filters to a particular pattern.
+    #
+    # A Matcher's pattern is a File glob.
+    #
+    # For instance, to restrict filters to operating on
+    # JavaScript files in the +app+ directory, the Matcher's
+    # {Pipeline#inputs inputs} should include +"app"+,
+    # and its glob would be +"*.js"+.
+    #
+    # In general, you should not use Matcher directly. Instead use
+    # {DSL#match} in the block passed to {Pipeline.build}.
+    class Matcher < Pipeline
+      attr_reader :glob
+
+      # @return [Rake::Pipeline] the Rake::Pipeline that contains
+      #   this matcher.
+      attr_accessor :pipeline
+
+      # A glob matcher that a filter's input files must match
+      # in order to be processed by the filter.
+      #
+      # @return [String]
+      def glob=(pattern)
+        @glob = pattern
+        if pattern.kind_of?(Regexp)
+          @pattern = pattern
+        else
+          @pattern = scan_string
+        end
+      end
+
+      # A list of the output files that invoking this pipeline will
+      # generate. This will include the outputs of files matching
+      # the {#glob glob} and any inputs that did not match the
+      # glob.
+      #
+      # This will make those inputs available to any additional
+      # filters or matchers.
+      #
+      # @return [Array<FileWrapper>]
+      def output_files
+        super + input_files.reject do |file|
+          file.path =~ @pattern
+        end
+      end
+
+      # Override {Pipeline#finalize} to do nothing. We want to pass
+      # on our unmatched inputs to the next part of the pipeline.
+      #
+      # @return [void]
+      # @api private
+      def finalize
+      end
+
+    protected
+      # Let our containing pipeline generate temp directories for us.
+      def generate_tmpdir
+        pipeline.generate_tmpdir
+      end
+
+    private
+      # Override the default {Pipeline#eligible_input_files}
+      # to include only files that match the {#glob glob}.
+      #
+      # @return [Array<FileWrapper>]
+      def eligible_input_files
+        input_files.select do |file|
+          file.path =~ @pattern
+        end
+      end
+
+      # Convert string to regexp using StringScanner
+      #
+      # @return [Regexp]
+      def scan_string
+        scanner = StringScanner.new(glob)
+
+        output, pos = "", 0
+
+        # keep scanning until end of String
+        until scanner.eos?
+
+          # look for **/, *, {...}, or the end of the string
+          new_chars = scanner.scan_until %r{
+              \*\*/
+            | /\*\*/
+            | \*
+            | \{[^\}]*\}
+            | $
+          }x
+
+          # get the new part of the string up to the match
+          before = new_chars[0, new_chars.size - scanner.matched_size]
+
+          # get the match and new position
+          match = scanner.matched
+          pos = scanner.pos
+
+          # add any literal characters to the output
+          output << Regexp.escape(before) if before
+
+          output << case match
+          when "/**/"
+            # /**/ matches either a "/" followed by any number
+            # of characters or a single "/"
+            "(/.*|/)"
+          when "**/"
+            # **/ matches the beginning of the path or
+            # any number of characters followed by a "/"
+            "(^|.*/)"
+          when "*"
+            # * matches any number of non-"/" characters
+            "[^/]*"
+          when /\{.*\}/
+            # {...} is split over "," and glued back together
+            # as an or condition
+            "(" + match[1...-1].gsub(",", "|") + ")"
+          else String
+            # otherwise, we've grabbed until the end
+            match
+          end
+        end
+
+        if glob.include?("/")
+          # if the pattern includes a /, it must match the
+          # entire input, not just the end.
+          Regexp.new("^#{output}$", "i")
+        else
+          # anchor the pattern either at the beginning of the
+          # path or at any "/" character
+          Regexp.new("(^|/)#{output}$", "i")
+        end
+      end
+    end
+  end
+end
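
As the class comment says, a Matcher is normally created through the DSL rather than instantiated directly. The following Assetfile-style sketch is illustrative only; the paths and the concat filter are borrowed from the examples elsewhere in this gem.

    # Assetfile (sketch)
    output "public"

    input "app/assets" do
      # `match` wraps the enclosed filters in a Matcher restricted to the
      # glob; inputs that don't match fall through untouched, which is what
      # Matcher#output_files implements above.
      match "**/*.js" do
        concat "application.js"
      end
    end

Reading scan_string, a glob such as "**/*.js" compiles to roughly /^(^|.*\/)[^\/]*\.js$/i, so it matches both "app.js" at the top of an input root and nested paths like "lib/app.js".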
data/lib/rake-pipeline/middleware.rb
@@ -0,0 +1,72 @@
+require "rack"
+
+module Rake
+  class Pipeline
+    # This middleware is used to provide a server that will continuously
+    # compile your files on demand.
+    #
+    # @example
+    #   !!!ruby
+    #   use Rake::Pipeline::Middleware, Rake::Pipeline.build {
+    #     input "app/assets"
+    #     output "public"
+    #
+    #     ...
+    #   }
+    class Middleware
+      attr_accessor :project
+
+      # @param [#call] app a Rack application
+      # @param [Rake::Pipeline::Project] an existing project
+      def initialize(app, project)
+        @app = app
+        @project = project
+      end
+
+      # Automatically compiles your assets if required and
+      # serves them up.
+      #
+      # @param [Hash] env a Rack environment
+      # @return [Array(Fixnum, Hash, #each)] A rack response
+      def call(env)
+        project.invoke
+        path = env["PATH_INFO"]
+
+        if project.maps.has_key?(path)
+          return project.maps[path].call(env)
+        end
+
+        if filename = file_for(path)
+          if File.directory?(filename)
+            index = File.join(filename, "index.html")
+            filename = File.file?(index) ? index : nil
+          end
+
+          if filename
+            return response_for(filename)
+          end
+        end
+
+        @app.call(env)
+      end
+
+    private
+      def response_for(file)
+        [ 200, headers_for(file), File.open(file, "r") ]
+      end
+
+      def file_for(path)
+        project.pipelines.each do |pipeline|
+          file = Dir[File.join(pipeline.output_root, path)].sort.first
+          return file unless file.nil?
+        end
+        nil
+      end
+
+      def headers_for(path)
+        mime = Rack::Mime.mime_type(File.extname(path), "text/plain")
+        { "Content-Type" => mime }
+      end
+    end
+  end
+end
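
A hedged config.ru sketch for the middleware above. Note that #call relies on Project methods (maps, pipelines, invoke), so the sketch hands it a Rake::Pipeline::Project built from an Assetfile rather than the bare pipeline shown in the @example; the Assetfile path and the fallback app are placeholders.

    # config.ru (sketch)
    require "rake-pipeline"             # assumed to load Rake::Pipeline::Project
    require "rake-pipeline/middleware"

    # Recompiles any dirty assets on each request, then serves the result.
    use Rake::Pipeline::Middleware, Rake::Pipeline::Project.new("Assetfile")

    # Requests the pipeline doesn't satisfy fall through to this app.
    run lambda { |env| [404, { "Content-Type" => "text/plain" }, ["Not Found"]] }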
data/lib/rake-pipeline/project.rb
@@ -0,0 +1,335 @@
+require "digest"
+
+module Rake
+  class Pipeline
+    # A Project controls the lifecycle of a series of Pipelines,
+    # creating them from an Assetfile and recreating them if the
+    # Assetfile changes.
+    class Project
+      # @return [Pipeline] the list of pipelines in the project
+      attr_reader :pipelines
+
+      attr_reader :maps
+
+      # @return [String|nil] the path to the project's Assetfile
+      #   or nil if it was created without an Assetfile.
+      attr_reader :assetfile_path
+
+      # @return [String|nil] the digest of the Assetfile the
+      #   project was created with, or nil if the project
+      #   was created without an Assetfile.
+      attr_reader :assetfile_digest
+
+      # @return [String] the directory path for temporary files
+      attr_reader :tmpdir
+
+      # @return [String] the directory path where pipelines will
+      #   write their outputs by default
+      attr_reader :default_output_root
+
+      # @return [Array] a list of filters to be applied before
+      #   the specified filters in every pipeline
+      attr_writer :before_filters
+
+      # @return [Array] a list of filters to be applied after
+      #   the specified filters in every pipeline
+      attr_writer :after_filters
+
+      class << self
+        # Configure a new project by evaluating a block with the
+        # Rake::Pipeline::DSL::ProjectDSL class.
+        #
+        # @see Rake::Pipeline::Filter Rake::Pipeline::Filter
+        #
+        # @example
+        #   Rake::Pipeline::Project.build do
+        #     tmpdir "tmp"
+        #     output "public"
+        #
+        #     input "app/assets" do
+        #       concat "app.js"
+        #     end
+        #   end
+        #
+        # @return [Rake::Pipeline::Project] the newly configured project
+        def build(&block)
+          project = new
+          project.build(&block)
+        end
+
+        # @return [Array[String]] an array of strings that will be
+        #   appended to {#digested_tmpdir}.
+        def digest_additions
+          @digest_additions ||= []
+        end
+
+        # Set {.digest_additions} to a sorted copy of the given array.
+        def digest_additions=(additions)
+          @digest_additions = additions.sort
+        end
+
+        # Add a value to the list of strings to append to the digest
+        # temp directory. Libraries can use this to add (for example)
+        # their version numbers so that the pipeline will be rebuilt
+        # if the library version changes.
+        #
+        # @example
+        #   Rake::Pipeline::Project.add_to_digest(Rake::Pipeline::Web::Filters::VERSION)
+        #
+        # @param [#to_s] str a value to append to {#digested_tmpdir}.
+        def add_to_digest(str)
+          self.digest_additions << str.to_s
+          self.digest_additions.sort!
+        end
+      end
+
+      # @param [String|Pipeline] assetfile_or_pipeline
+      #   if this is a String, create a Pipeline from the Assetfile at
+      #   that path. If it's a Pipeline, just wrap that pipeline.
+      def initialize(assetfile_or_pipeline=nil)
+        reset!
+        if assetfile_or_pipeline.kind_of?(String)
+          @assetfile_path = File.expand_path(assetfile_or_pipeline)
+          rebuild_from_assetfile(@assetfile_path)
+        elsif assetfile_or_pipeline
+          @pipelines << assetfile_or_pipeline
+        end
+      end
+
+      # Evaluate a block using the Rake::Pipeline::DSL::ProjectDSL
+      # DSL against an existing project.
+      def build(&block)
+        DSL::ProjectDSL.evaluate(self, &block) if block
+        self
+      end
+
+      # Invoke all of the project's pipelines, detecting any changes
+      # to the Assetfile and rebuilding the pipelines if necessary.
+      #
+      # @return [void]
+      # @see Rake::Pipeline#invoke
+      def invoke
+        @invoke_mutex.synchronize do
+          last_manifest.read_manifest
+
+          if dirty?
+            rebuild_from_assetfile(assetfile_path) if assetfile_dirty?
+
+            # The temporary files have to be cleaned otherwise
+            # there will be a "ghost" input. Here's an example
+            # rake task: application.js => [a.js, b.js]. Deleting a.js
+            # will make application.js => [b.js]. The task correctly checks
+            # if b.js has changed (which it hasn't) and says that application.js
+            # is correct. Cleaning tmp files ensures that this doesn't happen.
+            clean if files_deleted?
+
+            pipelines.each(&:invoke)
+
+            manifest.write_manifest
+          end
+        end
+      end
+
+      # Remove the project's temporary and output files.
+      def clean
+        last_manifest.clear
+        files_to_clean.each { |file| FileUtils.rm_rf(file) }
+      end
+
+      # Clean out old tmp directories from the pipeline's
+      # {Rake::Pipeline#tmpdir}.
+      #
+      # @return [void]
+      def cleanup_tmpdir
+        obsolete_tmpdirs.each { |dir| FileUtils.rm_rf(dir) }
+      end
+
+      # Set the default output root of this project and expand its path.
+      #
+      # @param [String] root this pipeline's output root
+      def default_output_root=(root)
+        @default_output_root = File.expand_path(root)
+      end
+
+      # Set the temporary directory for this project and expand its path.
+      #
+      # @param [String] dir this project's temporary directory
+      def tmpdir=(dir)
+        @tmpdir = File.expand_path(dir)
+      end
+
+      # @return [String] A subdirectory of {#tmpdir} with the digest of
+      #   the Assetfile's contents and any {.digest_additions} in its
+      #   name.
+      def digested_tmpdir
+        suffix = assetfile_digest
+        unless self.class.digest_additions.empty?
+          suffix += "-#{self.class.digest_additions.join('-')}"
+        end
+        File.join(tmpdir, "rake-pipeline-#{suffix}")
+      end
+
+      # @return Array[String] a list of the paths to temporary directories
+      #   that don't match the pipeline's Assetfile digest.
+      def obsolete_tmpdirs
+        if File.directory?(tmpdir)
+          Dir["#{tmpdir}/rake-pipeline-*"].sort.reject do |dir|
+            dir == digested_tmpdir
+          end
+        else
+          []
+        end
+      end
+
+      # @return Array[String] a list of files to delete to completely clean
+      #   out a project's temporary and output files.
+      def files_to_clean
+        setup_pipelines
+        obsolete_tmpdirs + [digested_tmpdir] + output_files.map(&:fullpath)
+      end
+
+      # @return [Array[FileWrapper]] a list of the files that
+      #   will be generated when this project is invoked.
+      def output_files
+        setup_pipelines
+        pipelines.map(&:output_files).flatten
+      end
+
+      # Build a new pipeline and add it to our list of pipelines.
+      def build_pipeline(input, glob=nil, &block)
+        pipeline = Rake::Pipeline.build({
+          :before_filters => @before_filters,
+          :after_filters => @after_filters,
+          :output_root => default_output_root,
+          :tmpdir => digested_tmpdir,
+          :project => self
+        }, &block)
+
+        if input.kind_of?(Array)
+          input.each { |x| pipeline.add_input(x) }
+        elsif input.kind_of?(Hash)
+          pipeline.inputs = input
+        else
+          pipeline.add_input(input, glob)
+        end
+
+        @pipelines << pipeline
+        pipeline
+      end
+
+      # @return [Manifest] the manifest to write dependency information
+      #   to
+      def manifest
+        @manifest ||= Rake::Pipeline::Manifest.new(manifest_path)
+      end
+
+      # @return [Manifest] the manifest holding dependency information
+      #   from the previous run
+      def last_manifest
+        @last_manifest ||= Rake::Pipeline::Manifest.new(manifest_path)
+      end
+
+      # @return [String] the path to the dynamic dependency manifest
+      def manifest_path
+        File.join(digested_tmpdir, "manifest.json")
+      end
+
+    private
+      # Reset this project's internal state to the default values.
+      #
+      # @return [void]
+      def reset!
+        @pipelines = []
+        @maps = {}
+        @tmpdir = "tmp"
+        @invoke_mutex = Mutex.new
+        @default_output_root = @assetfile_digest = @assetfile_path = nil
+        @manifest = @last_manifest = nil
+      end
+
+      # Reconfigure this project based on the Assetfile at path.
+      #
+      # @param [String] path the path to the Assetfile
+      #   to use to configure the project.
+      # @param [String] source if given, this string is
+      #   evaluated instead of reading the file at assetfile_path.
+      #
+      # @return [void]
+      def rebuild_from_assetfile(path, source=nil)
+        reset!
+        source ||= File.read(path)
+        @assetfile_digest = digest(source)
+        @assetfile_path = path
+        build { instance_eval(source, path, 1) }
+      end
+
+      # Set up the pipelines so their output files will be up to date.
+      def setup_pipelines
+        pipelines.map(&:setup_filters)
+      end
+
+      # @return [String] the SHA1 digest of the given string.
+      def digest(str)
+        Digest::SHA1.hexdigest(str)
+      end
+
+      def dirty?
+        assetfile_dirty? || files_deleted? || files_dirty?
+      end
+
+      def assetfile_dirty?
+        if assetfile_path
+          source = File.read(assetfile_path)
+          digest(source) != assetfile_digest
+        else
+          false
+        end
+      end
+
+      # Returns true if any of these conditions are met:
+      # * The pipeline hasn't been invoked yet
+      # * The input files have changed
+      # * There is a new input file
+      def files_dirty?
+        return true if manifest.empty?
+
+        previous_files = manifest.files
+
+        # check for new files and for modifications to existing files
+        input_files.each do |input_file|
+          if !previous_files[input_file]
+            return true # there is a new file in the pipeline
+          elsif File.mtime(input_file).to_i != previous_files[input_file]
+            return true # existing file has been changed
+          end
+        end
+
+        false
+      end
+
+      def files_deleted?
+        last_manifest.files.each_key do |input_file|
+          return true if !File.exists?(input_file)
+        end
+
+        false
+      end
+
+      def input_files
+        static_input_files = pipelines.collect do |p|
+          p.input_files.reject { |file| file.in_directory? tmpdir }.map(&:fullpath)
+        end.flatten
+
+        dynamic_input_files = static_input_files.collect do |file|
+          if manifest[file]
+            manifest[file].deps.keys
+          else
+            nil
+          end
+        end.flatten.compact
+
+        static_input_files + dynamic_input_files
+      end
+    end
+  end
+end
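
Finally, a minimal sketch of the Project lifecycle defined above, assuming an Assetfile exists in the working directory and that the top-level require loads Project:

    require "rake-pipeline"

    project = Rake::Pipeline::Project.new("Assetfile")

    project.invoke           # rebuild anything dirty, guarded by the invoke mutex
    project.output_files     # => FileWrappers the pipelines will generate
    project.cleanup_tmpdir   # drop tmp dirs left over from older Assetfile digests
    project.clean            # remove the project's temporary and output files

Because digested_tmpdir embeds the SHA1 of the Assetfile (plus any digest_additions), editing the Assetfile switches the project to a fresh tmp directory on the next invoke, and cleanup_tmpdir can discard the stale ones.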