rake-pipeline-fork 0.8.0
- checksums.yaml +15 -0
- data/.gitignore +18 -0
- data/.rspec +1 -0
- data/.travis.yml +12 -0
- data/.yardopts +2 -0
- data/GETTING_STARTED.md +268 -0
- data/Gemfile +14 -0
- data/LICENSE +20 -0
- data/README.markdown +11 -0
- data/README.yard +178 -0
- data/Rakefile +21 -0
- data/bin/rakep +4 -0
- data/examples/copying_files.md +12 -0
- data/examples/minifying_files.md +37 -0
- data/examples/modifying_pipelines.md +67 -0
- data/examples/multiple_pipelines.md +77 -0
- data/lib/generators/rake/pipeline/install/install_generator.rb +70 -0
- data/lib/rake-pipeline.rb +462 -0
- data/lib/rake-pipeline/cli.rb +56 -0
- data/lib/rake-pipeline/dsl.rb +9 -0
- data/lib/rake-pipeline/dsl/pipeline_dsl.rb +246 -0
- data/lib/rake-pipeline/dsl/project_dsl.rb +108 -0
- data/lib/rake-pipeline/dynamic_file_task.rb +194 -0
- data/lib/rake-pipeline/error.rb +17 -0
- data/lib/rake-pipeline/file_wrapper.rb +182 -0
- data/lib/rake-pipeline/filter.rb +249 -0
- data/lib/rake-pipeline/filters.rb +4 -0
- data/lib/rake-pipeline/filters/concat_filter.rb +63 -0
- data/lib/rake-pipeline/filters/gsub_filter.rb +56 -0
- data/lib/rake-pipeline/filters/ordering_concat_filter.rb +38 -0
- data/lib/rake-pipeline/filters/pipeline_finalizing_filter.rb +21 -0
- data/lib/rake-pipeline/graph.rb +178 -0
- data/lib/rake-pipeline/manifest.rb +86 -0
- data/lib/rake-pipeline/manifest_entry.rb +34 -0
- data/lib/rake-pipeline/matcher.rb +141 -0
- data/lib/rake-pipeline/middleware.rb +72 -0
- data/lib/rake-pipeline/precompile.rake +8 -0
- data/lib/rake-pipeline/project.rb +335 -0
- data/lib/rake-pipeline/rails_plugin.rb +10 -0
- data/lib/rake-pipeline/railtie.rb +34 -0
- data/lib/rake-pipeline/reject_matcher.rb +29 -0
- data/lib/rake-pipeline/server.rb +15 -0
- data/lib/rake-pipeline/sorted_pipeline.rb +19 -0
- data/lib/rake-pipeline/version.rb +6 -0
- data/rails/init.rb +2 -0
- data/rake-pipeline.gemspec +24 -0
- data/spec/cli_spec.rb +71 -0
- data/spec/concat_filter_spec.rb +37 -0
- data/spec/dsl/pipeline_dsl_spec.rb +165 -0
- data/spec/dsl/project_dsl_spec.rb +41 -0
- data/spec/dynamic_file_task_spec.rb +119 -0
- data/spec/encoding_spec.rb +106 -0
- data/spec/file_wrapper_spec.rb +132 -0
- data/spec/filter_spec.rb +332 -0
- data/spec/graph_spec.rb +56 -0
- data/spec/gsub_filter_spec.rb +87 -0
- data/spec/manifest_entry_spec.rb +46 -0
- data/spec/manifest_spec.rb +67 -0
- data/spec/matcher_spec.rb +141 -0
- data/spec/middleware_spec.rb +199 -0
- data/spec/ordering_concat_filter_spec.rb +42 -0
- data/spec/pipeline_spec.rb +232 -0
- data/spec/project_spec.rb +295 -0
- data/spec/rake_acceptance_spec.rb +738 -0
- data/spec/rake_tasks_spec.rb +21 -0
- data/spec/reject_matcher_spec.rb +31 -0
- data/spec/sorted_pipeline_spec.rb +27 -0
- data/spec/spec_helper.rb +38 -0
- data/spec/support/spec_helpers/file_utils.rb +35 -0
- data/spec/support/spec_helpers/filters.rb +37 -0
- data/spec/support/spec_helpers/input_helpers.rb +23 -0
- data/spec/support/spec_helpers/memory_file_wrapper.rb +31 -0
- data/spec/support/spec_helpers/memory_manifest.rb +19 -0
- data/tools/perfs +101 -0
- metadata +215 -0
data/lib/rake-pipeline/filters/concat_filter.rb
@@ -0,0 +1,63 @@
module Rake
  class Pipeline
    # A built-in filter that simply accepts a series
    # of inputs and concatenates them into output files
    # based on the output file name generator.
    #
    # @example
    #   !!!ruby
    #   Pipeline.build do
    #     input "app/assets", "**/*.js"
    #     output "public"
    #
    #     # create a concatenated output file for each
    #     # directory of inputs.
    #     filter(Rake::Pipeline::ConcatFilter) do |input|
    #       # input files will look something like:
    #       #   javascripts/admin/main.js
    #       #   javascripts/admin/app.js
    #       #   javascripts/users/main.js
    #       #
    #       # and the outputs will look like:
    #       #   javascripts/admin.js
    #       #   javascripts/users.js
    #       directory = File.dirname(input)
    #       ext = File.extname(input)
    #
    #       "#{directory}#{ext}"
    #     end
    #   end
    class ConcatFilter < Rake::Pipeline::Filter
      # @param [String] string the name of the output file to
      #   concatenate inputs to.
      # @param [Proc] block a block to use as the Filter's
      #   {#output_name_generator}.
      def initialize(string=nil, &block)
        block = proc { string } if string
        super(&block)
      end

      # @method encoding
      #   @return [String] the String +"BINARY"+
      processes_binary_files

      # Implement the {#generate_output} method required by
      # the {Filter} API. In this case, simply loop through
      # the inputs and write their contents to the output.
      #
      # Recall that this method will be called once for each
      # unique output file.
      #
      # @param [Array<FileWrapper>] inputs an Array of
      #   {FileWrapper} objects representing the inputs to
      #   this filter.
      # @param [FileWrapper] output a single {FileWrapper} object
      #   representing the output.
      def generate_output(inputs, output)
        inputs.each do |input|
          output.write input.read
        end
      end
    end
  end
end
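The output-name generator in the ConcatFilter example maps each input path to one output file per directory. A minimal standalone sketch of that naming rule, using hypothetical input paths and plain Ruby (no pipeline required):

# Sketch: apply the same rule the example's block uses — directory name plus
# file extension — to a few illustrative input paths.
inputs = %w[javascripts/admin/main.js javascripts/admin/app.js javascripts/users/main.js]

outputs = inputs.map do |input|
  directory = File.dirname(input)
  ext = File.extname(input)
  "#{directory}#{ext}"
end

puts outputs.uniq.inspect  # => ["javascripts/admin.js", "javascripts/users.js"]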
data/lib/rake-pipeline/filters/gsub_filter.rb
@@ -0,0 +1,56 @@
module Rake
  class Pipeline
    # A built-in filter that applies String#gsub behavior.
    #
    # @example
    #   !!!ruby
    #   Pipeline.build do
    #     input "app/assets", "**/*.js"
    #     output "public"
    #
    #     # replace javascript comments
    #     filter(Rake::Pipeline::GsubFilter, /\/\/\w+$/, '')
    #   end
    class GsubFilter < Filter
      # Arguments mimic String#gsub with one notable exception.
      # String#gsub accepts a block where $1, $2, and friends are
      # accessible. Due to Ruby's scoping rules, those variables are
      # not accessible inside the block itself. Instead they
      # are passed in as additional arguments. Here's an example:
      #
      # @example
      #   !!!ruby
      #   Rake::Pipeline::GsubFilter.new /(\w+)\s(\w+)/ do |entire_match, capture1, capture2|
      #     # process the match
      #   end
      #
      # @see String#gsub
      def initialize(*args, &block)
        @args, @block = args, block
        super() { |input| input }
      end

      # Implement the {#generate_output} method required by
      # the {Filter} API. In this case, simply loop through
      # the inputs and write String#gsub content to the output.
      #
      # @param [Array<FileWrapper>] inputs an Array of
      #   {FileWrapper} objects representing the inputs to
      #   this filter.
      # @param [FileWrapper] output a single {FileWrapper} object
      #   representing the output.
      def generate_output(inputs, output)
        inputs.each do |input|
          if @block
            content = input.read.gsub(*@args) do |match|
              @block.call match, *$~.captures
            end
            output.write content
          else
            output.write input.read.gsub(*@args)
          end
        end
      end
    end
  end
end
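The capture-forwarding behavior described above can be sketched with plain String#gsub; the pattern and sample text below are illustrative only and not part of the gem:

text    = "hello world"
pattern = /(\w+)\s(\w+)/

# GsubFilter's block receives the entire match plus each capture as separate
# arguments; plain gsub only yields the whole match, so the filter re-reads
# the captures from $~ and passes them along, mirroring this:
result = text.gsub(pattern) do |match|
  entire_match, first, second = match, *$~.captures
  "#{second} #{first}"
end

puts result  # => "world hello"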
data/lib/rake-pipeline/filters/ordering_concat_filter.rb
@@ -0,0 +1,38 @@
class Rake::Pipeline
  # A filter that concatenates files in a specified order.
  #
  # @example
  #   !!!ruby
  #   Rake::Pipeline.build do
  #     input "app/assets", "**/*.js"
  #     output "public"
  #
  #     # Concat each file into libs.js but make sure
  #     # that jQuery and Ember come first.
  #     filter Rake::Pipeline::OrderingConcatFilter, ["jquery.js", "ember.js"], "libs.js"
  #   end
  class OrderingConcatFilter < ConcatFilter

    # @param [Array<String>] ordering an Array of Strings
    #   of file names that should come in the specified order
    # @param [String] string the name of the output file to
    #   concatenate inputs to.
    # @param [Proc] block a block to use as the Filter's
    #   {#output_name_generator}.
    def initialize(ordering, string=nil, &block)
      @ordering = ordering
      super(string, &block)
    end

    # Extend the {#generate_output} method supplied by {ConcatFilter}.
    # Re-orders the inputs such that the specified files come first.
    # Files not in the list come after the specified files.
    def generate_output(inputs, output)
      @ordering.reverse.each do |name|
        file = inputs.find { |i| i.path == name }
        inputs.unshift(inputs.delete(file)) if file
      end
      super
    end
  end
end
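The reordering step in generate_output can be illustrated on plain strings; the file names below are hypothetical and stand in for FileWrapper#path values:

ordering = ["jquery.js", "ember.js"]
inputs   = ["app.js", "ember.js", "jquery.js"]

# Walk the ordering back to front, moving each named file to the head of the
# list, so the final order matches the ordering array.
ordering.reverse.each do |name|
  file = inputs.find { |i| i == name }
  inputs.unshift(inputs.delete(file)) if file
end

puts inputs.inspect  # => ["jquery.js", "ember.js", "app.js"]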
data/lib/rake-pipeline/filters/pipeline_finalizing_filter.rb
@@ -0,0 +1,21 @@
require 'set'
module Rake
  class Pipeline
    # @private
    #
    # A built-in filter that copies a pipeline's generated files over
    # to its output.
    class PipelineFinalizingFilter < ConcatFilter

      # @return [Array<FileWrapper>] a list of the pipeline's
      #   output files, excluding any files that were originally
      #   inputs to the pipeline, meaning they weren't processed
      #   by any filter and should not be copied to the output.
      def input_files
        pipeline_input_files = Set.new pipeline.input_files

        Set.new(super) - pipeline_input_files
      end
    end
  end
end
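A small sketch of the set difference that input_files relies on, with illustrative path strings standing in for FileWrapper objects:

require 'set'

pipeline_inputs = Set.new(["app/assets/raw.txt"])
generated       = Set.new(["app/assets/raw.txt", "public/app.js"])

# Files that were original pipeline inputs are dropped; only generated files remain.
puts (generated - pipeline_inputs).to_a.inspect  # => ["public/app.js"]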
data/lib/rake-pipeline/graph.rb
@@ -0,0 +1,178 @@
require "set"

module Rake
  class Pipeline
    # The goal of this class is to make it easy to implement dynamic
    # dependencies in additional_dependencies without having to parse
    # all the files all of the time.
    #
    # To illustrate, imagine that we have two source files with the
    # following inline dependencies:
    #
    # * application.scss
    #   * _core.scss
    # * admin.scss
    #   * _admin.scss
    #
    # And further imagine that `_admin.scss` has an inline dependency
    # on `_core.scss`.
    #
    # On initial build, we will scan all of the source files, find
    # the dependencies, and build a node for each file, annotating
    # the source files with `:source => true`. We also store off the
    # `mtime` of each file in its node. We link each file to its
    # dependencies.
    #
    # The `additional_dependencies` are a map of the files to their
    # children, which will be used when generating rake tasks.
    #
    # Later, let's say that we change `_admin.scss`. We will need
    # to unlink its dependencies first (on `_core.scss`), rescan
    # the file, and create nodes for its dependencies. If no new
    # dependencies

    class Graph
      class MissingNode < StandardError
      end

      class Node
        # @return [String] the identifier of the node
        attr_reader :name

        # @return [Set] a Set of parent nodes
        attr_reader :parents

        # @return [Set] a Set of child nodes
        attr_reader :children

        # @return [Hash] a Hash of metadata
        attr_reader :metadata

        # @param [String] name the identifier of the node
        # @param [Hash] metadata an optional hash of metadata
        def initialize(name, metadata={})
          @name = name
          @parents = Set.new
          @children = Set.new
          @metadata = metadata
        end

        # A node is equal to another node if it has the same name.
        # This is because the Graph ensures that only one node
        # with a given name can be created.
        #
        # @param [Node] other the node to compare
        def ==(other)
          @name == other.name
        end
      end

      def initialize
        @map = {}
      end

      # @return [Array] an Array of all of the nodes in the graph
      def nodes
        @map.values
      end

      # Add a new node to the graph. If a node with the given
      # name already exists, do not add the node.
      #
      # @param [String] name an identifier for the node.
      # @param [Hash] metadata optional metadata for the node
      def add(name, metadata={})
        return if @map.include?(name)
        @map[name] = Node.new(name, metadata)
      end

      # Remove a node from the graph. Unlink its parents and children
      # from it.
      #
      # If the node does not exist, raise.
      #
      # @param [String] name an identifier for the node
      def remove(name)
        node = verify(name)

        node.parents.each do |parent_node|
          parent_node.children.delete node
        end

        node.children.each do |child_node|
          child_node.parents.delete node
        end

        @map.delete(name)
      end

      # Add a link from the parent to the child. This link is a
      # two-way link, so the child will be added to the parent's
      # `children` and the parent will be added to the child's
      # `parents`.
      #
      # The parent and child are referenced by node identifier.
      #
      # @param [String] parent the identifier of the parent
      # @param [String] child the identifier of the child
      def link(parent, child)
        parent, child = lookup(parent, child)

        parent.children << child
        child.parents << parent
      end

      # Remove a link from the parent to the child.
      #
      # The parent and child are referenced by node identifier.
      #
      # @param [String] parent the identifier of the parent
      # @param [String] child the identifier of the child
      def unlink(parent, child)
        parent, child = lookup(parent, child)

        parent.children.delete(child)
        child.parents.delete(parent)
      end

      # Look up a node by name
      #
      # @param [String] name the identifier of the node
      # @return [Node] the node referenced by the specified identifier
      def [](name)
        @map[name]
      end

    private
      # Verify that the parent and child nodes exist, and return
      # the nodes with the specified identifiers.
      #
      # The parent and child are referenced by node identifier.
      #
      # @param [String] parent the identifier of the parent
      # @param [String] child the identifier of the child
      # @return [Array(Node, Node)] the parent and child nodes
      def lookup(parent, child)
        parent = verify(parent)
        child = verify(child)

        return parent, child
      end

      # Verify that a node with a given identifier exists, and
      # if it does, return it.
      #
      # If it does not, raise an exception.
      #
      # @param [String] name the identifier of the node
      # @raise [MissingNode] if a node with the given name is
      #   not found, raise.
      # @return [Node] the node
      def verify(name)
        node = @map[name]
        raise MissingNode, "Node #{name} does not exist" unless node
        node
      end
    end
  end
end
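Assuming the gem is on the load path (the require path below mirrors the file listing above), the Graph API can be exercised directly; the file names and metadata are illustrative:

require 'rake-pipeline/graph'

graph = Rake::Pipeline::Graph.new

graph.add("admin.scss",  :source => true)
graph.add("_admin.scss", :source => true)
graph.add("_core.scss",  :source => true)

# admin.scss depends on _admin.scss, which in turn depends on _core.scss.
graph.link("admin.scss", "_admin.scss")
graph.link("_admin.scss", "_core.scss")

puts graph["admin.scss"].children.map(&:name).inspect  # => ["_admin.scss"]

# When _admin.scss changes, its old links are removed before rescanning.
graph.unlink("_admin.scss", "_core.scss")
puts graph["_core.scss"].parents.empty?  # => true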
data/lib/rake-pipeline/manifest.rb
@@ -0,0 +1,86 @@
require 'json'

module Rake
  class Pipeline
    # A Manifest is a container for storing dynamic dependency information.
    # A {DynamicFileTask} will use a {Manifest} to keep track of its dynamic
    # dependencies. This allows us to avoid scanning a file for dynamic
    # dependencies if its contents have not changed.
    class Manifest
      attr_accessor :entries
      attr_accessor :manifest_file

      def initialize(manifest_file="manifest.json")
        @manifest_file ||= manifest_file
        @entries = {}
      end

      # Get the manifest off the file system, if it exists.
      def read_manifest
        @entries = File.file?(manifest_file) ? JSON.parse(File.read(manifest_file)) : {}

        # convert the manifest JSON into a Hash of ManifestEntry objects
        @entries.each do |file, raw|
          @entries[file] = Rake::Pipeline::ManifestEntry.from_hash(raw)
        end

        self
      end

      # Write a JSON representation of this manifest out to disk if we
      # have entries to save.
      def write_manifest
        unless @entries.empty?
          File.open(manifest_file, "w") do |file|
            file.puts JSON.generate(as_json)
          end
        end
      end

      # Convert this Manifest into a hash suitable for converting to
      # JSON.
      def as_json
        hash = {}

        @entries.each do |name, entry|
          hash[name] = entry.as_json
        end

        hash
      end

      # Look up an entry by filename.
      def [](key)
        @entries[key]
      end

      # Set an entry
      def []=(key, value)
        @entries[key] = value
      end

      def clear
        entries.clear
      end

      def empty?
        entries.empty?
      end

      def files
        entries.inject({}) do |hash, pair|
          file = pair.first
          entry = pair.last

          hash.merge!(file => entry.mtime)

          entry.deps.each_pair do |name, time|
            hash.merge!(name => time)
          end

          hash
        end
      end
    end
  end
end
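Assuming the gem is on the load path, a write-side round trip through Manifest looks roughly like this. StubEntry is a hypothetical stand-in that only implements the #as_json, #mtime, and #deps methods Manifest itself calls; real code stores ManifestEntry objects:

require 'rake-pipeline/manifest'

# Minimal stand-in for ManifestEntry, for illustration only.
StubEntry = Struct.new(:mtime, :deps) do
  def as_json
    { "mtime" => mtime, "deps" => deps }
  end
end

manifest = Rake::Pipeline::Manifest.new("tmp_manifest.json")
manifest["application.scss"] = StubEntry.new(Time.now.to_i, "_core.scss" => Time.now.to_i)

manifest.write_manifest            # serializes each entry via #as_json
puts manifest.files.keys.inspect   # => ["application.scss", "_core.scss"]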