sprockets 2.2.3 → 4.0.0

Files changed (99)
  1. checksums.yaml +5 -5
  2. data/CHANGELOG.md +68 -0
  3. data/README.md +482 -255
  4. data/bin/sprockets +20 -7
  5. data/lib/rake/sprocketstask.rb +28 -15
  6. data/lib/sprockets/add_source_map_comment_to_asset_processor.rb +60 -0
  7. data/lib/sprockets/asset.rb +142 -207
  8. data/lib/sprockets/autoload/babel.rb +8 -0
  9. data/lib/sprockets/autoload/closure.rb +8 -0
  10. data/lib/sprockets/autoload/coffee_script.rb +8 -0
  11. data/lib/sprockets/autoload/eco.rb +8 -0
  12. data/lib/sprockets/autoload/ejs.rb +8 -0
  13. data/lib/sprockets/autoload/jsminc.rb +8 -0
  14. data/lib/sprockets/autoload/sass.rb +8 -0
  15. data/lib/sprockets/autoload/sassc.rb +8 -0
  16. data/lib/sprockets/autoload/uglifier.rb +8 -0
  17. data/lib/sprockets/autoload/yui.rb +8 -0
  18. data/lib/sprockets/autoload/zopfli.rb +7 -0
  19. data/lib/sprockets/autoload.rb +16 -0
  20. data/lib/sprockets/babel_processor.rb +66 -0
  21. data/lib/sprockets/base.rb +89 -249
  22. data/lib/sprockets/bower.rb +61 -0
  23. data/lib/sprockets/bundle.rb +105 -0
  24. data/lib/sprockets/cache/file_store.rb +190 -14
  25. data/lib/sprockets/cache/memory_store.rb +75 -0
  26. data/lib/sprockets/cache/null_store.rb +54 -0
  27. data/lib/sprockets/cache.rb +271 -0
  28. data/lib/sprockets/cached_environment.rb +64 -0
  29. data/lib/sprockets/closure_compressor.rb +48 -0
  30. data/lib/sprockets/coffee_script_processor.rb +39 -0
  31. data/lib/sprockets/compressing.rb +134 -0
  32. data/lib/sprockets/configuration.rb +79 -0
  33. data/lib/sprockets/context.rb +204 -135
  34. data/lib/sprockets/dependencies.rb +74 -0
  35. data/lib/sprockets/digest_utils.rb +200 -0
  36. data/lib/sprockets/directive_processor.rb +224 -216
  37. data/lib/sprockets/eco_processor.rb +33 -0
  38. data/lib/sprockets/ejs_processor.rb +32 -0
  39. data/lib/sprockets/encoding_utils.rb +262 -0
  40. data/lib/sprockets/environment.rb +23 -68
  41. data/lib/sprockets/erb_processor.rb +37 -0
  42. data/lib/sprockets/errors.rb +6 -13
  43. data/lib/sprockets/exporters/base.rb +72 -0
  44. data/lib/sprockets/exporters/file_exporter.rb +24 -0
  45. data/lib/sprockets/exporters/zlib_exporter.rb +33 -0
  46. data/lib/sprockets/exporters/zopfli_exporter.rb +14 -0
  47. data/lib/sprockets/exporting.rb +73 -0
  48. data/lib/sprockets/file_reader.rb +16 -0
  49. data/lib/sprockets/http_utils.rb +135 -0
  50. data/lib/sprockets/jsminc_compressor.rb +32 -0
  51. data/lib/sprockets/jst_processor.rb +36 -19
  52. data/lib/sprockets/loader.rb +343 -0
  53. data/lib/sprockets/manifest.rb +231 -96
  54. data/lib/sprockets/manifest_utils.rb +48 -0
  55. data/lib/sprockets/mime.rb +80 -32
  56. data/lib/sprockets/npm.rb +52 -0
  57. data/lib/sprockets/path_dependency_utils.rb +77 -0
  58. data/lib/sprockets/path_digest_utils.rb +48 -0
  59. data/lib/sprockets/path_utils.rb +367 -0
  60. data/lib/sprockets/paths.rb +82 -0
  61. data/lib/sprockets/preprocessors/default_source_map.rb +49 -0
  62. data/lib/sprockets/processing.rb +140 -192
  63. data/lib/sprockets/processor_utils.rb +169 -0
  64. data/lib/sprockets/resolve.rb +295 -0
  65. data/lib/sprockets/sass_cache_store.rb +30 -0
  66. data/lib/sprockets/sass_compressor.rb +63 -0
  67. data/lib/sprockets/sass_functions.rb +3 -0
  68. data/lib/sprockets/sass_importer.rb +3 -0
  69. data/lib/sprockets/sass_processor.rb +313 -0
  70. data/lib/sprockets/sassc_compressor.rb +56 -0
  71. data/lib/sprockets/sassc_processor.rb +297 -0
  72. data/lib/sprockets/server.rb +138 -90
  73. data/lib/sprockets/source_map_processor.rb +66 -0
  74. data/lib/sprockets/source_map_utils.rb +483 -0
  75. data/lib/sprockets/transformers.rb +173 -0
  76. data/lib/sprockets/uglifier_compressor.rb +66 -0
  77. data/lib/sprockets/unloaded_asset.rb +139 -0
  78. data/lib/sprockets/uri_tar.rb +99 -0
  79. data/lib/sprockets/uri_utils.rb +191 -0
  80. data/lib/sprockets/utils/gzip.rb +99 -0
  81. data/lib/sprockets/utils.rb +186 -53
  82. data/lib/sprockets/version.rb +2 -1
  83. data/lib/sprockets/yui_compressor.rb +56 -0
  84. data/lib/sprockets.rb +217 -52
  85. metadata +250 -59
  86. data/LICENSE +0 -21
  87. data/lib/sprockets/asset_attributes.rb +0 -126
  88. data/lib/sprockets/bundled_asset.rb +0 -79
  89. data/lib/sprockets/caching.rb +0 -96
  90. data/lib/sprockets/charset_normalizer.rb +0 -41
  91. data/lib/sprockets/eco_template.rb +0 -38
  92. data/lib/sprockets/ejs_template.rb +0 -37
  93. data/lib/sprockets/engines.rb +0 -74
  94. data/lib/sprockets/index.rb +0 -99
  95. data/lib/sprockets/processed_asset.rb +0 -152
  96. data/lib/sprockets/processor.rb +0 -32
  97. data/lib/sprockets/safety_colons.rb +0 -28
  98. data/lib/sprockets/static_asset.rb +0 -57
  99. data/lib/sprockets/trail.rb +0 -90
data/lib/sprockets/source_map_utils.rb (new file)
@@ -0,0 +1,483 @@
+ # frozen_string_literal: true
+ require 'json'
+ require 'sprockets/path_utils'
+
+ module Sprockets
+   module SourceMapUtils
+     extend self
+
+     # Public: Transpose source maps into a standard format
+     #
+     # NOTE: Does not support index maps
+     #
+     # version => 3
+     # file => logical path
+     # sources => relative from filename
+     #
+     # Unnecessary attributes are removed
+     #
+     # Example
+     #
+     # map
+     # #=> {
+     # # "version" => 3,
+     # # "file" => "stdin",
+     # # "sourceRoot" => "",
+     # # "sourceContents" => "blah blah blah",
+     # # "sources" => [/root/logical/path.js],
+     # # "names" => [..],
+     # #}
+     # format_source_map(map, input)
+     # #=> {
+     # # "version" => 3,
+     # # "file" => "logical/path.js",
+     # # "sources" => ["path.js"],
+     # # "names" => [..],
+     # #}
+     def format_source_map(map, input)
+       filename = input[:filename]
+       load_path = input[:load_path]
+       load_paths = input[:environment].config[:paths]
+       mime_exts = input[:environment].config[:mime_exts]
+       pipeline_exts = input[:environment].config[:pipeline_exts]
+       file = PathUtils.split_subpath(load_path, filename)
+       {
+         "version" => 3,
+         "file" => file,
+         "mappings" => map["mappings"],
+         "sources" => map["sources"].map do |source|
+           source = URIUtils.split_file_uri(source)[2] if source.start_with? "file://"
+           source = PathUtils.join(File.dirname(filename), source) unless PathUtils.absolute_path?(source)
+           _, source = PathUtils.paths_split(load_paths, source)
+           source = PathUtils.relative_path_from(file, source)
+           PathUtils.set_pipeline(source, mime_exts, pipeline_exts, :source)
+         end,
+         "names" => map["names"]
+       }
+     end
+
+     # Public: Concatenate two source maps.
+     #
+     # For example, if two js scripts are concatenated, the individual source
+     # maps for those files can be concatenated to map back to the originals.
+     #
+     # Examples
+     #
+     # script3 = "#{script1}#{script2}"
+     # map3 = concat_source_maps(map1, map2)
+     #
+     # a - Source map hash
+     # b - Source map hash
+     #
+     # Returns a new source map hash.
+     def concat_source_maps(a, b)
+       return a || b unless a && b
+       a = make_index_map(a)
+       b = make_index_map(b)
+
+       offset = 0
+       if a["sections"].count != 0 && !a["sections"].last["map"]["mappings"].empty?
+         last_line_count = a["sections"].last["map"].delete("x_sprockets_linecount")
+         offset += last_line_count
+
+         last_offset = a["sections"].last["offset"]["line"]
+         offset += last_offset
+       end
+
+       a["sections"] += b["sections"].map do |section|
+         {
+           "offset" => section["offset"].merge({ "line" => section["offset"]["line"] + offset }),
+           "map" => section["map"].merge({
+             "sources" => section["map"]["sources"].map do |source|
+               PathUtils.relative_path_from(a["file"], PathUtils.join(File.dirname(b["file"]), source))
+             end
+           })
+         }
+       end
+       a
+     end
+
+     # Public: Converts source map to index map
+     #
+     # Example:
+     #
+     # map
+     # # => {
+     # "version" => 3,
+     # "file" => "..",
+     # "mappings" => "AAAA;AACA;..;AACA",
+     # "sources" => [..],
+     # "names" => [..]
+     # }
+     # make_index_map(map)
+     # # => {
+     # "version" => 3,
+     # "file" => "..",
+     # "sections" => [
+     # {
+     # "offset" => { "line" => 0, "column" => 0 },
+     # "map" => {
+     # "version" => 3,
+     # "file" => "..",
+     # "mappings" => "AAAA;AACA;..;AACA",
+     # "sources" => [..],
+     # "names" => [..]
+     # }
+     # }
+     # ]
+     # }
+     def make_index_map(map)
+       return map if map.key? "sections"
+       {
+         "version" => map["version"],
+         "file" => map["file"],
+         "sections" => [
+           {
+             "offset" => { "line" => 0, "column" => 0 },
+             "map" => map
+           }
+         ]
+       }
+     end
+
+     # Public: Combine two separate source map transformations into a single
+     # mapping.
+     #
+     # Source transformations may happen in discrete steps producing separate
+     # source maps. These steps can be combined into a single mapping back to
+     # the source.
+     #
+     # For example, CoffeeScript may transform a file producing a map. Then
+     # Uglifier processes the result and produces another map. The CoffeeScript
+     # map can be combined with the Uglifier map so the source lines of the
+     # minified output can be traced back to the original CoffeeScript file.
+     #
+     # Returns a source map hash.
+     def combine_source_maps(first, second)
+       return second unless first
+
+       _first = decode_source_map(first)
+       _second = decode_source_map(second)
+
+       new_mappings = []
+
+       _second[:mappings].each do |m|
+         first_line = bsearch_mappings(_first[:mappings], m[:original])
+         new_mappings << first_line.merge(generated: m[:generated]) if first_line
+       end
+
+       _first[:mappings] = new_mappings
+
+       encode_source_map(_first)
+     end
+
+     # Public: Decompress source map
+     #
+     # Example:
+     #
+     # decode_source_map(map)
+     # # => {
+     # version: 3,
+     # file: "..",
+     # mappings: [
+     # { source: "..", generated: [0, 0], original: [0, 0], name: ".."}, ..
+     # ],
+     # sources: [..],
+     # names: [..]
+     # }
+     #
+     # map - Source map hash (v3 spec)
+     #
+     # Returns an uncompressed source map hash
+     def decode_source_map(map)
+       return nil unless map
+
+       mappings, sources, names = [], [], []
+       if map["sections"]
+         map["sections"].each do |s|
+           mappings += decode_source_map(s["map"])[:mappings].each do |m|
+             m[:generated][0] += s["offset"]["line"]
+             m[:generated][1] += s["offset"]["column"]
+           end
+           sources |= s["map"]["sources"]
+           names |= s["map"]["names"]
+         end
+       else
+         mappings = decode_vlq_mappings(map["mappings"], sources: map["sources"], names: map["names"])
+         sources = map["sources"]
+         names = map["names"]
+       end
+       {
+         version: 3,
+         file: map["file"],
+         mappings: mappings,
+         sources: sources,
+         names: names
+       }
+     end
+
+     # Public: Compress source map
+     #
+     # Example:
+     #
+     # encode_source_map(map)
+     # # => {
+     # "version" => 3,
+     # "file" => "..",
+     # "mappings" => "AAAA;AACA;..;AACA",
+     # "sources" => [..],
+     # "names" => [..]
+     # }
+     #
+     # map - Source map hash (uncompressed)
+     #
+     # Returns a compressed source map hash according to source map spec v3
+     def encode_source_map(map)
+       return nil unless map
+       {
+         "version" => map[:version],
+         "file" => map[:file],
+         "mappings" => encode_vlq_mappings(map[:mappings], sources: map[:sources], names: map[:names]),
+         "sources" => map[:sources],
+         "names" => map[:names]
+       }
+     end
+
+     # Public: Compare two source map offsets.
+     #
+     # Compatible with Array#sort.
+     #
+     # a - Array [line, column]
+     # b - Array [line, column]
+     #
+     # Returns -1 if a < b, 0 if a == b and 1 if a > b.
+     def compare_source_offsets(a, b)
+       diff = a[0] - b[0]
+       diff = a[1] - b[1] if diff == 0
+
+       if diff < 0
+         -1
+       elsif diff > 0
+         1
+       else
+         0
+       end
+     end
+
+     # Public: Search Array of mappings for closest offset.
+     #
+     # mappings - Array of mapping Hash objects
+     # offset - Array [line, column]
+     #
+     # Returns mapping Hash object.
+     def bsearch_mappings(mappings, offset, from = 0, to = mappings.size - 1)
+       mid = (from + to) / 2
+
+       if from > to
+         return from < 1 ? nil : mappings[from-1]
+       end
+
+       case compare_source_offsets(offset, mappings[mid][:generated])
+       when 0
+         mappings[mid]
+       when -1
+         bsearch_mappings(mappings, offset, from, mid - 1)
+       when 1
+         bsearch_mappings(mappings, offset, mid + 1, to)
+       end
+     end
+
+     # Public: Decode VLQ mappings and match up sources and symbol names.
+     #
+     # str - VLQ string from 'mappings' attribute
+     # sources - Array of Strings from 'sources' attribute
+     # names - Array of Strings from 'names' attribute
+     #
+     # Returns an Array of Mappings.
+     def decode_vlq_mappings(str, sources: [], names: [])
+       mappings = []
+
+       source_id = 0
+       original_line = 1
+       original_column = 0
+       name_id = 0
+
+       vlq_decode_mappings(str).each_with_index do |group, index|
+         generated_column = 0
+         generated_line = index + 1
+
+         group.each do |segment|
+           generated_column += segment[0]
+           generated = [generated_line, generated_column]
+
+           if segment.size >= 4
+             source_id += segment[1]
+             original_line += segment[2]
+             original_column += segment[3]
+
+             source = sources[source_id]
+             original = [original_line, original_column]
+           else
+             # TODO: Research this case
+             next
+           end
+
+           if segment[4]
+             name_id += segment[4]
+             name = names[name_id]
+           end
+
+           mapping = {source: source, generated: generated, original: original}
+           mapping[:name] = name if name
+           mappings << mapping
+         end
+       end
+
+       mappings
+     end
+
+     # Public: Encode mappings Hash into a VLQ encoded String.
+     #
+     # mappings - Array of Hash mapping objects
+     # sources - Array of String sources (default: mappings source order)
+     # names - Array of String names (default: mappings name order)
+     #
+     # Returns a VLQ encoded String.
+     def encode_vlq_mappings(mappings, sources: nil, names: nil)
+       sources ||= mappings.map { |m| m[:source] }.uniq.compact
+       names ||= mappings.map { |m| m[:name] }.uniq.compact
+
+       sources_index = Hash[sources.each_with_index.to_a]
+       names_index = Hash[names.each_with_index.to_a]
+
+       source_id = 0
+       source_line = 1
+       source_column = 0
+       name_id = 0
+
+       by_lines = mappings.group_by { |m| m[:generated][0] }
+
+       ary = (1..(by_lines.keys.max || 1)).map do |line|
+         generated_column = 0
+
+         (by_lines[line] || []).map do |mapping|
+           group = []
+           group << mapping[:generated][1] - generated_column
+           group << sources_index[mapping[:source]] - source_id
+           group << mapping[:original][0] - source_line
+           group << mapping[:original][1] - source_column
+           group << names_index[mapping[:name]] - name_id if mapping[:name]
+
+           generated_column = mapping[:generated][1]
+           source_id = sources_index[mapping[:source]]
+           source_line = mapping[:original][0]
+           source_column = mapping[:original][1]
+           name_id = names_index[mapping[:name]] if mapping[:name]
+
+           group
+         end
+       end
+
+       vlq_encode_mappings(ary)
+     end
+
+     # Public: Base64 VLQ encoding
+     #
+     # Adopted from ConradIrwin/ruby-source_map
+     # https://github.com/ConradIrwin/ruby-source_map/blob/master/lib/source_map/vlq.rb
+     #
+     # Resources
+     #
+     # http://en.wikipedia.org/wiki/Variable-length_quantity
+     # https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
+     # https://github.com/mozilla/source-map/blob/master/lib/source-map/base64-vlq.js
+     #
+     VLQ_BASE_SHIFT = 5
+     VLQ_BASE = 1 << VLQ_BASE_SHIFT
+     VLQ_BASE_MASK = VLQ_BASE - 1
+     VLQ_CONTINUATION_BIT = VLQ_BASE
+
+     BASE64_DIGITS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('')
+     BASE64_VALUES = (0...64).inject({}) { |h, i| h[BASE64_DIGITS[i]] = i; h }
+
+     # Public: Encode a list of numbers into a compact VLQ string.
+     #
+     # ary - An Array of Integers
+     #
+     # Returns a VLQ String.
+     def vlq_encode(ary)
+       result = []
+       ary.each do |n|
+         vlq = n < 0 ? ((-n) << 1) + 1 : n << 1
+         loop do
+           digit = vlq & VLQ_BASE_MASK
+           vlq >>= VLQ_BASE_SHIFT
+           digit |= VLQ_CONTINUATION_BIT if vlq > 0
+           result << BASE64_DIGITS[digit]
+
+           break unless vlq > 0
+         end
+       end
+       result.join
+     end
+
+     # Public: Decode a VLQ string.
+     #
+     # str - VLQ encoded String
+     #
+     # Returns an Array of Integers.
+     def vlq_decode(str)
+       result = []
+       shift = 0
+       value = 0
+       i = 0
+
+       while i < str.size do
+         digit = BASE64_VALUES[str[i]]
+         raise ArgumentError unless digit
+         continuation = (digit & VLQ_CONTINUATION_BIT) != 0
+         digit &= VLQ_CONTINUATION_BIT - 1
+         value += digit << shift
+         if continuation
+           shift += VLQ_BASE_SHIFT
+         else
+           result << ((value & 1) == 1 ? -(value >> 1) : value >> 1)
+           value = shift = 0
+         end
+         i += 1
+       end
+       result
+     end
+
+     # Public: Encode a mapping array into a compact VLQ string.
+     #
+     # ary - Two dimensional Array of Integers.
+     #
+     # Returns a VLQ encoded String separated by , and ;.
+     def vlq_encode_mappings(ary)
+       ary.map { |group|
+         group.map { |segment|
+           vlq_encode(segment)
+         }.join(',')
+       }.join(';')
+     end
+
+     # Public: Decode a VLQ string into mapping numbers.
+     #
+     # str - VLQ encoded String
+     #
+     # Returns a two dimensional Array of Integers.
+     def vlq_decode_mappings(str)
+       mappings = []
+
+       str.split(';').each_with_index do |group, index|
+         mappings[index] = []
+         group.split(',').each do |segment|
+           mappings[index] << vlq_decode(segment)
+         end
+       end
+
+       mappings
+     end
+   end
+ end
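
Taken together, `source_map_utils.rb` gives Sprockets a small dependency-free source map toolkit: `decode_source_map` expands the packed `"mappings"` VLQ string into explicit mapping hashes, `encode_source_map` packs them back, and the `vlq_*` helpers handle the Base64 VLQ layer underneath. A minimal round-trip sketch (not part of the diff; the map below is invented for illustration):

```ruby
require 'sprockets'

SM = Sprockets::SourceMapUtils

# Base64 VLQ segments round-trip losslessly.
SM.vlq_encode([0, 0, 1, 0])  # => "AACA"
SM.vlq_decode("AACA")        # => [0, 0, 1, 0]

# A tiny hypothetical map: two generated lines, each mapping back to the
# corresponding line of "a.coffee".
map = {
  "version"  => 3,
  "file"     => "a.js",
  "mappings" => "AAAA;AACA",
  "sources"  => ["a.coffee"],
  "names"    => []
}

decoded = SM.decode_source_map(map)
decoded[:mappings]
# => [{source: "a.coffee", generated: [1, 0], original: [1, 0]},
#     {source: "a.coffee", generated: [2, 0], original: [2, 0]}]

# Re-encoding the expanded form reproduces the packed "mappings" string.
SM.encode_source_map(decoded)["mappings"]  # => "AAAA;AACA"
```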
data/lib/sprockets/transformers.rb (new file)
@@ -0,0 +1,173 @@
+ # frozen_string_literal: true
+ require 'sprockets/http_utils'
+ require 'sprockets/processor_utils'
+ require 'sprockets/utils'
+
+ module Sprockets
+   module Transformers
+     include HTTPUtils, ProcessorUtils, Utils
+
+     # Public: Two level mapping of a source mime type to a target mime type.
+     #
+     # environment.transformers
+     # # => { 'text/coffeescript' => {
+     # 'application/javascript' => ConvertCoffeeScriptToJavaScript
+     # }
+     # }
+     #
+     def transformers
+       config[:transformers]
+     end
+
+     Transformer = Struct.new :from, :to, :proc
+
+     # Public: Register a transformer from and to a mime type.
+     #
+     # from - String mime type
+     # to - String mime type
+     # proc - Callable block that accepts an input Hash.
+     #
+     # Examples
+     #
+     # register_transformer 'text/coffeescript', 'application/javascript',
+     # ConvertCoffeeScriptToJavaScript
+     #
+     # register_transformer 'image/svg+xml', 'image/png', ConvertSvgToPng
+     #
+     # Returns nothing.
+     def register_transformer(from, to, proc)
+       self.config = hash_reassoc(config, :registered_transformers) do |transformers|
+         transformers << Transformer.new(from, to, proc)
+       end
+       compute_transformers!(self.config[:registered_transformers])
+     end
+
+     # Internal: Register transformer for existing type adding a suffix.
+     #
+     # types - Array of existing mime type Strings
+     # type_format - String suffix formatting string
+     # extname - String extension to append
+     # processor - Callable block that accepts an input Hash.
+     #
+     # Returns nothing.
+     def register_transformer_suffix(types, type_format, extname, processor)
+       Array(types).each do |type|
+         extensions, charset = mime_types[type].values_at(:extensions, :charset)
+         parts = type.split('/')
+         suffix_type = type_format.sub('\1', parts[0]).sub('\2', parts[1])
+         extensions = extensions.map { |ext| "#{ext}#{extname}" }
+
+         register_mime_type(suffix_type, extensions: extensions, charset: charset)
+         register_transformer(suffix_type, type, processor)
+       end
+     end
+
+     # Internal: Resolve target mime type that the source type should be
+     # transformed to.
+     #
+     # type - String from mime type
+     # accept - String accept type list (default: '*/*')
+     #
+     # Examples
+     #
+     # resolve_transform_type('text/plain', 'text/plain')
+     # # => 'text/plain'
+     #
+     # resolve_transform_type('image/svg+xml', 'image/png, image/*')
+     # # => 'image/png'
+     #
+     # resolve_transform_type('text/css', 'image/png')
+     # # => nil
+     #
+     # Returns String mime type or nil if no type satisfied the accept value.
+     def resolve_transform_type(type, accept)
+       find_best_mime_type_match(accept || '*/*', [type].compact + config[:transformers][type].keys)
+     end
+
+     # Internal: Expand accept type list to include possible transformed types.
+     #
+     # parsed_accepts - Array of accept q values
+     #
+     # Examples
+     #
+     # expand_transform_accepts([['application/javascript', 1.0]])
+     # # => [['application/javascript', 1.0], ['text/coffeescript', 0.8]]
+     #
+     # Returns an expanded Array of q values.
+     def expand_transform_accepts(parsed_accepts)
+       accepts = []
+       parsed_accepts.each do |(type, q)|
+         accepts.push([type, q])
+         config[:inverted_transformers][type].each do |subtype|
+           accepts.push([subtype, q * 0.8])
+         end
+       end
+       accepts
+     end
+
+     # Internal: Compose multiple transformer steps into a single processor
+     # function.
+     #
+     # transformers - Two level Hash of a source mime type to a target mime type
+     # types - Array of mime type steps
+     #
+     # Returns Processor.
+     def compose_transformers(transformers, types, preprocessors, postprocessors)
+       if types.length < 2
+         raise ArgumentError, "too few transform types: #{types.inspect}"
+       end
+
+       processors = types.each_cons(2).map { |src, dst|
+         unless processor = transformers[src][dst]
+           raise ArgumentError, "missing transformer for type: #{src} to #{dst}"
+         end
+         processor
+       }
+
+       compose_transformer_list processors, preprocessors, postprocessors
+     end
+
+     private
+       def compose_transformer_list(transformers, preprocessors, postprocessors)
+         processors = []
+
+         transformers.each do |processor|
+           processors.concat postprocessors[processor.from]
+           processors << processor.proc
+           processors.concat preprocessors[processor.to]
+         end
+
+         if processors.size > 1
+           compose_processors(*processors.reverse)
+         elsif processors.size == 1
+           processors.first
+         end
+       end
+
+       def compute_transformers!(registered_transformers)
+         preprocessors = self.config[:preprocessors]
+         postprocessors = self.config[:postprocessors]
+         transformers = Hash.new { {} }
+         inverted_transformers = Hash.new { Set.new }
+         incoming_edges = registered_transformers.group_by(&:from)
+
+         registered_transformers.each do |t|
+           traversals = dfs_paths([t]) { |k| incoming_edges.fetch(k.to, []) }
+
+           traversals.each do |nodes|
+             src, dst = nodes.first.from, nodes.last.to
+             processor = compose_transformer_list nodes, preprocessors, postprocessors
+
+             transformers[src] = {} unless transformers.key?(src)
+             transformers[src][dst] = processor
+
+             inverted_transformers[dst] = Set.new unless inverted_transformers.key?(dst)
+             inverted_transformers[dst] << src
+           end
+         end
+
+         self.config = hash_reassoc(config, :transformers) { transformers }
+         self.config = hash_reassoc(config, :inverted_transformers) { inverted_transformers }
+       end
+   end
+ end
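
`register_transformer` (above), together with `register_mime_type` from `data/lib/sprockets/mime.rb` in this same release, is the public entry point; `compute_transformers!` then precomputes every reachable source-to-target chain so a lookup is a single hash access. A hedged sketch of wiring up a custom transformer; the `text/x-shout` type, `.shout` extension and `UpcaseProcessor` are invented for illustration, and `resolve_transform_type` / `expand_transform_accepts` are internal helpers used here only to show the bookkeeping:

```ruby
require 'sprockets'

# A transformer is anything callable that takes an input Hash and returns
# transformed source, either as a String or as a Hash with a :data key.
UpcaseProcessor = proc do |input|
  { data: input[:data].upcase }
end

env = Sprockets::Environment.new
env.register_mime_type 'text/x-shout', extensions: ['.shout']
env.register_transformer 'text/x-shout', 'application/javascript', UpcaseProcessor

# The registered edge is now visible to type resolution...
env.resolve_transform_type('text/x-shout', 'application/javascript')
# => "application/javascript"

# ...and Accept expansion offers the new source type at a reduced q value.
env.expand_transform_accepts([['application/javascript', 1.0]])
   .include?(['text/x-shout', 0.8])
# => true
```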