sprockets 3.0.0 → 4.0.2

Files changed (95)
  1. checksums.yaml +5 -5
  2. data/CHANGELOG.md +76 -0
  3. data/README.md +426 -404
  4. data/bin/sprockets +12 -7
  5. data/lib/rake/sprocketstask.rb +3 -2
  6. data/lib/sprockets/add_source_map_comment_to_asset_processor.rb +60 -0
  7. data/lib/sprockets/asset.rb +33 -24
  8. data/lib/sprockets/autoload/babel.rb +8 -0
  9. data/lib/sprockets/autoload/closure.rb +1 -0
  10. data/lib/sprockets/autoload/coffee_script.rb +1 -0
  11. data/lib/sprockets/autoload/eco.rb +1 -0
  12. data/lib/sprockets/autoload/ejs.rb +1 -0
  13. data/lib/sprockets/autoload/jsminc.rb +8 -0
  14. data/lib/sprockets/autoload/sass.rb +1 -0
  15. data/lib/sprockets/autoload/sassc.rb +8 -0
  16. data/lib/sprockets/autoload/uglifier.rb +1 -0
  17. data/lib/sprockets/autoload/yui.rb +1 -0
  18. data/lib/sprockets/autoload/zopfli.rb +7 -0
  19. data/lib/sprockets/autoload.rb +5 -0
  20. data/lib/sprockets/babel_processor.rb +66 -0
  21. data/lib/sprockets/base.rb +61 -13
  22. data/lib/sprockets/bower.rb +5 -2
  23. data/lib/sprockets/bundle.rb +44 -4
  24. data/lib/sprockets/cache/file_store.rb +32 -7
  25. data/lib/sprockets/cache/memory_store.rb +9 -0
  26. data/lib/sprockets/cache/null_store.rb +8 -0
  27. data/lib/sprockets/cache.rb +42 -5
  28. data/lib/sprockets/cached_environment.rb +14 -19
  29. data/lib/sprockets/closure_compressor.rb +6 -11
  30. data/lib/sprockets/coffee_script_processor.rb +19 -5
  31. data/lib/sprockets/compressing.rb +62 -2
  32. data/lib/sprockets/configuration.rb +3 -7
  33. data/lib/sprockets/context.rb +98 -23
  34. data/lib/sprockets/dependencies.rb +9 -8
  35. data/lib/sprockets/digest_utils.rb +104 -60
  36. data/lib/sprockets/directive_processor.rb +45 -35
  37. data/lib/sprockets/eco_processor.rb +3 -2
  38. data/lib/sprockets/ejs_processor.rb +3 -2
  39. data/lib/sprockets/encoding_utils.rb +8 -4
  40. data/lib/sprockets/environment.rb +9 -4
  41. data/lib/sprockets/erb_processor.rb +28 -21
  42. data/lib/sprockets/errors.rb +1 -1
  43. data/lib/sprockets/exporters/base.rb +71 -0
  44. data/lib/sprockets/exporters/file_exporter.rb +24 -0
  45. data/lib/sprockets/exporters/zlib_exporter.rb +33 -0
  46. data/lib/sprockets/exporters/zopfli_exporter.rb +14 -0
  47. data/lib/sprockets/exporting.rb +73 -0
  48. data/lib/sprockets/file_reader.rb +1 -0
  49. data/lib/sprockets/http_utils.rb +26 -6
  50. data/lib/sprockets/jsminc_compressor.rb +32 -0
  51. data/lib/sprockets/jst_processor.rb +11 -10
  52. data/lib/sprockets/loader.rb +239 -70
  53. data/lib/sprockets/manifest.rb +97 -44
  54. data/lib/sprockets/manifest_utils.rb +9 -6
  55. data/lib/sprockets/mime.rb +8 -42
  56. data/lib/sprockets/npm.rb +52 -0
  57. data/lib/sprockets/path_dependency_utils.rb +3 -11
  58. data/lib/sprockets/path_digest_utils.rb +2 -1
  59. data/lib/sprockets/path_utils.rb +106 -21
  60. data/lib/sprockets/paths.rb +1 -0
  61. data/lib/sprockets/preprocessors/default_source_map.rb +49 -0
  62. data/lib/sprockets/processing.rb +31 -51
  63. data/lib/sprockets/processor_utils.rb +81 -15
  64. data/lib/sprockets/resolve.rb +182 -95
  65. data/lib/sprockets/sass_cache_store.rb +1 -0
  66. data/lib/sprockets/sass_compressor.rb +21 -17
  67. data/lib/sprockets/sass_functions.rb +1 -0
  68. data/lib/sprockets/sass_importer.rb +1 -0
  69. data/lib/sprockets/sass_processor.rb +45 -17
  70. data/lib/sprockets/sassc_compressor.rb +56 -0
  71. data/lib/sprockets/sassc_processor.rb +297 -0
  72. data/lib/sprockets/server.rb +57 -34
  73. data/lib/sprockets/source_map_processor.rb +66 -0
  74. data/lib/sprockets/source_map_utils.rb +483 -0
  75. data/lib/sprockets/transformers.rb +63 -35
  76. data/lib/sprockets/uglifier_compressor.rb +23 -20
  77. data/lib/sprockets/unloaded_asset.rb +139 -0
  78. data/lib/sprockets/uri_tar.rb +99 -0
  79. data/lib/sprockets/uri_utils.rb +15 -14
  80. data/lib/sprockets/utils/gzip.rb +99 -0
  81. data/lib/sprockets/utils.rb +43 -59
  82. data/lib/sprockets/version.rb +2 -1
  83. data/lib/sprockets/yui_compressor.rb +5 -14
  84. data/lib/sprockets.rb +103 -33
  85. metadata +151 -22
  86. data/LICENSE +0 -21
  87. data/lib/sprockets/coffee_script_template.rb +0 -6
  88. data/lib/sprockets/eco_template.rb +0 -6
  89. data/lib/sprockets/ejs_template.rb +0 -6
  90. data/lib/sprockets/engines.rb +0 -81
  91. data/lib/sprockets/erb_template.rb +0 -6
  92. data/lib/sprockets/legacy.rb +0 -314
  93. data/lib/sprockets/legacy_proc_processor.rb +0 -35
  94. data/lib/sprockets/legacy_tilt_processor.rb +0 -29
  95. data/lib/sprockets/sass_template.rb +0 -7
data/lib/sprockets/source_map_utils.rb
@@ -0,0 +1,483 @@
+ # frozen_string_literal: true
+ require 'json'
+ require 'sprockets/path_utils'
+
+ module Sprockets
+   module SourceMapUtils
+     extend self
+
+     # Public: Transpose source maps into a standard format
+     #
+     # NOTE: Does not support index maps
+     #
+     # version => 3
+     # file => logical path
+     # sources => relative from filename
+     #
+     # Unnecessary attributes are removed
+     #
+     # Example
+     #
+     # map
+     # #=> {
+     # # "version" => 3,
+     # # "file" => "stdin",
+     # # "sourceRoot" => "",
+     # # "sourceContents" => "blah blah blah",
+     # # "sources" => [/root/logical/path.js],
+     # # "names" => [..],
+     # #}
+     # format_source_map(map, input)
+     # #=> {
+     # # "version" => 3,
+     # # "file" => "logical/path.js",
+     # # "sources" => ["path.js"],
+     # # "names" => [..],
+     # #}
+     def format_source_map(map, input)
+       filename = input[:filename]
+       load_path = input[:load_path]
+       load_paths = input[:environment].config[:paths]
+       mime_exts = input[:environment].config[:mime_exts]
+       pipeline_exts = input[:environment].config[:pipeline_exts]
+       file = PathUtils.split_subpath(load_path, filename)
+       {
+         "version" => 3,
+         "file" => file,
+         "mappings" => map["mappings"],
+         "sources" => map["sources"].map do |source|
+           source = URIUtils.split_file_uri(source)[2] if source.start_with? "file://"
+           source = PathUtils.join(File.dirname(filename), source) unless PathUtils.absolute_path?(source)
+           _, source = PathUtils.paths_split(load_paths, source)
+           source = PathUtils.relative_path_from(file, source)
+           PathUtils.set_pipeline(source, mime_exts, pipeline_exts, :source)
+         end,
+         "names" => map["names"]
+       }
+     end
+
+     # Public: Concatenate two source maps.
+     #
+     # For an example, if two js scripts are concatenated, the individual source
+     # maps for those files can be concatenated to map back to the originals.
+     #
+     # Examples
+     #
+     # script3 = "#{script1}#{script2}"
+     # map3 = concat_source_maps(map1, map2)
+     #
+     # a - Source map hash
+     # b - Source map hash
+     #
+     # Returns a new source map hash.
+     def concat_source_maps(a, b)
+       return a || b unless a && b
+       a = make_index_map(a)
+       b = make_index_map(b)
+
+       offset = 0
+       if a["sections"].count != 0 && !a["sections"].last["map"]["mappings"].empty?
+         last_line_count = a["sections"].last["map"].delete("x_sprockets_linecount")
+         offset += last_line_count || 1
+
+         last_offset = a["sections"].last["offset"]["line"]
+         offset += last_offset
+       end
+
+       a["sections"] += b["sections"].map do |section|
+         {
+           "offset" => section["offset"].merge({ "line" => section["offset"]["line"] + offset }),
+           "map" => section["map"].merge({
+             "sources" => section["map"]["sources"].map do |source|
+               PathUtils.relative_path_from(a["file"], PathUtils.join(File.dirname(b["file"]), source))
+             end
+           })
+         }
+       end
+       a
+     end
+
+     # Public: Converts source map to index map
+     #
+     # Example:
+     #
+     # map
+     # # => {
+     #   "version" => 3,
+     #   "file" => "..",
+     #   "mappings" => "AAAA;AACA;..;AACA",
+     #   "sources" => [..],
+     #   "names" => [..]
+     # }
+     # make_index_map(map)
+     # # => {
+     #   "version" => 3,
+     #   "file" => "..",
+     #   "sections" => [
+     #     {
+     #       "offset" => { "line" => 0, "column" => 0 },
+     #       "map" => {
+     #         "version" => 3,
+     #         "file" => "..",
+     #         "mappings" => "AAAA;AACA;..;AACA",
+     #         "sources" => [..],
+     #         "names" => [..]
+     #       }
+     #     }
+     #   ]
+     # }
+     def make_index_map(map)
+       return map if map.key? "sections"
+       {
+         "version" => map["version"],
+         "file" => map["file"],
+         "sections" => [
+           {
+             "offset" => { "line" => 0, "column" => 0 },
+             "map" => map
+           }
+         ]
+       }
+     end
+
+     # Public: Combine two seperate source map transformations into a single
+     # mapping.
+     #
+     # Source transformations may happen in discrete steps producing separate
+     # source maps. These steps can be combined into a single mapping back to
+     # the source.
+     #
+     # For an example, CoffeeScript may transform a file producing a map. Then
+     # Uglifier processes the result and produces another map. The CoffeeScript
+     # map can be combined with the Uglifier map so the source lines of the
+     # minified output can be traced back to the original CoffeeScript file.
+     #
+     # Returns a source map hash.
+     def combine_source_maps(first, second)
+       return second unless first
+
+       _first = decode_source_map(first)
+       _second = decode_source_map(second)
+
+       new_mappings = []
+
+       _second[:mappings].each do |m|
+         first_line = bsearch_mappings(_first[:mappings], m[:original])
+         new_mappings << first_line.merge(generated: m[:generated]) if first_line
+       end
+
+       _first[:mappings] = new_mappings
+
+       encode_source_map(_first)
+     end
+
+     # Public: Decompress source map
+     #
+     # Example:
+     #
+     # decode_source_map(map)
+     # # => {
+     #   version: 3,
+     #   file: "..",
+     #   mappings: [
+     #     { source: "..", generated: [0, 0], original: [0, 0], name: ".."}, ..
+     #   ],
+     #   sources: [..],
+     #   names: [..]
+     # }
+     #
+     # map - Source map hash (v3 spec)
+     #
+     # Returns an uncompressed source map hash
+     def decode_source_map(map)
+       return nil unless map
+
+       mappings, sources, names = [], [], []
+       if map["sections"]
+         map["sections"].each do |s|
+           mappings += decode_source_map(s["map"])[:mappings].each do |m|
+             m[:generated][0] += s["offset"]["line"]
+             m[:generated][1] += s["offset"]["column"]
+           end
+           sources |= s["map"]["sources"]
+           names |= s["map"]["names"]
+         end
+       else
+         mappings = decode_vlq_mappings(map["mappings"], sources: map["sources"], names: map["names"])
+         sources = map["sources"]
+         names = map["names"]
+       end
+       {
+         version: 3,
+         file: map["file"],
+         mappings: mappings,
+         sources: sources,
+         names: names
+       }
+     end
+
+     # Public: Compress source map
+     #
+     # Example:
+     #
+     # encode_source_map(map)
+     # # => {
+     #   "version" => 3,
+     #   "file" => "..",
+     #   "mappings" => "AAAA;AACA;..;AACA",
+     #   "sources" => [..],
+     #   "names" => [..]
+     # }
+     #
+     # map - Source map hash (uncompressed)
+     #
+     # Returns a compressed source map hash according to source map spec v3
+     def encode_source_map(map)
+       return nil unless map
+       {
+         "version" => map[:version],
+         "file" => map[:file],
+         "mappings" => encode_vlq_mappings(map[:mappings], sources: map[:sources], names: map[:names]),
+         "sources" => map[:sources],
+         "names" => map[:names]
+       }
+     end
+
+     # Public: Compare two source map offsets.
+     #
+     # Compatible with Array#sort.
+     #
+     # a - Array [line, column]
+     # b - Array [line, column]
+     #
+     # Returns -1 if a < b, 0 if a == b and 1 if a > b.
+     def compare_source_offsets(a, b)
+       diff = a[0] - b[0]
+       diff = a[1] - b[1] if diff == 0
+
+       if diff < 0
+         -1
+       elsif diff > 0
+         1
+       else
+         0
+       end
+     end
+
+     # Public: Search Array of mappings for closest offset.
+     #
+     # mappings - Array of mapping Hash objects
+     # offset - Array [line, column]
+     #
+     # Returns mapping Hash object.
+     def bsearch_mappings(mappings, offset, from = 0, to = mappings.size - 1)
+       mid = (from + to) / 2
+
+       if from > to
+         return from < 1 ? nil : mappings[from-1]
+       end
+
+       case compare_source_offsets(offset, mappings[mid][:generated])
+       when 0
+         mappings[mid]
+       when -1
+         bsearch_mappings(mappings, offset, from, mid - 1)
+       when 1
+         bsearch_mappings(mappings, offset, mid + 1, to)
+       end
+     end
+
+     # Public: Decode VLQ mappings and match up sources and symbol names.
+     #
+     # str - VLQ string from 'mappings' attribute
+     # sources - Array of Strings from 'sources' attribute
+     # names - Array of Strings from 'names' attribute
+     #
+     # Returns an Array of Mappings.
+     def decode_vlq_mappings(str, sources: [], names: [])
+       mappings = []
+
+       source_id = 0
+       original_line = 1
+       original_column = 0
+       name_id = 0
+
+       vlq_decode_mappings(str).each_with_index do |group, index|
+         generated_column = 0
+         generated_line = index + 1
+
+         group.each do |segment|
+           generated_column += segment[0]
+           generated = [generated_line, generated_column]
+
+           if segment.size >= 4
+             source_id += segment[1]
+             original_line += segment[2]
+             original_column += segment[3]
+
+             source = sources[source_id]
+             original = [original_line, original_column]
+           else
+             # TODO: Research this case
+             next
+           end
+
+           if segment[4]
+             name_id += segment[4]
+             name = names[name_id]
+           end
+
+           mapping = {source: source, generated: generated, original: original}
+           mapping[:name] = name if name
+           mappings << mapping
+         end
+       end
+
+       mappings
+     end
+
+     # Public: Encode mappings Hash into a VLQ encoded String.
+     #
+     # mappings - Array of Hash mapping objects
+     # sources - Array of String sources (default: mappings source order)
+     # names - Array of String names (default: mappings name order)
+     #
+     # Returns a VLQ encoded String.
+     def encode_vlq_mappings(mappings, sources: nil, names: nil)
+       sources ||= mappings.map { |m| m[:source] }.uniq.compact
+       names ||= mappings.map { |m| m[:name] }.uniq.compact
+
+       sources_index = Hash[sources.each_with_index.to_a]
+       names_index = Hash[names.each_with_index.to_a]
+
+       source_id = 0
+       source_line = 1
+       source_column = 0
+       name_id = 0
+
+       by_lines = mappings.group_by { |m| m[:generated][0] }
+
+       ary = (1..(by_lines.keys.max || 1)).map do |line|
+         generated_column = 0
+
+         (by_lines[line] || []).map do |mapping|
+           group = []
+           group << mapping[:generated][1] - generated_column
+           group << sources_index[mapping[:source]] - source_id
+           group << mapping[:original][0] - source_line
+           group << mapping[:original][1] - source_column
+           group << names_index[mapping[:name]] - name_id if mapping[:name]
+
+           generated_column = mapping[:generated][1]
+           source_id = sources_index[mapping[:source]]
+           source_line = mapping[:original][0]
+           source_column = mapping[:original][1]
+           name_id = names_index[mapping[:name]] if mapping[:name]
+
+           group
+         end
+       end
+
+       vlq_encode_mappings(ary)
+     end
+
+     # Public: Base64 VLQ encoding
+     #
+     # Adopted from ConradIrwin/ruby-source_map
+     # https://github.com/ConradIrwin/ruby-source_map/blob/master/lib/source_map/vlq.rb
+     #
+     # Resources
+     #
+     # http://en.wikipedia.org/wiki/Variable-length_quantity
+     # https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
+     # https://github.com/mozilla/source-map/blob/master/lib/source-map/base64-vlq.js
+     #
+     VLQ_BASE_SHIFT = 5
+     VLQ_BASE = 1 << VLQ_BASE_SHIFT
+     VLQ_BASE_MASK = VLQ_BASE - 1
+     VLQ_CONTINUATION_BIT = VLQ_BASE
+
+     BASE64_DIGITS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('')
+     BASE64_VALUES = (0...64).inject({}) { |h, i| h[BASE64_DIGITS[i]] = i; h }
+
+     # Public: Encode a list of numbers into a compact VLQ string.
+     #
+     # ary - An Array of Integers
+     #
+     # Returns a VLQ String.
+     def vlq_encode(ary)
+       result = []
+       ary.each do |n|
+         vlq = n < 0 ? ((-n) << 1) + 1 : n << 1
+         loop do
+           digit = vlq & VLQ_BASE_MASK
+           vlq >>= VLQ_BASE_SHIFT
+           digit |= VLQ_CONTINUATION_BIT if vlq > 0
+           result << BASE64_DIGITS[digit]
+
+           break unless vlq > 0
+         end
+       end
+       result.join
+     end
+
+     # Public: Decode a VLQ string.
+     #
+     # str - VLQ encoded String
+     #
+     # Returns an Array of Integers.
+     def vlq_decode(str)
+       result = []
+       shift = 0
+       value = 0
+       i = 0
+
+       while i < str.size do
+         digit = BASE64_VALUES[str[i]]
+         raise ArgumentError unless digit
+         continuation = (digit & VLQ_CONTINUATION_BIT) != 0
+         digit &= VLQ_BASE_MASK
+         value += digit << shift
+         if continuation
+           shift += VLQ_BASE_SHIFT
+         else
+           result << ((value & 1) == 1 ? -(value >> 1) : value >> 1)
+           value = shift = 0
+         end
+         i += 1
+       end
+       result
+     end
+
+     # Public: Encode a mapping array into a compact VLQ string.
+     #
+     # ary - Two dimensional Array of Integers.
+     #
+     # Returns a VLQ encoded String seperated by , and ;.
+     def vlq_encode_mappings(ary)
+       ary.map { |group|
+         group.map { |segment|
+           vlq_encode(segment)
+         }.join(',')
+       }.join(';')
+     end
+
+     # Public: Decode a VLQ string into mapping numbers.
+     #
+     # str - VLQ encoded String
+     #
+     # Returns an two dimensional Array of Integers.
+     def vlq_decode_mappings(str)
+       mappings = []
+
+       str.split(';').each_with_index do |group, index|
+         mappings[index] = []
+         group.split(',').each do |segment|
+           mappings[index] << vlq_decode(segment)
+         end
+       end
+
+       mappings
+     end
+   end
+ end
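
For orientation (an illustrative sketch, not part of the diff): a minimal round trip through the VLQ helpers added above, assuming the sprockets 4 gem is installed. The segment values are invented and follow the "mappings" convention of relative fields [generated column, source index, original line, original column].

    require 'sprockets'

    # One generated line containing two segments.
    segments = [[0, 0, 0, 0], [6, 0, 0, 6]]

    encoded = Sprockets::SourceMapUtils.vlq_encode_mappings([segments])
    # => "AAAA,MAAM"

    Sprockets::SourceMapUtils.vlq_decode_mappings(encoded)
    # => [[[0, 0, 0, 0], [6, 0, 0, 6]]]
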
data/lib/sprockets/transformers.rb
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  require 'sprockets/http_utils'
  require 'sprockets/processor_utils'
  require 'sprockets/utils'
@@ -18,6 +19,8 @@ module Sprockets
        config[:transformers]
      end

+     Transformer = Struct.new :from, :to, :proc
+
      # Public: Register a transformer from and to a mime type.
      #
      # from - String mime type
@@ -33,10 +36,30 @@ module Sprockets
      #
      # Returns nothing.
      def register_transformer(from, to, proc)
-       self.config = hash_reassoc(config, :registered_transformers, from) do |transformers|
-         transformers.merge(to => proc)
+       self.config = hash_reassoc(config, :registered_transformers) do |transformers|
+         transformers << Transformer.new(from, to, proc)
+       end
+       compute_transformers!(self.config[:registered_transformers])
+     end
+
+     # Internal: Register transformer for existing type adding a suffix.
+     #
+     # types - Array of existing mime type Strings
+     # type_format - String suffix formatting string
+     # extname - String extension to append
+     # processor - Callable block that accepts an input Hash.
+     #
+     # Returns nothing.
+     def register_transformer_suffix(types, type_format, extname, processor)
+       Array(types).each do |type|
+         extensions, charset = mime_types[type].values_at(:extensions, :charset)
+         parts = type.split('/')
+         suffix_type = type_format.sub('\1', parts[0]).sub('\2', parts[1])
+         extensions = extensions.map { |ext| "#{ext}#{extname}" }
+
+         register_mime_type(suffix_type, extensions: extensions, charset: charset)
+         register_transformer(suffix_type, type, processor)
        end
-       compute_transformers!
      end

      # Internal: Resolve target mime type that the source type should be
@@ -89,53 +112,58 @@ module Sprockets
      # types - Array of mime type steps
      #
      # Returns Processor.
-     def compose_transformers(transformers, types)
+     def compose_transformers(transformers, types, preprocessors, postprocessors)
        if types.length < 2
          raise ArgumentError, "too few transform types: #{types.inspect}"
        end

-       i = 0
-       processors = []
-
-       loop do
-         src = types[i]
-         dst = types[i+1]
-         break unless src && dst
-
+       processors = types.each_cons(2).map { |src, dst|
          unless processor = transformers[src][dst]
            raise ArgumentError, "missing transformer for type: #{src} to #{dst}"
          end
-         processors.concat config[:postprocessors][src]
-         processors << processor
-         processors.concat config[:preprocessors][dst]
+         processor
+       }

-         i += 1
-       end
-
-       if processors.size > 1
-         compose_processors(*processors.reverse)
-       elsif processors.size == 1
-         processors.first
-       end
+       compose_transformer_list processors, preprocessors, postprocessors
      end

      private
-       def compute_transformers!
-         registered_transformers = self.config[:registered_transformers]
-         transformers = Hash.new { {} }
+       def compose_transformer_list(transformers, preprocessors, postprocessors)
+         processors = []
+
+         transformers.each do |processor|
+           processors.concat postprocessors[processor.from]
+           processors << processor.proc
+           processors.concat preprocessors[processor.to]
+         end
+
+         if processors.size > 1
+           compose_processors(*processors.reverse)
+         elsif processors.size == 1
+           processors.first
+         end
+       end
+
+       def compute_transformers!(registered_transformers)
+         preprocessors = self.config[:preprocessors]
+         postprocessors = self.config[:postprocessors]
+         transformers = Hash.new { {} }
          inverted_transformers = Hash.new { Set.new }
+         incoming_edges = registered_transformers.group_by(&:from)
+
+         registered_transformers.each do |t|
+           traversals = dfs_paths([t]) { |k| incoming_edges.fetch(k.to, []) }

-         registered_transformers.keys.flat_map do |key|
-           dfs_paths([key]) { |k| registered_transformers[k].keys }
-         end.each do |types|
-           src, dst = types.first, types.last
-           processor = compose_transformers(registered_transformers, types)
+           traversals.each do |nodes|
+             src, dst = nodes.first.from, nodes.last.to
+             processor = compose_transformer_list nodes, preprocessors, postprocessors

-           transformers[src] = {} unless transformers.key?(src)
-           transformers[src][dst] = processor
+             transformers[src] = {} unless transformers.key?(src)
+             transformers[src][dst] = processor

-           inverted_transformers[dst] = Set.new unless inverted_transformers.key?(dst)
-           inverted_transformers[dst] << src
+             inverted_transformers[dst] = Set.new unless inverted_transformers.key?(dst)
+             inverted_transformers[dst] << src
+           end
          end

          self.config = hash_reassoc(config, :transformers) { transformers }
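
For orientation (an illustrative sketch, not part of the diff): the public registration API keeps its shape after this refactor; a transformer is still registered with a from-type, a to-type and a callable, it is just stored as a Transformer struct and the transformer graph is recomputed eagerly. The mime type and processor below are made up for the example.

    require 'sprockets'

    env = Sprockets::Environment.new

    # Hypothetical mime type and transformer, purely for illustration.
    env.register_mime_type 'text/x-shout', extensions: ['.shout']
    env.register_transformer 'text/x-shout', 'text/plain',
      ->(input) { { data: input[:data].upcase } }
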
data/lib/sprockets/uglifier_compressor.rb
@@ -1,4 +1,7 @@
+ # frozen_string_literal: true
  require 'sprockets/autoload'
+ require 'sprockets/digest_utils'
+ require 'sprockets/source_map_utils'

  module Sprockets
    # Public: Uglifier/Uglify compressor.
@@ -14,7 +17,7 @@ module Sprockets
    # Sprockets::UglifierCompressor.new(comments: :copyright)
    #
    class UglifierCompressor
-     VERSION = '1'
+     VERSION = '3'

      # Public: Return singleton instance with default options.
      #
@@ -34,30 +37,30 @@ module Sprockets
      attr_reader :cache_key

      def initialize(options = {})
-       # Feature detect Uglifier 2.0 option support
-       if Autoload::Uglifier::DEFAULTS[:copyright]
-         # Uglifier < 2.x
-         options[:copyright] ||= false
-       else
-         # Uglifier >= 2.x
-         options[:copyright] ||= :none
-       end
+       options[:comments] ||= :none

-       @uglifier = Autoload::Uglifier.new(options)
-
-       @cache_key = [
-         self.class.name,
-         Autoload::Uglifier::VERSION,
-         VERSION,
-         options
-       ].freeze
+       @options = options
+       @cache_key = "#{self.class.name}:#{Autoload::Uglifier::VERSION}:#{VERSION}:#{DigestUtils.digest(options)}".freeze
      end

      def call(input)
-       data = input[:data]
-       input[:cache].fetch(@cache_key + [data]) do
-         @uglifier.compile(data)
+       case Autoload::Uglifier::VERSION.to_i
+       when 1
+         raise "uglifier 1.x is no longer supported, please upgrade to 2.x or newer"
+       when 2
+         input_options = { source_filename: input[:filename] }
+       else
+         input_options = { source_map: { filename: input[:filename] } }
        end
+
+       uglifier = Autoload::Uglifier.new(@options.merge(input_options))
+
+       js, map = uglifier.compile_with_map(input[:data])
+
+       map = SourceMapUtils.format_source_map(JSON.parse(map), input)
+       map = SourceMapUtils.combine_source_maps(input[:metadata][:map], map)
+
+       { data: js, map: map }
      end
    end
  end
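
As a usage sketch (assuming the uglifier gem is available; not part of the diff itself): the compressor is still wired in through the environment's JS compressor setting; what changes is that call(input) now returns a hash with both the minified source and a source map combined with any upstream map, instead of only the minified string.

    require 'sprockets'

    env = Sprockets::Environment.new
    env.js_compressor = :uglifier

    # Options such as comments: :copyright can still be passed by registering
    # an instance of the compressor, as the class documentation above shows.
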