condenser 1.3 → 1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. checksums.yaml +4 -4
  2. data/lib/condenser/asset.rb +103 -25
  3. data/lib/condenser/build_cache.rb +23 -8
  4. data/lib/condenser/cache/file_store.rb +1 -0
  5. data/lib/condenser/cache/memory_store.rb +1 -0
  6. data/lib/condenser/cache/null_store.rb +1 -0
  7. data/lib/condenser/cache_store.rb +1 -0
  8. data/lib/condenser/context.rb +1 -0
  9. data/lib/condenser/encoding_utils.rb +2 -0
  10. data/lib/condenser/environment.rb +2 -0
  11. data/lib/condenser/errors.rb +2 -0
  12. data/lib/condenser/export.rb +11 -6
  13. data/lib/condenser/helpers/parse_helpers.rb +23 -1
  14. data/lib/condenser/manifest.rb +3 -1
  15. data/lib/condenser/minifiers/sass_minifier.rb +2 -0
  16. data/lib/condenser/minifiers/terser_minifier.rb +2 -0
  17. data/lib/condenser/minifiers/uglify_minifier.rb +2 -0
  18. data/lib/condenser/pipeline.rb +2 -0
  19. data/lib/condenser/processors/babel_processor.rb +19 -16
  20. data/lib/condenser/processors/css_media_combiner_processor.rb +7 -5
  21. data/lib/condenser/processors/js_analyzer.rb +149 -42
  22. data/lib/condenser/processors/node_processor.rb +3 -0
  23. data/lib/condenser/processors/purgecss_processor.rb +2 -0
  24. data/lib/condenser/processors/rollup_processor.rb +289 -136
  25. data/lib/condenser/resolve.rb +41 -10
  26. data/lib/condenser/server.rb +22 -20
  27. data/lib/condenser/templating_engine/ejs.rb +2 -0
  28. data/lib/condenser/templating_engine/erb.rb +2 -0
  29. data/lib/condenser/transformers/dart_sass_transformer.rb +5 -3
  30. data/lib/condenser/transformers/jst_transformer.rb +2 -0
  31. data/lib/condenser/transformers/sass/functions.rb +2 -0
  32. data/lib/condenser/transformers/sass/importer.rb +2 -0
  33. data/lib/condenser/transformers/sass.rb +2 -0
  34. data/lib/condenser/transformers/sass_transformer.rb +2 -0
  35. data/lib/condenser/transformers/svg_transformer/base.rb +2 -0
  36. data/lib/condenser/transformers/svg_transformer/tag.rb +2 -0
  37. data/lib/condenser/transformers/svg_transformer/template.rb +3 -1
  38. data/lib/condenser/transformers/svg_transformer/template_error.rb +2 -0
  39. data/lib/condenser/transformers/svg_transformer/value.rb +2 -0
  40. data/lib/condenser/transformers/svg_transformer/var_generator.rb +2 -0
  41. data/lib/condenser/transformers/svg_transformer.rb +2 -0
  42. data/lib/condenser/utils.rb +2 -0
  43. data/lib/condenser/version.rb +3 -1
  44. data/lib/condenser/writers/brotli_writer.rb +2 -0
  45. data/lib/condenser/writers/file_writer.rb +2 -0
  46. data/lib/condenser/writers/zlib_writer.rb +2 -0
  47. data/lib/condenser.rb +2 -0
  48. data/lib/rake/condensertask.rb +2 -0
  49. data/test/cache_test.rb +115 -20
  50. data/test/dependency_test.rb +51 -2
  51. data/test/manifest_test.rb +17 -2
  52. data/test/postprocessors/css_media_combiner_test.rb +9 -12
  53. data/test/preprocessor/babel_test.rb +876 -349
  54. data/test/preprocessor/js_analyzer_test.rb +208 -4
  55. data/test/processors/rollup/dynamic_import_test.rb +358 -0
  56. data/test/processors/rollup_test.rb +37 -56
  57. data/test/resolve_test.rb +14 -9
  58. data/test/server_test.rb +10 -9
  59. data/test/test_helper.rb +6 -3
  60. data/test/transformers/dart_scss_test.rb +2 -2
  61. data/test/transformers/scss_test.rb +2 -2
  62. metadata +6 -11
  63. data/lib/condenser/minifiers/package-lock.json +0 -25
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 52e335c087aab2b9bb3ef4540e54bfc5b8de0a888cab8f095c2f1dd778458bd4
-  data.tar.gz: 566502264a1049f4ff5e33154caa56ef64ed74d14a8d72abd5e31628d59986c1
+  metadata.gz: fdcbd2e642b5cc57eac94430a67e47501bd392ab8af410e89a07a47da1e339ba
+  data.tar.gz: 82a762a471c4c934846ceb833bf12a6a59761aca72b99cd731d3ca8986cc2290
 SHA512:
-  metadata.gz: 5c0fdf66cb6b17ff6a519af5205061f37e3af2f12896e45c2798154c46ba2db3dea57e219595b33e5433a82db17751bc8444453c5bc0371fb27013e97157352d
-  data.tar.gz: 2486bd59e00c2d70ea683d2f69f1a71b0981b557144a08fd4965f9fe38a0c2da68d79e1c6fcba359718af3f9c26f4978619c3bb651f394cbb0db0172c450096f
+  metadata.gz: 6d2c9f090dfa9a869074596e55a7cc195e5aacc0a9485d06b5f8e46489caf2cabb306fc6a669793e77bf97c7c68cf265293b683f48b0e02cae14f6e74d01d55d
+  data.tar.gz: f7a3f2ca45c330b04019877e08a7b90e99c54ce142c1fff0f8b6ee0718b98212af03ee891785760ace67ab286968838763f7c9f64e89404b170190eef758f1b7

data/lib/condenser/asset.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'set'
 require 'digest/md5'
 require 'digest/sha1'
@@ -10,7 +12,7 @@ class Condenser
     include EncodingUtils
 
     attr_reader :environment, :filename, :content_types, :source_file, :source_path
-    attr_reader :linked_assets, :content_types_digest, :exports
+    attr_reader :content_types_digest, :exports, :type
     attr_writer :source, :sourcemap
 
     attr_accessor :imports, :processed
@@ -65,33 +67,50 @@ class Condenser
     def process_dependencies
       deps = @environment.cache.fetch "direct-deps/#{cache_key}" do
         process
-        @process_dependencies
+        # Sort so etag and cache key are same irrelevant of ordering of
+        # dependencies
+        @process_dependencies.map { |fn| [normalize_filename_base(fn[0]), fn[1]] }.sort_by { |d| d[0] }
       end
 
-      d = []
-      deps.each do |i|
-        i = [i, @content_types] if i.is_a?(String)
+      deps.inject([]) do |memo, i|
+        i[0] = File.join(@environment.base, i[0].delete_prefix('!')) if i[0].start_with?('!') && @environment.base
         @environment.resolve(i[0], File.dirname(@source_file), accept: i[1]).each do |asset|
-          d << asset
+          memo << asset
        end
+        memo
      end
-      d
    end
 
     def export_dependencies
       deps = @environment.cache.fetch "export-deps/#{cache_key}" do
         process
-        @export_dependencies + @process_dependencies
+        # Sort so etag and cache key are same irrelevant of ordering of
+        # dependencies
+        (@export_dependencies + @process_dependencies).map { |fn| [normalize_filename_base(fn[0]), fn[1]] }.sort_by { |d| d[0] }
+      end
+
+      deps.inject([]) do |memo, i|
+        i[0] = File.join(@environment.base, i[0].delete_prefix('!')) if i[0].start_with?('!') && @environment.base
+        @environment.resolve(i[0], File.dirname(@source_file), accept: i[1], npm: true).each do |asset|
+          memo << asset
+        end
+        memo
+      end
+    end
+
+    def linked_assets
+      deps = @environment.cache.fetch "linked-assets/#{cache_key}" do
+        process
+        @linked_assets.map { |fn| [normalize_filename_base(fn[0]), fn[1]] }
       end
 
-      d = []
-      deps.each do |i|
-        i = [i, @content_types] if i.is_a?(String)
+      deps.inject([]) do |memo, i|
+        i[0] = File.join(@environment.base, i[0].delete_prefix('!')) if i[0].start_with?('!') && @environment.base
         @environment.resolve(i[0], File.dirname(@source_file), accept: i[1]).each do |asset|
-          d << asset
+          memo << asset
        end
+        memo
      end
-      d
    end
 
     def has_default_export?
@@ -123,17 +142,27 @@ class Condenser
       end
     end
 
-    def all_process_dependencies
-      f = [@source_file]
-      all_dependenies(process_dependencies, Set.new, :process_dependencies) do |dep|
+    def all_process_dependencies(visited = Set.new)
+      f = Set.new
+      if !visited.include?(@source_file)
+        f << @source_file
+        visited << self.source_file
+      end
+
+      all_dependenies(process_dependencies, visited, :process_dependencies) do |dep|
         f << dep.source_file
       end
       f
     end
 
-    def all_export_dependencies
-      f = [@source_file]
-      all_dependenies(export_dependencies, Set.new, :export_dependencies) do |dep|
+    def all_export_dependencies(visited = Set.new)
+      f = Set.new
+      if !visited.include?(@source_file)
+        f << @source_file
+        visited << self.source_file
+      end
+
+      all_dependenies(export_dependencies, visited, :export_dependencies) do |dep|
        f << dep.source_file
      end
      f
@@ -143,19 +172,29 @@ class Condenser
       @cache_key ||= Digest::SHA1.base64digest(JSON.generate([
         Condenser::VERSION,
         @environment.pipline_digest,
-        @environment.base ? @source_file.delete_prefix(@environment.base) : @source_file,
+        normalize_filename_base(@source_file),
         Digest::SHA256.file(@source_file).hexdigest,
         @content_types_digest
       ]))
     end
 
+    # Remove Enviroment base if it exists. This allows two of the same repos
+    # in a different location to use the same cache (like capistrano deploys)
+    def normalize_filename_base(source_filename)
+      if @environment.base && source_filename.start_with?(@environment.base)
+        '!'+source_filename.delete_prefix(@environment.base).delete_prefix(File::SEPARATOR)
+      else
+        source_filename
+      end
+    end
+
     def process_cache_version
       return @pcv if @pcv
 
       f = []
       all_dependenies(process_dependencies, Set.new, :process_dependencies) do |dep|
         f << [
-          @environment.base ? dep.source_file.delete_prefix(@environment.base) : dep.source_file,
+          normalize_filename_base(dep.source_file),
           Digest::SHA256.file(dep.source_file).hexdigest
         ]
       end
@@ -169,12 +208,12 @@ class Condenser
       f = []
       all_dependenies(export_dependencies, Set.new, :export_dependencies) do |dep|
         f << [
-          @environment.base ? dep.source_file.delete_prefix(@environment.base) : dep.source_file,
+          normalize_filename_base(dep.source_file),
           Digest::SHA256.file(dep.source_file).hexdigest
         ]
       end
 
-      @ecv = Digest::SHA1.base64digest(JSON.generate(f))
+      @ecv = Digest::SHA1.hexdigest(JSON.generate(f))
     end
 
     def needs_reprocessing!
@@ -276,6 +315,9 @@ class Condenser
 
       data[:digest] = @environment.digestor.digest(data[:source])
       data[:digest_name] = @environment.digestor.name.sub(/^.*::/, '').downcase
+      data[:process_dependencies] = normialize_dependency_names(data[:process_dependencies])
+      data[:export_dependencies] = normialize_dependency_names(data[:export_dependencies])
+      data[:linked_assets] = normialize_dependency_names(data[:linked_assets])
 
       # Do this here and at the end so cache_key can be calculated if we
       # run this block
@@ -293,6 +335,15 @@ class Condenser
       @processors = data[:processors]
       @processors_loaded = true
       @processed = true
+      @type = data[:type]
+
+      digestor = @environment.digestor.new
+      digestor << data[:source]
+      all_dependenies(export_dependencies, Set.new, :export_dependencies) do |dep|
+        digestor << dep.source
+      end
+      data[:etag] = digestor.digest.unpack('H*'.freeze).first
+      @etag = data[:etag]
       data
     end
   end
@@ -309,11 +360,24 @@ class Condenser
       @default_export = result[:default_export]
       @exports = result[:exports]
       @processors = result[:processors]
+      @etag = result[:etag]
+      @type = result[:type]
       load_processors
 
       @processed = true
     end
 
+    def normialize_dependency_names(deps)
+      deps.map do |fn|
+        if fn.is_a?(String)
+          dirname, basename, extensions, mime_types = @environment.decompose_path(fn, source_file)
+          [dirname ? File.join(dirname, basename) : basename, mime_types.empty? ? @content_types : mime_types]
+        else
+          fn
+        end
+      end
+    end
+
     def export
       return @export if @export
 
@@ -322,6 +386,9 @@ class Condenser
       process
       dirname, basename, extensions, mime_types = @environment.decompose_path(@filename)
       data = {
+        etag: @etag,
+        type: @type,
+
         source: @source.dup,
         source_file: @source_file,
 
@@ -398,7 +465,11 @@ class Condenser
       process
       @digest.unpack('H*'.freeze).first
     end
-    alias_method :etag, :hexdigest
+
+    def etag
+      process
+      @etag
+    end
 
     def integrity
       process
@@ -406,7 +477,14 @@ class Condenser
     end
 
     def to_json
-      { path: path, digest: hexdigest, size: size, integrity: integrity }
+      {
+        path: path,
+        etag: etag,
+        type: type,
+        size: size,
+        digest: hexdigest,
+        integrity: integrity
+      }
     end
 
     def write(output_directory)
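
Note: the central change in asset.rb is cache relocatability. normalize_filename_base swaps the environment's base directory for a '!' prefix before a path enters a cache key, and process_dependencies/export_dependencies re-expand the prefix when resolving, so identical checkouts in different directories (e.g. capistrano release dirs) share one cache. A standalone sketch of that round trip (base stands in for @environment.base; expand_filename_base is a hypothetical name for the inline re-expansion above):

    def normalize_filename_base(source_filename, base)
      if base && source_filename.start_with?(base)
        # '!' marks the path as base-relative so it can be re-expanded later
        '!' + source_filename.delete_prefix(base).delete_prefix(File::SEPARATOR)
      else
        source_filename
      end
    end

    def expand_filename_base(normalized, base)
      return normalized unless base && normalized.start_with?('!')
      File.join(base, normalized.delete_prefix('!'))
    end

    key = normalize_filename_base('/srv/app/releases/42/app/index.js', '/srv/app/releases/42')
    # => "!app/index.js" -- identical across deploy directories
    expand_filename_base(key, '/srv/app/releases/43')
    # => "/srv/app/releases/43/app/index.js"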

data/lib/condenser/build_cache.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Condenser
   class BuildCache
 
@@ -22,6 +24,10 @@ class Condenser
       else
         @semaphore = Mutex.new
         @listener = Listen.to(*path) do |modified, added, removed|
+          modified = Set.new(modified)
+          added = Set.new(added)
+          removed = Set.new(removed)
+
           @semaphore.synchronize do
             @logger.debug { "build cache semaphore locked by #{Thread.current.object_id}" }
             @logger.debug do
@@ -122,16 +128,25 @@
 
     def []=(value, assets)
       @lookup_cache[value] = assets
+
+      if @fetching.nil?
+        begin
+          assets.each do |asset|
+            @fetching = Set.new
+            asset.all_process_dependencies(@fetching).each do |pd|
+              @process_dependencies[pd] ||= Set.new
+              @process_dependencies[pd] << asset
+            end
 
-      assets.each do |asset|
-        asset.all_process_dependencies.each do |pd|
-          @process_dependencies[pd] ||= Set.new
-          @process_dependencies[pd] << asset
-        end
+            @fetching = Set.new
+            asset.all_export_dependencies(@fetching).each do |pd|
+              @export_dependencies[pd] ||= Set.new
 
-        asset.all_export_dependencies.each do |pd|
-          @export_dependencies[pd] ||= Set.new
-          @export_dependencies[pd] << asset
+              @export_dependencies[pd] << asset
+            end
+          end
+        ensure
+          @fetching = nil
        end
      end
    end
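
Note: this pairs with the new visited parameter on Asset#all_process_dependencies/#all_export_dependencies above. []= seeds a shared Set (@fetching) so mutually dependent assets cannot recurse forever while the dependency maps are rebuilt. The pattern, reduced to a standalone sketch (Node is a hypothetical stand-in for an asset):

    require 'set'

    Node = Struct.new(:source_file, :deps)

    def all_files(node, visited = Set.new)
      files = Set.new
      unless visited.include?(node.source_file)
        files << node.source_file
        visited << node.source_file                  # mark before descending
        node.deps.each { |d| files.merge(all_files(d, visited)) }
      end
      files
    end

    a = Node.new('a.js', [])
    b = Node.new('b.js', [a])
    a.deps << b                # circular: a -> b -> a
    all_files(a).to_a          # => ["a.js", "b.js"], terminates despite the cycle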

data/lib/condenser/cache/file_store.rb CHANGED
@@ -1,4 +1,5 @@
 # frozen_string_literal: true
+
 class Condenser::Cache
   class FileStore < Condenser::CacheStore
     GITKEEP_FILES = ['.gitkeep', '.keep'].freeze

data/lib/condenser/cache/memory_store.rb CHANGED
@@ -1,4 +1,5 @@
 # frozen_string_literal: true
+
 class Condenser::Cache
   # Public: Basic in memory LRU cache.
   #

data/lib/condenser/cache/null_store.rb CHANGED
@@ -1,4 +1,5 @@
 # frozen_string_literal: true
+
 class Condenser::Cache
   class NullStore
 

data/lib/condenser/cache_store.rb CHANGED
@@ -1,4 +1,5 @@
 # frozen_string_literal: true
+
 class Condenser::CacheStore
 
   def fetch(key)

data/lib/condenser/context.rb CHANGED
@@ -1,4 +1,5 @@
 # frozen_string_literal: true
+
 require 'set'
 require 'condenser/errors'
 

data/lib/condenser/encoding_utils.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Condenser
   module EncodingUtils
 

data/lib/condenser/environment.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'digest/sha2'
 require 'condenser/context'
 require 'condenser/cache_store'

data/lib/condenser/errors.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 # Basic Condenser error classes
 class Condenser
   class Error < StandardError; end

data/lib/condenser/export.rb CHANGED
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
 class Condenser
   class Export
 
-    attr_reader :filename, :source, :sourcemap, :content_types, :digest, :digest_name
+    attr_reader :filename, :source, :sourcemap, :content_types, :digest, :digest_name, :etag, :type
 
     def initialize(env, input={})
       @environment = env
@@ -12,6 +14,8 @@ class Condenser
       @content_types = input[:content_types]
       @digest = input[:digest]
       @digest_name = input[:digest_name]
+      @etag = input[:etag]
+      @type = input[:type]
     end
 
     def path
@@ -43,7 +47,6 @@ class Condenser
     def hexdigest
       @digest.unpack('H*'.freeze).first
     end
-    alias_method :etag, :hexdigest
 
     def integrity
       "#{@digest_name}-#{[@digest].pack('m0')}"
@@ -51,10 +54,12 @@
 
     def to_json
       {
-        'path' => path,
-        'size' => size,
-        'digest' => hexdigest,
-        'integrity' => integrity
+        path: path,
+        etag: etag,
+        type: type,
+        size: size,
+        digest: hexdigest,
+        integrity: integrity
       }
     end
 
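
Note: Export (like Asset#to_json above) now carries etag and type next to the digest. The etag is computed in Asset#process over the asset's source plus the sources of all its export dependencies, so a manifest entry's etag changes whenever any file bundled into it changes, not just the file itself. Roughly what an entry looks like after this change (illustrative values):

    require 'json'

    entry = {
      path: 'application-8a2bc9d1.js',
      etag: 'baf5c29e...',             # digest over source + export dependencies
      type: 'application/javascript',
      size: 10_240,
      digest: '8a2bc9d1...',
      integrity: 'sha256-...'
    }
    JSON.generate(entry)  # symbol keys serialize to the same JSON as the old string keys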

data/lib/condenser/helpers/parse_helpers.rb CHANGED
@@ -1,9 +1,11 @@
+# frozen_string_literal: true
+
 module Condenser::ParseHelpers
 
   attr_accessor :matched
 
   def eos?
-    @index >= (@source.size - 1)
+    @index >= @source.size
   end
 
   def scan_until(r)
@@ -55,4 +57,24 @@ module Condenser::ParseHelpers
     "#{lineno.to_s.rjust(4)}: " + @source[start..uptop] + "\n #{'-'* (@index-1-start)}#{'^'*(@matched.length)}"
   end
 
+  def gobble(r)
+    if r.is_a?(Regexp)
+      m = @source.match(r, @index)
+      if m&.begin(0) == @index
+        scan_until(r)
+      end
+    else
+      forward(1)
+      @source[@index-1];
+    end
+  end
+
+  def peek(n=1)
+    if n.is_a?(Regexp)
+      @source.match(n, @index)
+    else
+      @source.slice(@index, n)
+    end
+  end
+
 end
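
Note: two things are happening in parse_helpers.rb. The eos? fix removes an off-by-one (@source.size - 1 reported end-of-source one character early), and the new peek/gobble primitives let callers inspect or consume input at the cursor, with gobble consuming a pattern only when it matches exactly at the current position. A condensed sketch of the cursor behavior (TinyScanner is hypothetical; the real module is mixed into processors that maintain @source/@index):

    class TinyScanner
      def initialize(source)
        @source = source
        @index = 0
      end

      def eos?
        @index >= @source.size          # the 1.5 fix: don't stop one char early
      end

      def peek(n = 1)                   # look ahead without moving the cursor
        n.is_a?(Regexp) ? @source.match(n, @index) : @source.slice(@index, n)
      end

      def gobble(r)                     # consume r only if it starts at the cursor
        m = @source.match(r, @index)
        @index = m.end(0) if m && m.begin(0) == @index
        m
      end
    end

    s = TinyScanner.new('@media print {')
    s.peek(6)            # => "@media"
    s.gobble(/@media/)   # cursor advances past "@media"
    s.peek(7)            # => " print "
    s.eos?               # => false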

data/lib/condenser/manifest.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Condenser
   class Manifest
 
@@ -72,7 +74,7 @@ class Condenser
       @data[asset.filename] = export.to_json
       outputs = export.write(@dir)
       asset.linked_assets.each do |la|
-        @environment.resolve(la).each { |a| outputs += add_asset(a) }
+        outputs += add_asset(la)
       end
       outputs
     end

data/lib/condenser/minifiers/sass_minifier.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Condenser::SassMinifier
 
   def self.instance

data/lib/condenser/minifiers/terser_minifier.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Condenser::TerserMinifier < Condenser::NodeProcessor
 
   def initialize(dir, options = {})

data/lib/condenser/minifiers/uglify_minifier.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Condenser::UglifyMinifier < Condenser::NodeProcessor
 
   class Error < StandardError

data/lib/condenser/pipeline.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Condenser
   module Pipeline
 

data/lib/condenser/processors/babel_processor.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'json'
 
 class Condenser::BabelProcessor < Condenser::NodeProcessor
@@ -16,6 +18,7 @@ class Condenser::BabelProcessor < Condenser::NodeProcessor
         targets: { browsers: '> 1% and not dead' }
       }]
     ]
+    options[:highlightCode] = false if !options.has_key?(:highlightCode)
 
     packages = options.slice(:plugins, :presets).values.reduce(&:+).map { |p| p.is_a?(Array) ? p[0] : p}
     packages.unshift('@babel/core')
@@ -80,11 +83,17 @@ class Condenser::BabelProcessor < Condenser::NodeProcessor
       end
     end
 
-
+    opts['preset']&.each do |preset|
+      preset[0] = preset[0].gsub(/"@?babel[\/-][^"]+"/) { |m| "require(#{m})"}
+    end
+    opts['plugins']&.each do |preset|
+      preset[0] = preset[0].gsub(/"@?babel[\/-][^"]+"/) { |m| "require(#{m})"}
+    end
+
     result = exec_runtime(<<-JS)
       const babel = require("#{File.join(npm_module_path('@babel/core'))}");
       const source = #{JSON.generate(input[:source])};
-      const options = #{JSON.generate(opts).gsub(/"@?babel[\/-][^"]+"/) { |m| "require(#{m})"}};
+      const options = #{JSON.generate(opts)};
 
       let imports = [];
       let defaultExport = false;
@@ -95,23 +104,17 @@ class Condenser::BabelProcessor < Condenser::NodeProcessor
           ImportDeclaration(path, state) {
             imports.push(path.node.source.value);
           },
-          ExportDefaultDeclaration(path, state) {
+
+          ExportDeclaration(path, state) {
             hasExports = true;
-            defaultExport = true;
+            if (path.node.source) {
+              imports.push(path.node.source.value);
+            }
           },
-          ExportDefaultSpecifier(path, state) {
-            hasExports = true;
+
+          ExportDefaultDeclaration(path, state) {
             defaultExport = true;
-          },
-          ExportAllDeclaration(path, state) {
-            hasExports = true;
-          },
-          ExportNamedDeclaration(path, state) {
-            hasExports = true;
-          },
-          ExportSpecifier(path, state) {
-            hasExports = true;
-          },
+          }
         }
      };
    });
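
Note: the rewrite of Babel package names into require() calls now happens per preset/plugin entry before the options are serialized, rather than gsub-ing the whole JSON.generate(opts) string, which applied the regex to every string in the options. The substitution itself, isolated:

    opts = '["@babel/preset-env", {"targets":{"browsers":"> 1% and not dead"}}]'
    opts.gsub(/"@?babel[\/-][^"]+"/) { |m| "require(#{m})" }
    # => '[require("@babel/preset-env"), {"targets":{"browsers":"> 1% and not dead"}}]'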

data/lib/condenser/processors/css_media_combiner_processor.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Condenser::CSSMediaCombinerProcessor
 
   include Condenser::ParseHelpers
@@ -10,7 +12,7 @@ class Condenser::CSSMediaCombinerProcessor
   end
 
   def reduce_media_query(queries)
-    output = ''
+    output = String.new
     queries.each do |query, contents|
       output << query if query
       output << if contents.is_a?(Hash)
@@ -30,12 +32,12 @@ class Condenser::CSSMediaCombinerProcessor
     @selectors = []
     @media_queries = {}
 
-    input[:source] = ''
+    input[:source] = String.new
     while !eos?
       output = if @selectors.empty?
         input[:source]
       else
-        (@selectors[0...-1].reduce(@media_queries) { |hash, selector| hash[selector] ||= {} }[@selectors.last] ||= '')
+        (@selectors[0...-1].reduce(@media_queries) { |hash, selector| hash[selector] ||= {} }[@selectors.last] ||= String.new)
       end
 
       case @stack.last
@@ -64,11 +66,11 @@ class Condenser::CSSMediaCombinerProcessor
          @stack.pop
        end
      else
-        case scan_until(/(@media[^\{]*{|\Z)/)
+        case scan_until(/(@media[^\{]*{|\z)/)
        when ''
          output << pre_match
        else
-          output << pre_match
+          output << pre_match.rstrip
          @selectors << matched.squish
          @stack << :media_query
        end
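
Note: alongside the \Z to \z anchor fix (\Z also matches before a trailing newline, \z only at the true end of the string) and the pre_match.rstrip, the String.new changes matter because the file now carries the frozen_string_literal magic comment: '' literals are frozen, and appending to them with << would raise a FrozenError. For context, the combiner's overall effect, shown as an illustrative before/after (hypothetical input):

    css = <<~CSS
      @media (max-width: 600px) { .a { color: red; } }
      .b { color: blue; }
      @media (max-width: 600px) { .c { color: green; } }
    CSS

    # After processing, both blocks share a single @media query, roughly:
    #
    #   .b { color: blue; }
    #   @media (max-width: 600px) { .a { color: red; } .c { color: green; } }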