condenser 1.2 → 1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. checksums.yaml +4 -4
  2. data/lib/condenser/asset.rb +69 -35
  3. data/lib/condenser/build_cache.rb +22 -9
  4. data/lib/condenser/context.rb +9 -25
  5. data/lib/condenser/helpers/parse_helpers.rb +8 -1
  6. data/lib/condenser/manifest.rb +3 -1
  7. data/lib/condenser/pipeline.rb +8 -3
  8. data/lib/condenser/processors/babel_processor.rb +9 -15
  9. data/lib/condenser/processors/css_media_combiner_processor.rb +81 -0
  10. data/lib/condenser/processors/js_analyzer.rb +41 -8
  11. data/lib/condenser/processors/node_processor.rb +1 -0
  12. data/lib/condenser/processors/purgecss_processor.rb +6 -4
  13. data/lib/condenser/processors/rollup_processor.rb +38 -36
  14. data/lib/condenser/resolve.rb +27 -6
  15. data/lib/condenser/templating_engine/ejs.rb +1 -1
  16. data/lib/condenser/transformers/dart_sass_transformer.rb +285 -0
  17. data/lib/condenser/transformers/jst_transformer.rb +67 -17
  18. data/lib/condenser/transformers/sass/functions.rb +133 -0
  19. data/lib/condenser/transformers/sass/importer.rb +48 -0
  20. data/lib/condenser/transformers/sass.rb +4 -0
  21. data/lib/condenser/transformers/sass_transformer.rb +124 -281
  22. data/lib/condenser/transformers/svg_transformer/base.rb +26 -0
  23. data/lib/condenser/transformers/svg_transformer/tag.rb +54 -0
  24. data/lib/condenser/transformers/svg_transformer/template.rb +151 -0
  25. data/lib/condenser/transformers/svg_transformer/template_error.rb +2 -0
  26. data/lib/condenser/transformers/svg_transformer/value.rb +13 -0
  27. data/lib/condenser/transformers/svg_transformer/var_generator.rb +10 -0
  28. data/lib/condenser/transformers/svg_transformer.rb +19 -0
  29. data/lib/condenser/version.rb +1 -1
  30. data/lib/condenser.rb +17 -5
  31. data/test/cache_test.rb +157 -18
  32. data/test/dependency_test.rb +51 -2
  33. data/test/manifest_test.rb +34 -0
  34. data/test/minifiers/terser_minifier_test.rb +0 -1
  35. data/test/minifiers/uglify_minifier_test.rb +0 -1
  36. data/test/postprocessors/css_media_combiner_test.rb +107 -0
  37. data/test/postprocessors/purgecss_test.rb +62 -0
  38. data/test/preprocessor/babel_test.rb +703 -298
  39. data/test/preprocessor/js_analyzer_test.rb +35 -2
  40. data/test/processors/rollup_test.rb +50 -20
  41. data/test/resolve_test.rb +18 -9
  42. data/test/server_test.rb +15 -10
  43. data/test/templates/ejs_test.rb +2 -11
  44. data/test/templates/erb_test.rb +0 -5
  45. data/test/test_helper.rb +8 -3
  46. data/test/transformers/dart_scss_test.rb +139 -0
  47. data/test/transformers/jst_test.rb +165 -21
  48. data/test/transformers/scss_test.rb +14 -0
  49. data/test/transformers/svg_test.rb +40 -0
  50. metadata +23 -6
  51. data/lib/condenser/transformers/sass_transformer/importer.rb +0 -50
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: e2b1dab33dd825d2bf0902e871c68c0334d49ee7af48ee0875132f39adce20d2
4
- data.tar.gz: a3c40f6f19722d0cea00e28592cd2049db5185057b299bf697fc169c9a19c89a
3
+ metadata.gz: 171baa1e5cd8016c4b2e7f142147d174410085ede075c101da43b743a4ad6fec
4
+ data.tar.gz: 9c97b58fb2bf540b335f6504997da99fac72891f61a0a0631639ae3fd9f28482
5
5
  SHA512:
6
- metadata.gz: c1ca10d2f7dff116d1b4d4daeba50302509c7faffed458e2312d40c391b84a407d596487102a4e306abd9a96bd435c39d0fb23add68a3608aa64014b3fffe0d7
7
- data.tar.gz: 3f530d21b0ec272d51aabd96f0218dfc1a02dbb8845dbefd3d92cf1517447bc436785a102415ad9fda696df26d9769f67485f8530445eb66b48dc3318ffd8e72
6
+ metadata.gz: decc8b993d0e99c5212f3c0b1d4be88de43d49ab1386f32936a0dae482fc9b45de55cbbc51d99eafce58bcd68809a20fd27267ef0fe62d3aa3a244e33e68992e
7
+ data.tar.gz: e39a90c28c7beab789885e53e35ae0ad43340ff4009f81ad1586ac16eb0073dbaf999b69a94f78e796f5bfe5e93a16e0a45481e76b706d0e496dfbe5435b4966
@@ -65,33 +65,31 @@ class Condenser
65
65
  def process_dependencies
66
66
  deps = @environment.cache.fetch "direct-deps/#{cache_key}" do
67
67
  process
68
- @process_dependencies
68
+ @process_dependencies.map { |fn| [normalize_filename_base(fn[0]), fn[1]] }
69
69
  end
70
70
 
71
- d = []
72
- deps.each do |i|
73
- i = [i, @content_types] if i.is_a?(String)
71
+ deps.inject([]) do |memo, i|
72
+ i[0] = File.join(@environment.base, i[0].delete_prefix('!')) if i[0].start_with?('!') && @environment.base
74
73
  @environment.resolve(i[0], File.dirname(@source_file), accept: i[1]).each do |asset|
75
- d << asset
74
+ memo << asset
76
75
  end
76
+ memo
77
77
  end
78
- d
79
78
  end
80
79
 
81
80
  def export_dependencies
82
81
  deps = @environment.cache.fetch "export-deps/#{cache_key}" do
83
82
  process
84
- @export_dependencies + @process_dependencies
83
+ (@export_dependencies + @process_dependencies).map { |fn| [normalize_filename_base(fn[0]), fn[1]] }
85
84
  end
86
85
 
87
- d = []
88
- deps.each do |i|
89
- i = [i, @content_types] if i.is_a?(String)
86
+ deps.inject([]) do |memo, i|
87
+ i[0] = File.join(@environment.base, i[0].delete_prefix('!')) if i[0].start_with?('!') && @environment.base
90
88
  @environment.resolve(i[0], File.dirname(@source_file), accept: i[1]).each do |asset|
91
- d << asset
89
+ memo << asset
92
90
  end
91
+ memo
93
92
  end
94
- d
95
93
  end
96
94
 
97
95
  def has_default_export?
@@ -123,17 +121,27 @@ class Condenser
123
121
  end
124
122
  end
125
123
 
126
- def all_process_dependencies
127
- f = [@source_file]
128
- all_dependenies(process_dependencies, [], :process_dependencies) do |dep|
124
+ def all_process_dependencies(visited = Set.new)
125
+ f = []
126
+ if !visited.include?(@source_file)
127
+ f << @source_file
128
+ visited << self.source_file
129
+ end
130
+
131
+ all_dependenies(process_dependencies, visited, :process_dependencies) do |dep|
129
132
  f << dep.source_file
130
133
  end
131
134
  f
132
135
  end
133
136
 
134
- def all_export_dependencies
135
- f = [@source_file]
136
- all_dependenies(export_dependencies, [], :export_dependencies) do |dep|
137
+ def all_export_dependencies(visited = Set.new)
138
+ f = []
139
+ if !visited.include?(@source_file)
140
+ f << @source_file
141
+ visited << self.source_file
142
+ end
143
+
144
+ all_dependenies(export_dependencies, visited, :export_dependencies) do |dep|
137
145
  f << dep.source_file
138
146
  end
139
147
  f
@@ -143,19 +151,29 @@ class Condenser
143
151
  @cache_key ||= Digest::SHA1.base64digest(JSON.generate([
144
152
  Condenser::VERSION,
145
153
  @environment.pipline_digest,
146
- @environment.base ? @source_file.delete_prefix(@environment.base) : @source_file,
154
+ normalize_filename_base(@source_file),
147
155
  Digest::SHA256.file(@source_file).hexdigest,
148
156
  @content_types_digest
149
157
  ]))
150
158
  end
151
159
 
160
+ # Remove Enviroment base if it exists. This allows two of the same repos
161
+ # in a different location to use the same cache (like capistrano deploys)
162
+ def normalize_filename_base(source_filename)
163
+ if @environment.base && source_filename.start_with?(@environment.base)
164
+ '!'+source_filename.delete_prefix(@environment.base).delete_prefix(File::SEPARATOR)
165
+ else
166
+ source_filename
167
+ end
168
+ end
169
+
152
170
  def process_cache_version
153
171
  return @pcv if @pcv
154
172
 
155
173
  f = []
156
- all_dependenies(process_dependencies, [], :process_dependencies) do |dep|
174
+ all_dependenies(process_dependencies, Set.new, :process_dependencies) do |dep|
157
175
  f << [
158
- @environment.base ? dep.source_file.delete_prefix(@environment.base) : dep.source_file,
176
+ normalize_filename_base(dep.source_file),
159
177
  Digest::SHA256.file(dep.source_file).hexdigest
160
178
  ]
161
179
  end
@@ -167,9 +185,9 @@ class Condenser
167
185
  return @ecv if @ecv
168
186
 
169
187
  f = []
170
- all_dependenies(export_dependencies, [], :export_dependencies) do |dep|
188
+ all_dependenies(export_dependencies, Set.new, :export_dependencies) do |dep|
171
189
  f << [
172
- @environment.base ? dep.source_file.delete_prefix(@environment.base) : dep.source_file,
190
+ normalize_filename_base(dep.source_file),
173
191
  Digest::SHA256.file(dep.source_file).hexdigest
174
192
  ]
175
193
  end
@@ -206,9 +224,9 @@ class Condenser
206
224
  content_type: mime_types,
207
225
 
208
226
  map: nil,
209
- linked_assets: [],
210
- process_dependencies: [],
211
- export_dependencies: [],
227
+ linked_assets: Set.new,
228
+ process_dependencies: Set.new,
229
+ export_dependencies: Set.new,
212
230
 
213
231
  processors: Set.new
214
232
  }
@@ -276,6 +294,8 @@ class Condenser
276
294
 
277
295
  data[:digest] = @environment.digestor.digest(data[:source])
278
296
  data[:digest_name] = @environment.digestor.name.sub(/^.*::/, '').downcase
297
+ data[:process_dependencies] = normialize_dependency_names(data[:process_dependencies])
298
+ data[:export_dependencies] = normialize_dependency_names(data[:export_dependencies])
279
299
 
280
300
  # Do this here and at the end so cache_key can be calculated if we
281
301
  # run this block
@@ -285,15 +305,14 @@ class Condenser
285
305
  @content_types = data[:content_type]
286
306
  @digest = data[:digest]
287
307
  @digest_name = data[:digest_name]
288
- @linked_assets = data[:linked_assets]
289
- @process_dependencies = data[:process_dependencies]
290
- @export_dependencies = data[:export_dependencies]
308
+ @linked_assets = Set.new(data[:linked_assets])
309
+ @process_dependencies = Set.new(data[:process_dependencies])
310
+ @export_dependencies = Set.new(data[:export_dependencies])
291
311
  @default_export = data[:default_export]
292
312
  @exports = data[:exports]
293
313
  @processors = data[:processors]
294
314
  @processors_loaded = true
295
315
  @processed = true
296
-
297
316
  data
298
317
  end
299
318
  end
@@ -304,9 +323,9 @@ class Condenser
304
323
  @content_types = result[:content_type]
305
324
  @digest = result[:digest]
306
325
  @digest_name = result[:digest_name]
307
- @linked_assets = result[:linked_assets]
308
- @process_dependencies = result[:process_dependencies]
309
- @export_dependencies = result[:export_dependencies]
326
+ @linked_assets = Set.new(result[:linked_assets])
327
+ @process_dependencies = Set.new(result[:process_dependencies])
328
+ @export_dependencies = Set.new(result[:export_dependencies])
310
329
  @default_export = result[:default_export]
311
330
  @exports = result[:exports]
312
331
  @processors = result[:processors]
@@ -315,6 +334,17 @@ class Condenser
315
334
  @processed = true
316
335
  end
317
336
 
337
+ def normialize_dependency_names(deps)
338
+ deps.map do |fn|
339
+ if fn.is_a?(String)
340
+ dirname, basename, extensions, mime_types = @environment.decompose_path(fn, source_file)
341
+ [dirname ? File.join(dirname, basename) : basename, mime_types.empty? ? @content_types : mime_types]
342
+ else
343
+ fn
344
+ end
345
+ end
346
+ end
347
+
318
348
  def export
319
349
  return @export if @export
320
350
 
@@ -335,11 +365,15 @@ class Condenser
335
365
  export_dependencies: []
336
366
  }
337
367
 
338
- if exporter = @environment.exporters[content_type]
339
- exporter.call(@environment, data)
368
+ if @environment.exporters.has_key?(content_type)
369
+ @environment.exporters[content_type].each do |exporter|
370
+ @environment.logger.info { "Exporting #{self.filename} with #{exporter.name}" }
371
+ exporter.call(@environment, data)
372
+ end
340
373
  end
341
374
 
342
375
  if minifier = @environment.minifier_for(content_type)
376
+ @environment.logger.info { "Minifing #{self.filename} with #{minifier.name}" }
343
377
  minifier.call(@environment, data)
344
378
  end
345
379
 
@@ -22,6 +22,10 @@ class Condenser
22
22
  else
23
23
  @semaphore = Mutex.new
24
24
  @listener = Listen.to(*path) do |modified, added, removed|
25
+ modified = Set.new(modified)
26
+ added = Set.new(added)
27
+ removed = Set.new(removed)
28
+
25
29
  @semaphore.synchronize do
26
30
  @logger.debug { "build cache semaphore locked by #{Thread.current.object_id}" }
27
31
  @logger.debug do
@@ -33,7 +37,7 @@ class Condenser
33
37
  end
34
38
 
35
39
  globs = []
36
- (added + removed).each do |file|
40
+ (added + removed + modified).each do |file|
37
41
  globs << file.match(/([^\.]+)(\.|$)/).to_a[1]
38
42
  if path_match = @path.find { |p| file.start_with?(p) }
39
43
  a = file.delete_prefix(path_match).match(/([^\.]+)(\.|$)/).to_a[1]
@@ -122,16 +126,25 @@ class Condenser
122
126
 
123
127
  def []=(value, assets)
124
128
  @lookup_cache[value] = assets
129
+
130
+ if @fetching.nil?
131
+ begin
132
+ assets.each do |asset|
133
+ @fetching = Set.new
134
+ asset.all_process_dependencies(@fetching).each do |pd|
135
+ @process_dependencies[pd] ||= Set.new
136
+ @process_dependencies[pd] << asset
137
+ end
125
138
 
126
- assets.each do |asset|
127
- asset.all_process_dependencies.each do |pd|
128
- @process_dependencies[pd] ||= Set.new
129
- @process_dependencies[pd] << asset
130
- end
139
+ @fetching = Set.new
140
+ asset.all_export_dependencies(@fetching).each do |pd|
141
+ @export_dependencies[pd] ||= Set.new
131
142
 
132
- asset.all_export_dependencies.each do |pd|
133
- @export_dependencies[pd] ||= Set.new
134
- @export_dependencies[pd] << asset
143
+ @export_dependencies[pd] << asset
144
+ end
145
+ end
146
+ ensure
147
+ @fetching = nil
135
148
  end
136
149
  end
137
150
  end
@@ -35,7 +35,7 @@ class Condenser
35
35
  end
36
36
  end
37
37
 
38
- attr_reader :environment, :filename
38
+ attr_reader :environment, :filename, :links, :dependencies
39
39
 
40
40
  def initialize(environment)
41
41
  @environment = environment
@@ -110,29 +110,14 @@ class Condenser
110
110
  # `depend_on` allows you to state a dependency on a file without
111
111
  # including it.
112
112
  #
113
- # This is used for caching purposes. Any changes made to
114
- # the dependency file will invalidate the cache of the
115
- # source file.
116
- def depend_on(path)
117
- if environment.absolute_path?(path) && environment.stat(path)
118
- @dependencies << environment.build_file_digest_uri(path)
119
- else
120
- resolve(path)
121
- end
122
- nil
123
- end
124
-
125
- # `depend_on_asset` allows you to state an asset dependency
126
- # without including it.
127
- #
128
113
  # This is used for caching purposes. Any changes that would
129
114
  # invalidate the dependency asset will invalidate the source
130
- # file. Unlike `depend_on`, this will recursively include
131
- # the target asset's dependencies.
132
- def depend_on_asset(path)
133
- asset = environment.find!(path)
134
- @dependencies << asset.source_file
135
- asset
115
+ # file.
116
+ def depend_on(path)
117
+ d = environment.decompose_path(path)
118
+ @dependencies << [File.join(*d[0], d[1]), [d[3]]]
119
+
120
+ nil
136
121
  end
137
122
 
138
123
  # `depend_on_env` allows you to state a dependency on an environment
@@ -150,9 +135,8 @@ class Condenser
150
135
  #
151
136
  # Returns an Asset or nil.
152
137
  def link_asset(path)
153
- asset = depend_on_asset(path)
154
- @links << asset.path
155
- asset
138
+ depend_on(path)
139
+ @links << path
156
140
  end
157
141
 
158
142
  # Returns a `data:` URI with the contents of the asset at the specified
@@ -3,7 +3,7 @@ module Condenser::ParseHelpers
3
3
  attr_accessor :matched
4
4
 
5
5
  def eos?
6
- @index >= @source.size
6
+ @index >= (@source.size - 1)
7
7
  end
8
8
 
9
9
  def scan_until(r)
@@ -55,4 +55,11 @@ module Condenser::ParseHelpers
55
55
  "#{lineno.to_s.rjust(4)}: " + @source[start..uptop] + "\n #{'-'* (@index-1-start)}#{'^'*(@matched.length)}"
56
56
  end
57
57
 
58
+ def gobble(r)
59
+ m = @source.match(r, @index)
60
+ if m&.begin(0) == @index
61
+ scan_until(r)
62
+ end
63
+ end
64
+
58
65
  end
@@ -71,7 +71,9 @@ class Condenser
71
71
 
72
72
  @data[asset.filename] = export.to_json
73
73
  outputs = export.write(@dir)
74
- asset.linked_assets.each { |a| outputs += add_asset(a) }
74
+ asset.linked_assets.each do |la|
75
+ @environment.resolve(la).each { |a| outputs += add_asset(a) }
76
+ end
75
77
  outputs
76
78
  end
77
79
 
@@ -98,11 +98,16 @@ class Condenser
98
98
  end
99
99
 
100
100
  def register_exporter(mime_type, engine)
101
- @exporters[mime_type] = engine
101
+ @exporters[mime_type] ||= []
102
+ @exporters[mime_type] << engine
102
103
  end
103
104
 
104
- def unregister_exporter(mime_type, engine)
105
- @exporters[mime_type] = nil
105
+ def unregister_exporter(mime_type, engine=nil)
106
+ if engine.nil?
107
+ @exporters[mime_type].clear
108
+ else
109
+ @exporters[mime_type]&.reject! { |e| e == engine || e.is_a?(engine) }
110
+ end
106
111
  end
107
112
 
108
113
  def register_minifier(mime_type, engine)
@@ -4,7 +4,7 @@ class Condenser::BabelProcessor < Condenser::NodeProcessor
4
4
 
5
5
  attr_accessor :options
6
6
 
7
- def initialize(dir = nil, options = {})
7
+ def initialize(dir = nil, **options)
8
8
  super(dir)
9
9
 
10
10
  options[:plugins] ||= [
@@ -95,23 +95,17 @@ class Condenser::BabelProcessor < Condenser::NodeProcessor
95
95
  ImportDeclaration(path, state) {
96
96
  imports.push(path.node.source.value);
97
97
  },
98
- ExportDefaultDeclaration(path, state) {
98
+
99
+ ExportDeclaration(path, state) {
99
100
  hasExports = true;
100
- defaultExport = true;
101
+ if (path.node.source) {
102
+ imports.push(path.node.source.value);
103
+ }
101
104
  },
102
- ExportDefaultSpecifier(path, state) {
103
- hasExports = true;
105
+
106
+ ExportDefaultDeclaration(path, state) {
104
107
  defaultExport = true;
105
- },
106
- ExportAllDeclaration(path, state) {
107
- hasExports = true;
108
- },
109
- ExportNamedDeclaration(path, state) {
110
- hasExports = true;
111
- },
112
- ExportSpecifier(path, state) {
113
- hasExports = true;
114
- },
108
+ }
115
109
  }
116
110
  };
117
111
  });
@@ -0,0 +1,81 @@
1
+ class Condenser::CSSMediaCombinerProcessor
2
+
3
+ include Condenser::ParseHelpers
4
+
5
+ def self.setup(env)
6
+ end
7
+
8
+ def self.call(environment, input)
9
+ new.call(environment, input)
10
+ end
11
+
12
+ def reduce_media_query(queries)
13
+ output = ''
14
+ queries.each do |query, contents|
15
+ output << query if query
16
+ output << if contents.is_a?(Hash)
17
+ reduce_media_query(contents)
18
+ else
19
+ contents + '}'
20
+ end
21
+ end
22
+ output
23
+ end
24
+
25
+ def call(environment, input)
26
+ seek(0)
27
+ @sourcefile = input[:source_file]
28
+ @source = input[:source]
29
+ @stack = []
30
+ @selectors = []
31
+ @media_queries = {}
32
+
33
+ input[:source] = ''
34
+ while !eos?
35
+ output = if @selectors.empty?
36
+ input[:source]
37
+ else
38
+ (@selectors[0...-1].reduce(@media_queries) { |hash, selector| hash[selector] ||= {} }[@selectors.last] ||= '')
39
+ end
40
+
41
+ case @stack.last
42
+ when :media_query
43
+ scan_until(/(@media[^\{]*{|\{|\})/)
44
+ case matched
45
+ when '{'
46
+ output << pre_match << matched
47
+ @stack << :statement
48
+ when '}'
49
+ output << pre_match
50
+ @stack.pop
51
+ @selectors.pop
52
+ else
53
+ output << pre_match
54
+ @selectors << matched.squish
55
+ @stack << :media_query
56
+ end
57
+ when :statement
58
+ scan_until(/(\{|\})/)
59
+ output << pre_match << matched
60
+ case matched
61
+ when '{'
62
+ @stack << :statement
63
+ when '}'
64
+ @stack.pop
65
+ end
66
+ else
67
+ case scan_until(/(@media[^\{]*{|\Z)/)
68
+ when ''
69
+ output << pre_match
70
+ else
71
+ output << pre_match
72
+ @selectors << matched.squish
73
+ @stack << :media_query
74
+ end
75
+ end
76
+ end
77
+
78
+ input[:source] << reduce_media_query(@media_queries)
79
+ end
80
+
81
+ end
@@ -1,5 +1,3 @@
1
- require 'condenser/helpers/parse_helpers'
2
-
3
1
  class Condenser::JSAnalyzer
4
2
 
5
3
  include Condenser::ParseHelpers
@@ -17,7 +15,7 @@ class Condenser::JSAnalyzer
17
15
  @source = input[:source]
18
16
  @stack = [:main]
19
17
 
20
- input[:export_dependencies] ||= []
18
+ input[:export_dependencies] ||= Set.new
21
19
 
22
20
  scan_until(/\A(\/\/[^\n]*(\n|\z))*/)
23
21
  if matched
@@ -30,6 +28,7 @@ class Condenser::JSAnalyzer
30
28
  end
31
29
 
32
30
  last_postion = nil
31
+ last_stack = nil
33
32
  while !eos?
34
33
  case @stack.last
35
34
 
@@ -55,6 +54,32 @@ class Condenser::JSAnalyzer
55
54
  scan_until(/(;|\n)/)
56
55
  @stack.pop
57
56
 
57
+ when :export
58
+ input[:exports] = true;
59
+ input[:default_export] = true if gobble(/\s+default/)
60
+ gobble(/\s+/)
61
+
62
+ if gobble(/\{/)
63
+ @stack << :brackets
64
+ elsif gobble(/\*/)
65
+ @stack << :export_from
66
+ else
67
+ @stack.pop
68
+ end
69
+
70
+ when :export_from
71
+ if gobble(/\s+from\s+/)
72
+ scan_until(/\"|\'/)
73
+ input[:export_dependencies] << case matched
74
+ when '"'
75
+ double_quoted_value
76
+ when "'"
77
+ single_quoted_value
78
+ end
79
+ end
80
+ @stack.pop
81
+ @stack.pop
82
+
58
83
  else
59
84
  scan_until(/(\/\/|\/\*|\/|\(|\)|\{|\}|\"|\'|\`|export|import|\z)/)
60
85
 
@@ -87,15 +112,17 @@ class Condenser::JSAnalyzer
87
112
  @stack.push :brackets
88
113
  when '}'
89
114
  case @stack.last
90
- when :brackets, :tick_statment
115
+ when :tick_statment
116
+ @stack.pop
117
+ when :brackets
91
118
  @stack.pop
119
+ @stack << :export_from if @stack.last == :export
92
120
  else
93
121
  raise unexptected_token("}")
94
122
  end
95
123
  when 'export'
96
124
  if @stack.last == :main
97
- input[:exports] = true;
98
- input[:default_export] = true if next_word == 'default'
125
+ @stack << :export
99
126
  end
100
127
  when 'import'
101
128
  if @stack.last == :main
@@ -106,10 +133,13 @@ class Condenser::JSAnalyzer
106
133
  end
107
134
  end
108
135
 
109
- if last_postion == @index
136
+ if last_postion == @index && last_stack == @stack.last
137
+ syntax_error = Condenser::SyntaxError.new("Error parsing JS file with JSAnalyzer")
138
+ syntax_error.instance_variable_set(:@path, @sourcefile)
110
139
  raise Condenser::SyntaxError, "Error parsing JS file with JSAnalyzer"
111
140
  else
112
141
  last_postion = @index
142
+ last_stack = @stack.last
113
143
  end
114
144
  end
115
145
  end
@@ -123,7 +153,10 @@ class Condenser::JSAnalyzer
123
153
  message << "\n#{lineno.to_s.rjust(4)}: " << @source[start..uptop]
124
154
  message << "\n #{'-'* (@index-1-start)}#{'^'*(@matched.length)}"
125
155
  message << "\n"
126
- Condenser::SyntaxError.new(message)
156
+
157
+ syntax_error = Condenser::SyntaxError.new(message)
158
+ syntax_error.instance_variable_set(:@path, @sourcefile)
159
+ syntax_error
127
160
  end
128
161
 
129
162
  def double_quoted_value
@@ -60,6 +60,7 @@ class Condenser
60
60
  lineno = lines[0][/\((\d+):\d+\)$/, 1] if lines[0]
61
61
  lineno ||= 1
62
62
  error.set_backtrace(["#{source_file}:#{lineno}"] + caller)
63
+ error.instance_variable_set(:@path, source_file)
63
64
  error
64
65
  end
65
66
 
@@ -34,7 +34,7 @@ class Condenser::PurgeCSSProcessor < Condenser::NodeProcessor
34
34
  const { PurgeCSS } = require("#{File.join(npm_module_path('purgecss'))}")
35
35
  const options = #{@options.to_json}
36
36
  options.css = [{
37
- raw: #{input[:source].inspect}
37
+ raw: #{JSON.dump(input[:source])}
38
38
  }]
39
39
  if(options.safelist) {
40
40
  options.safelist = options.safelist.map(s => {
@@ -44,13 +44,16 @@ class Condenser::PurgeCSSProcessor < Condenser::NodeProcessor
44
44
  return s
45
45
  })
46
46
  }
47
+ if(!options.defaultExtractor) {
48
+ options.defaultExtractor = content => content.match(/[\\w\\-\\/\\:]+(?<!:)/g) || []
49
+ }
47
50
  const result = new PurgeCSS().purge(options)
48
51
  try {
49
52
  result.then(
50
53
  r => console.log(JSON.stringify({
51
54
  success: r[0]
52
55
  })),
53
- function() {console.log(JSON.stringify({'error': arguments}))}
56
+ function(e) {console.log(JSON.stringify({'error': [e.name, e.message, e.stack]}))}
54
57
  )
55
58
  } catch(e) {
56
59
  console.log(JSON.stringify({'error': [e.name, e.message, e.stack]}));
@@ -60,11 +63,10 @@ class Condenser::PurgeCSSProcessor < Condenser::NodeProcessor
60
63
  if result['error'][0] == 'SyntaxError'
61
64
  raise exec_syntax_error(result['error'][1], "/assets/#{input[:filename]}")
62
65
  else
63
- raise exec_runtime_error(result['error'][0] + ': ' + result['error'][1])
66
+ raise exec_runtime_error(result['error']["0"]["name"] + ": " + result['error']["0"]["reason"])
64
67
  end
65
68
  else
66
69
  input[:source] = result["success"]["css"]
67
70
  end
68
71
  end
69
-
70
72
  end