condenser 1.0 → 1.3

Files changed (53)
  1. checksums.yaml +4 -4
  2. data/lib/condenser/asset.rb +41 -17
  3. data/lib/condenser/build_cache.rb +1 -1
  4. data/lib/condenser/context.rb +9 -25
  5. data/lib/condenser/helpers/parse_helpers.rb +1 -1
  6. data/lib/condenser/manifest.rb +3 -1
  7. data/lib/condenser/minifiers/terser_minifier.rb +7 -9
  8. data/lib/condenser/pipeline.rb +8 -3
  9. data/lib/condenser/processors/babel_processor.rb +1 -1
  10. data/lib/condenser/processors/css_media_combiner_processor.rb +81 -0
  11. data/lib/condenser/processors/js_analyzer.rb +0 -2
  12. data/lib/condenser/processors/node_processor.rb +4 -0
  13. data/lib/condenser/processors/purgecss_processor.rb +72 -0
  14. data/lib/condenser/processors/rollup_processor.rb +37 -37
  15. data/lib/condenser/resolve.rb +1 -3
  16. data/lib/condenser/templating_engine/ejs.rb +1 -1
  17. data/lib/condenser/transformers/dart_sass_transformer.rb +285 -0
  18. data/lib/condenser/transformers/jst_transformer.rb +67 -17
  19. data/lib/condenser/transformers/sass/functions.rb +133 -0
  20. data/lib/condenser/transformers/sass/importer.rb +48 -0
  21. data/lib/condenser/transformers/sass.rb +4 -0
  22. data/lib/condenser/transformers/sass_transformer.rb +124 -281
  23. data/lib/condenser/transformers/svg_transformer/base.rb +26 -0
  24. data/lib/condenser/transformers/svg_transformer/tag.rb +54 -0
  25. data/lib/condenser/transformers/svg_transformer/template.rb +151 -0
  26. data/lib/condenser/transformers/svg_transformer/template_error.rb +2 -0
  27. data/lib/condenser/transformers/svg_transformer/value.rb +13 -0
  28. data/lib/condenser/transformers/svg_transformer/var_generator.rb +10 -0
  29. data/lib/condenser/transformers/svg_transformer.rb +19 -0
  30. data/lib/condenser/version.rb +1 -1
  31. data/lib/condenser.rb +18 -5
  32. data/test/cache_stores/file_store_test.rb +1 -1
  33. data/test/cache_test.rb +73 -1
  34. data/test/dependency_test.rb +2 -2
  35. data/test/manifest_test.rb +34 -0
  36. data/test/minifiers/terser_minifier_test.rb +2 -3
  37. data/test/minifiers/uglify_minifier_test.rb +0 -1
  38. data/test/postprocessors/css_media_combiner_test.rb +107 -0
  39. data/test/postprocessors/purgecss_test.rb +145 -0
  40. data/test/preprocessor/babel_test.rb +702 -268
  41. data/test/preprocessor/js_analyzer_test.rb +0 -2
  42. data/test/processors/rollup_test.rb +50 -20
  43. data/test/resolve_test.rb +8 -9
  44. data/test/server_test.rb +6 -1
  45. data/test/templates/ejs_test.rb +2 -11
  46. data/test/templates/erb_test.rb +0 -5
  47. data/test/test_helper.rb +4 -2
  48. data/test/transformers/dart_scss_test.rb +139 -0
  49. data/test/transformers/jst_test.rb +165 -21
  50. data/test/transformers/scss_test.rb +14 -0
  51. data/test/transformers/svg_test.rb +40 -0
  52. metadata +26 -6
  53. data/lib/condenser/transformers/sass_transformer/importer.rb +0 -50
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 24f2e6a83798d231764a5940648a38a65f72c34e55e70e833292397bb98347a8
- data.tar.gz: 384ceedcef19c3137ad969a255fe8519eb37ac20116c54879ccaa19a05a331c3
+ metadata.gz: 52e335c087aab2b9bb3ef4540e54bfc5b8de0a888cab8f095c2f1dd778458bd4
+ data.tar.gz: 566502264a1049f4ff5e33154caa56ef64ed74d14a8d72abd5e31628d59986c1
  SHA512:
- metadata.gz: 906d4e278f46053c31bd55943fab2100468f604aa43472a98993ea71350384abde3b0181f08d8357c814f020c897a359fe496397012a402602f0cdc8777695ae
- data.tar.gz: f96cfdc8570e74939b71a4257a3b8647495aac682f49cc5a60e39f653367fbf6e73fe77cd5965e1f92b4cab899cf3262be7533158cf4760d2e7e166a244fe0e7
+ metadata.gz: 5c0fdf66cb6b17ff6a519af5205061f37e3af2f12896e45c2798154c46ba2db3dea57e219595b33e5433a82db17751bc8444453c5bc0371fb27013e97157352d
+ data.tar.gz: 2486bd59e00c2d70ea683d2f69f1a71b0981b557144a08fd4965f9fe38a0c2da68d79e1c6fcba359718af3f9c26f4978619c3bb651f394cbb0db0172c450096f
data/lib/condenser/asset.rb CHANGED
@@ -125,7 +125,7 @@ class Condenser

  def all_process_dependencies
  f = [@source_file]
- all_dependenies(process_dependencies, [], :process_dependencies) do |dep|
+ all_dependenies(process_dependencies, Set.new, :process_dependencies) do |dep|
  f << dep.source_file
  end
  f
@@ -133,7 +133,7 @@ class Condenser

  def all_export_dependencies
  f = [@source_file]
- all_dependenies(export_dependencies, [], :export_dependencies) do |dep|
+ all_dependenies(export_dependencies, Set.new, :export_dependencies) do |dep|
  f << dep.source_file
  end
  f
@@ -153,13 +153,13 @@ class Condenser
  return @pcv if @pcv

  f = []
- all_dependenies(process_dependencies, [], :process_dependencies) do |dep|
+ all_dependenies(process_dependencies, Set.new, :process_dependencies) do |dep|
  f << [
  @environment.base ? dep.source_file.delete_prefix(@environment.base) : dep.source_file,
  Digest::SHA256.file(dep.source_file).hexdigest
  ]
  end
-
+
  @pcv = Digest::SHA1.base64digest(JSON.generate(f))
  end

@@ -167,7 +167,7 @@ class Condenser
  return @ecv if @ecv

  f = []
- all_dependenies(export_dependencies, [], :export_dependencies) do |dep|
+ all_dependenies(export_dependencies, Set.new, :export_dependencies) do |dep|
  f << [
  @environment.base ? dep.source_file.delete_prefix(@environment.base) : dep.source_file,
  Digest::SHA256.file(dep.source_file).hexdigest
@@ -206,9 +206,9 @@ class Condenser
  content_type: mime_types,

  map: nil,
- linked_assets: [],
- process_dependencies: [],
- export_dependencies: [],
+ linked_assets: Set.new,
+ process_dependencies: Set.new,
+ export_dependencies: Set.new,

  processors: Set.new
  }
@@ -241,6 +241,7 @@ class Condenser
  data[:processors] << processor_klass.name
  @environment.load_processors(processor_klass)

+ @environment.logger.info { "Pre Processing #{self.filename} with #{processor.name}" }
  processor.call(@environment, data)
  end
  end
@@ -257,6 +258,17 @@ class Condenser
  data[:content_type] << to_mime_type
  end
  end
+
+ if @environment.postprocessors.has_key?(data[:content_type].last)
+ @environment.postprocessors[data[:content_type].last].each do |processor|
+ processor_klass = (processor.is_a?(Class) ? processor : processor.class)
+ data[:processors] << processor_klass.name
+ @environment.load_processors(processor_klass)
+
+ @environment.logger.info { "Post Processing #{self.filename} with #{processor.name}" }
+ processor.call(@environment, data)
+ end
+ end

  if mime_types != @content_types
  raise ContentTypeMismatch, "mime type(s) \"#{@content_types.join(', ')}\" does not match requested mime type(s) \"#{data[:mime_types].join(', ')}\""
@@ -273,15 +285,14 @@ class Condenser
  @content_types = data[:content_type]
  @digest = data[:digest]
  @digest_name = data[:digest_name]
- @linked_assets = data[:linked_assets]
- @process_dependencies = data[:process_dependencies]
- @export_dependencies = data[:export_dependencies]
+ @linked_assets = Set.new(data[:linked_assets])
+ @process_dependencies = Set.new(data[:process_dependencies])
+ @export_dependencies = Set.new(data[:export_dependencies])
  @default_export = data[:default_export]
  @exports = data[:exports]
  @processors = data[:processors]
  @processors_loaded = true
  @processed = true
-
  data
  end
  end
@@ -292,9 +303,9 @@ class Condenser
  @content_types = result[:content_type]
  @digest = result[:digest]
  @digest_name = result[:digest_name]
- @linked_assets = result[:linked_assets]
- @process_dependencies = result[:process_dependencies]
- @export_dependencies = result[:export_dependencies]
+ @linked_assets = Set.new(result[:linked_assets])
+ @process_dependencies = Set.new(result[:process_dependencies])
+ @export_dependencies = Set.new(result[:export_dependencies])
  @default_export = result[:default_export]
  @exports = result[:exports]
  @processors = result[:processors]
@@ -323,11 +334,15 @@ class Condenser
  export_dependencies: []
  }

- if exporter = @environment.exporters[content_type]
- exporter.call(@environment, data)
+ if @environment.exporters.has_key?(content_type)
+ @environment.exporters[content_type].each do |exporter|
+ @environment.logger.info { "Exporting #{self.filename} with #{exporter.name}" }
+ exporter.call(@environment, data)
+ end
  end

  if minifier = @environment.minifier_for(content_type)
+ @environment.logger.info { "Minifing #{self.filename} with #{minifier.name}" }
  minifier.call(@environment, data)
  end

@@ -335,6 +350,15 @@ class Condenser
  data[:digest_name] = @environment.digestor.name.sub(/^.*::/, '').downcase
  data
  end
+
+ if @environment.build_cache.listening
+ # TODO we could skip file and all their depencies here if they are
+ # already in build_cache.@export_dependencies
+ all_export_dependencies.each do |sf|
+ @environment.build_cache.instance_variable_get(:@export_dependencies)[sf]&.add(self)
+ end
+ end
+
  Export.new(@environment, data)
  end
  end
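Several of these asset.rb changes replace plain arrays with `Set`s, so repeated dependencies collapse to a single entry and the visited collection handed to `all_dependenies` can stop a cyclic graph from recursing forever. A minimal illustrative sketch of that traversal pattern (not the gem's code; the graph is made up):

```ruby
require 'set'

# Hypothetical dependency graph with a repeated edge and a cycle:
# a -> b, a -> c, b -> c, c -> a
DEPS = { 'a' => %w[b c], 'b' => %w[c], 'c' => %w[a] }.freeze

# Visit each dependency once: Set#add? returns nil when the element is
# already present, which both de-duplicates and breaks the cycle.
def each_dependency(node, visited = Set.new, &block)
  DEPS.fetch(node, []).each do |dep|
    next unless visited.add?(dep)
    block.call(dep)
    each_dependency(dep, visited, &block)
  end
end

each_dependency('a') { |dep| puts dep }  # => b, c, a (each exactly once)
```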
data/lib/condenser/build_cache.rb CHANGED
@@ -33,7 +33,7 @@ class Condenser
  end

  globs = []
- (added + removed).each do |file|
+ (added + removed + modified).each do |file|
  globs << file.match(/([^\.]+)(\.|$)/).to_a[1]
  if path_match = @path.find { |p| file.start_with?(p) }
  a = file.delete_prefix(path_match).match(/([^\.]+)(\.|$)/).to_a[1]
data/lib/condenser/context.rb CHANGED
@@ -35,7 +35,7 @@ class Condenser
  end
  end

- attr_reader :environment, :filename
+ attr_reader :environment, :filename, :links, :dependencies

  def initialize(environment)
  @environment = environment
@@ -110,29 +110,14 @@ class Condenser
  # `depend_on` allows you to state a dependency on a file without
  # including it.
  #
- # This is used for caching purposes. Any changes made to
- # the dependency file will invalidate the cache of the
- # source file.
- def depend_on(path)
- if environment.absolute_path?(path) && environment.stat(path)
- @dependencies << environment.build_file_digest_uri(path)
- else
- resolve(path)
- end
- nil
- end
-
- # `depend_on_asset` allows you to state an asset dependency
- # without including it.
- #
  # This is used for caching purposes. Any changes that would
  # invalidate the dependency asset will invalidate the source
- # file. Unlike `depend_on`, this will recursively include
- # the target asset's dependencies.
- def depend_on_asset(path)
- asset = environment.find!(path)
- @dependencies << asset.source_file
- asset
+ # file.
+ def depend_on(path)
+ d = environment.decompose_path(path)
+ @dependencies << [File.join(*d[0], d[1]), [d[3]]]
+
+ nil
  end

  # `depend_on_env` allows you to state a dependency on an environment
@@ -150,9 +135,8 @@ class Condenser
  #
  # Returns an Asset or nil.
  def link_asset(path)
- asset = depend_on_asset(path)
- @links << asset.path
- asset
+ depend_on(path)
+ @links << path
  end

  # Returns a `data:` URI with the contents of the asset at the specified
data/lib/condenser/helpers/parse_helpers.rb CHANGED
@@ -3,7 +3,7 @@ module Condenser::ParseHelpers
  attr_accessor :matched

  def eos?
- @index >= @source.size
+ @index >= (@source.size - 1)
  end

  def scan_until(r)
data/lib/condenser/manifest.rb CHANGED
@@ -71,7 +71,9 @@ class Condenser

  @data[asset.filename] = export.to_json
  outputs = export.write(@dir)
- asset.linked_assets.each { |a| outputs += add_asset(a) }
+ asset.linked_assets.each do |la|
+ @environment.resolve(la).each { |a| outputs += add_asset(a) }
+ end
  outputs
  end

data/lib/condenser/minifiers/terser_minifier.rb CHANGED
@@ -5,9 +5,8 @@ class Condenser::TerserMinifier < Condenser::NodeProcessor
  npm_install('terser')

  @options = options.merge({
- warnings: true,
- sourceMap: false,
- keep_classnames: true
+ keep_classnames: true,
+ keep_fnames: true
  }).freeze
  end

@@ -28,13 +27,12 @@ class Condenser::TerserMinifier < Condenser::NodeProcessor
  const options = #{JSON.generate(opts)};


- var result = Terser.minify(source, options);
- if (result.error !== undefined) {
- console.log(JSON.stringify({'error': result.error.name + ": " + result.error.message}));
- process.exit(1);
- } else {
+ Terser.minify(source, options).then((result) => {
  console.log(JSON.stringify(result));
- }
+ }, (error) => {
+ console.log(JSON.stringify({'error': error.name + ": " + error.message}));
+ process.exit(1);
+ });
  JS

  exec_runtime_error(result['error']) if result['error']
data/lib/condenser/pipeline.rb CHANGED
@@ -98,11 +98,16 @@ class Condenser
  end

  def register_exporter(mime_type, engine)
- @exporters[mime_type] = engine
+ @exporters[mime_type] ||= []
+ @exporters[mime_type] << engine
  end

- def unregister_exporter(mime_type, engine)
- @exporters[mime_type] = nil
+ def unregister_exporter(mime_type, engine=nil)
+ if engine.nil?
+ @exporters[mime_type].clear
+ else
+ @exporters[mime_type]&.reject! { |e| e == engine || e.is_a?(engine) }
+ end
  end

  def register_minifier(mime_type, engine)
data/lib/condenser/processors/babel_processor.rb CHANGED
@@ -4,7 +4,7 @@ class Condenser::BabelProcessor < Condenser::NodeProcessor

  attr_accessor :options

- def initialize(dir = nil, options = {})
+ def initialize(dir = nil, **options)
  super(dir)

  options[:plugins] ||= [
data/lib/condenser/processors/css_media_combiner_processor.rb ADDED
@@ -0,0 +1,81 @@
+ class Condenser::CSSMediaCombinerProcessor
+
+ include Condenser::ParseHelpers
+
+ def self.setup(env)
+ end
+
+ def self.call(environment, input)
+ new.call(environment, input)
+ end
+
+ def reduce_media_query(queries)
+ output = ''
+ queries.each do |query, contents|
+ output << query if query
+ output << if contents.is_a?(Hash)
+ reduce_media_query(contents)
+ else
+ contents + '}'
+ end
+ end
+ output
+ end
+
+ def call(environment, input)
+ seek(0)
+ @sourcefile = input[:source_file]
+ @source = input[:source]
+ @stack = []
+ @selectors = []
+ @media_queries = {}
+
+ input[:source] = ''
+ while !eos?
+ output = if @selectors.empty?
+ input[:source]
+ else
+ (@selectors[0...-1].reduce(@media_queries) { |hash, selector| hash[selector] ||= {} }[@selectors.last] ||= '')
+ end
+
+ case @stack.last
+ when :media_query
+ scan_until(/(@media[^\{]*{|\{|\})/)
+ case matched
+ when '{'
+ output << pre_match << matched
+ @stack << :statement
+ when '}'
+ output << pre_match
+ @stack.pop
+ @selectors.pop
+ else
+ output << pre_match
+ @selectors << matched.squish
+ @stack << :media_query
+ end
+ when :statement
+ scan_until(/(\{|\})/)
+ output << pre_match << matched
+ case matched
+ when '{'
+ @stack << :statement
+ when '}'
+ @stack.pop
+ end
+ else
+ case scan_until(/(@media[^\{]*{|\Z)/)
+ when ''
+ output << pre_match
+ else
+ output << pre_match
+ @selectors << matched.squish
+ @stack << :media_query
+ end
+ end
+ end
+
+ input[:source] << reduce_media_query(@media_queries)
+ end
+
+ end
data/lib/condenser/processors/js_analyzer.rb CHANGED
@@ -1,5 +1,3 @@
- require 'condenser/helpers/parse_helpers'
-
  class Condenser::JSAnalyzer

  include Condenser::ParseHelpers
data/lib/condenser/processors/node_processor.rb CHANGED
@@ -8,6 +8,10 @@ class Condenser

  def self.setup(environment)
  end
+
+ def name
+ self.class.name
+ end

  def self.call(environment, input)
  @instances ||= {}
data/lib/condenser/processors/purgecss_processor.rb ADDED
@@ -0,0 +1,72 @@
+ require 'json'
+
+ class Condenser::PurgeCSSProcessor < Condenser::NodeProcessor
+
+ attr_accessor :options
+
+ # Public: initialize with custom options.
+ #
+ # dir - String (path to node_modules directory)
+ # options - Hash
+ # content - Array - html files to process
+ # ex. [File.expand_path('./docs-src/**/*.erb'), File.expand_path('./docs-src/assets/javascripts/**/*.js')]
+ #
+ # Options are passed to PurgeCSS checkout [PurgeCSS Configurations](https://purgecss.com/configuration.html)
+ #
+
+ def self.call(environment, input)
+ @instances ||= {}
+ @instances[environment] ||= new(environment.npm_path, {
+ content: [File.join(environment.base, '**/*.html'), File.join(environment.base, '**/*.js')]
+ })
+ @instances[environment].call(environment, input)
+ end
+
+
+ def initialize(dir = nil, options = {})
+ super(dir)
+ @options = options
+ npm_install('purgecss')
+ end
+
+ def call(environment, input)
+ result = exec_runtime(<<-JS)
+ const { PurgeCSS } = require("#{File.join(npm_module_path('purgecss'))}")
+ const options = #{@options.to_json}
+ options.css = [{
+ raw: #{JSON.dump(input[:source])}
+ }]
+ if(options.safelist) {
+ options.safelist = options.safelist.map(s => {
+ if(s[0] == "/" && s[s.length - 1] == "/") {
+ return new RegExp(s.slice(1, -1))
+ }
+ return s
+ })
+ }
+ if(!options.defaultExtractor) {
+ options.defaultExtractor = content => content.match(/[\\w\\-\\/\\:]+(?<!:)/g) || []
+ }
+ const result = new PurgeCSS().purge(options)
+ try {
+ result.then(
+ r => console.log(JSON.stringify({
+ success: r[0]
+ })),
+ function(e) {console.log(JSON.stringify({'error': [e.name, e.message, e.stack]}))}
+ )
+ } catch(e) {
+ console.log(JSON.stringify({'error': [e.name, e.message, e.stack]}));
+ }
+ JS
+ if result['error']
+ if result['error'][0] == 'SyntaxError'
+ raise exec_syntax_error(result['error'][1], "/assets/#{input[:filename]}")
+ else
+ raise exec_runtime_error(result['error']["0"]["name"] + ": " + result['error']["0"]["reason"])
+ end
+ else
+ input[:source] = result["success"]["css"]
+ end
+ end
+ end
data/lib/condenser/processors/rollup_processor.rb CHANGED
@@ -7,7 +7,7 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor

  def initialize(dir = nil, options = {})
  super(dir)
- npm_install('rollup', 'rollup-plugin-commonjs', 'rollup-plugin-node-resolve')
+ npm_install('rollup', '@rollup/plugin-commonjs', '@rollup/plugin-node-resolve')

  @options = options.freeze
  end
@@ -50,7 +50,12 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  return '';
  } catch(e) {
  if (e.name === "SyntaxError") {
- if (e.message.startsWith('Unexpected token { in JSON at position ')) {
+ if (e.message.startsWith('Unexpected non-whitespace character after JSON at position ')) {
+ var pos = parseInt(e.message.slice(59));
+ emitMessages(buffer.slice(0,pos));
+ return emitMessages(buffer.slice(pos));
+ } else if (e.message.startsWith('Unexpected token { in JSON at position ')) {
+ // This can be removed, once dropping support for node <= v18
  var pos = parseInt(e.message.slice(39));
  emitMessages(buffer.slice(0,pos));
  return emitMessages(buffer.slice(pos));
@@ -70,8 +75,8 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  });

  const rollup = require("#{npm_module_path('rollup')}");
- const commonjs = require("#{npm_module_path('rollup-plugin-commonjs')}");
- const nodeResolve = require("#{npm_module_path('rollup-plugin-node-resolve')}");
+ const commonjs = require("#{npm_module_path('@rollup/plugin-commonjs')}");
+ const nodeResolve = require("#{npm_module_path('@rollup/plugin-node-resolve')}").nodeResolve;
  var rid = 0;
  var renderStack = {};
  var nodeResolver = null;
@@ -95,9 +100,8 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  mainFields: ['module', 'main'],
  // modulesOnly: true,
  // preferBuiltins: false,
- customResolveOptions: {
- moduleDirectory: '#{npm_module_path}'
- }
+ moduleDirectories: [],
+ modulePaths: ['#{npm_module_path}']
  });
  }

@@ -105,9 +109,9 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  inputOptions.plugins = [];
  inputOptions.plugins.push({
  name: 'condenser',
- resolveId: function (importee, importer) {
+ resolveId: function (importee, importer, options) {
  if (importee.startsWith('\0') || (importer && importer.startsWith('\0'))) {
- return;
+ return null;
  }

  if (!(importer in renderStack)) {
@@ -115,24 +119,15 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  }

  return request('resolve', [importee, importer]).then((value) => {
- if (nodeResolver && (value === null || value === undefined)) {
- return nodeResolver.resolveId.call(this, importee, importer).then((value) => {
- if (!(value === null || value === undefined) && !renderStack[importer].includes(value.id)) {
- renderStack[importer].push(value.id);
- }
- return value;
- });
- }
-
- if (!(value === null || value === undefined) && !renderStack[importer].includes(value)) {
- renderStack[importer].push(value);
- }
- return value;
+ if (!(value === null || value === undefined) && !renderStack[importer].includes(value)) {
+ renderStack[importer].push(value);
+ }
+ return value;
  });
  },
  load: function(id) {
  if (id.startsWith('\0')) {
- return;
+ return null;
  }

  return request('load', [id]).then(function(value) {
@@ -140,17 +135,19 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  });
  }
  });
+
  inputOptions.plugins.push(nodeResolver);
  inputOptions.plugins.push(commonjs());

- inputOptions.plugins.push({
- name: 'nullHanlder',
- resolveId: function (importee, importer) {
- request('error', ["AssetNotFound", importee, importer, renderStack]).then(function(value) {
- process.exit(1);
- });
- }
- });
+ // inputOptions.plugins.push({
+ // name: 'nullHanlder',
+ // resolveId: function (importee, importer, options) {
+ // request('log', [importee, importer, options])
+ // // request('error', ["AssetNotFound", importee, importer, renderStack]).then(function(value) {
+ // // process.exit(1);
+ // // });
+ // }
+ // });

  const outputOptions = #{JSON.generate(output_options)};

@@ -204,10 +201,16 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  @entry
  elsif importee.start_with?('@babel/runtime') || importee.start_with?('core-js-pure') || importee.start_with?('regenerator-runtime')
  x = File.join(npm_module_path, importee.gsub(/^\.\//, File.dirname(importer) + '/')).sub('/node_modules/regenerator-runtime', '/node_modules/regenerator-runtime/runtime.js')
- x = "#{x}.js" if !x.end_with?('.js')
+ x = "#{x}.js" if !x.end_with?('.js', '.mjs')
+ File.file?(x) ? x : (x.delete_suffix('.js') + "/index.js")
+ elsif npm_module_path && importee&.start_with?(npm_module_path)
+ x = importee.end_with?('.js', '.mjs') ? importee : "#{importee}.js"
+ x = (x.delete_suffix('.js') + "/index.js") if !File.file?(x)
+ x
+ elsif importee.start_with?('.') && importer.start_with?(npm_module_path)
+ x = File.expand_path(importee, File.dirname(importer))
+ x = "#{x}.js" if !x.end_with?('.js', '.mjs')
  File.file?(x) ? x : (x.delete_suffix('.js') + "/index.js")
- elsif npm_module_path && importer.start_with?(npm_module_path)
- nil
  elsif importee.end_with?('*')
  File.join(File.dirname(importee), '*')
  else
@@ -217,8 +220,6 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  importee = message['args'].first
  if importee == @entry
  { code: @input[:source], map: @input[:map] }
- elsif importee.start_with?(npm_module_path)
- { code: File.read(importee), map: nil }
  elsif importee.end_with?('*')
  importees = @environment.resolve(importee, importer ? File.dirname(@entry == importer ? @input[:source_file] : importer) : nil, accept: @input[:content_types].last)
  code = ""
@@ -263,7 +264,6 @@ class Condenser::RollupProcessor < Condenser::NodeProcessor
  # when 'get_cache'
  # io.write(JSON.generate({rid: message['rid'], return: [(@environment.cache.get('rollup') || '{}')] }))
  end
-
  io.write(JSON.generate({rid: message['rid'], return: ret}))
  end
  end
data/lib/condenser/resolve.rb CHANGED
@@ -38,9 +38,7 @@ class Condenser
  end

  paths.each do |path|
- glob = path
- glob = File.join(glob, dirname) if dirname
- glob = File.join(glob, basename)
+ glob = File.join(*[path, dirname, basename].compact)
  glob << '.*' unless glob.end_with?('*')

  Dir.glob(glob).sort.each do |f|
data/lib/condenser/templating_engine/ejs.rb CHANGED
@@ -1,4 +1,4 @@
- class Condenser::EjsTemplare
+ class Condenser::EjsTemplate

  def self.setup(environment)
  require 'ejs' unless defined?(::EJS)