tapioca 0.5.6 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. checksums.yaml +4 -4
  2. data/README.md +114 -23
  3. data/lib/tapioca/cli.rb +188 -64
  4. data/lib/tapioca/compilers/dsl/active_record_associations.rb +94 -8
  5. data/lib/tapioca/compilers/dsl/active_record_columns.rb +5 -4
  6. data/lib/tapioca/compilers/dsl/active_record_relations.rb +703 -0
  7. data/lib/tapioca/compilers/dsl/active_record_scope.rb +43 -13
  8. data/lib/tapioca/compilers/dsl/active_record_typed_store.rb +2 -4
  9. data/lib/tapioca/compilers/dsl/base.rb +25 -42
  10. data/lib/tapioca/compilers/dsl/extensions/frozen_record.rb +29 -0
  11. data/lib/tapioca/compilers/dsl/frozen_record.rb +37 -0
  12. data/lib/tapioca/compilers/dsl/helper/active_record_constants.rb +27 -0
  13. data/lib/tapioca/compilers/dsl/param_helper.rb +52 -0
  14. data/lib/tapioca/compilers/dsl/rails_generators.rb +120 -0
  15. data/lib/tapioca/compilers/dsl_compiler.rb +32 -6
  16. data/lib/tapioca/compilers/sorbet.rb +2 -0
  17. data/lib/tapioca/compilers/symbol_table/symbol_generator.rb +47 -46
  18. data/lib/tapioca/executor.rb +79 -0
  19. data/lib/tapioca/gemfile.rb +23 -0
  20. data/lib/tapioca/generators/base.rb +11 -18
  21. data/lib/tapioca/generators/dsl.rb +33 -38
  22. data/lib/tapioca/generators/gem.rb +50 -29
  23. data/lib/tapioca/generators/init.rb +41 -16
  24. data/lib/tapioca/generators/todo.rb +6 -6
  25. data/lib/tapioca/helpers/cli_helper.rb +26 -0
  26. data/lib/tapioca/helpers/config_helper.rb +84 -0
  27. data/lib/tapioca/helpers/test/content.rb +51 -0
  28. data/lib/tapioca/helpers/test/isolation.rb +125 -0
  29. data/lib/tapioca/helpers/test/template.rb +34 -0
  30. data/lib/tapioca/internal.rb +3 -2
  31. data/lib/tapioca/rbi_ext/model.rb +12 -9
  32. data/lib/tapioca/reflection.rb +13 -0
  33. data/lib/tapioca/trackers/autoload.rb +70 -0
  34. data/lib/tapioca/trackers/constant_definition.rb +42 -0
  35. data/lib/tapioca/trackers/mixin.rb +78 -0
  36. data/lib/tapioca/trackers.rb +14 -0
  37. data/lib/tapioca/version.rb +1 -1
  38. data/lib/tapioca.rb +28 -2
  39. metadata +19 -7
  40. data/lib/tapioca/config.rb +0 -45
  41. data/lib/tapioca/config_builder.rb +0 -73
  42. data/lib/tapioca/constant_locator.rb +0 -40
@@ -85,26 +85,13 @@ module Tapioca
 
   sig { params(tree: RBI::Tree, symbol: String).void }
   def generate_from_symbol(tree, symbol)
-    constant = resolve_constant(symbol)
+    constant = constantize(symbol)
 
     return unless constant
 
     compile(tree, symbol, constant)
   end
 
-  sig do
-    params(
-      symbol: String,
-      inherit: T::Boolean,
-      namespace: Module
-    ).returns(BasicObject).checked(:never)
-  end
-  def resolve_constant(symbol, inherit: false, namespace: Object)
-    namespace.const_get(symbol, inherit)
-  rescue NameError, LoadError, RuntimeError, ArgumentError, TypeError
-    nil
-  end
-
   sig { params(tree: RBI::Tree, name: T.nilable(String), constant: BasicObject).void.checked(:never) }
   def compile(tree, name, constant)
     return unless constant
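
Note: `constantize` itself is not defined in this hunk. Given data/lib/tapioca/reflection.rb (+13) in the file list, it presumably now lives in the shared Reflection helpers; judging by the removed `resolve_constant`, a sketch of its behaviour (an assumption, not the released source) would be:

    def constantize(symbol, inherit: false, namespace: Object)
      namespace.const_get(symbol, inherit)
    rescue NameError, LoadError, RuntimeError, ArgumentError, TypeError
      nil
    end
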
@@ -269,7 +256,7 @@ module Tapioca
   def compile_subconstants(tree, name, constant)
     constants_of(constant).sort.uniq.map do |constant_name|
       symbol = (name == "Object" ? "" : name) + "::#{constant_name}"
-      subconstant = resolve_constant(symbol)
+      subconstant = constantize(symbol)
 
       # Don't compile modules of Object because Object::Foo == Foo
       # Don't compile modules of BasicObject because BasicObject::BasicObject == BasicObject
@@ -300,7 +287,7 @@ module Tapioca
     # variable via the value of the type variable constant.
     subconstant_to_name_lookup = constants_of(constant)
       .each_with_object({}.compare_by_identity) do |constant_name, table|
-        table[resolve_constant(constant_name.to_s, namespace: constant)] = constant_name.to_s
+        table[constantize(constant_name.to_s, namespace: constant)] = constant_name.to_s
       end
 
     # Map each type variable to its string representation.
@@ -356,7 +343,7 @@ module Tapioca
       superclass_name = name_of(superclass)
       next unless superclass_name
 
-      resolved_superclass = resolve_constant(superclass_name)
+      resolved_superclass = constantize(superclass_name)
       next unless Module === resolved_superclass
       next if name_of(resolved_superclass) == constant_name
 
@@ -382,43 +369,44 @@ module Tapioca
     interesting_ancestors = interesting_ancestors_of(constant)
     interesting_singleton_class_ancestors = interesting_ancestors_of(singleton_class)
 
-    prepend = interesting_ancestors.take_while { |c| !are_equal?(constant, c) }
-    include = interesting_ancestors.drop(prepend.size + 1)
-    extend = interesting_singleton_class_ancestors.reject do |mod|
+    prepends = interesting_ancestors.take_while { |c| !are_equal?(constant, c) }
+    includes = interesting_ancestors.drop(prepends.size + 1)
+    extends = interesting_singleton_class_ancestors.reject do |mod|
       Module != class_of(mod) || are_equal?(mod, singleton_class)
     end
 
-    prepend
-      .reverse
-      .select { |mod| (name = name_of(mod)) && !name.start_with?("T::") }
-      .map do |mod|
-        add_to_symbol_queue(name_of(mod))
-
-        # TODO: Sorbet currently does not handle prepend
-        # properly for method resolution, so we generate an
-        # include statement instead
-        qname = qualified_name_of(mod)
-        tree << RBI::Include.new(T.must(qname))
-      end
+    add_mixins(tree, prepends.reverse, Trackers::Mixin::Type::Prepend)
+    add_mixins(tree, includes.reverse, Trackers::Mixin::Type::Include)
+    add_mixins(tree, extends.reverse, Trackers::Mixin::Type::Extend)
+  end
 
-    include
-      .reverse
-      .select { |mod| (name = name_of(mod)) && !name.start_with?("T::") }
-      .map do |mod|
-        add_to_symbol_queue(name_of(mod))
+  sig do
+    params(
+      tree: RBI::Tree,
+      mods: T::Array[Module],
+      mixin_type: Trackers::Mixin::Type
+    ).void
+  end
+  def add_mixins(tree, mods, mixin_type)
+    mods
+      .select do |mod|
+        name = name_of(mod)
 
-        qname = qualified_name_of(mod)
-        tree << RBI::Include.new(T.must(qname))
+        name && !name.start_with?("T::")
       end
-
-    extend
-      .reverse
-      .select { |mod| (name = name_of(mod)) && !name.start_with?("T::") }
       .map do |mod|
         add_to_symbol_queue(name_of(mod))
 
         qname = qualified_name_of(mod)
-        tree << RBI::Extend.new(T.must(qname))
+        case mixin_type
+        # TODO: Sorbet currently does not handle prepend
+        # properly for method resolution, so we generate an
+        # include statement instead
+        when Trackers::Mixin::Type::Include, Trackers::Mixin::Type::Prepend
+          tree << RBI::Include.new(T.must(qname))
+        when Trackers::Mixin::Type::Extend
+          tree << RBI::Extend.new(T.must(qname))
+        end
       end
   end
 
@@ -647,7 +635,7 @@ module Tapioca
   sig { params(constant: Module, strict: T::Boolean).returns(T::Boolean) }
   def defined_in_gem?(constant, strict: true)
     files = Set.new(get_file_candidates(constant))
-      .merge(Tapioca::ConstantLocator.files_for(constant))
+      .merge(Tapioca::Trackers::ConstantDefinition.files_for(constant))
 
     return !strict if files.empty?
 
@@ -656,6 +644,19 @@ module Tapioca
     end
   end
 
+  sig do
+    params(
+      mod: Module,
+      mixin_type: Trackers::Mixin::Type,
+      mixin_locations: T::Hash[Trackers::Mixin::Type, T::Hash[Module, T::Array[String]]]
+    ).returns(T::Boolean)
+  end
+  def mixed_in_by_gem?(mod, mixin_type, mixin_locations)
+    locations = mixin_locations.dig(mixin_type, mod)
+    return true unless locations
+    locations.any? { |location| gem.contains_path?(location) }
+  end
+
   sig { params(constant: Module).returns(T::Array[String]) }
   def get_file_candidates(constant)
     wrapped_module = Pry::WrappedModule.new(constant)
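
Note: the mixin_type and mixin_locations plumbing above is fed by the new Trackers::Mixin module (data/lib/tapioca/trackers/mixin.rb, +78, not shown in this excerpt). As a rough sketch of the general technique, such a tracker can intercept Module's mixin methods and record the file that performed each mixin; all names below are illustrative assumptions, not the released source:

    module MixinTracker
      LOCATIONS = { include: {}, prepend: {} }

      # Remember which file mixed `mod` in via the given mixin type.
      def self.register(type, mod, path)
        (LOCATIONS[type][mod] ||= []) << path if path
      end

      module ModulePatch
        def include(*mods)
          mods.each { |m| MixinTracker.register(:include, m, caller_locations(1, 1)&.first&.absolute_path) }
          super
        end

        def prepend(*mods)
          mods.each { |m| MixinTracker.register(:prepend, m, caller_locations(1, 1)&.first&.absolute_path) }
          super
        end
      end

      # Patch every Module/Class so includes and prepends are recorded with their call site.
      ::Module.prepend(ModulePatch)
    end
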
@@ -727,7 +728,7 @@ module Tapioca
     return name if name
     name = super(constant)
     return if name.nil?
-    return unless are_equal?(constant, resolve_constant(name, inherit: true))
+    return unless are_equal?(constant, constantize(name, inherit: true))
     name = "Struct" if name =~ /^(::)?Struct::[^:]+$/
     name
   end
@@ -0,0 +1,79 @@
+# typed: strict
+# frozen_string_literal: true
+
+require "etc"
+
+module Tapioca
+  class Executor
+    extend T::Sig
+
+    MINIMUM_ITEMS_PER_WORKER = T.let(2, Integer)
+
+    sig { params(queue: T::Array[T.untyped], number_of_workers: T.nilable(Integer)).void }
+    def initialize(queue, number_of_workers: nil)
+      @queue = queue
+
+      # Forking workers is expensive and not worth it for a low number of gems. Here we assign the number of workers to
+      # be the minimum between the number of available processors (max) or the number of workers to make sure that each
+      # one has at least 4 items to process
+      @number_of_workers = T.let(
+        number_of_workers || [Etc.nprocessors, (queue.length.to_f / MINIMUM_ITEMS_PER_WORKER).ceil].min,
+        Integer
+      )
+
+      # The number of items that will be processed per worker, so that we can split the queue into groups and assign
+      # them to each one of the workers
+      @items_per_worker = T.let((queue.length.to_f / @number_of_workers).ceil, Integer)
+    end
+
+    sig do
+      type_parameters(:T).params(
+        block: T.proc.params(item: T.untyped).returns(T.type_parameter(:T))
+      ).returns(T::Array[T.type_parameter(:T)])
+    end
+    def run_in_parallel(&block)
+      # If we only have one worker selected, it's not worth forking, just run sequentially
+      return @queue.map { |item| block.call(item) } if @number_of_workers == 1
+
+      read_pipes = []
+      write_pipes = []
+
+      # If we have more than one worker, fork the pool by shifting the expected number of items per worker from the
+      # queue
+      workers = (0...@number_of_workers).map do
+        items = @queue.shift(@items_per_worker)
+
+        # Each worker has their own pair of pipes, so that we can read the result from each worker separately
+        read, write = IO.pipe
+        read_pipes << read
+        write_pipes << write
+
+        fork do
+          read.close
+          result = items.map { |item| block.call(item) }
+
+          # Pack the result as a Base64 string of the Marshal dump of the array of values returned by the block that we
+          # ran in parallel
+          packed = [Marshal.dump(result)].pack("m")
+          write.puts(packed)
+          write.close
+        end
+      end
+
+      # Close all the write pipes, then read and close from all the read pipes
+      write_pipes.each(&:close)
+      result = read_pipes.map do |pipe|
+        content = pipe.read
+        pipe.close
+        content
+      end
+
+      # Wait until all the workers finish. Notice that waiting for the PIDs can only happen after we read and close the
+      # pipe or else we may end up in a condition where writing to the pipe hangs indefinitely
+      workers.each { |pid| Process.waitpid(pid) }
+
+      # Decode the value back into the Ruby objects by doing the inverse of what each worker does
+      result.flat_map { |item| T.unsafe(Marshal.load(item.unpack1("m"))) }
+    end
+  end
+end
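
As a hypothetical usage sketch of the new Executor (queue contents and worker count invented for illustration): each forked worker Marshal-dumps the values returned by the block, Base64-packs them through its pipe, and the parent unpacks and flattens them back in worker order, so results line up with the original queue. Note that run_in_parallel relies on fork, so it only forks on platforms where that is available.

    queue = ["rake", "rails", "rbi", "thor"]
    executor = Tapioca::Executor.new(queue, number_of_workers: 2)

    lengths = executor.run_in_parallel { |gem_name| gem_name.length }
    # => [4, 5, 3, 4]  (worker 0 handled "rake"/"rails", worker 1 handled "rbi"/"thor")
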
@@ -105,6 +105,7 @@ module Tapioca
     real_gem_path = to_realpath(@spec.full_gem_path)
     @full_gem_path = T.let(real_gem_path, String)
     @version = T.let(version_string, String)
+    @exported_rbi_files = T.let(nil, T.nilable(T::Array[String]))
   end
 
   sig { params(gemfile_dir: String).returns(T::Boolean) }
@@ -151,6 +152,28 @@ module Tapioca
     files.each { |path| YARD.parse(path.to_s, [], Logger::Severity::FATAL) }
   end
 
+  sig { returns(T::Array[String]) }
+  def exported_rbi_files
+    @exported_rbi_files ||= Dir.glob("#{full_gem_path}/rbi/**/*.rbi")
+  end
+
+  sig { returns(T::Boolean) }
+  def export_rbi_files?
+    exported_rbi_files.any?
+  end
+
+  sig { returns(RBI::MergeTree) }
+  def exported_rbi_tree
+    rewriter = RBI::Rewriters::Merge.new(keep: RBI::Rewriters::Merge::Keep::NONE)
+
+    exported_rbi_files.each do |file|
+      rbi = RBI::Parser.parse_file(file)
+      rewriter.merge(rbi)
+    end
+
+    rewriter.tree
+  end
+
   private
 
   sig { returns(T::Boolean) }
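
These additions expose any RBI files a gem ships under rbi/ in its own package. A hypothetical usage sketch (assuming `gem` is an instance of the spec wrapper these methods are defined on):

    if gem.export_rbi_files?
      tree = gem.exported_rbi_tree   # an RBI::MergeTree built from rbi/**/*.rbi
      puts tree.string               # pretty-printed via the rbi gem
    end
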
@@ -1,9 +1,6 @@
 # typed: strict
 # frozen_string_literal: true
 
-# TODO: Remove me when logging logic has been abstracted.
-require "thor"
-
 module Tapioca
   module Generators
     class Base
@@ -14,7 +11,7 @@ module Tapioca
         include Thor::Actions
       end
 
-      # TODO: Remove me when logging logic has been abstracted
+      include CliHelper
       include Thor::Base
 
       abstract!
@@ -30,20 +27,6 @@ module Tapioca
 
       private
 
-      # TODO: Remove me when logging logic has been abstracted
-      sig { params(message: String, color: T.any(Symbol, T::Array[Symbol])).void }
-      def say_error(message = "", *color)
-        force_new_line = (message.to_s !~ /( |\t)\Z/)
-        # NOTE: This is a hack. We're no longer subclassing from Thor::Shell::Color
-        # so we no longer have access to the prepare_message call.
-        # We should update this to remove this.
-        buffer = shell.send(:prepare_message, *T.unsafe([message, *T.unsafe(color)]))
-        buffer << "\n" if force_new_line && !message.to_s.end_with?("\n")
-
-        $stderr.print(buffer)
-        $stderr.flush
-      end
-
       sig do
         params(
           path: T.any(String, Pathname),
@@ -56,6 +39,16 @@ module Tapioca
       def create_file(path, content, force: true, skip: false, verbose: true)
         @file_writer.create_file(path, force: force, skip: skip, verbose: verbose) { content }
       end
+
+      sig do
+        params(
+          path: T.any(String, Pathname),
+          verbose: T::Boolean
+        ).void
+      end
+      def remove_file(path, verbose: true)
+        @file_writer.remove_file(path, verbose: verbose)
+      end
     end
   end
 end
@@ -8,8 +8,8 @@ module Tapioca
         params(
           requested_constants: T::Array[String],
           outpath: Pathname,
-          generators: T::Array[String],
-          exclude_generators: T::Array[String],
+          only: T::Array[String],
+          exclude: T::Array[String],
           file_header: T::Boolean,
           compiler_path: String,
           tapioca_path: String,
@@ -17,14 +17,15 @@ module Tapioca
           file_writer: Thor::Actions,
           should_verify: T::Boolean,
           quiet: T::Boolean,
-          verbose: T::Boolean
+          verbose: T::Boolean,
+          number_of_workers: T.nilable(Integer),
         ).void
       end
       def initialize(
         requested_constants:,
         outpath:,
-        generators:,
-        exclude_generators:,
+        only:,
+        exclude:,
         file_header:,
         compiler_path:,
         tapioca_path:,
@@ -32,18 +33,20 @@ module Tapioca
         file_writer: FileWriter.new,
         should_verify: false,
         quiet: false,
-        verbose: false
+        verbose: false,
+        number_of_workers: nil
       )
         @requested_constants = requested_constants
         @outpath = outpath
-        @generators = generators
-        @exclude_generators = exclude_generators
+        @only = only
+        @exclude = exclude
         @file_header = file_header
         @compiler_path = compiler_path
         @tapioca_path = tapioca_path
         @should_verify = should_verify
         @quiet = quiet
         @verbose = verbose
+        @number_of_workers = number_of_workers
 
         super(default_command: default_command, file_writer: file_writer)
 
@@ -52,6 +55,7 @@ module Tapioca
 
       sig { override.void }
       def generate
+        load_dsl_extensions
         load_application(eager_load: @requested_constants.empty?)
         abort_if_pending_migrations!
         load_dsl_generators
@@ -68,31 +72,31 @@ module Tapioca
 
         compiler = Compilers::DslCompiler.new(
           requested_constants: constantize(@requested_constants),
-          requested_generators: constantize_generators(@generators),
-          excluded_generators: constantize_generators(@exclude_generators),
+          requested_generators: constantize_generators(@only),
+          excluded_generators: constantize_generators(@exclude),
           error_handler: ->(error) {
             say_error(error, :bold, :red)
-          }
+          },
+          number_of_workers: @number_of_workers
         )
 
-        compiler.run do |constant, contents|
+        processed_files = compiler.run do |constant, contents|
           constant_name = T.must(Reflection.name_of(constant))
 
           if @verbose && !@quiet
             say_status(:processing, constant_name, :yellow)
           end
 
-          filename = compile_dsl_rbi(
+          compile_dsl_rbi(
             constant_name,
             contents,
             outpath: outpath,
             quiet: @should_verify || @quiet && !@verbose
           )
-
-          if filename
-            rbi_files_to_purge.delete(filename)
-          end
         end
+
+        processed_files.each { |filename| rbi_files_to_purge.delete(T.must(filename)) }
+
         say("")
 
         if @should_verify
@@ -171,7 +175,8 @@ module Tapioca
         unless unprocessable_constants.empty?
           unprocessable_constants.each do |name, _|
             say("Error: Cannot find constant '#{name}'", :red)
-            remove(dsl_rbi_filename(name))
+            filename = dsl_rbi_filename(name)
+            remove_file(filename) if File.file?(filename)
           end
 
           exit(1)
@@ -182,17 +187,9 @@ module Tapioca
 
       sig { params(generator_names: T::Array[String]).returns(T::Array[T.class_of(Compilers::Dsl::Base)]) }
       def constantize_generators(generator_names)
-        generator_map = generator_names.map do |name|
-          # Try to find built-in tapioca generator first, then globally defined generator. The
-          # explicit `break` ensures the class is returned, not the `potential_name`.
-          generator_klass = ["Tapioca::Compilers::Dsl::#{name}", name].find do |potential_name|
-            break Object.const_get(potential_name)
-          rescue NameError
-            # Skip if we can't find generator by the potential name
-          end
-
-          [name, generator_klass]
-        end.to_h
+        generator_map = generator_names.to_h do |name|
+          [name, Compilers::Dsl::Base.resolve(name)]
+        end
 
         unprocessable_generators = generator_map.select { |_, v| v.nil? }
         unless unprocessable_generators.empty?
@@ -203,7 +200,7 @@ module Tapioca
           exit(1)
         end
 
-        generator_map.values
+        T.cast(generator_map.values, T::Array[T.class_of(Compilers::Dsl::Base)])
       end
 
       sig do
@@ -245,7 +242,7 @@ module Tapioca
         say("Removing stale RBI files...")
 
         files.sort.each do |filename|
-          remove(filename)
+          remove_file(filename)
         end
         say("")
       end
@@ -256,13 +253,6 @@ module Tapioca
         @outpath / "#{underscore(constant_name)}.rbi"
       end
 
-      sig { params(filename: Pathname).void }
-      def remove(filename)
-        return unless filename.exist?
-        say("-- Removing: #{filename}")
-        filename.unlink
-      end
-
       sig { params(tmp_dir: Pathname).returns(T::Hash[String, Symbol]) }
       def verify_dsl_rbi(tmp_dir:)
         diff = {}
@@ -357,6 +347,11 @@ module Tapioca
       def generate_command_for(constant)
         "#{@default_command} dsl #{constant}"
       end
+
+      sig { void }
+      def load_dsl_extensions
+        Dir["#{__dir__}/../compilers/dsl/extensions/*.rb"].sort.each { |f| require(f) }
+      end
     end
   end
 end