unparser 0.1.5 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. checksums.yaml +4 -4
  2. data/.gitignore +38 -2
  3. data/.travis.yml +3 -6
  4. data/Changelog.md +7 -0
  5. data/Gemfile +1 -0
  6. data/Gemfile.devtools +22 -17
  7. data/README.md +3 -4
  8. data/TODO +3 -2
  9. data/bin/unparser +10 -0
  10. data/circle.yml +2 -0
  11. data/config/flay.yml +1 -1
  12. data/config/flog.yml +1 -1
  13. data/config/reek.yml +10 -0
  14. data/config/rubocop.yml +9 -17
  15. data/lib/unparser.rb +8 -1
  16. data/lib/unparser/buffer.rb +2 -0
  17. data/lib/unparser/cli.rb +113 -0
  18. data/lib/unparser/cli/differ.rb +30 -0
  19. data/lib/unparser/cli/preprocessor.rb +196 -0
  20. data/lib/unparser/cli/source.rb +118 -0
  21. data/lib/unparser/comments.rb +64 -23
  22. data/lib/unparser/constants.rb +19 -7
  23. data/lib/unparser/emitter.rb +23 -25
  24. data/lib/unparser/emitter/alias.rb +2 -0
  25. data/lib/unparser/emitter/argument.rb +2 -0
  26. data/lib/unparser/emitter/assignment.rb +3 -1
  27. data/lib/unparser/emitter/begin.rb +3 -6
  28. data/lib/unparser/emitter/binary.rb +2 -0
  29. data/lib/unparser/emitter/block.rb +2 -0
  30. data/lib/unparser/emitter/break.rb +4 -5
  31. data/lib/unparser/emitter/case.rb +2 -0
  32. data/lib/unparser/emitter/cbase.rb +2 -0
  33. data/lib/unparser/emitter/class.rb +4 -3
  34. data/lib/unparser/emitter/def.rb +2 -0
  35. data/lib/unparser/emitter/defined.rb +2 -0
  36. data/lib/unparser/emitter/empty.rb +2 -0
  37. data/lib/unparser/emitter/ensure.rb +2 -0
  38. data/lib/unparser/emitter/flipflop.rb +2 -0
  39. data/lib/unparser/emitter/for.rb +2 -0
  40. data/lib/unparser/emitter/hookexe.rb +2 -0
  41. data/lib/unparser/emitter/if.rb +2 -0
  42. data/lib/unparser/emitter/literal.rb +2 -0
  43. data/lib/unparser/emitter/literal/array.rb +33 -0
  44. data/lib/unparser/emitter/literal/dynamic.rb +21 -1
  45. data/lib/unparser/emitter/literal/dynamic_body.rb +9 -5
  46. data/lib/unparser/emitter/literal/execute_string.rb +2 -0
  47. data/lib/unparser/emitter/literal/hash.rb +136 -0
  48. data/lib/unparser/emitter/literal/primitive.rb +4 -2
  49. data/lib/unparser/emitter/literal/range.rb +2 -0
  50. data/lib/unparser/emitter/literal/regexp.rb +4 -2
  51. data/lib/unparser/emitter/literal/singleton.rb +2 -0
  52. data/lib/unparser/emitter/match.rb +2 -0
  53. data/lib/unparser/emitter/module.rb +2 -3
  54. data/lib/unparser/emitter/next.rb +2 -0
  55. data/lib/unparser/emitter/op_assign.rb +3 -1
  56. data/lib/unparser/emitter/redo.rb +2 -0
  57. data/lib/unparser/emitter/repetition.rb +2 -0
  58. data/lib/unparser/emitter/resbody.rb +2 -0
  59. data/lib/unparser/emitter/rescue.rb +2 -0
  60. data/lib/unparser/emitter/retry.rb +2 -0
  61. data/lib/unparser/emitter/return.rb +19 -4
  62. data/lib/unparser/emitter/root.rb +13 -0
  63. data/lib/unparser/emitter/send.rb +26 -22
  64. data/lib/unparser/emitter/send/arguments.rb +46 -0
  65. data/lib/unparser/emitter/send/attribute_assignment.rb +35 -0
  66. data/lib/unparser/emitter/send/binary.rb +2 -0
  67. data/lib/unparser/emitter/send/index.rb +2 -0
  68. data/lib/unparser/emitter/send/regular.rb +4 -2
  69. data/lib/unparser/emitter/send/unary.rb +3 -1
  70. data/lib/unparser/emitter/splat.rb +2 -0
  71. data/lib/unparser/emitter/super.rb +2 -0
  72. data/lib/unparser/emitter/undef.rb +2 -0
  73. data/lib/unparser/emitter/variable.rb +2 -0
  74. data/lib/unparser/emitter/yield.rb +2 -0
  75. data/lib/unparser/finalize.rb +2 -0
  76. data/lib/unparser/node_helpers.rb +19 -0
  77. data/spec/spec_helper.rb +10 -0
  78. data/spec/unit/unparser/buffer/append_spec.rb +2 -0
  79. data/spec/unit/unparser/buffer/append_without_prefix_spec.rb +2 -0
  80. data/spec/unit/unparser/buffer/capture_content_spec.rb +2 -0
  81. data/spec/unit/unparser/buffer/content_spec.rb +3 -1
  82. data/spec/unit/unparser/buffer/fresh_line_spec.rb +2 -0
  83. data/spec/unit/unparser/buffer/indent_spec.rb +3 -1
  84. data/spec/unit/unparser/buffer/nl_spec.rb +2 -0
  85. data/spec/unit/unparser/buffer/unindent_spec.rb +2 -0
  86. data/spec/unit/unparser/comments/consume_spec.rb +2 -1
  87. data/spec/unit/unparser/comments/take_all_spec.rb +2 -1
  88. data/spec/unit/unparser/comments/take_before_spec.rb +6 -5
  89. data/spec/unit/unparser/comments/take_eol_comments_spec.rb +2 -1
  90. data/spec/unit/unparser/emitter/class_methods/handle_spec.rb +2 -0
  91. data/spec/unit/unparser_spec.rb +110 -57
  92. data/unparser.gemspec +5 -4
  93. metadata +32 -12
  94. data/bin/test-unparser +0 -26
  95. data/lib/unparser/emitter/literal/composed.rb +0 -64
  96. data/spec/unit/unparser/comments/skip_eol_comment_spec.rb +0 -29
data/lib/unparser/cli/differ.rb
@@ -0,0 +1,30 @@
+ # encoding: utf-8
+
+ module Unparser
+   class CLI
+     # Unparser CLI specific differ
+     class Differ < Mutant::Differ
+       include Procto.call(:colorized_diff)
+
+       # Return source diff
+       #
+       # FIXME: Multiple diffs get screwed up!
+       #
+       # @return [String]
+       #   if there is a diff
+       #
+       # @return [nil]
+       #   otherwise
+       #
+       # @api private
+       #
+       def diff
+         diffs.map do |piece|
+           Diff::LCS::Hunk.new(old, new, piece, max_length, old.length - new.length).diff(:unified) << "\n"
+         end.join
+       end
+       memoize :diff
+
+     end # Differ
+   end # CLI
+ end # Unparser
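
The differ delegates the actual hunk computation to Diff::LCS and exposes itself through Procto, so Differ.call(old, new) builds an instance and returns its colorized diff. Below is a minimal usage sketch, not part of this diff; the require path and the line-array inputs are assumptions, mirroring how Source#error_report (further down) invokes it:

    require 'unparser/cli'

    old_lines = ['def foo', '  1', 'end']
    new_lines = ['def foo', '  2', 'end']

    # Procto.call(:colorized_diff) makes Differ.call(old, new) roughly
    # equivalent to Differ.new(old, new).colorized_diff, with #diff
    # producing the unified hunks joined into one string.
    puts Unparser::CLI::Differ.call(old_lines, new_lines)
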
data/lib/unparser/cli/preprocessor.rb
@@ -0,0 +1,196 @@
+ # encoding: utf-8
+
+ module Unparser
+   class CLI
+
+     # CLI specific preprocessor used for equivalency testing
+     class Preprocessor
+       include Adamantium::Flat, NodeHelpers, AbstractType, Concord.new(:node), Procto.call(:result)
+
+       # Return preprocessor result
+       #
+       # @return [Parser::AST::Node]
+       #
+       # @api private
+       #
+       abstract_method :result
+
+       # Run preprocessor for node
+       #
+       # @param [Parser::AST::Node, nil] node
+       #
+       # @return [Parser::AST::Node, nil]
+       #
+       # @api private
+       #
+       def self.run(node)
+         return if node.nil?
+         REGISTRY.fetch(node.type, Noop).new(node).result
+       end
+
+       REGISTRY = {}
+
+       # Register preprocessor
+       #
+       # @param [Symbol] type
+       #
+       # @return [undefined]
+       #
+       # @api private
+       #
+       def self.register(type)
+         REGISTRY[type] = self
+       end
+       private_class_method :register
+
+       private
+
+       # Visit node
+       #
+       # @param [Parser::AST::Node]
+       #
+       # @api private
+       #
+       def visit(node)
+         self.class.run(node)
+       end
+
+       # Return children
+       #
+       # @return [Array<Parser::AST::Node>]
+       #
+       # @api private
+       #
+       def children
+         node.children
+       end
+
+       # Return mapped children
+       #
+       # @return [Array<Parser::AST::Node>]
+       #
+       # @api private
+       #
+       def mapped_children
+         children.map do |node|
+           if node.kind_of?(Parser::AST::Node)
+             visit(node)
+           else
+             node
+           end
+         end
+       end
+
+       # Noop preprocessor that just passes the node through.
+       class Noop < self
+
+         # Return preprocessor result
+         #
+         # @return [Parser::AST::Node]
+         #
+         # @api private
+         #
+         def result
+           s(node.type, *mapped_children)
+         end
+       end # Noop
+
+       # Preprocessor for dynamic string nodes. Collapses adjacent string segments into one.
+       class Dstr < self
+
+         register :dstr
+
+         # Return preprocessor result
+         #
+         # @return [Parser::AST::Node]
+         #
+         # @api private
+         #
+         def result
+           if collapsed_children.all? { |node| node.type == :str }
+             s(:str, collapsed_children.map { |node| node.children.first }.join)
+           else
+             node.updated(nil, collapsed_children)
+           end
+         end
+
+         private
+
+         # Return collapsed children
+         #
+         # @return [Array<Parser::AST::Node>]
+         #
+         # @api private
+         #
+         def collapsed_children
+           chunked_children.each_with_object([]) do |(type, nodes), aggregate|
+             if type == :str
+               aggregate << s(:str, nodes.map { |node| node.children.first }.join)
+             else
+               aggregate.concat(nodes)
+             end
+           end
+         end
+         memoize :collapsed_children
+
+         # Return chunked children
+         #
+         # @return [Array<Parser::AST::Node>]
+         #
+         # @api private
+         #
+         def chunked_children
+           mapped_children.chunk do |item|
+             item.type
+           end
+         end
+
+       end # Dstr
+
+       # Preprocessor for regexp nodes. Normalizes quoting.
+       class Regexp < self
+
+         register :regexp
+
+         # Return preprocessor result
+         #
+         # @return [Parser::AST::Node]
+         #
+         # @api private
+         #
+         def result
+           location = node.location
+           if location && location.begin.source.start_with?('%r')
+             Parser::CurrentRuby.parse(Unparser.unparse(node))
+           else
+             node
+           end
+         end
+       end
+
+       # Preprocessor for begin nodes. Removes begin nodes with one child.
+       #
+       # These are currently generated superfluously by unparser.
+       #
+       class Begin < self
+
+         register :begin
+
+         # Return preprocessor result
+         #
+         # @return [Parser::AST::Node]
+         #
+         # @api private
+         #
+         def result
+           if children.one?
+             visit(children.first)
+           else
+             Noop.call(node)
+           end
+         end
+
+       end # Begin
+     end # Preprocessor
+   end # CLI
+ end # Unparser
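
The preprocessor exists so that ASTs which differ only in surface representation compare as equal during equivalency testing. A hedged example of the Dstr rule above, assuming the parser gem represents juxtaposed string literals as a :dstr node made of :str segments:

    require 'parser/current'
    require 'unparser/cli'

    # "foo" "bar" parses to s(:dstr, s(:str, "foo"), s(:str, "bar"));
    # the Dstr preprocessor collapses the adjacent :str segments.
    node = Parser::CurrentRuby.parse('"foo" "bar"')
    Unparser::CLI::Preprocessor.run(node) # => s(:str, "foobar")
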
data/lib/unparser/cli/source.rb
@@ -0,0 +1,118 @@
+ # encoding: utf-8
+
+ module Unparser
+   class CLI
+     # Source representation for CLI sources
+     class Source
+       include AbstractType, Adamantium::Flat
+
+       # Test if source could be unparsed successfully
+       #
+       # @return [true]
+       #   if source could be unparsed successfully
+       #
+       # @return [false]
+       #
+       # @api private
+       #
+       def success?
+         original_ast == generated_ast
+       end
+
+       # Return error report
+       #
+       # @return [String]
+       #
+       # @api private
+       #
+       def error_report
+         diff = Differ.call(
+           original_ast.inspect.lines.map(&:chomp),
+           generated_ast.inspect.lines.map(&:chomp)
+         )
+         "#{diff}\nOriginal:\n#{original_source}\nGenerated:\n#{generated_source}"
+       end
+       memoize :error_report
+
+       private
+
+       # Return generated source
+       #
+       # @return [String]
+       #
+       # @api private
+       #
+       def generated_source
+         Unparser.unparse(original_ast)
+       end
+       memoize :generated_source
+
+       # Return generated AST
+       #
+       # @return [Parser::AST::Node]
+       #
+       # @api private
+       #
+       def generated_ast
+         Preprocessor.run(Parser::CurrentRuby.parse(generated_source))
+       end
+       memoize :generated_ast
+
+       # Return original AST
+       #
+       # @return [Parser::AST::Node]
+       #
+       # @api private
+       #
+       def original_ast
+         Preprocessor.run(Parser::CurrentRuby.parse(original_source))
+       end
+       memoize :original_ast
+
+       # CLI source from string
+       class String < self
+         include Concord.new(:original_source)
+
+         # Return identification
+         #
+         # @return [String]
+         #
+         # @api private
+         #
+         def identification
+           '(string)'
+         end
+
+       end # String
+
+       # CLI source from file
+       class File < self
+         include Concord.new(:file_name)
+
+         # Return identification
+         #
+         # @return [String]
+         #
+         # @api private
+         #
+         def identification
+           "(#{file_name})"
+         end
+
+         private
+
+         # Return original source
+         #
+         # @return [String]
+         #
+         # @api private
+         #
+         def original_source
+           ::File.read(file_name)
+         end
+         memoize :original_source
+
+       end # File
+     end # Source
+   end # CLI
+ end # Unparser
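
Source ties Preprocessor and Differ together: it parses the original source, unparses it, re-parses the generated code, and compares the preprocessed ASTs. A rough usage sketch under the assumption that unparser/cli is loadable outside the shipped executable; the literal passed in is arbitrary and only the API shown in the hunk above is used:

    require 'unparser/cli'

    source = Unparser::CLI::Source::String.new('a = [1, 2, 3]')
    if source.success?
      puts "#{source.identification} round-trips"
    else
      # Differ-based AST diff with original and generated source appended
      puts source.error_report
    end
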
data/lib/unparser/comments.rb
@@ -1,17 +1,35 @@
+ # encoding: utf-8
+
  module Unparser

    # Holds the comments that remain to be emitted
    class Comments

+     # Proxy to singleton
+     #
+     # NOTICE:
+     #   Delegating to stateless helpers is a pattern I saw many times in our code.
+     #   Maybe we should make another helper module? include SingletonDelegator.new(:source_range) ?
+     #
+     # @return [undefined]
+     #
+     # @api private
+     #
+     def source_range(*arguments)
+       self.class.source_range(*arguments)
+     end
+
      # Initialize object
      #
      # @param [Array] comments
      #
      # @return [undefined]
      #
+     # @api private
+     #
      def initialize(comments)
        @comments = comments.dup
-       @last_range_consumed = @eol_text_to_skip = nil
+       @last_range_consumed = nil
      end

      # Consume part or all of the node
@@ -21,39 +39,33 @@ module Unparser
      #
      # @return [undefined]
      #
-     def consume(node, source_part = :expression)
-       location = node.location
-       return unless location
-       @last_range_consumed = location.public_send(source_part)
-     end
-
-     # Skip any EOL comment with the specified text next time they're taken
-     #
-     # @param [String] comment_text
+     # @api private
      #
-     # @return [undefined]
-     #
-     def skip_eol_comment(comment_text)
-       @eol_text_to_skip = comment_text
+     def consume(node, source_part = :expression)
+       range = source_range(node, source_part)
+       if range
+         @last_range_consumed = range
+       end
      end

      # Take end-of-line comments
      #
      # @return [Array]
      #
+     # @api private
+     #
      def take_eol_comments
-       text_to_skip = @eol_text_to_skip
-       @eol_text_to_skip = nil
-       return [] unless @last_range_consumed
+       return EMPTY_ARRAY unless @last_range_consumed
        comments = take_up_to_line(@last_range_consumed.end.line)
-       eol_comments = unshift_documents(comments)
-       eol_comments.reject { |comment| comment.text == text_to_skip }
+       unshift_documents(comments)
      end

      # Take all remaining comments
      #
      # @return [Array]
      #
+     # @api private
+     #
      def take_all
        take_while { true }
      end
@@ -65,24 +77,49 @@ module Unparser
      #
      # @return [Array]
      #
+     # @api private
+     #
      def take_before(node, source_part)
-       location = node.location
-       if location.respond_to?(source_part)
-         range = location.public_send(source_part)
+       range = source_range(node, source_part)
+       if range
          take_while { |comment| comment.location.expression.end_pos <= range.begin_pos }
        else
          EMPTY_ARRAY
        end
      end

+     # Return source location part
+     #
+     # FIXME: This method should not be needed. It does too much inline signalling.
+     #
+     # @param [Parser::AST::Node] node
+     # @param [Symbol] part
+     #
+     # @return [Parser::Source::Range]
+     #   if present
+     #
+     # @return [nil]
+     #   otherwise
+     #
+     # @api private
+     #
+     def self.source_range(node, part)
+       location = node.location
+       if location && location.respond_to?(part)
+         location.public_send(part)
+       end
+     end
+
      private

      # Take comments while the provided block returns true
      #
-     # @yield [comment]
+     # @yield [Parser::Source::Comment]
      #
      # @return [Array]
      #
+     # @api private
+     #
      def take_while
        number_to_take = @comments.index { |comment| !yield(comment) } || @comments.size
        @comments.shift(number_to_take)
@@ -94,6 +131,8 @@ module Unparser
      #
      # @return [Array]
      #
+     # @api private
+     #
      def take_up_to_line(line)
        take_while { |comment| comment.location.expression.line <= line }
      end
@@ -104,6 +143,8 @@ module Unparser
      #
      # @return [Array]
      #
+     # @api private
+     #
      def unshift_documents(comments)
        doc_comments, other_comments = comments.partition(&:document?)
        doc_comments.reverse_each { |comment| @comments.unshift(comment) }
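
This refactoring replaces the skip_eol_comment mechanism with a nil-signalling source_range helper shared by consume and take_before. A small, hedged sketch of that helper's behaviour; the parsed snippet and the :keyword probe are illustrative assumptions, not part of the diff:

    require 'parser/current'
    require 'unparser'

    node = Parser::CurrentRuby.parse('a = 1')

    # Returns the requested range when the node's source map responds to it...
    Unparser::Comments.source_range(node, :expression) # => range covering "a = 1"
    # ...and nil otherwise, which consume/take_before treat as "nothing to do".
    Unparser::Comments.source_range(node, :keyword)    # => nil
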