rtext 0.8.1 → 0.10.0

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (41)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG +120 -89
  3. data/Project.yaml +15 -0
  4. data/RText_Protocol +47 -4
  5. data/lib/rtext/context_builder.rb +49 -8
  6. data/lib/rtext/default_completer.rb +212 -163
  7. data/lib/rtext/default_service_provider.rb +3 -3
  8. data/lib/rtext/frontend/connector.rb +130 -56
  9. data/lib/rtext/instantiator.rb +11 -3
  10. data/lib/rtext/language.rb +5 -5
  11. data/lib/rtext/serializer.rb +3 -3
  12. data/lib/rtext/service.rb +281 -253
  13. data/lib/rtext/tokenizer.rb +2 -2
  14. metadata +33 -33
  15. data/Rakefile +0 -46
  16. data/test/completer_test.rb +0 -606
  17. data/test/context_builder_test.rb +0 -948
  18. data/test/frontend/context_test.rb +0 -301
  19. data/test/instantiator_test.rb +0 -1704
  20. data/test/integration/backend.out +0 -13
  21. data/test/integration/crash_on_request_editor.rb +0 -12
  22. data/test/integration/ecore_editor.rb +0 -50
  23. data/test/integration/frontend.log +0 -38203
  24. data/test/integration/model/invalid_encoding.invenc +0 -2
  25. data/test/integration/model/test.crash_on_request +0 -18
  26. data/test/integration/model/test.crashing_backend +0 -18
  27. data/test/integration/model/test.dont_open_socket +0 -0
  28. data/test/integration/model/test.invalid_cmd_line +0 -0
  29. data/test/integration/model/test.not_in_rtext +0 -0
  30. data/test/integration/model/test_large_with_errors.ect3 +0 -43523
  31. data/test/integration/model/test_metamodel.ect +0 -24
  32. data/test/integration/model/test_metamodel2.ect +0 -5
  33. data/test/integration/model/test_metamodel3.ect4 +0 -7
  34. data/test/integration/model/test_metamodel_error.ect2 +0 -3
  35. data/test/integration/model/test_metamodel_ok.ect2 +0 -18
  36. data/test/integration/test.rb +0 -966
  37. data/test/link_detector_test.rb +0 -287
  38. data/test/message_helper_test.rb +0 -118
  39. data/test/rtext_test.rb +0 -11
  40. data/test/serializer_test.rb +0 -1004
  41. data/test/tokenizer_test.rb +0 -173
data/test/tokenizer_test.rb (deleted)
@@ -1,173 +0,0 @@
-$:.unshift File.join(File.dirname(__FILE__),"..","lib")
-
-require 'test/unit'
-require 'rtext/tokenizer'
-require 'rtext/generic'
-
-class TokenizerTest < Test::Unit::TestCase
-include RText::Tokenizer
-
-def test_simple
-  assert_tokens [
-    Token.new(:identifier, "TestNode", 1, 1, 8),
-    Token.new(:integer, 1, 1, 10, 10),
-    Token.new(",", nil, 1, 11, 11),
-    Token.new(:identifier, "bla", 1, 13, 15),
-    Token.new(",", nil, 1, 16, 16),
-    Token.new(:float, 0.4, 1, 18, 20),
-    Token.new(",", nil, 1, 21, 21),
-    Token.new(:label, "label", 1, 23, 28),
-    Token.new(:integer, 4, 1, 30, 30),
-    Token.new(",", nil, 1, 31, 31),
-    Token.new(:string, "string", 1, 33, 40),
-    Token.new(:newline, nil, 1, nil, nil)
-  ], "TestNode 1, bla, 0.4, label: 4, \"string\""
-end
-
-def test_more
-  assert_tokens [
-    Token.new(:identifier, "TestNode", 1, 1, 8),
-    Token.new(:boolean, true, 1, 10, 13),
-    Token.new(",", nil, 1, 14, 14),
-    Token.new(:integer, 0xfaa, 1, 16, 20),
-    Token.new(:integer, -3, 1, 22, 23),
-    Token.new(:reference, "/a/b", 1, 25, 28),
-    Token.new(:newline, nil, 1, nil, nil)
-  ], <<-END
-TestNode true, 0xfaa -3 /a/b
-  END
-end
-
-def test_comments_and_annotation
-  assert_tokens [
-    Token.new(:comment, " comment", 1, 1, 9),
-    Token.new(:newline, nil, 1, nil, nil),
-    Token.new(:annotation, " annotation", 2, 1, 12),
-    Token.new(:newline, nil, 2, nil, nil),
-    Token.new(:identifier, "TestNode", 3, 1, 8),
-    Token.new(:comment, "comment2", 3, 10, 18),
-    Token.new(:newline, nil, 3, nil, nil)
-  ], <<-END
-# comment
-@ annotation
-TestNode #comment2
-  END
-end
-
-def test_generic
-  tokens = do_tokenize("<name>")
-  assert_equal :generic, tokens.first.kind
-  assert_equal "name", tokens.first.value.string
-  assert_equal 1, tokens.first.line
-  assert_equal 1, tokens.first.scol
-  assert_equal 6, tokens.first.ecol
-end
-
-def test_generic_bad
-  tokens = do_tokenize("<a>b>")
-  assert_equal :generic, tokens.first.kind
-  assert_equal "a", tokens.first.value.string
-  assert_equal 1, tokens.first.line
-  assert_equal 1, tokens.first.scol
-  assert_equal 3, tokens.first.ecol
-  assert_equal :identifier, tokens[1].kind
-  assert_equal :error, tokens[2].kind
-end
-
-def test_generic_percent
-  tokens = do_tokenize("<%name%>")
-  assert_equal :generic, tokens.first.kind
-  assert_equal "name", tokens.first.value.string
-  assert_equal 1, tokens.first.line
-  assert_equal 1, tokens.first.scol
-  assert_equal 8, tokens.first.ecol
-end
-
-def test_generic_percent_angle_close
-  tokens = do_tokenize("<% a > b < c %>")
-  assert_equal :generic, tokens.first.kind
-  assert_equal " a > b < c ", tokens.first.value.string
-  assert_equal 1, tokens.first.line
-  assert_equal 1, tokens.first.scol
-  assert_equal 15, tokens.first.ecol
-end
-
-def test_generic_percent_bad
-  tokens = do_tokenize("<%= a %> b %>")
-  assert_equal :generic, tokens.first.kind
-  assert_equal "= a ", tokens.first.value.string
-  assert_equal 1, tokens.first.line
-  assert_equal 1, tokens.first.scol
-  assert_equal 8, tokens.first.ecol
-  assert_equal :identifier, tokens[1].kind
-  assert_equal :error, tokens[2].kind
-end
-
-def test_error
-  assert_tokens [
-    Token.new(:error, "\"", 1, 1, 1),
-    Token.new(:identifier, "open", 1, 2, 5),
-    Token.new(:newline, nil, 1, nil, nil)
-  ], <<-END
-"open
-  END
-end
-
-def test_with_bom
-  assert_tokens [
-    Token.new(:identifier, "TestNode", 1, 1, 8),
-    Token.new(:integer, 1, 1, 10, 10),
-    Token.new(:newline, nil, 1, nil, nil)
-  ], "\xEF\xBB\xBFTestNode 1"
-end
-
-def test_excessive_newline
-  assert_tokens [
-    Token.new(:identifier, "TestNode", 3, 1, 8),
-    Token.new(:newline, nil, 3, nil, nil),
-    Token.new(:identifier, "TestNode", 5, 1, 8),
-    Token.new(:newline, nil, 5, nil, nil)
-  ], %Q(
-
-TestNode
-
-TestNode
-
-)
-end
-
-def test_only_newline
-  assert_tokens [
-  ], %Q(
-
-
-)
-end
-
-def test_linebreak
-  assert_tokens [
-    Token.new(:identifier, "TestNode", 2, 1, 8),
-    Token.new(:identifier, "someNode", 2, 10, 17),
-    Token.new(",", nil, 2, 18, 18),
-    Token.new(:newline, nil, 2, nil, nil),
-    Token.new(:label, "label", 3, 3, 8),
-    Token.new(:identifier, "x", 3, 10, 10),
-    Token.new(:newline, nil, 3, nil, nil),
-  ], %Q(
-TestNode someNode,
-  label: x
-)
-
-end
-
-def do_tokenize(str)
-  tokenize(str, /\A\/[\/\w]+/)
-end
-
-def assert_tokens(expected, str)
-  tokens = tokenize(str, /\A\/[\/\w]+/)
-  assert_equal(expected, tokens)
-end
-
-end
-
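For reference, the deleted test drives the RText::Tokenizer mixin directly: tokenize(str, reference_regexp) returns an array of Token structs whose fields are kind, value, line, scol and ecol, as the assertions above show. The following is a minimal usage sketch based only on what the removed tests exercise; the TokenizerDemo wrapper class is hypothetical and not part of the gem.

  require 'rtext/tokenizer'
  require 'rtext/generic'

  # Hypothetical wrapper around the RText::Tokenizer mixin, mirroring the
  # do_tokenize helper from the removed test file.
  class TokenizerDemo
    include RText::Tokenizer

    def run(str)
      # The regexp decides what counts as a :reference token (e.g. /a/b).
      tokenize(str, /\A\/[\/\w]+/)
    end
  end

  TokenizerDemo.new.run("TestNode 1, bla, /a/b").each do |t|
    # Each Token carries: kind, value, line, start column, end column.
    puts [t.kind, t.value.inspect, t.line, t.scol, t.ecol].join("\t")
  end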