collie-lsp 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.collie.yml.example +144 -0
- data/.rspec +3 -0
- data/CHANGELOG.md +10 -0
- data/LICENSE +21 -0
- data/README.md +273 -0
- data/Rakefile +8 -0
- data/collie-lsp.gemspec +36 -0
- data/exe/collie-lsp +9 -0
- data/lib/collie_lsp/collie_wrapper.rb +97 -0
- data/lib/collie_lsp/document_store.rb +66 -0
- data/lib/collie_lsp/handlers/code_action.rb +77 -0
- data/lib/collie_lsp/handlers/completion.rb +72 -0
- data/lib/collie_lsp/handlers/definition.rb +117 -0
- data/lib/collie_lsp/handlers/diagnostics.rb +64 -0
- data/lib/collie_lsp/handlers/document_symbol.rb +185 -0
- data/lib/collie_lsp/handlers/folding_range.rb +211 -0
- data/lib/collie_lsp/handlers/formatting.rb +55 -0
- data/lib/collie_lsp/handlers/hover.rb +104 -0
- data/lib/collie_lsp/handlers/references.rb +185 -0
- data/lib/collie_lsp/handlers/rename.rb +226 -0
- data/lib/collie_lsp/handlers/semantic_tokens.rb +302 -0
- data/lib/collie_lsp/handlers/workspace_symbol.rb +161 -0
- data/lib/collie_lsp/protocol/initialize.rb +58 -0
- data/lib/collie_lsp/protocol/shutdown.rb +25 -0
- data/lib/collie_lsp/protocol/text_document.rb +81 -0
- data/lib/collie_lsp/server.rb +104 -0
- data/lib/collie_lsp/version.rb +5 -0
- data/lib/collie_lsp.rb +25 -0
- data/vscode-extension/.gitignore +3 -0
- data/vscode-extension/.vscode/launch.json +17 -0
- data/vscode-extension/.vscode/tasks.json +14 -0
- data/vscode-extension/README.md +35 -0
- data/vscode-extension/package.json +49 -0
- data/vscode-extension/src/extension.ts +48 -0
- data/vscode-extension/test-grammar.y +42 -0
- data/vscode-extension/tsconfig.json +12 -0
- metadata +98 -0
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
# frozen_string_literal: true

module CollieLsp
  module Handlers
    # Rename symbol support (textDocument/rename).
    #
    # The declaration site comes from the parsed AST; usage sites come from
    # a whole-word scan over the raw text of each rule's body.
    module Rename
      module_function

      # Handle textDocument/rename request.
      #
      # Replies with a WorkspaceEdit covering every occurrence of the symbol
      # under the cursor, or with a nil result when the document is unknown or
      # unparsed, no symbol sits at the position, or the new name violates the
      # naming convention for the symbol's kind.
      #
      # @param request [Hash] LSP request
      # @param document_store [DocumentStore] Document store
      # @param _collie [CollieWrapper] Collie wrapper (unused)
      # @param writer [Object] Response writer
      def handle(request, document_store, _collie, writer)
        uri = request[:params][:textDocument][:uri]
        position = request[:params][:position]
        new_name = request[:params][:newName]
        doc = document_store.get(uri)
        return reject(request, writer) unless doc

        ast = doc[:ast]
        return reject(request, writer) unless ast

        # Find symbol at position
        symbol = find_symbol_at_position(doc[:text], position)
        return reject(request, writer) unless symbol

        # Validate new name
        return reject(request, writer) unless valid_name?(symbol, new_name, ast)

        # Build workspace edit with all occurrences
        writer.write(
          id: request[:id],
          result: build_workspace_edit(uri, symbol, new_name, doc[:text], ast)
        )
      end

      # Reply with a nil result ("rename refused / nothing to do").
      # @param request [Hash] LSP request
      # @param writer [Object] Response writer
      # @return [nil]
      def reject(request, writer)
        writer.write(id: request[:id], result: nil)
        nil
      end

      # Find the symbol at the given position.
      # @param text [String] Document text
      # @param position [Hash] LSP position ({ line:, character: }, 0-based)
      # @return [String, nil] Symbol name, or nil when no word is at the cursor
      def find_symbol_at_position(text, position)
        lines = text.lines
        line = lines[position[:line]]
        return nil unless line

        # Expand left and right from the character position to cover the
        # whole identifier the cursor touches.
        char = position[:character]
        start_pos = char
        end_pos = char
        start_pos -= 1 while start_pos.positive? && line[start_pos - 1] =~ /[A-Za-z0-9_]/
        end_pos += 1 while end_pos < line.length && line[end_pos] =~ /[A-Za-z0-9_]/

        word = line[start_pos...end_pos]
        # Fix: a cursor on whitespace/punctuation yields an empty string,
        # which is truthy in Ruby and previously slipped past callers'
        # `unless symbol` guards; report "no symbol here" as nil instead.
        return nil if word.nil? || word.empty?

        word
      end

      # Validate the new name based on symbol type: token names must be
      # UPPER_CASE, nonterminal names must be lower_case.
      # @param old_name [String] Old symbol name
      # @param new_name [String] New symbol name
      # @param ast [Hash] Parsed AST
      # @return [Boolean] True if valid
      def valid_name?(old_name, new_name, ast)
        return false if new_name.empty?

        # The symbol is a token when any %token declaration lists it.
        is_token = ast[:declarations]&.any? do |decl|
          decl[:kind] == :token && decl[:names]&.include?(old_name)
        end

        pattern = is_token ? /^[A-Z][A-Z0-9_]*$/ : /^[a-z][a-z0-9_]*$/
        new_name.match?(pattern)
      end

      # Build workspace edit with all rename changes.
      # @param uri [String] Document URI
      # @param old_name [String] Old symbol name
      # @param new_name [String] New symbol name
      # @param text [String] Document text
      # @param ast [Hash] Parsed AST
      # @return [Hash] LSP workspace edit ({ changes: { uri => [edits] } })
      def build_workspace_edit(uri, old_name, new_name, text, ast)
        locations = find_all_occurrences(text, old_name, ast)

        edits = locations.map do |loc|
          # AST locations are 1-based; LSP positions are 0-based.
          {
            range: {
              start: {
                line: loc[:line] - 1,
                character: loc[:column] - 1
              },
              end: {
                line: loc[:line] - 1,
                character: loc[:column] + old_name.length - 1
              }
            },
            newText: new_name
          }
        end

        { changes: { uri => edits } }
      end

      # Find all occurrences of a symbol in the document: declaration site
      # plus whole-word usages inside rule bodies, de-duplicated by position.
      # @param text [String] Document text
      # @param symbol [String] Symbol to find
      # @param ast [Hash] Parsed AST
      # @return [Array<Hash>] Array of { line:, column: } locations (1-based)
      def find_all_occurrences(text, symbol, ast)
        locations = []

        decl_loc = find_declaration_location(ast, symbol)
        locations << decl_loc if decl_loc

        locations.concat(find_usage_locations(text, symbol, ast))

        locations.uniq { |loc| [loc[:line], loc[:column]] }
      end

      # Find the declaration location of a symbol.
      # @param ast [Hash] Parsed AST
      # @param symbol [String] Symbol name
      # @return [Hash, nil] Location or nil
      def find_declaration_location(ast, symbol)
        # %token declarations
        ast[:declarations]&.each do |decl|
          next unless decl[:kind] == :token

          return decl[:location] if decl[:names]&.include?(symbol) && decl[:location]
        end

        # Nonterminal rules
        rule = ast[:rules]&.find { |r| r[:name] == symbol }
        return rule[:location] if rule&.dig(:location)

        nil
      end

      # Find all whole-word usage locations of a symbol inside rule bodies.
      # @param text [String] Document text
      # @param symbol [String] Symbol to find
      # @param ast [Hash] Parsed AST
      # @return [Array<Hash>] Array of { line:, column: } locations (1-based)
      def find_usage_locations(text, symbol, ast)
        locations = []
        lines = text.lines

        ast[:rules]&.each do |rule|
          next unless rule[:location]

          # Scan from the rule's first line to its terminating semicolon.
          start_line = rule[:location][:line] - 1
          search_end = find_rule_end(lines, start_line)

          (start_line..search_end).each do |line_idx|
            line = lines[line_idx]
            next unless line

            # Find every occurrence of the symbol in this line.
            col = 0
            while (pos = line.index(symbol, col))
              # Only count whole words, not substrings of longer identifiers.
              before_char = pos.positive? ? line[pos - 1] : ' '
              after_char = line[pos + symbol.length] || ' '

              if before_char !~ /[A-Za-z0-9_]/ && after_char !~ /[A-Za-z0-9_]/
                locations << {
                  line: line_idx + 1,
                  column: pos + 1
                }
              end

              col = pos + 1
            end
          end
        end

        locations
      end

      # Find the end line of a rule: the first line at or after start_line
      # containing a ';', or the last line of the document.
      # @param lines [Array<String>] Document lines
      # @param start_line [Integer] Rule start line (0-based)
      # @return [Integer] End line index (0-based)
      def find_rule_end(lines, start_line)
        (start_line...lines.length).each do |idx|
          return idx if lines[idx]&.include?(';')
        end

        lines.length - 1
      end
    end
  end
end
|
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
# frozen_string_literal: true

module CollieLsp
  module Handlers
    # Semantic tokens support for syntax highlighting
    # (textDocument/semanticTokens/full).
    module SemanticTokens
      module_function

      # LSP semantic token types. Index order matters: the encoded token
      # stream refers to these by position.
      TOKEN_TYPES = %w[
        namespace type class enum interface struct typeParameter parameter
        variable property enumMember event function method macro keyword
        modifier comment string number regexp operator
      ].freeze

      # LSP semantic token modifiers (currently always encoded as 0).
      TOKEN_MODIFIERS = %w[
        declaration definition readonly static deprecated abstract async
        modification documentation defaultLibrary
      ].freeze

      # Handle textDocument/semanticTokens/full request.
      # @param request [Hash] LSP request
      # @param document_store [DocumentStore] Document store
      # @param _collie [CollieWrapper] Collie wrapper (unused)
      # @param writer [Object] Response writer
      def handle(request, document_store, _collie, writer)
        uri = request[:params][:textDocument][:uri]
        doc = document_store.get(uri)
        ast = doc && doc[:ast]

        # Unknown document or no parse tree -> empty token stream.
        unless ast
          writer.write(id: request[:id], result: { data: [] })
          return
        end

        writer.write(
          id: request[:id],
          result: { data: build_semantic_tokens(doc[:text], ast) }
        )
      end

      # Build the encoded semantic-token array for a whole document.
      # @param text [String] Document text
      # @param ast [Hash] Parsed AST
      # @return [Array<Integer>] Delta-encoded semantic tokens
      def build_semantic_tokens(text, ast)
        info = build_symbol_info(ast)
        raw = text.lines.each_with_index.flat_map do |line, idx|
          tokenize_line(line, idx, info)
        end
        encode_tokens(raw)
      end

      # Collect lookup tables of known symbols from the AST.
      # @param ast [Hash] Parsed AST
      # @return [Hash] { tokens:, nonterminals:, keywords: } lookup hashes
      def build_symbol_info(ast)
        token_names = {}
        Array(ast[:declarations]).each do |decl|
          next unless decl[:kind] == :token

          Array(decl[:names]).each { |n| token_names[n] = true }
        end

        {
          tokens: token_names,
          nonterminals: Array(ast[:rules]).to_h { |rule| [rule[:name], true] },
          keywords: %w[%token %type %left %right %nonassoc %prec %union %start]
            .to_h { |kw| [kw, true] }
        }
      end

      # Tokenize a single line into internal token records.
      # @param line [String] Line text
      # @param line_idx [Integer] Line index (0-based)
      # @param symbol_info [Hash] Symbol information
      # @return [Array<Hash>] Tokens found in this line
      def tokenize_line(line, line_idx, symbol_info)
        result = []
        cursor = 0

        while cursor < line.length
          ch = line[cursor]

          # Whitespace carries no token.
          if ch =~ /\s/
            cursor += 1
            next
          end

          # Directive keywords such as %token / %left.
          if ch == '%'
            kw = extract_keyword(line, cursor)
            if kw && symbol_info[:keywords][kw]
              result << create_token(line_idx, cursor, kw.length, :keyword)
              cursor += kw.length
              next
            end
          end

          two = line[cursor..(cursor + 1)]

          # Line comment: everything up to the end of the line.
          if two == '//'
            result << create_token(line_idx, cursor, line.length - cursor, :comment)
            break
          end

          # Block comment closed on the same line (multi-line not handled).
          if two == '/*' && (close = line.index('*/', cursor + 2))
            result << create_token(line_idx, cursor, close + 2 - cursor, :comment)
            cursor = close + 2
            next
          end

          # String literals (single or double quoted).
          if ch == '"' || ch == "'"
            len = extract_string_length(line, cursor)
            if len
              result << create_token(line_idx, cursor, len, :string)
              cursor += len
              next
            end
          end

          # Identifiers, classified against the symbol tables.
          if ch =~ /[A-Za-z_]/ && (ident = extract_identifier(line, cursor))
            result << create_token(line_idx, cursor, ident.length,
                                   classify_identifier(ident, symbol_info))
            cursor += ident.length
            next
          end

          # Grammar punctuation.
          if ch =~ /[{}:;|]/
            result << create_token(line_idx, cursor, 1, :operator)
            cursor += 1
            next
          end

          # Anything else is skipped one character at a time.
          cursor += 1
        end

        result
      end

      # Extract a %-prefixed keyword starting at pos.
      # @param line [String] Line text
      # @param pos [Integer] Starting position
      # @return [String, nil] Keyword or nil
      def extract_keyword(line, pos)
        return nil unless line[pos] == '%'

        line[pos..][/\A%[a-z]+/]
      end

      # Measure the length of a quoted string starting at pos, honoring
      # backslash-escaped quotes.
      # @param line [String] Line text
      # @param pos [Integer] Starting position (on the opening quote)
      # @return [Integer, nil] String length including quotes, or nil if unclosed
      def extract_string_length(line, pos)
        quote = line[pos]

        ((pos + 1)...line.length).each do |idx|
          return idx - pos + 1 if line[idx] == quote && line[idx - 1] != '\\'
        end

        nil
      end

      # Extract an identifier starting at pos.
      # @param line [String] Line text
      # @param pos [Integer] Starting position
      # @return [String, nil] Identifier or nil
      def extract_identifier(line, pos)
        line[pos..][/\A[A-Za-z_][A-Za-z0-9_]*/]
      end

      # Classify an identifier: token names highlight as enum members,
      # nonterminals as functions, everything else as variables.
      # @param identifier [String] Identifier name
      # @param symbol_info [Hash] Symbol information
      # @return [Symbol] Token type
      def classify_identifier(identifier, symbol_info)
        if symbol_info[:tokens][identifier]
          :enumMember
        elsif symbol_info[:nonterminals][identifier]
          :function
        else
          :variable
        end
      end

      # Build one internal token record (pre-encoding).
      # @param line [Integer] Line index (0-based)
      # @param col [Integer] Column index (0-based)
      # @param length [Integer] Token length
      # @param type [Symbol] Token type
      # @return [Hash] Token hash
      def create_token(line, col, length, type)
        {
          line: line,
          startChar: col,
          length: length,
          tokenType: token_type_index(type),
          tokenModifiers: 0
        }
      end

      # Map a token-type symbol onto its index in TOKEN_TYPES,
      # falling back to `variable` for unknown types.
      # @param type [Symbol] Token type symbol
      # @return [Integer] Token type index
      def token_type_index(type)
        TOKEN_TYPES.index(type.to_s) || TOKEN_TYPES.index('variable')
      end

      # Encode tokens in the LSP delta format: each token is five integers
      # (deltaLine, deltaStartChar, length, tokenType, tokenModifiers)
      # relative to the previous token.
      # @param tokens [Array<Hash>] Token records
      # @return [Array<Integer>] Encoded tokens
      def encode_tokens(tokens)
        data = []
        last_line = 0
        last_col = 0

        tokens.sort_by { |t| [t[:line], t[:startChar]] }.each do |tok|
          line_delta = tok[:line] - last_line
          col_delta = line_delta.zero? ? tok[:startChar] - last_col : tok[:startChar]

          data.concat([line_delta, col_delta, tok[:length], tok[:tokenType], tok[:tokenModifiers]])

          last_line = tok[:line]
          last_col = tok[:startChar]
        end

        data
      end
    end
  end
end
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
# frozen_string_literal: true

module CollieLsp
  module Handlers
    # Workspace-wide symbol search (workspace/symbol).
    module WorkspaceSymbol
      # LSP SymbolKind values used when reporting symbols.
      SYMBOL_KIND_CLASS = 7
      SYMBOL_KIND_FUNCTION = 12
      SYMBOL_KIND_CONSTANT = 14

      # Maps a declaration kind to its LSP symbol kind and container label.
      DECL_KINDS = {
        token: [SYMBOL_KIND_CONSTANT, 'Tokens'],
        type: [SYMBOL_KIND_CLASS, 'Types']
      }.freeze

      module_function

      # Handle workspace/symbol request.
      # @param request [Hash] LSP request
      # @param document_store [DocumentStore] Document store
      # @param _collie [CollieWrapper] Collie wrapper (unused)
      # @param writer [Object] Response writer
      def handle(request, document_store, _collie, writer)
        query = request[:params][:query] || ''
        symbols = search_symbols(query, document_store)

        writer.write(
          id: request[:id],
          result: symbols
        )
      end

      # Search for symbols across all open documents.
      # @param query [String] Search query
      # @param document_store [DocumentStore] Document store
      # @return [Array<Hash>] Matching symbols, most relevant first
      def search_symbols(query, document_store)
        symbols = []

        # NOTE(review): reaches into DocumentStore's internals; consider
        # exposing an `each_document` iterator on DocumentStore instead.
        document_store.instance_variable_get(:@documents).each do |uri, doc|
          next unless doc[:ast]

          symbols.concat(search_in_document(query, uri, doc[:ast]))
        end

        # Exact matches first, then prefix matches, then substring matches.
        symbols.sort_by { |s| symbol_relevance(s[:name], query) }
      end

      # Search for symbols in a single document: %token and %type
      # declarations plus nonterminal rules.
      # @param query [String] Search query
      # @param uri [String] Document URI
      # @param ast [Hash] Parsed AST
      # @return [Array<Hash>] Matching symbols
      def search_in_document(query, uri, ast)
        symbols = []

        ast[:declarations]&.each do |decl|
          kind, container = DECL_KINDS[decl[:kind]]
          # Fix: the original guard (`next unless decl[:kind] == :token`) ran
          # before the %type branch, so type declarations were never reported.
          next unless kind && decl[:location]

          decl[:names]&.each do |name|
            next unless matches_query?(name, query)

            symbols << create_symbol_info(
              name: name,
              kind: kind,
              uri: uri,
              location: decl[:location],
              container_name: container
            )
          end
        end

        ast[:rules]&.each do |rule|
          next unless rule[:location] && matches_query?(rule[:name], query)

          symbols << create_symbol_info(
            name: rule[:name],
            kind: SYMBOL_KIND_FUNCTION,
            uri: uri,
            location: rule[:location],
            container_name: 'Rules'
          )
        end

        symbols
      end

      # Check if a symbol name matches the query (case-insensitive substring;
      # an empty query matches everything).
      # @param name [String] Symbol name
      # @param query [String] Search query
      # @return [Boolean] True if matches
      def matches_query?(name, query)
        return true if query.empty?

        name.downcase.include?(query.downcase)
      end

      # Calculate symbol relevance score (lower is better).
      # @param name [String] Symbol name
      # @param query [String] Search query
      # @return [Integer] 1 exact, 2 prefix, 3 substring, 4 no match
      def symbol_relevance(name, query)
        return 0 if query.empty?

        name_lower = name.downcase
        query_lower = query.downcase

        return 1 if name_lower == query_lower
        return 2 if name_lower.start_with?(query_lower)
        return 3 if name_lower.include?(query_lower)

        4
      end

      # Create an LSP SymbolInformation hash.
      # @param name [String] Symbol name
      # @param kind [Integer] LSP symbol kind
      # @param uri [String] Document URI
      # @param location [Hash] 1-based { line:, column: } symbol location
      # @param container_name [String, nil] Container name
      # @return [Hash] LSP symbol information
      def create_symbol_info(name:, kind:, uri:, location:, container_name: nil)
        # AST locations are 1-based; LSP positions are 0-based.
        line = location[:line] - 1
        column = location[:column] - 1

        info = {
          name: name,
          kind: kind,
          location: {
            uri: uri,
            range: {
              start: { line: line, character: column },
              end: { line: line, character: column + name.length }
            }
          }
        }

        info[:containerName] = container_name if container_name

        info
      end
    end
  end
end
|