robotcode-robot 0.68.2__py3-none-any.whl → 0.68.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- robotcode/robot/__version__.py +1 -1
- robotcode/robot/config/model.py +63 -51
- robotcode/robot/diagnostics/document_cache_helper.py +523 -0
- robotcode/robot/diagnostics/entities.py +2 -1
- robotcode/robot/diagnostics/errors.py +33 -0
- robotcode/robot/diagnostics/imports_manager.py +1499 -0
- robotcode/robot/diagnostics/library_doc.py +3 -2
- robotcode/robot/diagnostics/model_helper.py +799 -0
- robotcode/robot/diagnostics/namespace.py +2165 -0
- robotcode/robot/diagnostics/namespace_analyzer.py +1121 -0
- robotcode/robot/diagnostics/workspace_config.py +50 -0
- robotcode/robot/utils/ast.py +6 -5
- robotcode/robot/utils/stubs.py +17 -1
- {robotcode_robot-0.68.2.dist-info → robotcode_robot-0.68.5.dist-info}/METADATA +2 -2
- robotcode_robot-0.68.5.dist-info/RECORD +29 -0
- robotcode_robot-0.68.2.dist-info/RECORD +0 -22
- {robotcode_robot-0.68.2.dist-info → robotcode_robot-0.68.5.dist-info}/WHEEL +0 -0
- {robotcode_robot-0.68.2.dist-info → robotcode_robot-0.68.5.dist-info}/licenses/LICENSE.txt +0 -0
robotcode/robot/diagnostics/namespace.py
@@ -0,0 +1,2165 @@
|
|
1
|
+
import ast
|
2
|
+
import enum
|
3
|
+
import itertools
|
4
|
+
import re
|
5
|
+
import time
|
6
|
+
import weakref
|
7
|
+
from collections import OrderedDict, defaultdict
|
8
|
+
from concurrent.futures import CancelledError
|
9
|
+
from itertools import chain
|
10
|
+
from pathlib import Path
|
11
|
+
from typing import (
|
12
|
+
Any,
|
13
|
+
Dict,
|
14
|
+
Iterable,
|
15
|
+
Iterator,
|
16
|
+
List,
|
17
|
+
NamedTuple,
|
18
|
+
Optional,
|
19
|
+
Sequence,
|
20
|
+
Set,
|
21
|
+
Tuple,
|
22
|
+
Union,
|
23
|
+
)
|
24
|
+
|
25
|
+
from robot.errors import VariableError
|
26
|
+
from robot.libraries import STDLIBS
|
27
|
+
from robot.parsing.lexer.tokens import Token
|
28
|
+
from robot.parsing.model.blocks import (
|
29
|
+
Keyword,
|
30
|
+
SettingSection,
|
31
|
+
TestCase,
|
32
|
+
VariableSection,
|
33
|
+
)
|
34
|
+
from robot.parsing.model.statements import Arguments, Statement
|
35
|
+
from robot.parsing.model.statements import LibraryImport as RobotLibraryImport
|
36
|
+
from robot.parsing.model.statements import ResourceImport as RobotResourceImport
|
37
|
+
from robot.parsing.model.statements import (
|
38
|
+
VariablesImport as RobotVariablesImport,
|
39
|
+
)
|
40
|
+
from robot.variables.search import (
|
41
|
+
is_scalar_assign,
|
42
|
+
is_variable,
|
43
|
+
search_variable,
|
44
|
+
)
|
45
|
+
from robotcode.core.concurrent import RLock
|
46
|
+
from robotcode.core.event import event
|
47
|
+
from robotcode.core.lsp.types import (
|
48
|
+
CodeDescription,
|
49
|
+
Diagnostic,
|
50
|
+
DiagnosticRelatedInformation,
|
51
|
+
DiagnosticSeverity,
|
52
|
+
DiagnosticTag,
|
53
|
+
DocumentUri,
|
54
|
+
Location,
|
55
|
+
Position,
|
56
|
+
Range,
|
57
|
+
)
|
58
|
+
from robotcode.core.text_document import TextDocument
|
59
|
+
from robotcode.core.uri import Uri
|
60
|
+
from robotcode.core.utils.logging import LoggingDescriptor
|
61
|
+
|
62
|
+
from ..utils import get_robot_version
|
63
|
+
from ..utils.ast import (
|
64
|
+
range_from_node,
|
65
|
+
range_from_token,
|
66
|
+
strip_variable_token,
|
67
|
+
tokenize_variables,
|
68
|
+
)
|
69
|
+
from ..utils.match import eq_namespace
|
70
|
+
from ..utils.stubs import Languages
|
71
|
+
from ..utils.variables import BUILTIN_VARIABLES
|
72
|
+
from ..utils.visitor import Visitor
|
73
|
+
from .entities import (
|
74
|
+
ArgumentDefinition,
|
75
|
+
BuiltInVariableDefinition,
|
76
|
+
CommandLineVariableDefinition,
|
77
|
+
EnvironmentVariableDefinition,
|
78
|
+
Import,
|
79
|
+
InvalidVariableError,
|
80
|
+
LibraryEntry,
|
81
|
+
LibraryImport,
|
82
|
+
LocalVariableDefinition,
|
83
|
+
ResourceEntry,
|
84
|
+
ResourceImport,
|
85
|
+
VariableDefinition,
|
86
|
+
VariableMatcher,
|
87
|
+
VariablesEntry,
|
88
|
+
VariablesImport,
|
89
|
+
)
|
90
|
+
from .errors import DIAGNOSTICS_SOURCE_NAME, Error
|
91
|
+
from .imports_manager import ImportsManager
|
92
|
+
from .library_doc import (
|
93
|
+
BUILTIN_LIBRARY_NAME,
|
94
|
+
DEFAULT_LIBRARIES,
|
95
|
+
KeywordDoc,
|
96
|
+
KeywordError,
|
97
|
+
KeywordMatcher,
|
98
|
+
LibraryDoc,
|
99
|
+
)
|
100
|
+
|
101
|
+
EXTRACT_COMMENT_PATTERN = re.compile(r".*(?:^ *|\t+| {2,})#(?P<comment>.*)$")
|
102
|
+
ROBOTCODE_PATTERN = re.compile(r"(?P<marker>\brobotcode\b)\s*:\s*(?P<rule>\b\w+\b)")
|
103
|
+
|
104
|
+
|
105
|
+
class DiagnosticsError(Exception):
|
106
|
+
pass
|
107
|
+
|
108
|
+
|
109
|
+
class DiagnosticsWarningError(DiagnosticsError):
|
110
|
+
pass
|
111
|
+
|
112
|
+
|
113
|
+
class ImportError(DiagnosticsError):
|
114
|
+
pass
|
115
|
+
|
116
|
+
|
117
|
+
class NameSpaceError(Exception):
|
118
|
+
pass
|
119
|
+
|
120
|
+
|
121
|
+
class VariablesVisitor(Visitor):
|
122
|
+
def get(self, source: str, model: ast.AST) -> List[VariableDefinition]:
|
123
|
+
self._results: List[VariableDefinition] = []
|
124
|
+
self.source = source
|
125
|
+
self.visit(model)
|
126
|
+
return self._results
|
127
|
+
|
128
|
+
def visit_Section(self, node: ast.AST) -> None: # noqa: N802
|
129
|
+
if isinstance(node, VariableSection):
|
130
|
+
self.generic_visit(node)
|
131
|
+
|
132
|
+
def visit_Variable(self, node: Statement) -> None: # noqa: N802
|
133
|
+
name_token = node.get_token(Token.VARIABLE)
|
134
|
+
if name_token is None:
|
135
|
+
return
|
136
|
+
|
137
|
+
name = name_token.value
|
138
|
+
|
139
|
+
if name is not None:
|
140
|
+
match = search_variable(name, ignore_errors=True)
|
141
|
+
if not match.is_assign(allow_assign_mark=True):
|
142
|
+
return
|
143
|
+
|
144
|
+
if name.endswith("="):
|
145
|
+
name = name[:-1].rstrip()
|
146
|
+
|
147
|
+
values = node.get_values(Token.ARGUMENT)
|
148
|
+
has_value = bool(values)
|
149
|
+
value = tuple(
|
150
|
+
s.replace(
|
151
|
+
"${CURDIR}",
|
152
|
+
str(Path(self.source).parent).replace("\\", "\\\\"),
|
153
|
+
)
|
154
|
+
for s in values
|
155
|
+
)
|
156
|
+
|
157
|
+
self._results.append(
|
158
|
+
VariableDefinition(
|
159
|
+
name=name,
|
160
|
+
name_token=strip_variable_token(
|
161
|
+
Token(
|
162
|
+
name_token.type,
|
163
|
+
name,
|
164
|
+
name_token.lineno,
|
165
|
+
name_token.col_offset,
|
166
|
+
name_token.error,
|
167
|
+
)
|
168
|
+
),
|
169
|
+
line_no=node.lineno,
|
170
|
+
col_offset=node.col_offset,
|
171
|
+
end_line_no=node.lineno,
|
172
|
+
end_col_offset=node.end_col_offset,
|
173
|
+
source=self.source,
|
174
|
+
has_value=has_value,
|
175
|
+
resolvable=True,
|
176
|
+
value=value,
|
177
|
+
)
|
178
|
+
)
|
179
|
+
|
180
|
+
|
181
|
+
class BlockVariableVisitor(Visitor):
|
182
|
+
def __init__(
|
183
|
+
self,
|
184
|
+
library_doc: LibraryDoc,
|
185
|
+
source: str,
|
186
|
+
position: Optional[Position] = None,
|
187
|
+
in_args: bool = True,
|
188
|
+
) -> None:
|
189
|
+
super().__init__()
|
190
|
+
self.library_doc = library_doc
|
191
|
+
self.source = source
|
192
|
+
self.position = position
|
193
|
+
self.in_args = in_args
|
194
|
+
|
195
|
+
self._results: Dict[str, VariableDefinition] = {}
|
196
|
+
self.current_kw_doc: Optional[KeywordDoc] = None
|
197
|
+
|
198
|
+
def get(self, model: ast.AST) -> List[VariableDefinition]:
|
199
|
+
self._results = {}
|
200
|
+
|
201
|
+
self.visit(model)
|
202
|
+
|
203
|
+
return list(self._results.values())
|
204
|
+
|
205
|
+
def visit(self, node: ast.AST) -> None:
|
206
|
+
if self.position is None or self.position >= range_from_node(node).start:
|
207
|
+
super().visit(node)
|
208
|
+
|
209
|
+
def visit_Keyword(self, node: ast.AST) -> None: # noqa: N802
|
210
|
+
try:
|
211
|
+
self.generic_visit(node)
|
212
|
+
finally:
|
213
|
+
self.current_kw_doc = None
|
214
|
+
|
215
|
+
def visit_KeywordName(self, node: Statement) -> None: # noqa: N802
|
216
|
+
from .model_helper import ModelHelper
|
217
|
+
|
218
|
+
name_token = node.get_token(Token.KEYWORD_NAME)
|
219
|
+
|
220
|
+
if name_token is not None and name_token.value:
|
221
|
+
keyword = ModelHelper.get_keyword_definition_at_token(self.library_doc, name_token)
|
222
|
+
self.current_kw_doc = keyword
|
223
|
+
|
224
|
+
for variable_token in filter(
|
225
|
+
lambda e: e.type == Token.VARIABLE,
|
226
|
+
tokenize_variables(name_token, identifiers="$", ignore_errors=True),
|
227
|
+
):
|
228
|
+
if variable_token.value:
|
229
|
+
match = search_variable(variable_token.value, "$", ignore_errors=True)
|
230
|
+
if match.base is None:
|
231
|
+
continue
|
232
|
+
name = match.base.split(":", 1)[0]
|
233
|
+
full_name = f"{match.identifier}{{{name}}}"
|
234
|
+
var_token = strip_variable_token(variable_token)
|
235
|
+
var_token.value = name
|
236
|
+
self._results[full_name] = ArgumentDefinition(
|
237
|
+
name=full_name,
|
238
|
+
name_token=var_token,
|
239
|
+
line_no=variable_token.lineno,
|
240
|
+
col_offset=variable_token.col_offset,
|
241
|
+
end_line_no=variable_token.lineno,
|
242
|
+
end_col_offset=variable_token.end_col_offset,
|
243
|
+
source=self.source,
|
244
|
+
keyword_doc=self.current_kw_doc,
|
245
|
+
)
|
246
|
+
|
247
|
+
def get_variable_token(self, token: Token) -> Optional[Token]:
|
248
|
+
return next(
|
249
|
+
(
|
250
|
+
v
|
251
|
+
for v in itertools.dropwhile(
|
252
|
+
lambda t: t.type in Token.NON_DATA_TOKENS,
|
253
|
+
tokenize_variables(token, ignore_errors=True, extra_types={Token.VARIABLE}),
|
254
|
+
)
|
255
|
+
if v.type == Token.VARIABLE
|
256
|
+
),
|
257
|
+
None,
|
258
|
+
)
|
259
|
+
|
260
|
+
def visit_Arguments(self, node: Statement) -> None: # noqa: N802
|
261
|
+
args: List[str] = []
|
262
|
+
|
263
|
+
arguments = node.get_tokens(Token.ARGUMENT)
|
264
|
+
|
265
|
+
for argument_token in arguments:
|
266
|
+
try:
|
267
|
+
argument = self.get_variable_token(argument_token)
|
268
|
+
|
269
|
+
if argument is not None and argument.value != "@{}":
|
270
|
+
if (
|
271
|
+
self.in_args
|
272
|
+
and self.position is not None
|
273
|
+
and self.position in range_from_token(argument_token)
|
274
|
+
and self.position > range_from_token(argument).end
|
275
|
+
):
|
276
|
+
break
|
277
|
+
|
278
|
+
if argument.value not in args:
|
279
|
+
args.append(argument.value)
|
280
|
+
arg_def = ArgumentDefinition(
|
281
|
+
name=argument.value,
|
282
|
+
name_token=strip_variable_token(argument),
|
283
|
+
line_no=argument.lineno,
|
284
|
+
col_offset=argument.col_offset,
|
285
|
+
end_line_no=argument.lineno,
|
286
|
+
end_col_offset=argument.end_col_offset,
|
287
|
+
source=self.source,
|
288
|
+
keyword_doc=self.current_kw_doc,
|
289
|
+
)
|
290
|
+
self._results[argument.value] = arg_def
|
291
|
+
|
292
|
+
except VariableError:
|
293
|
+
pass
|
294
|
+
|
295
|
+
def visit_ExceptHeader(self, node: Statement) -> None: # noqa: N802
|
296
|
+
variables = node.get_tokens(Token.VARIABLE)[:1]
|
297
|
+
if variables and is_scalar_assign(variables[0].value):
|
298
|
+
try:
|
299
|
+
variable = self.get_variable_token(variables[0])
|
300
|
+
|
301
|
+
if variable is not None:
|
302
|
+
self._results[variable.value] = LocalVariableDefinition(
|
303
|
+
name=variable.value,
|
304
|
+
name_token=strip_variable_token(variable),
|
305
|
+
line_no=variable.lineno,
|
306
|
+
col_offset=variable.col_offset,
|
307
|
+
end_line_no=variable.lineno,
|
308
|
+
end_col_offset=variable.end_col_offset,
|
309
|
+
source=self.source,
|
310
|
+
)
|
311
|
+
|
312
|
+
except VariableError:
|
313
|
+
pass
|
314
|
+
|
315
|
+
def visit_KeywordCall(self, node: Statement) -> None: # noqa: N802
|
316
|
+
# TODO analyze "Set Local/Global/Suite Variable"
|
317
|
+
|
318
|
+
for assign_token in node.get_tokens(Token.ASSIGN):
|
319
|
+
variable_token = self.get_variable_token(assign_token)
|
320
|
+
|
321
|
+
try:
|
322
|
+
if variable_token is not None:
|
323
|
+
if (
|
324
|
+
self.position is not None
|
325
|
+
and self.position in range_from_node(node)
|
326
|
+
and self.position > range_from_token(variable_token).end
|
327
|
+
):
|
328
|
+
continue
|
329
|
+
|
330
|
+
if variable_token.value not in self._results:
|
331
|
+
self._results[variable_token.value] = LocalVariableDefinition(
|
332
|
+
name=variable_token.value,
|
333
|
+
name_token=strip_variable_token(variable_token),
|
334
|
+
line_no=variable_token.lineno,
|
335
|
+
col_offset=variable_token.col_offset,
|
336
|
+
end_line_no=variable_token.lineno,
|
337
|
+
end_col_offset=variable_token.end_col_offset,
|
338
|
+
source=self.source,
|
339
|
+
)
|
340
|
+
|
341
|
+
except VariableError:
|
342
|
+
pass
|
343
|
+
|
344
|
+
def visit_InlineIfHeader(self, node: Statement) -> None: # noqa: N802
|
345
|
+
for assign_token in node.get_tokens(Token.ASSIGN):
|
346
|
+
variable_token = self.get_variable_token(assign_token)
|
347
|
+
|
348
|
+
try:
|
349
|
+
if variable_token is not None:
|
350
|
+
if (
|
351
|
+
self.position is not None
|
352
|
+
and self.position in range_from_node(node)
|
353
|
+
and self.position > range_from_token(variable_token).end
|
354
|
+
):
|
355
|
+
continue
|
356
|
+
|
357
|
+
if variable_token.value not in self._results:
|
358
|
+
self._results[variable_token.value] = LocalVariableDefinition(
|
359
|
+
name=variable_token.value,
|
360
|
+
name_token=strip_variable_token(variable_token),
|
361
|
+
line_no=variable_token.lineno,
|
362
|
+
col_offset=variable_token.col_offset,
|
363
|
+
end_line_no=variable_token.lineno,
|
364
|
+
end_col_offset=variable_token.end_col_offset,
|
365
|
+
source=self.source,
|
366
|
+
)
|
367
|
+
|
368
|
+
except VariableError:
|
369
|
+
pass
|
370
|
+
|
371
|
+
def visit_ForHeader(self, node: Statement) -> None: # noqa: N802
|
372
|
+
variables = node.get_tokens(Token.VARIABLE)
|
373
|
+
for variable in variables:
|
374
|
+
variable_token = self.get_variable_token(variable)
|
375
|
+
if variable_token is not None and variable_token.value and variable_token.value not in self._results:
|
376
|
+
self._results[variable_token.value] = LocalVariableDefinition(
|
377
|
+
name=variable_token.value,
|
378
|
+
name_token=strip_variable_token(variable_token),
|
379
|
+
line_no=variable_token.lineno,
|
380
|
+
col_offset=variable_token.col_offset,
|
381
|
+
end_line_no=variable_token.lineno,
|
382
|
+
end_col_offset=variable_token.end_col_offset,
|
383
|
+
source=self.source,
|
384
|
+
)
|
385
|
+
|
386
|
+
def visit_Var(self, node: Statement) -> None: # noqa: N802
|
387
|
+
variable = node.get_token(Token.VARIABLE)
|
388
|
+
if variable is None:
|
389
|
+
return
|
390
|
+
try:
|
391
|
+
if not is_variable(variable.value):
|
392
|
+
return
|
393
|
+
|
394
|
+
self._results[variable.value] = LocalVariableDefinition(
|
395
|
+
name=variable.value,
|
396
|
+
name_token=strip_variable_token(variable),
|
397
|
+
line_no=variable.lineno,
|
398
|
+
col_offset=variable.col_offset,
|
399
|
+
end_line_no=variable.lineno,
|
400
|
+
end_col_offset=variable.end_col_offset,
|
401
|
+
source=self.source,
|
402
|
+
)
|
403
|
+
|
404
|
+
except VariableError:
|
405
|
+
pass
|
406
|
+
|
407
|
+
|
408
|
+
class ImportVisitor(Visitor):
|
409
|
+
def get(self, source: str, model: ast.AST) -> List[Import]:
|
410
|
+
self._results: List[Import] = []
|
411
|
+
self.source = source
|
412
|
+
self.visit(model)
|
413
|
+
return self._results
|
414
|
+
|
415
|
+
def visit_Section(self, node: ast.AST) -> None: # noqa: N802
|
416
|
+
if isinstance(node, SettingSection):
|
417
|
+
self.generic_visit(node)
|
418
|
+
|
419
|
+
def visit_LibraryImport(self, node: RobotLibraryImport) -> None: # noqa: N802
|
420
|
+
name = node.get_token(Token.NAME)
|
421
|
+
|
422
|
+
separator = node.get_token(Token.WITH_NAME)
|
423
|
+
alias_token = node.get_tokens(Token.NAME)[-1] if separator else None
|
424
|
+
|
425
|
+
last_data_token = next(v for v in reversed(node.tokens) if v.type not in Token.NON_DATA_TOKENS)
|
426
|
+
if node.name:
|
427
|
+
self._results.append(
|
428
|
+
LibraryImport(
|
429
|
+
name=node.name,
|
430
|
+
name_token=name if name is not None else None,
|
431
|
+
args=node.args,
|
432
|
+
alias=node.alias,
|
433
|
+
alias_token=alias_token,
|
434
|
+
line_no=node.lineno,
|
435
|
+
col_offset=node.col_offset,
|
436
|
+
end_line_no=last_data_token.lineno
|
437
|
+
if last_data_token is not None
|
438
|
+
else node.end_lineno
|
439
|
+
if node.end_lineno is not None
|
440
|
+
else -1,
|
441
|
+
end_col_offset=last_data_token.end_col_offset
|
442
|
+
if last_data_token is not None
|
443
|
+
else node.end_col_offset
|
444
|
+
if node.end_col_offset is not None
|
445
|
+
else -1,
|
446
|
+
source=self.source,
|
447
|
+
)
|
448
|
+
)
|
449
|
+
|
450
|
+
def visit_ResourceImport(self, node: RobotResourceImport) -> None: # noqa: N802
|
451
|
+
name = node.get_token(Token.NAME)
|
452
|
+
|
453
|
+
last_data_token = next(v for v in reversed(node.tokens) if v.type not in Token.NON_DATA_TOKENS)
|
454
|
+
if node.name:
|
455
|
+
self._results.append(
|
456
|
+
ResourceImport(
|
457
|
+
name=node.name,
|
458
|
+
name_token=name if name is not None else None,
|
459
|
+
line_no=node.lineno,
|
460
|
+
col_offset=node.col_offset,
|
461
|
+
end_line_no=last_data_token.lineno
|
462
|
+
if last_data_token is not None
|
463
|
+
else node.end_lineno
|
464
|
+
if node.end_lineno is not None
|
465
|
+
else -1,
|
466
|
+
end_col_offset=last_data_token.end_col_offset
|
467
|
+
if last_data_token is not None
|
468
|
+
else node.end_col_offset
|
469
|
+
if node.end_col_offset is not None
|
470
|
+
else -1,
|
471
|
+
source=self.source,
|
472
|
+
)
|
473
|
+
)
|
474
|
+
|
475
|
+
def visit_VariablesImport(self, node: RobotVariablesImport) -> None: # noqa: N802
|
476
|
+
name = node.get_token(Token.NAME)
|
477
|
+
|
478
|
+
last_data_token = next(v for v in reversed(node.tokens) if v.type not in Token.NON_DATA_TOKENS)
|
479
|
+
if node.name:
|
480
|
+
self._results.append(
|
481
|
+
VariablesImport(
|
482
|
+
name=node.name,
|
483
|
+
name_token=name if name is not None else None,
|
484
|
+
args=node.args,
|
485
|
+
line_no=node.lineno,
|
486
|
+
col_offset=node.col_offset,
|
487
|
+
end_line_no=last_data_token.lineno
|
488
|
+
if last_data_token is not None
|
489
|
+
else node.end_lineno
|
490
|
+
if node.end_lineno is not None
|
491
|
+
else -1,
|
492
|
+
end_col_offset=last_data_token.end_col_offset
|
493
|
+
if last_data_token is not None
|
494
|
+
else node.end_col_offset
|
495
|
+
if node.end_col_offset is not None
|
496
|
+
else -1,
|
497
|
+
source=self.source,
|
498
|
+
)
|
499
|
+
)
|
500
|
+
|
501
|
+
|
502
|
+
class DocumentType(enum.Enum):
|
503
|
+
UNKNOWN = "unknown"
|
504
|
+
GENERAL = "robot"
|
505
|
+
RESOURCE = "resource"
|
506
|
+
INIT = "init"
|
507
|
+
|
508
|
+
|
509
|
+
class Namespace:
|
510
|
+
_logger = LoggingDescriptor()
|
511
|
+
|
512
|
+
@_logger.call
|
513
|
+
def __init__(
|
514
|
+
self,
|
515
|
+
imports_manager: ImportsManager,
|
516
|
+
model: ast.AST,
|
517
|
+
source: str,
|
518
|
+
document: Optional[TextDocument] = None,
|
519
|
+
document_type: Optional[DocumentType] = None,
|
520
|
+
languages: Optional[Languages] = None,
|
521
|
+
workspace_languages: Optional[Languages] = None,
|
522
|
+
) -> None:
|
523
|
+
super().__init__()
|
524
|
+
|
525
|
+
self.imports_manager = imports_manager
|
526
|
+
|
527
|
+
self.model = model
|
528
|
+
self.source = source
|
529
|
+
self._document = weakref.ref(document) if document is not None else None
|
530
|
+
self.document_type: Optional[DocumentType] = document_type
|
531
|
+
self.languages = languages
|
532
|
+
self.workspace_languages = workspace_languages
|
533
|
+
|
534
|
+
self._libraries: Dict[str, LibraryEntry] = OrderedDict()
|
535
|
+
self._namespaces: Optional[Dict[KeywordMatcher, List[LibraryEntry]]] = None
|
536
|
+
self._libraries_matchers: Optional[Dict[KeywordMatcher, LibraryEntry]] = None
|
537
|
+
self._resources: Dict[str, ResourceEntry] = OrderedDict()
|
538
|
+
self._resources_matchers: Optional[Dict[KeywordMatcher, ResourceEntry]] = None
|
539
|
+
self._variables: Dict[str, VariablesEntry] = OrderedDict()
|
540
|
+
self._initialized = False
|
541
|
+
self._invalid = False
|
542
|
+
self._initialize_lock = RLock(default_timeout=120, name="Namespace.initialize")
|
543
|
+
self._analyzed = False
|
544
|
+
self._analyze_lock = RLock(default_timeout=120, name="Namespace.analyze")
|
545
|
+
self._library_doc: Optional[LibraryDoc] = None
|
546
|
+
self._library_doc_lock = RLock(default_timeout=120, name="Namespace.library_doc")
|
547
|
+
self._imports: Optional[List[Import]] = None
|
548
|
+
self._import_entries: Dict[Import, LibraryEntry] = OrderedDict()
|
549
|
+
self._own_variables: Optional[List[VariableDefinition]] = None
|
550
|
+
self._own_variables_lock = RLock(default_timeout=120, name="Namespace.own_variables")
|
551
|
+
self._global_variables: Optional[List[VariableDefinition]] = None
|
552
|
+
self._global_variables_lock = RLock(default_timeout=120, name="Namespace.global_variables")
|
553
|
+
|
554
|
+
self._diagnostics: List[Diagnostic] = []
|
555
|
+
self._keyword_references: Dict[KeywordDoc, Set[Location]] = {}
|
556
|
+
self._variable_references: Dict[VariableDefinition, Set[Location]] = {}
|
557
|
+
self._local_variable_assignments: Dict[VariableDefinition, Set[Range]] = {}
|
558
|
+
self._namespace_references: Dict[LibraryEntry, Set[Location]] = {}
|
559
|
+
|
560
|
+
self._imported_keywords: Optional[List[KeywordDoc]] = None
|
561
|
+
self._imported_keywords_lock = RLock(default_timeout=120, name="Namespace.imported_keywords")
|
562
|
+
self._keywords: Optional[List[KeywordDoc]] = None
|
563
|
+
self._keywords_lock = RLock(default_timeout=120, name="Namespace.keywords")
|
564
|
+
|
565
|
+
# TODO: how to get the search order from model
|
566
|
+
self.search_order: Tuple[str, ...] = ()
|
567
|
+
|
568
|
+
self._finder: Optional[KeywordFinder] = None
|
569
|
+
|
570
|
+
self.imports_manager.imports_changed.add(self.imports_changed)
|
571
|
+
self.imports_manager.libraries_changed.add(self.libraries_changed)
|
572
|
+
self.imports_manager.resources_changed.add(self.resources_changed)
|
573
|
+
self.imports_manager.variables_changed.add(self.variables_changed)
|
574
|
+
|
575
|
+
self._in_initialize = False
|
576
|
+
|
577
|
+
self._ignored_lines: Optional[List[int]] = None
|
578
|
+
|
579
|
+
@event
|
580
|
+
def has_invalidated(sender) -> None:
|
581
|
+
...
|
582
|
+
|
583
|
+
@event
|
584
|
+
def has_initialized(sender) -> None:
|
585
|
+
...
|
586
|
+
|
587
|
+
@event
|
588
|
+
def has_imports_changed(sender) -> None:
|
589
|
+
...
|
590
|
+
|
591
|
+
@event
|
592
|
+
def has_analysed(sender) -> None:
|
593
|
+
...
|
594
|
+
|
595
|
+
@property
|
596
|
+
def document(self) -> Optional[TextDocument]:
|
597
|
+
return self._document() if self._document is not None else None
|
598
|
+
|
599
|
+
def imports_changed(self, sender: Any, uri: DocumentUri) -> None:
|
600
|
+
# TODO: optimise this by checking our imports
|
601
|
+
if self.document is not None:
|
602
|
+
self.document.set_data(Namespace.DataEntry, None)
|
603
|
+
|
604
|
+
self.invalidate()
|
605
|
+
|
606
|
+
@_logger.call
|
607
|
+
def libraries_changed(self, sender: Any, libraries: List[LibraryDoc]) -> None:
|
608
|
+
if not self.initialized or self.invalid:
|
609
|
+
return
|
610
|
+
|
611
|
+
invalidate = False
|
612
|
+
|
613
|
+
for p in libraries:
|
614
|
+
if any(e for e in self._libraries.values() if e.library_doc == p):
|
615
|
+
invalidate = True
|
616
|
+
break
|
617
|
+
|
618
|
+
if invalidate:
|
619
|
+
if self.document is not None:
|
620
|
+
self.document.set_data(Namespace.DataEntry, None)
|
621
|
+
|
622
|
+
self.invalidate()
|
623
|
+
|
624
|
+
@_logger.call
|
625
|
+
def resources_changed(self, sender: Any, resources: List[LibraryDoc]) -> None:
|
626
|
+
if not self.initialized or self.invalid:
|
627
|
+
return
|
628
|
+
|
629
|
+
invalidate = False
|
630
|
+
|
631
|
+
for p in resources:
|
632
|
+
if any(e for e in self._resources.values() if e.library_doc.source == p.source):
|
633
|
+
invalidate = True
|
634
|
+
break
|
635
|
+
|
636
|
+
if invalidate:
|
637
|
+
if self.document is not None:
|
638
|
+
self.document.set_data(Namespace.DataEntry, None)
|
639
|
+
|
640
|
+
self.invalidate()
|
641
|
+
|
642
|
+
@_logger.call
|
643
|
+
def variables_changed(self, sender: Any, variables: List[LibraryDoc]) -> None:
|
644
|
+
if not self.initialized or self.invalid:
|
645
|
+
return
|
646
|
+
|
647
|
+
invalidate = False
|
648
|
+
|
649
|
+
for p in variables:
|
650
|
+
if any(e for e in self._variables.values() if e.library_doc.source == p.source):
|
651
|
+
invalidate = True
|
652
|
+
break
|
653
|
+
|
654
|
+
if invalidate:
|
655
|
+
if self.document is not None:
|
656
|
+
self.document.set_data(Namespace.DataEntry, None)
|
657
|
+
|
658
|
+
self.invalidate()
|
659
|
+
|
660
|
+
def is_initialized(self) -> bool:
|
661
|
+
with self._initialize_lock:
|
662
|
+
return self._initialized
|
663
|
+
|
664
|
+
def _invalidate(self) -> None:
|
665
|
+
self._invalid = True
|
666
|
+
|
667
|
+
@_logger.call
|
668
|
+
def invalidate(self) -> None:
|
669
|
+
with self._initialize_lock:
|
670
|
+
self._invalidate()
|
671
|
+
self.has_invalidated(self)
|
672
|
+
|
673
|
+
@_logger.call
|
674
|
+
def get_diagnostics(self) -> List[Diagnostic]:
|
675
|
+
self.ensure_initialized()
|
676
|
+
|
677
|
+
self.analyze()
|
678
|
+
|
679
|
+
return self._diagnostics
|
680
|
+
|
681
|
+
@_logger.call
|
682
|
+
def get_keyword_references(self) -> Dict[KeywordDoc, Set[Location]]:
|
683
|
+
self.ensure_initialized()
|
684
|
+
|
685
|
+
self.analyze()
|
686
|
+
|
687
|
+
return self._keyword_references
|
688
|
+
|
689
|
+
def get_variable_references(
|
690
|
+
self,
|
691
|
+
) -> Dict[VariableDefinition, Set[Location]]:
|
692
|
+
self.ensure_initialized()
|
693
|
+
|
694
|
+
self.analyze()
|
695
|
+
|
696
|
+
return self._variable_references
|
697
|
+
|
698
|
+
def get_local_variable_assignments(
|
699
|
+
self,
|
700
|
+
) -> Dict[VariableDefinition, Set[Range]]:
|
701
|
+
self.ensure_initialized()
|
702
|
+
|
703
|
+
self.analyze()
|
704
|
+
|
705
|
+
return self._local_variable_assignments
|
706
|
+
|
707
|
+
def get_namespace_references(self) -> Dict[LibraryEntry, Set[Location]]:
|
708
|
+
self.ensure_initialized()
|
709
|
+
|
710
|
+
self.analyze()
|
711
|
+
|
712
|
+
return self._namespace_references
|
713
|
+
|
714
|
+
def get_import_entries(self) -> Dict[Import, LibraryEntry]:
|
715
|
+
self.ensure_initialized()
|
716
|
+
|
717
|
+
return self._import_entries
|
718
|
+
|
719
|
+
def get_libraries(self) -> Dict[str, LibraryEntry]:
|
720
|
+
self.ensure_initialized()
|
721
|
+
|
722
|
+
return self._libraries
|
723
|
+
|
724
|
+
def get_namespaces(self) -> Dict[KeywordMatcher, List[LibraryEntry]]:
|
725
|
+
self.ensure_initialized()
|
726
|
+
|
727
|
+
if self._namespaces is None:
|
728
|
+
self._namespaces = defaultdict(list)
|
729
|
+
|
730
|
+
for v in (self.get_libraries()).values():
|
731
|
+
self._namespaces[KeywordMatcher(v.alias or v.name or v.import_name, is_namespace=True)].append(v)
|
732
|
+
for v in (self.get_resources()).values():
|
733
|
+
self._namespaces[KeywordMatcher(v.alias or v.name or v.import_name, is_namespace=True)].append(v)
|
734
|
+
return self._namespaces
|
735
|
+
|
736
|
+
def get_resources(self) -> Dict[str, ResourceEntry]:
|
737
|
+
self.ensure_initialized()
|
738
|
+
|
739
|
+
return self._resources
|
740
|
+
|
741
|
+
def get_imported_variables(self) -> Dict[str, VariablesEntry]:
|
742
|
+
self.ensure_initialized()
|
743
|
+
|
744
|
+
return self._variables
|
745
|
+
|
746
|
+
@_logger.call
|
747
|
+
def get_library_doc(self) -> LibraryDoc:
|
748
|
+
with self._library_doc_lock:
|
749
|
+
if self._library_doc is None:
|
750
|
+
self._library_doc = self.imports_manager.get_libdoc_from_model(
|
751
|
+
self.model,
|
752
|
+
self.source,
|
753
|
+
model_type="RESOURCE",
|
754
|
+
append_model_errors=self.document_type is not None and self.document_type == DocumentType.RESOURCE,
|
755
|
+
)
|
756
|
+
|
757
|
+
return self._library_doc
|
758
|
+
|
759
|
+
class DataEntry(NamedTuple):
|
760
|
+
libraries: Dict[str, LibraryEntry] = OrderedDict()
|
761
|
+
resources: Dict[str, ResourceEntry] = OrderedDict()
|
762
|
+
variables: Dict[str, VariablesEntry] = OrderedDict()
|
763
|
+
diagnostics: List[Diagnostic] = []
|
764
|
+
import_entries: Dict[Import, LibraryEntry] = OrderedDict()
|
765
|
+
imported_keywords: Optional[List[KeywordDoc]] = None
|
766
|
+
|
767
|
+
@_logger.call(condition=lambda self: not self._initialized)
|
768
|
+
def ensure_initialized(self) -> bool:
|
769
|
+
run_initialize = False
|
770
|
+
imports_changed = False
|
771
|
+
|
772
|
+
with self._initialize_lock:
|
773
|
+
if not self._initialized:
|
774
|
+
if self._in_initialize:
|
775
|
+
self._logger.critical(lambda: f"already initialized {self.document}")
|
776
|
+
|
777
|
+
self._in_initialize = True
|
778
|
+
|
779
|
+
try:
|
780
|
+
self._logger.debug(lambda: f"ensure_initialized -> initialize {self.document}")
|
781
|
+
|
782
|
+
imports = self.get_imports()
|
783
|
+
|
784
|
+
data_entry: Optional[Namespace.DataEntry] = None
|
785
|
+
if self.document is not None:
|
786
|
+
# check or save several data in documents data cache,
|
787
|
+
# if imports are different, then the data is invalid
|
788
|
+
old_imports: Optional[List[Import]] = self.document.get_data(Namespace)
|
789
|
+
if old_imports is None:
|
790
|
+
self.document.set_data(Namespace, imports)
|
791
|
+
elif old_imports != imports:
|
792
|
+
imports_changed = True
|
793
|
+
|
794
|
+
self.document.set_data(Namespace, imports)
|
795
|
+
self.document.set_data(Namespace.DataEntry, None)
|
796
|
+
else:
|
797
|
+
data_entry = self.document.get_data(Namespace.DataEntry)
|
798
|
+
|
799
|
+
if data_entry is not None:
|
800
|
+
self._libraries = data_entry.libraries.copy()
|
801
|
+
self._resources = data_entry.resources.copy()
|
802
|
+
self._variables = data_entry.variables.copy()
|
803
|
+
self._diagnostics = data_entry.diagnostics.copy()
|
804
|
+
self._import_entries = data_entry.import_entries.copy()
|
805
|
+
self._imported_keywords = (
|
806
|
+
data_entry.imported_keywords.copy() if data_entry.imported_keywords else None
|
807
|
+
)
|
808
|
+
else:
|
809
|
+
variables = self.get_resolvable_variables()
|
810
|
+
|
811
|
+
self._import_default_libraries(variables)
|
812
|
+
self._import_imports(
|
813
|
+
imports,
|
814
|
+
str(Path(self.source).parent),
|
815
|
+
top_level=True,
|
816
|
+
variables=variables,
|
817
|
+
)
|
818
|
+
|
819
|
+
if self.document is not None:
|
820
|
+
self.document.set_data(
|
821
|
+
Namespace.DataEntry,
|
822
|
+
Namespace.DataEntry(
|
823
|
+
self._libraries.copy(),
|
824
|
+
self._resources.copy(),
|
825
|
+
self._variables.copy(),
|
826
|
+
self._diagnostics.copy(),
|
827
|
+
self._import_entries.copy(),
|
828
|
+
self._imported_keywords.copy() if self._imported_keywords else None,
|
829
|
+
),
|
830
|
+
)
|
831
|
+
|
832
|
+
self._reset_global_variables()
|
833
|
+
|
834
|
+
self._initialized = True
|
835
|
+
run_initialize = True
|
836
|
+
|
837
|
+
except BaseException:
|
838
|
+
if self.document is not None:
|
839
|
+
self.document.remove_data(Namespace)
|
840
|
+
self.document.remove_data(Namespace.DataEntry)
|
841
|
+
|
842
|
+
self._invalidate()
|
843
|
+
raise
|
844
|
+
finally:
|
845
|
+
self._in_initialize = False
|
846
|
+
|
847
|
+
if run_initialize:
|
848
|
+
self.has_initialized(self)
|
849
|
+
|
850
|
+
if imports_changed:
|
851
|
+
self.has_imports_changed(self)
|
852
|
+
|
853
|
+
return self._initialized
|
854
|
+
|
855
|
+
@property
|
856
|
+
def initialized(self) -> bool:
|
857
|
+
return self._initialized
|
858
|
+
|
859
|
+
@property
|
860
|
+
def invalid(self) -> bool:
|
861
|
+
return self._invalid
|
862
|
+
|
863
|
+
@_logger.call
|
864
|
+
def get_imports(self) -> List[Import]:
|
865
|
+
if self._imports is None:
|
866
|
+
self._imports = ImportVisitor().get(self.source, self.model)
|
867
|
+
|
868
|
+
return self._imports
|
869
|
+
|
870
|
+
@_logger.call
|
871
|
+
def get_own_variables(self) -> List[VariableDefinition]:
|
872
|
+
with self._own_variables_lock:
|
873
|
+
if self._own_variables is None:
|
874
|
+
self._own_variables = VariablesVisitor().get(self.source, self.model)
|
875
|
+
|
876
|
+
return self._own_variables
|
877
|
+
|
878
|
+
_builtin_variables: Optional[List[BuiltInVariableDefinition]] = None
|
879
|
+
|
880
|
+
@classmethod
|
881
|
+
def get_builtin_variables(cls) -> List[BuiltInVariableDefinition]:
|
882
|
+
if cls._builtin_variables is None:
|
883
|
+
cls._builtin_variables = [BuiltInVariableDefinition(0, 0, 0, 0, "", n, None) for n in BUILTIN_VARIABLES]
|
884
|
+
|
885
|
+
return cls._builtin_variables
|
886
|
+
|
887
|
+
@_logger.call
|
888
|
+
def get_command_line_variables(self) -> List[VariableDefinition]:
|
889
|
+
return self.imports_manager.get_command_line_variables()
|
890
|
+
|
891
|
+
def _reset_global_variables(self) -> None:
|
892
|
+
with self._global_variables_lock:
|
893
|
+
self._global_variables = None
|
894
|
+
|
895
|
+
def get_global_variables(self) -> List[VariableDefinition]:
|
896
|
+
with self._global_variables_lock:
|
897
|
+
if self._global_variables is None:
|
898
|
+
self._global_variables = list(
|
899
|
+
itertools.chain(
|
900
|
+
self.get_command_line_variables(),
|
901
|
+
self.get_own_variables(),
|
902
|
+
*(e.variables for e in self._resources.values()),
|
903
|
+
*(e.variables for e in self._variables.values()),
|
904
|
+
self.get_builtin_variables(),
|
905
|
+
)
|
906
|
+
)
|
907
|
+
|
908
|
+
return self._global_variables
|
909
|
+
|
910
|
+
def yield_variables(
|
911
|
+
self,
|
912
|
+
nodes: Optional[List[ast.AST]] = None,
|
913
|
+
position: Optional[Position] = None,
|
914
|
+
skip_commandline_variables: bool = False,
|
915
|
+
) -> Iterator[Tuple[VariableMatcher, VariableDefinition]]:
|
916
|
+
yielded: Dict[VariableMatcher, VariableDefinition] = {}
|
917
|
+
|
918
|
+
test_or_keyword_nodes = list(
|
919
|
+
itertools.dropwhile(
|
920
|
+
lambda v: not isinstance(v, (TestCase, Keyword)),
|
921
|
+
nodes if nodes else [],
|
922
|
+
)
|
923
|
+
)
|
924
|
+
test_or_keyword = test_or_keyword_nodes[0] if test_or_keyword_nodes else None
|
925
|
+
|
926
|
+
for var in chain(
|
927
|
+
*[
|
928
|
+
(
|
929
|
+
BlockVariableVisitor(
|
930
|
+
self.get_library_doc(),
|
931
|
+
self.source,
|
932
|
+
position,
|
933
|
+
isinstance(test_or_keyword_nodes[-1], Arguments) if nodes else False,
|
934
|
+
).get(test_or_keyword)
|
935
|
+
)
|
936
|
+
if test_or_keyword is not None
|
937
|
+
else []
|
938
|
+
],
|
939
|
+
self.get_global_variables(),
|
940
|
+
):
|
941
|
+
if var.matcher not in yielded:
|
942
|
+
if skip_commandline_variables and isinstance(var, CommandLineVariableDefinition):
|
943
|
+
continue
|
944
|
+
|
945
|
+
yielded[var.matcher] = var
|
946
|
+
|
947
|
+
yield var.matcher, var
|
948
|
+
|
949
|
+
def get_resolvable_variables(
|
950
|
+
self,
|
951
|
+
nodes: Optional[List[ast.AST]] = None,
|
952
|
+
position: Optional[Position] = None,
|
953
|
+
) -> Dict[str, Any]:
|
954
|
+
return {
|
955
|
+
v.name: v.value
|
956
|
+
for k, v in self.yield_variables(nodes, position, skip_commandline_variables=True)
|
957
|
+
if v.has_value
|
958
|
+
}
|
959
|
+
|
960
|
+
def get_variable_matchers(
|
961
|
+
self,
|
962
|
+
nodes: Optional[List[ast.AST]] = None,
|
963
|
+
position: Optional[Position] = None,
|
964
|
+
) -> Dict[VariableMatcher, VariableDefinition]:
|
965
|
+
self.ensure_initialized()
|
966
|
+
|
967
|
+
return {m: v for m, v in self.yield_variables(nodes, position)}
|
968
|
+
|
969
|
+
@_logger.call
|
970
|
+
def find_variable(
|
971
|
+
self,
|
972
|
+
name: str,
|
973
|
+
nodes: Optional[List[ast.AST]] = None,
|
974
|
+
position: Optional[Position] = None,
|
975
|
+
skip_commandline_variables: bool = False,
|
976
|
+
ignore_error: bool = False,
|
977
|
+
) -> Optional[VariableDefinition]:
|
978
|
+
self.ensure_initialized()
|
979
|
+
|
980
|
+
if name[:2] == "%{" and name[-1] == "}":
|
981
|
+
var_name, _, default_value = name[2:-1].partition("=")
|
982
|
+
return EnvironmentVariableDefinition(
|
983
|
+
0,
|
984
|
+
0,
|
985
|
+
0,
|
986
|
+
0,
|
987
|
+
"",
|
988
|
+
f"%{{{var_name}}}",
|
989
|
+
None,
|
990
|
+
default_value=default_value or None,
|
991
|
+
)
|
992
|
+
|
993
|
+
try:
|
994
|
+
matcher = VariableMatcher(name)
|
995
|
+
|
996
|
+
for m, v in self.yield_variables(
|
997
|
+
nodes,
|
998
|
+
position,
|
999
|
+
skip_commandline_variables=skip_commandline_variables,
|
1000
|
+
):
|
1001
|
+
if matcher == m:
|
1002
|
+
return v
|
1003
|
+
except InvalidVariableError:
|
1004
|
+
if not ignore_error:
|
1005
|
+
raise
|
1006
|
+
|
1007
|
+
return None
|
1008
|
+
|
1009
|
+
def _import_imports(
|
1010
|
+
self,
|
1011
|
+
imports: Iterable[Import],
|
1012
|
+
base_dir: str,
|
1013
|
+
*,
|
1014
|
+
top_level: bool = False,
|
1015
|
+
variables: Optional[Dict[str, Any]] = None,
|
1016
|
+
source: Optional[str] = None,
|
1017
|
+
parent_import: Optional[Import] = None,
|
1018
|
+
) -> None:
|
1019
|
+
def _import(
|
1020
|
+
value: Import, variables: Optional[Dict[str, Any]] = None
|
1021
|
+
) -> Tuple[Optional[LibraryEntry], Optional[Dict[str, Any]]]:
|
1022
|
+
result: Optional[LibraryEntry] = None
|
1023
|
+
try:
|
1024
|
+
if isinstance(value, LibraryImport):
|
1025
|
+
if value.name is None:
|
1026
|
+
raise NameSpaceError("Library setting requires value.")
|
1027
|
+
|
1028
|
+
result = self._get_library_entry(
|
1029
|
+
value.name,
|
1030
|
+
value.args,
|
1031
|
+
value.alias,
|
1032
|
+
base_dir,
|
1033
|
+
sentinel=value,
|
1034
|
+
variables=variables,
|
1035
|
+
)
|
1036
|
+
result.import_range = value.range
|
1037
|
+
result.import_source = value.source
|
1038
|
+
result.alias_range = value.alias_range
|
1039
|
+
|
1040
|
+
self._import_entries[value] = result
|
1041
|
+
|
1042
|
+
if (
|
1043
|
+
top_level
|
1044
|
+
and result.library_doc.errors is None
|
1045
|
+
and (len(result.library_doc.keywords) == 0 and not bool(result.library_doc.has_listener))
|
1046
|
+
):
|
1047
|
+
self.append_diagnostics(
|
1048
|
+
range=value.range,
|
1049
|
+
message=f"Imported library '{value.name}' contains no keywords.",
|
1050
|
+
severity=DiagnosticSeverity.WARNING,
|
1051
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1052
|
+
code=Error.LIBRARY_CONTAINS_NO_KEYWORDS,
|
1053
|
+
)
|
1054
|
+
elif isinstance(value, ResourceImport):
|
1055
|
+
if value.name is None:
|
1056
|
+
raise NameSpaceError("Resource setting requires value.")
|
1057
|
+
|
1058
|
+
source = self.imports_manager.find_resource(value.name, base_dir, variables=variables)
|
1059
|
+
|
1060
|
+
if self.source == source:
|
1061
|
+
if parent_import:
|
1062
|
+
self.append_diagnostics(
|
1063
|
+
range=parent_import.range,
|
1064
|
+
message="Possible circular import.",
|
1065
|
+
severity=DiagnosticSeverity.INFORMATION,
|
1066
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1067
|
+
related_information=[
|
1068
|
+
DiagnosticRelatedInformation(
|
1069
|
+
location=Location(
|
1070
|
+
str(Uri.from_path(value.source)),
|
1071
|
+
value.range,
|
1072
|
+
),
|
1073
|
+
message=f"'{Path(self.source).name}' is also imported here.",
|
1074
|
+
)
|
1075
|
+
]
|
1076
|
+
if value.source
|
1077
|
+
else None,
|
1078
|
+
code=Error.POSSIBLE_CIRCULAR_IMPORT,
|
1079
|
+
)
|
1080
|
+
else:
|
1081
|
+
result = self._get_resource_entry(
|
1082
|
+
value.name,
|
1083
|
+
base_dir,
|
1084
|
+
sentinel=value,
|
1085
|
+
variables=variables,
|
1086
|
+
)
|
1087
|
+
result.import_range = value.range
|
1088
|
+
result.import_source = value.source
|
1089
|
+
|
1090
|
+
self._import_entries[value] = result
|
1091
|
+
if result.variables:
|
1092
|
+
variables = None
|
1093
|
+
|
1094
|
+
if top_level and (
|
1095
|
+
not result.library_doc.errors
|
1096
|
+
and top_level
|
1097
|
+
and not result.imports
|
1098
|
+
and not result.variables
|
1099
|
+
and not result.library_doc.keywords
|
1100
|
+
):
|
1101
|
+
self.append_diagnostics(
|
1102
|
+
range=value.range,
|
1103
|
+
message=f"Imported resource file '{value.name}' is empty.",
|
1104
|
+
severity=DiagnosticSeverity.WARNING,
|
1105
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1106
|
+
code=Error.RESOURCE_EMPTY,
|
1107
|
+
)
|
1108
|
+
|
1109
|
+
elif isinstance(value, VariablesImport):
|
1110
|
+
if value.name is None:
|
1111
|
+
raise NameSpaceError("Variables setting requires value.")
|
1112
|
+
|
1113
|
+
result = self._get_variables_entry(
|
1114
|
+
value.name,
|
1115
|
+
value.args,
|
1116
|
+
base_dir,
|
1117
|
+
sentinel=value,
|
1118
|
+
variables=variables,
|
1119
|
+
)
|
1120
|
+
|
1121
|
+
result.import_range = value.range
|
1122
|
+
result.import_source = value.source
|
1123
|
+
|
1124
|
+
self._import_entries[value] = result
|
1125
|
+
variables = None
|
1126
|
+
else:
|
1127
|
+
raise DiagnosticsError("Unknown import type.")
|
1128
|
+
|
1129
|
+
if top_level and result is not None:
|
1130
|
+
if result.library_doc.source is not None and result.library_doc.errors:
|
1131
|
+
if any(err.source and Path(err.source).is_absolute() for err in result.library_doc.errors):
|
1132
|
+
self.append_diagnostics(
|
1133
|
+
range=value.range,
|
1134
|
+
message="Import definition contains errors.",
|
1135
|
+
severity=DiagnosticSeverity.ERROR,
|
1136
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1137
|
+
related_information=[
|
1138
|
+
DiagnosticRelatedInformation(
|
1139
|
+
location=Location(
|
1140
|
+
uri=str(Uri.from_path(err.source)),
|
1141
|
+
range=Range(
|
1142
|
+
start=Position(
|
1143
|
+
line=err.line_no - 1
|
1144
|
+
if err.line_no is not None
|
1145
|
+
else max(
|
1146
|
+
result.library_doc.line_no,
|
1147
|
+
0,
|
1148
|
+
),
|
1149
|
+
character=0,
|
1150
|
+
),
|
1151
|
+
end=Position(
|
1152
|
+
line=err.line_no - 1
|
1153
|
+
if err.line_no is not None
|
1154
|
+
else max(
|
1155
|
+
result.library_doc.line_no,
|
1156
|
+
0,
|
1157
|
+
),
|
1158
|
+
character=0,
|
1159
|
+
),
|
1160
|
+
),
|
1161
|
+
),
|
1162
|
+
message=err.message,
|
1163
|
+
)
|
1164
|
+
for err in result.library_doc.errors
|
1165
|
+
if err.source is not None
|
1166
|
+
],
|
1167
|
+
code=Error.IMPORT_CONTAINS_ERRORS,
|
1168
|
+
)
|
1169
|
+
for err in filter(
|
1170
|
+
lambda e: e.source is None or not Path(e.source).is_absolute(),
|
1171
|
+
result.library_doc.errors,
|
1172
|
+
):
|
1173
|
+
self.append_diagnostics(
|
1174
|
+
range=value.range,
|
1175
|
+
message=err.message,
|
1176
|
+
severity=DiagnosticSeverity.ERROR,
|
1177
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1178
|
+
code=err.type_name,
|
1179
|
+
)
|
1180
|
+
elif result.library_doc.errors is not None:
|
1181
|
+
for err in result.library_doc.errors:
|
1182
|
+
self.append_diagnostics(
|
1183
|
+
range=value.range,
|
1184
|
+
message=err.message,
|
1185
|
+
severity=DiagnosticSeverity.ERROR,
|
1186
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1187
|
+
code=err.type_name,
|
1188
|
+
)
|
1189
|
+
|
1190
|
+
except (SystemExit, KeyboardInterrupt):
|
1191
|
+
raise
|
1192
|
+
except BaseException as e:
|
1193
|
+
if top_level:
|
1194
|
+
self.append_diagnostics(
|
1195
|
+
range=value.range,
|
1196
|
+
message=str(e),
|
1197
|
+
severity=DiagnosticSeverity.ERROR,
|
1198
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1199
|
+
code=type(e).__qualname__,
|
1200
|
+
)
|
1201
|
+
finally:
|
1202
|
+
self._reset_global_variables()
|
1203
|
+
|
1204
|
+
return result, variables
|
1205
|
+
|
1206
|
+
current_time = time.monotonic()
|
1207
|
+
self._logger.debug(lambda: f"start imports for {self.document if top_level else source}")
|
1208
|
+
try:
|
1209
|
+
for imp in imports:
|
1210
|
+
if variables is None:
|
1211
|
+
variables = self.get_resolvable_variables()
|
1212
|
+
|
1213
|
+
entry, variables = _import(imp, variables=variables)
|
1214
|
+
|
1215
|
+
if entry is not None:
|
1216
|
+
if isinstance(entry, ResourceEntry):
|
1217
|
+
assert entry.library_doc.source is not None
|
1218
|
+
already_imported_resources = next(
|
1219
|
+
(e for e in self._resources.values() if e.library_doc.source == entry.library_doc.source),
|
1220
|
+
None,
|
1221
|
+
)
|
1222
|
+
|
1223
|
+
if already_imported_resources is None and entry.library_doc.source != self.source:
|
1224
|
+
self._resources[entry.import_name] = entry
|
1225
|
+
try:
|
1226
|
+
self._import_imports(
|
1227
|
+
entry.imports,
|
1228
|
+
str(Path(entry.library_doc.source).parent),
|
1229
|
+
top_level=False,
|
1230
|
+
variables=variables,
|
1231
|
+
source=entry.library_doc.source,
|
1232
|
+
parent_import=imp if top_level else parent_import,
|
1233
|
+
)
|
1234
|
+
except (SystemExit, KeyboardInterrupt):
|
1235
|
+
raise
|
1236
|
+
except BaseException as e:
|
1237
|
+
if top_level:
|
1238
|
+
self.append_diagnostics(
|
1239
|
+
range=entry.import_range,
|
1240
|
+
message=str(e) or type(entry).__name__,
|
1241
|
+
severity=DiagnosticSeverity.ERROR,
|
1242
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1243
|
+
code=type(e).__qualname__,
|
1244
|
+
)
|
1245
|
+
else:
|
1246
|
+
if top_level:
|
1247
|
+
if entry.library_doc.source == self.source:
|
1248
|
+
self.append_diagnostics(
|
1249
|
+
range=entry.import_range,
|
1250
|
+
message="Recursive resource import.",
|
1251
|
+
severity=DiagnosticSeverity.INFORMATION,
|
1252
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1253
|
+
code=Error.RECURSIVE_IMPORT,
|
1254
|
+
)
|
1255
|
+
elif (
|
1256
|
+
already_imported_resources is not None
|
1257
|
+
and already_imported_resources.library_doc.source
|
1258
|
+
):
|
1259
|
+
self.append_diagnostics(
|
1260
|
+
range=entry.import_range,
|
1261
|
+
message=f"Resource {entry} already imported.",
|
1262
|
+
severity=DiagnosticSeverity.INFORMATION,
|
1263
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1264
|
+
related_information=[
|
1265
|
+
DiagnosticRelatedInformation(
|
1266
|
+
location=Location(
|
1267
|
+
uri=str(Uri.from_path(already_imported_resources.import_source)),
|
1268
|
+
range=already_imported_resources.import_range,
|
1269
|
+
),
|
1270
|
+
message="",
|
1271
|
+
)
|
1272
|
+
]
|
1273
|
+
if already_imported_resources.import_source
|
1274
|
+
else None,
|
1275
|
+
code=Error.RESOURCE_ALREADY_IMPORTED,
|
1276
|
+
)
|
1277
|
+
|
1278
|
+
elif isinstance(entry, VariablesEntry):
|
1279
|
+
already_imported_variables = [
|
1280
|
+
e
|
1281
|
+
for e in self._variables.values()
|
1282
|
+
if e.library_doc.source == entry.library_doc.source
|
1283
|
+
and e.alias == entry.alias
|
1284
|
+
and e.args == entry.args
|
1285
|
+
]
|
1286
|
+
if (
|
1287
|
+
top_level
|
1288
|
+
and already_imported_variables
|
1289
|
+
and already_imported_variables[0].library_doc.source
|
1290
|
+
):
|
1291
|
+
self.append_diagnostics(
|
1292
|
+
range=entry.import_range,
|
1293
|
+
message=f'Variables "{entry}" already imported.',
|
1294
|
+
severity=DiagnosticSeverity.INFORMATION,
|
1295
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1296
|
+
related_information=[
|
1297
|
+
DiagnosticRelatedInformation(
|
1298
|
+
location=Location(
|
1299
|
+
uri=str(Uri.from_path(already_imported_variables[0].import_source)),
|
1300
|
+
range=already_imported_variables[0].import_range,
|
1301
|
+
),
|
1302
|
+
message="",
|
1303
|
+
)
|
1304
|
+
]
|
1305
|
+
if already_imported_variables[0].import_source
|
1306
|
+
else None,
|
1307
|
+
code=Error.VARIABLES_ALREADY_IMPORTED,
|
1308
|
+
)
|
1309
|
+
|
1310
|
+
if (entry.alias or entry.name or entry.import_name) not in self._variables:
|
1311
|
+
self._variables[entry.alias or entry.name or entry.import_name] = entry
|
1312
|
+
|
1313
|
+
elif isinstance(entry, LibraryEntry):
|
1314
|
+
if top_level and entry.name == BUILTIN_LIBRARY_NAME and entry.alias is None:
|
1315
|
+
self.append_diagnostics(
|
1316
|
+
range=entry.import_range,
|
1317
|
+
message=f'Library "{entry}" is not imported,'
|
1318
|
+
' because it would override the "BuiltIn" library.',
|
1319
|
+
severity=DiagnosticSeverity.INFORMATION,
|
1320
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1321
|
+
related_information=[
|
1322
|
+
DiagnosticRelatedInformation(
|
1323
|
+
location=Location(
|
1324
|
+
uri=str(Uri.from_path(entry.import_source)),
|
1325
|
+
range=entry.import_range,
|
1326
|
+
),
|
1327
|
+
message="",
|
1328
|
+
)
|
1329
|
+
]
|
1330
|
+
if entry.import_source
|
1331
|
+
else None,
|
1332
|
+
code=Error.LIBRARY_OVERRIDES_BUILTIN,
|
1333
|
+
)
|
1334
|
+
continue
|
1335
|
+
|
1336
|
+
already_imported_library = [
|
1337
|
+
e
|
1338
|
+
for e in self._libraries.values()
|
1339
|
+
if e.library_doc.source == entry.library_doc.source
|
1340
|
+
and e.library_doc.member_name == entry.library_doc.member_name
|
1341
|
+
and e.alias == entry.alias
|
1342
|
+
and e.args == entry.args
|
1343
|
+
]
|
1344
|
+
if top_level and already_imported_library and already_imported_library[0].library_doc.source:
|
1345
|
+
self.append_diagnostics(
|
1346
|
+
range=entry.import_range,
|
1347
|
+
message=f'Library "{entry}" already imported.',
|
1348
|
+
severity=DiagnosticSeverity.INFORMATION,
|
1349
|
+
source=DIAGNOSTICS_SOURCE_NAME,
|
1350
|
+
related_information=[
|
1351
|
+
DiagnosticRelatedInformation(
|
1352
|
+
location=Location(
|
1353
|
+
uri=str(Uri.from_path(already_imported_library[0].import_source)),
|
1354
|
+
range=already_imported_library[0].import_range,
|
1355
|
+
),
|
1356
|
+
message="",
|
1357
|
+
)
|
1358
|
+
]
|
1359
|
+
if already_imported_library[0].import_source
|
1360
|
+
else None,
|
1361
|
+
code=Error.LIBRARY_ALREADY_IMPORTED,
|
1362
|
+
)
|
1363
|
+
|
1364
|
+
if (entry.alias or entry.name or entry.import_name) not in self._libraries:
|
1365
|
+
self._libraries[entry.alias or entry.name or entry.import_name] = entry
|
1366
|
+
finally:
|
1367
|
+
self._logger.debug(
|
1368
|
+
lambda: "end import imports for "
|
1369
|
+
f"{self.document if top_level else source} in {time.monotonic() - current_time}s"
|
1370
|
+
)
|
1371
|
+
|
1372
|
+
def _import_default_libraries(self, variables: Optional[Dict[str, Any]] = None) -> None:
|
1373
|
+
def _import_lib(library: str, variables: Optional[Dict[str, Any]] = None) -> Optional[LibraryEntry]:
|
1374
|
+
try:
|
1375
|
+
return self._get_library_entry(
|
1376
|
+
library,
|
1377
|
+
(),
|
1378
|
+
None,
|
1379
|
+
str(Path(self.source).parent),
|
1380
|
+
is_default_library=True,
|
1381
|
+
variables=variables,
|
1382
|
+
)
|
1383
|
+
except (SystemExit, KeyboardInterrupt):
|
1384
|
+
raise
|
1385
|
+
except BaseException as e:
|
1386
|
+
self.append_diagnostics(
|
1387
|
+
range=Range.zero(),
|
1388
|
+
message=f"Can't import default library '{library}': {str(e) or type(e).__name__}",
|
1389
|
+
severity=DiagnosticSeverity.ERROR,
|
1390
|
+
source="Robot",
|
1391
|
+
code=type(e).__qualname__,
|
1392
|
+
)
|
1393
|
+
return None
|
1394
|
+
|
1395
|
+
self._logger.debug(lambda: f"start import default libraries for document {self.document}")
|
1396
|
+
try:
|
1397
|
+
for library in DEFAULT_LIBRARIES:
|
1398
|
+
e = _import_lib(library, variables or self.get_resolvable_variables())
|
1399
|
+
if e is not None:
|
1400
|
+
self._libraries[e.alias or e.name or e.import_name] = e
|
1401
|
+
finally:
|
1402
|
+
self._logger.debug(lambda: f"end import default libraries for document {self.document}")
|
1403
|
+
|
1404
|
+
@_logger.call
|
1405
|
+
def _get_library_entry(
|
1406
|
+
self,
|
1407
|
+
name: str,
|
1408
|
+
args: Tuple[Any, ...],
|
1409
|
+
alias: Optional[str],
|
1410
|
+
base_dir: str,
|
1411
|
+
*,
|
1412
|
+
is_default_library: bool = False,
|
1413
|
+
sentinel: Any = None,
|
1414
|
+
variables: Optional[Dict[str, Any]] = None,
|
1415
|
+
) -> LibraryEntry:
|
1416
|
+
library_doc = self.imports_manager.get_libdoc_for_library_import(
|
1417
|
+
name,
|
1418
|
+
args,
|
1419
|
+
base_dir=base_dir,
|
1420
|
+
sentinel=None if is_default_library else sentinel,
|
1421
|
+
variables=variables or self.get_resolvable_variables(),
|
1422
|
+
)
|
1423
|
+
|
1424
|
+
return LibraryEntry(
|
1425
|
+
name=library_doc.name,
|
1426
|
+
import_name=name,
|
1427
|
+
library_doc=library_doc,
|
1428
|
+
args=args,
|
1429
|
+
alias=alias,
|
1430
|
+
)
|
1431
|
+
|
1432
|
+
@_logger.call
|
1433
|
+
def get_imported_library_libdoc(
|
1434
|
+
self, name: str, args: Tuple[str, ...] = (), alias: Optional[str] = None
|
1435
|
+
) -> Optional[LibraryDoc]:
|
1436
|
+
self.ensure_initialized()
|
1437
|
+
|
1438
|
+
return next(
|
1439
|
+
(
|
1440
|
+
v.library_doc
|
1441
|
+
for e, v in self._import_entries.items()
|
1442
|
+
if isinstance(e, LibraryImport) and v.import_name == name and v.args == args and v.alias == alias
|
1443
|
+
),
|
1444
|
+
None,
|
1445
|
+
)
|
1446
|
+
|
1447
|
+
@_logger.call
|
1448
|
+
def _get_resource_entry(
|
1449
|
+
self,
|
1450
|
+
name: str,
|
1451
|
+
base_dir: str,
|
1452
|
+
*,
|
1453
|
+
sentinel: Any = None,
|
1454
|
+
variables: Optional[Dict[str, Any]] = None,
|
1455
|
+
) -> ResourceEntry:
|
1456
|
+
(
|
1457
|
+
namespace,
|
1458
|
+
library_doc,
|
1459
|
+
) = self.imports_manager.get_namespace_and_libdoc_for_resource_import(
|
1460
|
+
name,
|
1461
|
+
base_dir,
|
1462
|
+
sentinel=sentinel,
|
1463
|
+
variables=variables or self.get_resolvable_variables(),
|
1464
|
+
)
|
1465
|
+
|
1466
|
+
return ResourceEntry(
|
1467
|
+
name=library_doc.name,
|
1468
|
+
import_name=name,
|
1469
|
+
library_doc=library_doc,
|
1470
|
+
imports=namespace.get_imports(),
|
1471
|
+
variables=namespace.get_own_variables(),
|
1472
|
+
)
|
1473
|
+
|
1474
|
+
@_logger.call
|
1475
|
+
def get_imported_resource_libdoc(self, name: str) -> Optional[LibraryDoc]:
|
1476
|
+
self.ensure_initialized()
|
1477
|
+
|
1478
|
+
return next(
|
1479
|
+
(
|
1480
|
+
v.library_doc
|
1481
|
+
for e, v in self._import_entries.items()
|
1482
|
+
if isinstance(e, ResourceImport) and v.import_name == name
|
1483
|
+
),
|
1484
|
+
None,
|
1485
|
+
)
|
1486
|
+
|
1487
|
+
@_logger.call
|
1488
|
+
def _get_variables_entry(
|
1489
|
+
self,
|
1490
|
+
name: str,
|
1491
|
+
args: Tuple[Any, ...],
|
1492
|
+
base_dir: str,
|
1493
|
+
*,
|
1494
|
+
sentinel: Any = None,
|
1495
|
+
variables: Optional[Dict[str, Any]] = None,
|
1496
|
+
) -> VariablesEntry:
|
1497
|
+
library_doc = self.imports_manager.get_libdoc_for_variables_import(
|
1498
|
+
name,
|
1499
|
+
args,
|
1500
|
+
base_dir=base_dir,
|
1501
|
+
sentinel=sentinel,
|
1502
|
+
variables=variables or self.get_resolvable_variables(),
|
1503
|
+
)
|
1504
|
+
|
1505
|
+
return VariablesEntry(
|
1506
|
+
name=library_doc.name,
|
1507
|
+
import_name=name,
|
1508
|
+
library_doc=library_doc,
|
1509
|
+
args=args,
|
1510
|
+
variables=library_doc.variables,
|
1511
|
+
)
|
1512
|
+
|
1513
|
+
@_logger.call
|
1514
|
+
def get_imported_variables_libdoc(self, name: str, args: Tuple[str, ...] = ()) -> Optional[LibraryDoc]:
|
1515
|
+
self.ensure_initialized()
|
1516
|
+
|
1517
|
+
return next(
|
1518
|
+
(
|
1519
|
+
v.library_doc
|
1520
|
+
for e, v in self._import_entries.items()
|
1521
|
+
if isinstance(e, VariablesImport) and v.import_name == name and v.args == args
|
1522
|
+
),
|
1523
|
+
None,
|
1524
|
+
)
|
1525
|
+
|
1526
|
+
def get_imported_keywords(self) -> List[KeywordDoc]:
|
1527
|
+
with self._imported_keywords_lock:
|
1528
|
+
if self._imported_keywords is None:
|
1529
|
+
self._imported_keywords = list(
|
1530
|
+
itertools.chain(
|
1531
|
+
*(e.library_doc.keywords for e in self._libraries.values()),
|
1532
|
+
*(e.library_doc.keywords for e in self._resources.values()),
|
1533
|
+
)
|
1534
|
+
)
|
1535
|
+
|
1536
|
+
return self._imported_keywords
|
1537
|
+
|
1538
|
+
@_logger.call
|
1539
|
+
def iter_all_keywords(self) -> Iterator[KeywordDoc]:
|
1540
|
+
import itertools
|
1541
|
+
|
1542
|
+
libdoc = self.get_library_doc()
|
1543
|
+
|
1544
|
+
for doc in itertools.chain(
|
1545
|
+
self.get_imported_keywords(),
|
1546
|
+
libdoc.keywords if libdoc is not None else [],
|
1547
|
+
):
|
1548
|
+
yield doc
|
1549
|
+
|
1550
|
+
@_logger.call
|
1551
|
+
def get_keywords(self) -> List[KeywordDoc]:
|
1552
|
+
with self._keywords_lock:
|
1553
|
+
if self._keywords is None:
|
1554
|
+
current_time = time.monotonic()
|
1555
|
+
self._logger.debug("start collecting keywords")
|
1556
|
+
try:
|
1557
|
+
i = 0
|
1558
|
+
|
1559
|
+
self.ensure_initialized()
|
1560
|
+
|
1561
|
+
result: Dict[KeywordMatcher, KeywordDoc] = {}
|
1562
|
+
|
1563
|
+
for doc in self.iter_all_keywords():
|
1564
|
+
i += 1
|
1565
|
+
result[doc.matcher] = doc
|
1566
|
+
|
1567
|
+
self._keywords = list(result.values())
|
1568
|
+
except BaseException:
|
1569
|
+
self._logger.debug("Canceled collecting keywords ")
|
1570
|
+
raise
|
1571
|
+
else:
|
1572
|
+
self._logger.debug(
|
1573
|
+
lambda: f"end collecting {len(self._keywords) if self._keywords else 0}"
|
1574
|
+
f" keywords in {time.monotonic() - current_time}s analyze {i} keywords"
|
1575
|
+
)
|
1576
|
+
|
1577
|
+
return self._keywords
|
1578
|
+
|
1579
|
+
    def append_diagnostics(
        self,
        range: Range,
        message: str,
        severity: Optional[DiagnosticSeverity] = None,
        code: Union[int, str, None] = None,
        code_description: Optional[CodeDescription] = None,
        source: Optional[str] = None,
        tags: Optional[List[DiagnosticTag]] = None,
        related_information: Optional[List[DiagnosticRelatedInformation]] = None,
        data: Optional[Any] = None,
    ) -> None:
        if self._should_ignore(range):
            return

        self._diagnostics.append(
            Diagnostic(
                range,
                message,
                severity,
                code,
                code_description,
                source,
                tags,
                related_information,
                data,
            )
        )

    @_logger.call(condition=lambda self: not self._analyzed)
    def analyze(self) -> None:
        import time

        from .namespace_analyzer import NamespaceAnalyzer

        with self._analyze_lock:
            if not self._analyzed:
                canceled = False

                self._logger.debug(lambda: f"start analyze {self.document}")
                start_time = time.monotonic()

                try:
                    result = NamespaceAnalyzer(
                        self.model,
                        self,
                        self.create_finder(),
                        self.get_ignored_lines(self.document) if self.document is not None else [],
                    ).run()

                    self._diagnostics += result.diagnostics
                    self._keyword_references = result.keyword_references
                    self._variable_references = result.variable_references
                    self._local_variable_assignments = result.local_variable_assignments
                    self._namespace_references = result.namespace_references

                    lib_doc = self.get_library_doc()

                    if lib_doc.errors is not None:
                        for err in lib_doc.errors:
                            self.append_diagnostics(
                                range=Range(
                                    start=Position(
                                        line=((err.line_no - 1) if err.line_no is not None else 0),
                                        character=0,
                                    ),
                                    end=Position(
                                        line=((err.line_no - 1) if err.line_no is not None else 0),
                                        character=0,
                                    ),
                                ),
                                message=err.message,
                                severity=DiagnosticSeverity.ERROR,
                                source=DIAGNOSTICS_SOURCE_NAME,
                                code=err.type_name,
                            )
                # TODO: implement CancelationToken
                except CancelledError:
                    canceled = True
                    self._logger.debug("analyzing canceled")
                    raise
                finally:
                    self._analyzed = not canceled

                    self._logger.debug(
                        lambda: f"end analyzed {self.document} succeed in {time.monotonic() - start_time}s"
                        if self._analyzed
                        else f"end analyzed {self.document} failed in {time.monotonic() - start_time}s"
                    )

                self.has_analysed(self)

    def get_finder(self) -> "KeywordFinder":
        if self._finder is None:
            self._finder = self.create_finder()
        return self._finder

    def create_finder(self) -> "KeywordFinder":
        self.ensure_initialized()
        return KeywordFinder(self, self.get_library_doc())

    @_logger.call(condition=lambda self, name, **kwargs: self._finder is not None and name not in self._finder._cache)
    def find_keyword(
        self,
        name: Optional[str],
        *,
        raise_keyword_error: bool = True,
        handle_bdd_style: bool = True,
    ) -> Optional[KeywordDoc]:
        finder = self._finder if self._finder is not None else self.get_finder()

        return finder.find_keyword(
            name,
            raise_keyword_error=raise_keyword_error,
            handle_bdd_style=handle_bdd_style,
        )

    @classmethod
    def get_ignored_lines(cls, document: TextDocument) -> List[int]:
        return document.get_cache(cls.__get_ignored_lines)

    @staticmethod
    def __get_ignored_lines(document: TextDocument) -> List[int]:
        result = []
        lines = document.get_lines()
        for line_no, line in enumerate(lines):
            comment = EXTRACT_COMMENT_PATTERN.match(line)
            if comment and comment.group("comment"):
                for match in ROBOTCODE_PATTERN.finditer(comment.group("comment")):
                    if match.group("rule") == "ignore":
                        result.append(line_no)

        return result

    @classmethod
    def should_ignore(cls, document: Optional[TextDocument], range: Range) -> bool:
        return cls.__should_ignore(
            cls.get_ignored_lines(document) if document is not None else [],
            range,
        )

    def _should_ignore(self, range: Range) -> bool:
        if self._ignored_lines is None:
            self._ignored_lines = self.get_ignored_lines(self.document) if self.document is not None else []

        return self.__should_ignore(self._ignored_lines, range)

    @staticmethod
    def __should_ignore(lines: List[int], range: Range) -> bool:
        import builtins

        return any(line_no in lines for line_no in builtins.range(range.start.line, range.end.line + 1))


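Note on the ignore handling above: __get_ignored_lines() collects the numbers of all lines whose trailing comment carries a robotcode "ignore" marker, and append_diagnostics()/should_ignore() drop any diagnostic whose range touches such a line. EXTRACT_COMMENT_PATTERN and ROBOTCODE_PATTERN are defined earlier in this file and are not repeated here; the sketch below uses simplified, assumed regexes purely to illustrate the idea.

    import re

    # Assumed, simplified stand-ins for the module's EXTRACT_COMMENT_PATTERN / ROBOTCODE_PATTERN.
    COMMENT = re.compile(r"(?:^|\s)#(?P<comment>.*)$")
    MARKER = re.compile(r"\brobotcode\s*:\s*(?P<rule>\w+)")

    def ignored_lines(lines):
        result = []
        for line_no, line in enumerate(lines):
            m = COMMENT.search(line)
            if m and m.group("comment"):
                for marker in MARKER.finditer(m.group("comment")):
                    if marker.group("rule") == "ignore":
                        result.append(line_no)
        return result

    sample = [
        "*** Test Cases ***",
        "Example",
        "    Unknown Keyword    arg    # robotcode: ignore",
    ]
    print(ignored_lines(sample))  # -> [2]
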
class DiagnosticsEntry(NamedTuple):
    message: str
    severity: DiagnosticSeverity
    code: Optional[str] = None


class CancelSearchError(Exception):
    pass


DEFAULT_BDD_PREFIXES = {"Given ", "When ", "Then ", "And ", "But "}


class KeywordFinder:
    def __init__(self, namespace: Namespace, library_doc: LibraryDoc) -> None:
        self.namespace = namespace
        self.self_library_doc = library_doc

        self.diagnostics: List[DiagnosticsEntry] = []
        self.multiple_keywords_result: Optional[List[KeywordDoc]] = None
        self._cache: Dict[
            Tuple[Optional[str], bool],
            Tuple[
                Optional[KeywordDoc],
                List[DiagnosticsEntry],
                Optional[List[KeywordDoc]],
            ],
        ] = {}
        self.handle_bdd_style = True
        self._all_keywords: Optional[List[LibraryEntry]] = None
        self._resource_keywords: Optional[List[ResourceEntry]] = None
        self._library_keywords: Optional[List[LibraryEntry]] = None

    def reset_diagnostics(self) -> None:
        self.diagnostics = []
        self.multiple_keywords_result = None

    def find_keyword(
        self,
        name: Optional[str],
        *,
        raise_keyword_error: bool = False,
        handle_bdd_style: bool = True,
    ) -> Optional[KeywordDoc]:
        try:
            self.reset_diagnostics()

            self.handle_bdd_style = handle_bdd_style

            cached = self._cache.get((name, self.handle_bdd_style), None)

            if cached is not None:
                self.diagnostics = cached[1]
                self.multiple_keywords_result = cached[2]
                return cached[0]

            try:
                result = self._find_keyword(name)
                if result is None:
                    self.diagnostics.append(
                        DiagnosticsEntry(
                            f"No keyword with name '{name}' found.",
                            DiagnosticSeverity.ERROR,
                            Error.KEYWORD_NOT_FOUND,
                        )
                    )
            except KeywordError as e:
                if e.multiple_keywords:
                    self._add_to_multiple_keywords_result(e.multiple_keywords)

                if raise_keyword_error:
                    raise

                result = None
                self.diagnostics.append(DiagnosticsEntry(str(e), DiagnosticSeverity.ERROR, Error.KEYWORD_ERROR))

            self._cache[(name, self.handle_bdd_style)] = (
                result,
                self.diagnostics,
                self.multiple_keywords_result,
            )

            return result
        except CancelSearchError:
            return None

    def _find_keyword(self, name: Optional[str]) -> Optional[KeywordDoc]:
        if not name:
            self.diagnostics.append(
                DiagnosticsEntry(
                    "Keyword name cannot be empty.",
                    DiagnosticSeverity.ERROR,
                    Error.KEYWORD_ERROR,
                )
            )
            raise CancelSearchError
        if not isinstance(name, str):
            self.diagnostics.append(  # type: ignore
                DiagnosticsEntry(
                    "Keyword name must be a string.",
                    DiagnosticSeverity.ERROR,
                    Error.KEYWORD_ERROR,
                )
            )
            raise CancelSearchError

        result = self._get_keyword_from_self(name)
        if not result and "." in name:
            result = self._get_explicit_keyword(name)

        if not result:
            result = self._get_implicit_keyword(name)

        if not result and self.handle_bdd_style:
            return self._get_bdd_style_keyword(name)

        return result

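Note on _find_keyword(): the lookup order mirrors Robot Framework's own resolution: keywords defined in the current file first, then an explicit "Owner.Keyword" lookup, then the implicit search through imported resources and libraries, and finally a retry with a BDD prefix stripped. Below is a deliberately simplified, dictionary-backed sketch of that cascade; names and data are made up, and the real code works on LibraryDoc objects and records diagnostics instead of returning plain strings.

    from typing import Dict, Optional

    def find_keyword(
        name: str,
        suite_keywords: Dict[str, str],
        imported_keywords: Dict[str, str],
        bdd_prefixes=("given ", "when ", "then ", "and ", "but "),
    ) -> Optional[str]:
        # 1. keywords defined in the file itself win
        if name in suite_keywords:
            return suite_keywords[name]
        # 2. explicit "Owner.Keyword Name" syntax
        if "." in name and name in imported_keywords:
            return imported_keywords[name]
        # 3. implicit lookup in imported resources and libraries
        for full_name, source in imported_keywords.items():
            if full_name.split(".", 1)[-1] == name:
                return source
        # 4. retry without a BDD prefix ("Given ", "When ", ...)
        lower = name.lower()
        for prefix in bdd_prefixes:
            if lower.startswith(prefix):
                return find_keyword(name[len(prefix):], suite_keywords, imported_keywords, bdd_prefixes)
        return None

    imported = {"MyLibrary.Open Session": "MyLibrary"}
    print(find_keyword("Given Open Session", {}, imported))      # -> "MyLibrary"
    print(find_keyword("MyLibrary.Open Session", {}, imported))  # -> "MyLibrary"
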
    def _get_keyword_from_self(self, name: str) -> Optional[KeywordDoc]:
        if get_robot_version() >= (6, 0):
            found: List[Tuple[Optional[LibraryEntry], KeywordDoc]] = [
                (None, v) for v in self.self_library_doc.keywords.get_all(name)
            ]
            if len(found) > 1:
                found = self._select_best_matches(found)
                if len(found) > 1:
                    self.diagnostics.append(
                        DiagnosticsEntry(
                            self._create_multiple_keywords_found_message(name, found, implicit=False),
                            DiagnosticSeverity.ERROR,
                            Error.KEYWORD_ERROR,
                        )
                    )
                    raise CancelSearchError

            if len(found) == 1:
                # TODO warning if keyword found is defined in resource and suite
                return found[0][1]

            return None

        try:
            return self.self_library_doc.keywords.get(name, None)
        except KeywordError as e:
            self.diagnostics.append(DiagnosticsEntry(str(e), DiagnosticSeverity.ERROR, Error.KEYWORD_ERROR))
            raise CancelSearchError from e

    def _yield_owner_and_kw_names(self, full_name: str) -> Iterator[Tuple[str, ...]]:
        tokens = full_name.split(".")
        for i in range(1, len(tokens)):
            yield ".".join(tokens[:i]), ".".join(tokens[i:])

    def _get_explicit_keyword(self, name: str) -> Optional[KeywordDoc]:
        found: List[Tuple[Optional[LibraryEntry], KeywordDoc]] = []
        for owner_name, kw_name in self._yield_owner_and_kw_names(name):
            found.extend(self.find_keywords(owner_name, kw_name))

        if get_robot_version() >= (6, 0) and len(found) > 1:
            found = self._select_best_matches(found)

        if len(found) > 1:
            self.diagnostics.append(
                DiagnosticsEntry(
                    self._create_multiple_keywords_found_message(name, found, implicit=False),
                    DiagnosticSeverity.ERROR,
                    Error.KEYWORD_ERROR,
                )
            )
            raise CancelSearchError

        return found[0][1] if found else None

    def find_keywords(self, owner_name: str, name: str) -> List[Tuple[LibraryEntry, KeywordDoc]]:
        if self._all_keywords is None:
            self._all_keywords = list(
                chain(
                    self.namespace._libraries.values(),
                    self.namespace._resources.values(),
                )
            )

        if get_robot_version() >= (6, 0):
            result: List[Tuple[LibraryEntry, KeywordDoc]] = []
            for v in self._all_keywords:
                if eq_namespace(v.alias or v.name, owner_name):
                    result.extend((v, kw) for kw in v.library_doc.keywords.get_all(name))
            return result

        result = []
        for v in self._all_keywords:
            if eq_namespace(v.alias or v.name, owner_name):
                kw = v.library_doc.keywords.get(name, None)
                if kw is not None:
                    result.append((v, kw))
        return result

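Note on _yield_owner_and_kw_names()/_get_explicit_keyword(): because both the owner part (library alias or resource name) and the keyword name itself may contain dots, every possible split of the full name is tried against the imports. A standalone copy of the generator with a small demo:

    from typing import Iterator, Tuple

    def yield_owner_and_kw_names(full_name: str) -> Iterator[Tuple[str, str]]:
        # Try every split point of a dotted keyword call, left part = owner, right part = keyword.
        tokens = full_name.split(".")
        for i in range(1, len(tokens)):
            yield ".".join(tokens[:i]), ".".join(tokens[i:])

    print(list(yield_owner_and_kw_names("resources.common.Login As Admin")))
    # [('resources', 'common.Login As Admin'), ('resources.common', 'Login As Admin')]
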
    def _add_to_multiple_keywords_result(self, kw: Iterable[KeywordDoc]) -> None:
        if self.multiple_keywords_result is None:
            self.multiple_keywords_result = list(kw)
        else:
            self.multiple_keywords_result.extend(kw)

    def _create_multiple_keywords_found_message(
        self,
        name: str,
        found: Sequence[Tuple[Optional[LibraryEntry], KeywordDoc]],
        implicit: bool = True,
    ) -> str:
        self._add_to_multiple_keywords_result([k for _, k in found])

        if any(e[1].is_embedded for e in found):
            error = f"Multiple keywords matching name '{name}' found"
        else:
            error = f"Multiple keywords with name '{name}' found"

            if implicit:
                error += ". Give the full name of the keyword you want to use"

        names = sorted(f"{e[1].name if e[0] is None else f'{e[0].alias or e[0].name}.{e[1].name}'}" for e in found)
        return "\n    ".join([f"{error}:", *names])

    def _get_implicit_keyword(self, name: str) -> Optional[KeywordDoc]:
        result = self._get_keyword_from_resource_files(name)
        if not result:
            return self._get_keyword_from_libraries(name)
        return result

    def _prioritize_same_file_or_public(
        self, entries: List[Tuple[Optional[LibraryEntry], KeywordDoc]]
    ) -> List[Tuple[Optional[LibraryEntry], KeywordDoc]]:
        matches = [h for h in entries if h[1].source == self.namespace.source]
        if matches:
            return matches

        matches = [handler for handler in entries if not handler[1].is_private()]

        return matches or entries

    def _select_best_matches(
        self, entries: List[Tuple[Optional[LibraryEntry], KeywordDoc]]
    ) -> List[Tuple[Optional[LibraryEntry], KeywordDoc]]:
        normal = [hand for hand in entries if not hand[1].is_embedded]
        if normal:
            return normal

        matches = [hand for hand in entries if not self._is_worse_match_than_others(hand, entries)]
        return matches or entries

    def _is_worse_match_than_others(
        self,
        candidate: Tuple[Optional[LibraryEntry], KeywordDoc],
        alternatives: List[Tuple[Optional[LibraryEntry], KeywordDoc]],
    ) -> bool:
        for other in alternatives:
            if (
                candidate[1] is not other[1]
                and self._is_better_match(other, candidate)
                and not self._is_better_match(candidate, other)
            ):
                return True
        return False

    def _is_better_match(
        self,
        candidate: Tuple[Optional[LibraryEntry], KeywordDoc],
        other: Tuple[Optional[LibraryEntry], KeywordDoc],
    ) -> bool:
        return (
            other[1].matcher.embedded_arguments.match(candidate[1].name) is not None
            and candidate[1].matcher.embedded_arguments.match(other[1].name) is None
        )

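Note on _select_best_matches()/_is_better_match(): when all remaining candidates use embedded arguments, a candidate is discarded if some alternative is strictly better, meaning the alternative's embedded-argument pattern matches the candidate's name but not the other way round, so the more specific keyword wins. A rough standalone illustration of that rule; translating ${...} parts into a permissive regex is an assumption made only for this sketch (the real code relies on the matcher's embedded_arguments):

    import re

    def embedded_pattern(keyword_name: str) -> "re.Pattern[str]":
        # Replace each ${variable} with a wildcard; everything else is matched literally.
        parts = re.split(r"(\$\{[^}]+\})", keyword_name)
        regex = "".join(".+?" if p.startswith("${") else re.escape(p) for p in parts if p)
        return re.compile(f"^{regex}$", re.IGNORECASE)

    def is_better_match(candidate: str, other: str) -> bool:
        # Same direction as _is_better_match: "other" matches "candidate", but not vice versa.
        return (
            embedded_pattern(other).match(candidate) is not None
            and embedded_pattern(candidate).match(other) is None
        )

    generic = "Select ${animal} from ${list} list"
    specific = "Select cat from ${list} list"
    print(is_better_match(specific, generic))  # True: the more specific keyword wins
    print(is_better_match(generic, specific))  # False
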
    def _get_keyword_from_resource_files(self, name: str) -> Optional[KeywordDoc]:
        if self._resource_keywords is None:
            self._resource_keywords = list(chain(self.namespace._resources.values()))

        if get_robot_version() >= (6, 0):
            found: List[Tuple[Optional[LibraryEntry], KeywordDoc]] = []
            for v in self._resource_keywords:
                r = v.library_doc.keywords.get_all(name)
                if r:
                    found.extend([(v, k) for k in r])
        else:
            found = []
            for k in self._resource_keywords:
                s = k.library_doc.keywords.get(name, None)
                if s is not None:
                    found.append((k, s))

        if not found:
            return None

        if get_robot_version() >= (6, 0):
            if len(found) > 1:
                found = self._prioritize_same_file_or_public(found)

                if len(found) > 1:
                    found = self._select_best_matches(found)

                    if len(found) > 1:
                        found = self._get_keyword_based_on_search_order(found)

        else:
            if len(found) > 1:
                found = self._get_keyword_based_on_search_order(found)

        if len(found) == 1:
            return found[0][1]

        self.diagnostics.append(
            DiagnosticsEntry(
                self._create_multiple_keywords_found_message(name, found),
                DiagnosticSeverity.ERROR,
                Error.KEYWORD_ERROR,
            )
        )
        raise CancelSearchError

    def _get_keyword_based_on_search_order(
        self, entries: List[Tuple[Optional[LibraryEntry], KeywordDoc]]
    ) -> List[Tuple[Optional[LibraryEntry], KeywordDoc]]:
        for libname in self.namespace.search_order:
            for e in entries:
                if e[0] is not None and eq_namespace(libname, e[0].alias or e[0].name):
                    return [e]

        return entries

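Note on _get_keyword_based_on_search_order(): remaining ties are broken by the namespace's search order (what the BuiltIn keyword Set Library Search Order controls at run time); the first entry whose alias or name appears in the search order wins, otherwise all candidates are kept and the callers above report a "multiple keywords" error. A small standalone sketch; a plain case-insensitive comparison stands in for eq_namespace here:

    from typing import List, Tuple

    def pick_by_search_order(
        entries: List[Tuple[str, str]],  # (owner name, keyword name) pairs
        search_order: Tuple[str, ...],
    ) -> List[Tuple[str, str]]:
        for libname in search_order:
            for entry in entries:
                if entry[0].lower() == libname.lower():
                    return [entry]
        return entries

    candidates = [("SeleniumLibrary", "Open Browser"), ("Browser", "Open Browser")]
    print(pick_by_search_order(candidates, ("Browser",)))  # [('Browser', 'Open Browser')]
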
    def _get_keyword_from_libraries(self, name: str) -> Optional[KeywordDoc]:
        if self._library_keywords is None:
            self._library_keywords = list(chain(self.namespace._libraries.values()))

        if get_robot_version() >= (6, 0):
            found: List[Tuple[Optional[LibraryEntry], KeywordDoc]] = []
            for v in self._library_keywords:
                r = v.library_doc.keywords.get_all(name)
                if r:
                    found.extend([(v, k) for k in r])
        else:
            found = []

            for k in self._library_keywords:
                s = k.library_doc.keywords.get(name, None)
                if s is not None:
                    found.append((k, s))

        if not found:
            return None

        if get_robot_version() >= (6, 0):
            if len(found) > 1:
                found = self._select_best_matches(found)
                if len(found) > 1:
                    found = self._get_keyword_based_on_search_order(found)
        else:
            if len(found) > 1:
                found = self._get_keyword_based_on_search_order(found)
            if len(found) == 2:
                found = self._filter_stdlib_runner(*found)

        if len(found) == 1:
            return found[0][1]

        self.diagnostics.append(
            DiagnosticsEntry(
                self._create_multiple_keywords_found_message(name, found),
                DiagnosticSeverity.ERROR,
                Error.KEYWORD_ERROR,
            )
        )
        raise CancelSearchError

    def _filter_stdlib_runner(
        self,
        entry1: Tuple[Optional[LibraryEntry], KeywordDoc],
        entry2: Tuple[Optional[LibraryEntry], KeywordDoc],
    ) -> List[Tuple[Optional[LibraryEntry], KeywordDoc]]:
        stdlibs_without_remote = STDLIBS - {"Remote"}
        if entry1[0] is not None and entry1[0].name in stdlibs_without_remote:
            standard, custom = entry1, entry2
        elif entry2[0] is not None and entry2[0].name in stdlibs_without_remote:
            standard, custom = entry2, entry1
        else:
            return [entry1, entry2]

        self.diagnostics.append(
            DiagnosticsEntry(
                self._create_custom_and_standard_keyword_conflict_warning_message(custom, standard),
                DiagnosticSeverity.WARNING,
                Error.KEYWORD_ERROR,
            )
        )

        return [custom]

    def _create_custom_and_standard_keyword_conflict_warning_message(
        self,
        custom: Tuple[Optional[LibraryEntry], KeywordDoc],
        standard: Tuple[Optional[LibraryEntry], KeywordDoc],
    ) -> str:
        custom_with_name = standard_with_name = ""
        if custom[0] is not None and custom[0].alias is not None:
            custom_with_name = " imported as '%s'" % custom[0].alias
        if standard[0] is not None and standard[0].alias is not None:
            standard_with_name = " imported as '%s'" % standard[0].alias
        return (
            f"Keyword '{standard[1].name}' found both from a custom test library "
            f"'{'' if custom[0] is None else custom[0].name}'{custom_with_name} "
            f"and a standard library '{standard[1].name}'{standard_with_name}. "
            f"The custom keyword is used. To select explicitly, and to get "
            f"rid of this warning, use either "
            f"'{'' if custom[0] is None else custom[0].alias or custom[0].name}.{custom[1].name}' "
            f"or '{'' if standard[0] is None else standard[0].alias or standard[0].name}.{standard[1].name}'."
        )

    def _get_bdd_style_keyword(self, name: str) -> Optional[KeywordDoc]:
        if get_robot_version() < (6, 0):
            lower = name.lower()
            for prefix in ["given ", "when ", "then ", "and ", "but "]:
                if lower.startswith(prefix):
                    return self._find_keyword(name[len(prefix) :])
            return None

        parts = name.split()
        if len(parts) < 2:
            return None
        for index in range(1, len(parts)):
            prefix = " ".join(parts[:index]).title()
            if prefix.title() in (
                self.namespace.languages.bdd_prefixes if self.namespace.languages is not None else DEFAULT_BDD_PREFIXES
            ):
                return self._find_keyword(" ".join(parts[index:]))
        return None
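Note on _get_bdd_style_keyword(): for Robot Framework < 6.0 only the five English single-word prefixes are stripped, while from 6.0 on the (possibly multi-word) prefixes of the configured languages, falling back to DEFAULT_BDD_PREFIXES above, are tried word by word. A standalone sketch of the 6.x-style matching, reduced to the default English set; adding a trailing space before the set lookup is a concession to the trailing-space entries in DEFAULT_BDD_PREFIXES and is only meant to keep this sketch self-consistent:

    DEFAULT_BDD_PREFIXES = {"Given ", "When ", "Then ", "And ", "But "}

    def strip_bdd_prefix(name: str, bdd_prefixes=DEFAULT_BDD_PREFIXES) -> str:
        # Try ever longer word prefixes and strip the first one found in the prefix set.
        parts = name.split()
        if len(parts) < 2:
            return name
        for index in range(1, len(parts)):
            prefix = " ".join(parts[:index]).title()
            if prefix + " " in bdd_prefixes:
                return " ".join(parts[index:])
        return name

    print(strip_bdd_prefix("Given the user is logged in"))  # "the user is logged in"
    print(strip_bdd_prefix("Log Message"))                  # unchanged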