robotcode-robot 0.95.1__py3-none-any.whl → 0.96.0__py3-none-any.whl

@@ -1 +1 @@
- __version__ = "0.95.1"
+ __version__ = "0.96.0"
@@ -1,3 +1,4 @@
+ import functools
  from dataclasses import dataclass, field
  from enum import Enum
  from typing import (
@@ -142,7 +143,7 @@ class VariableMatcher:

  self.base = match.base

- self.normalized_name = str(normalize(self.base))
+ self.normalized_name = normalize(self.base)

  def __eq__(self, o: object) -> bool:
  if type(o) is VariableMatcher:
@@ -154,7 +155,7 @@ class VariableMatcher:
  if base is None:
  return False

- normalized = str(normalize(base))
+ normalized = normalize(base)
  return self.normalized_name == normalized

  return False
@@ -194,10 +195,9 @@ class VariableDefinition(SourceEntity):
  value: Any = field(default=None, compare=False)
  value_is_native: bool = field(default=False, compare=False)

- matcher: VariableMatcher = field(init=False, compare=False)
-
- def __post_init__(self) -> None:
- self.matcher = VariableMatcher(self.name)
+ @functools.cached_property
+ def matcher(self) -> VariableMatcher:
+ return VariableMatcher(self.name)

  @single_call
  def __hash__(self) -> int:
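The hunk above turns the eagerly built matcher field (previously set in __post_init__) into a functools.cached_property, so the matcher object is only created on first access and, since it is no longer a dataclass field, it stays out of __init__ and __eq__ automatically. A minimal sketch of the pattern, with stand-in classes rather than the package's actual types:

```python
import functools
from dataclasses import dataclass


class Matcher:
    def __init__(self, name: str) -> None:
        # stand-in for the real normalization logic
        self.normalized = name.casefold().replace(" ", "").replace("_", "")


@dataclass
class Definition:
    name: str

    @functools.cached_property
    def matcher(self) -> Matcher:
        # computed lazily on first access, then cached in the instance __dict__
        return Matcher(self.name)


d = Definition("My Variable")
assert d.matcher is d.matcher  # repeated access reuses the cached instance
```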
@@ -573,12 +573,10 @@ class ImportsManager:
  self._resource_document_changed_timer_interval = 1
  self._resource_document_changed_documents: Set[TextDocument] = set()

- self._resource_libdoc_cache: "weakref.WeakKeyDictionary[ast.AST, Dict[Tuple[str, bool], LibraryDoc]]" = (
+ self._resource_libdoc_cache: "weakref.WeakKeyDictionary[ast.AST, Dict[str, LibraryDoc]]" = (
  weakref.WeakKeyDictionary()
  )

- self._process_pool_executor: Optional[ProcessPoolExecutor] = None
-
  def __del__(self) -> None:
  try:
  if self._executor is not None:
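The resource libdoc cache above is a weakref.WeakKeyDictionary keyed by the parsed model, with the inner dict now keyed only by the source string (the append_model_errors flag is gone from the key). A small illustrative sketch of that caching shape, using a hypothetical Model class in place of the Robot Framework AST:

```python
import weakref
from typing import Dict


class Model:  # hypothetical stand-in for a parsed resource model
    pass


_libdoc_cache: "weakref.WeakKeyDictionary[Model, Dict[str, str]]" = weakref.WeakKeyDictionary()


def get_libdoc(model: Model, source: str) -> str:
    entry = _libdoc_cache.get(model)
    if entry is not None and source in entry:
        return entry[source]
    result = f"libdoc built from {source}"  # stand-in for the expensive build
    _libdoc_cache.setdefault(model, {})[source] = result
    return result


m = Model()
assert get_libdoc(m, "a.resource") is get_libdoc(m, "a.resource")
# once m is garbage collected, its cache entry disappears with it
```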
@@ -1229,9 +1227,6 @@ class ImportsManager:
  finally:
  executor.shutdown(wait=True)

- if result.stdout:
- self._logger.warning(lambda: f"stdout captured at loading library {name}{args!r}:\n{result.stdout}")
-
  try:
  if meta is not None:
  meta.has_errors = bool(result.errors)
@@ -1302,9 +1297,8 @@ class ImportsManager:
  self,
  model: ast.AST,
  source: str,
- append_model_errors: bool = True,
  ) -> LibraryDoc:
- key = (source, append_model_errors)
+ key = source

  entry = None
  if model in self._resource_libdoc_cache:
@@ -1313,11 +1307,7 @@ class ImportsManager:
  if entry and key in entry:
  return entry[key]

- result = get_model_doc(
- model=model,
- source=source,
- append_model_errors=append_model_errors,
- )
+ result = get_model_doc(model=model, source=source)
  if entry is None:
  entry = {}
  self._resource_libdoc_cache[model] = entry
@@ -1409,9 +1399,6 @@ class ImportsManager:
  finally:
  executor.shutdown(True)

- if result.stdout:
- self._logger.warning(lambda: f"stdout captured at loading variables {name}{args!r}:\n{result.stdout}")
-
  try:
  if meta is not None:
  meta_file = meta.filepath_base + ".meta"
@@ -39,9 +39,8 @@ DEFAULT_BDD_PREFIXES = {"Given ", "When ", "Then ", "And ", "But "}


  class KeywordFinder:
- def __init__(self, namespace: "Namespace", library_doc: LibraryDoc) -> None:
- self.namespace = namespace
- self.self_library_doc = library_doc
+ def __init__(self, namespace: "Namespace") -> None:
+ self._namespace = namespace

  self.diagnostics: List[DiagnosticsEntry] = []
  self.result_bdd_prefix: Optional[str] = None
@@ -57,9 +56,9 @@ class KeywordFinder:
  ],
  ] = {}

- self._all_keywords: Optional[List[LibraryEntry]] = None
- self._resource_imports: Optional[List[ResourceEntry]] = None
- self._library_imports: Optional[List[LibraryEntry]] = None
+ @functools.cached_property
+ def _library_doc(self) -> LibraryDoc:
+ return self._namespace.get_library_doc()

  def reset_diagnostics(self) -> None:
  self.diagnostics = []
@@ -162,7 +161,7 @@ class KeywordFinder:
  def _get_keyword_from_self(self, name: str) -> Optional[KeywordDoc]:
  if get_robot_version() >= (6, 0):
  found: List[Tuple[Optional[LibraryEntry], KeywordDoc]] = [
- (None, v) for v in self.self_library_doc.keywords.iter_all(name)
+ (None, v) for v in self._library_doc.keywords.iter_all(name)
  ]
  if len(found) > 1:
  found = self._select_best_matches(found)
@@ -183,7 +182,7 @@ class KeywordFinder:
  return None

  try:
- return self.self_library_doc.keywords.get(name, None)
+ return self._library_doc.keywords.get(name, None)
  except KeywordError as e:
  self.diagnostics.append(DiagnosticsEntry(str(e), DiagnosticSeverity.ERROR, Error.KEYWORD_ERROR))
  raise CancelSearchError from e
@@ -213,15 +212,16 @@ class KeywordFinder:

  return found[0][1] if found else None

- def find_keywords(self, owner_name: str, name: str) -> List[Tuple[LibraryEntry, KeywordDoc]]:
- if self._all_keywords is None:
- self._all_keywords = list(
- chain(
- self.namespace._libraries.values(),
- self.namespace._resources.values(),
- )
+ @functools.cached_property
+ def _all_keywords(self) -> List[LibraryEntry]:
+ return list(
+ chain(
+ self._namespace._libraries.values(),
+ self._namespace._resources.values(),
  )
+ )

+ def find_keywords(self, owner_name: str, name: str) -> List[Tuple[LibraryEntry, KeywordDoc]]:
  if get_robot_version() >= (6, 0):
  result: List[Tuple[LibraryEntry, KeywordDoc]] = []
  for v in self._all_keywords:
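As in several hunks of this file, a None-checked lazy attribute is replaced by functools.cached_property, which builds the combined entry list once per finder instance and caches it. A hedged sketch of the pattern with illustrative names:

```python
import functools
from itertools import chain
from typing import Dict, List


class Finder:
    def __init__(self, libraries: Dict[str, str], resources: Dict[str, str]) -> None:
        self._libraries = libraries
        self._resources = resources

    @functools.cached_property
    def _all_entries(self) -> List[str]:
        # built on first use; later lookups reuse the cached list
        return list(chain(self._libraries.values(), self._resources.values()))


f = Finder({"BuiltIn": "lib"}, {"common.resource": "res"})
assert f._all_entries is f._all_entries
```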
@@ -271,11 +271,11 @@ class KeywordFinder:
  def _prioritize_same_file_or_public(
  self, entries: List[Tuple[Optional[LibraryEntry], KeywordDoc]]
  ) -> List[Tuple[Optional[LibraryEntry], KeywordDoc]]:
- matches = [h for h in entries if h[1].source == self.namespace.source]
+ matches = [h for h in entries if h[1].source == self._namespace.source]
  if matches:
  return matches

- matches = [handler for handler in entries if not handler[1].is_private()]
+ matches = [handler for handler in entries if not handler[1].is_private]

  return matches or entries

@@ -318,10 +318,11 @@ class KeywordFinder:
  and candidate[1].matcher.embedded_arguments.match(other[1].name) is None
  )

- def _get_keyword_from_resource_files(self, name: str) -> Optional[KeywordDoc]:
- if self._resource_imports is None:
- self._resource_imports = list(chain(self.namespace._resources.values()))
+ @functools.cached_property
+ def _resource_imports(self) -> List[ResourceEntry]:
+ return list(chain(self._namespace._resources.values()))

+ def _get_keyword_from_resource_files(self, name: str) -> Optional[KeywordDoc]:
  if get_robot_version() >= (6, 0):
  found: List[Tuple[Optional[LibraryEntry], KeywordDoc]] = [
  (v, k) for v in self._resource_imports for k in v.library_doc.keywords.iter_all(name)
@@ -365,17 +366,18 @@ class KeywordFinder:
  def _get_keyword_based_on_search_order(
  self, entries: List[Tuple[Optional[LibraryEntry], KeywordDoc]]
  ) -> List[Tuple[Optional[LibraryEntry], KeywordDoc]]:
- for libname in self.namespace.search_order:
+ for libname in self._namespace.search_order:
  for e in entries:
  if e[0] is not None and eq_namespace(libname, e[0].alias or e[0].name):
  return [e]

  return entries

- def _get_keyword_from_libraries(self, name: str) -> Optional[KeywordDoc]:
- if self._library_imports is None:
- self._library_imports = list(chain(self.namespace._libraries.values()))
+ @functools.cached_property
+ def _library_imports(self) -> List[LibraryEntry]:
+ return list(chain(self._namespace._libraries.values()))

+ def _get_keyword_from_libraries(self, name: str) -> Optional[KeywordDoc]:
  if get_robot_version() >= (6, 0):
  found: List[Tuple[Optional[LibraryEntry], KeywordDoc]] = [
  (v, k) for v in self._library_imports for k in v.library_doc.keywords.iter_all(name)
@@ -462,8 +464,8 @@ class KeywordFinder:
  def bdd_prefix_regexp(self) -> "re.Pattern[str]":
  prefixes = (
  "|".join(
- self.namespace.languages.bdd_prefixes
- if self.namespace.languages is not None
+ self._namespace.languages.bdd_prefixes
+ if self._namespace.languages is not None
  else ["given", "when", "then", "and", "but"]
  )
  .replace(" ", r"\s")
@@ -44,7 +44,7 @@ from robot.output.logger import LOGGER
  from robot.output.loggerhelper import AbstractLogger
  from robot.parsing.lexer.tokens import Token
  from robot.parsing.lexer.tokens import Token as RobotToken
- from robot.parsing.model.blocks import Keyword
+ from robot.parsing.model.blocks import Keyword, KeywordSection, Section, SettingSection
  from robot.parsing.model.statements import Arguments, KeywordName
  from robot.running.arguments.argumentresolver import ArgumentResolver, DictToKwargs, NamedArgumentResolver
  from robot.running.arguments.argumentresolver import VariableReplacer as ArgumentsVariableReplacer
@@ -64,7 +64,18 @@ from robot.variables.finders import VariableFinder
  from robot.variables.replacer import VariableReplacer
  from robotcode.core.lsp.types import Position, Range
  from robotcode.core.utils.path import normalized_path
- from robotcode.robot.diagnostics.entities import (
+
+ from ..utils import get_robot_version
+ from ..utils.ast import (
+ cached_isinstance,
+ get_variable_token,
+ range_from_token,
+ strip_variable_token,
+ )
+ from ..utils.markdownformatter import MarkDownFormatter
+ from ..utils.match import normalize, normalize_namespace
+ from ..utils.variables import contains_variable
+ from .entities import (
  ArgumentDefinition,
  ImportedVariableDefinition,
  LibraryArgumentDefinition,
@@ -72,18 +83,6 @@ from robotcode.robot.diagnostics.entities import (
  SourceEntity,
  single_call,
  )
- from robotcode.robot.utils import get_robot_version
- from robotcode.robot.utils.ast import (
- cached_isinstance,
- get_variable_token,
- iter_nodes,
- range_from_token,
- strip_variable_token,
- )
- from robotcode.robot.utils.markdownformatter import MarkDownFormatter
- from robotcode.robot.utils.match import normalize, normalize_namespace
-
- from ..utils.variables import contains_variable

  if get_robot_version() < (7, 0):
  from robot.running.handlers import _PythonHandler, _PythonInitHandler # pyright: ignore[reportMissingImports]
@@ -201,22 +200,36 @@ def convert_from_rest(text: str) -> str:

  if get_robot_version() >= (6, 0):

- @functools.lru_cache(maxsize=None)
+ # monkey patch robot framework
+ _old_from_name = EmbeddedArguments.from_name
+
+ @functools.lru_cache(maxsize=8192)
+ def _new_from_name(name: str) -> EmbeddedArguments:
+ return _old_from_name(name)
+
+ EmbeddedArguments.from_name = _new_from_name
+
  def _get_embedded_arguments(name: str) -> Any:
  try:
  return EmbeddedArguments.from_name(name)
  except (VariableError, DataError):
  return ()

+ def _match_embedded(embedded_arguments: EmbeddedArguments, name: str) -> bool:
+ return embedded_arguments.match(name) is not None
+
  else:

- @functools.lru_cache(maxsize=None)
+ @functools.lru_cache(maxsize=8192)
  def _get_embedded_arguments(name: str) -> Any:
  try:
  return EmbeddedArguments(name)
  except (VariableError, DataError):
  return ()

+ def _match_embedded(embedded_arguments: EmbeddedArguments, name: str) -> bool:
+ return embedded_arguments.name.match(name) is not None
+

  def is_embedded_keyword(name: str) -> bool:
  try:
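The hunk above monkey patches EmbeddedArguments.from_name with an lru_cache wrapper so repeated parses of the same keyword name reuse the previous result. A hedged sketch of that wrapping pattern, using a stand-in Factory class instead of the Robot Framework type:

```python
import functools


class Parsed:
    def __init__(self, name: str) -> None:
        self.name = name


class Factory:  # stand-in for EmbeddedArguments
    @staticmethod
    def from_name(name: str) -> Parsed:
        return Parsed(name)  # imagine an expensive parse here


_old_from_name = Factory.from_name


@functools.lru_cache(maxsize=8192)
def _new_from_name(name: str) -> Parsed:
    return _old_from_name(name)


Factory.from_name = _new_from_name  # type: ignore[assignment]

# identical names now return the cached, already parsed object
assert Factory.from_name("Add ${a} and ${b}") is Factory.from_name("Add ${a} and ${b}")
```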
@@ -243,31 +256,20 @@ class KeywordMatcher:
  self.embedded_arguments: Optional[EmbeddedArguments] = (
  _get_embedded_arguments(self.name) or None if self._can_have_embedded else None
  )
- self._match_cache: Dict[str, bool] = {}

  @property
  def normalized_name(self) -> str:
  if self._normalized_name is None:
- self._normalized_name = str(normalize_namespace(self.name) if self._is_namespace else normalize(self.name))
+ self._normalized_name = normalize_namespace(self.name) if self._is_namespace else normalize(self.name)

  return self._normalized_name

- if get_robot_version() >= (6, 0):
-
- def __match_embedded(self, name: str) -> bool:
- return self.embedded_arguments is not None and self.embedded_arguments.match(name) is not None
-
- else:
-
- def __match_embedded(self, name: str) -> bool:
- return self.embedded_arguments is not None and self.embedded_arguments.name.match(name) is not None
-
  def __eq__(self, o: object) -> bool:
  if type(o) is KeywordMatcher:
  if self._is_namespace != o._is_namespace:
  return False

- if not self.embedded_arguments:
+ if self.embedded_arguments is not None:
  return self.normalized_name == o.normalized_name

  o = o.name
@@ -275,17 +277,28 @@ class KeywordMatcher:
  if type(o) is not str:
  return False

- if self.embedded_arguments:
- return self.__match_embedded(o)
+ return self.match_string(o)
+
+ def match_string(self, o: str) -> bool:
+ if self.embedded_arguments is not None:
+ return _match_embedded(self.embedded_arguments, o)

- return self.normalized_name == str(normalize_namespace(o) if self._is_namespace else normalize(o))
+ return self.normalized_name == (normalize_namespace(o) if self._is_namespace else normalize(o))

  @single_call
  def __hash__(self) -> int:
  return hash(
- (self.embedded_arguments.name, tuple(self.embedded_arguments.args))
- if self.embedded_arguments
- else (self.normalized_name, self._is_namespace)
+ (
+ self.normalized_name,
+ self._is_namespace,
+ self._can_have_embedded,
+ self.embedded_arguments.name if self.embedded_arguments else None,
+ (
+ tuple(self.embedded_arguments.args)
+ if self.embedded_arguments and self.embedded_arguments.args
+ else None
+ ),
+ )
  )

  def __str__(self) -> str:
@@ -612,7 +625,6 @@ class KeywordDoc(SourceEntity):
  libname: Optional[str] = None
  libtype: Optional[str] = None
  longname: Optional[str] = None
- is_embedded: bool = False
  errors: Optional[List[Error]] = field(default=None, compare=False)
  doc_format: str = ROBOT_DOC_FORMAT
  is_error_handler: bool = False
@@ -661,6 +673,10 @@ class KeywordDoc(SourceEntity):
  def __str__(self) -> str:
  return f"{self.name}({', '.join(str(arg) for arg in self.arguments)})"

+ @functools.cached_property
+ def is_embedded(self) -> bool:
+ return self.matcher.embedded_arguments is not None
+
  @functools.cached_property
  def matcher(self) -> KeywordMatcher:
  return KeywordMatcher(self.name)
@@ -690,16 +706,16 @@ class KeywordDoc(SourceEntity):

  return Range.invalid()

- @single_call
+ @functools.cached_property
  def normalized_tags(self) -> List[str]:
  return [normalize(tag) for tag in self.tags]

- @single_call
+ @functools.cached_property
  def is_private(self) -> bool:
  if get_robot_version() < (6, 0):
  return False

- return "robot:private" in self.normalized_tags()
+ return "robot:private" in self.normalized_tags

  @functools.cached_property
  def range(self) -> Range:
@@ -885,7 +901,6 @@ class KeywordDoc(SourceEntity):
  self.type,
  self.libname,
  self.libtype,
- self.is_embedded,
  self.is_initializer,
  self.is_error_handler,
  self.doc_format,
@@ -969,7 +984,7 @@ class KeywordStore:
  return list(self.iter_all(key))

  def iter_all(self, key: str) -> Iterable[KeywordDoc]:
- yield from (v for v in self.keywords if v.matcher == key)
+ return (v for v in self.keywords if v.matcher.match_string(key))


  @dataclass
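In the hunk above the store no longer compares a matcher against a plain string via __eq__; it calls the new, explicit match_string() method, which keeps __eq__ type-strict and makes the intent visible at the call site. A simplified sketch of that design with stand-in classes:

```python
from typing import Iterable, List


class NameMatcher:
    def __init__(self, name: str) -> None:
        self.normalized = name.casefold().replace(" ", "").replace("_", "")

    def match_string(self, other: str) -> bool:
        # explicit string matching instead of overloading __eq__ for str
        return self.normalized == other.casefold().replace(" ", "").replace("_", "")


class Store:
    def __init__(self, names: List[str]) -> None:
        self._matchers = [(NameMatcher(n), n) for n in names]

    def iter_all(self, key: str) -> Iterable[str]:
        return (name for matcher, name in self._matchers if matcher.match_string(key))


assert list(Store(["My Keyword", "Other"]).iter_all("my_keyword")) == ["My Keyword"]
```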
@@ -1282,12 +1297,12 @@ class VariablesDoc(LibraryDoc):
  return result


- @functools.lru_cache(maxsize=256)
+ @functools.lru_cache(maxsize=8192)
  def is_library_by_path(path: str) -> bool:
  return path.lower().endswith((".py", "/", os.sep))


- @functools.lru_cache(maxsize=256)
+ @functools.lru_cache(maxsize=8192)
  def is_variables_by_path(path: str) -> bool:
  if get_robot_version() >= (6, 1):
  return path.lower().endswith((".py", ".yml", ".yaml", ".json", "/", os.sep))
@@ -2016,7 +2031,6 @@ def get_library_doc(
  libname=libdoc.name,
  libtype=libdoc.type,
  longname=f"{libdoc.name}.{kw[0].name}",
- is_embedded=is_embedded_keyword(kw[0].name),
  doc_format=str(lib.doc_format) or ROBOT_DOC_FORMAT,
  is_error_handler=kw[1].is_error_handler,
  error_handler_message=kw[1].error_handler_message,
@@ -2696,133 +2710,146 @@ def complete_variables_import(
  return list(set(result))


- def get_model_doc(
- model: ast.AST,
- source: str,
- append_model_errors: bool = True,
- ) -> LibraryDoc:
- errors: List[Error] = []
- keyword_name_nodes: Dict[int, KeywordName] = {}
- keywords_nodes: Dict[int, Keyword] = {}
- for node in iter_nodes(model):
- if cached_isinstance(node, Keyword):
- node.lineno
- keywords_nodes[node.lineno] = node
- if cached_isinstance(node, KeywordName):
- keyword_name_nodes[node.lineno] = node
-
- error = getattr(node, "error", None)
- if error is not None:
- errors.append(
- Error(
- message=error,
- type_name="ModelError",
- source=source,
- line_no=node.lineno, # type: ignore
- )
- )
- if append_model_errors:
- node_errors = getattr(node, "errors", None)
- if node_errors is not None:
- for e in node_errors:
- errors.append(
- Error(
- message=e,
- type_name="ModelError",
- source=source,
- line_no=node.lineno, # type: ignore
- )
- )
-
- def get_keyword_name_token_from_line(line: int) -> Optional[Token]:
- keyword_name = keyword_name_nodes.get(line, None)
- if keyword_name is None:
- return None
- return cast(Token, keyword_name.get_token(RobotToken.KEYWORD_NAME))
+ if get_robot_version() < (7, 0):

- def get_argument_definitions_from_line(
- line: int,
- ) -> List[ArgumentDefinition]:
- keyword_node = keywords_nodes.get(line, None)
- if keyword_node is None:
- return []
+ class _MyUserLibrary(UserLibrary):
+ current_kw: Any = None

- arguments_node = next(
- (n for n in ast.walk(keyword_node) if isinstance(n, Arguments)),
- None,
- )
- if arguments_node is None:
- return []
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ self.errors: List[Error] = []
+ super().__init__(*args, **kwargs)

- args: List[str] = []
- arguments = arguments_node.get_tokens(RobotToken.ARGUMENT)
- argument_definitions = []
+ def _log_creating_failed(self, handler: UserErrorHandler, error: BaseException) -> None:
+ err = Error(
+ message=f"Creating keyword '{handler.name}' failed: {error!s}",
+ type_name=type(error).__qualname__,
+ source=self.current_kw.source if self.current_kw is not None else None,
+ line_no=self.current_kw.lineno if self.current_kw is not None else None,
+ )
+ self.errors.append(err)

- for argument_token in (cast(RobotToken, e) for e in arguments):
+ def _create_handler(self, kw: Any) -> Any:
+ self.current_kw = kw
  try:
- argument = get_variable_token(argument_token)
-
- if argument is not None and argument.value != "@{}":
- if argument.value not in args:
- args.append(argument.value)
- arg_def = ArgumentDefinition(
- name=argument.value,
- name_token=strip_variable_token(argument),
- line_no=argument.lineno,
- col_offset=argument.col_offset,
- end_line_no=argument.lineno,
- end_col_offset=argument.end_col_offset,
- source=source,
- )
- argument_definitions.append(arg_def)
+ handler = super()._create_handler(kw)
+ handler.errors = None
+ except DataError as e:
+ err = Error(
+ message=str(e),
+ type_name=type(e).__qualname__,
+ source=kw.source,
+ line_no=kw.lineno,
+ )
+ self.errors.append(err)

- except VariableError:
- pass
+ handler = UserErrorHandler(e, kw.name, self.name)
+ handler.source = kw.source
+ handler.lineno = kw.lineno

- return argument_definitions
+ handler.errors = [err]

- res = ResourceFile(source=source)
+ return handler

- with LOGGER.cache_only:
- ResourceBuilder(res).visit(model)

- if get_robot_version() < (7, 0):
+ def _get_keyword_name_token_from_line(keyword_name_nodes: Dict[int, KeywordName], line: int) -> Optional[Token]:
+ keyword_name = keyword_name_nodes.get(line, None)
+ if keyword_name is None:
+ return None
+ return cast(Token, keyword_name.get_token(RobotToken.KEYWORD_NAME))

- class MyUserLibrary(UserLibrary):
- current_kw: Any = None

- def _log_creating_failed(self, handler: UserErrorHandler, error: BaseException) -> None:
- err = Error(
- message=f"Creating keyword '{handler.name}' failed: {error!s}",
- type_name=type(error).__qualname__,
- source=self.current_kw.source if self.current_kw is not None else None,
- line_no=self.current_kw.lineno if self.current_kw is not None else None,
- )
- errors.append(err)
+ def _get_argument_definitions_from_line(
+ keywords_nodes: Dict[int, Keyword],
+ source: Optional[str],
+ line: int,
+ ) -> List[ArgumentDefinition]:
+ keyword_node = keywords_nodes.get(line, None)
+ if keyword_node is None:
+ return []

- def _create_handler(self, kw: Any) -> Any:
- self.current_kw = kw
- try:
- handler = super()._create_handler(kw)
- handler.errors = None
- except DataError as e:
- err = Error(
- message=str(e),
- type_name=type(e).__qualname__,
- source=kw.source,
- line_no=kw.lineno,
+ arguments_node = next(
+ (n for n in ast.walk(keyword_node) if isinstance(n, Arguments)),
+ None,
+ )
+ if arguments_node is None:
+ return []
+
+ args: List[str] = []
+ arguments = arguments_node.get_tokens(RobotToken.ARGUMENT)
+ argument_definitions = []
+
+ for argument_token in (cast(RobotToken, e) for e in arguments):
+ try:
+ argument = get_variable_token(argument_token)
+
+ if argument is not None and argument.value != "@{}":
+ if argument.value not in args:
+ args.append(argument.value)
+ arg_def = ArgumentDefinition(
+ name=argument.value,
+ name_token=strip_variable_token(argument),
+ line_no=argument.lineno,
+ col_offset=argument.col_offset,
+ end_line_no=argument.lineno,
+ end_col_offset=argument.end_col_offset,
+ source=source,
  )
- errors.append(err)
+ argument_definitions.append(arg_def)

- handler = UserErrorHandler(e, kw.name, self.name)
- handler.source = kw.source
- handler.lineno = kw.lineno
+ except VariableError:
+ pass

- handler.errors = [err]
+ return argument_definitions
+
+
+ class _MyResourceBuilder(ResourceBuilder):
+ def __init__(self, resource: Any) -> None:
+ super().__init__(resource)
+ self.keyword_name_nodes: Dict[int, KeywordName] = {}
+ self.keywords_nodes: Dict[int, Keyword] = {}
+
+ def visit_Section(self, node: Section) -> None: # noqa: N802
+ if isinstance(node, (SettingSection, KeywordSection)):
+ self.generic_visit(node)
+
+ def visit_Keyword(self, node: Keyword) -> None: # noqa: N802
+ self.keywords_nodes[node.lineno] = node
+ super().visit_Keyword(node)
+ if node.header is not None:
+ self.keyword_name_nodes[node.lineno] = node.header
+
+
+ def _get_kw_errors(kw: Any) -> Any:
+ r = kw.errors if hasattr(kw, "errors") else None
+ if get_robot_version() >= (7, 0) and kw.error:
+ if not r:
+ r = []
+ r.append(
+ Error(
+ message=str(kw.error),
+ type_name="KeywordError",
+ source=kw.source,
+ line_no=kw.lineno,
+ )
+ )
+ return r

- return handler

- lib = MyUserLibrary(res)
+ def get_model_doc(
+ model: ast.AST,
+ source: str,
+ ) -> LibraryDoc:
+ res = ResourceFile(source=source)
+
+ res_builder = _MyResourceBuilder(res)
+ with LOGGER.cache_only:
+ res_builder.visit(model)
+
+ keyword_name_nodes: Dict[int, KeywordName] = res_builder.keyword_name_nodes
+ keywords_nodes: Dict[int, Keyword] = res_builder.keywords_nodes
+
+ if get_robot_version() < (7, 0):
+ lib = _MyUserLibrary(res)
  else:
  lib = res

@@ -2833,24 +2860,8 @@ def get_model_doc(
  scope="GLOBAL",
  source=source,
  line_no=1,
- errors=errors,
  )

- def get_kw_errors(kw: Any) -> Any:
- r = kw.errors if hasattr(kw, "errors") else None
- if get_robot_version() >= (7, 0) and kw.error:
- if not r:
- r = []
- r.append(
- Error(
- message=str(kw.error),
- type_name="KeywordError",
- source=kw.source,
- line_no=kw.lineno,
- )
- )
- return r
-
  libdoc.keywords = KeywordStore(
  source=libdoc.name,
  source_type=libdoc.type,
@@ -2861,7 +2872,7 @@ def get_model_doc(
  doc=kw[0].doc,
  tags=list(kw[0].tags),
  source=str(kw[0].source),
- name_token=get_keyword_name_token_from_line(kw[0].lineno),
+ name_token=_get_keyword_name_token_from_line(keyword_name_nodes, kw[0].lineno),
  line_no=kw[0].lineno if kw[0].lineno is not None else -1,
  col_offset=-1,
  end_col_offset=-1,
@@ -2869,8 +2880,7 @@ def get_model_doc(
  libname=libdoc.name,
  libtype=libdoc.type,
  longname=f"{libdoc.name}.{kw[0].name}",
- is_embedded=is_embedded_keyword(kw[0].name),
- errors=get_kw_errors(kw[1]),
+ errors=_get_kw_errors(kw[1]),
  is_error_handler=isinstance(kw[1], UserErrorHandler),
  error_handler_message=(
  str(cast(UserErrorHandler, kw[1]).error) if isinstance(kw[1], UserErrorHandler) else None
@@ -2878,7 +2888,7 @@ def get_model_doc(
  arguments_spec=ArgumentSpec.from_robot_argument_spec(
  kw[1].arguments if get_robot_version() < (7, 0) else kw[1].args
  ),
- argument_definitions=get_argument_definitions_from_line(kw[0].lineno),
+ argument_definitions=_get_argument_definitions_from_line(keywords_nodes, source, kw[0].lineno),
  )
  for kw in [
  (KeywordDocBuilder(resource=True).build_keyword(lw), lw)
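The get_model_doc hunks above fold the node bookkeeping into the resource builder itself (the new _MyResourceBuilder) instead of doing a separate iter_nodes() pass over the model. A hedged illustration of that single-pass collection idea, using plain ast as a stand-in for the Robot Framework model classes:

```python
import ast
from typing import Dict


class CollectingVisitor(ast.NodeVisitor):
    def __init__(self) -> None:
        self.functions_by_line: Dict[int, ast.FunctionDef] = {}

    def visit_FunctionDef(self, node: ast.FunctionDef) -> None:  # noqa: N802
        # record the node while visiting, then continue the normal walk
        self.functions_by_line[node.lineno] = node
        self.generic_visit(node)


tree = ast.parse("def a():\n    pass\n\ndef b():\n    pass\n")
visitor = CollectingVisitor()
visitor.visit(tree)
assert sorted(visitor.functions_by_line) == [1, 4]
```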
@@ -910,11 +910,7 @@ class Namespace:
  def get_library_doc(self) -> LibraryDoc:
  with self._library_doc_lock:
  if self._library_doc is None:
- self._library_doc = self.imports_manager.get_libdoc_from_model(
- self.model,
- self.source,
- append_model_errors=self.document_type is not None and self.document_type == DocumentType.RESOURCE,
- )
+ self._library_doc = self.imports_manager.get_libdoc_from_model(self.model, self.source)

  return self._library_doc

@@ -1887,7 +1883,6 @@ class Namespace:
  source=DIAGNOSTICS_SOURCE_NAME,
  code=err.type_name,
  )
- # TODO: implement CancelationToken
  except CancelledError:
  canceled = True
  self._logger.debug("analyzing canceled")
@@ -1904,7 +1899,7 @@ class Namespace:

  def create_finder(self) -> "KeywordFinder":
  self.ensure_initialized()
- return KeywordFinder(self, self.get_library_doc())
+ return KeywordFinder(self)

  @_logger.call(condition=lambda self, name, **kwargs: self._finder is not None and name not in self._finder._cache)
  def find_keyword(
@@ -1,4 +1,5 @@
  import ast
+ import functools
  import itertools
  import os
  import token as python_token
@@ -70,7 +71,7 @@ from .entities import (
  )
  from .errors import DIAGNOSTICS_SOURCE_NAME, Error
  from .keyword_finder import KeywordFinder
- from .library_doc import KeywordDoc, is_embedded_keyword
+ from .library_doc import KeywordDoc, LibraryDoc, is_embedded_keyword
  from .model_helper import ModelHelper

  if TYPE_CHECKING:
@@ -697,7 +698,7 @@ class NamespaceAnalyzer(Visitor):
  code=Error.RESERVED_KEYWORD,
  )

- if get_robot_version() >= (6, 0) and result.is_resource_keyword and result.is_private():
+ if get_robot_version() >= (6, 0) and result.is_resource_keyword and result.is_private:
  if self._namespace.source != result.source:
  self._append_diagnostics(
  range=kw_range,
@@ -1042,12 +1043,14 @@ class NamespaceAnalyzer(Visitor):
  if name_token is not None and name_token.value:
  self._analyze_token_variables(name_token, DiagnosticSeverity.HINT)

+ @functools.cached_property
+ def _namespace_lib_doc(self) -> LibraryDoc:
+ return self._namespace.get_library_doc()
+

  def visit_Keyword(self, node: Keyword) -> None: # noqa: N802
  if node.name:
  name_token = node.header.get_token(Token.KEYWORD_NAME)
- self._current_keyword_doc = ModelHelper.get_keyword_definition_at_token(
- self._namespace.get_library_doc(), name_token
- )
+ self._current_keyword_doc = ModelHelper.get_keyword_definition_at_token(self._namespace_lib_doc, name_token)

  if self._current_keyword_doc is not None and self._current_keyword_doc not in self._keyword_references:
  self._keyword_references[self._current_keyword_doc] = set()
@@ -67,6 +67,12 @@ class AnalysisDiagnosticModifiersConfig(ConfigBase):
  hint: List[str] = field(default_factory=list)


+ @config_section("robotcode.workspace")
+ @dataclass
+ class WorkspaceConfig(ConfigBase):
+ exclude_patterns: List[str] = field(default_factory=list)
+
+

  @dataclass
  class WorkspaceAnalysisConfig:
@@ -80,23 +80,23 @@ class FirstAndLastRealStatementFinder(Visitor):
  self.last_statement = statement


+ _NON_DATA_TOKENS = {
+ Token.SEPARATOR,
+ Token.CONTINUATION,
+ Token.EOL,
+ Token.EOS,
+ }
+
+ _NON_DATA_TOKENS_WITH_COMMENT = {*_NON_DATA_TOKENS, Token.COMMENT}
+
+
  def _get_non_data_range_from_node(
  node: ast.AST, only_start: bool = False, allow_comments: bool = False
  ) -> Optional[Range]:
+ non_data_tokens = _NON_DATA_TOKENS_WITH_COMMENT if allow_comments else _NON_DATA_TOKENS
  if cached_isinstance(node, Statement) and node.tokens:
  start_token = next(
- (
- v
- for v in node.tokens
- if v.type
- not in [
- Token.SEPARATOR,
- *([] if allow_comments else [Token.COMMENT]),
- Token.CONTINUATION,
- Token.EOL,
- Token.EOS,
- ]
- ),
+ (v for v in node.tokens if v.type not in non_data_tokens),
  None,
  )

@@ -106,18 +106,7 @@ def _get_non_data_range_from_node(
  end_tokens = node.tokens

  end_token = next(
- (
- v
- for v in reversed(end_tokens)
- if v.type
- not in [
- Token.SEPARATOR,
- *([] if allow_comments else [Token.COMMENT]),
- Token.CONTINUATION,
- Token.EOL,
- Token.EOS,
- ]
- ),
+ (v for v in reversed(end_tokens) if v.type not in non_data_tokens),
  None,
  )
  if start_token is not None and end_token is not None:
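The two hunks above hoist the skip-list into module-level sets built once, instead of rebuilding a list (with the comment-token splice) on every call, and membership tests now hit a set. A small sketch of the optimization with hypothetical token names and a sensibly named flag:

```python
from typing import Optional, Sequence

_NON_DATA = {"SEPARATOR", "CONTINUATION", "EOL", "EOS"}
_NON_DATA_WITH_COMMENT = {*_NON_DATA, "COMMENT"}


def first_data_token(token_types: Sequence[str], skip_comments: bool = True) -> Optional[str]:
    # pick the precomputed set once per call; no list is rebuilt
    skip = _NON_DATA_WITH_COMMENT if skip_comments else _NON_DATA
    return next((t for t in token_types if t not in skip), None)


assert first_data_token(["SEPARATOR", "KEYWORD", "EOL"]) == "KEYWORD"
```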
@@ -282,35 +271,35 @@ def tokenize_variables(
  return _tokenize_variables(token, variables)


- if get_robot_version() < (7, 0):
-
- def _tokenize_variables(token: Token, variables: Any) -> Iterator[Token]:
- lineno = token.lineno
- col_offset = token.col_offset
- remaining = ""
- for before, variable, remaining in variables:
- if before:
- yield Token(token.type, before, lineno, col_offset)
- col_offset += len(before)
- yield Token(Token.VARIABLE, variable, lineno, col_offset)
- col_offset += len(variable)
- if remaining:
- yield Token(token.type, remaining, lineno, col_offset)
-
- else:
-
- def _tokenize_variables(token: Token, variables: Any) -> Iterator[Token]:
- lineno = token.lineno
- col_offset = token.col_offset
- after = ""
- for match in variables:
- if match.before:
- yield Token(token.type, match.before, lineno, col_offset)
- yield Token(Token.VARIABLE, match.match, lineno, col_offset + match.start)
- col_offset += match.end
- after = match.after
- if after:
- yield Token(token.type, after, lineno, col_offset)
+ def _tokenize_variables_before7(token: Token, variables: Any) -> Iterator[Token]:
+ lineno = token.lineno
+ col_offset = token.col_offset
+ remaining = ""
+ for before, variable, remaining in variables:
+ if before:
+ yield Token(token.type, before, lineno, col_offset)
+ col_offset += len(before)
+ yield Token(Token.VARIABLE, variable, lineno, col_offset)
+ col_offset += len(variable)
+ if remaining:
+ yield Token(token.type, remaining, lineno, col_offset)
+
+
+ def _tokenize_variables_v7(token: Token, variables: Any) -> Iterator[Token]:
+ lineno = token.lineno
+ col_offset = token.col_offset
+ after = ""
+ for match in variables:
+ if match.before:
+ yield Token(token.type, match.before, lineno, col_offset)
+ yield Token(Token.VARIABLE, match.match, lineno, col_offset + match.start)
+ col_offset += match.end
+ after = match.after
+ if after:
+ yield Token(token.type, after, lineno, col_offset)
+
+
+ _tokenize_variables = _tokenize_variables_before7 if get_robot_version() < (7, 0) else _tokenize_variables_v7


  def iter_over_keyword_names_and_owners(
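The hunk above defines both tokenizer variants unconditionally and selects one at import time, rather than defining a different _tokenize_variables inside a version branch. A sketch of that dispatch pattern; _runtime_version is a hypothetical stand-in for get_robot_version():

```python
from typing import Tuple

_runtime_version: Tuple[int, int] = (7, 1)


def _tokenize_old(text: str) -> str:
    return "pre-7 tokenizer: " + text


def _tokenize_new(text: str) -> str:
    return "7+ tokenizer: " + text


# decided once at module import, no per-call version check
_tokenize = _tokenize_old if _runtime_version < (7, 0) else _tokenize_new

assert _tokenize("x").startswith("7+")
```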
@@ -3,7 +3,7 @@ from __future__ import annotations
  import itertools
  import re
  from abc import ABC, abstractmethod
- from typing import Any, Callable, Iterator, List, Optional, Tuple
+ from typing import Any, Callable, Final, Iterator, List, Optional, Tuple


  class Formatter(ABC):
@@ -87,7 +87,7 @@ class SingleLineFormatter(Formatter):


  class HeaderFormatter(SingleLineFormatter):
- _regex = re.compile(r"^(={1,5})\s+(\S.*?)\s+\1$")
+ _regex: Final["re.Pattern[str]"] = re.compile(r"^(={1,5})\s+(\S.*?)\s+\1$")

  def match(self, line: str) -> Optional[re.Match[str]]:
  return self._regex.match(line)
@@ -103,8 +103,8 @@ class HeaderFormatter(SingleLineFormatter):

  class LinkFormatter:
  _image_exts = (".jpg", ".jpeg", ".png", ".gif", ".bmp", ".svg")
- _link = re.compile(r"\[(.+?\|.*?)\]")
- _url = re.compile(
+ _link: Final["re.Pattern[str]"] = re.compile(r"\[(.+?\|.*?)\]")
+ _url: Final["re.Pattern[str]"] = re.compile(
  r"""
  ((^|\ ) ["'(\[{]*) # begin of line or space and opt. any char "'([{
  ([a-z][\w+-.]*://[^\s|]+?) # url
@@ -177,7 +177,7 @@ class LinkFormatter:


  class LineFormatter:
- _bold = re.compile(
+ _bold: Final["re.Pattern[str]"] = re.compile(
  r"""
  ( # prefix (group 1)
  (^|\ ) # begin of line or space
@@ -193,7 +193,7 @@ class LineFormatter:
  """,
  re.VERBOSE,
  )
- _italic = re.compile(
+ _italic: Final["re.Pattern[str]"] = re.compile(
  r"""
  ( (^|\ ) ["'(]* ) # begin of line or space and opt. any char "'(
  _ # start of italic
@@ -203,7 +203,7 @@ _ # end of italic
  """,
  re.VERBOSE,
  )
- _code = re.compile(
+ _code: Final["re.Pattern[str]"] = re.compile(
  r"""
  ( (^|\ ) ["'(]* ) # same as above with _ changed to ``
  ``
@@ -296,7 +296,7 @@ class ListFormatter(Formatter):


  class RulerFormatter(SingleLineFormatter):
- regex = re.compile("^-{3,}$")
+ regex: Final["re.Pattern[str]"] = re.compile("^-{3,}$")

  def match(self, line: str) -> Optional[re.Match[str]]:
  return self.regex.match(line)
@@ -306,9 +306,9 @@ class RulerFormatter(SingleLineFormatter):


  class TableFormatter(Formatter):
- _table_line = re.compile(r"^\| (.* |)\|$")
- _line_splitter = re.compile(r" \|(?= )")
- _format_cell_content = _line_formatter.format
+ _table_line: Final["re.Pattern[str]"] = re.compile(r"^\| (.* |)\|$")
+ _line_splitter: Final["re.Pattern[str]"] = re.compile(r" \|(?= )")
+ _format_cell_content: Final[Callable[[str], str]] = _line_formatter.format

  def _handles(self, line: str) -> bool:
  return self._table_line.match(line) is not None
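The markdownformatter hunks above only add typing.Final annotations to the precompiled class-level patterns, documenting (and letting type checkers enforce) that they are constants and must not be rebound. A brief illustrative sketch, not the package's actual class:

```python
import re
from typing import Final


class RulerLike:
    regex: Final["re.Pattern[str]"] = re.compile(r"^-{3,}$")

    def match(self, line: str) -> bool:
        return self.regex.match(line) is not None


assert RulerLike().match("-----")
```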
@@ -2,16 +2,17 @@ from functools import lru_cache

  _transform_table = str.maketrans("", "", "_ ")

+ _transform_table_namespace = str.maketrans("", "", " ")

- @lru_cache(maxsize=None)
+
+ @lru_cache(maxsize=8192)
  def normalize(text: str) -> str:
- # return text.lower().replace("_", "").replace(" ", "")
- return text.casefold().translate(_transform_table)
+ return text.translate(_transform_table).casefold()


- @lru_cache(maxsize=None)
+ @lru_cache(maxsize=8192)
  def normalize_namespace(text: str) -> str:
- return text.lower().replace(" ", "")
+ return text.translate(_transform_table_namespace).casefold()


  def eq(str1: str, str2: str) -> bool:
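The hunk above has both normalization helpers use a precomputed str.maketrans table plus casefold(), with a bounded lru_cache instead of an unbounded one. A sketch of the resulting helpers (order of translate/casefold matches the 0.96.0 version):

```python
from functools import lru_cache

_strip_underscore_and_space = str.maketrans("", "", "_ ")
_strip_space = str.maketrans("", "", " ")


@lru_cache(maxsize=8192)
def normalize(text: str) -> str:
    # drop "_" and " ", then casefold for case-insensitive comparison
    return text.translate(_strip_underscore_and_space).casefold()


@lru_cache(maxsize=8192)
def normalize_namespace(text: str) -> str:
    return text.translate(_strip_space).casefold()


assert normalize("My Keyword_Name") == "mykeywordname"
```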
@@ -1,12 +1,6 @@
- from typing import Any, Dict, Iterator, List, Protocol, Set, runtime_checkable
+ from typing import Any, Dict, Iterator, List, Protocol, Set


- @runtime_checkable
- class BodyBlock(Protocol):
- body: List[Any]
-
-
- @runtime_checkable
  class Languages(Protocol):
  languages: List[Any]
  headers: Dict[str, str]
@@ -47,26 +47,26 @@ BUILTIN_VARIABLES = [
  ]


- @functools.lru_cache(maxsize=512)
+ @functools.lru_cache(maxsize=8192)
  def contains_variable(string: str, identifiers: str = "$@&") -> bool:
  return cast(bool, robot_contains_variable(string, identifiers))


- @functools.lru_cache(maxsize=512)
+ @functools.lru_cache(maxsize=8192)
  def is_scalar_assign(string: str, allow_assign_mark: bool = False) -> bool:
  return cast(bool, robot_is_scalar_assign(string, allow_assign_mark))


- @functools.lru_cache(maxsize=512)
+ @functools.lru_cache(maxsize=8192)
  def is_variable(string: str, identifiers: str = "$@&") -> bool:
  return cast(bool, robot_is_variable(string, identifiers))


- @functools.lru_cache(maxsize=512)
+ @functools.lru_cache(maxsize=8192)
  def search_variable(string: str, identifiers: str = "$@&%*", ignore_errors: bool = False) -> RobotVariableMatch:
  return robot_search_variable(string, identifiers, ignore_errors)


- @functools.lru_cache(maxsize=512)
+ @functools.lru_cache(maxsize=8192)
  def split_from_equals(string: str) -> Tuple[str, Optional[str]]:
  return cast(Tuple[str, Optional[str]], robot_split_from_equals(string))
@@ -3,6 +3,7 @@ from abc import ABC
  from typing import (
  Any,
  Callable,
+ ClassVar,
  Dict,
  Iterator,
  Optional,
@@ -37,7 +38,7 @@ def iter_field_values(node: ast.AST) -> Iterator[Any]:


  class VisitorFinder(ABC):
- __cls_finder_cache__: Dict[Type[Any], Optional[Callable[..., Any]]]
+ __cls_finder_cache__: ClassVar[Dict[Type[Any], Optional[Callable[..., Any]]]]

  def __init_subclass__(cls, **kwargs: Any) -> None:
  super().__init_subclass__(**kwargs)
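Marking the finder cache as ClassVar in the hunk above makes explicit that the attribute belongs to the class (it is populated in __init_subclass__), not to each instance, and keeps type checkers from treating it as an instance field. A short sketch of the same shape with illustrative names:

```python
from typing import Any, Callable, ClassVar, Dict, Optional, Type


class FinderBase:
    __cls_finder_cache__: ClassVar[Dict[Type[Any], Optional[Callable[..., Any]]]]

    def __init_subclass__(cls, **kwargs: Any) -> None:
        super().__init_subclass__(**kwargs)
        # each subclass gets its own cache dictionary
        cls.__cls_finder_cache__ = {}


class MyFinder(FinderBase):
    pass


assert MyFinder.__cls_finder_cache__ == {}
```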
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: robotcode-robot
- Version: 0.95.1
+ Version: 0.96.0
  Summary: Support classes for RobotCode for handling Robot Framework projects.
  Project-URL: Homepage, https://robotcode.io
  Project-URL: Donate, https://opencollective.com/robotcode
@@ -25,8 +25,8 @@ Classifier: Programming Language :: Python :: Implementation :: PyPy
  Classifier: Topic :: Utilities
  Classifier: Typing :: Typed
  Requires-Python: >=3.8
- Requires-Dist: platformdirs<4.2.0,>=3.2.0
- Requires-Dist: robotcode-core==0.95.1
+ Requires-Dist: platformdirs<4.4.0,>=3.2.0
+ Requires-Dist: robotcode-core==0.96.0
  Requires-Dist: robotframework>=4.1.0
  Requires-Dist: tomli>=1.1.0; python_version < '3.11'
  Description-Content-Type: text/markdown
@@ -0,0 +1,32 @@
+ robotcode/robot/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ robotcode/robot/__version__.py,sha256=B-kjJajURz8O11CWqC3d4KQ5aeL4lukk9XP51Xbpo8o,23
+ robotcode/robot/py.typed,sha256=bWew9mHgMy8LqMu7RuqQXFXLBxh2CRx0dUbSx-3wE48,27
+ robotcode/robot/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ robotcode/robot/config/loader.py,sha256=bNJwr_XdCoUzpG2ag0BH33PIfiCwn0GMxn7q_Sw3zOk,8103
+ robotcode/robot/config/model.py,sha256=sgr6-4_E06g-yIXW41Z-NtIXZ_7JMmR5WvUD7kTUqu4,89106
+ robotcode/robot/config/utils.py,sha256=xY-LH31BidWzonpvSrle-4HvKrp02I7IRqU2JwlL4Ls,2931
+ robotcode/robot/diagnostics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ robotcode/robot/diagnostics/data_cache.py,sha256=Wge9HuxSUiBVMmrmlsYSMmG2ad7f3Texwox0Dm8lN7U,2969
+ robotcode/robot/diagnostics/diagnostics_modifier.py,sha256=3dDsu8-ET6weIvv7Sk3IQaPYFNxnXUs8Y7gpGTjfOBs,9796
+ robotcode/robot/diagnostics/document_cache_helper.py,sha256=n903UxVXM4Uq4fPxN5s-dugQAKcWUwf4Nw4q0CJV7aw,23902
+ robotcode/robot/diagnostics/entities.py,sha256=b4u2yQN8MDg90RoTMaW7iLogiDNwOAtK180KCB94RfE,10970
+ robotcode/robot/diagnostics/errors.py,sha256=vRH7HiZOfQIC-L7ys2Bj9ULYxLpUH7I03qJRSkEx08k,1813
+ robotcode/robot/diagnostics/imports_manager.py,sha256=zMHqs8WGis9r74FR6Jb4AqpR15EmGtdBWpMq9R_GZMA,58604
+ robotcode/robot/diagnostics/keyword_finder.py,sha256=dm4BA0ccp5V4C65CkSYUJUNXegSmvG24uu09T3eL6a4,17319
+ robotcode/robot/diagnostics/library_doc.py,sha256=VPCX7xp-0LJiYSFLO68y8MuNAMIYcnhJTIHRmWPpl30,100507
+ robotcode/robot/diagnostics/model_helper.py,sha256=ltuUNWwZJFBmMFXIomMmW1IP5v7tMpQSoC1YbncgoNI,30985
+ robotcode/robot/diagnostics/namespace.py,sha256=lJOkaS_yCp8SVhURqh5NqAsm394s0cHZUMQwVeh9nno,75159
+ robotcode/robot/diagnostics/namespace_analyzer.py,sha256=MgEoEGH7FvwVYoR3wA0JEGQxMWJTUUHq10NrorJV5LY,74183
+ robotcode/robot/diagnostics/workspace_config.py,sha256=0QLcjyyDHsirH0Y6o1RvBEi3_jgJxcp2zvHT1dSQuWU,2627
+ robotcode/robot/utils/__init__.py,sha256=OjNPMn_XSnfaMCyKd8Kmq6vlRt6mIGlzW4qiiD3ykUg,447
+ robotcode/robot/utils/ast.py,sha256=eqAVVquoRbMw3WvGmK6FnkUjZzAxHAitVjqK-vx-HSY,10764
+ robotcode/robot/utils/markdownformatter.py,sha256=SdHFfK9OdBnljWMP5r5Jy2behtHy-_Myd7GV4hiH-kI,11688
+ robotcode/robot/utils/match.py,sha256=9tG1OD9KS1v9ocWgsERSf6z_w9gAeE5LourNUYHzvTM,653
+ robotcode/robot/utils/robot_path.py,sha256=Z-GVBOPA_xeD20bCJi4_AWaU0eQWvCym-YFtyRpXARE,1767
+ robotcode/robot/utils/stubs.py,sha256=umugZYAyneFNgqRJBRMJPzm0u0B_TH8Sx_y-ykXnxpw,351
+ robotcode/robot/utils/variables.py,sha256=-ldL8mRRSYYW2pwlm8IpoDeQcG6LYBqaYyV_7U3xsIc,2174
+ robotcode/robot/utils/visitor.py,sha256=nP3O0qh3YYuxR6S8wYJRBFfNwIVgsgohURBlrnFkRYQ,2299
+ robotcode_robot-0.96.0.dist-info/METADATA,sha256=PPFZwhpqJldo2MUkr0Kg4Qa_hx4EkAbJIZCqsG124UE,2240
+ robotcode_robot-0.96.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+ robotcode_robot-0.96.0.dist-info/licenses/LICENSE.txt,sha256=B05uMshqTA74s-0ltyHKI6yoPfJ3zYgQbvcXfDVGFf8,10280
+ robotcode_robot-0.96.0.dist-info/RECORD,,
@@ -1,32 +0,0 @@
- robotcode/robot/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- robotcode/robot/__version__.py,sha256=h3UCuPK_uHJxqKWB4LppsykwINZBaI076ST-mAa3CFU,23
- robotcode/robot/py.typed,sha256=bWew9mHgMy8LqMu7RuqQXFXLBxh2CRx0dUbSx-3wE48,27
- robotcode/robot/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- robotcode/robot/config/loader.py,sha256=bNJwr_XdCoUzpG2ag0BH33PIfiCwn0GMxn7q_Sw3zOk,8103
- robotcode/robot/config/model.py,sha256=sgr6-4_E06g-yIXW41Z-NtIXZ_7JMmR5WvUD7kTUqu4,89106
- robotcode/robot/config/utils.py,sha256=xY-LH31BidWzonpvSrle-4HvKrp02I7IRqU2JwlL4Ls,2931
- robotcode/robot/diagnostics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- robotcode/robot/diagnostics/data_cache.py,sha256=Wge9HuxSUiBVMmrmlsYSMmG2ad7f3Texwox0Dm8lN7U,2969
- robotcode/robot/diagnostics/diagnostics_modifier.py,sha256=3dDsu8-ET6weIvv7Sk3IQaPYFNxnXUs8Y7gpGTjfOBs,9796
- robotcode/robot/diagnostics/document_cache_helper.py,sha256=n903UxVXM4Uq4fPxN5s-dugQAKcWUwf4Nw4q0CJV7aw,23902
- robotcode/robot/diagnostics/entities.py,sha256=um9Yes1LUO30cRgL-JCC_WE3zMPJwoEFkFrJdNkzxwY,11000
- robotcode/robot/diagnostics/errors.py,sha256=vRH7HiZOfQIC-L7ys2Bj9ULYxLpUH7I03qJRSkEx08k,1813
- robotcode/robot/diagnostics/imports_manager.py,sha256=lmwg_wYFZLNx_o0u856_5JihXHPLBei2vfr6Puhlm-c,59127
- robotcode/robot/diagnostics/keyword_finder.py,sha256=2FpPgor3RnT17Kor9L1XhLXcKn1DI43AVoYexdcM-Bs,17418
- robotcode/robot/diagnostics/library_doc.py,sha256=uVRldDJ-cIHxS9hj4QnMAiGPzcrUNVyMGFvRyshU5H4,100520
- robotcode/robot/diagnostics/model_helper.py,sha256=ltuUNWwZJFBmMFXIomMmW1IP5v7tMpQSoC1YbncgoNI,30985
- robotcode/robot/diagnostics/namespace.py,sha256=Y6HDBKIYyCc3qCg2TT-orB9mASd-Ii4fkZuIpcFQMbk,75417
- robotcode/robot/diagnostics/namespace_analyzer.py,sha256=NlvfAEYH_GyE1ZQ1JH9vR9yPfki3Xmw9TyNEc-B0mtM,74067
- robotcode/robot/diagnostics/workspace_config.py,sha256=3SoewUj_LZB1Ki5hXM8oxQpJr6vyiog66SUw-ibODSA,2478
- robotcode/robot/utils/__init__.py,sha256=OjNPMn_XSnfaMCyKd8Kmq6vlRt6mIGlzW4qiiD3ykUg,447
- robotcode/robot/utils/ast.py,sha256=7TxZiQhh4Nk33it0Q9P6nnmmYiB7317SpXR7QB57MiY,11091
- robotcode/robot/utils/markdownformatter.py,sha256=Cj4NjComTcNZf8uuezvtBbZqPMLjS237RknMopZYETk,11418
- robotcode/robot/utils/match.py,sha256=Vtz1ueT6DIZZ4hKyXgvTg1A3x2puBwHgvjw1oAYBn5w,632
- robotcode/robot/utils/robot_path.py,sha256=Z-GVBOPA_xeD20bCJi4_AWaU0eQWvCym-YFtyRpXARE,1767
- robotcode/robot/utils/stubs.py,sha256=g-DrP8io1Ft5w3flcZXrjkDCCmEQBZDVbzWt4P14Jcs,457
- robotcode/robot/utils/variables.py,sha256=XNPUDpghGy_f_Fne9lJ4OST-kFi-72Nrr0yJUu6f_Oc,2169
- robotcode/robot/utils/visitor.py,sha256=GPMHgWZLEgbrmY0AxlhsXrHBY8Fgo5-XmYAJXbmSD8w,2275
- robotcode_robot-0.95.1.dist-info/METADATA,sha256=9rVUX0KWKqm-PM8ulPubu_EATUd7gOfx_oCF6S8GfPc,2240
- robotcode_robot-0.95.1.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
- robotcode_robot-0.95.1.dist-info/licenses/LICENSE.txt,sha256=B05uMshqTA74s-0ltyHKI6yoPfJ3zYgQbvcXfDVGFf8,10280
- robotcode_robot-0.95.1.dist-info/RECORD,,