robotcode-robot 0.95.0__py3-none-any.whl → 0.95.2__py3-none-any.whl

@@ -44,7 +44,7 @@ from robot.output.logger import LOGGER
  from robot.output.loggerhelper import AbstractLogger
  from robot.parsing.lexer.tokens import Token
  from robot.parsing.lexer.tokens import Token as RobotToken
- from robot.parsing.model.blocks import Keyword
+ from robot.parsing.model.blocks import Keyword, KeywordSection, Section, SettingSection
  from robot.parsing.model.statements import Arguments, KeywordName
  from robot.running.arguments.argumentresolver import ArgumentResolver, DictToKwargs, NamedArgumentResolver
  from robot.running.arguments.argumentresolver import VariableReplacer as ArgumentsVariableReplacer
@@ -64,7 +64,18 @@ from robot.variables.finders import VariableFinder
  from robot.variables.replacer import VariableReplacer
  from robotcode.core.lsp.types import Position, Range
  from robotcode.core.utils.path import normalized_path
- from robotcode.robot.diagnostics.entities import (
+
+ from ..utils import get_robot_version
+ from ..utils.ast import (
+     cached_isinstance,
+     get_variable_token,
+     range_from_token,
+     strip_variable_token,
+ )
+ from ..utils.markdownformatter import MarkDownFormatter
+ from ..utils.match import normalize, normalize_namespace
+ from ..utils.variables import contains_variable
+ from .entities import (
      ArgumentDefinition,
      ImportedVariableDefinition,
      LibraryArgumentDefinition,
@@ -72,18 +83,6 @@ from robotcode.robot.diagnostics.entities import (
      SourceEntity,
      single_call,
  )
- from robotcode.robot.utils import get_robot_version
- from robotcode.robot.utils.ast import (
-     cached_isinstance,
-     get_variable_token,
-     range_from_token,
-     strip_variable_token,
- )
- from robotcode.robot.utils.markdownformatter import MarkDownFormatter
- from robotcode.robot.utils.match import normalize, normalize_namespace
- from robotcode.robot.utils.stubs import HasError, HasErrors
-
- from ..utils.variables import contains_variable

  if get_robot_version() < (7, 0):
      from robot.running.handlers import _PythonHandler, _PythonInitHandler  # pyright: ignore[reportMissingImports]
@@ -201,22 +200,36 @@ def convert_from_rest(text: str) -> str:

  if get_robot_version() >= (6, 0):

-     @functools.lru_cache(maxsize=None)
+     # monkey patch robot framework
+     _old_from_name = EmbeddedArguments.from_name
+
+     @functools.lru_cache(maxsize=8192)
+     def _new_from_name(name: str) -> EmbeddedArguments:
+         return _old_from_name(name)
+
+     EmbeddedArguments.from_name = _new_from_name
+
      def _get_embedded_arguments(name: str) -> Any:
          try:
              return EmbeddedArguments.from_name(name)
          except (VariableError, DataError):
              return ()

+     def _match_embedded(embedded_arguments: EmbeddedArguments, name: str) -> bool:
+         return embedded_arguments.match(name) is not None
+
  else:

-     @functools.lru_cache(maxsize=None)
+     @functools.lru_cache(maxsize=8192)
      def _get_embedded_arguments(name: str) -> Any:
          try:
              return EmbeddedArguments(name)
          except (VariableError, DataError):
              return ()

+     def _match_embedded(embedded_arguments: EmbeddedArguments, name: str) -> bool:
+         return embedded_arguments.name.match(name) is not None
+

  def is_embedded_keyword(name: str) -> bool:
      try:
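Note: the hunk above bounds the embedded-arguments cache (maxsize=8192 instead of maxsize=None) and additionally memoizes EmbeddedArguments.from_name itself by monkey patching it, with a version-specific _match_embedded helper replacing the per-instance matching method. A minimal sketch of that wrap-and-reassign caching pattern, using a hypothetical EmbeddedSpec stand-in rather than Robot Framework's real class:

    import functools
    import re
    from typing import Tuple

    class EmbeddedSpec:
        """Stand-in for EmbeddedArguments: parses ${...} placeholders in a keyword name."""
        def __init__(self, name: str, args: Tuple[str, ...]) -> None:
            self.name = name
            self.args = args

        @classmethod
        def from_name(cls, name: str) -> "EmbeddedSpec":
            return cls(name, tuple(re.findall(r"\$\{(.*?)\}", name)))

    _old_from_name = EmbeddedSpec.from_name

    @functools.lru_cache(maxsize=8192)  # bounded cache instead of maxsize=None
    def _cached_from_name(name: str) -> EmbeddedSpec:
        return _old_from_name(name)

    EmbeddedSpec.from_name = _cached_from_name  # later callers reuse parsed specs

    assert EmbeddedSpec.from_name("add ${a} to ${b}").args == ("a", "b")

The bounded size keeps memory flat on large workspaces while still deduplicating the expensive parse for repeatedly seen keyword names.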
@@ -239,59 +252,50 @@ class KeywordMatcher:
          self._can_have_embedded = can_have_embedded and not is_namespace
          self._is_namespace = is_namespace
          self._normalized_name: Optional[str] = None
-         self._embedded_arguments: Any = None
+
+         self.embedded_arguments: Optional[EmbeddedArguments] = (
+             _get_embedded_arguments(self.name) or None if self._can_have_embedded else None
+         )

      @property
      def normalized_name(self) -> str:
          if self._normalized_name is None:
-             self._normalized_name = str(normalize_namespace(self.name) if self._is_namespace else normalize(self.name))
+             self._normalized_name = normalize_namespace(self.name) if self._is_namespace else normalize(self.name)

          return self._normalized_name

-     @property
-     def embedded_arguments(self) -> Any:
-         if self._embedded_arguments is None:
-             if self._can_have_embedded:
-                 self._embedded_arguments = _get_embedded_arguments(self.name)
-             else:
-                 self._embedded_arguments = ()
-
-         return self._embedded_arguments
-
-     if get_robot_version() >= (6, 0):
-
-         def __match_embedded(self, name: str) -> bool:
-             return self.embedded_arguments.match(name) is not None
-
-     else:
-
-         def __match_embedded(self, name: str) -> bool:
-             return self.embedded_arguments.name.match(name) is not None
-
      def __eq__(self, o: object) -> bool:
-         if cached_isinstance(o, KeywordMatcher):
+         if type(o) is KeywordMatcher:
              if self._is_namespace != o._is_namespace:
                  return False

-             if not self.embedded_arguments:
+             if self.embedded_arguments is not None:
                  return self.normalized_name == o.normalized_name

              o = o.name

-         if not cached_isinstance(o, str):
+         if type(o) is not str:
              return False

-         if self.embedded_arguments:
-             return self.__match_embedded(o)
+         return self.match_string(o)

-         return self.normalized_name == str(normalize_namespace(o) if self._is_namespace else normalize(o))
+     def match_string(self, o: str) -> bool:
+         if self.embedded_arguments is not None:
+             return _match_embedded(self.embedded_arguments, o)
+
+         return self.normalized_name == (normalize_namespace(o) if self._is_namespace else normalize(o))

      @single_call
      def __hash__(self) -> int:
          return hash(
-             (self.embedded_arguments.name, tuple(self.embedded_arguments.args))
-             if self.embedded_arguments
-             else (self.normalized_name, self._is_namespace)
+             (
+                 self.normalized_name,
+                 self._is_namespace,
+                 self._can_have_embedded,
+                 self.embedded_arguments,
+                 self.embedded_arguments.name if self.embedded_arguments else None,
+                 self.embedded_arguments.args if self.embedded_arguments else None,
+             )
          )

      def __str__(self) -> str:
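Note: after this hunk a KeywordMatcher resolves its embedded arguments once in __init__ and funnels all name comparisons through match_string(). A simplified, hypothetical re-implementation of that behaviour (normalize plain names, pattern-match embedded ones); the class and helpers below are illustrative stand-ins, not the library's API:

    import re
    from typing import Optional

    def normalize(name: str) -> str:
        return name.lower().replace(" ", "").replace("_", "")

    class SimpleKeywordMatcher:
        def __init__(self, name: str) -> None:
            self.name = name
            self.normalized_name = normalize(name)
            # eager: build the embedded-argument pattern once, in __init__
            self.embedded: Optional["re.Pattern[str]"] = None
            if "${" in name:
                parts = re.split(r"\$\{[^}]*\}", name)  # literal text around ${...} slots
                self.embedded = re.compile(
                    "(.*?)".join(re.escape(p) for p in parts) + "$", re.IGNORECASE
                )

        def match_string(self, other: str) -> bool:
            if self.embedded is not None:
                return self.embedded.match(other) is not None
            return self.normalized_name == normalize(other)

    m = SimpleKeywordMatcher("Add ${count} Copies To Cart")
    assert m.match_string("add 7 copies to cart")
    assert not m.match_string("Remove 7 Copies From Cart")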
@@ -618,7 +622,6 @@ class KeywordDoc(SourceEntity):
      libname: Optional[str] = None
      libtype: Optional[str] = None
      longname: Optional[str] = None
-     is_embedded: bool = False
      errors: Optional[List[Error]] = field(default=None, compare=False)
      doc_format: str = ROBOT_DOC_FORMAT
      is_error_handler: bool = False
@@ -667,13 +670,15 @@ class KeywordDoc(SourceEntity):
      def __str__(self) -> str:
          return f"{self.name}({', '.join(str(arg) for arg in self.arguments)})"

-     @property
+     @functools.cached_property
+     def is_embedded(self) -> bool:
+         return self.matcher.embedded_arguments is not None
+
+     @functools.cached_property
      def matcher(self) -> KeywordMatcher:
-         if not hasattr(self, "__matcher"):
-             self.__matcher = KeywordMatcher(self.name)
-         return self.__matcher
+         return KeywordMatcher(self.name)

-     @property
+     @functools.cached_property
      def is_deprecated(self) -> bool:
          return self.deprecated or DEPRECATED_PATTERN.match(self.doc) is not None

@@ -685,31 +690,31 @@ class KeywordDoc(SourceEntity):
      def is_library_keyword(self) -> bool:
          return self.libtype == "LIBRARY"

-     @property
+     @functools.cached_property
      def deprecated_message(self) -> str:
          if (m := DEPRECATED_PATTERN.match(self.doc)) is not None:
              return m.group("message").strip()
          return ""

-     @property
+     @functools.cached_property
      def name_range(self) -> Range:
          if self.name_token is not None:
              return range_from_token(self.name_token)

          return Range.invalid()

-     @single_call
+     @functools.cached_property
      def normalized_tags(self) -> List[str]:
          return [normalize(tag) for tag in self.tags]

-     @single_call
+     @functools.cached_property
      def is_private(self) -> bool:
          if get_robot_version() < (6, 0):
              return False

-         return "robot:private" in self.normalized_tags()
+         return "robot:private" in self.normalized_tags

-     @property
+     @functools.cached_property
      def range(self) -> Range:
          if self.name_token is not None:
              return range_from_token(self.name_token)
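Note: the hunks above replace @property and @single_call with functools.cached_property, so derived values such as normalized_tags are computed once per KeywordDoc instance and then read back from the instance dict (which is also why normalized_tags is now accessed without parentheses). A small stand-alone illustration of that effect, using a hypothetical Doc class:

    import functools
    from typing import List

    class Doc:
        def __init__(self, tags: List[str]) -> None:
            self.tags = tags
            self.calls = 0

        @functools.cached_property
        def normalized_tags(self) -> List[str]:
            self.calls += 1
            return [t.strip().lower() for t in self.tags]

    d = Doc(["  Robot:Private ", "SMOKE"])
    assert d.normalized_tags == ["robot:private", "smoke"]
    assert d.normalized_tags == ["robot:private", "smoke"]
    assert d.calls == 1  # the second access reuses the stored list

One consequence of the switch is that the cached value reflects the instance state at first access, which fits these documentation objects because they are effectively immutable after construction.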
@@ -820,7 +825,7 @@ class KeywordDoc(SourceEntity):

          return result

-     @property
+     @functools.cached_property
      def signature(self) -> str:
          return (
              f'({self.type}) "{self.name}": ('
@@ -893,7 +898,6 @@ class KeywordDoc(SourceEntity):
                  self.type,
                  self.libname,
                  self.libtype,
-                 self.is_embedded,
                  self.is_initializer,
                  self.is_error_handler,
                  self.doc_format,
@@ -919,19 +923,13 @@ class KeywordStore:
      source_type: Optional[str] = None
      keywords: List[KeywordDoc] = field(default_factory=list)

-     @property
-     def _matchers(self) -> Dict[KeywordMatcher, KeywordDoc]:
-         if not hasattr(self, "__matchers"):
-             self.__matchers = {v.matcher: v for v in self.keywords}
-         return self.__matchers
-
      def __getitem__(self, key: str) -> KeywordDoc:
-         items = [(k, v) for k, v in self._matchers.items() if k == key]
+         items = [v for v in self.keywords if v.matcher == key]

          if not items:
              raise KeyError
          if len(items) == 1:
-             return items[0][1]
+             return items[0]

          if self.source and self.source_type:
              file_info = ""
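Note: KeywordStore no longer builds a separate _matchers dictionary; __getitem__ scans the keyword list, compares each document's matcher against the requested name and reports ambiguity when more than one matches. A reduced sketch of that lookup strategy with a string-based stand-in matcher (hypothetical KwDoc/KwStore, not the real classes):

    from dataclasses import dataclass, field
    from typing import List

    def normalize(name: str) -> str:
        return name.lower().replace(" ", "").replace("_", "")

    @dataclass
    class KwDoc:
        name: str

        @property
        def matcher(self) -> str:  # stand-in for the real KeywordMatcher
            return normalize(self.name)

    @dataclass
    class KwStore:
        keywords: List[KwDoc] = field(default_factory=list)

        def __getitem__(self, key: str) -> KwDoc:
            items = [v for v in self.keywords if v.matcher == normalize(key)]
            if not items:
                raise KeyError(key)
            if len(items) > 1:
                raise ValueError(f"multiple keywords match {key!r}: {[v.name for v in items]}")
            return items[0]

    store = KwStore([KwDoc("Open Browser"), KwDoc("open_browser"), KwDoc("Close Browser")])
    assert store["close browser"].name == "Close Browser"
    # store["Open Browser"] would raise: two stored keywords normalize to the same name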
@@ -946,14 +944,14 @@ class KeywordStore:
          else:
              file_info = "File"
          error = [f"{file_info} contains multiple keywords matching name '{key}':"]
-         names = sorted(k.name for k, v in items)
+         names = sorted(v.name for v in items)
          raise KeywordError(
              "\n ".join(error + names),
-             multiple_keywords=[v for _, v in items],
+             multiple_keywords=[v for v in items],
          )

      def __contains__(self, _x: object) -> bool:
-         return any(k == _x for k in self._matchers.keys())
+         return any(v.matcher == _x for v in self.keywords)

      def __len__(self) -> int:
          return len(self.keywords)
@@ -983,7 +981,7 @@ class KeywordStore:
          return list(self.iter_all(key))

      def iter_all(self, key: str) -> Iterable[KeywordDoc]:
-         yield from (v for k, v in self._matchers.items() if k == key)
+         return (v for v in self.keywords if v.matcher.match_string(key))


  @dataclass
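Note: iter_all changes from a generator function (yield from) to a plain method that returns a generator expression; for callers that only iterate over the result, the two forms are interchangeable, as this toy comparison sketches:

    from typing import Iterable

    def via_yield(values: Iterable[int]) -> Iterable[int]:
        yield from (v for v in values if v % 2 == 0)

    def via_return(values: Iterable[int]) -> Iterable[int]:
        return (v for v in values if v % 2 == 0)

    assert list(via_yield(range(6))) == list(via_return(range(6))) == [0, 2, 4]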
@@ -1296,12 +1294,12 @@ class VariablesDoc(LibraryDoc):
          return result


- @functools.lru_cache(maxsize=256)
+ @functools.lru_cache(maxsize=8192)
  def is_library_by_path(path: str) -> bool:
      return path.lower().endswith((".py", "/", os.sep))


- @functools.lru_cache(maxsize=256)
+ @functools.lru_cache(maxsize=8192)
  def is_variables_by_path(path: str) -> bool:
      if get_robot_version() >= (6, 1):
          return path.lower().endswith((".py", ".yml", ".yaml", ".json", "/", os.sep))
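Note: the path checks above move from a 256-entry to an 8192-entry lru_cache; both stay bounded, unlike maxsize=None. A toy demonstration of the eviction behaviour a bounded cache gives, with a hypothetical is_library_path shrunk to maxsize=2 so the effect is visible:

    import functools

    @functools.lru_cache(maxsize=2)  # tiny limit just to show eviction
    def is_library_path(path: str) -> bool:
        return path.lower().endswith((".py", "/"))

    for p in ("a.py", "b.robot", "c.py", "a.py"):
        is_library_path(p)

    print(is_library_path.cache_info())
    # CacheInfo(hits=0, misses=4, maxsize=2, currsize=2): "a.py" was evicted before
    # its second call, so a larger maxsize trades a little memory for hit rate.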
@@ -2030,7 +2028,6 @@ def get_library_doc(
                  libname=libdoc.name,
                  libtype=libdoc.type,
                  longname=f"{libdoc.name}.{kw[0].name}",
-                 is_embedded=is_embedded_keyword(kw[0].name),
                  doc_format=str(lib.doc_format) or ROBOT_DOC_FORMAT,
                  is_error_handler=kw[1].is_error_handler,
                  error_handler_message=kw[1].error_handler_message,
@@ -2710,133 +2707,146 @@ def complete_variables_import(
      return list(set(result))


- def get_model_doc(
-     model: ast.AST,
-     source: str,
-     append_model_errors: bool = True,
- ) -> LibraryDoc:
-     errors: List[Error] = []
-     keyword_name_nodes: List[KeywordName] = []
-     keywords_nodes: List[Keyword] = []
-     for node in ast.walk(model):
-         if isinstance(node, Keyword):
-             keywords_nodes.append(node)
-         if isinstance(node, KeywordName):
-             keyword_name_nodes.append(node)
-
-         error = node.error if isinstance(node, HasError) else None
-         if error is not None:
-             errors.append(
-                 Error(
-                     message=error,
-                     type_name="ModelError",
-                     source=source,
-                     line_no=node.lineno,  # type: ignore
-                 )
-             )
-         if append_model_errors:
-             node_errors = node.errors if isinstance(node, HasErrors) else None
-             if node_errors is not None:
-                 for e in node_errors:
-                     errors.append(
-                         Error(
-                             message=e,
-                             type_name="ModelError",
-                             source=source,
-                             line_no=node.lineno,  # type: ignore
-                         )
-                     )
+ if get_robot_version() < (7, 0):

-     def get_keyword_name_token_from_line(line: int) -> Optional[Token]:
-         for keyword_name in keyword_name_nodes:
-             if keyword_name.lineno == line:
-                 return cast(Token, keyword_name.get_token(RobotToken.KEYWORD_NAME))
+     class _MyUserLibrary(UserLibrary):
+         current_kw: Any = None

-         return None
+         def __init__(self, *args: Any, **kwargs: Any) -> None:
+             self.errors: List[Error] = []
+             super().__init__(*args, **kwargs)

-     def get_argument_definitions_from_line(
-         line: int,
-     ) -> List[ArgumentDefinition]:
-         keyword_node = next((k for k in keywords_nodes if k.lineno == line), None)
-         if keyword_node is None:
-             return []
+         def _log_creating_failed(self, handler: UserErrorHandler, error: BaseException) -> None:
+             err = Error(
+                 message=f"Creating keyword '{handler.name}' failed: {error!s}",
+                 type_name=type(error).__qualname__,
+                 source=self.current_kw.source if self.current_kw is not None else None,
+                 line_no=self.current_kw.lineno if self.current_kw is not None else None,
+             )
+             self.errors.append(err)

-         arguments_node = next(
-             (n for n in ast.walk(keyword_node) if isinstance(n, Arguments)),
-             None,
-         )
-         if arguments_node is None:
-             return []
+         def _create_handler(self, kw: Any) -> Any:
+             self.current_kw = kw
+             try:
+                 handler = super()._create_handler(kw)
+                 handler.errors = None
+             except DataError as e:
+                 err = Error(
+                     message=str(e),
+                     type_name=type(e).__qualname__,
+                     source=kw.source,
+                     line_no=kw.lineno,
+                 )
+                 self.errors.append(err)

-         args: List[str] = []
-         arguments = arguments_node.get_tokens(RobotToken.ARGUMENT)
-         argument_definitions = []
+                 handler = UserErrorHandler(e, kw.name, self.name)
+                 handler.source = kw.source
+                 handler.lineno = kw.lineno

-         for argument_token in (cast(RobotToken, e) for e in arguments):
-             try:
-                 argument = get_variable_token(argument_token)
-
-                 if argument is not None and argument.value != "@{}":
-                     if argument.value not in args:
-                         args.append(argument.value)
-                         arg_def = ArgumentDefinition(
-                             name=argument.value,
-                             name_token=strip_variable_token(argument),
-                             line_no=argument.lineno,
-                             col_offset=argument.col_offset,
-                             end_line_no=argument.lineno,
-                             end_col_offset=argument.end_col_offset,
-                             source=source,
-                         )
-                         argument_definitions.append(arg_def)
+                 handler.errors = [err]

-             except VariableError:
-                 pass
+             return handler

-         return argument_definitions

-     res = ResourceFile(source=source)
+ def _get_keyword_name_token_from_line(keyword_name_nodes: Dict[int, KeywordName], line: int) -> Optional[Token]:
+     keyword_name = keyword_name_nodes.get(line, None)
+     if keyword_name is None:
+         return None
+     return cast(Token, keyword_name.get_token(RobotToken.KEYWORD_NAME))

-     with LOGGER.cache_only:
-         ResourceBuilder(res).visit(model)

-     if get_robot_version() < (7, 0):
+ def _get_argument_definitions_from_line(
+     keywords_nodes: Dict[int, Keyword],
+     source: Optional[str],
+     line: int,
+ ) -> List[ArgumentDefinition]:
+     keyword_node = keywords_nodes.get(line, None)
+     if keyword_node is None:
+         return []

-         class MyUserLibrary(UserLibrary):
-             current_kw: Any = None
+     arguments_node = next(
+         (n for n in ast.walk(keyword_node) if isinstance(n, Arguments)),
+         None,
+     )
+     if arguments_node is None:
+         return []

-             def _log_creating_failed(self, handler: UserErrorHandler, error: BaseException) -> None:
-                 err = Error(
-                     message=f"Creating keyword '{handler.name}' failed: {error!s}",
-                     type_name=type(error).__qualname__,
-                     source=self.current_kw.source if self.current_kw is not None else None,
-                     line_no=self.current_kw.lineno if self.current_kw is not None else None,
-                 )
-                 errors.append(err)
+     args: List[str] = []
+     arguments = arguments_node.get_tokens(RobotToken.ARGUMENT)
+     argument_definitions = []

-             def _create_handler(self, kw: Any) -> Any:
-                 self.current_kw = kw
-                 try:
-                     handler = super()._create_handler(kw)
-                     handler.errors = None
-                 except DataError as e:
-                     err = Error(
-                         message=str(e),
-                         type_name=type(e).__qualname__,
-                         source=kw.source,
-                         line_no=kw.lineno,
+     for argument_token in (cast(RobotToken, e) for e in arguments):
+         try:
+             argument = get_variable_token(argument_token)
+
+             if argument is not None and argument.value != "@{}":
+                 if argument.value not in args:
+                     args.append(argument.value)
+                     arg_def = ArgumentDefinition(
+                         name=argument.value,
+                         name_token=strip_variable_token(argument),
+                         line_no=argument.lineno,
+                         col_offset=argument.col_offset,
+                         end_line_no=argument.lineno,
+                         end_col_offset=argument.end_col_offset,
+                         source=source,
                      )
-                     errors.append(err)
+                     argument_definitions.append(arg_def)

-                     handler = UserErrorHandler(e, kw.name, self.name)
-                     handler.source = kw.source
-                     handler.lineno = kw.lineno
+         except VariableError:
+             pass

-                     handler.errors = [err]
+     return argument_definitions
+
+
+ class _MyResourceBuilder(ResourceBuilder):
+     def __init__(self, resource: Any) -> None:
+         super().__init__(resource)
+         self.keyword_name_nodes: Dict[int, KeywordName] = {}
+         self.keywords_nodes: Dict[int, Keyword] = {}
+
+     def visit_Section(self, node: Section) -> None:  # noqa: N802
+         if isinstance(node, (SettingSection, KeywordSection)):
+             self.generic_visit(node)
+
+     def visit_Keyword(self, node: Keyword) -> None:  # noqa: N802
+         self.keywords_nodes[node.lineno] = node
+         super().visit_Keyword(node)
+         if node.header is not None:
+             self.keyword_name_nodes[node.lineno] = node.header
+
+
+ def _get_kw_errors(kw: Any) -> Any:
+     r = kw.errors if hasattr(kw, "errors") else None
+     if get_robot_version() >= (7, 0) and kw.error:
+         if not r:
+             r = []
+         r.append(
+             Error(
+                 message=str(kw.error),
+                 type_name="KeywordError",
+                 source=kw.source,
+                 line_no=kw.lineno,
+             )
+         )
+     return r

-                 return handler

-         lib = MyUserLibrary(res)
+ def get_model_doc(
+     model: ast.AST,
+     source: str,
+ ) -> LibraryDoc:
+     res = ResourceFile(source=source)
+
+     res_builder = _MyResourceBuilder(res)
+     with LOGGER.cache_only:
+         res_builder.visit(model)
+
+     keyword_name_nodes: Dict[int, KeywordName] = res_builder.keyword_name_nodes
+     keywords_nodes: Dict[int, Keyword] = res_builder.keywords_nodes
+
+     if get_robot_version() < (7, 0):
+         lib = _MyUserLibrary(res)
      else:
          lib = res

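Note: the old get_model_doc walked the whole model a second time with ast.walk to collect Keyword and KeywordName nodes; the new _MyResourceBuilder records them keyed by line number while the resource is being built, so later lookups become dictionary gets. The same single-pass indexing idea, sketched with the standard-library ast visitor as an analogy rather than Robot Framework's model visitor API:

    import ast
    from typing import Dict

    class FunctionIndexer(ast.NodeVisitor):
        def __init__(self) -> None:
            self.functions_by_line: Dict[int, ast.FunctionDef] = {}

        def visit_FunctionDef(self, node: ast.FunctionDef) -> None:  # noqa: N802
            self.functions_by_line[node.lineno] = node  # index while visiting
            self.generic_visit(node)

    tree = ast.parse("def a():\n    pass\n\ndef b():\n    pass\n")
    indexer = FunctionIndexer()
    indexer.visit(tree)
    assert sorted(indexer.functions_by_line) == [1, 4]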
@@ -2847,24 +2857,8 @@ def get_model_doc(
          scope="GLOBAL",
          source=source,
          line_no=1,
-         errors=errors,
      )

-     def get_kw_errors(kw: Any) -> Any:
-         r = kw.errors if hasattr(kw, "errors") else None
-         if get_robot_version() >= (7, 0) and kw.error:
-             if not r:
-                 r = []
-             r.append(
-                 Error(
-                     message=str(kw.error),
-                     type_name="KeywordError",
-                     source=kw.source,
-                     line_no=kw.lineno,
-                 )
-             )
-         return r
-
      libdoc.keywords = KeywordStore(
          source=libdoc.name,
          source_type=libdoc.type,
@@ -2875,7 +2869,7 @@ def get_model_doc(
                  doc=kw[0].doc,
                  tags=list(kw[0].tags),
                  source=str(kw[0].source),
-                 name_token=get_keyword_name_token_from_line(kw[0].lineno),
+                 name_token=_get_keyword_name_token_from_line(keyword_name_nodes, kw[0].lineno),
                  line_no=kw[0].lineno if kw[0].lineno is not None else -1,
                  col_offset=-1,
                  end_col_offset=-1,
@@ -2883,8 +2877,7 @@ def get_model_doc(
                  libname=libdoc.name,
                  libtype=libdoc.type,
                  longname=f"{libdoc.name}.{kw[0].name}",
-                 is_embedded=is_embedded_keyword(kw[0].name),
-                 errors=get_kw_errors(kw[1]),
+                 errors=_get_kw_errors(kw[1]),
                  is_error_handler=isinstance(kw[1], UserErrorHandler),
                  error_handler_message=(
                      str(cast(UserErrorHandler, kw[1]).error) if isinstance(kw[1], UserErrorHandler) else None
@@ -2892,7 +2885,7 @@ def get_model_doc(
                  arguments_spec=ArgumentSpec.from_robot_argument_spec(
                      kw[1].arguments if get_robot_version() < (7, 0) else kw[1].args
                  ),
-                 argument_definitions=get_argument_definitions_from_line(kw[0].lineno),
+                 argument_definitions=_get_argument_definitions_from_line(keywords_nodes, source, kw[0].lineno),
              )
              for kw in [
                  (KeywordDocBuilder(resource=True).build_keyword(lw), lw)
@@ -665,12 +665,12 @@ class ModelHelper:

      @classmethod
      def get_keyword_definition_at_token(cls, library_doc: LibraryDoc, token: Token) -> Optional[KeywordDoc]:
-         return cls.get_keyword_definition_at_line(library_doc, token.value, token.lineno)
+         return cls.get_keyword_definition_at_line(library_doc, token.lineno)

      @classmethod
-     def get_keyword_definition_at_line(cls, library_doc: LibraryDoc, value: str, line: int) -> Optional[KeywordDoc]:
+     def get_keyword_definition_at_line(cls, library_doc: LibraryDoc, line: int) -> Optional[KeywordDoc]:
          return next(
-             (k for k in library_doc.keywords.iter_all(value) if k.line_no == line),
+             (k for k in library_doc.keywords.keywords if k.line_no == line),
              None,
          )

@@ -910,11 +910,7 @@ class Namespace:
      def get_library_doc(self) -> LibraryDoc:
          with self._library_doc_lock:
              if self._library_doc is None:
-                 self._library_doc = self.imports_manager.get_libdoc_from_model(
-                     self.model,
-                     self.source,
-                     append_model_errors=self.document_type is not None and self.document_type == DocumentType.RESOURCE,
-                 )
+                 self._library_doc = self.imports_manager.get_libdoc_from_model(self.model, self.source)

          return self._library_doc

@@ -1887,7 +1883,6 @@ class Namespace:
                              source=DIAGNOSTICS_SOURCE_NAME,
                              code=err.type_name,
                          )
-             # TODO: implement CancelationToken
          except CancelledError:
              canceled = True
              self._logger.debug("analyzing canceled")
@@ -1904,7 +1899,7 @@ class Namespace:

      def create_finder(self) -> "KeywordFinder":
          self.ensure_initialized()
-         return KeywordFinder(self, self.get_library_doc())
+         return KeywordFinder(self)

      @_logger.call(condition=lambda self, name, **kwargs: self._finder is not None and name not in self._finder._cache)
      def find_keyword(