robotcode-robot 0.94.0__py3-none-any.whl → 0.95.1__py3-none-any.whl

@@ -1 +1 @@
- __version__ = "0.94.0"
+ __version__ = "0.95.1"
@@ -0,0 +1,83 @@
+ import pickle
+ from abc import ABC, abstractmethod
+ from enum import Enum
+ from pathlib import Path
+ from typing import Any, Tuple, Type, TypeVar, Union, cast
+
+ from robotcode.core.utils.dataclasses import as_json, from_json
+
+ _T = TypeVar("_T")
+
+
+ class CacheSection(Enum):
+     LIBRARY = "libdoc"
+     VARIABLES = "variables"
+
+
+ class DataCache(ABC):
+     @abstractmethod
+     def cache_data_exists(self, section: CacheSection, entry_name: str) -> bool: ...
+
+     @abstractmethod
+     def read_cache_data(
+         self, section: CacheSection, entry_name: str, types: Union[Type[_T], Tuple[Type[_T], ...]]
+     ) -> _T: ...
+
+     @abstractmethod
+     def save_cache_data(self, section: CacheSection, entry_name: str, data: Any) -> None: ...
+
+
+ class JsonDataCache(DataCache):
+     def __init__(self, cache_dir: Path) -> None:
+         self.cache_dir = cache_dir
+
+     def build_cache_data_filename(self, section: CacheSection, entry_name: str) -> Path:
+         return self.cache_dir / section.value / (entry_name + ".json")
+
+     def cache_data_exists(self, section: CacheSection, entry_name: str) -> bool:
+         cache_file = self.build_cache_data_filename(section, entry_name)
+         return cache_file.exists()
+
+     def read_cache_data(
+         self, section: CacheSection, entry_name: str, types: Union[Type[_T], Tuple[Type[_T], ...]]
+     ) -> _T:
+         cache_file = self.build_cache_data_filename(section, entry_name)
+         return from_json(cache_file.read_text("utf-8"), types)
+
+     def save_cache_data(self, section: CacheSection, entry_name: str, data: Any) -> None:
+         cached_file = self.build_cache_data_filename(section, entry_name)
+
+         cached_file.parent.mkdir(parents=True, exist_ok=True)
+         cached_file.write_text(as_json(data), "utf-8")
+
+
+ class PickleDataCache(DataCache):
+     def __init__(self, cache_dir: Path) -> None:
+         self.cache_dir = cache_dir
+
+     def build_cache_data_filename(self, section: CacheSection, entry_name: str) -> Path:
+         return self.cache_dir / section.value / (entry_name + ".pkl")
+
+     def cache_data_exists(self, section: CacheSection, entry_name: str) -> bool:
+         cache_file = self.build_cache_data_filename(section, entry_name)
+         return cache_file.exists()
+
+     def read_cache_data(
+         self, section: CacheSection, entry_name: str, types: Union[Type[_T], Tuple[Type[_T], ...]]
+     ) -> _T:
+         cache_file = self.build_cache_data_filename(section, entry_name)
+
+         with cache_file.open("rb") as f:
+             result = pickle.load(f)
+
+         if isinstance(result, types):
+             return cast(_T, result)
+
+         raise TypeError(f"Expected {types} but got {type(result)}")
+
+     def save_cache_data(self, section: CacheSection, entry_name: str, data: Any) -> None:
+         cached_file = self.build_cache_data_filename(section, entry_name)
+
+         cached_file.parent.mkdir(parents=True, exist_ok=True)
+         with cached_file.open("wb") as f:
+             pickle.dump(data, f)
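A minimal usage sketch of the new cache classes added above. The cache directory and entry name are hypothetical, and the import path assumes the module lands at robotcode.robot.diagnostics.data_cache; only CacheSection, JsonDataCache and PickleDataCache come from the diff itself:

    from pathlib import Path
    # assumed import path:
    # from robotcode.robot.diagnostics.data_cache import CacheSection, PickleDataCache

    cache = PickleDataCache(Path(".robotcode_cache") / "3.12.4" / "7.1.1")  # hypothetical base dir
    entry = "BuiltIn.spec"  # hypothetical entry name

    if not cache.cache_data_exists(CacheSection.LIBRARY, entry):
        # pickles the object into <cache_dir>/libdoc/BuiltIn.spec.pkl
        cache.save_cache_data(CacheSection.LIBRARY, entry, {"name": "BuiltIn"})

    # raises TypeError if the unpickled object is not of the expected type
    data = cache.read_cache_data(CacheSection.LIBRARY, entry, dict)

JsonDataCache keeps the previous JSON-on-disk behaviour, while PickleDataCache (wired in below as DefaultDataCache) trades human-readable cache files for faster serialization.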
@@ -12,11 +12,11 @@ from typing import (
  )
 
  from robot.parsing.lexer.tokens import Token
- from robot.variables.search import search_variable
  from robotcode.core.lsp.types import Position, Range
  from robotcode.robot.utils.match import normalize
 
  from ..utils.ast import range_from_token
+ from ..utils.variables import search_variable
 
  if TYPE_CHECKING:
      from robotcode.robot.diagnostics.library_doc import KeywordDoc, LibraryDoc
@@ -145,10 +145,10 @@ class VariableMatcher:
          self.normalized_name = str(normalize(self.base))
 
      def __eq__(self, o: object) -> bool:
-         if isinstance(o, VariableMatcher):
+         if type(o) is VariableMatcher:
              return o.normalized_name == self.normalized_name
 
-         if isinstance(o, str):
+         if type(o) is str:
              match = search_variable(o, "$@&%", ignore_errors=True)
              base = match.base
              if base is None:
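The switch from isinstance to an exact type() check above tightens equality: subclasses of str or VariableMatcher no longer take these branches. A small illustrative sketch (the str subclass here is purely hypothetical, not part of the diff):

    class MyStr(str):
        pass

    o = MyStr("${var}")
    print(isinstance(o, str))  # True  -> old code would have treated it as a plain string
    print(type(o) is str)      # False -> new code skips this branch for subclasses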
@@ -6,7 +6,7 @@ DIAGNOSTICS_SOURCE_NAME = "robotcode"
  @final
  class Error:
      VARIABLE_NOT_FOUND = "VariableNotFound"
-     ENVIROMMENT_VARIABLE_NOT_FOUND = "EnvirommentVariableNotFound"
+     ENVIRONMENT_VARIABLE_NOT_FOUND = "EnvironmentVariableNotFound"
      KEYWORD_NOT_FOUND = "KeywordNotFound"
      LIBRARY_CONTAINS_NO_KEYWORDS = "LibraryContainsNoKeywords"
      POSSIBLE_CIRCULAR_IMPORT = "PossibleCircularImport"
@@ -25,6 +25,7 @@ from typing import (
      final,
  )
 
+ from robot.libraries import STDLIBS
  from robot.utils.text import split_args_from_name_or_path
  from robotcode.core.concurrent import RLock, run_as_task
  from robotcode.core.documents_manager import DocumentsManager
@@ -35,14 +36,16 @@ from robotcode.core.lsp.types import DocumentUri, FileChangeType, FileEvent
  from robotcode.core.text_document import TextDocument
  from robotcode.core.uri import Uri
  from robotcode.core.utils.caching import SimpleLRUCache
- from robotcode.core.utils.dataclasses import as_json, from_json
- from robotcode.core.utils.glob_path import Pattern, iter_files
+ from robotcode.core.utils.glob_path import Pattern
  from robotcode.core.utils.logging import LoggingDescriptor
  from robotcode.core.utils.path import normalized_path, path_is_relative_to
 
  from ..__version__ import __version__
  from ..utils import get_robot_version, get_robot_version_str
  from ..utils.robot_path import find_file_ex
+ from ..utils.variables import contains_variable
+ from .data_cache import CacheSection
+ from .data_cache import PickleDataCache as DefaultDataCache
  from .entities import (
      CommandLineVariableDefinition,
      VariableDefinition,
@@ -521,18 +524,10 @@ class ImportsManager:
          self._logger.trace(lambda: f"use {cache_base_path} as base for caching")
 
          self.cache_path = cache_base_path / ".robotcode_cache"
-
-         self.lib_doc_cache_path = (
-             self.cache_path
-             / f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
-             / get_robot_version_str()
-             / "libdoc"
-         )
-         self.variables_doc_cache_path = (
+         self.data_cache = DefaultDataCache(
              self.cache_path
              / f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
              / get_robot_version_str()
-             / "variables"
          )
 
          self.cmd_variables = variables
@@ -564,9 +559,9 @@ class ImportsManager:
          if environment:
              self._environment.update(environment)
 
-         self._library_files_cache = SimpleLRUCache(1024)
-         self._resource_files_cache = SimpleLRUCache(1024)
-         self._variables_files_cache = SimpleLRUCache(1024)
+         self._library_files_cache = SimpleLRUCache(2048)
+         self._resource_files_cache = SimpleLRUCache(2048)
+         self._variables_files_cache = SimpleLRUCache(2048)
 
          self._executor_lock = RLock(default_timeout=120, name="ImportsManager._executor_lock")
          self._executor: Optional[ProcessPoolExecutor] = None
@@ -582,6 +577,8 @@ class ImportsManager:
              weakref.WeakKeyDictionary()
          )
 
+         self._process_pool_executor: Optional[ProcessPoolExecutor] = None
+
      def __del__(self) -> None:
          try:
              if self._executor is not None:
@@ -899,12 +896,14 @@ class ImportsManager:
          )
 
          if result is not None:
+             # TODO: use IgnoreSpec instead of this
              ignore_arguments = any(
                  (p.matches(result.name) if result.name is not None else False)
                  or (p.matches(result.origin) if result.origin is not None else False)
                  for p in self.ignore_arguments_for_library_patters
              )
 
+             # TODO: use IgnoreSpec instead of this
              if any(
                  (p.matches(result.name) if result.name is not None else False)
                  or (p.matches(result.origin) if result.origin is not None else False)
@@ -918,16 +917,16 @@ class ImportsManager:
                  return None, import_name, ignore_arguments
 
          if result.origin is not None:
-             result.mtimes = {result.origin: Path(result.origin).stat().st_mtime_ns}
+             result.mtimes = {result.origin: os.stat(result.origin, follow_symlinks=False).st_mtime_ns}
 
          if result.submodule_search_locations:
              if result.mtimes is None:
                  result.mtimes = {}
              result.mtimes.update(
                  {
-                     str(f): f.stat().st_mtime_ns
+                     str(f): os.stat(f, follow_symlinks=False).st_mtime_ns
                      for f in itertools.chain(
-                         *(iter_files(loc, "**/*.py") for loc in result.submodule_search_locations)
+                         *(Path(loc).rglob("**/*.py") for loc in result.submodule_search_locations)
                      )
                  }
              )
@@ -987,16 +986,16 @@ class ImportsManager:
                  return None, import_name
 
          if result.origin is not None:
-             result.mtimes = {result.origin: Path(result.origin).stat().st_mtime_ns}
+             result.mtimes = {result.origin: os.stat(result.origin, follow_symlinks=False).st_mtime_ns}
 
          if result.submodule_search_locations:
              if result.mtimes is None:
                  result.mtimes = {}
              result.mtimes.update(
                  {
-                     str(f): f.stat().st_mtime_ns
+                     str(f): os.stat(f, follow_symlinks=False).st_mtime_ns
                      for f in itertools.chain(
-                         *(iter_files(loc, "**/*.py") for loc in result.submodule_search_locations)
+                         *(Path(loc).rglob("**/*.py") for loc in result.submodule_search_locations)
                      )
                  }
              )
@@ -1015,7 +1014,10 @@ class ImportsManager:
          base_dir: str,
          variables: Optional[Dict[str, Any]] = None,
      ) -> str:
-         return self._library_files_cache.get(self._find_library, name, base_dir, variables)
+         if contains_variable(name, "$@&%"):
+             return self._library_files_cache.get(self._find_library, name, base_dir, variables)
+
+         return self._library_files_cache.get(self._find_library_simple, name, base_dir)
 
      def _find_library(
          self,
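The gate above only sends names through the variable-resolving path when they actually contain Robot Framework variable syntax; plain names take the cheaper `_find_library_simple` route and can be cached without the variables dictionary. A rough illustration of what the check distinguishes, using `contains_variable` from robot.variables.search, which the re-exported helper in `..utils.variables` is assumed to mirror:

    from robot.variables.search import contains_variable

    print(contains_variable("${ROOT}/MyLibrary.py", "$@&%"))  # True  -> resolve variables, cache per variables dict
    print(contains_variable("Collections", "$@&%"))           # False -> simple, variable-free lookup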
@@ -1023,17 +1025,19 @@ class ImportsManager:
          base_dir: str,
          variables: Optional[Dict[str, Any]] = None,
      ) -> str:
-         from robot.libraries import STDLIBS
-         from robot.variables.search import contains_variable
+         return find_library(
+             name,
+             str(self.root_folder),
+             base_dir,
+             self.get_resolvable_command_line_variables(),
+             variables,
+         )
 
-         if contains_variable(name, "$@&%"):
-             return find_library(
-                 name,
-                 str(self.root_folder),
-                 base_dir,
-                 self.get_resolvable_command_line_variables(),
-                 variables,
-             )
+     def _find_library_simple(
+         self,
+         name: str,
+         base_dir: str,
+     ) -> str:
 
          if name in STDLIBS:
              result = ROBOT_LIBRARY_PACKAGE + "." + name
@@ -1052,7 +1056,10 @@ class ImportsManager:
          file_type: str = "Resource",
          variables: Optional[Dict[str, Any]] = None,
      ) -> str:
-         return self._resource_files_cache.get(self.__find_resource, name, base_dir, file_type, variables)
+         if contains_variable(name, "$@&%"):
+             return self._resource_files_cache.get(self.__find_resource, name, base_dir, file_type, variables)
+
+         return self._resource_files_cache.get(self.__find_resource_simple, name, base_dir, file_type)
 
      @_logger.call
      def __find_resource(
@@ -1062,64 +1069,71 @@ class ImportsManager:
          file_type: str = "Resource",
          variables: Optional[Dict[str, Any]] = None,
      ) -> str:
-         from robot.variables.search import contains_variable
+         return find_file(
+             name,
+             str(self.root_folder),
+             base_dir,
+             self.get_resolvable_command_line_variables(),
+             variables,
+             file_type,
+         )
 
-         if contains_variable(name, "$@&%"):
-             return find_file(
+     def __find_resource_simple(
+         self,
+         name: str,
+         base_dir: str,
+         file_type: str = "Resource",
+     ) -> str:
+         return find_file_ex(name, base_dir, file_type)
+
+     def find_variables(
+         self,
+         name: str,
+         base_dir: str,
+         variables: Optional[Dict[str, Any]] = None,
+         resolve_variables: bool = True,
+         resolve_command_line_vars: bool = True,
+     ) -> str:
+         if resolve_variables and contains_variable(name, "$@&%"):
+             return self._variables_files_cache.get(
+                 self.__find_variables,
                  name,
-                 str(self.root_folder),
                  base_dir,
-                 self.get_resolvable_command_line_variables(),
                  variables,
-                 file_type,
+                 resolve_command_line_vars,
              )
+         return self._variables_files_cache.get(self.__find_variables_simple, name, base_dir)
 
-         return str(find_file_ex(name, base_dir, file_type))
-
-     def find_variables(
+     @_logger.call
+     def __find_variables(
          self,
          name: str,
          base_dir: str,
          variables: Optional[Dict[str, Any]] = None,
-         resolve_variables: bool = True,
          resolve_command_line_vars: bool = True,
      ) -> str:
-         return self._variables_files_cache.get(
-             self.__find_variables,
+         return find_variables(
              name,
+             str(self.root_folder),
              base_dir,
+             self.get_resolvable_command_line_variables() if resolve_command_line_vars else None,
              variables,
-             resolve_variables,
-             resolve_command_line_vars,
          )
 
      @_logger.call
-     def __find_variables(
+     def __find_variables_simple(
          self,
          name: str,
          base_dir: str,
-         variables: Optional[Dict[str, Any]] = None,
-         resolve_variables: bool = True,
-         resolve_command_line_vars: bool = True,
      ) -> str:
-         from robot.variables.search import contains_variable
-
-         if resolve_variables and contains_variable(name, "$@&%"):
-             return find_variables(
-                 name,
-                 str(self.root_folder),
-                 base_dir,
-                 self.get_resolvable_command_line_variables() if resolve_command_line_vars else None,
-                 variables,
-             )
 
          if get_robot_version() >= (5, 0):
              if is_variables_by_path(name):
-                 return str(find_file_ex(name, base_dir, "Variables"))
+                 return find_file_ex(name, base_dir, "Variables")
 
              return name
 
-         return str(find_file_ex(name, base_dir, "Variables"))
+         return find_file_ex(name, base_dir, "Variables")
 
      @property
      def executor(self) -> ProcessPoolExecutor:
@@ -1155,29 +1169,26 @@ class ImportsManager:
 
          if meta is not None and not meta.has_errors:
 
-             meta_file = Path(self.lib_doc_cache_path, meta.filepath_base + ".meta.json")
-             if meta_file.exists():
+             meta_file = meta.filepath_base + ".meta"
+             if self.data_cache.cache_data_exists(CacheSection.LIBRARY, meta_file):
                  try:
                      spec_path = None
                      try:
-                         saved_meta = from_json(meta_file.read_text("utf-8"), LibraryMetaData)
+                         saved_meta = self.data_cache.read_cache_data(CacheSection.LIBRARY, meta_file, LibraryMetaData)
                          if saved_meta.has_errors:
                              self._logger.debug(
-                                 lambda: "Saved library spec for {name}{args!r} is not used "
+                                 lambda: f"Saved library spec for {name}{args!r} is not used "
                                  "due to errors in meta data",
                                  context_name="import",
                              )
 
                          if not saved_meta.has_errors and saved_meta == meta:
-                             spec_path = Path(
-                                 self.lib_doc_cache_path,
-                                 meta.filepath_base + ".spec.json",
-                             )
+                             spec_path = meta.filepath_base + ".spec"
 
                              self._logger.debug(
                                  lambda: f"Use cached library meta data for {name}", context_name="import"
                              )
-                             return from_json(spec_path.read_text("utf-8"), LibraryDoc)
+                             return self.data_cache.read_cache_data(CacheSection.LIBRARY, spec_path, LibraryDoc)
 
                  except (SystemExit, KeyboardInterrupt):
                      raise
@@ -1191,6 +1202,9 @@ class ImportsManager:
                  self._logger.exception(e)
 
          self._logger.debug(lambda: f"Load library in process {name}{args!r}", context_name="import")
+         # if self._process_pool_executor is None:
+         #     self._process_pool_executor = ProcessPoolExecutor(max_workers=1, mp_context=mp.get_context("spawn"))
+         # executor = self._process_pool_executor
          executor = ProcessPoolExecutor(max_workers=1, mp_context=mp.get_context("spawn"))
          try:
              try:
@@ -1222,19 +1236,17 @@ class ImportsManager:
              if meta is not None:
                  meta.has_errors = bool(result.errors)
 
-                 meta_file = Path(self.lib_doc_cache_path, meta.filepath_base + ".meta.json")
-                 spec_file = Path(self.lib_doc_cache_path, meta.filepath_base + ".spec.json")
-
-                 spec_file.parent.mkdir(parents=True, exist_ok=True)
+                 meta_file = meta.filepath_base + ".meta"
+                 spec_file = meta.filepath_base + ".spec"
 
                  try:
-                     spec_file.write_text(as_json(result), "utf-8")
+                     self.data_cache.save_cache_data(CacheSection.LIBRARY, spec_file, result)
                  except (SystemExit, KeyboardInterrupt):
                      raise
                  except BaseException as e:
                      raise RuntimeError(f"Cannot write spec file for library '{name}' to '{spec_file}'") from e
 
-                 meta_file.write_text(as_json(meta), "utf-8")
+                 self.data_cache.save_cache_data(CacheSection.LIBRARY, meta_file, meta)
              else:
                  self._logger.debug(lambda: f"Skip caching library {name}{args!r}", context_name="import")
          except (SystemExit, KeyboardInterrupt):
@@ -1351,21 +1363,17 @@ class ImportsManager:
              )
 
              if meta is not None:
-                 meta_file = Path(
-                     self.variables_doc_cache_path,
-                     meta.filepath_base + ".meta.json",
-                 )
-                 if meta_file.exists():
+                 meta_file = meta.filepath_base + ".meta"
+
+                 if self.data_cache.cache_data_exists(CacheSection.VARIABLES, meta_file):
                      try:
                          spec_path = None
                          try:
-                             saved_meta = from_json(meta_file.read_text("utf-8"), LibraryMetaData)
+                             saved_meta = self.data_cache.read_cache_data(CacheSection.VARIABLES, meta_file, LibraryMetaData)
                              if saved_meta == meta:
-                                 spec_path = Path(
-                                     self.variables_doc_cache_path,
-                                     meta.filepath_base + ".spec.json",
-                                 )
-                                 return from_json(spec_path.read_text("utf-8"), VariablesDoc)
+                                 spec_path = meta.filepath_base + ".spec"
+
+                                 return self.data_cache.read_cache_data(CacheSection.VARIABLES, spec_path, VariablesDoc)
                      except (SystemExit, KeyboardInterrupt):
                          raise
                      except BaseException as e:
@@ -1406,23 +1414,16 @@ class ImportsManager:
 
          try:
              if meta is not None:
-                 meta_file = Path(
-                     self.variables_doc_cache_path,
-                     meta.filepath_base + ".meta.json",
-                 )
-                 spec_file = Path(
-                     self.variables_doc_cache_path,
-                     meta.filepath_base + ".spec.json",
-                 )
-                 spec_file.parent.mkdir(parents=True, exist_ok=True)
+                 meta_file = meta.filepath_base + ".meta"
+                 spec_file = meta.filepath_base + ".spec"
 
                  try:
-                     spec_file.write_text(as_json(result), "utf-8")
+                     self.data_cache.save_cache_data(CacheSection.VARIABLES, spec_file, result)
                  except (SystemExit, KeyboardInterrupt):
                      raise
                  except BaseException as e:
                      raise RuntimeError(f"Cannot write spec file for variables '{name}' to '{spec_file}'") from e
-                 meta_file.write_text(as_json(meta), "utf-8")
+                 self.data_cache.save_cache_data(CacheSection.VARIABLES, meta_file, meta)
              else:
                  self._logger.debug(lambda: f"Skip caching variables {name}{args!r}", context_name="import")
          except (SystemExit, KeyboardInterrupt):