omdev 0.0.0.dev24__py3-none-any.whl → 0.0.0.dev26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of omdev has been flagged as potentially problematic; see the registry's advisory page for more details.

@@ -0,0 +1,93 @@
1
+ import hashlib
2
+ import typing as ta
3
+ import urllib.parse
4
+ import urllib.request
5
+
6
+ from omlish import cached
7
+ from omlish import check
8
+ from omlish import dataclasses as dc
9
+ from omlish import lang
10
+ from omlish import marshal as msh
11
+ from omlish.formats import json
12
+
13
+ from .consts import MARSHAL_VERSION
14
+
15
+
16
+ ##
17
+
18
+
19
@dc.dataclass(frozen=True)
class CacheDataSpec(lang.Abstract, lang.Sealed):
    """
    Abstract, sealed base for cache data specs.

    A spec is identified by the digest of its compact-JSON marshalled form, so
    the fields (and the marshal version stamp) fully determine the cache key.
    """

    # Version stamp included in the marshalled form; bumping MARSHAL_VERSION
    # changes every spec's digest and thereby invalidates existing cache keys.
    marshal_version: int = dc.field(default=MARSHAL_VERSION, kw_only=True)

    @cached.property
    def json(self) -> str:
        # Marshal as the CacheDataSpec base type so the polymorphic form
        # (installed by _install_standard_marshalling) is used.
        return json.dumps_compact(msh.marshal(self, CacheDataSpec))

    @cached.property
    def digest(self) -> str:
        # MD5 of the JSON form - a cache key, not a security measure (hence the noqa).
        return hashlib.md5(self.json.encode('utf-8')).hexdigest() # noqa
30
+
31
+
32
+ ##
33
+
34
+
35
def _maybe_sorted_strs(v: ta.Iterable[str] | None) -> ta.Sequence[str] | None:
    """Normalize an optional iterable of strings into a deterministic sequence.

    ``None`` passes through unchanged; a bare ``str`` is rejected (it would
    silently iterate as characters); otherwise values are de-duplicated and
    sorted so the result is order-insensitive.
    """
    if v is None:
        return None
    items = check.not_isinstance(v, str)
    return sorted(set(items))
39
+
40
+
41
@dc.dataclass(frozen=True)
class GitCacheDataSpec(CacheDataSpec):
    """Spec for cache data cloned from a git repository."""

    # Clone URL of the repository.
    url: str

    # Pin of the checkout. NOTE(review): nothing here enforces that only one of
    # branch/rev is set - confirm against consumers.
    branch: str | None = dc.field(default=None, kw_only=True)
    rev: str | None = dc.field(default=None, kw_only=True)

    # Subtrees to materialize; coerced to a sorted, de-duplicated sequence so
    # the spec's digest does not depend on input ordering.
    subtrees: ta.Sequence[str] = dc.xfield(default=None, kw_only=True, coerce=_maybe_sorted_strs)
49
+
50
+
51
+ ##
52
+
53
+
54
@dc.dataclass(frozen=True)
class HttpCacheDataSpec(CacheDataSpec):
    """Spec for cache data fetched from an HTTP URL."""

    # NOTE(review): urlparse() returns a 6-tuple, which is always truthy, so
    # this validator accepts any string (it only rejects non-str input via the
    # TypeError urlparse raises). Likely intended to check .scheme/.netloc.
    url: str = dc.xfield(validate=lambda u: bool(urllib.parse.urlparse(u)))
    # Optional explicit local file name; see file_name_or_default.
    file_name: str | None = None

    @cached.property
    def file_name_or_default(self) -> str:
        # Fall back to the last path segment of the URL when no explicit
        # file name was given.
        if self.file_name is not None:
            return self.file_name
        return urllib.parse.urlparse(self.url).path.split('/')[-1]
64
+
65
+
66
+ ##
67
+
68
+
69
def _repo_str(s: str) -> str:
    """Validate that ``s`` has the form ``'<user>/<repo>'`` and return it unchanged.

    Raises if the string is empty, does not split into exactly two parts on
    ``'/'`` (ValueError from the unpack), or either part is empty.
    """
    user, repo = check.non_empty_str(s).split('/')
    check.non_empty_str(user)
    check.non_empty_str(repo)
    return s
74
+
75
+
76
@dc.dataclass(frozen=True)
class GithubContentCacheDataSpec(CacheDataSpec):
    """Spec for individual files fetched from a github repo at a fixed revision."""

    # '<user>/<repo>' - validated by _repo_str.
    repo: str = dc.field(validate=_repo_str) # type: ignore
    # Commit-ish the files are taken from.
    rev: str
    # Paths of the files to fetch (a sequence, never a bare str).
    files: lang.SequenceNotStr[str]
81
+
82
+
83
+ ##
84
+
85
+
86
@lang.cached_function
def _install_standard_marshalling() -> None:
    """Register polymorphic (un)marshalling for all CacheDataSpec subclasses.

    The factories are spliced in at the front of the standard factory lists so
    they take precedence; cached_function makes repeat calls no-ops.
    """
    specs_poly = msh.polymorphism_from_subclasses(CacheDataSpec)
    msh.STANDARD_MARSHALER_FACTORIES[0:0] = [msh.PolymorphismMarshalerFactory(specs_poly)]
    msh.STANDARD_UNMARSHALER_FACTORIES[0:0] = [msh.PolymorphismUnmarshalerFactory(specs_poly)]


# Installed eagerly at import time so CacheDataSpec.json works without further setup.
_install_standard_marshalling()
omdev/findmagic.py CHANGED
@@ -17,6 +17,13 @@ def find_magic(
17
17
  *,
18
18
  py: bool = False,
19
19
  ) -> ta.Iterator[str]:
20
+ if isinstance(roots, str):
21
+ raise TypeError(roots)
22
+ if isinstance(magics, str):
23
+ raise TypeError(magics)
24
+ if isinstance(exts, str):
25
+ raise TypeError(exts)
26
+
20
27
  if not magics:
21
28
  raise Exception('Must specify magics')
22
29
  if not exts:
omdev/git.py ADDED
@@ -0,0 +1,62 @@
1
+ import os.path
2
+ import subprocess
3
+ import typing as ta
4
+
5
+
6
def clone_subtree(
        *,
        base_dir: str,
        repo_url: str,
        repo_dir: str,
        branch: str | None = None,
        rev: str | None = None,
        repo_subtrees: ta.Sequence[str],
) -> None:
    """Shallow-clone only the given subtrees of a repo into ``base_dir/repo_dir``.

    Exactly one of ``branch`` or ``rev`` must be given (truthy). Performs a
    blobless (``--filter=tree:0``), depth-1, no-checkout clone followed by a
    no-cone sparse checkout, so only the requested subtrees are materialized.
    """
    if not bool(branch) ^ bool(rev):
        raise ValueError('must set branch or rev')

    # A bare str would silently iterate as single characters.
    if isinstance(repo_subtrees, str):
        raise TypeError(repo_subtrees)

    # Silence the detached-HEAD advice emitted when checking out a bare rev.
    common_opts = ['-c', 'advice.detachedHead=false']

    def run_git(args: ta.Sequence[str], cwd: str) -> None:
        # All git invocations share the common options and raise on failure.
        subprocess.check_call(['git', *common_opts, *args], cwd=cwd)

    clone_args = [
        'clone',
        '-n',              # no checkout yet - sparse-checkout comes first
        '--depth=1',
        '--filter=tree:0',
    ]
    if branch:
        clone_args += ['-b', branch]
    clone_args += ['--single-branch', repo_url, repo_dir]
    run_git(clone_args, cwd=base_dir)

    checkout_dir = os.path.join(base_dir, repo_dir)

    run_git(['sparse-checkout', 'set', '--no-cone', *repo_subtrees], cwd=checkout_dir)

    run_git(['checkout', *([rev] if rev else [])], cwd=checkout_dir)
omdev/manifests.py ADDED
@@ -0,0 +1,247 @@
1
+ """
2
+ !!! manifests! get-manifest, _manifest.py
3
+ - dumb dicts, root keys are 'types'
4
+ - get put in _manifest.py, root level dict or smth
5
+ - IMPORT files w comment
6
+ - comment must immediately precede a global val setter
7
+ - val is grabbed from imported module dict by name
8
+ - value is repr'd somehow (roundtrip checked) (naw, json lol)
9
+ - dumped in _manifest.py
10
+ - # @omlish-manifest \n _CACHE_MANIFEST = {'cache': {'name': 'llm', …
11
+ - also can do prechecks!
12
+ """
13
+ # ruff: noqa: UP006 UP007
14
+ # @omlish-lite
15
+ import argparse
16
+ import collections
17
+ import dataclasses as dc
18
+ import inspect
19
+ import json
20
+ import os.path
21
+ import re
22
+ import shlex
23
+ import subprocess
24
+ import sys
25
+ import time
26
+ import typing as ta
27
+
28
+ from omlish.lite.cached import cached_nullary
29
+ from omlish.lite.json import json_dumps_pretty
30
+ from omlish.lite.logs import configure_standard_logging
31
+ from omlish.lite.logs import log
32
+
33
+ from . import findmagic
34
+
35
+
36
+ ##
37
+
38
+
39
@dc.dataclass(frozen=True)
class ManifestOrigin:
    """Where a manifest attribute was found, before its value is extracted."""

    # Package-relative module in leading-dot form (e.g. '.foo.bar').
    module: str
    # Name of the module-global the manifest value is bound to.
    attr: str

    # File path as recorded by build_module_manifests (its basename component).
    file: str
    # 1-based line number of the '@omlish-manifest' magic comment.
    line: int
46
+
47
+
48
@dc.dataclass(frozen=True)
class Manifest(ManifestOrigin):
    """A ManifestOrigin plus the extracted, JSON-roundtrippable value."""

    # The manifest value - a mapping of '$'-prefixed keys (enforced at build time).
    value: ta.Any
51
+
52
+
53
# Magic comment that must appear on the line immediately preceding a
# module-global assignment whose value is a manifest.
MANIFEST_MAGIC = '# @omlish-manifest'

# Matches the start of a global assignment and captures the bound name.
_MANIFEST_GLOBAL_PAT = re.compile(r'^(?P<name>[A-Za-z_][A-Za-z0-9_]*)\s*=.*')
56
+
57
+
58
+ def _dump_module_manifests(spec: str, *attrs: str) -> None:
59
+ import importlib
60
+ import json
61
+
62
+ mod = importlib.import_module(spec)
63
+
64
+ out = {}
65
+ for attr in attrs:
66
+ manifest = getattr(mod, attr)
67
+
68
+ manifest_json = json.dumps(manifest)
69
+ rt_manifest = json.loads(manifest_json)
70
+
71
+ if rt_manifest != manifest:
72
+ raise Exception(f'Manifest failed to roundtrip: {manifest} != {rt_manifest}')
73
+
74
+ out[attr] = rt_manifest
75
+
76
+ out_json = json.dumps(out, indent=None, separators=(',', ':'))
77
+ print(out_json)
78
+
79
+
80
@cached_nullary
def _payload_src() -> str:
    """Source text of _dump_module_manifests, embedded into 'python -c' payloads."""
    return inspect.getsource(_dump_module_manifests)
83
+
84
+
85
def build_module_manifests(
        file: str,
        base: str,
        *,
        shell_wrap: bool = True,
        warn_threshold_s: ta.Optional[float] = 1.,
) -> ta.Sequence[Manifest]:
    """Extract manifests from a single python source file.

    Scans ``base/file`` for MANIFEST_MAGIC comments, then imports the module in
    a fresh subprocess (via the _dump_module_manifests payload) to pull out the
    annotated global values.

    Args:
        file: Path of the module, relative to ``base``; must end in '.py' and
            start with the package root directory.
        base: Root directory containing the package.
        shell_wrap: Wrap the python invocation in 'sh -c'.
        warn_threshold_s: Log a warning if extraction takes at least this many
            seconds (None disables the check).

    Returns:
        One Manifest per magic-annotated global in the file.

    Raises:
        Exception: on malformed input - non-.py file, no manifests, duplicate
            attrs, a magic comment not followed by a global assignment, or
            unexpected subprocess output.
        TypeError: if a manifest value is not a mapping of '$'-prefixed keys.
    """
    log.info('Extracting manifests from file %s', file)

    if not file.endswith('.py'):
        raise Exception(file)

    # Derive the dotted module name from the path; the first path component
    # must equal the module's root package.
    mod_name = file.rpartition('.')[0].replace(os.sep, '.')
    mod_base = mod_name.split('.')[0]
    if mod_base != (first_dir := file.split(os.path.sep)[0]):
        raise Exception(f'Unexpected module base: {mod_base=} != {first_dir=}')

    with open(os.path.join(base, file)) as f:
        src = f.read()

    # Each magic comment must be immediately followed by a global assignment;
    # record where each annotated attr lives.
    origins: ta.List[ManifestOrigin] = []
    lines = src.splitlines(keepends=True)
    for i, l in enumerate(lines):
        if l.startswith(MANIFEST_MAGIC):
            # NOTE(review): lines[i + 1] raises IndexError if the magic comment
            # is the file's last line - confirm that is acceptable.
            if (m := _MANIFEST_GLOBAL_PAT.match(nl := lines[i + 1])) is None:
                raise Exception(nl)

            origins.append(ManifestOrigin(
                # Package-relative module name ('.sub.mod' form).
                module='.'.join(['', *mod_name.split('.')[1:]]),
                attr=m.groupdict()['name'],

                # os.path.split()[1:] is the (tail,) component, i.e. the basename.
                file=os.path.join(*os.path.split(file)[1:]), # noqa
                line=i + 1,
            ))

    if not origins:
        raise Exception('no manifests found')

    if (dups := [k for k, v in collections.Counter(o.attr for o in origins).items() if v > 1]):
        raise Exception(f'Duplicate attrs: {dups}')

    attrs = [o.attr for o in origins]

    # Run the extraction payload in a clean interpreter so the target module is
    # imported without this process's state.
    subproc_src = '\n\n'.join([
        _payload_src(),
        f'_dump_module_manifests({mod_name!r}, {", ".join(repr(a) for a in attrs)})\n',
    ])

    args = [
        sys.executable,
        '-c',
        subproc_src,
    ]

    if shell_wrap:
        args = ['sh', '-c', ' '.join(map(shlex.quote, args))]

    start_time = time.time()

    subproc_out = subprocess.check_output(args)

    end_time = time.time()

    if warn_threshold_s is not None and (elapsed_time := (end_time - start_time)) >= warn_threshold_s:
        log.warning('Manifest extraction took a long time: %s, %.2f s', file, elapsed_time)

    # The payload prints exactly one line of compact json: {attr: value}.
    sp_lines = subproc_out.decode().strip().splitlines()
    if len(sp_lines) != 1:
        raise Exception('Unexpected subprocess output')

    dct = json.loads(sp_lines[0])
    if set(dct) != set(attrs):
        raise Exception('Unexpected subprocess output keys')

    out: ta.List[Manifest] = []

    for o in origins:
        value = dct[o.attr]

        # Manifest values must be mappings keyed by '$'-prefixed strings.
        if not (
                isinstance(value, ta.Mapping) and
                all(isinstance(k, str) and k.startswith('$') and len(k) > 1 for k in value)
        ):
            raise TypeError(f'Manifests must be mapping of strings starting with $: {value!r}')

        out.append(Manifest(
            **dc.asdict(o),
            value=value,
        ))

    return out
176
+
177
+
178
def build_package_manifests(
        name: str,
        base: str,
        *,
        write: bool = False,
) -> ta.List[Manifest]:
    """Collect manifests from every magic-annotated module in package ``base/name``.

    When ``write`` is set, also dumps the collected manifests as pretty JSON to
    '<pkg>/_manifests.json'.
    """
    pkg_dir = os.path.join(base, name)

    # Must be a real (non-namespace) package directory.
    if not (os.path.isdir(pkg_dir) and os.path.isfile(os.path.join(pkg_dir, '__init__.py'))):
        raise Exception(pkg_dir)

    manifests: ta.List[Manifest] = []
    for file in findmagic.find_magic([pkg_dir], [MANIFEST_MAGIC], ['py']):
        manifests.extend(build_module_manifests(os.path.relpath(file, base), base))

    if write:
        out_path = os.path.join(pkg_dir, '_manifests.json')
        with open(out_path, 'w') as f:
            f.write(json_dumps_pretty([dc.asdict(m) for m in manifests]))
            f.write('\n')

    return manifests
203
+
204
+
205
+ ##
206
+
207
+
208
if __name__ == '__main__':
    def _gen_cmd(args) -> None:
        # Handler for the 'gen' subcommand: resolve the base directory
        # (defaults to cwd) and generate manifests for each named package.
        if args.base is not None:
            base = args.base
        else:
            base = os.getcwd()
        base = os.path.abspath(base)
        if not os.path.isdir(base):
            raise RuntimeError(base)

        for pkg in args.package:
            ms = build_package_manifests(
                pkg,
                base,
                write=args.write or False,
            )
            if not args.quiet:
                print(json_dumps_pretty([dc.asdict(m) for m in ms]))

    def _main(argv=None) -> None:
        # CLI entry point: parse args and dispatch to the chosen subcommand.
        configure_standard_logging('INFO')

        parser = argparse.ArgumentParser()
        subparsers = parser.add_subparsers()

        parser_gen = subparsers.add_parser('gen')
        parser_gen.add_argument('-b', '--base')
        parser_gen.add_argument('-w', '--write', action='store_true')
        parser_gen.add_argument('-q', '--quiet', action='store_true')
        parser_gen.add_argument('package', nargs='*')

        parser_gen.set_defaults(func=_gen_cmd)

        args = parser.parse_args(argv)
        # No subcommand given -> no 'func' default was set; show help instead.
        if not getattr(args, 'func', None):
            parser.print_help()
        else:
            args.func(args)

    _main()
omdev/pyproject/pkg.py CHANGED
@@ -30,13 +30,14 @@ import dataclasses as dc
30
30
  import importlib
31
31
  import os.path
32
32
  import shutil
33
- import subprocess
34
33
  import sys
34
+ import tempfile
35
35
  import types
36
36
  import typing as ta
37
37
 
38
38
  from omlish.lite.cached import cached_nullary
39
39
  from omlish.lite.logs import log
40
+ from omlish.lite.subprocesses import subprocess_check_call
40
41
 
41
42
  from ..cexts.magic import CextMagic
42
43
  from ..findmagic import find_magic
@@ -161,13 +162,12 @@ class BasePyprojectPackageGenerator(abc.ABC):
161
162
  build_output_dir: ta.Optional[str] = None,
162
163
  *,
163
164
  add_revision: bool = False,
165
+ test: bool = False,
164
166
  ) -> None:
165
- subprocess.check_call(
166
- [
167
- sys.executable,
168
- '-m',
169
- 'build',
170
- ],
167
+ subprocess_check_call(
168
+ sys.executable,
169
+ '-m',
170
+ 'build',
171
171
  cwd=self._pkg_dir(),
172
172
  )
173
173
 
@@ -176,6 +176,25 @@ class BasePyprojectPackageGenerator(abc.ABC):
176
176
  if add_revision:
177
177
  GitRevisionAdder().add_to(dist_dir)
178
178
 
179
+ if test:
180
+ for fn in os.listdir(dist_dir):
181
+ tmp_dir = tempfile.mkdtemp()
182
+
183
+ subprocess_check_call(
184
+ sys.executable,
185
+ '-m', 'venv',
186
+ 'test-install',
187
+ cwd=tmp_dir,
188
+ )
189
+
190
+ subprocess_check_call(
191
+ os.path.join(tmp_dir, 'test-install', 'bin', 'python3'),
192
+ '-m', 'pip',
193
+ 'install',
194
+ os.path.abspath(os.path.join(dist_dir, fn)),
195
+ cwd=tmp_dir,
196
+ )
197
+
179
198
  if build_output_dir is not None:
180
199
  for fn in os.listdir(dist_dir):
181
200
  shutil.copyfile(os.path.join(dist_dir, fn), os.path.join(build_output_dir, fn))
@@ -255,6 +274,7 @@ class PyprojectPackageGenerator(BasePyprojectPackageGenerator):
255
274
  st.pop('cexts', None)
256
275
 
257
276
  self._move_dict_key(st, 'find_packages', pyp_dct, 'tool.setuptools.packages.find')
277
+ self._move_dict_key(st, 'package_data', pyp_dct, 'tool.setuptools.package-data')
258
278
 
259
279
  mani_in = st.pop('manifest_in', None)
260
280
 
@@ -337,9 +357,14 @@ class _PyprojectCextPackageGenerator(BasePyprojectPackageGenerator):
337
357
  st = specs.setuptools
338
358
  pyp_dct['tool.setuptools'] = st
339
359
 
340
- st.pop('cexts', None)
341
- st.pop('find_packages', None)
342
- st.pop('manifest_in', None)
360
+ for k in [
361
+ 'cexts',
362
+
363
+ 'find_packages',
364
+ 'package_data',
365
+ 'manifest_in',
366
+ ]:
367
+ st.pop(k, None)
343
368
 
344
369
  pyp_dct['tool.setuptools.packages.find'] = {
345
370
  'include': [],
omdev/scripts/interp.py CHANGED
@@ -3,7 +3,7 @@
3
3
  # @omlish-lite
4
4
  # @omlish-script
5
5
  # @omdev-amalg-output ../interp/cli.py
6
- # ruff: noqa: UP007
6
+ # ruff: noqa: N802 UP006 UP007 UP036
7
7
  """
8
8
  TODO:
9
9
  - partial best-matches - '3.12'
@@ -31,6 +31,17 @@ import threading
31
31
  import typing as ta
32
32
 
33
33
 
34
+ ########################################
35
+
36
+
37
+ if sys.version_info < (3, 8):
38
+ raise OSError(
39
+ f'Requires python (3, 8), got {sys.version_info} from {sys.executable}') # noqa
40
+
41
+
42
+ ########################################
43
+
44
+
34
45
  # ../../versioning/versions.py
35
46
  VersionLocalType = ta.Tuple[ta.Union[int, str], ...]
36
47
  VersionCmpPrePostDevType = ta.Union['InfinityVersionType', 'NegativeInfinityVersionType', ta.Tuple[str, int]]
@@ -72,7 +83,6 @@ CallableVersionOperator = ta.Callable[['Version', str], bool]
72
83
  # Apache License, Version 2.0, and the BSD License. See the LICENSE file in the root of this repository for complete
73
84
  # details.
74
85
  # https://github.com/pypa/packaging/blob/2c885fe91a54559e2382902dce28428ad2887be5/src/packaging/version.py
75
- # ruff: noqa: UP006 UP007
76
86
 
77
87
 
78
88
  ##
@@ -479,7 +489,6 @@ class cached_nullary: # noqa
479
489
 
480
490
  ########################################
481
491
  # ../../../omlish/lite/check.py
482
- # ruff: noqa: UP006 UP007
483
492
 
484
493
 
485
494
  def check_isinstance(v: T, spec: ta.Union[ta.Type[T], tuple]) -> T:
@@ -539,7 +548,6 @@ json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON
539
548
 
540
549
  ########################################
541
550
  # ../../../omlish/lite/reflect.py
542
- # ruff: noqa: UP006
543
551
 
544
552
 
545
553
  _GENERIC_ALIAS_TYPES = (
@@ -640,7 +648,6 @@ def is_sunder(name: str) -> bool:
640
648
  # Apache License, Version 2.0, and the BSD License. See the LICENSE file in the root of this repository for complete
641
649
  # details.
642
650
  # https://github.com/pypa/packaging/blob/2c885fe91a54559e2382902dce28428ad2887be5/src/packaging/specifiers.py
643
- # ruff: noqa: UP006 UP007
644
651
 
645
652
 
646
653
  ##
@@ -1146,7 +1153,6 @@ TODO:
1146
1153
  - translate json keys
1147
1154
  - debug
1148
1155
  """
1149
- # ruff: noqa: UP006 UP007 N802
1150
1156
 
1151
1157
 
1152
1158
  log = logging.getLogger(__name__)
@@ -1345,46 +1351,51 @@ def configure_standard_logging(
1345
1351
  *,
1346
1352
  json: bool = False,
1347
1353
  target: ta.Optional[logging.Logger] = None,
1348
- no_check: bool = False,
1354
+ force: bool = False,
1349
1355
  ) -> ta.Optional[StandardLogHandler]:
1350
- if target is None:
1351
- target = logging.root
1356
+ logging._acquireLock() # type: ignore # noqa
1357
+ try:
1358
+ if target is None:
1359
+ target = logging.root
1352
1360
 
1353
- #
1361
+ #
1354
1362
 
1355
- if not no_check:
1356
- if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
1357
- return None
1363
+ if not force:
1364
+ if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
1365
+ return None
1358
1366
 
1359
- #
1367
+ #
1360
1368
 
1361
- handler = logging.StreamHandler()
1369
+ handler = logging.StreamHandler()
1362
1370
 
1363
- #
1371
+ #
1364
1372
 
1365
- formatter: logging.Formatter
1366
- if json:
1367
- formatter = JsonLogFormatter()
1368
- else:
1369
- formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
1370
- handler.setFormatter(formatter)
1373
+ formatter: logging.Formatter
1374
+ if json:
1375
+ formatter = JsonLogFormatter()
1376
+ else:
1377
+ formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
1378
+ handler.setFormatter(formatter)
1371
1379
 
1372
- #
1380
+ #
1373
1381
 
1374
- handler.addFilter(TidLogFilter())
1382
+ handler.addFilter(TidLogFilter())
1375
1383
 
1376
- #
1384
+ #
1377
1385
 
1378
- target.addHandler(handler)
1386
+ target.addHandler(handler)
1379
1387
 
1380
- #
1388
+ #
1381
1389
 
1382
- if level is not None:
1383
- target.setLevel(level)
1390
+ if level is not None:
1391
+ target.setLevel(level)
1384
1392
 
1385
- #
1393
+ #
1394
+
1395
+ return StandardLogHandler(handler)
1386
1396
 
1387
- return StandardLogHandler(handler)
1397
+ finally:
1398
+ logging._releaseLock() # type: ignore # noqa
1388
1399
 
1389
1400
 
1390
1401
  ########################################
@@ -1407,7 +1418,6 @@ def check_runtime_version() -> None:
1407
1418
 
1408
1419
  ########################################
1409
1420
  # ../types.py
1410
- # ruff: noqa: UP006
1411
1421
 
1412
1422
 
1413
1423
  # See https://peps.python.org/pep-3149/
@@ -1499,7 +1509,6 @@ class Interp:
1499
1509
 
1500
1510
  ########################################
1501
1511
  # ../../../omlish/lite/subprocesses.py
1502
- # ruff: noqa: UP006 UP007
1503
1512
 
1504
1513
 
1505
1514
  ##
@@ -1606,7 +1615,6 @@ def subprocess_try_output_str(*args: str, **kwargs: ta.Any) -> ta.Optional[str]:
1606
1615
 
1607
1616
  ########################################
1608
1617
  # ../inspect.py
1609
- # ruff: noqa: UP006 UP007
1610
1618
 
1611
1619
 
1612
1620
  @dc.dataclass(frozen=True)
@@ -1770,7 +1778,6 @@ TODO:
1770
1778
  - optionally install / upgrade pyenv itself
1771
1779
  - new vers dont need these custom mac opts, only run on old vers
1772
1780
  """
1773
- # ruff: noqa: UP006 UP007
1774
1781
 
1775
1782
 
1776
1783
  ##
@@ -2165,7 +2172,6 @@ TODO:
2165
2172
  - python, python3, python3.12, ...
2166
2173
  - check if path py's are venvs: sys.prefix != sys.base_prefix
2167
2174
  """
2168
- # ruff: noqa: UP006 UP007
2169
2175
 
2170
2176
 
2171
2177
  ##
@@ -2274,7 +2280,6 @@ class SystemInterpProvider(InterpProvider):
2274
2280
 
2275
2281
  ########################################
2276
2282
  # ../resolvers.py
2277
- # ruff: noqa: UP006 UP007
2278
2283
 
2279
2284
 
2280
2285
  INTERP_PROVIDER_TYPES_BY_NAME: ta.Mapping[str, ta.Type[InterpProvider]] = {