omdev 0.0.0.dev7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/__about__.py +35 -0
- omdev/__init__.py +0 -0
- omdev/amalg/__init__.py +0 -0
- omdev/amalg/__main__.py +4 -0
- omdev/amalg/amalg.py +513 -0
- omdev/classdot.py +61 -0
- omdev/cmake.py +164 -0
- omdev/exts/__init__.py +0 -0
- omdev/exts/_distutils/__init__.py +10 -0
- omdev/exts/_distutils/build_ext.py +367 -0
- omdev/exts/_distutils/compilers/__init__.py +3 -0
- omdev/exts/_distutils/compilers/ccompiler.py +1032 -0
- omdev/exts/_distutils/compilers/options.py +80 -0
- omdev/exts/_distutils/compilers/unixccompiler.py +385 -0
- omdev/exts/_distutils/dir_util.py +76 -0
- omdev/exts/_distutils/errors.py +62 -0
- omdev/exts/_distutils/extension.py +107 -0
- omdev/exts/_distutils/file_util.py +216 -0
- omdev/exts/_distutils/modified.py +47 -0
- omdev/exts/_distutils/spawn.py +103 -0
- omdev/exts/_distutils/sysconfig.py +349 -0
- omdev/exts/_distutils/util.py +201 -0
- omdev/exts/_distutils/version.py +308 -0
- omdev/exts/build.py +43 -0
- omdev/exts/cmake.py +195 -0
- omdev/exts/importhook.py +88 -0
- omdev/exts/scan.py +74 -0
- omdev/interp/__init__.py +1 -0
- omdev/interp/__main__.py +4 -0
- omdev/interp/cli.py +63 -0
- omdev/interp/inspect.py +105 -0
- omdev/interp/providers.py +67 -0
- omdev/interp/pyenv.py +353 -0
- omdev/interp/resolvers.py +76 -0
- omdev/interp/standalone.py +187 -0
- omdev/interp/system.py +125 -0
- omdev/interp/types.py +92 -0
- omdev/mypy/__init__.py +0 -0
- omdev/mypy/debug.py +86 -0
- omdev/pyproject/__init__.py +1 -0
- omdev/pyproject/__main__.py +4 -0
- omdev/pyproject/cli.py +319 -0
- omdev/pyproject/configs.py +97 -0
- omdev/pyproject/ext.py +107 -0
- omdev/pyproject/pkg.py +196 -0
- omdev/scripts/__init__.py +0 -0
- omdev/scripts/execrss.py +19 -0
- omdev/scripts/findimports.py +62 -0
- omdev/scripts/findmagic.py +70 -0
- omdev/scripts/interp.py +2118 -0
- omdev/scripts/pyproject.py +3584 -0
- omdev/scripts/traceimport.py +502 -0
- omdev/tokens.py +42 -0
- omdev/toml/__init__.py +1 -0
- omdev/toml/parser.py +823 -0
- omdev/toml/writer.py +104 -0
- omdev/tools/__init__.py +0 -0
- omdev/tools/dockertools.py +81 -0
- omdev/tools/sqlrepl.py +193 -0
- omdev/versioning/__init__.py +1 -0
- omdev/versioning/specifiers.py +531 -0
- omdev/versioning/versions.py +416 -0
- omdev-0.0.0.dev7.dist-info/LICENSE +21 -0
- omdev-0.0.0.dev7.dist-info/METADATA +24 -0
- omdev-0.0.0.dev7.dist-info/RECORD +67 -0
- omdev-0.0.0.dev7.dist-info/WHEEL +5 -0
- omdev-0.0.0.dev7.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,3584 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# noinspection DuplicatedCode
|
|
3
|
+
# @omdev-amalg-output ../pyproject/cli.py
|
|
4
|
+
# ruff: noqa: UP006 UP007
|
|
5
|
+
"""
|
|
6
|
+
TODO:
|
|
7
|
+
- check / tests, src dir sets
|
|
8
|
+
- ci
|
|
9
|
+
- build / package / publish / version roll
|
|
10
|
+
- {pkg_name: [src_dirs]}, default excludes, generate MANIFST.in, ...
|
|
11
|
+
- env vars - PYTHONPATH
|
|
12
|
+
|
|
13
|
+
lookit:
|
|
14
|
+
- https://pdm-project.org/en/latest/
|
|
15
|
+
- https://rye.astral.sh/philosophy/
|
|
16
|
+
- https://github.com/indygreg/python-build-standalone/blob/main/pythonbuild/cpython.py
|
|
17
|
+
- https://astral.sh/blog/uv
|
|
18
|
+
- https://github.com/jazzband/pip-tools
|
|
19
|
+
- https://github.com/Osiris-Team/1JPM
|
|
20
|
+
- https://github.com/brettcannon/microvenv
|
|
21
|
+
- https://github.com/pypa/pipx
|
|
22
|
+
- https://github.com/tox-dev/tox/
|
|
23
|
+
"""
|
|
24
|
+
import abc
|
|
25
|
+
import argparse
|
|
26
|
+
import base64
|
|
27
|
+
import collections
|
|
28
|
+
import collections.abc
|
|
29
|
+
import dataclasses as dc
|
|
30
|
+
import datetime
|
|
31
|
+
import decimal
|
|
32
|
+
import enum
|
|
33
|
+
import fractions
|
|
34
|
+
import functools
|
|
35
|
+
import glob
|
|
36
|
+
import inspect
|
|
37
|
+
import itertools
|
|
38
|
+
import json
|
|
39
|
+
import logging
|
|
40
|
+
import os
|
|
41
|
+
import os.path
|
|
42
|
+
import re
|
|
43
|
+
import shlex
|
|
44
|
+
import shutil
|
|
45
|
+
import string
|
|
46
|
+
import subprocess
|
|
47
|
+
import sys
|
|
48
|
+
import types
|
|
49
|
+
import typing as ta
|
|
50
|
+
import uuid
|
|
51
|
+
import weakref # noqa
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
TomlParseFloat = ta.Callable[[str], ta.Any]
|
|
55
|
+
TomlKey = ta.Tuple[str, ...]
|
|
56
|
+
TomlPos = int # ta.TypeAlias
|
|
57
|
+
VersionLocalType = ta.Tuple[ta.Union[int, str], ...]
|
|
58
|
+
VersionCmpPrePostDevType = ta.Union['InfinityVersionType', 'NegativeInfinityVersionType', ta.Tuple[str, int]]
|
|
59
|
+
_VersionCmpLocalType0 = ta.Tuple[ta.Union[ta.Tuple[int, str], ta.Tuple['NegativeInfinityVersionType', ta.Union[int, str]]], ...] # noqa
|
|
60
|
+
VersionCmpLocalType = ta.Union['NegativeInfinityVersionType', _VersionCmpLocalType0]
|
|
61
|
+
VersionCmpKey = ta.Tuple[int, ta.Tuple[int, ...], VersionCmpPrePostDevType, VersionCmpPrePostDevType, VersionCmpPrePostDevType, VersionCmpLocalType] # noqa
|
|
62
|
+
VersionComparisonMethod = ta.Callable[[VersionCmpKey, VersionCmpKey], bool]
|
|
63
|
+
T = ta.TypeVar('T')
|
|
64
|
+
UnparsedVersion = ta.Union['Version', str]
|
|
65
|
+
UnparsedVersionVar = ta.TypeVar('UnparsedVersionVar', bound=UnparsedVersion)
|
|
66
|
+
CallableVersionOperator = ta.Callable[['Version', str], bool]
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
########################################
|
|
70
|
+
# ../../toml/parser.py
|
|
71
|
+
# SPDX-License-Identifier: MIT
|
|
72
|
+
# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
|
|
73
|
+
# Licensed to PSF under a Contributor Agreement.
|
|
74
|
+
#
|
|
75
|
+
# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
|
76
|
+
# --------------------------------------------
|
|
77
|
+
#
|
|
78
|
+
# 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization
|
|
79
|
+
# ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated
|
|
80
|
+
# documentation.
|
|
81
|
+
#
|
|
82
|
+
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive,
|
|
83
|
+
# royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative
|
|
84
|
+
# works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License
|
|
85
|
+
# Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
|
|
86
|
+
# 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; All
|
|
87
|
+
# Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.
|
|
88
|
+
#
|
|
89
|
+
# 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and
|
|
90
|
+
# wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in
|
|
91
|
+
# any such work a brief summary of the changes made to Python.
|
|
92
|
+
#
|
|
93
|
+
# 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES,
|
|
94
|
+
# EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY
|
|
95
|
+
# OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY
|
|
96
|
+
# RIGHTS.
|
|
97
|
+
#
|
|
98
|
+
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL
|
|
99
|
+
# DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF
|
|
100
|
+
# ADVISED OF THE POSSIBILITY THEREOF.
|
|
101
|
+
#
|
|
102
|
+
# 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
|
|
103
|
+
#
|
|
104
|
+
# 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint
|
|
105
|
+
# venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade
|
|
106
|
+
# name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
|
|
107
|
+
#
|
|
108
|
+
# 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this
|
|
109
|
+
# License Agreement.
|
|
110
|
+
#
|
|
111
|
+
# https://github.com/python/cpython/blob/f5009b69e0cd94b990270e04e65b9d4d2b365844/Lib/tomllib/_parser.py
|
|
112
|
+
# ruff: noqa: UP006 UP007
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
##
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
_TOML_TIME_RE_STR = r'([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?'
|
|
119
|
+
|
|
120
|
+
TOML_RE_NUMBER = re.compile(
|
|
121
|
+
r"""
|
|
122
|
+
0
|
|
123
|
+
(?:
|
|
124
|
+
x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex
|
|
125
|
+
|
|
|
126
|
+
b[01](?:_?[01])* # bin
|
|
127
|
+
|
|
|
128
|
+
o[0-7](?:_?[0-7])* # oct
|
|
129
|
+
)
|
|
130
|
+
|
|
|
131
|
+
[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part
|
|
132
|
+
(?P<floatpart>
|
|
133
|
+
(?:\.[0-9](?:_?[0-9])*)? # optional fractional part
|
|
134
|
+
(?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part
|
|
135
|
+
)
|
|
136
|
+
""",
|
|
137
|
+
flags=re.VERBOSE,
|
|
138
|
+
)
|
|
139
|
+
TOML_RE_LOCALTIME = re.compile(_TOML_TIME_RE_STR)
|
|
140
|
+
TOML_RE_DATETIME = re.compile(
|
|
141
|
+
rf"""
|
|
142
|
+
([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27
|
|
143
|
+
(?:
|
|
144
|
+
[Tt ]
|
|
145
|
+
{_TOML_TIME_RE_STR}
|
|
146
|
+
(?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset
|
|
147
|
+
)?
|
|
148
|
+
""",
|
|
149
|
+
flags=re.VERBOSE,
|
|
150
|
+
)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def toml_match_to_datetime(match: re.Match) -> ta.Union[datetime.datetime, datetime.date]:
    """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.

    Raises ValueError if the match does not correspond to a valid date or datetime.
    """
    (
        year_str,
        month_str,
        day_str,
        hour_str,
        minute_str,
        sec_str,
        micros_str,
        zulu_time,
        offset_sign_str,
        offset_hour_str,
        offset_minute_str,
    ) = match.groups()
    year, month, day = int(year_str), int(month_str), int(day_str)
    # No time component captured at all -> plain calendar date.
    if hour_str is None:
        return datetime.date(year, month, day)
    hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
    # Fractional seconds are captured as 1-6 digits; right-pad to microseconds.
    micros = int(micros_str.ljust(6, '0')) if micros_str else 0
    if offset_sign_str:
        tz: ta.Optional[datetime.tzinfo] = toml_cached_tz(
            offset_hour_str, offset_minute_str, offset_sign_str,
        )
    elif zulu_time:
        # Fix: `datetime.UTC` is a 3.11+ alias only; `datetime.timezone.utc` is
        # equivalent and works on the older runtimes this file otherwise supports
        # (cf. the `ta.Tuple`/`UP006 UP007` conventions used throughout).
        tz = datetime.timezone.utc
    else:  # local date-time
        tz = None
    return datetime.datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
@functools.lru_cache()  # noqa
def toml_cached_tz(hour_str: str, minute_str: str, sign_str: str) -> datetime.timezone:
    """Build (and memoize) a fixed-offset timezone from hour/minute/sign strings."""
    if sign_str == '+':
        sign = 1
    else:
        sign = -1
    delta = datetime.timedelta(hours=int(hour_str), minutes=int(minute_str))
    return datetime.timezone(sign * delta)
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def toml_match_to_localtime(match: re.Match) -> datetime.time:
    """Convert a `RE_LOCALTIME` match to a naive `datetime.time`."""
    hour_str, minute_str, sec_str, micros_str = match.groups()
    if micros_str:
        # 1-6 captured digits, right-padded to a microsecond count.
        micros = int(micros_str.ljust(6, '0'))
    else:
        micros = 0
    return datetime.time(int(hour_str), int(minute_str), int(sec_str), micros)
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def toml_match_to_number(match: re.Match, parse_float: TomlParseFloat) -> ta.Any:
    """Convert a `RE_NUMBER` match to an int, or to *parse_float*'s result for floats."""
    text = match.group()
    if not match.group('floatpart'):
        # base=0 lets int() honor the 0x/0o/0b prefixes as well as plain decimals.
        return int(text, 0)
    return parse_float(text)
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
# Chars 0x00-0x1F plus 0x7F (DEL): the control characters TOML forbids in various contexts.
TOML_ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))

# Neither of these sets include quotation mark or backslash. They are currently handled as separate cases in the parser
# functions.
TOML_ILLEGAL_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t')
TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t\n')

TOML_ILLEGAL_LITERAL_STR_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS

TOML_ILLEGAL_COMMENT_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS

# Inline whitespace per the TOML grammar: space and tab (newline is significant and handled separately).
TOML_WS = frozenset(' \t')
TOML_WS_AND_NEWLINE = TOML_WS | frozenset('\n')
# Characters permitted in a bare (unquoted) key.
TOML_BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + '-_')
# A key may begin with a bare-key char or with either kind of quote.
TOML_KEY_INITIAL_CHARS = TOML_BARE_KEY_CHARS | frozenset("\"'")
TOML_HEXDIGIT_CHARS = frozenset(string.hexdigits)

# Two-character escape sequences -> their single-character replacements (read-only view).
TOML_BASIC_STR_ESCAPE_REPLACEMENTS = types.MappingProxyType(
    {
        '\\b': '\u0008',  # backspace
        '\\t': '\u0009',  # tab
        '\\n': '\u000A',  # linefeed
        '\\f': '\u000C',  # form feed
        '\\r': '\u000D',  # carriage return
        '\\"': '\u0022',  # quote
        '\\\\': '\u005C',  # backslash
    },
)
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
class TomlDecodeError(ValueError):
    """An error raised if a document is not valid TOML."""
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def toml_load(fp: ta.BinaryIO, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]:
    """Parse TOML from a binary file object."""
    raw = fp.read()
    try:
        text = raw.decode()
    except AttributeError:
        # A text-mode file yields str, which has no .decode() - surface a helpful error instead.
        raise TypeError("File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`") from None
    return toml_loads(text, parse_float=parse_float)
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
def toml_loads(s: str, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]:  # noqa: C901
    """Parse TOML from a string.

    Returns the document as a plain dict. Raises TomlDecodeError (via
    toml_suffixed_err) on invalid input.
    """

    # The spec allows converting "\r\n" to "\n", even in string literals. Let's do so to simplify parsing.
    src = s.replace('\r\n', '\n')
    pos = 0
    out = TomlOutput(TomlNestedDict(), TomlFlags())
    # `header` tracks the namespace of the most recent [table] / [[array]] header.
    header: TomlKey = ()
    # Wrap the caller's parse_float so it cannot inject mutable containers into the doc.
    parse_float = toml_make_safe_parse_float(parse_float)

    # Parse one statement at a time (typically means one line in TOML source)
    while True:
        # 1. Skip line leading whitespace
        pos = toml_skip_chars(src, pos, TOML_WS)

        # 2. Parse rules. Expect one of the following:
        #    - end of file
        #    - end of line
        #    - comment
        #    - key/value pair
        #    - append dict to list (and move to its namespace)
        #    - create dict (and move to its namespace)
        #    Skip trailing whitespace when applicable.
        try:
            char = src[pos]
        except IndexError:
            break
        if char == '\n':
            pos += 1
            continue
        if char in TOML_KEY_INITIAL_CHARS:
            pos = toml_key_value_rule(src, pos, out, header, parse_float)
            pos = toml_skip_chars(src, pos, TOML_WS)
        elif char == '[':
            # Peek one char to distinguish "[table]" from "[[array-of-tables]]".
            try:
                second_char: ta.Optional[str] = src[pos + 1]
            except IndexError:
                second_char = None
            # Flags queued by dotted keys in the previous section become permanent now.
            out.flags.finalize_pending()
            if second_char == '[':
                pos, header = toml_create_list_rule(src, pos, out)
            else:
                pos, header = toml_create_dict_rule(src, pos, out)
            pos = toml_skip_chars(src, pos, TOML_WS)
        elif char != '#':
            raise toml_suffixed_err(src, pos, 'Invalid statement')

        # 3. Skip comment
        pos = toml_skip_comment(src, pos)

        # 4. Expect end of line or end of file
        try:
            char = src[pos]
        except IndexError:
            break
        if char != '\n':
            raise toml_suffixed_err(
                src, pos, 'Expected newline or end of document after a statement',
            )
        pos += 1

    return out.data.dict
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
class TomlFlags:
    """Flags that map to parsed keys/namespaces.

    Stored as a trie keyed by the parts of a TomlKey; each node carries a
    direct flag set, a recursive flag set (inherited by all descendants),
    and its child nodes.
    """

    # Marks an immutable namespace (inline array or inline table).
    FROZEN = 0
    # Marks a nest that has been explicitly created and can no longer be opened using the "[table]" syntax.
    EXPLICIT_NEST = 1

    def __init__(self) -> None:
        self._flags: ta.Dict[str, dict] = {}
        self._pending_flags: ta.Set[ta.Tuple[TomlKey, int]] = set()

    def add_pending(self, key: TomlKey, flag: int) -> None:
        """Queue a flag to be applied by the next finalize_pending() call."""
        self._pending_flags.add((key, flag))

    def finalize_pending(self) -> None:
        """Apply every queued flag non-recursively, then clear the queue."""
        for pending_key, pending_flag in self._pending_flags:
            self.set(pending_key, pending_flag, recursive=False)
        self._pending_flags.clear()

    def unset_all(self, key: TomlKey) -> None:
        """Drop all flags recorded at or below *key* (no-op if the path is absent)."""
        node = self._flags
        for part in key[:-1]:
            if part not in node:
                return
            node = node[part]['nested']
        node.pop(key[-1], None)

    def set(self, key: TomlKey, flag: int, *, recursive: bool) -> None:  # noqa: A003
        """Set *flag* on *key*, creating intermediate trie nodes as needed."""
        node = self._flags
        parents, stem = key[:-1], key[-1]
        for part in parents:
            node = node.setdefault(part, {'flags': set(), 'recursive_flags': set(), 'nested': {}})['nested']
        entry = node.setdefault(stem, {'flags': set(), 'recursive_flags': set(), 'nested': {}})
        entry['recursive_flags' if recursive else 'flags'].add(flag)

    def is_(self, key: TomlKey, flag: int) -> bool:
        """Report whether *flag* applies to *key*, directly or via a recursive ancestor."""
        if not key:
            return False  # document root has no flags
        node = self._flags
        for part in key[:-1]:
            if part not in node:
                return False
            inner = node[part]
            if flag in inner['recursive_flags']:
                return True
            node = inner['nested']
        stem = key[-1]
        if stem not in node:
            return False
        entry = node[stem]
        return flag in entry['flags'] or flag in entry['recursive_flags']
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
class TomlNestedDict:
    """Accumulates the parsed TOML document as nested plain dicts."""

    def __init__(self) -> None:
        # The parsed content of the TOML document
        self.dict: ta.Dict[str, ta.Any] = {}

    def get_or_create_nest(
            self,
            key: TomlKey,
            *,
            access_lists: bool = True,
    ) -> dict:
        """Walk to (creating as needed) the dict at *key*.

        When *access_lists* is true, an array-of-tables on the path resolves
        to its most recent item. Raises KeyError if a non-dict blocks the path.
        """
        node: ta.Any = self.dict
        for part in key:
            if part not in node:
                node[part] = {}
            node = node[part]
            if access_lists and isinstance(node, list):
                node = node[-1]
            if not isinstance(node, dict):
                raise KeyError('There is no nest behind this key')
        return node

    def append_nest_to_list(self, key: TomlKey) -> None:
        """Append a fresh empty table to the array-of-tables at *key*.

        Creates the array if absent; raises KeyError if *key* holds a non-list.
        """
        parent = self.get_or_create_nest(key[:-1])
        stem = key[-1]
        if stem not in parent:
            parent[stem] = [{}]
            return
        existing = parent[stem]
        if not isinstance(existing, list):
            raise KeyError('An object other than list found behind this key')
        existing.append({})
|
|
408
|
+
|
|
409
|
+
|
|
410
|
+
class TomlOutput(ta.NamedTuple):
    """Parser state bundle: the document being built plus its key/namespace flags."""

    data: TomlNestedDict
    flags: TomlFlags
|
413
|
+
|
|
414
|
+
|
|
415
|
+
def toml_skip_chars(src: str, pos: TomlPos, chars: ta.Iterable[str]) -> TomlPos:
    """Advance *pos* past any run of characters in *chars*, stopping at end of *src*."""
    end = len(src)
    while pos < end and src[pos] in chars:
        pos += 1
    return pos
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
def toml_skip_until(
        src: str,
        pos: TomlPos,
        expect: str,
        *,
        error_on: ta.FrozenSet[str],
        error_on_eof: bool,
) -> TomlPos:
    """Return the index of the next occurrence of *expect* at or after *pos*.

    If *expect* never occurs, return len(src) or raise when *error_on_eof*.
    Raise if any character from *error_on* appears before the target.
    """
    new_pos = src.find(expect, pos)
    if new_pos == -1:
        new_pos = len(src)
        if error_on_eof:
            raise toml_suffixed_err(src, new_pos, f'Expected {expect!r}') from None

    if not error_on.isdisjoint(src[pos:new_pos]):
        # Walk forward to pinpoint the offending character for the error message.
        while src[pos] not in error_on:
            pos += 1
        raise toml_suffixed_err(src, pos, f'Found invalid character {src[pos]!r}')
    return new_pos
|
|
444
|
+
|
|
445
|
+
|
|
446
|
+
def toml_skip_comment(src: str, pos: TomlPos) -> TomlPos:
    """If a '#' comment starts at *pos*, skip to end of line; otherwise return *pos* unchanged."""
    # Slicing (rather than indexing) makes end-of-string yield '' instead of raising.
    if src[pos:pos + 1] != '#':
        return pos
    return toml_skip_until(
        src, pos + 1, '\n', error_on=TOML_ILLEGAL_COMMENT_CHARS, error_on_eof=False,
    )
|
|
456
|
+
|
|
457
|
+
|
|
458
|
+
def toml_skip_comments_and_array_ws(src: str, pos: TomlPos) -> TomlPos:
    """Skip any interleaving of whitespace, newlines, and comments until no progress is made."""
    while True:
        start = pos
        pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
        pos = toml_skip_comment(src, pos)
        if pos == start:
            return pos
|
|
466
|
+
|
|
467
|
+
def toml_create_dict_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
    """Parse a `[table]` header at *pos*, create its namespace, and return (new_pos, key)."""
    pos += 1  # Skip "["
    pos = toml_skip_chars(src, pos, TOML_WS)
    pos, key = toml_parse_key(src, pos)

    # A table may be declared at most once, and never inside a frozen (inline) namespace.
    if out.flags.is_(key, TomlFlags.EXPLICIT_NEST) or out.flags.is_(key, TomlFlags.FROZEN):
        raise toml_suffixed_err(src, pos, f'Cannot declare {key} twice')
    out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
    try:
        out.data.get_or_create_nest(key)
    except KeyError:
        raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None

    if not src.startswith(']', pos):
        raise toml_suffixed_err(src, pos, "Expected ']' at the end of a table declaration")
    return pos + 1, key
|
|
483
|
+
|
|
484
|
+
|
|
485
|
+
def toml_create_list_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
    """Parse an `[[array-of-tables]]` header, append a new empty table, and return (new_pos, key)."""
    pos += 2  # Skip "[["
    pos = toml_skip_chars(src, pos, TOML_WS)
    pos, key = toml_parse_key(src, pos)

    if out.flags.is_(key, TomlFlags.FROZEN):
        raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
    # Free the namespace now that it points to another empty list item...
    out.flags.unset_all(key)
    # ...but this key precisely is still prohibited from table declaration
    out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
    try:
        out.data.append_nest_to_list(key)
    except KeyError:
        raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None

    if not src.startswith(']]', pos):
        raise toml_suffixed_err(src, pos, "Expected ']]' at the end of an array declaration")
    return pos + 2, key
|
|
504
|
+
|
|
505
|
+
|
|
506
|
+
def toml_key_value_rule(
        src: str,
        pos: TomlPos,
        out: TomlOutput,
        header: TomlKey,
        parse_float: TomlParseFloat,
) -> TomlPos:
    """Parse one `key = value` statement under the current *header* and store it into *out*.

    Enforces the TOML rules about redefining tables via dotted keys and about
    mutating frozen (inline) namespaces. Returns the position after the value.
    """
    pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
    key_parent, key_stem = key[:-1], key[-1]
    abs_key_parent = header + key_parent

    relative_path_cont_keys = (header + key[:i] for i in range(1, len(key)))
    for cont_key in relative_path_cont_keys:
        # Check that dotted key syntax does not redefine an existing table
        if out.flags.is_(cont_key, TomlFlags.EXPLICIT_NEST):
            raise toml_suffixed_err(src, pos, f'Cannot redefine namespace {cont_key}')
        # Containers in the relative path can't be opened with the table syntax or dotted key/value syntax in following
        # table sections.
        out.flags.add_pending(cont_key, TomlFlags.EXPLICIT_NEST)

    if out.flags.is_(abs_key_parent, TomlFlags.FROZEN):
        raise toml_suffixed_err(
            src,
            pos,
            f'Cannot mutate immutable namespace {abs_key_parent}',
        )

    try:
        nest = out.data.get_or_create_nest(abs_key_parent)
    except KeyError:
        raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
    if key_stem in nest:
        raise toml_suffixed_err(src, pos, 'Cannot overwrite a value')
    # Mark inline table and array namespaces recursively immutable
    if isinstance(value, (dict, list)):
        out.flags.set(header + key, TomlFlags.FROZEN, recursive=True)
    nest[key_stem] = value
    return pos
|
|
544
|
+
|
|
545
|
+
|
|
546
|
+
def toml_parse_key_value_pair(
        src: str,
        pos: TomlPos,
        parse_float: TomlParseFloat,
) -> ta.Tuple[TomlPos, TomlKey, ta.Any]:
    """Parse `key = value` at *pos*; return (new_pos, key_tuple, value)."""
    pos, key = toml_parse_key(src, pos)
    try:
        char: ta.Optional[str] = src[pos]
    except IndexError:
        char = None
    if char != '=':
        raise toml_suffixed_err(src, pos, "Expected '=' after a key in a key/value pair")
    pos += 1
    pos = toml_skip_chars(src, pos, TOML_WS)
    pos, value = toml_parse_value(src, pos, parse_float)
    return pos, key, value
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
def toml_parse_key(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, TomlKey]:
    """Parse a (possibly dotted) key at *pos*; return (new_pos, tuple_of_parts)."""
    pos, key_part = toml_parse_key_part(src, pos)
    key: TomlKey = (key_part,)
    pos = toml_skip_chars(src, pos, TOML_WS)
    # Keep consuming `.part` segments until something other than '.' follows.
    while True:
        try:
            char: ta.Optional[str] = src[pos]
        except IndexError:
            char = None
        if char != '.':
            return pos, key
        pos += 1
        pos = toml_skip_chars(src, pos, TOML_WS)
        pos, key_part = toml_parse_key_part(src, pos)
        key += (key_part,)
        pos = toml_skip_chars(src, pos, TOML_WS)
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
def toml_parse_key_part(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
    """Parse one key segment: bare, literal-quoted ('...'), or basic-quoted ("...")."""
    try:
        char: ta.Optional[str] = src[pos]
    except IndexError:
        char = None
    if char in TOML_BARE_KEY_CHARS:
        start_pos = pos
        pos = toml_skip_chars(src, pos, TOML_BARE_KEY_CHARS)
        return pos, src[start_pos:pos]
    if char == "'":
        return toml_parse_literal_str(src, pos)
    if char == '"':
        return toml_parse_one_line_basic_str(src, pos)
    raise toml_suffixed_err(src, pos, 'Invalid initial character for a key part')
|
|
596
|
+
|
|
597
|
+
|
|
598
|
+
def toml_parse_one_line_basic_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
    """Parse a single-line basic ("...") string; *pos* points at the opening quote."""
    return toml_parse_basic_str(src, pos + 1, multiline=False)
|
|
601
|
+
|
|
602
|
+
|
|
603
|
+
def toml_parse_array(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, list]:
    """Parse an inline array `[...]`; *pos* points at the opening bracket."""
    pos += 1
    array: list = []

    pos = toml_skip_comments_and_array_ws(src, pos)
    if src.startswith(']', pos):
        return pos + 1, array
    while True:
        pos, val = toml_parse_value(src, pos, parse_float)
        array.append(val)
        pos = toml_skip_comments_and_array_ws(src, pos)

        # Slicing yields '' at end-of-string rather than raising.
        c = src[pos:pos + 1]
        if c == ']':
            return pos + 1, array
        if c != ',':
            raise toml_suffixed_err(src, pos, 'Unclosed array')
        pos += 1

        # A trailing comma before the closing bracket is allowed.
        pos = toml_skip_comments_and_array_ws(src, pos)
        if src.startswith(']', pos):
            return pos + 1, array
|
|
625
|
+
|
|
626
|
+
|
|
627
|
+
def toml_parse_inline_table(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, dict]:
    """Parse an inline table `{...}`; *pos* points at the opening brace.

    Uses a private TomlNestedDict/TomlFlags pair so the inline table's
    freezing rules don't interact with the outer document's flags.
    """
    pos += 1
    nested_dict = TomlNestedDict()
    flags = TomlFlags()

    pos = toml_skip_chars(src, pos, TOML_WS)
    if src.startswith('}', pos):
        return pos + 1, nested_dict.dict
    while True:
        pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
        key_parent, key_stem = key[:-1], key[-1]
        if flags.is_(key, TomlFlags.FROZEN):
            raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
        try:
            nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
        except KeyError:
            raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
        if key_stem in nest:
            raise toml_suffixed_err(src, pos, f'Duplicate inline table key {key_stem!r}')
        nest[key_stem] = value
        pos = toml_skip_chars(src, pos, TOML_WS)
        c = src[pos:pos + 1]
        if c == '}':
            return pos + 1, nested_dict.dict
        if c != ',':
            raise toml_suffixed_err(src, pos, 'Unclosed inline table')
        # Container values become recursively immutable once the table moves past them.
        if isinstance(value, (dict, list)):
            flags.set(key, TomlFlags.FROZEN, recursive=True)
        pos += 1
        pos = toml_skip_chars(src, pos, TOML_WS)
|
|
657
|
+
|
|
658
|
+
|
|
659
|
+
def toml_parse_basic_str_escape(
        src: str,
        pos: TomlPos,
        *,
        multiline: bool = False,
) -> ta.Tuple[TomlPos, str]:
    """Parse one backslash escape sequence at *pos*; return (new_pos, replacement_text).

    In multiline mode, a backslash followed by (optional) whitespace and a
    newline is a "line-ending backslash" that swallows the following
    whitespace and produces no text.
    """
    escape_id = src[pos:pos + 2]
    pos += 2
    if multiline and escape_id in {'\\ ', '\\\t', '\\\n'}:
        # Skip whitespace until next non-whitespace character or end of the doc. Error if non-whitespace is found before
        # newline.
        if escape_id != '\\\n':
            pos = toml_skip_chars(src, pos, TOML_WS)
            try:
                char = src[pos]
            except IndexError:
                return pos, ''
            if char != '\n':
                raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string")
            pos += 1
        pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
        return pos, ''
    if escape_id == '\\u':
        return toml_parse_hex_char(src, pos, 4)
    if escape_id == '\\U':
        return toml_parse_hex_char(src, pos, 8)
    try:
        return pos, TOML_BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
    except KeyError:
        raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string") from None
|
|
689
|
+
|
|
690
|
+
|
|
691
|
+
def toml_parse_basic_str_escape_multiline(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
    """Parse a backslash escape inside a multiline basic string (enables line-ending-backslash trimming)."""
    return toml_parse_basic_str_escape(src, pos, multiline=True)
|
|
693
|
+
|
|
694
|
+
|
|
695
|
+
def toml_parse_hex_char(src: str, pos: TomlPos, hex_len: int) -> ta.Tuple[TomlPos, str]:
    """Decode exactly `hex_len` hex digits at `pos` into a single character."""
    digits = src[pos:pos + hex_len]
    if len(digits) != hex_len or not TOML_HEXDIGIT_CHARS.issuperset(digits):
        raise toml_suffixed_err(src, pos, 'Invalid hex value')
    pos += hex_len
    codepoint = int(digits, 16)
    # Surrogate codepoints are not representable characters.
    if not toml_is_unicode_scalar_value(codepoint):
        raise toml_suffixed_err(src, pos, 'Escaped character is not a Unicode scalar value')
    return pos, chr(codepoint)
|
|
704
|
+
|
|
705
|
+
|
|
706
|
+
def toml_parse_literal_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
    """Parse a one-line literal (single-quoted) string; `pos` points at the opening apostrophe."""
    body_start = pos + 1  # Character after the opening apostrophe
    end = toml_skip_until(
        src,
        body_start,
        "'",
        error_on=TOML_ILLEGAL_LITERAL_STR_CHARS,
        error_on_eof=True,
    )
    # Consume the closing apostrophe as well.
    return end + 1, src[body_start:end]
|
|
713
|
+
|
|
714
|
+
|
|
715
|
+
def toml_parse_multiline_str(src: str, pos: TomlPos, *, literal: bool) -> ta.Tuple[TomlPos, str]:
    """Parse a triple-quoted (multiline) string starting at `pos`; `literal` selects ''' vs triple-quote delimiters."""
    pos += 3
    # A newline immediately after the opening delimiter is trimmed.
    if src.startswith('\n', pos):
        pos += 1

    if literal:
        delim = "'"
        end_pos = toml_skip_until(
            src,
            pos,
            "'''",
            error_on=TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
            error_on_eof=True,
        )
        result = src[pos:end_pos]
        pos = end_pos + 3
    else:
        delim = '"'
        pos, result = toml_parse_basic_str(src, pos, multiline=True)

    # Add at maximum two extra apostrophes/quotes if the end sequence is 4 or 5 chars long instead of just 3.
    if not src.startswith(delim, pos):
        return pos, result
    pos += 1
    if not src.startswith(delim, pos):
        return pos, result + delim
    pos += 1
    return pos, result + (delim * 2)
|
|
743
|
+
|
|
744
|
+
|
|
745
|
+
def toml_parse_basic_str(src: str, pos: TomlPos, *, multiline: bool) -> ta.Tuple[TomlPos, str]:
    """Parse a double-quoted basic string body starting just inside the opening quote(s).

    Returns the position after the closing quote(s) and the unescaped string value.
    """
    if multiline:
        error_on = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
        parse_escapes = toml_parse_basic_str_escape_multiline
    else:
        error_on = TOML_ILLEGAL_BASIC_STR_CHARS
        parse_escapes = toml_parse_basic_str_escape
    result = ''
    start_pos = pos  # Start of the current run of literal (non-escape) characters
    while True:
        try:
            char = src[pos]
        except IndexError:
            raise toml_suffixed_err(src, pos, 'Unterminated string') from None
        if char == '"':
            if not multiline:
                return pos + 1, result + src[start_pos:pos]
            # In multiline mode only a full triple-quote terminates; a lone quote is literal content.
            if src.startswith('"""', pos):
                return pos + 3, result + src[start_pos:pos]
            pos += 1
            continue
        if char == '\\':
            # Flush the literal run seen so far, then decode the escape.
            result += src[start_pos:pos]
            pos, parsed_escape = parse_escapes(src, pos)
            result += parsed_escape
            start_pos = pos
            continue
        if char in error_on:
            raise toml_suffixed_err(src, pos, f'Illegal character {char!r}')
        pos += 1
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
def toml_parse_value(  # noqa: C901
        src: str,
        pos: TomlPos,
        parse_float: TomlParseFloat,
) -> ta.Tuple[TomlPos, ta.Any]:
    """Parse any TOML value (string, bool, array, inline table, date/time, or number) at `pos`.

    Returns the position after the value and the parsed Python object; raises via `toml_suffixed_err`
    when nothing valid matches.
    """
    try:
        char: ta.Optional[str] = src[pos]
    except IndexError:
        char = None

    # IMPORTANT: order conditions based on speed of checking and likelihood

    # Basic strings
    if char == '"':
        if src.startswith('"""', pos):
            return toml_parse_multiline_str(src, pos, literal=False)
        return toml_parse_one_line_basic_str(src, pos)

    # Literal strings
    if char == "'":
        if src.startswith("'''", pos):
            return toml_parse_multiline_str(src, pos, literal=True)
        return toml_parse_literal_str(src, pos)

    # Booleans
    if char == 't':
        if src.startswith('true', pos):
            return pos + 4, True
    if char == 'f':
        if src.startswith('false', pos):
            return pos + 5, False

    # Arrays
    if char == '[':
        return toml_parse_array(src, pos, parse_float)

    # Inline tables
    if char == '{':
        return toml_parse_inline_table(src, pos, parse_float)

    # Dates and times
    datetime_match = TOML_RE_DATETIME.match(src, pos)
    if datetime_match:
        try:
            datetime_obj = toml_match_to_datetime(datetime_match)
        except ValueError as e:
            raise toml_suffixed_err(src, pos, 'Invalid date or datetime') from e
        return datetime_match.end(), datetime_obj
    localtime_match = TOML_RE_LOCALTIME.match(src, pos)
    if localtime_match:
        return localtime_match.end(), toml_match_to_localtime(localtime_match)

    # Integers and "normal" floats. The regex will greedily match any type starting with a decimal char, so needs to be
    # located after handling of dates and times.
    number_match = TOML_RE_NUMBER.match(src, pos)
    if number_match:
        return number_match.end(), toml_match_to_number(number_match, parse_float)

    # Special floats
    first_three = src[pos:pos + 3]
    if first_three in {'inf', 'nan'}:
        return pos + 3, parse_float(first_three)
    first_four = src[pos:pos + 4]
    if first_four in {'-inf', '+inf', '-nan', '+nan'}:
        return pos + 4, parse_float(first_four)

    raise toml_suffixed_err(src, pos, 'Invalid value')
|
|
844
|
+
|
|
845
|
+
|
|
846
|
+
def toml_suffixed_err(src: str, pos: TomlPos, msg: str) -> TomlDecodeError:
    """Return a `TomlDecodeError` where error message is suffixed with coordinates in source."""
    if pos >= len(src):
        coords = 'end of document'
    else:
        line = src.count('\n', 0, pos) + 1
        # On the first line there is no preceding newline to anchor the column on.
        column = pos + 1 if line == 1 else pos - src.rindex('\n', 0, pos)
        coords = f'line {line}, column {column}'
    return TomlDecodeError(f'{msg} (at {coords})')
|
|
860
|
+
|
|
861
|
+
|
|
862
|
+
def toml_is_unicode_scalar_value(codepoint: int) -> bool:
    """True for Unicode scalar values: any codepoint in [0, 0x10FFFF] outside the surrogate range."""
    if codepoint < 0 or codepoint > 0x10FFFF:
        return False
    return not (0xD800 <= codepoint <= 0xDFFF)
|
|
864
|
+
|
|
865
|
+
|
|
866
|
+
def toml_make_safe_parse_float(parse_float: TomlParseFloat) -> TomlParseFloat:
    """Wrap `parse_float` so it can never return dicts or lists.

    Containers returned by `parse_float` would be confused with parsed TOML tables and arrays, so the
    wrapper raises ValueError for those instead. The default `float` callable never returns such types
    and is passed through unwrapped as an optimization.
    """
    if parse_float is float:
        return float

    def checked_parse_float(float_str: str) -> ta.Any:
        value = parse_float(float_str)
        if isinstance(value, (dict, list)):
            raise ValueError('parse_float must not return dicts or lists')  # noqa
        return value

    return checked_parse_float
|
|
883
|
+
|
|
884
|
+
|
|
885
|
+
########################################
|
|
886
|
+
# ../../versioning/versions.py
|
|
887
|
+
# Copyright (c) Donald Stufft and individual contributors.
|
|
888
|
+
# All rights reserved.
|
|
889
|
+
#
|
|
890
|
+
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
|
|
891
|
+
# following conditions are met:
|
|
892
|
+
#
|
|
893
|
+
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
|
|
894
|
+
# following disclaimer.
|
|
895
|
+
#
|
|
896
|
+
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
|
|
897
|
+
# following disclaimer in the documentation and/or other materials provided with the distribution.
|
|
898
|
+
#
|
|
899
|
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
|
900
|
+
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
901
|
+
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
902
|
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
903
|
+
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
|
904
|
+
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
905
|
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. This file is dual licensed under the terms of the
|
|
906
|
+
# Apache License, Version 2.0, and the BSD License. See the LICENSE file in the root of this repository for complete
|
|
907
|
+
# details.
|
|
908
|
+
# https://github.com/pypa/packaging/blob/2c885fe91a54559e2382902dce28428ad2887be5/src/packaging/version.py
|
|
909
|
+
# ruff: noqa: UP006 UP007
|
|
910
|
+
|
|
911
|
+
|
|
912
|
+
##
|
|
913
|
+
|
|
914
|
+
|
|
915
|
+
class InfinityVersionType:
    """Sentinel ordering greater than everything else; used as a filler in version comparison keys."""

    def __repr__(self) -> str:
        return 'Infinity'

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __eq__(self, other: object) -> bool:
        # Equal only to other instances of this sentinel type.
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> 'NegativeInfinityVersionType':
        return NegativeInfinityVersion
|
|
939
|
+
|
|
940
|
+
|
|
941
|
+
# Singleton "+infinity" sentinel instance.
InfinityVersion = InfinityVersionType()
|
|
942
|
+
|
|
943
|
+
|
|
944
|
+
class NegativeInfinityVersionType:
    """Sentinel ordering less than everything else; used as a filler in version comparison keys."""

    def __repr__(self) -> str:
        return '-Infinity'

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return True

    def __le__(self, other: object) -> bool:
        return True

    def __eq__(self, other: object) -> bool:
        # Equal only to other instances of this sentinel type.
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityVersionType:
        return InfinityVersion
|
|
968
|
+
|
|
969
|
+
|
|
970
|
+
# Singleton "-infinity" sentinel instance.
NegativeInfinityVersion = NegativeInfinityVersionType()
|
|
971
|
+
|
|
972
|
+
|
|
973
|
+
##
|
|
974
|
+
|
|
975
|
+
|
|
976
|
+
class _Version(ta.NamedTuple):
    # Parsed components of a version string.
    epoch: int  # Epoch number ("N!" prefix); 0 when absent
    release: ta.Tuple[int, ...]  # Dotted release numbers, e.g. (1, 2, 3)
    dev: ta.Optional[ta.Tuple[str, int]]  # Normalized dev segment, e.g. ('dev', 7), or None
    pre: ta.Optional[ta.Tuple[str, int]]  # Normalized pre-release, e.g. ('rc', 1), or None
    post: ta.Optional[ta.Tuple[str, int]]  # Normalized post-release, e.g. ('post', 2), or None
    local: ta.Optional[VersionLocalType]  # Parsed local version segments, or None
|
|
983
|
+
|
|
984
|
+
|
|
985
|
+
class InvalidVersion(ValueError):  # noqa
    """Raised when a string cannot be parsed as a valid version."""

    pass
|
|
987
|
+
|
|
988
|
+
|
|
989
|
+
class _BaseVersion:
|
|
990
|
+
_key: ta.Tuple[ta.Any, ...]
|
|
991
|
+
|
|
992
|
+
def __hash__(self) -> int:
|
|
993
|
+
return hash(self._key)
|
|
994
|
+
|
|
995
|
+
def __lt__(self, other: '_BaseVersion') -> bool:
|
|
996
|
+
if not isinstance(other, _BaseVersion):
|
|
997
|
+
return NotImplemented # type: ignore
|
|
998
|
+
return self._key < other._key
|
|
999
|
+
|
|
1000
|
+
def __le__(self, other: '_BaseVersion') -> bool:
|
|
1001
|
+
if not isinstance(other, _BaseVersion):
|
|
1002
|
+
return NotImplemented # type: ignore
|
|
1003
|
+
return self._key <= other._key
|
|
1004
|
+
|
|
1005
|
+
def __eq__(self, other: object) -> bool:
|
|
1006
|
+
if not isinstance(other, _BaseVersion):
|
|
1007
|
+
return NotImplemented
|
|
1008
|
+
return self._key == other._key
|
|
1009
|
+
|
|
1010
|
+
def __ge__(self, other: '_BaseVersion') -> bool:
|
|
1011
|
+
if not isinstance(other, _BaseVersion):
|
|
1012
|
+
return NotImplemented # type: ignore
|
|
1013
|
+
return self._key >= other._key
|
|
1014
|
+
|
|
1015
|
+
def __gt__(self, other: '_BaseVersion') -> bool:
|
|
1016
|
+
if not isinstance(other, _BaseVersion):
|
|
1017
|
+
return NotImplemented # type: ignore
|
|
1018
|
+
return self._key > other._key
|
|
1019
|
+
|
|
1020
|
+
def __ne__(self, other: object) -> bool:
|
|
1021
|
+
if not isinstance(other, _BaseVersion):
|
|
1022
|
+
return NotImplemented
|
|
1023
|
+
return self._key != other._key
|
|
1024
|
+
|
|
1025
|
+
|
|
1026
|
+
# Verbose-mode regex source for version strings; `Version` compiles it with
# re.VERBOSE | re.IGNORECASE. Groups: epoch, release, pre(_l/_n), post(_n1/_l/_n2), dev(_l/_n), local.
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?
        (?P<release>[0-9]+(?:\.[0-9]+)*)
        (?P<pre>
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?
"""

# Public alias of the pattern source.
VERSION_PATTERN = _VERSION_PATTERN
|
|
1058
|
+
|
|
1059
|
+
|
|
1060
|
+
class Version(_BaseVersion):
    """A parsed version string; ordering uses the normalized key built by `_version_cmpkey`."""

    _regex = re.compile(r'^\s*' + VERSION_PATTERN + r'\s*$', re.VERBOSE | re.IGNORECASE)
    _key: VersionCmpKey

    def __init__(self, version: str) -> None:
        """Parse `version`, raising InvalidVersion when it does not match the version grammar."""
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the raw parsed components.
        self._version = _Version(
            epoch=int(match.group('epoch')) if match.group('epoch') else 0,
            release=tuple(int(i) for i in match.group('release').split('.')),
            pre=_parse_letter_version(match.group('pre_l'), match.group('pre_n')),
            post=_parse_letter_version(match.group('post_l'), match.group('post_n1') or match.group('post_n2')),
            dev=_parse_letter_version(match.group('dev_l'), match.group('dev_n')),
            local=_parse_local_version(match.group('local')),
        )

        # Precompute the comparison key used by _BaseVersion's ordering methods.
        self._key = _version_cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """Reassemble the canonical string form from the parsed components."""
        parts = []

        if self.epoch != 0:
            parts.append(f'{self.epoch}!')

        parts.append('.'.join(str(x) for x in self.release))

        if self.pre is not None:
            parts.append(''.join(str(x) for x in self.pre))

        if self.post is not None:
            parts.append(f'.post{self.post}')

        if self.dev is not None:
            parts.append(f'.dev{self.dev}')

        if self.local is not None:
            parts.append(f'+{self.local}')

        return ''.join(parts)

    @property
    def epoch(self) -> int:
        return self._version.epoch

    @property
    def release(self) -> ta.Tuple[int, ...]:
        return self._version.release

    @property
    def pre(self) -> ta.Optional[ta.Tuple[str, int]]:
        return self._version.pre

    @property
    def post(self) -> ta.Optional[int]:
        # Numeric component only; the letter is normalized to 'post' by _parse_letter_version.
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> ta.Optional[int]:
        # Numeric component only.
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> ta.Optional[str]:
        if self._version.local:
            return '.'.join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        # Everything before any '+local' suffix.
        return str(self).split('+', 1)[0]

    @property
    def base_version(self) -> str:
        # Epoch and release only, with no pre/post/dev/local suffixes.
        parts = []

        if self.epoch != 0:
            parts.append(f'{self.epoch}!')

        parts.append('.'.join(str(x) for x in self.release))

        return ''.join(parts)

    @property
    def is_prerelease(self) -> bool:
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        return self.dev is not None

    @property
    def major(self) -> int:
        # First release number, defaulting to 0 when absent.
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        return self.release[2] if len(self.release) >= 3 else 0
|
|
1177
|
+
|
|
1178
|
+
|
|
1179
|
+
def _parse_letter_version(
|
|
1180
|
+
letter: ta.Optional[str],
|
|
1181
|
+
number: ta.Union[str, bytes, ta.SupportsInt, None],
|
|
1182
|
+
) -> ta.Optional[ta.Tuple[str, int]]:
|
|
1183
|
+
if letter:
|
|
1184
|
+
if number is None:
|
|
1185
|
+
number = 0
|
|
1186
|
+
|
|
1187
|
+
letter = letter.lower()
|
|
1188
|
+
if letter == 'alpha':
|
|
1189
|
+
letter = 'a'
|
|
1190
|
+
elif letter == 'beta':
|
|
1191
|
+
letter = 'b'
|
|
1192
|
+
elif letter in ['c', 'pre', 'preview']:
|
|
1193
|
+
letter = 'rc'
|
|
1194
|
+
elif letter in ['rev', 'r']:
|
|
1195
|
+
letter = 'post'
|
|
1196
|
+
|
|
1197
|
+
return letter, int(number)
|
|
1198
|
+
if not letter and number:
|
|
1199
|
+
letter = 'post'
|
|
1200
|
+
return letter, int(number)
|
|
1201
|
+
|
|
1202
|
+
return None
|
|
1203
|
+
|
|
1204
|
+
|
|
1205
|
+
# Splits a local version label on any of '.', '_', '-'.
_local_version_separators = re.compile(r'[\._-]')
|
|
1206
|
+
|
|
1207
|
+
|
|
1208
|
+
def _parse_local_version(local: ta.Optional[str]) -> ta.Optional[VersionLocalType]:
    """Split a local version label into a tuple of ints (numeric parts) and lowercased strings."""
    if local is None:
        return None
    parts = _local_version_separators.split(local)
    return tuple(int(p) if p.isdigit() else p.lower() for p in parts)
|
|
1215
|
+
|
|
1216
|
+
|
|
1217
|
+
def _version_cmpkey(
        epoch: int,
        release: ta.Tuple[int, ...],
        pre: ta.Optional[ta.Tuple[str, int]],
        post: ta.Optional[ta.Tuple[str, int]],
        dev: ta.Optional[ta.Tuple[str, int]],
        local: ta.Optional[VersionLocalType],
) -> VersionCmpKey:
    """Build the tuple used to order versions, substituting infinity sentinels for absent segments."""

    # Compare releases with trailing zeros removed, so e.g. 1.0 and 1.0.0 compare equal.
    trimmed = list(release)
    while trimmed and trimmed[-1] == 0:
        trimmed.pop()
    _release = tuple(trimmed)

    # A dev-only version sorts before any pre/post/final counterpart; otherwise a missing
    # pre segment sorts after any present one.
    _pre: VersionCmpPrePostDevType
    if pre is None and post is None and dev is not None:
        _pre = NegativeInfinityVersion
    elif pre is None:
        _pre = InfinityVersion
    else:
        _pre = pre

    # Versions without a post segment sort before those with one.
    _post: VersionCmpPrePostDevType = NegativeInfinityVersion if post is None else post

    # Versions without a dev segment sort after those with one.
    _dev: VersionCmpPrePostDevType = InfinityVersion if dev is None else dev

    _local: VersionCmpLocalType
    if local is None:
        # Versions without a local segment sort before those with one.
        _local = NegativeInfinityVersion
    else:
        # Pair each part so that integer parts sort after string parts.
        _local = tuple((i, '') if isinstance(i, int) else (NegativeInfinityVersion, i) for i in local)

    return epoch, _release, _pre, _post, _dev, _local
|
|
1250
|
+
|
|
1251
|
+
|
|
1252
|
+
##
|
|
1253
|
+
|
|
1254
|
+
|
|
1255
|
+
def canonicalize_version(
        version: ta.Union[Version, str],
        *,
        strip_trailing_zero: bool = True,
) -> str:
    """Render a version in canonical form; unparseable strings are returned unchanged."""
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Not a valid version at all: leave the string as-is.
            return version
    else:
        parsed = version

    pieces: ta.List[str] = []

    if parsed.epoch != 0:
        pieces.append(f'{parsed.epoch}!')

    release_segment = '.'.join(str(x) for x in parsed.release)
    if strip_trailing_zero:
        # Drop trailing ".0" groups, e.g. "1.0.0" -> "1".
        release_segment = re.sub(r'(\.0)+$', '', release_segment)
    pieces.append(release_segment)

    if parsed.pre is not None:
        pieces.append(''.join(str(x) for x in parsed.pre))

    if parsed.post is not None:
        pieces.append(f'.post{parsed.post}')

    if parsed.dev is not None:
        pieces.append(f'.dev{parsed.dev}')

    if parsed.local is not None:
        pieces.append(f'+{parsed.local}')

    return ''.join(pieces)
|
|
1291
|
+
|
|
1292
|
+
|
|
1293
|
+
########################################
|
|
1294
|
+
# ../../../omlish/lite/cached.py
|
|
1295
|
+
|
|
1296
|
+
|
|
1297
|
+
class cached_nullary:  # noqa
    """Cache the result of a zero-argument callable after its first invocation.

    Also usable as a method decorator: attribute access installs a per-instance wrapper into the
    instance's `__dict__`, shadowing this descriptor. Any arguments passed to the wrapper are ignored.
    """

    def __init__(self, fn):
        super().__init__()
        self._fn = fn
        # A unique sentinel distinguishes "not yet computed" from any real return value (including None).
        self._missing = object()
        self._value = self._missing
        functools.update_wrapper(self, fn)

    def __call__(self, *args, **kwargs):  # noqa
        if self._value is self._missing:
            self._value = self._fn()
        return self._value

    def __get__(self, instance, owner):  # noqa
        # Bind the wrapped function to the instance, re-wrap it, and cache the wrapper on the instance.
        bound_fn = self._fn.__get__(instance, owner)
        wrapper = self.__class__(bound_fn)
        instance.__dict__[self._fn.__name__] = wrapper
        return wrapper
|
|
1312
|
+
|
|
1313
|
+
|
|
1314
|
+
########################################
|
|
1315
|
+
# ../../../omlish/lite/check.py
|
|
1316
|
+
# ruff: noqa: UP006 UP007
|
|
1317
|
+
|
|
1318
|
+
|
|
1319
|
+
def check_isinstance(v: T, spec: ta.Union[ta.Type[T], tuple]) -> T:
    """Return `v` unchanged, raising TypeError unless it is an instance of `spec`."""
    if isinstance(v, spec):
        return v
    raise TypeError(v)
|
|
1323
|
+
|
|
1324
|
+
|
|
1325
|
+
def check_not_isinstance(v: T, spec: ta.Union[type, tuple]) -> T:
    """Return `v` unchanged, raising TypeError if it IS an instance of `spec`."""
    if not isinstance(v, spec):
        return v
    raise TypeError(v)
|
|
1329
|
+
|
|
1330
|
+
|
|
1331
|
+
def check_not_none(v: ta.Optional[T]) -> T:
    """Return `v`, raising ValueError if it is None."""
    if v is not None:
        return v
    raise ValueError
|
|
1335
|
+
|
|
1336
|
+
|
|
1337
|
+
def check_not(v: ta.Any) -> ta.Any:
    """Return `v` unchanged, raising ValueError if it is truthy (i.e. assert that `v` is falsy).

    The return annotation was previously `None`, but the function returns the checked (falsy) value,
    consistent with the other `check_*` helpers; the annotation now reflects that.
    """
    if v:
        raise ValueError(v)
    return v
|
|
1341
|
+
|
|
1342
|
+
|
|
1343
|
+
########################################
|
|
1344
|
+
# ../../../omlish/lite/json.py
|
|
1345
|
+
|
|
1346
|
+
|
|
1347
|
+
##
|
|
1348
|
+
|
|
1349
|
+
|
|
1350
|
+
# Shared kwargs for human-readable ("pretty") JSON output.
JSON_PRETTY_INDENT = 2

JSON_PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
    indent=JSON_PRETTY_INDENT,
)

# `json.dump` writes to a file object and returns None; the annotations below previously claimed
# `bytes` (copied from the `dumps` variants) and have been corrected.
json_dump_pretty: ta.Callable[..., None] = functools.partial(json.dump, **JSON_PRETTY_KWARGS)  # type: ignore
json_dumps_pretty: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_PRETTY_KWARGS)


##


# Shared kwargs for minimal ("compact") JSON output: no indentation, no spaces after separators.
JSON_COMPACT_SEPARATORS = (',', ':')

JSON_COMPACT_KWARGS: ta.Mapping[str, ta.Any] = dict(
    indent=None,
    separators=JSON_COMPACT_SEPARATORS,
)

json_dump_compact: ta.Callable[..., None] = functools.partial(json.dump, **JSON_COMPACT_KWARGS)  # type: ignore
json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
|
|
1372
|
+
|
|
1373
|
+
|
|
1374
|
+
########################################
|
|
1375
|
+
# ../../../omlish/lite/reflect.py
|
|
1376
|
+
# ruff: noqa: UP006
|
|
1377
|
+
|
|
1378
|
+
|
|
1379
|
+
# Internal `typing` alias classes used to recognize generic aliases like `ta.List[int]`.
# `_SpecialGenericAlias` does not exist on all Pythons, hence the conditional.
_GENERIC_ALIAS_TYPES = (
    ta._GenericAlias,  # type: ignore  # noqa
    *([ta._SpecialGenericAlias] if hasattr(ta, '_SpecialGenericAlias') else []),  # noqa
)
|
|
1383
|
+
|
|
1384
|
+
|
|
1385
|
+
def is_generic_alias(obj, *, origin: ta.Any = None) -> bool:
    """True if `obj` is a typing generic alias, optionally requiring a specific `origin` type."""
    if not isinstance(obj, _GENERIC_ALIAS_TYPES):
        return False
    return origin is None or ta.get_origin(obj) is origin
|
|
1390
|
+
|
|
1391
|
+
|
|
1392
|
+
# Convenience predicates for the two most commonly-tested alias origins.
is_union_alias = functools.partial(is_generic_alias, origin=ta.Union)
is_callable_alias = functools.partial(is_generic_alias, origin=ta.Callable)
|
|
1394
|
+
|
|
1395
|
+
|
|
1396
|
+
def is_optional_alias(spec: ta.Any) -> bool:
    """True if `spec` is a two-arm Union including None (i.e. `Optional[X]`)."""
    if not isinstance(spec, _GENERIC_ALIAS_TYPES):  # noqa
        return False
    if ta.get_origin(spec) is not ta.Union:
        return False
    args = ta.get_args(spec)
    return len(args) == 2 and any(a in (None, type(None)) for a in args)
|
|
1403
|
+
|
|
1404
|
+
|
|
1405
|
+
def get_optional_alias_arg(spec: ta.Any) -> ta.Any:
    """Return the non-None arm of an `Optional[X]` alias; raises ValueError if there is not exactly one."""
    non_none = [a for a in ta.get_args(spec) if a not in (None, type(None))]
    [arg] = non_none
    return arg
|
|
1408
|
+
|
|
1409
|
+
|
|
1410
|
+
def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:
    """Yield every (transitive) subclass of `cls` exactly once, depth-first in definition order."""
    visited = set()
    stack = list(reversed(cls.__subclasses__()))
    while stack:
        sub = stack.pop()
        if sub in visited:
            continue
        visited.add(sub)
        yield sub
        # Push children reversed so they pop off in definition order.
        stack.extend(reversed(sub.__subclasses__()))
|
|
1420
|
+
|
|
1421
|
+
|
|
1422
|
+
########################################
|
|
1423
|
+
# ../../../omlish/lite/strings.py
|
|
1424
|
+
|
|
1425
|
+
|
|
1426
|
+
def camel_case(name: str) -> str:
    """Convert a snake_case `name` to CamelCase (each underscore-separated chunk capitalized)."""
    chunks = name.split('_')
    return ''.join(chunk.capitalize() for chunk in chunks)  # noqa
|
|
1428
|
+
|
|
1429
|
+
|
|
1430
|
+
def snake_case(name: str) -> str:
    """Convert a CamelCase / mixedCase `name` to snake_case.

    Splits before every uppercase character, lowercases the pieces, and joins with underscores.
    """
    # Use `ta.*` generics rather than `list[int | None]` for consistency with the rest of this file,
    # which deliberately targets pre-PEP-585/604 typing (see the `noqa: UP006 UP007` markers).
    uppers: ta.List[ta.Optional[int]] = [i for i, c in enumerate(name) if c.isupper()]
    return '_'.join([name[l:r].lower() for l, r in zip([None, *uppers], [*uppers, None])]).strip('_')
|
|
1433
|
+
|
|
1434
|
+
|
|
1435
|
+
def is_dunder(name: str) -> bool:
    """True for "double-underscore" names like `__init__`: leading/trailing `__` around a non-underscore core."""
    return (
        name[:2] == '__' and
        name[-2:] == '__' and
        name[2:3] != '_' and
        name[-3:-2] != '_' and
        len(name) > 4
    )
|
|
1442
|
+
|
|
1443
|
+
|
|
1444
|
+
def is_sunder(name: str) -> bool:
    """True for "single-underscore" names like `_sunder_`: one leading and one trailing underscore.

    Note: like the original, raises IndexError on an empty string (indexes `name[0]` first).
    """
    return (
        name[0] == '_' and
        name[-1] == '_' and
        name[1:2] != '_' and
        name[-2:-1] != '_' and
        len(name) > 2
    )
|
|
1451
|
+
|
|
1452
|
+
|
|
1453
|
+
########################################
|
|
1454
|
+
# ../../versioning/specifiers.py
|
|
1455
|
+
# Copyright (c) Donald Stufft and individual contributors.
|
|
1456
|
+
# All rights reserved.
|
|
1457
|
+
#
|
|
1458
|
+
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
|
|
1459
|
+
# following conditions are met:
|
|
1460
|
+
#
|
|
1461
|
+
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
|
|
1462
|
+
# following disclaimer.
|
|
1463
|
+
#
|
|
1464
|
+
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
|
|
1465
|
+
# following disclaimer in the documentation and/or other materials provided with the distribution.
|
|
1466
|
+
#
|
|
1467
|
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
|
1468
|
+
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
1469
|
+
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
1470
|
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
1471
|
+
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
|
1472
|
+
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
1473
|
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. This file is dual licensed under the terms of the
|
|
1474
|
+
# Apache License, Version 2.0, and the BSD License. See the LICENSE file in the root of this repository for complete
|
|
1475
|
+
# details.
|
|
1476
|
+
# https://github.com/pypa/packaging/blob/2c885fe91a54559e2382902dce28428ad2887be5/src/packaging/specifiers.py
|
|
1477
|
+
# ruff: noqa: UP006 UP007
|
|
1478
|
+
|
|
1479
|
+
|
|
1480
|
+
##
|
|
1481
|
+
|
|
1482
|
+
|
|
1483
|
+
def _coerce_version(version: UnparsedVersion) -> Version:
    """Return `version` as a `Version`, parsing it if given as a string."""
    if isinstance(version, Version):
        return version
    return Version(version)
|
|
1487
|
+
|
|
1488
|
+
|
|
1489
|
+
class InvalidSpecifier(ValueError):  # noqa
    """Raised when a specifier string cannot be parsed."""

    pass
|
|
1491
|
+
|
|
1492
|
+
|
|
1493
|
+
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface for version specifiers."""

    @abc.abstractmethod
    def __str__(self) -> str:
        """Return the string form of this specifier."""
        raise NotImplementedError

    @abc.abstractmethod
    def __hash__(self) -> int:
        raise NotImplementedError

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def prereleases(self) -> ta.Optional[bool]:
        """Whether prerelease versions are considered matches; None when not explicitly set."""
        raise NotImplementedError

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    def contains(self, item: str, prereleases: ta.Optional[bool] = None) -> bool:
        """Whether the version string `item` satisfies this specifier."""
        raise NotImplementedError

    @abc.abstractmethod
    def filter(
            self,
            iterable: ta.Iterable[UnparsedVersionVar],
            prereleases: ta.Optional[bool] = None,
    ) -> ta.Iterator[UnparsedVersionVar]:
        """Yield the members of `iterable` that satisfy this specifier."""
        raise NotImplementedError
|
|
1526
|
+
|
|
1527
|
+
|
|
1528
|
+
class Specifier(BaseSpecifier):
    """A single PEP 440 version-specifier clause, e.g. ``>=1.2`` or ``==1.0.*``.

    Vendored from ``packaging.specifiers.Specifier``. Parsing is driven by a
    verbose regex; comparison is dispatched by operator via ``_get_operator``.
    """

    # Matches the comparison operator; ordering puts multi-char operators first.
    _operator_regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        """

    # Matches the version part; lookbehinds select the grammar allowed for
    # the operator just matched (=== is arbitrary, ==/!= allow .* and local
    # versions, ~= requires at least two release segments, everything else
    # takes a plain PEP 440 public version).
    _version_regex_str = r"""
        (?P<version>
            (?:
                (?<====)
                \s*
                [^\s;)]*
            )
            |
            (?:
                (?<===|!=)
                \s*
                v?
                (?:[0-9]+!)?
                [0-9]+(?:\.[0-9]+)*
                (?:
                    \.\*
                    |
                    (?:
                        [-_\.]?
                        (alpha|beta|preview|pre|a|b|c|rc)
                        [-_\.]?
                        [0-9]*
                    )?
                    (?:
                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                    )?
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)?
                )?
            )
            |
            (?:
                (?<=~=)
                \s*
                v?
                (?:[0-9]+!)?
                [0-9]+(?:\.[0-9]+)+
                (?:
                    [-_\.]?
                    (alpha|beta|preview|pre|a|b|c|rc)
                    [-_\.]?
                    [0-9]*
                )?
                (?:
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?
            )
            |
            (?:
                (?<!==|!=|~=)
                \s*
                v?
                (?:[0-9]+!)?
                [0-9]+(?:\.[0-9]+)*
                (?:
                    [-_\.]?
                    (alpha|beta|preview|pre|a|b|c|rc)
                    [-_\.]?
                    [0-9]*
                )?
                (?:
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?
            )
        )
        """

    _regex = re.compile(
        r'^\s*' + _operator_regex_str + _version_regex_str + r'\s*$',
        re.VERBOSE | re.IGNORECASE,
    )

    # Operator string -> suffix of the `_compare_*` method implementing it.
    OPERATORS: ta.ClassVar[ta.Mapping[str, str]] = {
        '~=': 'compatible',
        '==': 'equal',
        '!=': 'not_equal',
        '<=': 'less_than_equal',
        '>=': 'greater_than_equal',
        '<': 'less_than',
        '>': 'greater_than',
        '===': 'arbitrary',
    }

    def __init__(
            self,
            spec: str = '',
            prereleases: ta.Optional[bool] = None,
    ) -> None:
        """Parse *spec*; raises InvalidSpecifier on malformed input.

        *prereleases* overrides the default pre-release policy (None defers
        to heuristics in the ``prereleases`` property).
        """
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")

        # Stored as (operator, version) with surrounding whitespace removed.
        self._spec: ta.Tuple[str, str] = (
            match.group('operator').strip(),
            match.group('version').strip(),
        )

        self._prereleases = prereleases

    @property  # type: ignore
    def prereleases(self) -> bool:
        if self._prereleases is not None:
            return self._prereleases

        # Implicitly allow pre-releases only when the clause itself pins a
        # pre-release version (e.g. '>=1.0rc1').
        operator, version = self._spec
        if operator in ['==', '>=', '<=', '~=', '===']:
            # '==1.0.*' - strip the wildcard before parsing.
            if operator == '==' and version.endswith('.*'):
                version = version[:-2]

            if Version(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    @property
    def operator(self) -> str:
        """The comparison operator of this clause, e.g. '>='."""
        return self._spec[0]

    @property
    def version(self) -> str:
        """The version string of this clause, as written (whitespace-stripped)."""
        return self._spec[1]

    def __repr__(self) -> str:
        pre = (
            f', prereleases={self.prereleases!r}'
            if self._prereleases is not None
            else ''
        )

        return f'<{self.__class__.__name__}({str(self)!r}{pre})>'

    def __str__(self) -> str:
        return '{}{}'.format(*self._spec)

    @property
    def _canonical_spec(self) -> ta.Tuple[str, str]:
        # Canonical form used for hashing/equality; '~=' keeps trailing
        # zeros since they are significant for compatible-release matching.
        canonical_version = canonicalize_version(
            self._spec[1],
            strip_trailing_zero=(self._spec[0] != '~='),
        )
        return self._spec[0], canonical_version

    def __hash__(self) -> int:
        return hash(self._canonical_spec)

    def __eq__(self, other: object) -> bool:
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._canonical_spec == other._canonical_spec

    def _get_operator(self, op: str) -> CallableVersionOperator:
        """Resolve *op* to its bound `_compare_*` method via OPERATORS."""
        operator_callable: CallableVersionOperator = getattr(self, f'_compare_{self.OPERATORS[op]}')
        return operator_callable

    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
        # '~= X.Y' is equivalent to '>= X.Y, == X.*' (PEP 440): drop the
        # last release segment (and any suffixes) to build the prefix.
        prefix = _version_join(list(itertools.takewhile(_is_not_version_suffix, _version_split(spec)))[:-1])
        prefix += '.*'
        return self._get_operator('>=')(prospective, spec) and self._get_operator('==')(prospective, prefix)

    def _compare_equal(self, prospective: Version, spec: str) -> bool:
        if spec.endswith('.*'):
            # Wildcard match: compare only as many segments as the spec has,
            # zero-padding the prospective version as needed.
            normalized_prospective = canonicalize_version(prospective.public, strip_trailing_zero=False)
            normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
            split_spec = _version_split(normalized_spec)

            split_prospective = _version_split(normalized_prospective)
            padded_prospective, _ = _pad_version(split_prospective, split_spec)
            shortened_prospective = padded_prospective[: len(split_spec)]

            return shortened_prospective == split_spec

        else:
            spec_version = Version(spec)
            # A spec without a local segment ignores the prospective's local.
            if not spec_version.local:
                prospective = Version(prospective.public)
            return prospective == spec_version

    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
        return not self._compare_equal(prospective, spec)

    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
        return Version(prospective.public) <= Version(spec)

    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
        return Version(prospective.public) >= Version(spec)

    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
        spec = Version(spec_str)

        if not prospective < spec:
            return False

        # '< X' should not admit pre-releases of X itself unless the spec
        # explicitly names a pre-release.
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        return True

    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
        spec = Version(spec_str)

        if not prospective > spec:
            return False

        # '> X' should not admit post-releases of X unless the spec names one.
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Nor should it admit local versions of X itself.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        return True

    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
        # '===': plain case-insensitive string equality, no version semantics.
        return str(prospective).lower() == str(spec).lower()

    def __contains__(self, item: ta.Union[str, Version]) -> bool:
        return self.contains(item)

    def contains(self, item: UnparsedVersion, prereleases: ta.Optional[bool] = None) -> bool:
        """Return True if *item* (string or Version) satisfies this clause."""
        if prereleases is None:
            prereleases = self.prereleases

        normalized_item = _coerce_version(item)

        # Pre-releases are rejected outright unless explicitly allowed.
        if normalized_item.is_prerelease and not prereleases:
            return False

        operator_callable: CallableVersionOperator = self._get_operator(self.operator)
        return operator_callable(normalized_item, self.version)

    def filter(
            self,
            iterable: ta.Iterable[UnparsedVersionVar],
            prereleases: ta.Optional[bool] = None,
    ) -> ta.Iterator[UnparsedVersionVar]:
        """Yield items satisfying this clause.

        If nothing but pre-releases matched and pre-releases were not
        explicitly forbidden, fall back to yielding those pre-releases.
        """
        yielded = False
        found_prereleases = []

        kw = {'prereleases': prereleases if prereleases is not None else True}

        for version in iterable:
            parsed_version = _coerce_version(version)

            if self.contains(parsed_version, **kw):
                # Matching pre-releases are held back unless allowed; they
                # are only emitted if no final release matched at all.
                if parsed_version.is_prerelease and not (prereleases or self.prereleases):
                    found_prereleases.append(version)
                else:
                    yielded = True
                    yield version

        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
|
|
1800
|
+
|
|
1801
|
+
|
|
1802
|
+
# Splits a fused numeric+pre-release segment, e.g. '0rc1' -> ('0', 'rc1').
_version_prefix_regex = re.compile(r'^([0-9]+)((?:a|b|c|rc)[0-9]+)$')
|
|
1803
|
+
|
|
1804
|
+
|
|
1805
|
+
def _version_split(version: str) -> ta.List[str]:
|
|
1806
|
+
result: ta.List[str] = []
|
|
1807
|
+
|
|
1808
|
+
epoch, _, rest = version.rpartition('!')
|
|
1809
|
+
result.append(epoch or '0')
|
|
1810
|
+
|
|
1811
|
+
for item in rest.split('.'):
|
|
1812
|
+
match = _version_prefix_regex.search(item)
|
|
1813
|
+
if match:
|
|
1814
|
+
result.extend(match.groups())
|
|
1815
|
+
else:
|
|
1816
|
+
result.append(item)
|
|
1817
|
+
return result
|
|
1818
|
+
|
|
1819
|
+
|
|
1820
|
+
def _version_join(components: ta.List[str]) -> str:
|
|
1821
|
+
epoch, *rest = components
|
|
1822
|
+
return f"{epoch}!{'.'.join(rest)}"
|
|
1823
|
+
|
|
1824
|
+
|
|
1825
|
+
def _is_not_version_suffix(segment: str) -> bool:
|
|
1826
|
+
return not any(segment.startswith(prefix) for prefix in ('dev', 'a', 'b', 'rc', 'post'))
|
|
1827
|
+
|
|
1828
|
+
|
|
1829
|
+
def _pad_version(left: ta.List[str], right: ta.List[str]) -> ta.Tuple[ta.List[str], ta.List[str]]:
|
|
1830
|
+
left_split, right_split = [], []
|
|
1831
|
+
|
|
1832
|
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
|
1833
|
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
|
1834
|
+
|
|
1835
|
+
left_split.append(left[len(left_split[0]):])
|
|
1836
|
+
right_split.append(right[len(right_split[0]):])
|
|
1837
|
+
|
|
1838
|
+
left_split.insert(1, ['0'] * max(0, len(right_split[0]) - len(left_split[0])))
|
|
1839
|
+
right_split.insert(1, ['0'] * max(0, len(left_split[0]) - len(right_split[0])))
|
|
1840
|
+
|
|
1841
|
+
return (
|
|
1842
|
+
list(itertools.chain.from_iterable(left_split)),
|
|
1843
|
+
list(itertools.chain.from_iterable(right_split)),
|
|
1844
|
+
)
|
|
1845
|
+
|
|
1846
|
+
|
|
1847
|
+
class SpecifierSet(BaseSpecifier):
    """A comma-separated conjunction of ``Specifier`` clauses, e.g. '>=1.0,<2'.

    Vendored from ``packaging.specifiers.SpecifierSet``. A version is
    contained iff it satisfies every clause.
    """

    def __init__(
            self,
            specifiers: str = '',
            prereleases: ta.Optional[bool] = None,
    ) -> None:
        """Parse a comma-separated specifier string (empty clauses ignored)."""
        split_specifiers = [s.strip() for s in specifiers.split(',') if s.strip()]

        self._specs = frozenset(map(Specifier, split_specifiers))
        self._prereleases = prereleases

    @property
    def prereleases(self) -> ta.Optional[bool]:
        # Explicit override wins; an empty set is indeterminate (None);
        # otherwise allow pre-releases if any clause does.
        if self._prereleases is not None:
            return self._prereleases

        if not self._specs:
            return None

        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __repr__(self) -> str:
        pre = (
            f', prereleases={self.prereleases!r}'
            if self._prereleases is not None
            else ''
        )

        return f'<SpecifierSet({str(self)!r}{pre})>'

    def __str__(self) -> str:
        # Sorted so equal sets render identically.
        return ','.join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: ta.Union['SpecifierSet', str]) -> 'SpecifierSet':
        """Return the conjunction of two sets (union of their clauses)."""
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented  # type: ignore

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Pre-release overrides must agree; conflicting True/False is an error.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError('Cannot combine SpecifierSets with True and False prerelease overrides.')

        return specifier

    def __eq__(self, other: object) -> bool:
        if isinstance(other, (str, Specifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        return len(self._specs)

    def __iter__(self) -> ta.Iterator[Specifier]:
        return iter(self._specs)

    def __contains__(self, item: UnparsedVersion) -> bool:
        return self.contains(item)

    def contains(
            self,
            item: UnparsedVersion,
            prereleases: ta.Optional[bool] = None,
            installed: ta.Optional[bool] = None,
    ) -> bool:
        """Return True if *item* satisfies every clause in the set.

        With ``installed=True`` a pre-release item is compared by its base
        version (treats an already-installed pre-release as its release).
        """
        if not isinstance(item, Version):
            item = Version(item)

        if prereleases is None:
            prereleases = self.prereleases

        if not prereleases and item.is_prerelease:
            return False

        if installed and item.is_prerelease:
            item = Version(item.base_version)

        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
            self,
            iterable: ta.Iterable[UnparsedVersionVar],
            prereleases: ta.Optional[bool] = None,
    ) -> ta.Iterator[UnparsedVersionVar]:
        """Yield items satisfying all clauses.

        With clauses present, delegates to each clause's ``filter``. With an
        empty set, mimics Specifier.filter's pre-release fallback behavior.
        """
        if prereleases is None:
            prereleases = self.prereleases

        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iter(iterable)

        else:
            filtered: ta.List[UnparsedVersionVar] = []
            found_prereleases: ta.List[UnparsedVersionVar] = []

            for item in iterable:
                parsed_version = _coerce_version(item)

                if parsed_version.is_prerelease and not prereleases:
                    # Hold pre-releases back; only used if nothing else matched
                    # and no explicit pre-release policy was given.
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            if not filtered and found_prereleases and prereleases is None:
                return iter(found_prereleases)

            return iter(filtered)
|
|
1974
|
+
|
|
1975
|
+
|
|
1976
|
+
########################################
|
|
1977
|
+
# ../../../omlish/lite/logs.py
|
|
1978
|
+
"""
|
|
1979
|
+
TODO:
|
|
1980
|
+
- debug
|
|
1981
|
+
"""
|
|
1982
|
+
# ruff: noqa: UP007
|
|
1983
|
+
|
|
1984
|
+
|
|
1985
|
+
log = logging.getLogger(__name__)  # module-level logger
|
|
1986
|
+
|
|
1987
|
+
|
|
1988
|
+
class JsonLogFormatter(logging.Formatter):
    """Formatter rendering each ``LogRecord`` as a single compact JSON object."""

    # LogRecord attributes to serialize. The bool marks attributes that are
    # dropped from the output when their value is None (the optional
    # exception/stack fields).
    KEYS: ta.Mapping[str, bool] = {
        'name': False,
        'msg': False,
        'args': False,
        'levelname': False,
        'levelno': False,
        'pathname': False,
        'filename': False,
        'module': False,
        'exc_info': True,
        'exc_text': True,
        'stack_info': True,
        'lineno': False,
        'funcName': False,
        'created': False,
        'msecs': False,
        'relativeCreated': False,
        'thread': False,
        'threadName': False,
        'processName': False,
        'process': False,
    }

    def format(self, record: logging.LogRecord) -> str:
        # Build {attr: value}, omitting optional attrs whose value is None.
        # The single-element inner 'for v in [...]' binds v once per key.
        dct = {
            k: v
            for k, o in self.KEYS.items()
            for v in [getattr(record, k)]
            if not (o and v is None)
        }
        return json_dumps_compact(dct)
|
|
2021
|
+
|
|
2022
|
+
|
|
2023
|
+
def configure_standard_logging(level: ta.Union[int, str] = logging.INFO) -> None:
    """Attach a StreamHandler to the root logger and set its level.

    Idempotent with respect to handler installation: if the root logger
    already has a StreamHandler, no additional one is added. The original
    unconditionally appended a new handler per call, so configuring twice
    duplicated every emitted log line.

    Args:
        level: logging level for the root logger (numeric or level name).
    """
    if not any(isinstance(h, logging.StreamHandler) for h in logging.root.handlers):
        logging.root.addHandler(logging.StreamHandler())
    logging.root.setLevel(level)
|
|
2026
|
+
|
|
2027
|
+
|
|
2028
|
+
########################################
|
|
2029
|
+
# ../../../omlish/lite/marshal.py
|
|
2030
|
+
"""
|
|
2031
|
+
TODO:
|
|
2032
|
+
- pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
|
|
2033
|
+
"""
|
|
2034
|
+
# ruff: noqa: UP006 UP007
|
|
2035
|
+
|
|
2036
|
+
|
|
2037
|
+
##
|
|
2038
|
+
|
|
2039
|
+
|
|
2040
|
+
class ObjMarshaler(abc.ABC):
    """Converts values of one type to/from a JSON-compatible representation."""

    @abc.abstractmethod
    def marshal(self, o: ta.Any) -> ta.Any:
        """Convert *o* to a JSON-compatible value."""
        raise NotImplementedError

    @abc.abstractmethod
    def unmarshal(self, o: ta.Any) -> ta.Any:
        """Reconstruct the original value from its marshaled form *o*."""
        raise NotImplementedError
|
|
2048
|
+
|
|
2049
|
+
|
|
2050
|
+
class NopObjMarshaler(ObjMarshaler):
    """Identity marshaler for values already JSON-compatible (e.g. None)."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return o

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return o
|
|
2056
|
+
|
|
2057
|
+
|
|
2058
|
+
@dc.dataclass()
class ProxyObjMarshaler(ObjMarshaler):
    """Mutable forwarder used by get_obj_marshaler() to break recursive types.

    Registered before the real marshaler is built; its target `m` is filled
    in once construction completes.
    """

    m: ta.Optional[ObjMarshaler] = None  # delegate, set after construction

    def marshal(self, o: ta.Any) -> ta.Any:
        return check_not_none(self.m).marshal(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return check_not_none(self.m).unmarshal(o)
|
|
2067
|
+
|
|
2068
|
+
|
|
2069
|
+
@dc.dataclass(frozen=True)
class CastObjMarshaler(ObjMarshaler):
    """Marshals by identity; unmarshals by casting through `ty` (int, str, ...)."""

    ty: type  # constructor applied on unmarshal

    def marshal(self, o: ta.Any) -> ta.Any:
        return o

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(o)
|
|
2078
|
+
|
|
2079
|
+
|
|
2080
|
+
class DynamicObjMarshaler(ObjMarshaler):
    """Dispatches marshaling on the runtime type of each value (for ta.Any slots)."""

    def marshal(self, o: ta.Any) -> ta.Any:
        # Re-enter the registry using type(o).
        return marshal_obj(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        # No type information available - pass through unchanged.
        return o
|
|
2086
|
+
|
|
2087
|
+
|
|
2088
|
+
@dc.dataclass(frozen=True)
class Base64ObjMarshaler(ObjMarshaler):
    """Marshals bytes-like values as base64 ASCII strings."""

    ty: type  # bytes-like constructor applied after b64decode

    def marshal(self, o: ta.Any) -> ta.Any:
        return base64.b64encode(o).decode('ascii')

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(base64.b64decode(o))
|
|
2097
|
+
|
|
2098
|
+
|
|
2099
|
+
@dc.dataclass(frozen=True)
class EnumObjMarshaler(ObjMarshaler):
    """Marshals enum members by name (not value)."""

    ty: type  # the enum class

    def marshal(self, o: ta.Any) -> ta.Any:
        return o.name

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty.__members__[o]  # type: ignore
|
|
2108
|
+
|
|
2109
|
+
|
|
2110
|
+
@dc.dataclass(frozen=True)
class OptionalObjMarshaler(ObjMarshaler):
    """Passes None through; otherwise delegates to the wrapped item marshaler."""

    item: ObjMarshaler  # marshaler for the non-None payload

    def marshal(self, o: ta.Any) -> ta.Any:
        if o is None:
            return None
        return self.item.marshal(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        if o is None:
            return None
        return self.item.unmarshal(o)
|
|
2123
|
+
|
|
2124
|
+
|
|
2125
|
+
@dc.dataclass(frozen=True)
class MappingObjMarshaler(ObjMarshaler):
    """Marshals mappings entrywise via key and value marshalers."""

    ty: type  # concrete mapping type rebuilt on unmarshal
    km: ObjMarshaler  # key marshaler
    vm: ObjMarshaler  # value marshaler

    def marshal(self, o: ta.Any) -> ta.Any:
        return {self.km.marshal(k): self.vm.marshal(v) for k, v in o.items()}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty((self.km.unmarshal(k), self.vm.unmarshal(v)) for k, v in o.items())
|
|
2136
|
+
|
|
2137
|
+
|
|
2138
|
+
@dc.dataclass(frozen=True)
class IterableObjMarshaler(ObjMarshaler):
    """Marshals iterables elementwise to a JSON list."""

    ty: type  # concrete container type rebuilt on unmarshal
    item: ObjMarshaler  # element marshaler

    def marshal(self, o: ta.Any) -> ta.Any:
        return [self.item.marshal(e) for e in o]

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty(self.item.unmarshal(e) for e in o)
|
|
2148
|
+
|
|
2149
|
+
|
|
2150
|
+
@dc.dataclass(frozen=True)
class DataclassObjMarshaler(ObjMarshaler):
    """Marshals dataclass instances fieldwise to/from a dict."""

    ty: type  # the dataclass, reconstructed via keyword args on unmarshal
    fs: ta.Mapping[str, ObjMarshaler]  # per-field marshalers, keyed by field name

    def marshal(self, o: ta.Any) -> ta.Any:
        return {k: m.marshal(getattr(o, k)) for k, m in self.fs.items()}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        # NOTE(review): raises KeyError on dict keys not present in `fs`.
        return self.ty(**{k: self.fs[k].unmarshal(v) for k, v in o.items()})
|
|
2160
|
+
|
|
2161
|
+
|
|
2162
|
+
@dc.dataclass(frozen=True)
class PolymorphicObjMarshaler(ObjMarshaler):
    """Marshals an ABC hierarchy as a single-key dict: {tag: payload}.

    The tag (the concrete class's __qualname__ at registration) selects the
    implementation on unmarshal.
    """

    class Impl(ta.NamedTuple):
        ty: type  # concrete subclass
        tag: str  # wire tag for that subclass
        m: ObjMarshaler  # marshaler for that subclass

    impls_by_ty: ta.Mapping[type, Impl]
    impls_by_tag: ta.Mapping[str, Impl]

    def marshal(self, o: ta.Any) -> ta.Any:
        impl = self.impls_by_ty[type(o)]
        return {impl.tag: impl.m.marshal(o)}

    def unmarshal(self, o: ta.Any) -> ta.Any:
        # Expects exactly one {tag: payload} entry.
        [(t, v)] = o.items()
        impl = self.impls_by_tag[t]
        return impl.m.unmarshal(v)
|
|
2180
|
+
|
|
2181
|
+
|
|
2182
|
+
@dc.dataclass(frozen=True)
class DatetimeObjMarshaler(ObjMarshaler):
    """Marshals date/time/datetime values as ISO-8601 strings."""

    ty: type  # date, time, or datetime - provides fromisoformat()

    def marshal(self, o: ta.Any) -> ta.Any:
        return o.isoformat()

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return self.ty.fromisoformat(o)  # type: ignore
|
|
2191
|
+
|
|
2192
|
+
|
|
2193
|
+
class DecimalObjMarshaler(ObjMarshaler):
    """Marshals decimal.Decimal values as their exact string form."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return str(check_isinstance(o, decimal.Decimal))

    def unmarshal(self, v: ta.Any) -> ta.Any:
        return decimal.Decimal(check_isinstance(v, str))
|
|
2199
|
+
|
|
2200
|
+
|
|
2201
|
+
class FractionObjMarshaler(ObjMarshaler):
    """Marshals fractions.Fraction values as a [numerator, denominator] pair."""

    def marshal(self, o: ta.Any) -> ta.Any:
        fr = check_isinstance(o, fractions.Fraction)
        return [fr.numerator, fr.denominator]

    def unmarshal(self, v: ta.Any) -> ta.Any:
        num, denom = check_isinstance(v, list)
        return fractions.Fraction(num, denom)
|
|
2209
|
+
|
|
2210
|
+
|
|
2211
|
+
class UuidObjMarshaler(ObjMarshaler):
    """Marshals uuid.UUID values as their canonical string form."""

    def marshal(self, o: ta.Any) -> ta.Any:
        return str(o)

    def unmarshal(self, o: ta.Any) -> ta.Any:
        return uuid.UUID(o)
|
|
2217
|
+
|
|
2218
|
+
|
|
2219
|
+
# Default marshaler registry, keyed by type (or typing special form).
# Extended lazily by get_obj_marshaler() and explicitly via
# register_opj_marshaler().
_OBJ_MARSHALERS: ta.Dict[ta.Any, ObjMarshaler] = {
    **{t: NopObjMarshaler() for t in (type(None),)},
    **{t: CastObjMarshaler(t) for t in (int, float, str, bool)},
    **{t: Base64ObjMarshaler(t) for t in (bytes, bytearray)},
    **{t: IterableObjMarshaler(t, DynamicObjMarshaler()) for t in (list, tuple, set, frozenset)},
    **{t: MappingObjMarshaler(t, DynamicObjMarshaler(), DynamicObjMarshaler()) for t in (dict,)},

    ta.Any: DynamicObjMarshaler(),

    **{t: DatetimeObjMarshaler(t) for t in (datetime.date, datetime.time, datetime.datetime)},
    decimal.Decimal: DecimalObjMarshaler(),
    fractions.Fraction: FractionObjMarshaler(),
    uuid.UUID: UuidObjMarshaler(),
}
|
|
2233
|
+
|
|
2234
|
+
# Maps a typing generic-alias origin to the concrete mapping type used to
# rebuild values on unmarshal (abstract ABCs collapse to dict).
_OBJ_MARSHALER_GENERIC_MAPPING_TYPES: ta.Dict[ta.Any, type] = {
    **{t: t for t in (dict,)},
    **{t: dict for t in (collections.abc.Mapping, collections.abc.MutableMapping)},
}

# Same, for iterable origins (Set ABCs -> frozenset, Sequence ABCs -> tuple).
_OBJ_MARSHALER_GENERIC_ITERABLE_TYPES: ta.Dict[ta.Any, type] = {
    **{t: t for t in (list, tuple, set, frozenset)},
    **{t: frozenset for t in (collections.abc.Set, collections.abc.MutableSet)},
    **{t: tuple for t in (collections.abc.Sequence, collections.abc.MutableSequence)},
}
|
|
2244
|
+
|
|
2245
|
+
|
|
2246
|
+
def register_opj_marshaler(ty: ta.Any, m: ObjMarshaler) -> None:
    """Register *m* as the marshaler for type *ty*.

    Raises:
        KeyError: if a marshaler is already registered for *ty*.

    NOTE(review): 'opj' looks like a typo for 'obj', but the name is kept
    for compatibility with existing callers.
    """
    if ty in _OBJ_MARSHALERS:
        raise KeyError(ty)
    _OBJ_MARSHALERS[ty] = m
|
|
2250
|
+
|
|
2251
|
+
|
|
2252
|
+
def _make_obj_marshaler(ty: ta.Any) -> ObjMarshaler:
    """Build a marshaler for *ty* by structural inspection.

    Tried in order: ABC hierarchy (polymorphic by subclass), Enum,
    dataclass, generic mapping alias, generic iterable alias, Optional.

    Raises:
        TypeError: if no strategy applies to *ty*.
    """
    # An abstract base marshals polymorphically over its concrete subclasses,
    # tagged by __qualname__; abstract intermediates are skipped.
    if isinstance(ty, type) and abc.ABC in ty.__bases__:
        impls = [  # type: ignore
            PolymorphicObjMarshaler.Impl(
                ity,
                ity.__qualname__,
                get_obj_marshaler(ity),
            )
            for ity in deep_subclasses(ty)
            if abc.ABC not in ity.__bases__
        ]
        return PolymorphicObjMarshaler(
            {i.ty: i for i in impls},
            {i.tag: i for i in impls},
        )

    if isinstance(ty, type) and issubclass(ty, enum.Enum):
        return EnumObjMarshaler(ty)

    if dc.is_dataclass(ty):
        return DataclassObjMarshaler(
            ty,
            {f.name: get_obj_marshaler(f.type) for f in dc.fields(ty)},
        )

    # Generic aliases: resolve the origin to a concrete container type and
    # recurse on the type arguments.
    if is_generic_alias(ty):
        try:
            mt = _OBJ_MARSHALER_GENERIC_MAPPING_TYPES[ta.get_origin(ty)]
        except KeyError:
            pass
        else:
            k, v = ta.get_args(ty)
            return MappingObjMarshaler(mt, get_obj_marshaler(k), get_obj_marshaler(v))

        try:
            st = _OBJ_MARSHALER_GENERIC_ITERABLE_TYPES[ta.get_origin(ty)]
        except KeyError:
            pass
        else:
            [e] = ta.get_args(ty)
            return IterableObjMarshaler(st, get_obj_marshaler(e))

    # NOTE(review): any union is treated as Optional[X]; presumably
    # get_optional_alias_arg rejects non-Optional unions - confirm.
    if is_union_alias(ty):
        return OptionalObjMarshaler(get_obj_marshaler(get_optional_alias_arg(ty)))

    raise TypeError(ty)
|
|
2298
|
+
|
|
2299
|
+
|
|
2300
|
+
def get_obj_marshaler(ty: ta.Any) -> ObjMarshaler:
    """Return (building and caching if needed) the marshaler for *ty*.

    A ProxyObjMarshaler is pre-registered before construction so recursive
    type references resolve to the proxy instead of recursing forever; the
    proxy's target is patched in once the real marshaler exists.
    """
    try:
        return _OBJ_MARSHALERS[ty]
    except KeyError:
        pass

    # Pre-register a proxy to break cycles during construction.
    p = ProxyObjMarshaler()
    _OBJ_MARSHALERS[ty] = p
    try:
        m = _make_obj_marshaler(ty)
    except Exception:
        # Roll back the placeholder so a failed build can be retried.
        del _OBJ_MARSHALERS[ty]
        raise
    else:
        p.m = m
        _OBJ_MARSHALERS[ty] = m
        return m
|
|
2317
|
+
|
|
2318
|
+
|
|
2319
|
+
def marshal_obj(o: ta.Any, ty: ta.Any = None) -> ta.Any:
    """Marshal *o* to a JSON-compatible value, dispatching on *ty* or type(o)."""
    return get_obj_marshaler(ty if ty is not None else type(o)).marshal(o)
|
|
2321
|
+
|
|
2322
|
+
|
|
2323
|
+
def unmarshal_obj(o: ta.Any, ty: ta.Union[ta.Type[T], ta.Any]) -> T:
    """Reconstruct a value of type *ty* from its marshaled form *o*."""
    return get_obj_marshaler(ty).unmarshal(o)
|
|
2325
|
+
|
|
2326
|
+
|
|
2327
|
+
########################################
|
|
2328
|
+
# ../../../omlish/lite/runtime.py
|
|
2329
|
+
|
|
2330
|
+
|
|
2331
|
+
@cached_nullary
def is_debugger_attached() -> bool:
    """Best-effort check for an attached pydevd (PyCharm) debugger, cached after first call."""
    return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
|
|
2334
|
+
|
|
2335
|
+
|
|
2336
|
+
# Minimum interpreter version this script supports.
REQUIRED_PYTHON_VERSION = (3, 8)


def check_runtime_version() -> None:
    """Raise OSError if the running Python is older than REQUIRED_PYTHON_VERSION."""
    if sys.version_info >= REQUIRED_PYTHON_VERSION:
        return
    raise OSError(
        f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
|
|
2343
|
+
|
|
2344
|
+
|
|
2345
|
+
########################################
|
|
2346
|
+
# ../../interp/types.py
|
|
2347
|
+
# ruff: noqa: UP006
|
|
2348
|
+
|
|
2349
|
+
|
|
2350
|
+
# See https://peps.python.org/pep-3149/
|
|
2351
|
+
# See https://peps.python.org/pep-3149/
# InterpOpts attribute name -> its single-character ABI glyph.
INTERP_OPT_GLYPHS_BY_ATTR: ta.Mapping[str, str] = collections.OrderedDict([
    ('debug', 'd'),
    ('threaded', 't'),
])

# Reverse lookup: glyph -> InterpOpts attribute name.
INTERP_OPT_ATTRS_BY_GLYPH: ta.Mapping[str, str] = collections.OrderedDict(
    (g, a) for a, g in INTERP_OPT_GLYPHS_BY_ATTR.items()
)
|
|
2359
|
+
|
|
2360
|
+
|
|
2361
|
+
@dc.dataclass(frozen=True)
class InterpOpts:
    """Build-variant flags of a Python interpreter, rendered as ABI glyphs (PEP 3149)."""

    threaded: bool = False  # 't' glyph
    debug: bool = False  # 'd' glyph

    def __str__(self) -> str:
        # Render set flags as their glyphs, in registry order.
        return ''.join(g for a, g in INTERP_OPT_GLYPHS_BY_ATTR.items() if getattr(self, a))

    @classmethod
    def parse(cls, s: str) -> 'InterpOpts':
        """Parse a glyph string like 'dt' into an InterpOpts (KeyError on unknown glyphs)."""
        return cls(**{INTERP_OPT_ATTRS_BY_GLYPH[g]: True for g in s})

    @classmethod
    def parse_suffix(cls, s: str) -> ta.Tuple[str, 'InterpOpts']:
        """Strip trailing opt glyphs from *s*; return (remainder, parsed opts)."""
        kw = {}
        # Consume recognized glyphs from the end of the string.
        while s and (a := INTERP_OPT_ATTRS_BY_GLYPH.get(s[-1])):
            s, kw[a] = s[:-1], True
        return s, cls(**kw)
|
|
2379
|
+
|
|
2380
|
+
|
|
2381
|
+
@dc.dataclass(frozen=True)
class InterpVersion:
    """A concrete interpreter version plus build-variant flags, e.g. '3.12.0d'."""

    version: Version  # PEP 440 version
    opts: InterpOpts  # build-variant glyph flags

    def __str__(self) -> str:
        return str(self.version) + str(self.opts)

    @classmethod
    def parse(cls, s: str) -> 'InterpVersion':
        """Parse 'VERSION[glyphs]' (e.g. '3.12.0dt') into an InterpVersion."""
        s, o = InterpOpts.parse_suffix(s)
        v = Version(s)
        return cls(
            version=v,
            opts=o,
        )

    @classmethod
    def try_parse(cls, s: str) -> ta.Optional['InterpVersion']:
        """Like parse(), but returns None on unknown glyphs or invalid versions."""
        try:
            return cls.parse(s)
        except (KeyError, InvalidVersion):
            return None
|
|
2404
|
+
|
|
2405
|
+
|
|
2406
|
+
@dc.dataclass(frozen=True)
class InterpSpecifier:
    """A version-specifier clause plus exact build-variant flags to match."""

    specifier: Specifier  # PEP 440 version constraint
    opts: InterpOpts  # required build-variant flags (matched exactly)

    def __str__(self) -> str:
        return str(self.specifier) + str(self.opts)

    @classmethod
    def parse(cls, s: str) -> 'InterpSpecifier':
        """Parse e.g. '>=3.11' or '3.12d'; a bare version defaults to '~='."""
        s, o = InterpOpts.parse_suffix(s)
        if not any(s.startswith(o) for o in Specifier.OPERATORS):
            s = '~=' + s
        return cls(
            specifier=Specifier(s),
            opts=o,
        )

    def contains(self, iv: InterpVersion) -> bool:
        """True if *iv* satisfies the version clause and has exactly matching opts."""
        return self.specifier.contains(iv.version) and self.opts == iv.opts
|
|
2426
|
+
|
|
2427
|
+
|
|
2428
|
+
@dc.dataclass(frozen=True)
class Interp:
    """A resolved interpreter: an executable path and its version."""

    exe: str  # path to the python executable
    version: InterpVersion
|
|
2432
|
+
|
|
2433
|
+
|
|
2434
|
+
########################################
|
|
2435
|
+
# ../configs.py
|
|
2436
|
+
# ruff: noqa: UP006 UP007
|
|
2437
|
+
|
|
2438
|
+
|
|
2439
|
+
@dc.dataclass(frozen=True)
class VenvConfig:
    """Configuration of one venv entry in a pyproject config."""

    inherits: ta.Optional[ta.Sequence[str]] = None  # names of venvs to fill unset fields from
    interp: ta.Optional[str] = None  # interpreter spec, or '@alias' into the python versions map
    requires: ta.Optional[ta.List[str]] = None  # pip requirements
    docker: ta.Optional[str] = None  # NOTE(review): presumably an associated docker service - confirm
    srcs: ta.Optional[ta.List[str]] = None  # source paths; '@name' expands via PyprojectConfig.srcs
|
|
2446
|
+
|
|
2447
|
+
|
|
2448
|
+
@dc.dataclass(frozen=True)
class PyprojectConfig:
    """Top-level pyproject tool configuration."""

    srcs: ta.Mapping[str, ta.Sequence[str]]  # named source-list aliases, referenced as '@name'
    venvs: ta.Mapping[str, VenvConfig]

    venvs_dir: str = '.venvs'
    versions_file: ta.Optional[str] = '.versions'
|
|
2455
|
+
|
|
2456
|
+
|
|
2457
|
+
class PyprojectConfigPreparer:
    """Turns a raw config dict into a fully-resolved PyprojectConfig.

    Resolution steps: unmarshal, apply venv inheritance, expand '@' source
    aliases, and substitute '@' interpreter aliases from python_versions.
    """

    def __init__(
            self,
            *,
            python_versions: ta.Optional[ta.Mapping[str, str]] = None,
    ) -> None:
        super().__init__()

        # alias -> concrete interpreter version, used by _fixup_interp.
        self._python_versions = python_versions or {}

    def _inherit_venvs(self, m: ta.Mapping[str, VenvConfig]) -> ta.Mapping[str, VenvConfig]:
        """Resolve the 'inherits' chains: unset fields are filled from parents, recursively."""
        done: ta.Dict[str, VenvConfig] = {}

        def rec(k):
            # Memoized recursion; NOTE(review): no cycle detection - a cyclic
            # 'inherits' chain would recurse forever. Confirm inputs are acyclic.
            try:
                return done[k]
            except KeyError:
                pass

            c = m[k]
            kw = dc.asdict(c)
            for i in c.inherits or ():
                ic = rec(i)
                # Parent values only fill fields still unset on the child.
                kw.update({k: v for k, v in dc.asdict(ic).items() if v is not None and kw.get(k) is None})
            del kw['inherits']

            d = done[k] = VenvConfig(**kw)
            return d

        for k in m:
            rec(k)
        return done

    def _resolve_srcs(
            self,
            lst: ta.Sequence[str],
            aliases: ta.Mapping[str, ta.Sequence[str]],
    ) -> ta.List[str]:
        """Expand '@alias' entries (recursively, order-preserving, deduplicated)."""
        todo = list(reversed(lst))
        raw: ta.List[str] = []
        seen: ta.Set[str] = set()

        while todo:
            cur = todo.pop()
            if cur in seen:
                continue

            seen.add(cur)
            if not cur.startswith('@'):
                raw.append(cur)
                continue

            # Push the alias expansion reversed so it pops in original order.
            todo.extend(aliases[cur[1:]][::-1])

        return raw

    def _fixup_interp(self, s: ta.Optional[str]) -> ta.Optional[str]:
        """Replace an '@alias' interpreter reference with its configured version."""
        if not s or not s.startswith('@'):
            return s
        return self._python_versions[s[1:]]

    def prepare_config(self, dct: ta.Mapping[str, ta.Any]) -> PyprojectConfig:
        """Unmarshal *dct* and fully resolve its venvs (inheritance, srcs, interps)."""
        pcfg: PyprojectConfig = unmarshal_obj(dct, PyprojectConfig)

        ivs = dict(self._inherit_venvs(pcfg.venvs or {}))
        for k, v in ivs.items():
            v = dc.replace(v, srcs=self._resolve_srcs(v.srcs or [], pcfg.srcs or {}))
            v = dc.replace(v, interp=self._fixup_interp(v.interp))
            ivs[k] = v

        pcfg = dc.replace(pcfg, venvs=ivs)
        return pcfg
|
|
2529
|
+
|
|
2530
|
+
|
|
2531
|
+
########################################
|
|
2532
|
+
# ../../../omlish/lite/subprocesses.py
|
|
2533
|
+
# ruff: noqa: UP006 UP007
|
|
2534
|
+
|
|
2535
|
+
|
|
2536
|
+
##
|
|
2537
|
+
|
|
2538
|
+
|
|
2539
|
+
# When set, every exec-style invocation is routed through 'sh -c' (debug aid).
_SUBPROCESS_SHELL_WRAP_EXECS = False


def subprocess_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
    """Wrap an argv in an equivalent 'sh -c' invocation, shell-quoting each arg."""
    quoted = ' '.join(shlex.quote(a) for a in args)
    return ('sh', '-c', quoted)
|
|
2544
|
+
|
|
2545
|
+
|
|
2546
|
+
def subprocess_maybe_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
    """Shell-wrap the argv when globally enabled or a debugger is attached; else pass through."""
    if not (_SUBPROCESS_SHELL_WRAP_EXECS or is_debugger_attached()):
        return args
    return subprocess_shell_wrap_exec(*args)
|
|
2551
|
+
|
|
2552
|
+
|
|
2553
|
+
def _prepare_subprocess_invocation(
        *args: str,
        env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
        extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
        quiet: bool = False,
        shell: bool = False,
        **kwargs: ta.Any,
) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
    """Normalize the args/kwargs shared by the subprocess_* helpers.

    Layers extra_env over env (defaulting to os.environ), silences stderr when
    quiet is set and debug logging is off, and shell-wraps the argv when not
    already running through a shell.
    """
    log.debug(args)
    if extra_env:
        log.debug(extra_env)
        # Layer extra_env over the chosen base environment.
        env = {**(env if env is not None else os.environ), **extra_env}

    # Only silence stderr when the caller did not supply one and debug is off.
    if quiet and 'stderr' not in kwargs and not log.isEnabledFor(logging.DEBUG):
        kwargs['stderr'] = subprocess.DEVNULL

    if not shell:
        args = subprocess_maybe_shell_wrap_exec(*args)

    return args, dict(
        env=env,
        shell=shell,
        **kwargs,
    )
|
|
2580
|
+
|
|
2581
|
+
|
|
2582
|
+
def subprocess_check_call(*args: str, stdout=sys.stderr, **kwargs: ta.Any) -> None:
    """subprocess.check_call with shared preparation; stdout defaults to stderr."""
    prepped_args, prepped_kwargs = _prepare_subprocess_invocation(*args, stdout=stdout, **kwargs)
    return subprocess.check_call(prepped_args, **prepped_kwargs)  # type: ignore
|
|
2585
|
+
|
|
2586
|
+
|
|
2587
|
+
def subprocess_check_output(*args: str, **kwargs: ta.Any) -> bytes:
    """subprocess.check_output with the shared invocation preparation applied."""
    prepped_args, prepped_kwargs = _prepare_subprocess_invocation(*args, **kwargs)
    return subprocess.check_output(prepped_args, **prepped_kwargs)
|
|
2590
|
+
|
|
2591
|
+
|
|
2592
|
+
def subprocess_check_output_str(*args: str, **kwargs: ta.Any) -> str:
    """subprocess_check_output, decoded and whitespace-stripped."""
    out = subprocess_check_output(*args, **kwargs)
    return out.decode().strip()
|
|
2594
|
+
|
|
2595
|
+
|
|
2596
|
+
##
|
|
2597
|
+
|
|
2598
|
+
|
|
2599
|
+
# Exception types the subprocess_try_* helpers treat as an expected failure
# (missing binary, or the command itself exiting nonzero).
DEFAULT_SUBPROCESS_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
    FileNotFoundError,
    subprocess.CalledProcessError,
)
|
|
2603
|
+
|
|
2604
|
+
|
|
2605
|
+
def subprocess_try_call(
        *args: str,
        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
        **kwargs: ta.Any,
) -> bool:
    """Run the command, returning True on success and False on an expected failure."""
    try:
        subprocess_check_call(*args, **kwargs)
    except try_exceptions as e:  # noqa
        # Failure details are only interesting when debugging.
        if log.isEnabledFor(logging.DEBUG):
            log.exception('command failed')
        return False
    return True
|
|
2618
|
+
|
|
2619
|
+
|
|
2620
|
+
def subprocess_try_output(
        *args: str,
        try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
        **kwargs: ta.Any,
) -> ta.Optional[bytes]:
    """Like subprocess_check_output, but returns None on an expected failure."""
    try:
        out = subprocess_check_output(*args, **kwargs)
    except try_exceptions as e:  # noqa
        # Failure details are only interesting when debugging.
        if log.isEnabledFor(logging.DEBUG):
            log.exception('command failed')
        return None
    return out
|
|
2631
|
+
|
|
2632
|
+
|
|
2633
|
+
def subprocess_try_output_str(*args: str, **kwargs: ta.Any) -> ta.Optional[str]:
    """subprocess_try_output, decoded and stripped; None passes straight through."""
    out = subprocess_try_output(*args, **kwargs)
    if out is None:
        return None
    return out.decode().strip()
|
|
2636
|
+
|
|
2637
|
+
|
|
2638
|
+
########################################
|
|
2639
|
+
# ../../interp/inspect.py
|
|
2640
|
+
# ruff: noqa: UP006 UP007
|
|
2641
|
+
|
|
2642
|
+
|
|
2643
|
+
@dc.dataclass(frozen=True)
class InterpInspection:
    """Facts gathered by probing a python executable (see InterpInspector)."""

    exe: str
    version: Version

    version_str: str  # the probed interpreter's full sys.version text
    config_vars: ta.Mapping[str, str]  # its sysconfig.get_config_vars()
    prefix: str  # its sys.prefix
    base_prefix: str  # its sys.base_prefix

    @property
    def opts(self) -> InterpOpts:
        # Py_GIL_DISABLED marks a free-threaded build; Py_DEBUG a debug build.
        return InterpOpts(
            threaded=bool(self.config_vars.get('Py_GIL_DISABLED')),
            debug=bool(self.config_vars.get('Py_DEBUG')),
        )

    @property
    def iv(self) -> InterpVersion:
        return InterpVersion(
            version=self.version,
            opts=self.opts,
        )

    @property
    def is_venv(self) -> bool:
        # Inside a venv, sys.prefix diverges from sys.base_prefix.
        return self.prefix != self.base_prefix
|
|
2670
|
+
|
|
2671
|
+
|
|
2672
|
+
class InterpInspector:
    """Runs a small probe expression against python executables and caches the results."""

    def __init__(self) -> None:
        super().__init__()

        # exe path -> inspection result; None caches a failed inspection.
        self._cache: ta.Dict[str, ta.Optional[InterpInspection]] = {}

    # A single expression (so it can be passed to `python -c 'print(...)'`)
    # that JSON-dumps the facts InterpInspection needs.
    _RAW_INSPECTION_CODE = """
    __import__('json').dumps(dict(
        version_str=__import__('sys').version,
        prefix=__import__('sys').prefix,
        base_prefix=__import__('sys').base_prefix,
        config_vars=__import__('sysconfig').get_config_vars(),
    ))"""

    # Whitespace collapsed so the probe stays a one-line expression.
    _INSPECTION_CODE = ''.join(l.strip() for l in _RAW_INSPECTION_CODE.splitlines())

    @staticmethod
    def _build_inspection(
            exe: str,
            output: str,
    ) -> InterpInspection:
        """Build an InterpInspection from the probe's JSON output."""
        dct = json.loads(output)

        # sys.version begins with the bare version number, e.g. '3.12.1 (...)'.
        version = Version(dct['version_str'].split()[0])

        return InterpInspection(
            exe=exe,
            version=version,
            **{k: dct[k] for k in (
                'version_str',
                'prefix',
                'base_prefix',
                'config_vars',
            )},
        )

    @classmethod
    def running(cls) -> 'InterpInspection':
        """Inspect the currently running interpreter (in-process; no subprocess)."""
        # eval of an internal constant, not external input.
        return cls._build_inspection(sys.executable, eval(cls._INSPECTION_CODE))  # noqa

    def _inspect(self, exe: str) -> InterpInspection:
        output = subprocess_check_output(exe, '-c', f'print({self._INSPECTION_CODE})', quiet=True)
        return self._build_inspection(exe, output.decode())

    def inspect(self, exe: str) -> ta.Optional[InterpInspection]:
        """Inspect *exe*, caching the result; returns (and caches) None on failure."""
        try:
            return self._cache[exe]
        except KeyError:
            ret: ta.Optional[InterpInspection]
            try:
                ret = self._inspect(exe)
            except Exception as e:  # noqa
                if log.isEnabledFor(logging.DEBUG):
                    log.exception('Failed to inspect interp: %s', exe)
                ret = None
            self._cache[exe] = ret
            return ret
|
|
2730
|
+
|
|
2731
|
+
|
|
2732
|
+
# Shared module-level inspector instance, used as the default by the providers below.
INTERP_INSPECTOR = InterpInspector()
|
|
2733
|
+
|
|
2734
|
+
|
|
2735
|
+
########################################
|
|
2736
|
+
# ../../interp/providers.py
|
|
2737
|
+
"""
|
|
2738
|
+
TODO:
|
|
2739
|
+
- backends
|
|
2740
|
+
- local builds
|
|
2741
|
+
- deadsnakes?
|
|
2742
|
+
- loose versions
|
|
2743
|
+
"""
|
|
2744
|
+
|
|
2745
|
+
|
|
2746
|
+
##
|
|
2747
|
+
|
|
2748
|
+
|
|
2749
|
+
class InterpProvider(abc.ABC):
    """A source of python interpreters (running process, pyenv, system PATH, ...)."""

    # Registry key; auto-derived from the class name for concrete subclasses.
    name: ta.ClassVar[str]

    def __init_subclass__(cls, **kwargs: ta.Any) -> None:
        super().__init_subclass__(**kwargs)
        # Concrete subclasses get a default name: 'FooInterpProvider' -> 'foo'.
        if abc.ABC not in cls.__bases__ and 'name' not in cls.__dict__:
            sfx = 'InterpProvider'
            if not cls.__name__.endswith(sfx):
                raise NameError(cls)
            setattr(cls, 'name', snake_case(cls.__name__[:-len(sfx)]))

    @abc.abstractmethod
    def get_installed_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
        """Return already-installed versions that might satisfy *spec*."""
        raise NotImplementedError

    @abc.abstractmethod
    def get_installed_version(self, version: InterpVersion) -> Interp:
        """Return the installed Interp for an exact version; raises KeyError if absent."""
        raise NotImplementedError

    def get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
        """Versions this provider could install; none by default."""
        return []

    def install_version(self, version: InterpVersion) -> Interp:
        # Providers that cannot install raise TypeError.
        raise TypeError
|
|
2773
|
+
|
|
2774
|
+
|
|
2775
|
+
##
|
|
2776
|
+
|
|
2777
|
+
|
|
2778
|
+
class RunningInterpProvider(InterpProvider):
    """Provides exactly one interpreter: the one this script is running under."""

    @cached_nullary
    def version(self) -> InterpVersion:
        return InterpInspector.running().iv

    def get_installed_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
        return [self.version()]

    def get_installed_version(self, version: InterpVersion) -> Interp:
        # Only the running interpreter's own version can be served.
        if version != self.version():
            raise KeyError(version)
        return Interp(
            exe=sys.executable,
            version=self.version(),
        )
|
|
2793
|
+
|
|
2794
|
+
|
|
2795
|
+
########################################
|
|
2796
|
+
# ../../interp/pyenv.py
|
|
2797
|
+
"""
|
|
2798
|
+
TODO:
|
|
2799
|
+
- custom tags
|
|
2800
|
+
- optionally install / upgrade pyenv itself
|
|
2801
|
+
- new vers dont need these custom mac opts, only run on old vers
|
|
2802
|
+
"""
|
|
2803
|
+
# ruff: noqa: UP006 UP007
|
|
2804
|
+
|
|
2805
|
+
|
|
2806
|
+
##
|
|
2807
|
+
|
|
2808
|
+
|
|
2809
|
+
class Pyenv:
    """Thin wrapper around a pyenv installation (root discovery, version listing)."""

    def __init__(
            self,
            *,
            root: ta.Optional[str] = None,
    ) -> None:
        if root is not None and not (isinstance(root, str) and root):
            raise ValueError(f'pyenv_root: {root!r}')

        super().__init__()

        self._root_kw = root

    @cached_nullary
    def root(self) -> ta.Optional[str]:
        """Locate the pyenv root: explicit kwarg, `pyenv root`, or ~/.pyenv; None if absent."""
        if self._root_kw is not None:
            return self._root_kw

        if shutil.which('pyenv'):
            return subprocess_check_output_str('pyenv', 'root')

        d = os.path.expanduser('~/.pyenv')
        if os.path.isdir(d) and os.path.isfile(os.path.join(d, 'bin', 'pyenv')):
            return d

        return None

    @cached_nullary
    def exe(self) -> str:
        return os.path.join(check_not_none(self.root()), 'bin', 'pyenv')

    def version_exes(self) -> ta.List[ta.Tuple[str, str]]:
        """Return (version-name, python-exe) pairs for every installed pyenv version."""
        ret = []
        # check_not_none gives a clear failure (instead of a TypeError from
        # os.path.join on None) when no pyenv root exists - consistent with exe().
        vp = os.path.join(check_not_none(self.root()), 'versions')
        for dn in os.listdir(vp):
            ep = os.path.join(vp, dn, 'bin', 'python')
            # Skip directories without a python binary (e.g. partial installs).
            if not os.path.isfile(ep):
                continue
            ret.append((dn, ep))
        return ret

    def installable_versions(self) -> ta.List[str]:
        """Return the version strings offered by `pyenv install --list`."""
        ret = []
        s = subprocess_check_output_str(self.exe(), 'install', '--list')
        for l in s.splitlines():
            # Listing entries are indented; the header line is not.
            if not l.startswith(' '):
                continue
            l = l.strip()
            if not l:
                continue
            ret.append(l)
        return ret
|
|
2862
|
+
|
|
2863
|
+
|
|
2864
|
+
##
|
|
2865
|
+
|
|
2866
|
+
|
|
2867
|
+
@dc.dataclass(frozen=True)
class PyenvInstallOpts:
    """Accumulated options for a `pyenv install` invocation."""

    opts: ta.Sequence[str] = ()  # flags passed directly to `pyenv install`
    conf_opts: ta.Sequence[str] = ()  # CPython ./configure options
    cflags: ta.Sequence[str] = ()
    ldflags: ta.Sequence[str] = ()
    env: ta.Mapping[str, str] = dc.field(default_factory=dict)

    def merge(self, *others: 'PyenvInstallOpts') -> 'PyenvInstallOpts':
        """Concatenate the list fields and overlay env mappings, left to right."""
        all_opts = [self, *others]

        def cat(field: str) -> ta.List[str]:
            return [v for o in all_opts for v in getattr(o, field)]

        merged_env: ta.Dict[str, str] = {}
        for o in all_opts:
            merged_env.update(o.env)

        return PyenvInstallOpts(
            opts=cat('opts'),
            conf_opts=cat('conf_opts'),
            cflags=cat('cflags'),
            ldflags=cat('ldflags'),
            env=merged_env,
        )
|
|
2883
|
+
|
|
2884
|
+
|
|
2885
|
+
# Default install flags: -s (skip if the version already exists), -v (verbose).
DEFAULT_PYENV_INSTALL_OPTS = PyenvInstallOpts(opts=['-s', '-v'])
# -g builds a debug interpreter.
DEBUG_PYENV_INSTALL_OPTS = PyenvInstallOpts(opts=['-g'])
|
|
2887
|
+
|
|
2888
|
+
|
|
2889
|
+
#
|
|
2890
|
+
|
|
2891
|
+
|
|
2892
|
+
class PyenvInstallOptsProvider(abc.ABC):
    """Platform hook supplying extra options for `pyenv install`."""

    @abc.abstractmethod
    def opts(self) -> PyenvInstallOpts:
        raise NotImplementedError
|
|
2896
|
+
|
|
2897
|
+
|
|
2898
|
+
class LinuxPyenvInstallOpts(PyenvInstallOptsProvider):
    """Linux needs no extra install options."""

    def opts(self) -> PyenvInstallOpts:
        return PyenvInstallOpts()
|
|
2901
|
+
|
|
2902
|
+
|
|
2903
|
+
class DarwinPyenvInstallOpts(PyenvInstallOptsProvider):
    """macOS install options: framework build plus Homebrew-provided libraries."""

    @cached_nullary
    def framework_opts(self) -> PyenvInstallOpts:
        return PyenvInstallOpts(conf_opts=['--enable-framework'])

    @cached_nullary
    def has_brew(self) -> bool:
        return shutil.which('brew') is not None

    # Brew packages whose headers/libs the build should pick up.
    BREW_DEPS: ta.Sequence[str] = [
        'openssl',
        'readline',
        'sqlite3',
        'zlib',
    ]

    @cached_nullary
    def brew_deps_opts(self) -> PyenvInstallOpts:
        """Point the compiler and linker at each brew dependency's prefix."""
        cflags = []
        ldflags = []
        for dep in self.BREW_DEPS:
            dep_prefix = subprocess_check_output_str('brew', '--prefix', dep)
            cflags.append(f'-I{dep_prefix}/include')
            ldflags.append(f'-L{dep_prefix}/lib')
        return PyenvInstallOpts(
            cflags=cflags,
            ldflags=ldflags,
        )

    @cached_nullary
    def brew_tcl_opts(self) -> PyenvInstallOpts:
        """Configure tcl/tk from brew when present; no-op otherwise."""
        # A single probe serves as both the availability check and the prefix
        # lookup - the original queried `brew --prefix tcl-tk` twice.
        if (tcl_tk_prefix := subprocess_try_output_str('brew', '--prefix', 'tcl-tk')) is None:
            return PyenvInstallOpts()

        tcl_tk_ver_str = subprocess_check_output_str('brew', 'ls', '--versions', 'tcl-tk')
        # e.g. 'tcl-tk 8.6.13' -> '8.6'
        tcl_tk_ver = '.'.join(tcl_tk_ver_str.split()[1].split('.')[:2])

        return PyenvInstallOpts(conf_opts=[
            f"--with-tcltk-includes='-I{tcl_tk_prefix}/include'",
            f"--with-tcltk-libs='-L{tcl_tk_prefix}/lib -ltcl{tcl_tk_ver} -ltk{tcl_tk_ver}'",
        ])

    @cached_nullary
    def brew_ssl_opts(self) -> PyenvInstallOpts:
        pkg_config_path = subprocess_check_output_str('brew', '--prefix', 'openssl')
        # Preserve any caller-provided pkg-config path after brew's.
        if 'PKG_CONFIG_PATH' in os.environ:
            pkg_config_path += ':' + os.environ['PKG_CONFIG_PATH']
        return PyenvInstallOpts(env={'PKG_CONFIG_PATH': pkg_config_path})

    def opts(self) -> PyenvInstallOpts:
        return PyenvInstallOpts().merge(
            self.framework_opts(),
            self.brew_deps_opts(),
            self.brew_tcl_opts(),
            self.brew_ssl_opts(),
        )
|
|
2961
|
+
|
|
2962
|
+
|
|
2963
|
+
# sys.platform -> provider of platform-specific install options.
PLATFORM_PYENV_INSTALL_OPTS: ta.Dict[str, PyenvInstallOptsProvider] = {
    'darwin': DarwinPyenvInstallOpts(),
    'linux': LinuxPyenvInstallOpts(),
}
|
|
2967
|
+
|
|
2968
|
+
|
|
2969
|
+
##
|
|
2970
|
+
|
|
2971
|
+
|
|
2972
|
+
class PyenvVersionInstaller:
    """Installs a single interpreter version via `pyenv install`."""

    def __init__(
            self,
            version: str,
            opts: ta.Optional[PyenvInstallOpts] = None,
            *,
            debug: bool = False,
            # NOTE(review): default Pyenv() is created once at definition time
            # and shared across instances - confirm that is intended.
            pyenv: Pyenv = Pyenv(),
    ) -> None:
        super().__init__()

        # Default opts: the standard flags, optional debug flags, then the
        # platform-specific additions.
        if opts is None:
            lst = [DEFAULT_PYENV_INSTALL_OPTS]
            if debug:
                lst.append(DEBUG_PYENV_INSTALL_OPTS)
            lst.append(PLATFORM_PYENV_INSTALL_OPTS[sys.platform].opts())
            opts = PyenvInstallOpts().merge(*lst)

        self._version = version
        self._opts = opts
        self._debug = debug
        self._pyenv = pyenv

    @property
    def version(self) -> str:
        return self._version

    @property
    def opts(self) -> PyenvInstallOpts:
        return self._opts

    @cached_nullary
    def install_name(self) -> str:
        # pyenv names debug builds '<version>-debug'.
        return self._version + ('-debug' if self._debug else '')

    @cached_nullary
    def install_dir(self) -> str:
        return str(os.path.join(check_not_none(self._pyenv.root()), 'versions', self.install_name()))

    @cached_nullary
    def install(self) -> str:
        """Run `pyenv install` and return the path of the new python executable.

        Raises RuntimeError if the expected executable is missing afterwards.
        """
        env = dict(self._opts.env)
        # Fold the flag lists into environment variables, appending any values
        # already present in the caller's environment.
        for k, l in [
            ('CFLAGS', self._opts.cflags),
            ('LDFLAGS', self._opts.ldflags),
            ('PYTHON_CONFIGURE_OPTS', self._opts.conf_opts),
        ]:
            v = ' '.join(l)
            if k in os.environ:
                v += ' ' + os.environ[k]
            env[k] = v

        subprocess_check_call(self._pyenv.exe(), 'install', *self._opts.opts, self._version, env=env)

        exe = os.path.join(self.install_dir(), 'bin', 'python')
        if not os.path.isfile(exe):
            raise RuntimeError(f'Interpreter not found: {exe}')
        return exe
|
|
3031
|
+
|
|
3032
|
+
|
|
3033
|
+
##
|
|
3034
|
+
|
|
3035
|
+
|
|
3036
|
+
class PyenvInterpProvider(InterpProvider):
    """InterpProvider backed by pyenv-installed (and pyenv-installable) interpreters."""

    def __init__(
            self,
            pyenv: Pyenv = Pyenv(),

            # When inspect is set, versions are determined by running each
            # executable instead of being guessed from its directory name.
            inspect: bool = False,
            inspector: InterpInspector = INTERP_INSPECTOR,
    ) -> None:
        super().__init__()

        self._pyenv = pyenv

        self._inspect = inspect
        self._inspector = inspector

    #

    @staticmethod
    def guess_version(s: str) -> ta.Optional[InterpVersion]:
        """Derive an InterpVersion from a pyenv version name, or None if unparseable."""
        def strip_sfx(s: str, sfx: str) -> ta.Tuple[str, bool]:
            if s.endswith(sfx):
                return s[:-len(sfx)], True
            return s, False
        ok = {}
        # Order matters: the '-debug' suffix is stripped before the trailing 't'.
        s, ok['debug'] = strip_sfx(s, '-debug')
        s, ok['threaded'] = strip_sfx(s, 't')
        try:
            v = Version(s)
        except InvalidVersion:
            return None
        return InterpVersion(v, InterpOpts(**ok))

    class Installed(ta.NamedTuple):
        name: str  # pyenv version directory name
        exe: str  # path to that version's python executable
        version: InterpVersion

    def _make_installed(self, vn: str, ep: str) -> ta.Optional[Installed]:
        """Resolve one (name, exe) pair to an Installed record, or None if undecipherable."""
        iv: ta.Optional[InterpVersion]
        if self._inspect:
            try:
                iv = check_not_none(self._inspector.inspect(ep)).iv
            except Exception as e:  # noqa
                return None
        else:
            iv = self.guess_version(vn)
        if iv is None:
            return None
        return PyenvInterpProvider.Installed(
            name=vn,
            exe=ep,
            version=iv,
        )

    def installed(self) -> ta.Sequence[Installed]:
        """All pyenv versions that could be resolved; unparseable ones are logged and skipped."""
        ret: ta.List[PyenvInterpProvider.Installed] = []
        for vn, ep in self._pyenv.version_exes():
            if (i := self._make_installed(vn, ep)) is None:
                log.debug('Invalid pyenv version: %s', vn)
                continue
            ret.append(i)
        return ret

    #

    def get_installed_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
        return [i.version for i in self.installed()]

    def get_installed_version(self, version: InterpVersion) -> Interp:
        for i in self.installed():
            if i.version == version:
                return Interp(
                    exe=i.exe,
                    version=i.version,
                )
        raise KeyError(version)

    #

    def get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
        """Installable versions, offered in both regular and debug build variants."""
        lst = []
        for vs in self._pyenv.installable_versions():
            if (iv := self.guess_version(vs)) is None:
                continue
            if iv.opts.debug:
                raise Exception('Pyenv installable versions not expected to have debug suffix')
            # Each base version can be installed either normally or as a debug build.
            for d in [False, True]:
                lst.append(dc.replace(iv, opts=dc.replace(iv.opts, debug=d)))
        return lst
|
|
3126
|
+
|
|
3127
|
+
|
|
3128
|
+
########################################
|
|
3129
|
+
# ../../interp/system.py
|
|
3130
|
+
"""
|
|
3131
|
+
TODO:
|
|
3132
|
+
- python, python3, python3.12, ...
|
|
3133
|
+
- check if path py's are venvs: sys.prefix != sys.base_prefix
|
|
3134
|
+
"""
|
|
3135
|
+
# ruff: noqa: UP006 UP007
|
|
3136
|
+
|
|
3137
|
+
|
|
3138
|
+
##
|
|
3139
|
+
|
|
3140
|
+
|
|
3141
|
+
@dc.dataclass(frozen=True)
class SystemInterpProvider(InterpProvider):
    """Finds python3* executables on the system PATH."""

    cmd: str = 'python3'  # NOTE(review): not referenced by the methods below - confirm intent
    path: ta.Optional[str] = None  # search-path override; defaults to $PATH

    # When set, candidate exes are probed instead of parsing their file names.
    inspect: bool = False
    inspector: InterpInspector = INTERP_INSPECTOR

    #

    @staticmethod
    def _re_which(
            pat: re.Pattern,
            *,
            mode: int = os.F_OK | os.X_OK,
            path: ta.Optional[str] = None,
    ) -> ta.List[str]:
        """Like shutil.which(), but returns every PATH entry whose basename fully matches *pat*."""
        if path is None:
            path = os.environ.get('PATH', None)
            if path is None:
                try:
                    path = os.confstr('CS_PATH')
                except (AttributeError, ValueError):
                    path = os.defpath

        if not path:
            return []

        path = os.fsdecode(path)
        pathlst = path.split(os.pathsep)

        def _access_check(fn: str, mode: int) -> bool:
            return os.path.exists(fn) and os.access(fn, mode)

        out = []
        seen = set()
        for d in pathlst:
            # Deduplicate directories case-insensitively where applicable.
            normdir = os.path.normcase(d)
            if normdir not in seen:
                seen.add(normdir)
                if not _access_check(normdir, mode):
                    continue
                for thefile in os.listdir(d):
                    name = os.path.join(d, thefile)
                    if not (
                            os.path.isfile(name) and
                            pat.fullmatch(thefile) and
                            _access_check(name, mode)
                    ):
                        continue
                    out.append(name)

        return out

    @cached_nullary
    def exes(self) -> ta.List[str]:
        # Matches 'python3' and 'python3.X' basenames.
        return self._re_which(
            re.compile(r'python3(\.\d+)?'),
            path=self.path,
        )

    #

    def get_exe_version(self, exe: str) -> ta.Optional[InterpVersion]:
        """Determine an exe's version from its basename, falling back to inspection."""
        if not self.inspect:
            # Fast path: parse e.g. 'python3.12' -> '3.12'.
            s = os.path.basename(exe)
            if s.startswith('python'):
                s = s[len('python'):]
            if '.' in s:
                try:
                    return InterpVersion.parse(s)
                except InvalidVersion:
                    pass
        ii = self.inspector.inspect(exe)
        return ii.iv if ii is not None else None

    def exe_versions(self) -> ta.Sequence[ta.Tuple[str, InterpVersion]]:
        """(exe, version) pairs; unresolvable exes are logged and skipped."""
        lst = []
        for e in self.exes():
            if (ev := self.get_exe_version(e)) is None:
                log.debug('Invalid system version: %s', e)
                continue
            lst.append((e, ev))
        return lst

    #

    def get_installed_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
        return [ev for e, ev in self.exe_versions()]

    def get_installed_version(self, version: InterpVersion) -> Interp:
        for e, ev in self.exe_versions():
            if ev != version:
                continue
            return Interp(
                exe=e,
                version=ev,
            )
        raise KeyError(version)
|
|
3240
|
+
|
|
3241
|
+
|
|
3242
|
+
########################################
|
|
3243
|
+
# ../../interp/resolvers.py
|
|
3244
|
+
# ruff: noqa: UP006
|
|
3245
|
+
|
|
3246
|
+
|
|
3247
|
+
# Concrete provider classes keyed by their auto-derived name
# (see InterpProvider.__init_subclass__).
INTERP_PROVIDER_TYPES_BY_NAME: ta.Mapping[str, ta.Type[InterpProvider]] = {
    cls.name: cls for cls in deep_subclasses(InterpProvider) if abc.ABC not in cls.__bases__  # type: ignore
}
|
|
3250
|
+
|
|
3251
|
+
|
|
3252
|
+
class InterpResolver:
    """Queries an ordered collection of providers for an interpreter matching a spec."""

    def __init__(
            self,
            providers: ta.Sequence[ta.Tuple[str, InterpProvider]],
    ) -> None:
        super().__init__()

        # Earlier providers are preferred when several can satisfy a spec.
        self._providers: ta.Mapping[str, InterpProvider] = collections.OrderedDict(providers)

    def resolve(self, spec: InterpSpecifier) -> Interp:
        """Pick the best installed interpreter matching *spec*.

        Preference: earliest provider first, then the highest version within it.
        Raises KeyError when no provider has a matching installed version.
        """
        lst = [
            (i, si)
            for i, p in enumerate(self._providers.values())
            for si in p.get_installed_versions(spec)
            if spec.contains(si)
        ]
        if not lst:
            # Explicit failure (previously an accidental IndexError from
            # indexing an empty sort result); KeyError matches what the
            # providers themselves raise for an unknown version.
            raise KeyError(spec)

        # Key on si.version rather than si: InterpVersion defines no ordering,
        # so comparing the tuples directly raised TypeError whenever a single
        # provider returned more than one matching version.
        bi, bv = max(lst, key=lambda t: (-t[0], t[1].version))
        bp = list(self._providers.values())[bi]
        return bp.get_installed_version(bv)

    def list(self, spec: InterpSpecifier) -> None:
        """Print, per provider, the installed then installable versions matching *spec*."""
        print('installed:')
        for n, p in self._providers.items():
            lst = [
                si
                for si in p.get_installed_versions(spec)
                if spec.contains(si)
            ]
            if lst:
                print(f' {n}')
                for si in lst:
                    print(f' {si}')

        print()

        print('installable:')
        for n, p in self._providers.items():
            lst = [
                si
                for si in p.get_installable_versions(spec)
                if spec.contains(si)
            ]
            if lst:
                print(f' {n}')
                for si in lst:
                    print(f' {si}')
|
|
3298
|
+
|
|
3299
|
+
|
|
3300
|
+
# Default resolver: pyenv first, then the running interpreter, then system pythons.
DEFAULT_INTERP_RESOLVER = InterpResolver([(p.name, p) for p in [
    # pyenv is preferred to system interpreters as it tends to have more support for things like tkinter
    PyenvInterpProvider(),

    RunningInterpProvider(),

    SystemInterpProvider(),
]])
|
|
3308
|
+
|
|
3309
|
+
|
|
3310
|
+
########################################
|
|
3311
|
+
# cli.py
|
|
3312
|
+
|
|
3313
|
+
|
|
3314
|
+
##
|
|
3315
|
+
|
|
3316
|
+
|
|
3317
|
+
@dc.dataclass(frozen=True)
class VersionsFile:
    """Reads a simple KEY=VALUE '.versions' file ('#' starts a comment)."""

    name: ta.Optional[str] = '.versions'

    @staticmethod
    def parse(s: str) -> ta.Mapping[str, str]:
        # NOTE(review): a non-empty line without '=' yields {line: ''}, since
        # partition leaves the value empty - confirm that is intended.
        return {
            k: v
            for l in s.splitlines()
            if (sl := l.split('#')[0].strip())
            for k, _, v in (sl.partition('='),)
        }

    @cached_nullary
    def contents(self) -> ta.Mapping[str, str]:
        """Parsed file contents; {} when no name is set or the file is absent."""
        if not self.name or not os.path.exists(self.name):
            return {}
        with open(self.name) as f:
            s = f.read()
        return self.parse(s)

    @staticmethod
    def get_pythons(d: ta.Mapping[str, str]) -> ta.Mapping[str, str]:
        """Extract PYTHON_* entries, lower-cased and with the prefix stripped."""
        pfx = 'PYTHON_'
        return {k[len(pfx):].lower(): v for k, v in d.items() if k.startswith(pfx)}

    @cached_nullary
    def pythons(self) -> ta.Mapping[str, str]:
        return self.get_pythons(self.contents())


##

@cached_nullary
def _script_rel_path() -> str:
    """Return this script's path relative to the current working directory.

    Raises:
        OSError: if the script does not live under the cwd.
    """
    cwd = os.getcwd()
    # Compare against the cwd with a trailing separator so a sibling directory
    # that merely shares the cwd as a string prefix (e.g. cwd='/foo' vs
    # file='/foobar/x.py') is not falsely accepted.
    pfx = os.path.join(cwd, '')
    if not (f := __file__).startswith(pfx):
        raise OSError(f'file {f} not in {cwd}')
    return f[len(pfx):]


##

class Venv:
    """A single named virtualenv managed under `.venvs/`, described by a VenvConfig."""

    def __init__(
            self,
            name: str,
            cfg: VenvConfig,
    ) -> None:
        super().__init__()
        self._name = name
        self._cfg = cfg

    @property
    def cfg(self) -> VenvConfig:
        # The configuration this venv was constructed from.
        return self._cfg

    # Directory under which all venvs are created.
    DIR_NAME = '.venvs'

    @property
    def dir_name(self) -> str:
        # Relative path of this venv's directory, e.g. '.venvs/<name>'.
        return os.path.join(self.DIR_NAME, self._name)

    @cached_nullary
    def interp_exe(self) -> str:
        # Resolve the configured interpreter spec to a concrete executable path.
        i = InterpSpecifier.parse(check_not_none(self._cfg.interp))
        return DEFAULT_INTERP_RESOLVER.resolve(i).exe

    @cached_nullary
    def exe(self) -> str:
        # Path to the venv's python; only valid once the venv exists on disk.
        ve = os.path.join(self.dir_name, 'bin/python')
        if not os.path.isfile(ve):
            raise Exception(f'venv exe {ve} does not exist or is not a file!')
        return ve

    @cached_nullary
    def create(self) -> bool:
        """Create the venv if absent; returns True iff it was newly created.

        Also upgrades pip/setuptools/wheel and installs configured requirements
        into a freshly created venv.
        """
        if os.path.exists(dn := self.dir_name):
            if not os.path.isdir(dn):
                raise Exception(f'{dn} exists but is not a directory!')
            # Already created — nothing to do.
            return False

        log.info('Using interpreter %s', (ie := self.interp_exe()))
        subprocess_check_call(ie, '-m', 'venv', dn)

        ve = self.exe()

        # Bring the packaging toolchain up to date before installing anything.
        subprocess_check_call(
            ve,
            '-m', 'pip',
            'install', '-v', '--upgrade',
            'pip',
            'setuptools',
            'wheel',
        )

        # Install configured requirements, if any.
        if (sr := self._cfg.requires):
            subprocess_check_call(
                ve,
                '-m', 'pip',
                'install', '-v',
                *sr,
            )

        return True

    @staticmethod
    def _resolve_srcs(raw: ta.List[str]) -> ta.List[str]:
        # Expand glob patterns (entries containing '*' or '?') and deduplicate
        # while preserving first-seen order.
        out: list[str] = []
        seen: ta.Set[str] = set()
        for r in raw:
            es: list[str]
            if any(c in r for c in '*?'):
                es = list(glob.glob(r, recursive=True))
            else:
                es = [r]
            for e in es:
                if e not in seen:
                    seen.add(e)
                    out.append(e)
        return out

    @cached_nullary
    def srcs(self) -> ta.Sequence[str]:
        # Cached, glob-expanded source list from the config.
        return self._resolve_srcs(self._cfg.srcs or [])


##

class Run:
    """Top-level driver: loads the pyproject config and exposes its venvs."""

    def __init__(
            self,
            *,
            raw_cfg: ta.Union[ta.Mapping[str, ta.Any], str, None] = None,
    ) -> None:
        super().__init__()

        self._raw_cfg = raw_cfg

    @cached_nullary
    def raw_cfg(self) -> ta.Mapping[str, ta.Any]:
        """Return the raw config mapping.

        A string is parsed as TOML; a mapping is returned as-is; when nothing
        was given, `pyproject.toml` is read from the cwd.
        """
        raw = self._raw_cfg
        if isinstance(raw, str):
            return toml_loads(raw)
        if raw is not None:
            return raw
        with open('pyproject.toml') as f:
            return toml_loads(f.read())

    @cached_nullary
    def cfg(self) -> PyprojectConfig:
        """Prepare the `[tool.omlish.pyproject]` section into a PyprojectConfig."""
        dct = self.raw_cfg()['tool']['omlish']['pyproject']
        preparer = PyprojectConfigPreparer(
            python_versions=VersionsFile().pythons(),
        )
        return preparer.prepare_config(dct)

    @cached_nullary
    def venvs(self) -> ta.Mapping[str, Venv]:
        """Build the configured venvs, skipping underscore-prefixed (private) entries."""
        out: ta.Dict[str, Venv] = {}
        for name, vc in self.cfg().venvs.items():
            if not name.startswith('_'):
                out[name] = Venv(name, vc)
        return out


##

def _venv_cmd(args) -> None:
    """Handle the `venv` subcommand.

    If the venv is configured to run inside a docker container and we are not
    already in it, re-execs this script inside that container via
    `docker compose exec`. Otherwise ensures the venv exists and dispatches on
    `args.cmd` (python / exe / run / srcs / test).
    """
    venv = Run().venvs()[args.name]
    if (sd := venv.cfg.docker) is not None and sd != (cd := args._docker_container):  # noqa
        # Rebuild the current invocation as a shell command, tagging it with
        # the target container so the inner run skips this branch.
        script = ' '.join([
            'python3',
            shlex.quote(_script_rel_path()),
            f'--_docker_container={shlex.quote(sd)}',
            *map(shlex.quote, sys.argv[1:]),
        ])
        subprocess_check_call(
            'docker',
            'compose',
            '-f', 'docker/compose.yml',
            'exec',
            # Forward requested environment variables: bare names are looked
            # up in the current environment, `K=V` entries pass through as-is.
            *itertools.chain.from_iterable(
                ('-e', f'{e}={os.environ.get(e, "")}' if '=' not in e else e)
                for e in (args.docker_env or [])
            ),
            '-it', sd,
            'bash', '--login', '-c', script,
        )
        return

    # Idempotent: no-op when the venv directory already exists.
    venv.create()

    cmd = args.cmd
    if not cmd:
        # No subcommand: creating the venv (above) was the whole job.
        pass

    elif cmd == 'python':
        # Replace this process with the venv's interpreter.
        os.execl(
            (exe := venv.exe()),
            exe,
            *args.args,
        )

    elif cmd == 'exe':
        check_not(args.args)
        print(venv.exe())

    elif cmd == 'run':
        # Run the remaining args as a shell command with the venv activated;
        # with no args, drop into an interactive bash.
        sh = check_not_none(shutil.which('bash'))
        script = ' '.join(args.args)
        if not script:
            script = sh
        os.execl(
            (bash := check_not_none(sh)),
            bash,
            '-c',
            f'. {venv.dir_name}/bin/activate && ' + script,
        )

    elif cmd == 'srcs':
        check_not(args.args)
        print('\n'.join(venv.srcs()))

    elif cmd == 'test':
        subprocess_check_call(venv.exe(), '-m', 'pytest', *(args.args or []), *venv.srcs())

    else:
        raise Exception(f'unknown subcommand: {cmd}')


##

def _build_parser() -> argparse.ArgumentParser:
    """Construct the CLI argument parser."""
    parser = argparse.ArgumentParser()
    # Hidden flag set when the script re-invokes itself inside a docker
    # container; not shown in --help.
    parser.add_argument('--_docker_container', help=argparse.SUPPRESS)

    subparsers = parser.add_subparsers()

    venv_parser = subparsers.add_parser('venv')
    venv_parser.add_argument('name')
    venv_parser.add_argument('-e', '--docker-env', action='append')
    venv_parser.add_argument('cmd', nargs='?')
    venv_parser.add_argument('args', nargs=argparse.REMAINDER)
    venv_parser.set_defaults(func=_venv_cmd)

    return parser
def _main(argv: ta.Optional[ta.Sequence[str]] = None) -> None:
    """CLI entry point: parse `argv` and dispatch to the selected subcommand."""
    check_runtime_version()
    configure_standard_logging()

    parser = _build_parser()
    ns = parser.parse_args(argv)

    func = getattr(ns, 'func', None)
    if not func:
        # No subcommand selected — show usage instead of failing.
        parser.print_help()
        return
    func(ns)
# Allow running this module directly as a script.
if __name__ == '__main__':
    _main()
|