omdev 0.0.0.dev29__py3-none-any.whl → 0.0.0.dev31__py3-none-any.whl
- omdev/cache/compute/cache.py +120 -0
- omdev/cache/compute/contexts.py +137 -0
- omdev/cache/compute/currents.py +78 -0
- omdev/cache/compute/fns.py +157 -0
- omdev/cache/compute/resolvers.py +23 -0
- omdev/cache/compute/storage.py +39 -0
- omdev/cache/compute/types.py +144 -0
- omdev/cache/data/__init__.py +9 -5
- omdev/cache/data/actions.py +8 -2
- omdev/cache/data/cache.py +66 -31
- omdev/cache/data/manifests.py +3 -3
- omdev/cache/data/specs.py +9 -6
- omdev/interp/pyenv.py +8 -1
- omdev/manifests.py +112 -18
- omdev/pyproject/cli.py +6 -2
- omdev/scripts/interp.py +28 -6
- omdev/scripts/pyproject.py +34 -8
- omdev/secrets.py +12 -0
- omdev/tools/piptools.py +24 -0
- {omdev-0.0.0.dev29.dist-info → omdev-0.0.0.dev31.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev29.dist-info → omdev-0.0.0.dev31.dist-info}/RECORD +25 -22
- omdev/cache/comp/cache.py +0 -137
- omdev/cache/comp/contexts.py +0 -136
- omdev/cache/comp/fns.py +0 -115
- omdev/cache/comp/resolvers.py +0 -23
- omdev/cache/comp/types.py +0 -92
- /omdev/cache/{comp → compute}/__init__.py +0 -0
- {omdev-0.0.0.dev29.dist-info → omdev-0.0.0.dev31.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev29.dist-info → omdev-0.0.0.dev31.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev29.dist-info → omdev-0.0.0.dev31.dist-info}/top_level.txt +0 -0
omdev/cache/data/__init__.py
CHANGED
@@ -1,3 +1,7 @@
+from .actions import (  # noqa
+    ExtractAction,
+)
+
 from .cache import (  # noqa
     Cache,
 )
@@ -8,12 +12,12 @@ from .defaults import (  # noqa
 )
 
 from .manifests import (  # noqa
-    …
+    Manifest,
 )
 
 from .specs import (  # noqa
-    …
-    …
-    …
-    …
+    Spec,
+    GitSpec,
+    GithubContentSpec,
+    UrlSpec,
 )
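
Net effect of these two hunks: the module's public surface re-exports the renamed types (per the later hunk headers, the old names were of the `GitCacheDataSpec(CacheDataSpec)` form) plus the new `ExtractAction` and `Manifest`. A sketch of the resulting import surface, with all names taken from the hunks above:

    from omdev.cache.data import (
        Cache,
        ExtractAction,
        GitSpec,
        GithubContentSpec,
        Manifest,
        Spec,
        UrlSpec,
    )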
omdev/cache/data/actions.py
CHANGED
@@ -6,6 +6,7 @@ TODO:
 """
 import typing as ta
 
+from omlish import check
 from omlish import dataclasses as dc
 from omlish import lang
 from omlish import marshal as msh
@@ -24,9 +25,14 @@ class Action(lang.Abstract, lang.Sealed):
 ##
 
 
+def _non_empty_strs(v: ta.Sequence[str]) -> ta.Sequence[str]:
+    return [check.non_empty_str(s) for s in ([v] if isinstance(v, str) else v)]
+
+
 @dc.dataclass(frozen=True)
 class ExtractAction(Action, lang.Final):
-    files: ta.Sequence[str]
+    files: ta.Sequence[str] = dc.xfield(coerce=_non_empty_strs)
+    keep_archive: bool = False
 
 
 ##
@@ -34,7 +40,7 @@ class ExtractAction(Action, lang.Final):
 
 @lang.cached_function
 def _install_standard_marshalling() -> None:
-    actions_poly = msh.polymorphism_from_subclasses(Action)
+    actions_poly = msh.polymorphism_from_subclasses(Action, naming=msh.Naming.SNAKE, strip_suffix=True)
     msh.STANDARD_MARSHALER_FACTORIES[0:0] = [msh.PolymorphismMarshalerFactory(actions_poly)]
     msh.STANDARD_UNMARSHALER_FACTORIES[0:0] = [msh.PolymorphismUnmarshalerFactory(actions_poly)]
 
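
The new `_non_empty_strs` coercer makes `ExtractAction.files` forgiving about shape but strict about content: a bare string is wrapped into a one-element list rather than iterated character by character, and every element must pass `check.non_empty_str`. A small sketch of the intended behavior, assuming omlish's `coerce=` hook runs at construction time (as its use here implies):

    from omdev.cache.data import ExtractAction

    a = ExtractAction(files='data.tar.gz')   # lone string is wrapped by _non_empty_strs
    assert list(a.files) == ['data.tar.gz']
    assert a.keep_archive is False           # new flag; archive is deleted after extraction by default

    ExtractAction(files=[''])                # raises: empty strings fail check.non_empty_str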
omdev/cache/data/cache.py
CHANGED
@@ -2,13 +2,18 @@
 TODO:
  - mirrors
  - huggingface_hub
+ - datasets
+ - verify md5 (action)
  - stupid little progress bars
  - groups of multiple files downloaded - 'spec set'? idk
  - torchvision.datasets.FashionMNIST
+ - chaining? or is this compcache..
+ - download resume ala hf_hub
 """
 import logging
 import os.path
 import shutil
+import subprocess
 import tempfile
 import urllib.parse
 import urllib.request
@@ -16,14 +21,17 @@ import urllib.request
 from omlish import check
 from omlish import lang
 from omlish import marshal as msh
+from omlish import os as osu
 from omlish.formats import json
 
 from ... import git
-from .…
-from .…
-from .…
-from .specs import …
-from .specs import …
+from .actions import Action
+from .actions import ExtractAction
+from .manifests import Manifest
+from .specs import GithubContentSpec
+from .specs import GitSpec
+from .specs import Spec
+from .specs import UrlSpec
 
 
 log = logging.getLogger(__name__)
@@ -39,18 +47,20 @@
 
         self._items_dir = os.path.join(base_dir, 'items')
 
+    #
+
     def _fetch_url(self, url: str, out_file: str) -> None:
         log.info('Fetching url: %s -> %s', url, out_file)
 
         urllib.request.urlretrieve(url, out_file)  # noqa
 
-    def _fetch_into(self, spec: …
+    def _fetch_into(self, spec: Spec, data_dir: str) -> None:
         log.info('Fetching spec: %s %r -> %s', spec.digest, spec, data_dir)
 
-        if isinstance(spec, …
+        if isinstance(spec, UrlSpec):
             self._fetch_url(spec.url, os.path.join(data_dir, spec.file_name_or_default))
 
-        elif isinstance(spec, …
+        elif isinstance(spec, GithubContentSpec):
             for repo_file in spec.files:
                 out_file = os.path.join(data_dir, repo_file)
                 if not os.path.abspath(out_file).startswith(os.path.abspath(data_dir)):
@@ -60,7 +70,7 @@
                 os.makedirs(os.path.dirname(out_file), exist_ok=True)
                 self._fetch_url(url, os.path.join(data_dir, out_file))
 
-        elif isinstance(spec, …
+        elif isinstance(spec, GitSpec):
             if not spec.subtrees:
                 raise NotImplementedError
 
@@ -92,38 +102,52 @@
         else:
             raise TypeError(spec)
 
-    def …
+    def _perform_action(self, action: Action, data_dir: str) -> None:
+        if isinstance(action, ExtractAction):
+            for f in action.files:
+                file = os.path.join(data_dir, f)
+                if not os.path.isfile(file):
+                    raise Exception(f'Not file: {file}')
+
+                if file.endswith('.tar.gz'):
+                    subprocess.check_call(['tar', 'xzf', file], cwd=data_dir)
+
+                elif file.endswith('.zip'):
+                    subprocess.check_call(['unzip', file], cwd=data_dir)
+
+                else:
+                    raise Exception(f'Unhandled archive extension: {file}')
+
+                if not action.keep_archive:
+                    os.unlink(file)
+
+        else:
+            raise TypeError(action)
+
+    def _return_val(self, spec: Spec, data_dir: str) -> str:
         check.state(os.path.isdir(data_dir))
 
-        if isinstance(…
-…
-        if not os.path.isfile(data_file):
-            raise RuntimeError(data_file)  # noqa
-        return data_file
+        if any(isinstance(a, ExtractAction) for a in spec.actions):
+            return data_dir
 
-…
+        single_file: str
+        if isinstance(spec, UrlSpec):
+            single_file = os.path.join(data_dir, spec.file_name_or_default)
+
+        elif isinstance(spec, GithubContentSpec):
             if len(spec.files) != 1:
                 return data_dir
-…
-            if not os.path.isfile(data_file):
-                raise RuntimeError(data_file)  # noqa
-            return data_file
+            single_file = os.path.join(data_dir, check.single(spec.files))
 
         else:
             return data_dir
 
-…
-…
+        if not os.path.isfile(single_file):
+            raise RuntimeError(single_file)  # noqa
 
-…
-…
-        item_dir = os.path.join(self._items_dir, spec.digest)
-        if os.path.isdir(item_dir):
-            data_dir = os.path.join(item_dir, 'data')
-            return self._return_val(spec, data_dir)
-
-        #
+        return single_file
 
+    def _fetch_item(self, spec: Spec, item_dir: str) -> None:
         tmp_dir = tempfile.mkdtemp()
 
         #
@@ -133,11 +157,13 @@
 
         start_at = lang.utcnow()
         self._fetch_into(spec, fetch_dir)
+        for action in spec.actions:
+            self._perform_action(action, fetch_dir)
         end_at = lang.utcnow()
 
         #
 
-        manifest = …
+        manifest = Manifest(
             spec,
             start_at=start_at,
             end_at=end_at,
@@ -159,5 +185,14 @@
 
         shutil.move(tmp_dir, item_dir)
 
+    def get(self, spec: Spec) -> str:
+        os.makedirs(self._items_dir, exist_ok=True)
+
+        item_dir = os.path.join(self._items_dir, spec.digest)
+        if not os.path.isdir(item_dir):
+            self._fetch_item(spec, item_dir)
+
+        osu.touch(os.path.join(item_dir, 'accessed'))
+
         data_dir = os.path.join(item_dir, 'data')
         return self._return_val(spec, data_dir)
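
Taken together, these hunks give the cache a clean digest-keyed flow: `get()` looks up `items/<spec.digest>`, calls `_fetch_item()` on a miss (fetch into a temp dir, run each of the spec's actions, record a `Manifest`, move into place), and touches an `accessed` marker on every hit. A usage sketch; the base directory and URL are illustrative, and the constructor is assumed to take the base dir, as `os.path.join(base_dir, 'items')` suggests:

    import os.path

    from omdev.cache.data import Cache, ExtractAction, UrlSpec

    cache = Cache(os.path.expanduser('~/.cache/omdev'))  # items land under <base>/items/<digest>

    spec = UrlSpec(
        'https://example.com/data.tar.gz',               # hypothetical URL
        actions=[ExtractAction(files=['data.tar.gz'])],
    )

    # First call downloads and extracts; later calls only touch 'accessed'.
    # Because the spec carries an ExtractAction, _return_val hands back the
    # data directory rather than a single file path.
    path = cache.get(spec)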
omdev/cache/data/manifests.py
CHANGED
@@ -6,7 +6,7 @@ from omlish import dataclasses as dc
 
 from ...git import get_git_revision
 from .consts import SERIALIZATION_VERSION
-from .specs import …
+from .specs import Spec
 
 
 ##
@@ -24,8 +24,8 @@ def _lib_revision() -> str | None:
 
 
 @dc.dataclass(frozen=True)
-class …
-    spec: …
+class Manifest:
+    spec: Spec
 
     start_at: datetime.datetime = dc.field(kw_only=True)
     end_at: datetime.datetime = dc.field(kw_only=True)
omdev/cache/data/specs.py
CHANGED
@@ -10,6 +10,7 @@ from omlish import lang
 from omlish import marshal as msh
 from omlish.formats import json
 
+from .actions import Action
 from .consts import SERIALIZATION_VERSION
 
 
@@ -17,12 +18,14 @@ from .consts import SERIALIZATION_VERSION
 
 
 @dc.dataclass(frozen=True)
-class CacheDataSpec(lang.Abstract, lang.Sealed):
+class Spec(lang.Abstract, lang.Sealed):
     serialization_version: int = dc.field(default=SERIALIZATION_VERSION, kw_only=True)
 
+    actions: ta.Sequence[Action] = dc.field(default=(), kw_only=True)
+
     @cached.property
     def json(self) -> str:
-        return json.dumps_compact(msh.marshal(self, …
+        return json.dumps_compact(msh.marshal(self, Spec), sort_keys=True)
 
     @cached.property
     def digest(self) -> str:
@@ -39,7 +42,7 @@ def _maybe_sorted_strs(v: ta.Iterable[str] | None) -> ta.Sequence[str] | None:
 
 
 @dc.dataclass(frozen=True)
-class GitCacheDataSpec(CacheDataSpec):
+class GitSpec(Spec):
     url: str
 
     branch: str | None = dc.field(default=None, kw_only=True)
@@ -52,7 +55,7 @@ class GitCacheDataSpec(CacheDataSpec):
 
 
 @dc.dataclass(frozen=True)
-class UrlCacheDataSpec(CacheDataSpec):
+class UrlSpec(Spec):
     url: str = dc.xfield(validate=lambda u: bool(urllib.parse.urlparse(u)))
     file_name: str | None = None
 
@@ -74,7 +77,7 @@ def _repo_str(s: str) -> str:
 
 
 @dc.dataclass(frozen=True)
-class GithubContentCacheDataSpec(CacheDataSpec):
+class GithubContentSpec(Spec):
     repo: str = dc.field(validate=_repo_str)  # type: ignore
     rev: str
     files: lang.SequenceNotStr[str]
@@ -85,7 +88,7 @@ class GithubContentCacheDataSpec(CacheDataSpec):
 
 @lang.cached_function
 def _install_standard_marshalling() -> None:
-    specs_poly = msh.polymorphism_from_subclasses(…
+    specs_poly = msh.polymorphism_from_subclasses(Spec, naming=msh.Naming.SNAKE, strip_suffix=True)
     msh.STANDARD_MARSHALER_FACTORIES[0:0] = [msh.PolymorphismMarshalerFactory(specs_poly)]
     msh.STANDARD_UNMARSHALER_FACTORIES[0:0] = [msh.PolymorphismUnmarshalerFactory(specs_poly)]
 
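
Worth noting the knock-on effect of adding `actions` as a marshalled field on the base `Spec`: it participates in the sorted, compact `json` form, so (assuming `digest` is derived from `json`, which its placement right after suggests but the diff does not show) attaching an action changes the cache key. A sketch:

    from omdev.cache.data import ExtractAction, UrlSpec

    plain = UrlSpec('https://example.com/data.tar.gz')       # hypothetical URL
    extracted = UrlSpec(
        'https://example.com/data.tar.gz',
        actions=[ExtractAction(files=['data.tar.gz'])],
    )

    # Same URL, different actions -> different marshalled JSON -> different
    # digest, i.e. two distinct cache items.
    assert plain.digest != extracted.digest

Likewise, with `naming=msh.Naming.SNAKE, strip_suffix=True` each subclass presumably marshals under a snake-cased tag with the shared suffix stripped (e.g. `UrlSpec` as 'url', `GithubContentSpec` as 'github_content'); the exact tag form is an inference from the arguments, not something the diff shows.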
omdev/interp/pyenv.py
CHANGED
@@ -3,7 +3,14 @@ TODO:
  - custom tags
  - optionally install / upgrade pyenv itself
  - new vers dont need these custom mac opts, only run on old vers
-"""
+
+TODO opts:
+ - --enable-loadable-sqlite-extensions LDFLAGS="-L/opt/homebrew/opt/sqlite/lib" CPPFLAGS="-I/opt/homebrew/opt/sqlite/include"
+ - --enable-shared
+ - --enable-optimizations
+ - --enable-profiling ?
+ - --enable-ipv6 ?
+"""  # noqa
 # ruff: noqa: UP006 UP007
 import abc
 import dataclasses as dc
omdev/manifests.py
CHANGED
@@ -1,20 +1,15 @@
 """
-…
- - …
- - …
-
- - comment must immediately precede a global val setter
- - val is grabbed from imported module dict by name
- - value is repr'd somehow (roundtrip checked) (naw, json lol)
- - dumped in _manifest.py
- - # @omlish-manifest \n _CACHE_MANIFEST = {'cache': {'name': 'llm', …
- - also can do prechecks!
+TODO:
+ - split, cli.py / types.py
+ - opt to load only specified types
+ - prechecks
 """
 # ruff: noqa: UP006 UP007
 # @omlish-lite
 import argparse
 import collections
 import dataclasses as dc
+import importlib
 import inspect
 import json
 import os.path
@@ -50,12 +45,50 @@ class Manifest(ManifestOrigin):
     value: ta.Any
 
 
+def load_manifest_entry(
+        entry: ta.Mapping[str, ta.Any],
+        *,
+        raw_values: bool = False,
+) -> Manifest:
+    manifest = Manifest(**entry)
+
+    [(key, value_dct)] = manifest.value.items()
+    if not key.startswith('$'):
+        raise Exception(f'Bad key: {key}')
+
+    if not raw_values:
+        parts = key[1:].split('.')
+        pos = next(i for i, p in enumerate(parts) if p[0].isupper())
+        mod_name = '.'.join(parts[:pos])
+        mod = importlib.import_module(mod_name)
+
+        obj: ta.Any = mod
+        for ca in parts[pos:]:
+            obj = getattr(obj, ca)
+        cls = obj
+        if not isinstance(cls, type):
+            raise TypeError(cls)
+
+        if not dc.is_dataclass(cls):
+            raise TypeError(cls)
+        obj = cls(**value_dct)  # noqa
+
+        manifest = dc.replace(manifest, value=obj)
+
+    return manifest
+
+
+##
+
+
 MANIFEST_MAGIC = '# @omlish-manifest'
 
 _MANIFEST_GLOBAL_PAT = re.compile(r'^(?P<name>[A-Za-z_][A-Za-z0-9_]*)\s*=.*')
 
 
 def _dump_module_manifests(spec: str, *attrs: str) -> None:
+    import collections.abc
+    import dataclasses as dc  # noqa
     import importlib
     import json
 
@@ -65,13 +98,35 @@ def _dump_module_manifests(spec: str, *attrs: str) -> None:
     for attr in attrs:
         manifest = getattr(mod, attr)
 
-…
-…
+        if dc.is_dataclass(manifest):
+            cls = type(manifest)
+            manifest_json = json.dumps(dc.asdict(manifest))  # type: ignore
+            manifest_dct = json.loads(manifest_json)
+
+            rt_manifest = cls(**manifest_dct)  # type: ignore
+            if rt_manifest != manifest:
+                raise Exception(f'Manifest failed to roundtrip: {manifest} -> {manifest_dct} != {rt_manifest}')
+
+            key = f'${cls.__module__}.{cls.__qualname__}'
+            out[attr] = {key: manifest_dct}
+
+        elif isinstance(manifest, collections.abc.Mapping):
+            [(key, manifest_dct)] = manifest.items()
+            if not key.startswith('$'):  # noqa
+                raise Exception(f'Bad key: {key}')
+
+            if not isinstance(manifest_dct, collections.abc.Mapping):
+                raise Exception(f'Bad value: {manifest_dct}')
 
-…
-…
+            manifest_json = json.dumps(manifest_dct)
+            rt_manifest_dct = json.loads(manifest_json)
+            if manifest_dct != rt_manifest_dct:
+                raise Exception(f'Manifest failed to roundtrip: {manifest_dct} != {rt_manifest_dct}')
 
-…
+            out[attr] = {key: manifest_dct}
+
+        else:
+            raise TypeError(f'Manifest must be dataclass or mapping: {manifest!r}')
 
     out_json = json.dumps(out, indent=None, separators=(',', ':'))
     print(out_json)
@@ -113,7 +168,7 @@ def build_module_manifests(
             module='.'.join(['', *mod_name.split('.')[1:]]),
             attr=m.groupdict()['name'],
 
-            file=…
+            file=file,
             line=i + 1,
         ))
 
@@ -205,8 +260,30 @@ def build_package_manifests(
 ##
 
 
+def check_package_manifests(
+        name: str,
+        base: str,
+) -> None:
+    pkg_dir = os.path.join(base, name)
+    if not os.path.isdir(pkg_dir) or not os.path.isfile(os.path.join(pkg_dir, '__init__.py')):
+        raise Exception(pkg_dir)
+
+    manifests_file = os.path.join(pkg_dir, '.manifests.json')
+    if not os.path.isfile(manifests_file):
+        raise Exception(f'No manifests file: {manifests_file}')
+
+    with open(manifests_file) as f:
+        manifests_json = json.load(f)
+
+    for entry in manifests_json:
+        load_manifest_entry(entry)
+
+
+##
+
+
 if __name__ == '__main__':
-    def …
+    def _get_base(args) -> str:
         if args.base is not None:
             base = args.base
         else:
@@ -214,6 +291,10 @@ if __name__ == '__main__':
         base = os.path.abspath(base)
         if not os.path.isdir(base):
             raise RuntimeError(base)
+        return base
+
+    def _gen_cmd(args) -> None:
+        base = _get_base(args)
 
         for pkg in args.package:
             ms = build_package_manifests(
@@ -224,6 +305,15 @@ if __name__ == '__main__':
         if not args.quiet:
             print(json_dumps_pretty([dc.asdict(m) for m in ms]))
 
+    def _check_cmd(args) -> None:
+        base = _get_base(args)
+
+        for pkg in args.package:
+            check_package_manifests(
+                pkg,
+                base,
+            )
+
     def _main(argv=None) -> None:
         configure_standard_logging('INFO')
 
@@ -235,9 +325,13 @@
         parser_gen.add_argument('-w', '--write', action='store_true')
         parser_gen.add_argument('-q', '--quiet', action='store_true')
         parser_gen.add_argument('package', nargs='*')
-
         parser_gen.set_defaults(func=_gen_cmd)
 
+        parser_check = subparsers.add_parser('check')
+        parser_check.add_argument('-b', '--base')
+        parser_check.add_argument('package', nargs='*')
+        parser_check.set_defaults(func=_check_cmd)
+
         args = parser.parse_args(argv)
         if not getattr(args, 'func', None):
             parser.print_help()
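
The `$module.Class` key convention ties the two new halves together: `_dump_module_manifests` serializes a dataclass manifest as `{'$mod.path.Cls': {...fields...}}` after proving it JSON-roundtrips, and `load_manifest_entry` reverses that; the lowercase dotted prefix of the key names the module to import, the parts from the first capitalized segment onward are walked with `getattr`, and the resulting dataclass is reinstantiated from the value mapping. A hedged sketch, using a class that does exist per the earlier hunks (the attr/line values here are made up):

    # Shaped like one element of a package's .manifests.json; field names
    # follow ManifestOrigin plus 'value'.
    entry = {
        'module': '.cache.data.specs',
        'attr': '_EXAMPLE_MANIFEST',      # hypothetical attribute
        'file': 'omdev/cache/data/specs.py',
        'line': 1,
        'value': {'$omdev.cache.data.specs.UrlSpec': {'url': 'https://example.com/x'}},
    }

    m = load_manifest_entry(entry)     # imports omdev.cache.data.specs, calls UrlSpec(url=...)
    raw = load_manifest_entry(entry, raw_values=True)  # leaves value as the raw mapping; no import

The new `check` subcommand is just this applied to every entry of every listed package's `.manifests.json`.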
omdev/pyproject/cli.py
CHANGED
@@ -8,9 +8,8 @@ TODO:
  - build / package / publish / version roll
  - {pkg_name: [src_dirs]}, default excludes, generate MANIFST.in, ...
  - env vars - PYTHONPATH
- - optional uv backend
 
-…
+See:
  - https://pdm-project.org/en/latest/
  - https://rye.astral.sh/philosophy/
  - https://github.com/indygreg/python-build-standalone/blob/main/pythonbuild/cpython.py
@@ -158,6 +157,11 @@ class Venv:
         if (sr := self._cfg.requires):
             rr = RequirementsRewriter(self._name)
             reqs = [rr.rewrite(req) for req in sr]
+
+            # TODO: automatically try slower uv download when it fails? lol
+            #  Caused by: Failed to download distribution due to network timeout. Try increasing UV_HTTP_TIMEOUT (current value: 30s).  # noqa
+            #  UV_CONCURRENT_DOWNLOADS=4 UV_HTTP_TIMEOUT=3600
+
             subprocess_check_call(
                 ve,
                 '-m',
omdev/scripts/interp.py
CHANGED
@@ -13,6 +13,7 @@ TODO:
 import abc
 import argparse
 import collections
+import contextlib
 import dataclasses as dc
 import datetime
 import functools
@@ -1352,6 +1353,24 @@ class StandardLogHandler(ProxyLogHandler):
 ##
 
 
+@contextlib.contextmanager
+def _locking_logging_module_lock() -> ta.Iterator[None]:
+    if hasattr(logging, '_acquireLock'):
+        logging._acquireLock()  # noqa
+        try:
+            yield
+        finally:
+            logging._releaseLock()  # type: ignore  # noqa
+
+    elif hasattr(logging, '_lock'):
+        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+        with logging._lock:  # noqa
+            yield
+
+    else:
+        raise Exception("Can't find lock in logging module")
+
+
 def configure_standard_logging(
         level: ta.Union[int, str] = logging.INFO,
         *,
@@ -1359,8 +1378,7 @@ def configure_standard_logging(
         target: ta.Optional[logging.Logger] = None,
         force: bool = False,
 ) -> ta.Optional[StandardLogHandler]:
-…
-    try:
+    with _locking_logging_module_lock():
         if target is None:
             target = logging.root
 
@@ -1400,9 +1418,6 @@
 
         return StandardLogHandler(handler)
 
-    finally:
-        logging._releaseLock()  # type: ignore  # noqa
-
 
 ########################################
 # ../../../omlish/lite/runtime.py
@@ -1783,7 +1798,14 @@ TODO:
  - custom tags
  - optionally install / upgrade pyenv itself
  - new vers dont need these custom mac opts, only run on old vers
-"""
+
+TODO opts:
+ - --enable-loadable-sqlite-extensions LDFLAGS="-L/opt/homebrew/opt/sqlite/lib" CPPFLAGS="-I/opt/homebrew/opt/sqlite/include"
+ - --enable-shared
+ - --enable-optimizations
+ - --enable-profiling ?
+ - --enable-ipv6 ?
+"""  # noqa
 
 
 ##
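
The logging change in the bundled script is a version-compatibility shim: older CPython exposes the private `logging._acquireLock()`/`_releaseLock()` pair, while newer versions replace it with a module-level `logging._lock` (the CPython commit linked in the code), so `configure_standard_logging` now holds whichever exists for its whole body instead of the old hand-rolled try/finally. A usage sketch:

    import logging

    # Serialize handler mutation against logging's own private lock, in
    # whichever form the running interpreter provides.
    with _locking_logging_module_lock():
        logging.root.addHandler(logging.StreamHandler())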
|