omdev-0.0.0.dev27-py3-none-any.whl → omdev-0.0.0.dev29-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of omdev has been flagged as possibly problematic by the registry.
- omdev/cache/__init__.py +0 -0
- omdev/cache/comp/__init__.py +0 -0
- omdev/cache/comp/cache.py +137 -0
- omdev/cache/comp/contexts.py +136 -0
- omdev/cache/comp/fns.py +115 -0
- omdev/cache/comp/resolvers.py +23 -0
- omdev/cache/comp/types.py +92 -0
- omdev/{datacache → cache/data}/__init__.py +1 -1
- omdev/{datacache → cache/data}/actions.py +6 -0
- omdev/{datacache → cache/data}/cache.py +5 -6
- omdev/{datacache → cache/data}/defaults.py +3 -3
- omdev/{datacache → cache/data}/manifests.py +1 -1
- omdev/precheck/base.py +37 -0
- omdev/precheck/git.py +34 -0
- omdev/precheck/lite.py +135 -0
- omdev/precheck/precheck.py +5 -221
- omdev/precheck/scripts.py +42 -0
- omdev/tools/piptools.py +26 -0
- {omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/RECORD +25 -13
- /omdev/{datacache → cache/data}/consts.py +0 -0
- /omdev/{datacache → cache/data}/specs.py +0 -0
- {omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/top_level.txt +0 -0
omdev/cache/__init__.py
ADDED
File without changes

omdev/cache/comp/__init__.py
ADDED
File without changes

omdev/cache/comp/cache.py
ADDED

@@ -0,0 +1,137 @@
"""
TODO:
- decorator
- thread local cache instance - but shared
- arbitrary user-specified cache keys
- filesystem OPTIONAL
- locking
- Keyer scheme
- per-module-ish CACHE_VERSION convention
- are pickles stable?
- ComputeCache class
- Cacheable - fn is one
- ttl
- nice to have: np mmap
- compress?
- decos, descriptors, etc
- overlap w/ jobs/dags/batches/whatever
- joblib
- keep src anyway, but just for warn
- strip comments?
- ** INPUTS **
- if underlying impl changes, bust
- kinda reacty/reffy/signally
- decorator unwrapping and shit
- proactive deep invalidate
- tracked and versioned 'ops' but not result cached
- 'Versioned'

manifest stuff
- serialization_version
- lib_version
- lib_revision

fn manifest stuff
- source
- qualname
- location

See:
- https://github.com/amakelov/mandala
- https://jax.readthedocs.io/en/latest/autodidax.html
- tinyjit
- https://docs.python.org/3/library/pickle.html#pickle.Pickler.dispatch_table

names:
- CacheKey = unambiguous, fully qualified, unhashed map key - usually Cacheable + args
- Cacheable = usually a fn
- CacheableName = qualname of a cacheable
- dir structure: __package__/__qualname__/... ?
"""
import copy
import typing as ta

from omlish import collections as col
from omlish import dataclasses as dc

from .types import CacheableName
from .types import CacheableResolver
from .types import CacheableVersionMap
from .types import CacheKey
from .types import CacheResult


class Cache:
    def __init__(
            self,
            resolver: CacheableResolver,
    ) -> None:
        super().__init__()

        self._resolver = resolver

        self._dct: dict[CacheKey, Cache.Entry] = {}

        self._stats = Cache.Stats()

    @dc.dataclass()
    class Stats:
        num_hits: int = 0
        num_misses: int = 0
        num_invalidates: int = 0
        num_puts: int = 0

    @property
    def stats(self) -> Stats:
        return copy.deepcopy(self._stats)

    @dc.dataclass(frozen=True)
    class Entry:
        key: CacheKey
        versions: CacheableVersionMap
        value: ta.Any

        @dc.validate
        def _check_types(self) -> bool:
            return (
                isinstance(self.key, CacheKey) and
                isinstance(self.versions, col.frozendict)
            )

    def _build_version_map(self, names: ta.Iterable[CacheableName]) -> CacheableVersionMap:
        dct = {}
        for n in names:
            c = self._resolver.resolve(n)
            dct[n] = c.version
        return col.frozendict(dct)

    def get(self, key: CacheKey) -> CacheResult | None:
        try:
            entry = self._dct[key]
        except KeyError:
            self._stats.num_misses += 1
            return None

        new_versions = self._build_version_map(entry.versions)
        if entry.versions != new_versions:
            del self._dct[key]
            self._stats.num_invalidates += 1
            return None

        self._stats.num_hits += 1
        return CacheResult(
            True,
            entry.versions,
            entry.value,
        )

    def put(self, key: CacheKey, versions: CacheableVersionMap, val: ta.Any) -> None:
        if key in self._dct:
            raise KeyError(key)

        self._dct[key] = Cache.Entry(
            key,
            versions,
            val,
        )
        self._stats.num_puts += 1
omdev/cache/comp/contexts.py
ADDED

@@ -0,0 +1,136 @@
import contextlib
import typing as ta

from omlish import check
from omlish import lang

from .cache import Cache
from .types import Cacheable
from .types import CacheableVersionMap
from .types import CacheKey
from .types import CacheResult
from .types import merge_version_maps


CacheT = ta.TypeVar('CacheT', bound='Cache')


##


_CURRENT_CACHE: Cache | None = None


@contextlib.contextmanager
def cache_context(cache: CacheT) -> ta.Iterator[CacheT]:
    global _CURRENT_CACHE
    prev = _CURRENT_CACHE
    try:
        _CURRENT_CACHE = cache
        yield cache
    finally:
        check.is_(_CURRENT_CACHE, cache)
        _CURRENT_CACHE = prev


def get_current_cache() -> Cache | None:
    return _CURRENT_CACHE


##


class CacheableContext(lang.Final):
    def __init__(
            self,
            cacheable: Cacheable,
            key: CacheKey,
            *,
            parent: ta.Optional['CacheableContext'] = None,
    ) -> None:
        super().__init__()
        self._cacheable = cacheable
        self._key = key
        self._parent = parent

        self._result: CacheResult | None = None
        self._children: list[CacheableContext] = []

        if parent is not None:
            check.state(not parent.has_result)
            parent._children.append(self)  # noqa

    #

    @property
    def cacheable(self) -> Cacheable:
        return self._cacheable

    @property
    def key(self) -> CacheKey:
        return self._key

    @property
    def parent(self) -> ta.Optional['CacheableContext']:
        return self._parent

    @property
    def children(self) -> ta.Sequence['CacheableContext']:
        return self._children

    #

    @property
    def has_result(self) -> bool:
        return self._result is not None

    def result(self) -> CacheResult:
        return check.not_none(self._result)

    def set_hit(self, result: CacheResult) -> None:
        check.state(result.hit)
        self._result = check.replacing_none(self._result, result)
        self.result_versions()

    def set_miss(self, val: ta.Any) -> None:
        self._result = check.replacing_none(self._result, CacheResult(
            False,
            CacheableVersionMap(),
            val,
        ))
        self.result_versions()

    @lang.cached_function
    def result_versions(self) -> CacheableVersionMap:
        r = check.not_none(self._result)
        return merge_version_maps(
            self._cacheable.as_version_map,
            r.versions,
            *[c.result_versions() for c in self._children],
        )


#


_CURRENT_CACHEABLE_CONTEXT: CacheableContext | None = None


@contextlib.contextmanager
def cacheable_context(
        cacheable: Cacheable,
        key: CacheKey,
) -> ta.Iterator[CacheableContext]:
    global _CURRENT_CACHEABLE_CONTEXT
    prev = _CURRENT_CACHEABLE_CONTEXT
    ctx = CacheableContext(
        cacheable,
        key,
        parent=prev,
    )
    try:
        _CURRENT_CACHEABLE_CONTEXT = ctx
        yield ctx
    finally:
        check.is_(_CURRENT_CACHEABLE_CONTEXT, ctx)
        _CURRENT_CACHEABLE_CONTEXT = prev
omdev/cache/comp/fns.py
ADDED

@@ -0,0 +1,115 @@
import functools
import importlib
import typing as ta

from omlish import cached
from omlish import check
from omlish import collections as col
from omlish import dataclasses as dc
from omlish import lang

from .contexts import cacheable_context
from .contexts import get_current_cache
from .types import Cacheable
from .types import CacheableName
from .types import CacheableResolver
from .types import CacheKey


T = ta.TypeVar('T')


##


@dc.dataclass(frozen=True)
class FnCacheableName(CacheableName, lang.Final):
    module: str
    qualname: str


@dc.dataclass(frozen=True)
class FnCacheable(Cacheable, lang.Final):
    fn: ta.Callable
    version: int = dc.xfield(override=True)

    @cached.property
    def name(self) -> FnCacheableName:
        return FnCacheableName(self.fn.__module__, self.fn.__qualname__)  # noqa


class FnCacheableResolver(CacheableResolver):
    def resolve(self, name: CacheableName) -> Cacheable:
        fname = check.isinstance(name, FnCacheableName)

        mod = importlib.import_module(fname.module)
        obj = mod
        for a in fname.qualname.split('.'):
            obj = getattr(obj, a)

        check.callable(obj)
        fc = check.isinstance(obj.__cacheable__, FnCacheable)

        return fc


@dc.dataclass(frozen=True)
class FnCacheKey(CacheKey[FnCacheableName], lang.Final):
    args: tuple
    kwargs: col.frozendict[str, ta.Any]

    @dc.validate
    def _check_fn_types(self) -> bool:
        return (
            isinstance(self.name, FnCacheableName) and
            isinstance(self.args, tuple) and
            isinstance(self.kwargs, col.frozendict)
        )


##


def cached_fn(version: int) -> ta.Callable[[T], T]:
    def outer(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            # NOTE: just for testing :x allows updating
            # TODO: proper wrapper obj probably (enforce name resolution)
            cacheable = inner.__cacheable__  # type: ignore

            if (cache := get_current_cache()) is not None:
                key = FnCacheKey(
                    cacheable.name,
                    args,
                    col.frozendict(kwargs),
                )

                with cacheable_context(
                        cacheable,
                        key,
                ) as ctx:
                    if (hit := cache.get(key)) is not None:
                        ctx.set_hit(hit)
                        return hit.value

                    val = fn(*args, **kwargs)
                    ctx.set_miss(val)
                    cache.put(
                        key,
                        ctx.result_versions(),
                        val,
                    )
                    return val

            else:
                return fn(*args, **kwargs)

        inner.__cacheable__ = FnCacheable(  # type: ignore
            fn,
            version,
        )

        return inner

    return outer  # noqa
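Read together, the comp-cache pieces compose as a decorator plus an ambient cache context. The following usage sketch is not part of the diff; it is written against the code above, and the module paths and resolver wiring (an FnCacheableResolver wrapped in a CachingCacheableResolver) are assumptions:

from omdev.cache.comp.cache import Cache
from omdev.cache.comp.contexts import cache_context
from omdev.cache.comp.fns import FnCacheableResolver
from omdev.cache.comp.fns import cached_fn
from omdev.cache.comp.resolvers import CachingCacheableResolver


@cached_fn(version=1)  # bumping the version invalidates previously cached results
def square(x: int) -> int:
    return x * x


cache = Cache(CachingCacheableResolver(FnCacheableResolver()))

with cache_context(cache):
    square(3)  # miss: computes, then put() stores the value with its version map
    square(3)  # hit: get() revalidates the version map via the resolver and returns the cached value

print(cache.stats)  # one miss, one put, one hit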
omdev/cache/comp/resolvers.py
ADDED

@@ -0,0 +1,23 @@
from .types import Cacheable
from .types import CacheableName
from .types import CacheableResolver


class CachingCacheableResolver(CacheableResolver):
    def __init__(self, child: CacheableResolver) -> None:
        super().__init__()

        self._child = child
        self._dct: dict[CacheableName, Cacheable] = {}

    def clear(self) -> None:
        self._dct.clear()

    def resolve(self, name: CacheableName) -> Cacheable:
        try:
            return self._dct[name]
        except KeyError:
            pass
        ret = self._child.resolve(name)
        self._dct[name] = ret
        return ret
omdev/cache/comp/types.py
ADDED

@@ -0,0 +1,92 @@
import abc
import typing as ta

from omlish import cached
from omlish import collections as col
from omlish import dataclasses as dc
from omlish import lang


T = ta.TypeVar('T')


##


CacheableNameT = ta.TypeVar('CacheableNameT', bound='CacheableName')


class CacheableName(lang.Abstract):
    pass


##


CacheableVersion: ta.TypeAlias = ta.Hashable
CacheableVersionMap: ta.TypeAlias = col.frozendict['CacheableName', CacheableVersion]


def merge_version_maps(
        *dcts: ta.Mapping[CacheableName, CacheableVersion],
) -> CacheableVersionMap:
    out: dict[CacheableName, CacheableVersion] = {}
    for dct in dcts:
        for name, version in dct.items():
            try:
                ex = out[name]
            except KeyError:
                out[name] = version
            else:
                if ex != version:
                    raise Exception(f'Version mismatch: {ex} {version}')
    return col.frozendict(out)


##


class Cacheable(lang.Abstract):
    @property
    @abc.abstractmethod
    def name(self) -> CacheableName:
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def version(self) -> CacheableVersion:
        raise NotImplementedError

    @cached.property
    def as_version_map(self) -> CacheableVersionMap:
        return col.frozendict({self.name: self.version})


##


@dc.dataclass(frozen=True)
@dc.extra_params(cache_hash=True)
class CacheKey(lang.Abstract, ta.Generic[CacheableNameT]):
    name: CacheableNameT

    @dc.validate
    def _check_types(self) -> bool:
        hash(self)
        return isinstance(self.name, CacheableName)


@dc.dataclass(frozen=True)
class CacheResult(ta.Generic[T], lang.Final):
    hit: bool
    versions: CacheableVersionMap
    value: T


##


class CacheableResolver(lang.Abstract):
    @abc.abstractmethod
    def resolve(self, name: CacheableName) -> Cacheable:
        raise NotImplementedError
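As a small illustration of the version-map semantics above (a sketch against this diff's code, not shipped with the package, and assuming omlish dataclasses construct like stdlib ones): merge_version_maps unions any number of name-to-version mappings and raises on a conflicting version for the same name.

from omdev.cache.comp.fns import FnCacheableName
from omdev.cache.comp.types import merge_version_maps

na = FnCacheableName('pkg.mod', 'f')
nb = FnCacheableName('pkg.mod', 'g')

merge_version_maps({na: 1}, {na: 1, nb: 3})  # ok: frozendict containing both entries
merge_version_maps({na: 1}, {na: 2})         # raises Exception('Version mismatch: 1 2')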
omdev/{datacache → cache/data}/cache.py
CHANGED

@@ -1,11 +1,10 @@
 """
 TODO:
+- mirrors
 - huggingface_hub
-- postprocessing?
-- unarchive
 - stupid little progress bars
--
--
+- groups of multiple files downloaded - 'spec set'? idk
+- torchvision.datasets.FashionMNIST
 """
 import logging
 import os.path
@@ -19,7 +18,7 @@ from omlish import lang
 from omlish import marshal as msh
 from omlish.formats import json
 
-from
+from ... import git
 from .manifests import CacheDataManifest
 from .specs import CacheDataSpec
 from .specs import GitCacheDataSpec
@@ -33,7 +32,7 @@ log = logging.getLogger(__name__)
 ##
 
 
-class
+class Cache:
     def __init__(self, base_dir: str) -> None:
         super().__init__()
         self._base_dir = base_dir

omdev/{datacache → cache/data}/defaults.py
CHANGED

@@ -2,7 +2,7 @@ import os.path
 
 from omlish import lang
 
-from .cache import
+from .cache import Cache
 
 
 ##
@@ -14,5 +14,5 @@ def default_dir() -> str:
 
 
 @lang.cached_function(lock=True)
-def default() ->
-    return
+def default() -> Cache:
+    return Cache(default_dir())
omdev/precheck/base.py
ADDED

@@ -0,0 +1,37 @@
import abc
import dataclasses as dc
import typing as ta


PrecheckConfigT = ta.TypeVar('PrecheckConfigT', bound='Precheck.Config')


##


@dc.dataclass(frozen=True, kw_only=True)
class PrecheckContext:
    src_roots: ta.Sequence[str]


##


class Precheck(abc.ABC, ta.Generic[PrecheckConfigT]):
    @dc.dataclass(frozen=True)
    class Config:
        pass

    def __init__(self, context: PrecheckContext, config: PrecheckConfigT) -> None:
        super().__init__()
        self._context = context
        self._config = config

    @dc.dataclass(frozen=True)
    class Violation:
        pc: 'Precheck'
        msg: str

    @abc.abstractmethod
    def run(self) -> ta.AsyncIterator[Violation]:
        raise NotImplementedError
omdev/precheck/git.py
ADDED

@@ -0,0 +1,34 @@
import asyncio
import dataclasses as dc
import typing as ta

from .base import Precheck
from .base import PrecheckContext


##


class GitBlacklistPrecheck(Precheck['GitBlacklistPrecheck.Config']):
    """
    TODO:
    - globs
    - regex
    """

    @dc.dataclass(frozen=True)
    class Config(Precheck.Config):
        files: ta.Sequence[str] = (
            '.env',
            'secrets.yml',
        )

    def __init__(self, context: PrecheckContext, config: Config = Config()) -> None:
        super().__init__(context, config)

    async def run(self) -> ta.AsyncGenerator[Precheck.Violation, None]:
        for f in self._config.files:
            proc = await asyncio.create_subprocess_exec('git', 'status', '-s', f)
            await proc.communicate()
            if proc.returncode:
                yield Precheck.Violation(self, f)
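The split prechecks are all async generators over Precheck.Violation, so a driver consumes them with async iteration. A minimal sketch, assuming direct construction rather than whatever wiring precheck.py's CLI actually does:

import asyncio

from omdev.precheck.base import PrecheckContext
from omdev.precheck.git import GitBlacklistPrecheck


async def main() -> None:
    ctx = PrecheckContext(src_roots=['omdev'])
    async for v in GitBlacklistPrecheck(ctx).run():
        print(type(v.pc).__name__, v.msg)


asyncio.run(main())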
omdev/precheck/lite.py
ADDED

@@ -0,0 +1,135 @@
import asyncio
import dataclasses as dc
import glob
import inspect
import logging
import os.path
import subprocess
import textwrap
import typing as ta

from omdev import findmagic
from omlish import cached

from .base import Precheck
from .base import PrecheckContext


log = logging.getLogger(__name__)


##


class LitePython8Precheck(Precheck['LitePython8Precheck.Config']):
    @dc.dataclass(frozen=True)
    class Config(Precheck.Config):
        pass

    def __init__(self, context: PrecheckContext, config: Config = Config()) -> None:
        super().__init__(context, config)

    #

    @staticmethod
    def _load_file_module(fp: str) -> None:
        import os.path  # noqa
        import types  # noqa

        fp = os.path.abspath(fp)

        with open(fp) as f:
            src = f.read()

        mn = os.path.basename(fp).rpartition('.')[0]

        mod = types.ModuleType(mn)
        mod.__name__ = mn
        mod.__file__ = fp
        mod.__builtins__ = __builtins__  # type: ignore
        mod.__spec__ = None

        code = compile(src, fp, 'exec')
        exec(code, mod.__dict__, mod.__dict__)

    @cached.function
    def _load_file_module_payload(self) -> str:
        return '\n'.join([
            'import sys',
            'fp = sys.argv[-1]',
            '',
            textwrap.dedent('\n'.join(inspect.getsource(LitePython8Precheck._load_file_module).splitlines()[2:])),
        ])

    #

    async def _run_script(self, fp: str) -> list[Precheck.Violation]:
        log.debug('%s: loading script %s', self.__class__.__name__, fp)

        vs: list[Precheck.Violation] = []

        proc = await asyncio.create_subprocess_exec(
            '.venvs/8/bin/python',
            '-c',
            self._load_file_module_payload(),
            fp,
            stderr=subprocess.PIPE,
        )

        _, stderr = await proc.communicate()
        if proc.returncode != 0:
            vs.append(Precheck.Violation(self, f'lite script {fp} failed to load in python8: {stderr.decode()}'))

        return vs

    async def _run_one_module(self, fp: str) -> list[Precheck.Violation]:
        vs: list[Precheck.Violation] = []

        mod = fp.rpartition('.')[0].replace(os.sep, '.')

        log.debug('%s: loading module %s', self.__class__.__name__, mod)

        proc = await asyncio.create_subprocess_exec(
            '.venvs/8/bin/python',
            '-c',
            f'import {mod}',
            stderr=subprocess.PIPE,
        )

        _, stderr = await proc.communicate()
        if proc.returncode != 0:
            vs.append(Precheck.Violation(self, f'lite module {fp} failed to import in python8: {stderr.decode()}'))  # noqa

        return vs

    async def _run_module(self, fp: str) -> list[Precheck.Violation]:
        vs: list[Precheck.Violation] = []

        if fp.endswith('__init__.py'):
            pfps = glob.glob(os.path.join(os.path.dirname(fp), '**/*.py'), recursive=True)
        else:
            pfps = [fp]

        for pfp in pfps:
            vs.extend(await self._run_one_module(pfp))

        return vs

    async def run(self) -> ta.AsyncGenerator[Precheck.Violation, None]:
        for fp in findmagic.find_magic(
                self._context.src_roots,
                ['# @omlish-lite'],
                ['py'],
        ):
            with open(fp) as f:  # noqa  # FIXME
                src = f.read()

            is_script = '# @omlish-script' in src.splitlines()

            if is_script:
                for v in await self._run_script(fp):
                    yield v

            else:
                for v in await self._run_module(fp):
                    yield v
omdev/precheck/precheck.py
CHANGED

@@ -17,28 +17,20 @@ TODO:
 - https://github.com/pre-commit/pre-commit-hooks?tab=readme-ov-file#forbid-new-submodules
 - don't check in .o's (omdev.ext import hook is dumb w build dir)
 """
-import abc
 import argparse
 import asyncio
-import dataclasses as dc
-import glob
-import inspect
 import logging
 import os.path
-import stat
-import subprocess
 import sys
-import textwrap
 import typing as ta
 
-from omdev import findimports
-from omdev import findmagic
-from omlish import cached
 from omlish import logs
 
-
-
-
+from .base import Precheck
+from .base import PrecheckContext
+from .git import GitBlacklistPrecheck
+from .lite import LitePython8Precheck
+from .scripts import ScriptDepsPrecheck
 
 
 log = logging.getLogger(__name__)
@@ -47,214 +39,6 @@ log = logging.getLogger(__name__)
 ##
 
 
-[208 removed lines collapsed: the PrecheckContext, Precheck, GitBlacklistPrecheck, ScriptDepsPrecheck, and LitePython8Precheck definitions deleted here are verbatim the code added to precheck/base.py, git.py, scripts.py, and lite.py above]
 def _check_cmd(args) -> None:
     if not os.path.isfile('pyproject.toml'):
         raise RuntimeError('must run in project root')
omdev/precheck/scripts.py
ADDED

@@ -0,0 +1,42 @@
import dataclasses as dc
import os
import stat
import typing as ta

from omdev import findimports
from omdev import findmagic

from .base import Precheck
from .base import PrecheckContext


##


class ScriptDepsPrecheck(Precheck['ScriptDepsPrecheck.Config']):
    @dc.dataclass(frozen=True)
    class Config(Precheck.Config):
        pass

    def __init__(self, context: PrecheckContext, config: Config = Config()) -> None:
        super().__init__(context, config)

    async def run(self) -> ta.AsyncGenerator[Precheck.Violation, None]:
        for fp in findmagic.find_magic(
                self._context.src_roots,
                ['# @omlish-script'],
                ['py'],
        ):
            if not (stat.S_IXUSR & os.stat(fp).st_mode):
                yield Precheck.Violation(self, f'script {fp} is not executable')

            with open(fp) as f:  # noqa  # FIXME
                src = f.read()

            if not src.startswith('#!/usr/bin/env python3\n'):
                yield Precheck.Violation(self, f'script {fp} lacks correct shebang')

            imps = findimports.find_imports(fp)
            deps = findimports.get_import_deps(imps)
            if deps:
                yield Precheck.Violation(self, f'script {fp} has deps: {deps}')
omdev/tools/piptools.py
ADDED

@@ -0,0 +1,26 @@
import io
import urllib.request
import xml.etree.ElementTree as ET  # noqa

from omlish import argparse as ap
from omlish import check


PYPI_URL = 'https://pypi.org/'


class Cli(ap.Cli):
    @ap.command(
        ap.arg('package'),
    )
    def lookup_latest_version(self) -> None:
        pkg_name = check.non_empty_str(self.args.package)
        with urllib.request.urlopen(f'{PYPI_URL}rss/project/{pkg_name}/releases.xml') as resp:  # noqa
            rss = resp.read()
        doc = ET.parse(io.BytesIO(rss))  # noqa
        latest = check.not_none(doc.find('./channel/item/title')).text
        print(latest)


if __name__ == '__main__':
    Cli()()
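A hedged usage sketch for the new piptools command; the exact subcommand name produced by omlish's ap.Cli wrapper is an assumption (it may hyphenate the method name):

$ python -m omdev.tools.piptools lookup_latest_version omlish
# prints the title of the newest entry in https://pypi.org/rss/project/omlish/releases.xml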
{omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: omdev
-Version: 0.0.0.dev27
+Version: 0.0.0.dev29
 Summary: omdev
 Author: wrmsr
 License: BSD-3-Clause
@@ -12,7 +12,7 @@ Classifier: Operating System :: OS Independent
 Classifier: Operating System :: POSIX
 Requires-Python: ~=3.12
 License-File: LICENSE
-Requires-Dist: omlish ==0.0.0.
+Requires-Dist: omlish ==0.0.0.dev29
 Provides-Extra: all
 Requires-Dist: pycparser ~=2.22 ; extra == 'all'
 Requires-Dist: cffi ~=1.17 ; extra == 'all'
{omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/RECORD
CHANGED

@@ -14,6 +14,20 @@ omdev/wheelfile.py,sha256=yfupGcGkbFlmzGzKU64k_vmOKpaKnUlDWxeGn2KdekU,10005
 omdev/amalg/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 omdev/amalg/__main__.py,sha256=OE1udULO1g4McUbeg1CoHbSm4hbQ2kcE3ffEGxlnPh4,69
 omdev/amalg/amalg.py,sha256=g7wwcPE2G9qmzh8M9eZAscOYWKo3ldI8bNxEXFnmzLE,14064
+omdev/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+omdev/cache/comp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+omdev/cache/comp/cache.py,sha256=0VDub_za4GlR5gIynqK4CnhhZFs-cdtlf_lwMhgtFa4,3387
+omdev/cache/comp/contexts.py,sha256=1pkZXeNG3Ehrwb6vZHWOgR0zDSXSTKsPrdSDW5tIRKs,3091
+omdev/cache/comp/fns.py,sha256=72yQ1e7NH8GxZ-ppcik1JLMEJGzrBvHgI5PSp4HcXVM,2943
+omdev/cache/comp/resolvers.py,sha256=Do7_PKy-09hyptgKel1vKQnHcpc3wjCP-1St3unTspc,614
+omdev/cache/comp/types.py,sha256=vFrcB8WftWDHQnrHG0lRXMBStWCm2M_Q4Xft-Rz7xnc,1907
+omdev/cache/data/__init__.py,sha256=7286T46a7gUEvC0gvSYZtfyS_h2oZJx0I2zE6UUTJkc,294
+omdev/cache/data/actions.py,sha256=OrCjo0INKeG8iEvXnQ9LI8PD8WF4d1vxrBwQzuA2ASw,829
+omdev/cache/data/cache.py,sha256=VQQpC5c2TcpLtFG87nkMdJ6_jgmRsocRuZe2FdzTddo,4768
+omdev/cache/data/consts.py,sha256=d6W_aeMqgah6PmPYi9RA8Be54oQ4BcNCy8kDQ7FlB_Q,26
+omdev/cache/data/defaults.py,sha256=HrapVUIf9Ozu3qSfRPyQj-vx-dz6Yyedjb-k3yV4CW8,277
+omdev/cache/data/manifests.py,sha256=6SqUiAlzkuDwIApEjzh5P6VimRVGk3iMwXjEPH2zvrc,1006
+omdev/cache/data/specs.py,sha256=ggvbp4nSEnXJImt_stVh4OsOYNzPIVGXAWplr17yEyg,2290
 omdev/cexts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 omdev/cexts/_boilerplate.cc,sha256=aOWF_5C2pqnIrkT1ykEaL7N2pIpamW6pdXriRbd3lvs,1725
 omdev/cexts/build.py,sha256=F3z1-CjDlEM-Gzi5IunKUBO52qdH_pMsFylobTdGJnI,2654
@@ -37,13 +51,6 @@ omdev/cexts/_distutils/compilers/__init__.py,sha256=amL_zrFlba0lHIvpqDne9uhqhLem
 omdev/cexts/_distutils/compilers/ccompiler.py,sha256=cTs88qrvj0hBVXHfemSDE_du_nEA4_qo3Qst5TpQkVI,43606
 omdev/cexts/_distutils/compilers/options.py,sha256=H7r5IcLvga5Fs3jjXWIT-6ap3JBduXRKgtpDmSGCZxs,3818
 omdev/cexts/_distutils/compilers/unixccompiler.py,sha256=o1h8QuyupLntv4F21_XjzAZmCiwwxJuTmOirvBSL-Qw,15419
-omdev/datacache/__init__.py,sha256=Jn_QRQn8L3-L0P-w9s5sBBku2tnFTOS3oc_U2PF8SSk,298
-omdev/datacache/actions.py,sha256=uqG6UM16o7GFDEoCWwzNKSE95fOGYmSLKGRvV7pokz4,783
-omdev/datacache/cache.py,sha256=nb7tu8FnPhpI8sK5uwoec4RZJ7eEd8wiEGwqYC6UeH0,4785
-omdev/datacache/consts.py,sha256=d6W_aeMqgah6PmPYi9RA8Be54oQ4BcNCy8kDQ7FlB_Q,26
-omdev/datacache/defaults.py,sha256=eRwkBRVaPSgYsfejgo-3SJGx2vyX_H4azhJ_un6-f74,289
-omdev/datacache/manifests.py,sha256=J8bK05xq0_r8dMBK7GsYT1-zO2jk-fx7Ati0S_IHN0Y,1005
-omdev/datacache/specs.py,sha256=ggvbp4nSEnXJImt_stVh4OsOYNzPIVGXAWplr17yEyg,2290
 omdev/interp/__init__.py,sha256=Y3l4WY4JRi2uLG6kgbGp93fuGfkxkKwZDvhsa0Rwgtk,15
 omdev/interp/__main__.py,sha256=gFhR9DikwDZk0LqgdR3qq_aXQHThUOPllDmHDOfnFAU,67
 omdev/interp/cli.py,sha256=8T3qLXTC2mni5FXDHkHN3mZG9_BnjkDMXYy6EYbAYR8,1679
@@ -58,7 +65,11 @@ omdev/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 omdev/mypy/debug.py,sha256=WcZw-3Z1njg_KFGqi3DB6RuqbBa3dLArJnjVCuY1Mn0,3003
 omdev/precheck/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 omdev/precheck/__main__.py,sha256=wKF_2KP2Yn1hKDEOCGR_fm5zu9UHMWCZtuEmWjpprrU,72
-omdev/precheck/
+omdev/precheck/base.py,sha256=a_lGoFM-QhL8u8XDUYFhb-feEyfPbP4j8lcmNO51sHY,732
+omdev/precheck/git.py,sha256=APC5Ln7x0zDrQiGPRWPsBcVJK3vWhbU-brqR5M63JQA,849
+omdev/precheck/lite.py,sha256=MLeDZP2UexNZzYTcSx4-LrhA97kCKn8tXrGkhsJb6I0,3649
+omdev/precheck/precheck.py,sha256=LUqh501TpLgBL_ZBW-enEMqaYnWzPMViECpnHGKaCIc,2462
+omdev/precheck/scripts.py,sha256=qq6MXkxgrYngPg5pWnXH4uRSuRkP3mFqbeml1UmvGBc,1265
 omdev/pyproject/__init__.py,sha256=Y3l4WY4JRi2uLG6kgbGp93fuGfkxkKwZDvhsa0Rwgtk,15
 omdev/pyproject/__main__.py,sha256=gFhR9DikwDZk0LqgdR3qq_aXQHThUOPllDmHDOfnFAU,67
 omdev/pyproject/cexts.py,sha256=x13piOOnNrYbA17qZLDVuR0p1sqhgEwpk4FtImX-klM,4281
@@ -79,13 +90,14 @@ omdev/tools/dockertools.py,sha256=0RoUThTqv4ySJZX0aufYeQWD2bp-BMplQ8Y4WvDpguA,52
 omdev/tools/gittools.py,sha256=zPy2D5WDs-CbwT86_T_hbaq5yCuss5e-ouUccXC6xlg,578
 omdev/tools/importscan.py,sha256=XRLiasVSaTIp-jnO0-Nfhi0t6gnv_hVy5j2nVfEvuMI,3831
 omdev/tools/importtrace.py,sha256=oDry9CwIv5h96wSaTVKJ0qQ5vMGxYE5oBtfF-GYNLJs,13430
+omdev/tools/piptools.py,sha256=etJBfjtUZ5HeQ7C4sxWmeshT-xXPdhScqEHWbca98n0,657
 omdev/tools/rst.py,sha256=6dWk8QZHoGiLSuBw3TKsXZjjFK6wWBEtPi9krdCLKKg,977
 omdev/tools/sqlrepl.py,sha256=v9uVQ4nvquSXcQVYIFq34ikumSILvKqzD6lUKLcncCE,5646
 omdev/versioning/__init__.py,sha256=Y3l4WY4JRi2uLG6kgbGp93fuGfkxkKwZDvhsa0Rwgtk,15
 omdev/versioning/specifiers.py,sha256=6Odf9e6farwlPRsD_YqwTfYKG-BXn_dIcKtqfkhfodI,17432
 omdev/versioning/versions.py,sha256=ei2eopEsJq3zSMJmezK1nzZgikgCdxFtnF3f69nCRZQ,12246
-omdev-0.0.0.
-omdev-0.0.0.
-omdev-0.0.0.
-omdev-0.0.0.
-omdev-0.0.0.
+omdev-0.0.0.dev29.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+omdev-0.0.0.dev29.dist-info/METADATA,sha256=_kVRNNJ4CIoIyEtgTCcAJzicnJevUfpkRjtXGcONxCQ,1252
+omdev-0.0.0.dev29.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+omdev-0.0.0.dev29.dist-info/top_level.txt,sha256=1nr7j30fEWgLYHW3lGR9pkdHkb7knv1U1ES1XRNVQ6k,6
+omdev-0.0.0.dev29.dist-info/RECORD,,
/omdev/{datacache → cache/data}/consts.py
File without changes

/omdev/{datacache → cache/data}/specs.py
File without changes

{omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/LICENSE
File without changes

{omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/WHEEL
File without changes

{omdev-0.0.0.dev27.dist-info → omdev-0.0.0.dev29.dist-info}/top_level.txt
File without changes