omdev 0.0.0.dev209__py3-none-any.whl → 0.0.0.dev211__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
omdev/scripts/ci.py ADDED
@@ -0,0 +1,3435 @@
1
+ #!/usr/bin/env python3
2
+ # noinspection DuplicatedCode
3
+ # @omlish-lite
4
+ # @omlish-script
5
+ # @omlish-amalg-output ../ci/cli.py
6
+ # ruff: noqa: N802 UP006 UP007 UP036
7
+ """
8
+ Inputs:
9
+ - requirements.txt
10
+ - ci.Dockerfile
11
+ - compose.yml
12
+
13
+ ==
14
+
15
+ ./python -m ci run --cache-dir ci/cache ci/project omlish-ci
16
+ """
17
+ import abc
18
+ import argparse
19
+ import asyncio
20
+ import collections
21
+ import contextlib
22
+ import dataclasses as dc
23
+ import datetime
24
+ import functools
25
+ import hashlib
26
+ import inspect
27
+ import itertools
28
+ import json
29
+ import logging
30
+ import os
31
+ import os.path
32
+ import shlex
33
+ import shutil
34
+ import subprocess
35
+ import sys
36
+ import tarfile
37
+ import tempfile
38
+ import threading
39
+ import time
40
+ import types
41
+ import typing as ta
42
+
43
+
44
+ ########################################
45
+
46
+
47
# Fail fast on unsupported interpreters: the amalgamated script targets 3.8+
# (walrus operator, typing features used below).
if sys.version_info < (3, 8):
    raise OSError(f'Requires python (3, 8), got {sys.version_info} from {sys.executable}')  # noqa
49
+
50
+
51
+ ########################################
52
+
53
+
54
# Type variables and aliases gathered here from each amalgamated source module.

# shell.py
T = ta.TypeVar('T')

# ../../omlish/lite/cached.py
CallableT = ta.TypeVar('CallableT', bound=ta.Callable)

# ../../omlish/lite/check.py
SizedT = ta.TypeVar('SizedT', bound=ta.Sized)
# A check-failure message: a plain string, a callable producing one (or a
# (message, *exc_args) tuple), or None for the default message.
CheckMessage = ta.Union[str, ta.Callable[..., ta.Optional[str]], None]  # ta.TypeAlias
CheckLateConfigureFn = ta.Callable[['Checks'], None]  # ta.TypeAlias
CheckOnRaiseFn = ta.Callable[[Exception], None]  # ta.TypeAlias
CheckExceptionFactory = ta.Callable[..., Exception]  # ta.TypeAlias
CheckArgsRenderer = ta.Callable[..., ta.Optional[str]]  # ta.TypeAlias

# ../../omlish/argparse/cli.py
ArgparseCmdFn = ta.Callable[[], ta.Optional[int]]  # ta.TypeAlias

# ../../omlish/lite/contextmanagers.py
ExitStackedT = ta.TypeVar('ExitStackedT', bound='ExitStacked')

# ../../omlish/subprocesses.py
SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
76
+
77
+
78
+ ########################################
79
+ # ../shell.py
80
+
81
+
82
+ ##
83
+
84
+
85
@dc.dataclass(frozen=True)
class ShellCmd:
    """A shell command string plus optional extra environment variables.

    `run` executes the command via `sh -c`, forwarding it to an arbitrary
    subprocess-style callable.
    """

    s: str

    env: ta.Optional[ta.Mapping[str, str]] = None

    def build_run_kwargs(
            self,
            *,
            env: ta.Optional[ta.Mapping[str, str]] = None,
            **kwargs: ta.Any,
    ) -> ta.Dict[str, ta.Any]:
        """Merge the command's own env over `env` (default: os.environ).

        Raises KeyError if any of the command's env keys would clobber one
        already present in the base environment.
        """
        base = os.environ if env is None else env
        if self.env:
            clobbered = set(base) & set(self.env)
            if clobbered:
                raise KeyError(*clobbered)
            base = {**base, **self.env}

        return {'env': base, **kwargs}

    def run(self, fn: ta.Callable[..., T], **kwargs) -> T:
        """Invoke `fn('sh', '-c', self.s, env=..., **kwargs)` and return its result."""
        run_kwargs = self.build_run_kwargs(**kwargs)
        return fn('sh', '-c', self.s, **run_kwargs)
114
+
115
+
116
+ ########################################
117
+ # ../../../omlish/lite/cached.py
118
+
119
+
120
+ ##
121
+
122
+
123
class _AbstractCachedNullary:
    """Base for zero-argument memoizing wrappers (sync/async subclasses below).

    Also implements the descriptor protocol so the decorators work on methods:
    attribute access binds the wrapped function and caches a fresh wrapper on
    the instance's __dict__, bypassing this descriptor on later lookups.
    """

    def __init__(self, fn):
        super().__init__()
        self._fn = fn
        # Single fresh object doubles as the "not yet computed" sentinel.
        self._value = self._missing = object()
        functools.update_wrapper(self, fn)

    def __call__(self, *args, **kwargs):  # noqa
        # Subclasses supply the actual call-and-cache behavior.
        raise TypeError

    def __get__(self, instance, owner):  # noqa
        # Descriptor hook: memoize a per-instance bound wrapper.
        bound = instance.__dict__[self._fn.__name__] = self.__class__(self._fn.__get__(instance, owner))
        return bound
136
+
137
+
138
+ ##
139
+
140
+
141
class _CachedNullary(_AbstractCachedNullary):
    """Synchronous variant: first call runs `fn`; later calls return the cached value."""

    def __call__(self, *args, **kwargs):  # noqa
        if self._value is self._missing:
            self._value = self._fn()
        return self._value
146
+
147
+
148
def cached_nullary(fn: CallableT) -> CallableT:
    """Decorator: memoize a zero-argument callable (also usable on methods)."""
    return _CachedNullary(fn)  # type: ignore
150
+
151
+
152
def static_init(fn: CallableT) -> CallableT:
    """Memoize `fn` and invoke it immediately (import-time initialization hook)."""
    wrapper = cached_nullary(fn)
    wrapper()
    return wrapper
156
+
157
+
158
+ ##
159
+
160
+
161
class _AsyncCachedNullary(_AbstractCachedNullary):
    """Async variant: awaits `fn` once and caches the awaited result."""

    async def __call__(self, *args, **kwargs):
        if self._value is self._missing:
            self._value = await self._fn()
        return self._value
166
+
167
+
168
def async_cached_nullary(fn):  # ta.Callable[..., T]) -> ta.Callable[..., T]:
    """Decorator: memoize a zero-argument async callable."""
    return _AsyncCachedNullary(fn)
170
+
171
+
172
+ ########################################
173
+ # ../../../omlish/lite/check.py
174
+ """
175
+ TODO:
176
+ - def maybe(v: lang.Maybe[T])
177
+ - def not_ ?
178
+ - ** class @dataclass Raise - user message should be able to be an exception type or instance or factory
179
+ """
180
+
181
+
182
+ ##
183
+
184
+
185
class Checks:
    """Runtime assertion helpers (used via the module-level `check` singleton).

    Each check method validates a condition and returns its argument on
    success; on failure it funnels through `_raise`, which builds an
    exception via a configurable factory, optionally renders the offending
    arguments, runs registered on-raise hooks, and raises. Configuration
    mutation is guarded by an RLock.
    """

    def __init__(self) -> None:
        super().__init__()

        self._config_lock = threading.RLock()
        self._on_raise_fns: ta.Sequence[CheckOnRaiseFn] = []
        self._exception_factory: CheckExceptionFactory = Checks.default_exception_factory
        self._args_renderer: ta.Optional[CheckArgsRenderer] = None
        self._late_configure_fns: ta.Sequence[CheckLateConfigureFn] = []

    @staticmethod
    def default_exception_factory(exc_cls: ta.Type[Exception], *args, **kwargs) -> Exception:
        # Default: just instantiate the requested exception type.
        return exc_cls(*args, **kwargs)  # noqa

    #

    def register_on_raise(self, fn: CheckOnRaiseFn) -> None:
        """Add a callback invoked with the exception just before each check raises."""
        with self._config_lock:
            self._on_raise_fns = [*self._on_raise_fns, fn]

    def unregister_on_raise(self, fn: CheckOnRaiseFn) -> None:
        with self._config_lock:
            self._on_raise_fns = [e for e in self._on_raise_fns if e != fn]

    #

    def set_exception_factory(self, factory: CheckExceptionFactory) -> None:
        self._exception_factory = factory

    def set_args_renderer(self, renderer: ta.Optional[CheckArgsRenderer]) -> None:
        self._args_renderer = renderer

    #

    def register_late_configure(self, fn: CheckLateConfigureFn) -> None:
        """Queue a configuration callback run lazily on the first failing check."""
        with self._config_lock:
            self._late_configure_fns = [*self._late_configure_fns, fn]

    def _late_configure(self) -> None:
        # Lock-free fast path when nothing is queued.
        if not self._late_configure_fns:
            return

        with self._config_lock:
            if not (lc := self._late_configure_fns):
                return

            for fn in lc:
                fn(self)

            self._late_configure_fns = []

    #

    class _ArgsKwargs:
        # Lightweight carrier for the values a failing check was called with.
        def __init__(self, *args, **kwargs):
            self.args = args
            self.kwargs = kwargs

    def _raise(
            self,
            exception_type: ta.Type[Exception],
            default_message: str,
            message: CheckMessage,
            ak: _ArgsKwargs = _ArgsKwargs(),
            *,
            render_fmt: ta.Optional[str] = None,
    ) -> ta.NoReturn:
        """Build, hook, and raise the failure exception for a check.

        `message` may be None (use `default_message`), a string, or a callable
        returning a string or a (message, *extra_exc_args) tuple.
        """
        exc_args = ()
        if callable(message):
            message = ta.cast(ta.Callable, message)(*ak.args, **ak.kwargs)
            if isinstance(message, tuple):
                message, *exc_args = message  # type: ignore

        if message is None:
            message = default_message

        self._late_configure()

        if render_fmt is not None and (af := self._args_renderer) is not None:
            rendered_args = af(render_fmt, *ak.args)
            if rendered_args is not None:
                message = f'{message} : {rendered_args}'

        exc = self._exception_factory(
            exception_type,
            message,
            *exc_args,
            *ak.args,
            **ak.kwargs,
        )

        for fn in self._on_raise_fns:
            fn(exc)

        raise exc

    #

    def _unpack_isinstance_spec(self, spec: ta.Any) -> tuple:
        # Normalize a type spec into a tuple usable with isinstance():
        # bare None becomes NoneType, ta.Any matches everything.
        if isinstance(spec, type):
            return (spec,)
        if not isinstance(spec, tuple):
            spec = (spec,)
        if None in spec:
            spec = tuple(filter(None, spec)) + (None.__class__,)  # noqa
        if ta.Any in spec:
            spec = (object,)
        return spec

    def isinstance(self, v: ta.Any, spec: ta.Union[ta.Type[T], tuple], msg: CheckMessage = None) -> T:  # noqa
        if not isinstance(v, self._unpack_isinstance_spec(spec)):
            self._raise(
                TypeError,
                'Must be instance',
                msg,
                Checks._ArgsKwargs(v, spec),
                render_fmt='not isinstance(%s, %s)',
            )

        return v

    def of_isinstance(self, spec: ta.Union[ta.Type[T], tuple], msg: CheckMessage = None) -> ta.Callable[[ta.Any], T]:
        """Curried form of `isinstance` for use as a validator callable."""
        def inner(v):
            return self.isinstance(v, self._unpack_isinstance_spec(spec), msg)

        return inner

    def cast(self, v: ta.Any, cls: ta.Type[T], msg: CheckMessage = None) -> T:  # noqa
        if not isinstance(v, cls):
            self._raise(
                TypeError,
                'Must be instance',
                msg,
                Checks._ArgsKwargs(v, cls),
            )

        return v

    def of_cast(self, cls: ta.Type[T], msg: CheckMessage = None) -> ta.Callable[[T], T]:
        def inner(v):
            return self.cast(v, cls, msg)

        return inner

    def not_isinstance(self, v: T, spec: ta.Any, msg: CheckMessage = None) -> T:  # noqa
        if isinstance(v, self._unpack_isinstance_spec(spec)):
            self._raise(
                TypeError,
                'Must not be instance',
                msg,
                Checks._ArgsKwargs(v, spec),
                render_fmt='isinstance(%s, %s)',
            )

        return v

    def of_not_isinstance(self, spec: ta.Any, msg: CheckMessage = None) -> ta.Callable[[T], T]:
        def inner(v):
            return self.not_isinstance(v, self._unpack_isinstance_spec(spec), msg)

        return inner

    ##

    def issubclass(self, v: ta.Type[T], spec: ta.Any, msg: CheckMessage = None) -> ta.Type[T]:  # noqa
        if not issubclass(v, spec):
            self._raise(
                TypeError,
                'Must be subclass',
                msg,
                Checks._ArgsKwargs(v, spec),
                render_fmt='not issubclass(%s, %s)',
            )

        return v

    def not_issubclass(self, v: ta.Type[T], spec: ta.Any, msg: CheckMessage = None) -> ta.Type[T]:  # noqa
        if issubclass(v, spec):
            self._raise(
                TypeError,
                'Must not be subclass',
                msg,
                Checks._ArgsKwargs(v, spec),
                render_fmt='issubclass(%s, %s)',
            )

        return v

    #

    def in_(self, v: T, c: ta.Container[T], msg: CheckMessage = None) -> T:
        if v not in c:
            self._raise(
                ValueError,
                'Must be in',
                msg,
                Checks._ArgsKwargs(v, c),
                render_fmt='%s not in %s',
            )

        return v

    def not_in(self, v: T, c: ta.Container[T], msg: CheckMessage = None) -> T:
        if v in c:
            self._raise(
                ValueError,
                'Must not be in',
                msg,
                Checks._ArgsKwargs(v, c),
                render_fmt='%s in %s',
            )

        return v

    def empty(self, v: SizedT, msg: CheckMessage = None) -> SizedT:
        if len(v) != 0:
            self._raise(
                ValueError,
                'Must be empty',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )

        return v

    def iterempty(self, v: ta.Iterable[T], msg: CheckMessage = None) -> ta.Iterable[T]:
        # NOTE: probing with next() consumes one element when v is a one-shot iterator.
        it = iter(v)
        try:
            next(it)
        except StopIteration:
            pass
        else:
            self._raise(
                ValueError,
                'Must be empty',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )

        return v

    def not_empty(self, v: SizedT, msg: CheckMessage = None) -> SizedT:
        if len(v) == 0:
            self._raise(
                ValueError,
                'Must not be empty',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )

        return v

    def unique(self, it: ta.Iterable[T], msg: CheckMessage = None) -> ta.Iterable[T]:
        dupes = [e for e, c in collections.Counter(it).items() if c > 1]
        if dupes:
            self._raise(
                ValueError,
                'Must be unique',
                msg,
                Checks._ArgsKwargs(it, dupes),
            )

        return it

    def single(self, obj: ta.Iterable[T], message: CheckMessage = None) -> T:
        """Return the sole element of `obj`; fail if it has zero or 2+ elements."""
        try:
            [value] = obj
        except ValueError:
            self._raise(
                ValueError,
                'Must be single',
                message,
                Checks._ArgsKwargs(obj),
                render_fmt='%s',
            )

        return value

    def opt_single(self, obj: ta.Iterable[T], message: CheckMessage = None) -> ta.Optional[T]:
        """Return the sole element of `obj`, None if empty; fail on 2+ elements."""
        it = iter(obj)
        try:
            value = next(it)
        except StopIteration:
            return None

        try:
            next(it)
        except StopIteration:
            return value  # noqa

        self._raise(
            ValueError,
            'Must be empty or single',
            message,
            Checks._ArgsKwargs(obj),
            render_fmt='%s',
        )

        # Unreachable: _raise never returns; keeps type-checkers satisfied.
        raise RuntimeError  # noqa

    #

    def none(self, v: ta.Any, msg: CheckMessage = None) -> None:
        if v is not None:
            self._raise(
                ValueError,
                'Must be None',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )

    def not_none(self, v: ta.Optional[T], msg: CheckMessage = None) -> T:
        if v is None:
            self._raise(
                ValueError,
                'Must not be None',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )

        return v

    #

    def equal(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
        if o != v:
            self._raise(
                ValueError,
                'Must be equal',
                msg,
                Checks._ArgsKwargs(v, o),
                render_fmt='%s != %s',
            )

        return v

    def is_(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
        if o is not v:
            self._raise(
                ValueError,
                'Must be the same',
                msg,
                Checks._ArgsKwargs(v, o),
                render_fmt='%s is not %s',
            )

        return v

    def is_not(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
        if o is v:
            self._raise(
                ValueError,
                'Must not be the same',
                msg,
                Checks._ArgsKwargs(v, o),
                render_fmt='%s is %s',
            )

        return v

    def callable(self, v: T, msg: CheckMessage = None) -> T:  # noqa
        if not callable(v):
            self._raise(
                TypeError,
                'Must be callable',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )

        return v  # type: ignore

    def non_empty_str(self, v: ta.Optional[str], msg: CheckMessage = None) -> str:
        if not isinstance(v, str) or not v:
            self._raise(
                ValueError,
                'Must be non-empty str',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )

        return v

    def replacing(self, expected: ta.Any, old: ta.Any, new: T, msg: CheckMessage = None) -> T:
        """Return `new` after verifying the value being replaced equals `expected`."""
        if old != expected:
            self._raise(
                ValueError,
                'Must be replacing',
                msg,
                Checks._ArgsKwargs(expected, old, new),
                render_fmt='%s -> %s -> %s',
            )

        return new

    def replacing_none(self, old: ta.Any, new: T, msg: CheckMessage = None) -> T:
        if old is not None:
            self._raise(
                ValueError,
                'Must be replacing None',
                msg,
                Checks._ArgsKwargs(old, new),
                render_fmt='%s -> %s',
            )

        return new

    #

    def arg(self, v: bool, msg: CheckMessage = None) -> None:
        if not v:
            self._raise(
                RuntimeError,
                'Argument condition not met',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )

    def state(self, v: bool, msg: CheckMessage = None) -> None:
        if not v:
            self._raise(
                RuntimeError,
                'State condition not met',
                msg,
                Checks._ArgsKwargs(v),
                render_fmt='%s',
            )
619
+
620
+
621
# Module-level singleton used by the rest of the amalgamated script.
check = Checks()
622
+
623
+
624
+ ########################################
625
+ # ../../../omlish/lite/json.py
626
+
627
+
628
+ ##
629
+
630
+
631
JSON_PRETTY_INDENT = 2

JSON_PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
    indent=JSON_PRETTY_INDENT,
)

# NOTE(review): json.dump returns None and json.dumps returns str; the `bytes`
# annotations below come from the amalgamation source and are type-ignored.
json_dump_pretty: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON_PRETTY_KWARGS)  # type: ignore
json_dumps_pretty: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_PRETTY_KWARGS)


##


JSON_COMPACT_SEPARATORS = (',', ':')

JSON_COMPACT_KWARGS: ta.Mapping[str, ta.Any] = dict(
    indent=None,
    separators=JSON_COMPACT_SEPARATORS,
)

json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON_COMPACT_KWARGS)  # type: ignore
json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
653
+
654
+
655
+ ########################################
656
+ # ../../../omlish/lite/logs.py
657
+
658
+
659
# Module-level logger, per the lite-logs convention of the amalgamated modules.
log = logging.getLogger(__name__)
660
+
661
+
662
+ ########################################
663
+ # ../../../omlish/lite/reflect.py
664
+
665
+
666
+ ##
667
+
668
+
669
# Alias classes that identify a subscripted typing generic; _SpecialGenericAlias
# only exists on newer typing versions, so it is included conditionally.
_GENERIC_ALIAS_TYPES = (
    ta._GenericAlias,  # type: ignore  # noqa
    *([ta._SpecialGenericAlias] if hasattr(ta, '_SpecialGenericAlias') else []),  # noqa
)


def is_generic_alias(obj, *, origin: ta.Any = None) -> bool:
    """True if `obj` is a typing generic alias, optionally with the given origin."""
    if not isinstance(obj, _GENERIC_ALIAS_TYPES):
        return False
    return origin is None or ta.get_origin(obj) is origin
680
+
681
+
682
# Partial applications of is_generic_alias for the two most common origins.
is_union_alias = functools.partial(is_generic_alias, origin=ta.Union)
is_callable_alias = functools.partial(is_generic_alias, origin=ta.Callable)
684
+
685
+
686
+ ##
687
+
688
+
689
def is_optional_alias(spec: ta.Any) -> bool:
    """True only for `Optional[X]` shapes: a two-argument Union including None."""
    if not isinstance(spec, _GENERIC_ALIAS_TYPES):  # noqa
        return False
    if ta.get_origin(spec) is not ta.Union:
        return False
    args = ta.get_args(spec)
    return len(args) == 2 and any(a in (None, type(None)) for a in args)
696
+
697
+
698
def get_optional_alias_arg(spec: ta.Any) -> ta.Any:
    """Return the non-None member of an Optional[...] alias.

    Raises ValueError (via unpacking) if there is not exactly one such member.
    """
    non_none = [a for a in ta.get_args(spec) if a not in (None, type(None))]
    [arg] = non_none
    return arg
701
+
702
+
703
+ ##
704
+
705
+
706
def is_new_type(spec: ta.Any) -> bool:
    """True if `spec` was produced by ta.NewType, across CPython versions."""
    if isinstance(ta.NewType, type):
        # NewType is itself a class here, so its products are plain instances.
        return isinstance(spec, ta.NewType)
    else:
        # Before https://github.com/python/cpython/commit/c2f33dfc83ab270412bf243fb21f724037effa1a
        # NewType returned a closure; identify it by its shared code object.
        return isinstance(spec, types.FunctionType) and spec.__code__ is ta.NewType.__code__.co_consts[1]  # type: ignore  # noqa
712
+
713
+
714
def get_new_type_supertype(spec: ta.Any) -> ta.Any:
    """Unwrap one level of ta.NewType, returning its declared supertype."""
    supertype = spec.__supertype__
    return supertype
716
+
717
+
718
+ ##
719
+
720
+
721
def is_literal_type(spec: ta.Any) -> bool:
    """True for ta.Literal[...] aliases, across typing implementations."""
    if hasattr(ta, '_LiteralGenericAlias'):
        return isinstance(spec, ta._LiteralGenericAlias)  # noqa
    else:
        # Fallback for typing versions without _LiteralGenericAlias.
        return (
            isinstance(spec, ta._GenericAlias) and  # type: ignore  # noqa
            spec.__origin__ is ta.Literal
        )
729
+
730
+
731
def get_literal_type_args(spec: ta.Any) -> ta.Iterable[ta.Any]:
    """Return the values enumerated by a Literal[...] alias."""
    args = spec.__args__
    return args
733
+
734
+
735
+ ##
736
+
737
+
738
def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:
    """Yield all transitive subclasses of `cls`, depth-first, deduplicated."""
    visited = set()
    stack = list(reversed(cls.__subclasses__()))
    while stack:
        sub = stack.pop()
        if sub in visited:
            continue
        visited.add(sub)
        yield sub
        stack.extend(reversed(sub.__subclasses__()))
748
+
749
+
750
+ ########################################
751
+ # ../../../omlish/lite/strings.py
752
+
753
+
754
+ ##
755
+
756
+
757
def camel_case(name: str, *, lower: bool = False) -> str:
    """Convert snake_case to CamelCase (or camelCase when lower=True)."""
    if not name:
        return ''
    out = ''.join(part.capitalize() for part in name.split('_'))  # noqa
    if not lower:
        return out
    return out[0].lower() + out[1:]
764
+
765
+
766
def snake_case(name: str) -> str:
    """Convert CamelCase to snake_case, splitting before every uppercase letter."""
    cuts = [i for i, ch in enumerate(name) if ch.isupper()]
    pieces = [name[a:b].lower() for a, b in zip([None, *cuts], [*cuts, None])]
    return '_'.join(pieces).strip('_')
769
+
770
+
771
+ ##
772
+
773
+
774
def is_dunder(name: str) -> bool:
    """True for __dunder__ names: '__' on both ends, no third underscore, len > 4."""
    if len(name) <= 4:
        return False
    if not (name.startswith('__') and name.endswith('__')):
        return False
    return name[2:3] != '_' and name[-3:-2] != '_'
781
+
782
+
783
def is_sunder(name: str) -> bool:
    """True for _sunder_ names: single '_' on both ends, len > 2.

    Fix: the length check now comes first, so empty (and 1-char) strings
    return False instead of raising IndexError on `name[0]` — matching the
    guard order of the equivalent helper in CPython's enum module.
    """
    return (
        len(name) > 2 and
        name[0] == name[-1] == '_' and
        name[1:2] != '_' and
        name[-2:-1] != '_'
    )
790
+
791
+
792
+ ##
793
+
794
+
795
def strip_with_newline(s: str) -> str:
    """Strip surrounding whitespace; non-empty input gets exactly one trailing newline."""
    if not s:
        return ''
    return f'{s.strip()}\n'
799
+
800
+
801
@ta.overload
def split_keep_delimiter(s: str, d: str) -> str:
    ...


@ta.overload
def split_keep_delimiter(s: bytes, d: bytes) -> bytes:
    ...


def split_keep_delimiter(s, d):
    """Split `s` on `d`, keeping the delimiter at the end of each piece.

    The final piece has no trailing delimiter when `s` does not end with one;
    empty input yields an empty list.
    """
    parts = []
    pos = 0
    size = len(s)
    while pos < size:
        hit = s.find(d, pos)
        if hit < pos:
            # No further delimiter: take the remainder and stop.
            parts.append(s[pos:])
            break
        parts.append(s[pos:hit + 1])
        pos = hit + 1
    return parts
821
+
822
+
823
+ ##
824
+
825
+
826
def attr_repr(obj: ta.Any, *attrs: str) -> str:
    """Build a repr like `ClassName(a=..., b=...)` from the named attributes."""
    body = ', '.join(f'{a}={getattr(obj, a)!r}' for a in attrs)
    return f'{type(obj).__name__}({body})'
828
+
829
+
830
+ ##
831
+
832
+
833
# Binary-scaled suffixes: index i corresponds to 1024**i bytes.
FORMAT_NUM_BYTES_SUFFIXES: ta.Sequence[str] = ['B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB']


def format_num_bytes(num_bytes: int) -> str:
    """Render a byte count with the smallest fitting suffix.

    Whole values print as integers ('1kB'); fractional ones with two decimals
    ('1.50kB'). Counts beyond the largest suffix fall back to that suffix.
    """
    for exp, suffix in enumerate(FORMAT_NUM_BYTES_SUFFIXES):
        scaled = num_bytes / 1024 ** exp
        if num_bytes < 1024 ** (exp + 1):
            if scaled.is_integer():
                return f'{int(scaled)}{suffix}'
            return f'{scaled:.2f}{suffix}'

    top = len(FORMAT_NUM_BYTES_SUFFIXES) - 1
    return f'{num_bytes / 1024 ** top:.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'
846
+
847
+
848
+ ########################################
849
+ # ../../../omlish/logs/filters.py
850
+
851
+
852
class TidLogFilter(logging.Filter):
    """Log filter that never drops records; it only stamps each with the
    native OS thread id as `record.tid` (for use in formatters)."""

    def filter(self, record):
        setattr(record, 'tid', threading.get_native_id())  # noqa
        return True
856
+
857
+
858
+ ########################################
859
+ # ../../../omlish/logs/proxy.py
860
+
861
+
862
class ProxyLogFilterer(logging.Filterer):
    """logging.Filterer that forwards all filter bookkeeping to a wrapped Filterer.

    Note: deliberately does not call super().__init__() — all state (including
    the `filters` list) lives on the underlying object.
    """

    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
        self._underlying = underlying

    @property
    def underlying(self) -> logging.Filterer:
        # The wrapped Filterer every operation delegates to.
        return self._underlying

    @property
    def filters(self):
        return self._underlying.filters

    @filters.setter
    def filters(self, filters):
        self._underlying.filters = filters

    def addFilter(self, filter):  # noqa
        self._underlying.addFilter(filter)

    def removeFilter(self, filter):  # noqa
        self._underlying.removeFilter(filter)

    def filter(self, record):
        return self._underlying.filter(record)
886
+
887
+
888
class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
    """logging.Handler that forwards every handler operation to a wrapped Handler.

    Note: logging.Handler.__init__ is deliberately not called — only the
    filterer proxy is initialized; all handler state (level, formatter, lock,
    name) lives on the underlying handler.
    """

    def __init__(self, underlying: logging.Handler) -> None:  # noqa
        ProxyLogFilterer.__init__(self, underlying)

    # Narrow the inherited attribute's type for checkers.
    _underlying: logging.Handler

    @property
    def underlying(self) -> logging.Handler:
        return self._underlying

    def get_name(self):
        return self._underlying.get_name()

    def set_name(self, name):
        self._underlying.set_name(name)

    @property
    def name(self):
        return self._underlying.name

    @property
    def level(self):
        return self._underlying.level

    @level.setter
    def level(self, level):
        self._underlying.level = level

    @property
    def formatter(self):
        return self._underlying.formatter

    @formatter.setter
    def formatter(self, formatter):
        self._underlying.formatter = formatter

    def createLock(self):
        self._underlying.createLock()

    def acquire(self):
        self._underlying.acquire()

    def release(self):
        self._underlying.release()

    def setLevel(self, level):
        self._underlying.setLevel(level)

    def format(self, record):
        return self._underlying.format(record)

    def emit(self, record):
        self._underlying.emit(record)

    def handle(self, record):
        return self._underlying.handle(record)

    def setFormatter(self, fmt):
        self._underlying.setFormatter(fmt)

    def flush(self):
        self._underlying.flush()

    def close(self):
        self._underlying.close()

    def handleError(self, record):
        self._underlying.handleError(record)
956
+
957
+
958
+ ########################################
959
+ # ../cache.py
960
+
961
+
962
+ ##
963
+
964
+
965
class FileCache(abc.ABC):
    """Abstract key -> cached-file-path store.

    Fix: the class itself was erroneously decorated with @abc.abstractmethod,
    which is meant for callables and set `__isabstractmethod__` on the class —
    making the class itself read as an abstract member if ever used as a class
    attribute. Abstractness already comes from the abc.ABC base plus the
    abstract methods, so the decorator is removed.
    """

    @abc.abstractmethod
    def get_file(self, key: str) -> ta.Optional[str]:
        """Return the local path of the cached file for `key`, or None if absent."""
        raise NotImplementedError

    @abc.abstractmethod
    def put_file(self, key: str, file_path: str) -> ta.Optional[str]:
        """Store the file at `file_path` under `key`; return its cached path if known."""
        raise NotImplementedError
974
+
975
+
976
+ #
977
+
978
+
979
class DirectoryFileCache(FileCache):
    """FileCache backed by a flat directory: keys map directly to file names."""

    def __init__(self, dir: str) -> None:  # noqa
        super().__init__()

        self._dir = dir

    #

    def get_cache_file_path(
            self,
            key: str,
            *,
            make_dirs: bool = False,
    ) -> str:
        """Resolve `key` to its on-disk path, optionally creating the cache dir."""
        if make_dirs:
            os.makedirs(self._dir, exist_ok=True)
        return os.path.join(self._dir, key)

    def format_incomplete_file(self, f: str) -> str:
        """Sibling name used for a cache file while it is still being written."""
        head = os.path.dirname(f)
        tail = os.path.basename(f)
        return os.path.join(head, f'_{tail}.incomplete')

    #

    def get_file(self, key: str) -> ta.Optional[str]:
        path = self.get_cache_file_path(key)
        return path if os.path.exists(path) else None

    def put_file(self, key: str, file_path: str) -> None:
        dst = self.get_cache_file_path(key, make_dirs=True)
        shutil.copyfile(file_path, dst)
1011
+
1012
+
1013
+ ##
1014
+
1015
+
1016
class ShellCache(abc.ABC):
    """Abstract cache addressed via shell commands rather than direct file paths."""

    @abc.abstractmethod
    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
        """Return a command writing the cached content for `key` to stdout, or None."""
        raise NotImplementedError

    class PutFileCmdContext(abc.ABC):
        """Context manager around a put: run `cmd` to write, then commit or abort.

        On normal `with`-block exit the write is committed; on exception it is
        aborted. Both transitions are idempotent from their own terminal state,
        and raise RuntimeError from the other terminal state.
        """

        def __init__(self) -> None:
            super().__init__()

            self._state: ta.Literal['open', 'committed', 'aborted'] = 'open'

        @property
        def state(self) -> ta.Literal['open', 'committed', 'aborted']:
            return self._state

        #

        @property
        @abc.abstractmethod
        def cmd(self) -> ShellCmd:
            """Command whose stdin receives the content being cached."""
            raise NotImplementedError

        #

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_val is None:
                self.commit()
            else:
                self.abort()

        #

        @abc.abstractmethod
        def _commit(self) -> None:
            raise NotImplementedError

        def commit(self) -> None:
            if self._state == 'committed':
                return
            elif self._state == 'open':
                self._commit()
                self._state = 'committed'
            else:
                raise RuntimeError(self._state)

        #

        @abc.abstractmethod
        def _abort(self) -> None:
            raise NotImplementedError

        def abort(self) -> None:
            if self._state == 'aborted':
                return
            elif self._state == 'open':
                self._abort()
                # BUG FIX: previously set to 'committed', which misreported the
                # state, made a repeated abort() raise RuntimeError('committed'),
                # and let a later commit() silently no-op after an abort.
                self._state = 'aborted'
            else:
                raise RuntimeError(self._state)

    @abc.abstractmethod
    def put_file_cmd(self, key: str) -> PutFileCmdContext:
        raise NotImplementedError
1082
+
1083
+
1084
+ #
1085
+
1086
+
1087
class DirectoryShellCache(ShellCache):
    """ShellCache adapter over a DirectoryFileCache using `cat`-based commands."""

    def __init__(self, dfc: DirectoryFileCache) -> None:
        super().__init__()

        self._dfc = dfc

    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
        # Command that streams the cached file to stdout; None when key absent.
        f = self._dfc.get_file(key)
        if f is None:
            return None
        return ShellCmd(f'cat {shlex.quote(f)}')

    class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
        def __init__(self, tf: str, f: str) -> None:
            super().__init__()

            self._tf = tf  # temporary (incomplete) path written by `cmd`
            self._f = f  # final path installed on commit

        @property
        def cmd(self) -> ShellCmd:
            # Writes stdin into the temporary file.
            return ShellCmd(f'cat > {shlex.quote(self._tf)}')

        def _commit(self) -> None:
            # Atomic rename from the incomplete file to the final path.
            os.replace(self._tf, self._f)

        def _abort(self) -> None:
            os.unlink(self._tf)

    def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
        f = self._dfc.get_cache_file_path(key, make_dirs=True)
        return self._PutFileCmdContext(self._dfc.format_incomplete_file(f), f)
1119
+
1120
+
1121
+ ########################################
1122
+ # ../github/cacheapi.py
1123
+ """
1124
+ export FILE_SIZE=$(stat --format="%s" $FILE)
1125
+
1126
+ export CACHE_ID=$(curl -s \
1127
+ -X POST \
1128
+ "${ACTIONS_CACHE_URL}_apis/artifactcache/caches" \
1129
+ -H 'Content-Type: application/json' \
1130
+ -H 'Accept: application/json;api-version=6.0-preview.1' \
1131
+ -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
1132
+ -d '{"key": "'"$CACHE_KEY"'", "cacheSize": '"$FILE_SIZE"'}' \
1133
+ | jq .cacheId)
1134
+
1135
+ curl -s \
1136
+ -X PATCH \
1137
+ "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
1138
+ -H 'Content-Type: application/octet-stream' \
1139
+ -H 'Accept: application/json;api-version=6.0-preview.1' \
1140
+ -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
1141
+ -H "Content-Range: bytes 0-$((FILE_SIZE - 1))/*" \
1142
+ --data-binary @"$FILE"
1143
+
1144
+ curl -s \
1145
+ -X POST \
1146
+ "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
1147
+ -H 'Content-Type: application/json' \
1148
+ -H 'Accept: application/json;api-version=6.0-preview.1' \
1149
+ -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
1150
+ -d '{"size": '"$(stat --format="%s" $FILE)"'}'
1151
+
1152
+ curl -s \
1153
+ -X GET \
1154
+ "${ACTIONS_CACHE_URL}_apis/artifactcache/cache?keys=$CACHE_KEY" \
1155
+ -H 'Content-Type: application/json' \
1156
+ -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
1157
+ | jq .
1158
+ """
1159
+
1160
+
1161
+ ##
1162
+
1163
+
1164
class GithubCacheServiceV1:
    """Request/response shapes and JSON helpers for the GitHub v1 artifact-cache API.

    Wire JSON uses lowerCamelCase keys; the dataclasses here use snake_case,
    with `dataclass_to_json` / `dataclass_from_json` converting between them.
    """

    API_VERSION = '6.0-preview.1'

    @classmethod
    def get_service_url(cls, base_url: str) -> str:
        return f'{base_url.rstrip("/")}/_apis/artifactcache'

    #

    @classmethod
    def dataclass_to_json(cls, obj: ta.Any) -> ta.Any:
        # Camel-cases keys and drops None fields to match the wire format.
        return {
            camel_case(k, lower=True): v
            for k, v in dc.asdict(obj).items()
            if v is not None
        }

    @classmethod
    def dataclass_from_json(cls, dcls: ta.Type[T], obj: ta.Any) -> T:
        return dcls(**{
            snake_case(k): v
            for k, v in obj.items()
        })

    #

    @dc.dataclass(frozen=True)
    class ArtifactCacheEntry:
        cache_key: ta.Optional[str]
        scope: ta.Optional[str]
        cache_version: ta.Optional[str]
        creation_time: ta.Optional[str]
        archive_location: ta.Optional[str]

    @dc.dataclass(frozen=True)
    class ArtifactCacheList:
        total_count: int
        artifact_caches: ta.Optional[ta.Sequence['GithubCacheServiceV1.ArtifactCacheEntry']]

    #

    @dc.dataclass(frozen=True)
    class ReserveCacheRequest:
        key: str
        cache_size: ta.Optional[int]
        version: ta.Optional[str] = None

    @dc.dataclass(frozen=True)
    class ReserveCacheResponse:
        cache_id: int

    #

    @dc.dataclass(frozen=True)
    class CommitCacheRequest:
        size: int

    #

    class CompressionMethod:
        GZIP = 'gzip'
        ZSTD_WITHOUT_LONG = 'zstd-without-long'
        ZSTD = 'zstd'

    @dc.dataclass(frozen=True)
    class InternalCacheOptions:
        compression_method: ta.Optional[str]  # CompressionMethod
        enable_cross_os_archive: ta.Optional[bool]
        cache_size: ta.Optional[int]
1233
+
1234
+
1235
class GithubCacheServiceV2:
    """Request/response shapes for the v2 (Twirp-style) GitHub Actions cache service."""

    # Twirp service name; method urls are formed from this plus a Method.name.
    SERVICE_NAME = 'github.actions.results.api.v1.CacheService'

    @dc.dataclass(frozen=True)
    class Method:
        # Binds a Twirp method name to its request/response dataclass types.
        name: str
        request: type
        response: type

    #

    class CacheScopePermission:
        # Bit flags describing what a CacheScope may do.
        READ = 1
        WRITE = 2
        ALL = READ | WRITE

    @dc.dataclass(frozen=True)
    class CacheScope:
        scope: str
        permission: int  # CacheScopePermission

    @dc.dataclass(frozen=True)
    class CacheMetadata:
        repository_id: int
        scope: ta.Sequence['GithubCacheServiceV2.CacheScope']

    #

    @dc.dataclass(frozen=True)
    class CreateCacheEntryRequest:
        key: str
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

    @dc.dataclass(frozen=True)
    class CreateCacheEntryResponse:
        ok: bool
        signed_upload_url: str

    CREATE_CACHE_ENTRY_METHOD = Method(
        'CreateCacheEntry',
        CreateCacheEntryRequest,
        CreateCacheEntryResponse,
    )

    #

    @dc.dataclass(frozen=True)
    class FinalizeCacheEntryUploadRequest:
        key: str
        size_bytes: int
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

    @dc.dataclass(frozen=True)
    class FinalizeCacheEntryUploadResponse:
        ok: bool
        entry_id: str

    FINALIZE_CACHE_ENTRY_METHOD = Method(
        'FinalizeCacheEntryUpload',
        FinalizeCacheEntryUploadRequest,
        FinalizeCacheEntryUploadResponse,
    )

    #

    @dc.dataclass(frozen=True)
    class GetCacheEntryDownloadUrlRequest:
        key: str
        restore_keys: ta.Sequence[str]
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

    @dc.dataclass(frozen=True)
    class GetCacheEntryDownloadUrlResponse:
        ok: bool
        signed_download_url: str
        matched_key: str

    GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD = Method(
        'GetCacheEntryDownloadURL',
        GetCacheEntryDownloadUrlRequest,
        GetCacheEntryDownloadUrlResponse,
    )
+
1321
+
1322
+ ########################################
1323
+ # ../utils.py
1324
+
1325
+
1326
+ ##
1327
+
1328
+
1329
def make_temp_file() -> str:
    """Create an empty temporary file and return its path.

    The caller owns the file and is responsible for deleting it.
    """

    fd, path = tempfile.mkstemp()
    os.close(fd)  # only the path is needed; don't leak the descriptor
    return path
+
1334
+
1335
+ ##
1336
+
1337
+
1338
def read_yaml_file(yaml_file: str) -> ta.Any:
    """Parse the YAML file at *yaml_file* and return the loaded object.

    PyYAML is imported lazily via __import__ so this lite script has no hard
    dependency on it until the function is actually called.
    """

    yaml_mod = __import__('yaml')

    with open(yaml_file) as fp:
        contents = fp.read()

    return yaml_mod.safe_load(contents)
+
1344
+
1345
+ ##
1346
+
1347
+
1348
def sha256_str(s: str) -> str:
    """Return the hex-encoded SHA-256 digest of *s*, UTF-8 encoded."""

    digest = hashlib.sha256()
    digest.update(s.encode('utf-8'))
    return digest.hexdigest()
+
1351
+
1352
+ ##
1353
+
1354
+
1355
class LogTimingContext:
    """Context manager that logs 'Begin ...' on entry and 'End ... N s elapsed' on exit."""

    DEFAULT_LOG: ta.ClassVar[logging.Logger] = log

    def __init__(
            self,
            description: str,
            *,
            log: ta.Optional[logging.Logger] = None,  # noqa
            level: int = logging.DEBUG,
    ) -> None:
        super().__init__()

        self._description = description
        self._level = level
        # Fall back to the module logger when none is given explicitly.
        self._log = self.DEFAULT_LOG if log is None else log

    def set_description(self, description: str) -> 'LogTimingContext':
        """Replace the logged description; returns self for chaining."""

        self._description = description
        return self

    # Set on entry / exit respectively.
    _begin_time: float
    _end_time: float

    def __enter__(self) -> 'LogTimingContext':
        self._begin_time = time.time()
        self._log.log(self._level, f'Begin {self._description}')  # noqa
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._end_time = time.time()
        elapsed = self._end_time - self._begin_time
        self._log.log(
            self._level,
            f'End {self._description} - {elapsed:0.2f} s elapsed',
        )


# Lowercase alias, matching the usual context-manager naming convention.
log_timing_context = LogTimingContext
+
1396
+
1397
+ ########################################
1398
+ # ../../../omlish/argparse/cli.py
1399
+ """
1400
+ TODO:
1401
+ - default command
1402
+ - auto match all underscores to hyphens
1403
+ - pre-run, post-run hooks
1404
+ - exitstack?
1405
+ """
1406
+
1407
+
1408
+ ##
1409
+
1410
+
1411
@dc.dataclass(eq=False)
class ArgparseArg:
    """Declarative spec for one argparse argument.

    Also acts as a descriptor: accessed on a cli instance it returns the
    parsed value from the instance's argparse namespace.
    """

    args: ta.Sequence[ta.Any]
    kwargs: ta.Mapping[str, ta.Any]
    dest: ta.Optional[str] = None

    def __get__(self, instance, owner=None):
        # Class access yields the spec itself; instance access yields the parsed value.
        return self if instance is None else getattr(instance.args, self.dest)  # type: ignore
+
1422
+
1423
def argparse_arg(*args, **kwargs) -> ArgparseArg:
    """Convenience factory: build an ArgparseArg spec from add_argument-style args."""

    return ArgparseArg(args=args, kwargs=kwargs)
+
1426
+
1427
+ #
1428
+
1429
+
1430
@dc.dataclass(eq=False)
class ArgparseCmd:
    """Declarative spec for one cli subcommand: a name, the implementing fn, and its args.

    Acts as a descriptor so that accessing the command on a cli instance yields a
    copy whose fn is bound to that instance.
    """

    name: str
    fn: ArgparseCmdFn
    args: ta.Sequence[ArgparseArg] = ()  # noqa

    # _: dc.KW_ONLY

    aliases: ta.Optional[ta.Sequence[str]] = None
    parent: ta.Optional['ArgparseCmd'] = None
    accepts_unknown: bool = False

    def __post_init__(self) -> None:
        # Validate the declaration eagerly so misconfigured commands fail at class-definition time.
        def check_name(s: str) -> None:
            check.isinstance(s, str)
            check.not_in('_', s)  # command names use hyphens, never underscores
            check.not_empty(s)
        check_name(self.name)
        check.not_isinstance(self.aliases, str)  # a bare str would iterate per-character
        for a in self.aliases or []:
            check_name(a)

        check.arg(callable(self.fn))
        check.arg(all(isinstance(a, ArgparseArg) for a in self.args))
        check.isinstance(self.parent, (ArgparseCmd, type(None)))
        check.isinstance(self.accepts_unknown, bool)

        # Make the command look like the function it wraps (__name__, __doc__, ...).
        functools.update_wrapper(self, self.fn)

    def __get__(self, instance, owner=None):
        if instance is None:
            return self
        return dc.replace(self, fn=self.fn.__get__(instance, owner))  # noqa

    def __call__(self, *args, **kwargs) -> ta.Optional[int]:
        return self.fn(*args, **kwargs)
+
1467
+
1468
def argparse_cmd(
        *args: ArgparseArg,
        name: ta.Optional[str] = None,
        aliases: ta.Optional[ta.Iterable[str]] = None,
        parent: ta.Optional[ArgparseCmd] = None,
        accepts_unknown: bool = False,
) -> ta.Any:  # ta.Callable[[ArgparseCmdFn], ArgparseCmd]:  # FIXME
    """Decorator factory turning a function into an ArgparseCmd subcommand spec.

    The command name defaults to the function name with underscores mapped to hyphens.
    """

    # Validate decorator arguments up front, before the decorator is applied.
    for arg in args:
        check.isinstance(arg, ArgparseArg)
    check.isinstance(name, (str, type(None)))
    check.isinstance(parent, (ArgparseCmd, type(None)))
    check.not_isinstance(aliases, str)

    def decorate(fn):
        cmd_name = fn.__name__ if name is None else name
        return ArgparseCmd(
            cmd_name.replace('_', '-'),
            fn,
            args,
            aliases=None if aliases is None else tuple(aliases),
            parent=parent,
            accepts_unknown=accepts_unknown,
        )

    return decorate
+
1493
+
1494
+ ##
1495
+
1496
+
1497
+ def _get_argparse_arg_ann_kwargs(ann: ta.Any) -> ta.Mapping[str, ta.Any]:
1498
+ if ann is str:
1499
+ return {}
1500
+ elif ann is int:
1501
+ return {'type': int}
1502
+ elif ann is bool:
1503
+ return {'action': 'store_true'}
1504
+ elif ann is list:
1505
+ return {'action': 'append'}
1506
+ elif is_optional_alias(ann):
1507
+ return _get_argparse_arg_ann_kwargs(get_optional_alias_arg(ann))
1508
+ else:
1509
+ raise TypeError(ann)
1510
+
1511
+
1512
class _ArgparseCliAnnotationBox:
    """Throwaway carrier whose only job is to hold an __annotations__ mapping
    so ta.get_type_hints can resolve string annotations against it."""

    def __init__(self, annotations: ta.Mapping[str, ta.Any]) -> None:
        super().__init__()
        self.__annotations__ = annotations  # type: ignore
+
1517
+
1518
class ArgparseCli:
    """Declarative argparse-based cli base class.

    Subclasses declare ArgparseArg descriptors (top-level options) and ArgparseCmd
    objects (subcommands); __init_subclass__ assembles the argparse parser from
    those declarations. Instantiating parses argv immediately.
    """

    def __init__(self, argv: ta.Optional[ta.Sequence[str]] = None) -> None:
        super().__init__()

        self._argv = argv if argv is not None else sys.argv[1:]

        # parse_known_args so commands marked accepts_unknown can receive leftovers.
        self._args, self._unknown_args = self.get_parser().parse_known_args(self._argv)

    #

    def __init_subclass__(cls, **kwargs: ta.Any) -> None:
        super().__init_subclass__(**kwargs)

        # Gather ArgparseCmd / ArgparseArg declarations across the mro, applying the
        # subclass's own namespace last. Overriding an inherited declaration with a
        # plain value unregisters it.
        # NOTE(review): cls.__mro__ is reversed twice (mro = [::-1], then reversed(mro)),
        # so bases are visited *after* the subclass before ns reapplies the subclass's
        # own dict - confirm this ordering is intended.
        ns = cls.__dict__
        objs = {}
        mro = cls.__mro__[::-1]
        for bns in [bcls.__dict__ for bcls in reversed(mro)] + [ns]:
            bseen = set()  # type: ignore
            for k, v in bns.items():
                if isinstance(v, (ArgparseCmd, ArgparseArg)):
                    check.not_in(v, bseen)  # the same spec object must not be bound twice in one class
                    bseen.add(v)
                    objs[k] = v
                elif k in objs:
                    # Bug fix: was `del [k]`, which deleted the *local variable* k
                    # (a no-op for objs) and left the shadowed declaration registered.
                    del objs[k]

        #

        # Resolve annotations across the mro so ArgparseArg declarations can derive
        # add_argument kwargs from them (see _get_argparse_arg_ann_kwargs).
        anns = ta.get_type_hints(_ArgparseCliAnnotationBox({
            **{k: v for bcls in reversed(mro) for k, v in getattr(bcls, '__annotations__', {}).items()},
            **ns.get('__annotations__', {}),
        }), globalns=ns.get('__globals__', {}))

        #

        # Reuse an explicitly-declared parser if the subclass provides one.
        if '_parser' in ns:
            parser = check.isinstance(ns['_parser'], argparse.ArgumentParser)
        else:
            parser = argparse.ArgumentParser()
            setattr(cls, '_parser', parser)

        #

        subparsers = parser.add_subparsers()

        for att, obj in objs.items():
            if isinstance(obj, ArgparseCmd):
                if obj.parent is not None:
                    raise NotImplementedError

                for cn in [obj.name, *(obj.aliases or [])]:
                    subparser = subparsers.add_parser(cn)

                    for arg in (obj.args or []):
                        if (
                                len(arg.args) == 1 and
                                isinstance(arg.args[0], str) and
                                not (n := check.isinstance(arg.args[0], str)).startswith('-') and
                                'metavar' not in arg.kwargs
                        ):
                            # Bare positional: register with an underscored dest but
                            # keep the hyphenated name as the displayed metavar.
                            subparser.add_argument(
                                n.replace('-', '_'),
                                **arg.kwargs,
                                metavar=n,
                            )
                        else:
                            subparser.add_argument(*arg.args, **arg.kwargs)

                    # Stash the command on the namespace so prepare_cli_run can find it.
                    subparser.set_defaults(_cmd=obj)

            elif isinstance(obj, ArgparseArg):
                if att in anns:
                    # Annotation-derived kwargs are defaults; explicit kwargs win.
                    ann_kwargs = _get_argparse_arg_ann_kwargs(anns[att])
                    obj.kwargs = {**ann_kwargs, **obj.kwargs}

                if not obj.dest:
                    if 'dest' in obj.kwargs:
                        obj.dest = obj.kwargs['dest']
                    else:
                        obj.dest = obj.kwargs['dest'] = att  # type: ignore

                parser.add_argument(*obj.args, **obj.kwargs)

            else:
                raise TypeError(obj)

    #

    _parser: ta.ClassVar[argparse.ArgumentParser]

    @classmethod
    def get_parser(cls) -> argparse.ArgumentParser:
        """Return the parser assembled for this subclass."""

        return cls._parser

    @property
    def argv(self) -> ta.Sequence[str]:
        return self._argv

    @property
    def args(self) -> argparse.Namespace:
        return self._args

    @property
    def unknown_args(self) -> ta.Sequence[str]:
        return self._unknown_args

    #

    def _bind_cli_cmd(self, cmd: ArgparseCmd) -> ta.Callable:
        # Bind the command's fn to this instance via the descriptor protocol.
        return cmd.__get__(self, type(self))

    def prepare_cli_run(self) -> ta.Optional[ta.Callable]:
        """Resolve the selected command into a bound callable.

        Errors on unrecognized arguments unless the command accepts them; returns
        None (after printing help) when no command was given.
        """

        cmd = getattr(self.args, '_cmd', None)

        if self._unknown_args and not (cmd is not None and cmd.accepts_unknown):
            msg = f'unrecognized arguments: {" ".join(self._unknown_args)}'
            if (parser := self.get_parser()).exit_on_error:  # type: ignore
                parser.error(msg)
            else:
                raise argparse.ArgumentError(None, msg)

        if cmd is None:
            self.get_parser().print_help()
            return None

        return self._bind_cli_cmd(cmd)

    #

    def cli_run(self) -> ta.Optional[int]:
        """Run the selected command, returning its exit code (0 when no command)."""

        if (fn := self.prepare_cli_run()) is None:
            return 0

        return fn()

    def cli_run_and_exit(self) -> ta.NoReturn:
        """Run the selected command and sys.exit with its (int) result, else 0."""

        sys.exit(rc if isinstance(rc := self.cli_run(), int) else 0)

    def __call__(self, *, exit: bool = False) -> ta.Optional[int]:  # noqa
        if exit:
            return self.cli_run_and_exit()
        else:
            return self.cli_run()

    #

    async def async_cli_run(
            self,
            *,
            force_async: bool = False,
    ) -> ta.Optional[int]:
        """Like cli_run, awaiting the command when it is a coroutine function
        (or unconditionally when force_async is set)."""

        if (fn := self.prepare_cli_run()) is None:
            return 0

        if force_async:
            is_async = True
        else:
            # Unwrap the ArgparseCmd to inspect the underlying function.
            tfn = fn
            if isinstance(tfn, ArgparseCmd):
                tfn = tfn.fn
            is_async = inspect.iscoroutinefunction(tfn)

        if is_async:
            return await fn()
        else:
            return fn()
+
1685
+
1686
+ ########################################
1687
+ # ../../../omlish/lite/contextmanagers.py
1688
+
1689
+
1690
+ ##
1691
+
1692
+
1693
class ExitStacked:
    """Mixin giving a class an owned contextlib.ExitStack for the duration of a `with` block.

    Subclasses register cleanups via _enter_context and may hook _exit_contexts.
    """

    _exit_stack: ta.Optional[contextlib.ExitStack] = None

    def __enter__(self: ExitStackedT) -> ExitStackedT:
        check.state(self._exit_stack is None)  # not reentrant
        stack = contextlib.ExitStack()
        self._exit_stack = stack
        stack.__enter__()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        stack = self._exit_stack
        if stack is None:
            return None
        # Give subclasses a chance to run teardown before the stack unwinds.
        self._exit_contexts()
        return stack.__exit__(exc_type, exc_val, exc_tb)

    def _exit_contexts(self) -> None:
        """Subclass hook, called before the exit stack unwinds."""

    def _enter_context(self, cm: ta.ContextManager[T]) -> T:
        """Enter *cm* on the owned stack; only valid inside the `with` block."""

        return check.not_none(self._exit_stack).enter_context(cm)
+
1715
+
1716
+ ##
1717
+
1718
+
1719
@contextlib.contextmanager
def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
    """Yield *fn* and invoke it when the block exits, even on error (like Go's defer)."""

    with contextlib.ExitStack() as es:
        es.callback(fn)
        yield fn
+
1726
+
1727
@contextlib.contextmanager
def attr_setting(obj, attr, val, *, default=None):  # noqa
    """Temporarily set obj.attr to *val*.

    Yields the previous value (or *default* if the attribute was absent) and on
    exit restores the original value, or deletes the attribute if it didn't exist.
    """

    sentinel = object()
    prev = getattr(obj, attr, sentinel)
    had_prev = prev is not sentinel
    try:
        setattr(obj, attr, val)
        yield prev if had_prev else default
    finally:
        if had_prev:
            setattr(obj, attr, prev)
        else:
            delattr(obj, attr)
+
1743
+
1744
+ ##
1745
+
1746
+
1747
class aclosing(contextlib.AbstractAsyncContextManager):  # noqa
    """Async analog of contextlib.closing: awaits thing.aclose() on exit.

    Mirrors contextlib.aclosing, which is unavailable on this script's 3.8 baseline.
    """

    def __init__(self, thing):
        self.thing = thing

    async def __aenter__(self):
        return self.thing

    async def __aexit__(self, *exc_info):
        await self.thing.aclose()
+
1757
+
1758
+ ########################################
1759
+ # ../../../omlish/lite/runtime.py
1760
+
1761
+
1762
@cached_nullary
def is_debugger_attached() -> bool:
    """Best-effort, cached check for an attached pydevd (PyCharm/PyDev) debugger,
    detected by a pydevd.py frame anywhere in the current call stack."""
    return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
+
1766
+
1767
# Minimum interpreter version for 'lite' code.
LITE_REQUIRED_PYTHON_VERSION = (3, 8)


def check_lite_runtime_version() -> None:
    """Raise OSError if the running interpreter is older than the lite baseline."""

    if sys.version_info >= LITE_REQUIRED_PYTHON_VERSION:
        return
    raise OSError(
        f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
+
1774
+
1775
+ ########################################
1776
+ # ../../../omlish/logs/json.py
1777
+ """
1778
+ TODO:
1779
+ - translate json keys
1780
+ """
1781
+
1782
+
1783
class JsonLogFormatter(logging.Formatter):
    """Formats each log record as a single JSON object.

    KEYS maps a LogRecord attribute name to an 'omit when None' flag: attributes
    flagged True are dropped from the output when their value is None.
    """

    KEYS: ta.Mapping[str, bool] = {
        'name': False,
        'msg': False,
        'args': False,
        'levelname': False,
        'levelno': False,
        'pathname': False,
        'filename': False,
        'module': False,
        'exc_info': True,
        'exc_text': True,
        'stack_info': True,
        'lineno': False,
        'funcName': False,
        'created': False,
        'msecs': False,
        'relativeCreated': False,
        'thread': False,
        'threadName': False,
        'processName': False,
        'process': False,
    }

    def __init__(
            self,
            *args: ta.Any,
            json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
            **kwargs: ta.Any,
    ) -> None:
        super().__init__(*args, **kwargs)

        # Default to the compact dumper when no serializer is supplied.
        self._json_dumps = json_dumps if json_dumps is not None else json_dumps_compact

    def format(self, record: logging.LogRecord) -> str:
        dct = {}
        for key, omit_if_none in self.KEYS.items():
            value = getattr(record, key)
            if omit_if_none and value is None:
                continue
            dct[key] = value
        return self._json_dumps(dct)
+
1828
+
1829
+ ########################################
1830
+ # ../../../omlish/logs/standard.py
1831
+ """
1832
+ TODO:
1833
+ - structured
1834
+ - prefixed
1835
+ - debug
1836
+ - optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
1837
+ """
1838
+
1839
+
1840
+ ##
1841
+
1842
+
1843
# (name, %-format fragment) pairs for the standard text log line; joined by
# StandardLogFormatter.build_log_format. Names allow callers to filter parts.
STANDARD_LOG_FORMAT_PARTS = [
    ('asctime', '%(asctime)-15s'),
    ('process', 'pid=%(process)-6s'),
    ('thread', 'tid=%(thread)x'),
    ('levelname', '%(levelname)s'),
    ('name', '%(name)s'),
    ('separator', '::'),
    ('message', '%(message)s'),
]
+
1853
+
1854
class StandardLogFormatter(logging.Formatter):
    """Text formatter producing the standard layout with millisecond timestamps."""

    @staticmethod
    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
        """Join the %-format fragments of *parts*, discarding their names."""

        return ' '.join(fmt for _, fmt in parts)

    # Timestamps are rendered via datetime rather than time.localtime.
    converter = datetime.datetime.fromtimestamp  # type: ignore

    def formatTime(self, record, datefmt=None):
        ct = self.converter(record.created)  # type: ignore
        if datefmt:
            return ct.strftime(datefmt)  # noqa
        # Default: ISO-ish timestamp with the record's milliseconds appended.
        t = ct.strftime('%Y-%m-%d %H:%M:%S')
        return '%s.%03d' % (t, record.msecs)  # noqa
+
1869
+
1870
+ ##
1871
+
1872
+
1873
class StandardConfiguredLogHandler(ProxyLogHandler):
    """Marker wrapper identifying a handler installed by configure_standard_logging,
    so repeat configuration can be detected and skipped."""

    def __init_subclass__(cls, **kwargs):
        raise TypeError('This class serves only as a marker and should not be subclassed.')
+
1877
+
1878
+ ##
1879
+
1880
+
1881
+ @contextlib.contextmanager
1882
+ def _locking_logging_module_lock() -> ta.Iterator[None]:
1883
+ if hasattr(logging, '_acquireLock'):
1884
+ logging._acquireLock() # noqa
1885
+ try:
1886
+ yield
1887
+ finally:
1888
+ logging._releaseLock() # type: ignore # noqa
1889
+
1890
+ elif hasattr(logging, '_lock'):
1891
+ # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
1892
+ with logging._lock: # noqa
1893
+ yield
1894
+
1895
+ else:
1896
+ raise Exception("Can't find lock in logging module")
1897
+
1898
+
1899
def configure_standard_logging(
        level: ta.Union[int, str] = logging.INFO,
        *,
        json: bool = False,
        target: ta.Optional[logging.Logger] = None,
        force: bool = False,
        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
) -> ta.Optional[StandardConfiguredLogHandler]:
    """Install the standard (or JSON) log handler on *target* (default: root logger).

    Unless *force* is set, does nothing and returns None when a previously-installed
    StandardConfiguredLogHandler is already present. Returns the installed handler
    wrapped in the marker class otherwise. All mutation happens under the logging
    module's internal lock.
    """

    with _locking_logging_module_lock():
        if target is None:
            target = logging.root

        #

        # Idempotence: skip if we've already configured this logger.
        if not force:
            if any(isinstance(h, StandardConfiguredLogHandler) for h in list(target.handlers)):
                return None

        #

        if handler_factory is not None:
            handler = handler_factory()
        else:
            handler = logging.StreamHandler()

        #

        formatter: logging.Formatter
        if json:
            formatter = JsonLogFormatter()
        else:
            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
        handler.setFormatter(formatter)

        #

        # Adds the thread id to records for the 'tid=' format part.
        handler.addFilter(TidLogFilter())

        #

        target.addHandler(handler)

        #

        if level is not None:
            target.setLevel(level)

        #

        # Wrap in the marker class so later calls can detect the installation.
        return StandardConfiguredLogHandler(handler)
+
1950
+
1951
+ ########################################
1952
+ # ../../../omlish/subprocesses.py
1953
+
1954
+
1955
+ ##
1956
+
1957
+
1958
# Maps symbolic channel-option names to their subprocess module constants.
SUBPROCESS_CHANNEL_OPTION_VALUES: ta.Mapping[SubprocessChannelOption, int] = {
    'pipe': subprocess.PIPE,
    'stdout': subprocess.STDOUT,
    'devnull': subprocess.DEVNULL,
}
+
1964
+
1965
+ ##
1966
+
1967
+
1968
# Global escape hatch: when True, every exec is wrapped in `sh -c ...`
# (see subprocess_maybe_shell_wrap_exec).
_SUBPROCESS_SHELL_WRAP_EXECS = False
+
1970
+
1971
def subprocess_shell_wrap_exec(*cmd: str) -> ta.Tuple[str, ...]:
    """Wrap an argv in `sh -c '...'`, shell-quoting each argument."""

    quoted = ' '.join(shlex.quote(a) for a in cmd)
    return ('sh', '-c', quoted)
+
1974
+
1975
def subprocess_maybe_shell_wrap_exec(*cmd: str) -> ta.Tuple[str, ...]:
    """Shell-wrap the argv only when globally forced or a debugger is attached;
    otherwise return it unchanged."""

    if not (_SUBPROCESS_SHELL_WRAP_EXECS or is_debugger_attached()):
        return cmd
    return subprocess_shell_wrap_exec(*cmd)
+
1981
+
1982
+ ##
1983
+
1984
+
1985
def subprocess_close(
        proc: subprocess.Popen,
        timeout: ta.Optional[float] = None,
) -> None:
    """Close any open stdio pipes of *proc* and wait for it to exit."""

    # TODO: terminate, sleep, kill
    for stream in (proc.stdout, proc.stderr, proc.stdin):
        if stream:
            stream.close()

    proc.wait(timeout)
+
1999
+
2000
+ ##
2001
+
2002
+
2003
class BaseSubprocesses(abc.ABC):  # noqa
    """Shared plumbing for the sync/async subprocess wrappers: argument
    preparation, debug logging around calls, and best-effort 'try' variants."""

    # Logger used when none is passed to the constructor.
    DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None

    def __init__(
            self,
            *,
            log: ta.Optional[logging.Logger] = None,
            try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
    ) -> None:
        super().__init__()

        self._log = log if log is not None else self.DEFAULT_LOGGER
        self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS

    def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
        self._log = log

    #

    def prepare_args(
            self,
            *cmd: str,
            env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
            extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
            quiet: bool = False,
            shell: bool = False,
            **kwargs: ta.Any,
    ) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
        """Normalize an invocation into (cmd, kwargs) for the subprocess module.

        extra_env is merged over env (or os.environ); quiet routes stderr to
        DEVNULL unless debug logging is enabled; non-shell commands may be
        shell-wrapped for debugger friendliness.
        """

        if self._log:
            self._log.debug('Subprocesses.prepare_args: cmd=%r', cmd)
            if extra_env:
                self._log.debug('Subprocesses.prepare_args: extra_env=%r', extra_env)

        if extra_env:
            env = {**(env if env is not None else os.environ), **extra_env}

        if quiet and 'stderr' not in kwargs:
            if self._log and not self._log.isEnabledFor(logging.DEBUG):
                kwargs['stderr'] = subprocess.DEVNULL

        if not shell:
            cmd = subprocess_maybe_shell_wrap_exec(*cmd)

        return cmd, dict(
            env=env,
            shell=shell,
            **kwargs,
        )

    @contextlib.contextmanager
    def wrap_call(self, *cmd: ta.Any, **kwargs: ta.Any) -> ta.Iterator[None]:
        """Log entry, failure, and elapsed time around a subprocess invocation."""

        start_time = time.time()
        try:
            if self._log:
                self._log.debug('Subprocesses.wrap_call.try: cmd=%r', cmd)
            yield

        except Exception as exc:  # noqa
            if self._log:
                self._log.debug('Subprocesses.wrap_call.except: exc=%r', exc)
            raise

        finally:
            end_time = time.time()
            elapsed_s = end_time - start_time
            if self._log:
                # Fix: message previously read 'sSubprocesses...' (typo).
                self._log.debug('Subprocesses.wrap_call.finally: elapsed_s=%f cmd=%r', elapsed_s, cmd)

    @contextlib.contextmanager
    def prepare_and_wrap(
            self,
            *cmd: ta.Any,
            **kwargs: ta.Any,
    ) -> ta.Iterator[ta.Tuple[
        ta.Tuple[ta.Any, ...],
        ta.Dict[str, ta.Any],
    ]]:
        """Compose prepare_args and wrap_call, yielding the prepared (cmd, kwargs)."""

        cmd, kwargs = self.prepare_args(*cmd, **kwargs)
        with self.wrap_call(*cmd, **kwargs):
            yield cmd, kwargs

    #

    # Exceptions treated as 'expected failure' by the try_* helpers.
    DEFAULT_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
        FileNotFoundError,
        subprocess.CalledProcessError,
    )

    def try_fn(
            self,
            fn: ta.Callable[..., T],
            *cmd: str,
            try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
            **kwargs: ta.Any,
    ) -> ta.Union[T, Exception]:
        """Call fn(*cmd, **kwargs), returning the caught exception instead of
        raising when it is one of the configured try-exceptions."""

        if try_exceptions is None:
            try_exceptions = self._try_exceptions

        try:
            return fn(*cmd, **kwargs)

        except try_exceptions as e:  # noqa
            if self._log and self._log.isEnabledFor(logging.DEBUG):
                self._log.exception('command failed')
            return e

    async def async_try_fn(
            self,
            fn: ta.Callable[..., ta.Awaitable[T]],
            *cmd: ta.Any,
            try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
            **kwargs: ta.Any,
    ) -> ta.Union[T, Exception]:
        """Async counterpart of try_fn."""

        if try_exceptions is None:
            try_exceptions = self._try_exceptions

        try:
            return await fn(*cmd, **kwargs)

        except try_exceptions as e:  # noqa
            if self._log and self._log.isEnabledFor(logging.DEBUG):
                self._log.exception('command failed')
            return e
+
2127
+
2128
+ ##
2129
+
2130
+
2131
class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
    """Synchronous subprocess interface; concrete subclasses supply check_call
    and check_output, and inherit the decoded/'try' conveniences."""

    @abc.abstractmethod
    def check_call(self, *cmd: str, stdout: ta.Any = sys.stderr, **kwargs: ta.Any) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    def check_output(self, *cmd: str, **kwargs: ta.Any) -> bytes:
        raise NotImplementedError

    #

    def check_output_str(self, *cmd: str, **kwargs: ta.Any) -> str:
        """check_output, decoded and stripped."""

        return self.check_output(*cmd, **kwargs).decode().strip()

    #

    def try_call(self, *cmd: str, **kwargs: ta.Any) -> bool:
        """True if check_call succeeded, False if it raised a configured try-exception."""

        return not isinstance(self.try_fn(self.check_call, *cmd, **kwargs), Exception)

    def try_output(self, *cmd: str, **kwargs: ta.Any) -> ta.Optional[bytes]:
        """check_output's bytes, or None if it raised a configured try-exception."""

        ret = self.try_fn(self.check_output, *cmd, **kwargs)
        return None if isinstance(ret, Exception) else ret

    def try_output_str(self, *cmd: str, **kwargs: ta.Any) -> ta.Optional[str]:
        """try_output, decoded and stripped, or None on failure."""

        ret = self.try_output(*cmd, **kwargs)
        return None if ret is None else ret.decode().strip()
+
2190
+
2191
+ ##
2192
+
2193
+
2194
class Subprocesses(AbstractSubprocesses):
    """Concrete synchronous runner delegating to the subprocess module."""

    def check_call(
            self,
            *cmd: str,
            stdout: ta.Any = sys.stderr,
            **kwargs: ta.Any,
    ) -> None:
        """Run the command, raising CalledProcessError on nonzero exit."""
        with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):  # noqa
            subprocess.check_call(cmd, **kwargs)

    def check_output(
            self,
            *cmd: str,
            **kwargs: ta.Any,
    ) -> bytes:
        """Run the command and return its stdout, raising on nonzero exit."""
        with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs):  # noqa
            return subprocess.check_output(cmd, **kwargs)
+
2212
+
2213
# Default process-wide synchronous subprocess runner.
subprocesses = Subprocesses()
+
2215
+
2216
+ ##
2217
+
2218
+
2219
class AbstractAsyncSubprocesses(BaseSubprocesses):
    """Asynchronous subprocess interface; concrete subclasses supply check_call
    and check_output coroutines, and inherit the decoded/'try' conveniences."""

    @abc.abstractmethod
    async def check_call(self, *cmd: str, stdout: ta.Any = sys.stderr, **kwargs: ta.Any) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    async def check_output(self, *cmd: str, **kwargs: ta.Any) -> bytes:
        raise NotImplementedError

    #

    async def check_output_str(self, *cmd: str, **kwargs: ta.Any) -> str:
        """check_output, decoded and stripped."""

        return (await self.check_output(*cmd, **kwargs)).decode().strip()

    #

    async def try_call(self, *cmd: str, **kwargs: ta.Any) -> bool:
        """True if check_call succeeded, False if it raised a configured try-exception."""

        return not isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception)

    async def try_output(self, *cmd: str, **kwargs: ta.Any) -> ta.Optional[bytes]:
        """check_output's bytes, or None if it raised a configured try-exception."""

        ret = await self.async_try_fn(self.check_output, *cmd, **kwargs)
        return None if isinstance(ret, Exception) else ret

    async def try_output_str(self, *cmd: str, **kwargs: ta.Any) -> ta.Optional[str]:
        """try_output, decoded and stripped, or None on failure."""

        ret = await self.try_output(*cmd, **kwargs)
        return None if ret is None else ret.decode().strip()
+
2278
+
2279
+ ########################################
2280
+ # ../compose.py
2281
+ """
2282
+ TODO:
2283
+ - fix rmi - only when not referenced anymore
2284
+ """
2285
+
2286
+
2287
+ ##
2288
+
2289
+
2290
def get_compose_service_dependencies(
        compose_file: str,
        service: str,
) -> ta.Dict[str, str]:
    """Map each service listed in *service*'s depends_on to that dependency's
    image, as declared in the compose file."""

    compose_dct = read_yaml_file(compose_file)
    services = compose_dct['services']
    service_dct = services[service]

    return {
        dep: services[dep]['image']
        for dep in service_dct.get('depends_on', [])
    }
+
2306
+
2307
+ ##
2308
+
2309
+
2310
class DockerComposeRun(ExitStacked):
    """Runs one service of a compose file against a freshly-built image.

    Tags the image as '<service>:<image-id>', rewrites the compose file to use
    that tag (stripping build/platform and exposing dependency links), then
    `docker compose run`s the configured command. Temporary tag and rewritten
    file are cleaned up via the inherited exit stack.
    """

    @dc.dataclass(frozen=True)
    class Config:
        compose_file: str
        service: str

        # Image reference to run as the service (may be a 'sha256:...' id).
        image: str

        cmd: ShellCmd

        #

        # Extra options passed to `docker compose run`.
        run_options: ta.Optional[ta.Sequence[str]] = None

        cwd: ta.Optional[str] = None

        #

        # When set, skip `docker compose down` of dependency services afterwards.
        no_dependency_cleanup: bool = False

        #

        def __post_init__(self) -> None:
            check.not_isinstance(self.run_options, str)

    def __init__(self, cfg: Config) -> None:
        super().__init__()

        self._cfg = cfg

        # Common kwargs for every subprocess call (currently just cwd, if set).
        self._subprocess_kwargs = {
            **(dict(cwd=self._cfg.cwd) if self._cfg.cwd is not None else {}),
        }

    #

    @property
    def image_tag(self) -> str:
        """Compose-friendly tag derived from the service name and image id."""
        # A raw 'sha256:...' id can't be used directly as a tag suffix.
        pfx = 'sha256:'
        if (image := self._cfg.image).startswith(pfx):
            image = image[len(pfx):]

        return f'{self._cfg.service}:{image}'

    @cached_nullary
    def tag_image(self) -> str:
        """Apply image_tag to the configured image; the tag is removed on exit."""
        image_tag = self.image_tag

        subprocesses.check_call(
            'docker',
            'tag',
            self._cfg.image,
            image_tag,
            **self._subprocess_kwargs,
        )

        def delete_tag() -> None:
            subprocesses.check_call(
                'docker',
                'rmi',
                image_tag,
                **self._subprocess_kwargs,
            )

        self._enter_context(defer(delete_tag))  # noqa

        return image_tag

    #

    def _rewrite_compose_dct(self, in_dct: ta.Dict[str, ta.Any]) -> ta.Dict[str, ta.Any]:
        """Return a copy of the compose mapping trimmed to the target service and
        its direct dependencies, pointing the service at the temporary tag."""
        out = dict(in_dct)

        #

        in_services = in_dct['services']
        out['services'] = out_services = {}

        #

        # Target service: swap in the temp tag, drop build/platform so compose
        # never tries to rebuild, and normalize bare link names to 'name:name'.
        in_service: dict = in_services[self._cfg.service]
        out_services[self._cfg.service] = out_service = dict(in_service)

        out_service['image'] = self.image_tag

        for k in ['build', 'platform']:
            if k in out_service:
                del out_service[k]

        out_service['links'] = [
            f'{l}:{l}' if ':' not in l else l
            for l in out_service.get('links', [])
        ]

        #

        # Keep only direct dependencies, with host port publishing removed.
        depends_on = in_service.get('depends_on', [])

        for dep_service, in_dep_service_dct in list(in_services.items()):
            if dep_service not in depends_on:
                continue

            out_dep_service: dict = dict(in_dep_service_dct)
            out_services[dep_service] = out_dep_service

            out_dep_service['ports'] = []

        #

        return out

    @cached_nullary
    def rewrite_compose_file(self) -> str:
        """Write the rewritten compose config to a temp file (deleted on exit)
        and return its path."""
        in_dct = read_yaml_file(self._cfg.compose_file)

        out_dct = self._rewrite_compose_dct(in_dct)

        #

        out_compose_file = make_temp_file()
        self._enter_context(defer(lambda: os.unlink(out_compose_file)))  # noqa

        # JSON is valid YAML, so compose accepts the json-serialized config.
        compose_json = json_dumps_pretty(out_dct)

        with open(out_compose_file, 'w') as f:
            f.write(compose_json)

        return out_compose_file

    #

    def _cleanup_dependencies(self) -> None:
        # Tear down dependency containers started by `compose run`.
        subprocesses.check_call(
            'docker',
            'compose',
            '-f', self.rewrite_compose_file(),
            'down',
        )

    def run(self) -> None:
        """Tag the image, rewrite the compose file, and run the configured command
        in the service container (removing it afterwards via --rm)."""
        self.tag_image()

        compose_file = self.rewrite_compose_file()

        with contextlib.ExitStack() as es:
            if not self._cfg.no_dependency_cleanup:
                es.enter_context(defer(self._cleanup_dependencies))  # noqa

            # Forward the cmd's env var names with -e so the container sees them.
            sh_cmd = ' '.join([
                'docker',
                'compose',
                '-f', compose_file,
                'run',
                '--rm',
                *itertools.chain.from_iterable(['-e', k] for k in (self._cfg.cmd.env or [])),
                *(self._cfg.run_options or []),
                self._cfg.service,
                'sh', '-c', shlex.quote(self._cfg.cmd.s),
            ])

            run_cmd = dc.replace(self._cfg.cmd, s=sh_cmd)

            run_cmd.run(
                subprocesses.check_call,
                **self._subprocess_kwargs,
            )
+
2477
+
2478
+ ########################################
2479
+ # ../docker.py
2480
+ """
2481
+ TODO:
2482
+ - some less stupid Dockerfile hash
2483
+ - doesn't change too much though
2484
+ """
2485
+
2486
+
2487
+ ##
2488
+
2489
+
2490
def build_docker_file_hash(docker_file: str) -> str:
    """Return the sha256 hex digest of the Dockerfile's text contents."""

    with open(docker_file) as f:
        return sha256_str(f.read())
2495
+
2496
+
2497
+ ##
2498
+
2499
+
2500
def read_docker_tar_image_tag(tar_file: str) -> str:
    """Return the sole repo tag recorded in a `docker save` tar's manifest.json."""

    with tarfile.open(tar_file) as tf:
        with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
            raw = mf.read()

    # The manifest is a one-element list; the entry must carry exactly one tag.
    manifest = check.single(json.loads(raw.decode('utf-8')))
    return check.non_empty_str(check.single(manifest['RepoTags']))
2509
+
2510
+
2511
def read_docker_tar_image_id(tar_file: str) -> str:
    """Return the image digest recorded in a `docker save` tar's index.json."""

    with tarfile.open(tar_file) as tf:
        with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
            raw = mf.read()

    # The index must reference exactly one manifest; its digest is the image id.
    manifest = check.single(json.loads(raw.decode('utf-8'))['manifests'])
    return check.non_empty_str(manifest['digest'])
2520
+
2521
+
2522
+ ##
2523
+
2524
+
2525
def is_docker_image_present(image: str) -> bool:
    """Return True if `docker images` reports the given image locally."""

    raw = subprocesses.check_output(
        'docker',
        'images',
        '--format', 'json',
        image,
    )

    text = raw.decode('utf-8').strip()

    # No output at all means the image is absent.
    if not text:
        return False

    # Parse purely as a sanity check on docker's output.
    json.loads(text)  # noqa
    return True
2539
+
2540
+
2541
def pull_docker_image(
        image: str,
) -> None:
    """Pull the given image from its registry."""

    subprocesses.check_call('docker', 'pull', image)
2549
+
2550
+
2551
def build_docker_image(
        docker_file: str,
        *,
        cwd: ta.Optional[str] = None,
) -> str:
    """Build an image from the given Dockerfile and return its image id.

    The id is captured via docker's --iidfile into a temp file that is removed
    afterwards.
    """

    id_file = make_temp_file()
    with defer(lambda: os.unlink(id_file)):
        extra_kwargs = dict(cwd=cwd) if cwd is not None else {}
        subprocesses.check_call(
            'docker',
            'build',
            '-f', os.path.abspath(docker_file),
            '--iidfile', id_file,
            '--squash',
            '.',
            **extra_kwargs,
        )

        with open(id_file) as f:
            id_lines = f.read().strip().splitlines()

        image_id = check.single(id_lines).strip()

    return image_id
2572
+
2573
+
2574
+ ##
2575
+
2576
+
2577
def save_docker_tar_cmd(
        image: str,
        output_cmd: ShellCmd,
) -> None:
    """Pipe `docker save` for the image into the given output shell command."""

    piped = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
    piped.run(subprocesses.check_call)
2583
+
2584
+
2585
def save_docker_tar(
        image: str,
        tar_file: str,
) -> None:
    """Save the image as a tarball at the given path."""

    out_cmd = ShellCmd(f'cat > {shlex.quote(tar_file)}')
    return save_docker_tar_cmd(image, out_cmd)
2593
+
2594
+
2595
+ #
2596
+
2597
+
2598
def load_docker_tar_cmd(
        input_cmd: ShellCmd,
) -> str:
    """Pipe the given command's output into `docker load` and return the loaded ref."""

    piped = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')

    out = piped.run(subprocesses.check_output).decode()

    # `docker load` emits a single 'Loaded image: <ref>' line; keep the ref.
    return check.single(out.strip().splitlines()).partition(':')[2].strip()
2608
+
2609
+
2610
def load_docker_tar(
        tar_file: str,
) -> str:
    """Load a docker image tarball from the given path and return the loaded ref."""

    cat_cmd = ShellCmd(f'cat {shlex.quote(tar_file)}')
    return load_docker_tar_cmd(cat_cmd)
2614
+
2615
+
2616
+ ########################################
2617
+ # ../github/cache.py
2618
+
2619
+
2620
+ ##
2621
+
2622
+
2623
class GithubV1CacheShellClient:
    """Shell-command-based client for the Github Actions v1 cache API.

    Instead of making HTTP requests directly, this builds `curl` ShellCmds so
    cache access can be composed into shell pipelines.
    """

    # Env vars Github Actions sets on runners.
    BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
    AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa

    def __init__(
            self,
            *,
            base_url: ta.Optional[str] = None,
            auth_token: ta.Optional[str] = None,
    ) -> None:
        """Default base_url / auth_token from the Actions runner environment."""

        super().__init__()

        if base_url is None:
            base_url = os.environ[self.BASE_URL_ENV_KEY]
        self._base_url = check.non_empty_str(base_url)

        if auth_token is None:
            auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
        self._auth_token = auth_token

        self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)

    #

    # Sentinel distinguishing 'not given' from an explicit None auth token.
    _MISSING = object()

    def build_headers(
            self,
            *,
            auth_token: ta.Any = _MISSING,
            content_type: ta.Optional[str] = None,
    ) -> ta.Dict[str, str]:
        """Build common request headers, optionally with auth and content type."""

        dct = {
            'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
        }

        if auth_token is self._MISSING:
            auth_token = self._auth_token
        if auth_token:
            dct['Authorization'] = f'Bearer {auth_token}'

        if content_type is not None:
            dct['Content-Type'] = content_type

        return dct

    #

    HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa

    def build_curl_cmd(
            self,
            method: str,
            url: str,
            *,
            json_content: bool = False,
            content_type: ta.Optional[str] = None,
    ) -> ShellCmd:
        """Build a curl ShellCmd for the given method and service-relative url."""

        if content_type is None and json_content:
            content_type = 'application/json'

        env = {}

        # Reference the token through an env var so the secret is not embedded
        # in the command string itself.
        header_auth_token: ta.Optional[str]
        if self._auth_token:
            env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
            header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
        else:
            header_auth_token = None

        hdrs = self.build_headers(
            auth_token=header_auth_token,
            content_type=content_type,
        )

        url = f'{self._service_url}/{url}'

        cmd = ' '.join([
            'curl',
            '-s',
            '-X', method,
            url,
            *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
        ])

        return ShellCmd(
            cmd,
            env=env,
        )

    def build_post_json_curl_cmd(
            self,
            url: str,
            obj: ta.Any,
            **kwargs: ta.Any,
    ) -> ShellCmd:
        """Build a POST curl ShellCmd with the given object as its JSON body."""

        curl_cmd = self.build_curl_cmd(
            'POST',
            url,
            json_content=True,
            **kwargs,
        )

        obj_json = json_dumps_compact(obj)

        return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')

    #

    @dc.dataclass()
    class CurlError(RuntimeError):
        # HTTP status code curl reported.
        status_code: int
        # Raw response body, if any was captured.
        body: ta.Optional[bytes]

        def __str__(self) -> str:
            return repr(self)

    @dc.dataclass(frozen=True)
    class CurlResult:
        status_code: int
        body: ta.Optional[bytes]

        def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
            return GithubV1CacheShellClient.CurlError(
                status_code=self.status_code,
                body=self.body,
            )

    def run_curl_cmd(
            self,
            cmd: ShellCmd,
            *,
            raise_: bool = False,
    ) -> CurlResult:
        """Run a curl ShellCmd, capturing status code and body.

        When raise_ is true, server-error statuses raise CurlError.
        """

        out_file = make_temp_file()
        with defer(lambda: os.unlink(out_file)):
            # Body goes to out_file; curl's stdout becomes a json stats object
            # (via -w '%{json}') from which the status code is read.
            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")

            out_json_bytes = run_cmd.run(subprocesses.check_output)

            out_json = json.loads(out_json_bytes.decode())
            status_code = check.isinstance(out_json['response_code'], int)

            with open(out_file, 'rb') as f:
                body = f.read()

            result = self.CurlResult(
                status_code=status_code,
                body=body,
            )

        if raise_ and (500 <= status_code <= 600):
            raise result.as_error()

        return result

    def run_json_curl_cmd(
            self,
            cmd: ShellCmd,
            *,
            success_status_codes: ta.Optional[ta.Container[int]] = None,
    ) -> ta.Optional[ta.Any]:
        """Run a curl ShellCmd and decode a JSON response.

        Returns None on 404 or an empty successful body; raises CurlError for
        other non-success statuses.
        """

        result = self.run_curl_cmd(cmd, raise_=True)

        if success_status_codes is not None:
            is_success = result.status_code in success_status_codes
        else:
            is_success = 200 <= result.status_code < 300

        if is_success:
            if not (body := result.body):
                return None
            # utf-8-sig tolerates a BOM at the start of the response.
            return json.loads(body.decode('utf-8-sig'))

        elif result.status_code == 404:
            return None

        else:
            raise result.as_error()

    #

    def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
        return self.build_curl_cmd(
            'GET',
            f'cache?keys={key}',
        )

    def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
        """Look up a cache entry by key, returning None on a miss."""

        curl_cmd = self.build_get_entry_curl_cmd(key)

        obj = self.run_json_curl_cmd(
            curl_cmd,
            success_status_codes=[200, 204],
        )
        if obj is None:
            return None

        return GithubCacheServiceV1.dataclass_from_json(
            GithubCacheServiceV1.ArtifactCacheEntry,
            obj,
        )

    #

    def build_download_get_entry_cmd(
            self,
            entry: GithubCacheServiceV1.ArtifactCacheEntry,
            out_file: str,
    ) -> ShellCmd:
        # aria2c with 4 connections for a faster archive download.
        return ShellCmd(' '.join([
            'aria2c',
            '-x', '4',
            '-o', out_file,
            check.non_empty_str(entry.archive_location),
        ]))

    def download_get_entry(
            self,
            entry: GithubCacheServiceV1.ArtifactCacheEntry,
            out_file: str,
    ) -> None:
        """Download the entry's archive to out_file."""

        dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
        dl_cmd.run(subprocesses.check_call)

    #

    def upload_cache_entry(
            self,
            key: str,
            in_file: str,
    ) -> None:
        """Reserve and upload a cache entry for the given file.

        NOTE: only the reservation step is implemented; the actual chunked
        upload still raises NotImplementedError.
        """

        check.state(os.path.isfile(in_file))

        file_size = os.stat(in_file).st_size

        reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
            key=key,
            cache_size=file_size,
        )
        reserve_cmd = self.build_post_json_curl_cmd(
            'caches',
            GithubCacheServiceV1.dataclass_to_json(reserve_req),
        )
        reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
            reserve_cmd,
            success_status_codes=[201],
        ))
        reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
            GithubCacheServiceV1.ReserveCacheResponse,
            reserve_resp_obj,
        )

        raise NotImplementedError
2877
+
2878
+
2879
+ ##
2880
+
2881
+
2882
class GithubShellCache(ShellCache):
    """ShellCache backed by the Github Actions cache, fronted by a local directory.

    Reads check the local directory first, then fall back to downloading from
    the Github cache; writes land locally and are then uploaded.
    """

    def __init__(
            self,
            dir: str,  # noqa
            *,
            client: ta.Optional[GithubV1CacheShellClient] = None,
    ) -> None:
        super().__init__()

        self._dir = check.not_none(dir)

        if client is None:
            client = GithubV1CacheShellClient()
        self._client = client

        # Local directory tier fronting the remote cache.
        self._local = DirectoryFileCache(self._dir)

    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
        """Return a cmd that cats the cached file for key, or None on a miss."""

        local_file = self._local.get_cache_file_path(key)
        if os.path.exists(local_file):
            return ShellCmd(f'cat {shlex.quote(local_file)}')

        if (entry := self._client.run_get_entry(key)) is None:
            return None

        # Download to an incomplete-marker path, then atomically move into place.
        tmp_file = self._local.format_incomplete_file(local_file)
        try:
            self._client.download_get_entry(entry, tmp_file)

            os.replace(tmp_file, local_file)

        except BaseException:  # noqa
            os.unlink(tmp_file)

            raise

        return ShellCmd(f'cat {shlex.quote(local_file)}')

    class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
        """Put-context that writes to a temp file, then commits locally + remotely."""

        def __init__(
                self,
                owner: 'GithubShellCache',
                key: str,
                tmp_file: str,
                local_file: str,
        ) -> None:
            super().__init__()

            self._owner = owner
            self._key = key
            self._tmp_file = tmp_file
            self._local_file = local_file

        @property
        def cmd(self) -> ShellCmd:
            # The producer pipes its output into the temp file.
            return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')

        def _commit(self) -> None:
            # Atomically publish locally, then push to the remote cache.
            os.replace(self._tmp_file, self._local_file)

            self._owner._client.upload_cache_entry(self._key, self._local_file)  # noqa

        def _abort(self) -> None:
            os.unlink(self._tmp_file)

    def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
        """Return a context whose cmd writes the file for key into the cache."""

        local_file = self._local.get_cache_file_path(key, make_dirs=True)
        return self._PutFileCmdContext(
            self,
            key,
            self._local.format_incomplete_file(local_file),
            local_file,
        )
2955
+
2956
+
2957
+ ########################################
2958
+ # ../requirements.py
2959
+ """
2960
+ TODO:
2961
+ - pip compile lol
2962
+ - but still support git+ stuff
2963
+ - req.txt format aware hash
2964
+ - more than just whitespace
2965
+ - pyproject req rewriting
2966
+ - download_requirements bootstrap off prev? not worth the dl?
2967
+ - big deps (torch) change less, probably worth it
2968
+ - follow embedded -r automatically like pyp
2969
+ """
2970
+
2971
+
2972
+ ##
2973
+
2974
+
2975
def build_requirements_hash(
        requirements_txts: ta.Sequence[str],
) -> str:
    """Return a stable sha256 hash over the named requirements files' contents."""

    contents_by_name: dict = {}
    for path in requirements_txts:
        name = os.path.basename(path)
        check.not_in(name, contents_by_name)  # duplicate basenames would collide
        with open(path) as f:
            contents_by_name[name] = f.read()

    #

    # Hash each file, then hash the sorted 'name=hash' lines so the result is
    # independent of input ordering.
    entries = [
        f'{name}={sha256_str(body)}'
        for name, body in sorted(contents_by_name.items())
    ]

    return sha256_str('\n'.join(entries))
2995
+
2996
+
2997
+ ##
2998
+
2999
+
3000
def download_requirements(
        image: str,
        requirements_dir: str,
        requirements_txts: ta.Sequence[str],
) -> None:
    """Run `pip download` inside the image to populate requirements_dir."""

    txt_dir = tempfile.mkdtemp()
    with defer(lambda: shutil.rmtree(txt_dir)):
        # Stage the requirements files in a temp dir mounted into the container.
        for src in requirements_txts:
            shutil.copyfile(src, os.path.join(txt_dir, os.path.basename(src)))

        r_args = []
        for src in requirements_txts:
            r_args.extend(['-r', f'/requirements_txt/{os.path.basename(src)}'])

        subprocesses.check_call(
            'docker',
            'run',
            '--rm',
            '-i',
            '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
            '-v', f'{txt_dir}:/requirements_txt',
            image,
            'pip',
            'download',
            '-d', '/requirements',
            *r_args,
        )
3026
+
3027
+
3028
+ ########################################
3029
+ # ../ci.py
3030
+
3031
+
3032
class Ci(ExitStacked):
    """Drives a full CI run: image and requirements resolution, then docker compose."""

    # Cache keys embed a content hash truncated to this many hex chars.
    FILE_NAME_HASH_LEN = 16

    @dc.dataclass(frozen=True)
    class Config:
        project_dir: str

        docker_file: str

        compose_file: str
        service: str

        cmd: ShellCmd

        requirements_txts: ta.Optional[ta.Sequence[str]] = None

        always_pull: bool = False

        def __post_init__(self) -> None:
            # A bare string would silently iterate per-character.
            check.not_isinstance(self.requirements_txts, str)

    def __init__(
            self,
            cfg: Config,
            *,
            shell_cache: ta.Optional[ShellCache] = None,
            file_cache: ta.Optional[FileCache] = None,
    ) -> None:
        super().__init__()

        self._cfg = cfg
        self._shell_cache = shell_cache
        self._file_cache = file_cache

    #

    def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
        """Load a cached image by key via the shell cache; None on miss/no cache."""

        if self._shell_cache is None:
            return None

        get_cache_cmd = self._shell_cache.get_file_cmd(key)
        if get_cache_cmd is None:
            return None

        # Cached tars are zstd-compressed; decompress inside the pipeline.
        get_cache_cmd = dc.replace(get_cache_cmd, s=f'{get_cache_cmd.s} | zstd -cd --long')  # noqa

        return load_docker_tar_cmd(get_cache_cmd)

    def _save_cache_docker_image(self, key: str, image: str) -> None:
        """Save the image under key via the shell cache (zstd-compressed tar)."""

        if self._shell_cache is None:
            return

        with self._shell_cache.put_file_cmd(key) as put_cache:
            put_cache_cmd = put_cache.cmd

            put_cache_cmd = dc.replace(put_cache_cmd, s=f'zstd | {put_cache_cmd.s}')

            save_docker_tar_cmd(image, put_cache_cmd)

    #

    def _load_docker_image(self, image: str) -> None:
        # Skip all work if the image is already local (unless always_pull).
        if not self._cfg.always_pull and is_docker_image_present(image):
            return

        # Sanitize the image ref into a cache-key-safe suffix.
        dep_suffix = image
        for c in '/:.-_':
            dep_suffix = dep_suffix.replace(c, '-')

        cache_key = f'docker-{dep_suffix}'
        if self._load_cache_docker_image(cache_key) is not None:
            return

        pull_docker_image(image)

        self._save_cache_docker_image(cache_key, image)

    def load_docker_image(self, image: str) -> None:
        """Ensure the image is available locally, using the cache when possible."""

        with log_timing_context(f'Load docker image: {image}'):
            self._load_docker_image(image)

    @cached_nullary
    def load_compose_service_dependencies(self) -> None:
        """Load every image the target compose service depends on."""

        deps = get_compose_service_dependencies(
            self._cfg.compose_file,
            self._cfg.service,
        )

        for dep_image in deps.values():
            self.load_docker_image(dep_image)

    #

    def _resolve_ci_image(self) -> str:
        # The image cache is keyed on the Dockerfile's contents.
        docker_file_hash = build_docker_file_hash(self._cfg.docker_file)[:self.FILE_NAME_HASH_LEN]

        cache_key = f'ci-{docker_file_hash}'
        if (cache_image_id := self._load_cache_docker_image(cache_key)) is not None:
            return cache_image_id

        image_id = build_docker_image(
            self._cfg.docker_file,
            cwd=self._cfg.project_dir,
        )

        self._save_cache_docker_image(cache_key, image_id)

        return image_id

    @cached_nullary
    def resolve_ci_image(self) -> str:
        """Build (or fetch from cache) the CI image and return its id."""

        with log_timing_context('Resolve ci image') as ltc:
            image_id = self._resolve_ci_image()
            ltc.set_description(f'Resolve ci image: {image_id}')
            return image_id

    #

    def _resolve_requirements_dir(self) -> str:
        requirements_txts = [
            os.path.join(self._cfg.project_dir, rf)
            for rf in check.not_none(self._cfg.requirements_txts)
        ]

        requirements_hash = build_requirements_hash(requirements_txts)[:self.FILE_NAME_HASH_LEN]

        tar_file_key = f'requirements-{requirements_hash}'
        tar_file_name = f'{tar_file_key}.tar'

        # The working dir lives until this Ci instance's exit stack unwinds.
        temp_dir = tempfile.mkdtemp()
        self._enter_context(defer(lambda: shutil.rmtree(temp_dir)))  # noqa

        # Cache hit: just unpack the stored tar.
        if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_key)):
            with tarfile.open(cache_tar_file) as tar:
                tar.extractall(path=temp_dir)  # noqa

            return temp_dir

        temp_requirements_dir = os.path.join(temp_dir, 'requirements')
        os.makedirs(temp_requirements_dir)

        download_requirements(
            self.resolve_ci_image(),
            temp_requirements_dir,
            requirements_txts,
        )

        # Cache miss: tar up the downloads and store them for next time.
        if self._file_cache is not None:
            temp_tar_file = os.path.join(temp_dir, tar_file_name)

            with tarfile.open(temp_tar_file, 'w') as tar:
                for requirement_file in os.listdir(temp_requirements_dir):
                    tar.add(
                        os.path.join(temp_requirements_dir, requirement_file),
                        arcname=requirement_file,
                    )

            self._file_cache.put_file(os.path.basename(tar_file_key), temp_tar_file)

        return temp_requirements_dir

    @cached_nullary
    def resolve_requirements_dir(self) -> str:
        """Download (or fetch from cache) pip requirements; return their directory."""

        with log_timing_context('Resolve requirements dir') as ltc:
            requirements_dir = self._resolve_requirements_dir()
            ltc.set_description(f'Resolve requirements dir: {requirements_dir}')
            return requirements_dir

    #

    def _run_compose_(self) -> None:
        # Install requirements inside the container (bootstrapping uv first)
        # before chaining into the user command.
        setup_cmds = [
            'pip install --root-user-action ignore --find-links /requirements --no-index uv',
            (
                'uv pip install --system --find-links /requirements ' +
                ' '.join(f'-r /project/{rf}' for rf in self._cfg.requirements_txts or [])
            ),
        ]

        #

        ci_cmd = dc.replace(self._cfg.cmd, s=' && '.join([
            *setup_cmds,
            f'({self._cfg.cmd.s})',
        ]))

        #

        with DockerComposeRun(DockerComposeRun.Config(
            compose_file=self._cfg.compose_file,
            service=self._cfg.service,

            image=self.resolve_ci_image(),

            cmd=ci_cmd,

            # Mount the project and the downloaded requirements read paths.
            run_options=[
                '-v', f'{os.path.abspath(self._cfg.project_dir)}:/project',
                '-v', f'{os.path.abspath(self.resolve_requirements_dir())}:/requirements',
            ],

            cwd=self._cfg.project_dir,
        )) as ci_compose_run:
            ci_compose_run.run()

    def _run_compose(self) -> None:
        with log_timing_context('Run compose'):
            self._run_compose_()

    #

    def run(self) -> None:
        """Execute the full CI pipeline in order."""

        self.load_compose_service_dependencies()

        self.resolve_ci_image()

        self.resolve_requirements_dir()

        self._run_compose()
3251
+
3252
+
3253
+ ########################################
3254
+ # ../github/cli.py
3255
+ """
3256
+ See:
3257
+ - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
3258
+ """
3259
+
3260
+
3261
class GithubCli(ArgparseCli):
    """CLI subcommands for interacting with the Github Actions cache."""

    @argparse_cmd(
        argparse_arg('key'),
    )
    def get_cache_entry(self) -> None:
        # Print the entry for the key as pretty json, or nothing on a miss.
        shell_client = GithubV1CacheShellClient()
        entry = shell_client.run_get_entry(self.args.key)
        if entry is None:
            return
        print(json_dumps_pretty(dc.asdict(entry)))  # noqa

    @argparse_cmd(
        argparse_arg('repository-id'),
    )
    def list_cache_entries(self) -> None:
        raise NotImplementedError
3277
+
3278
+
3279
+ ########################################
3280
+ # cli.py
3281
+
3282
+
3283
class CiCli(ArgparseCli):
    """Top-level CI command-line interface."""

    #

    @argparse_cmd(
        argparse_arg('requirements-txt', nargs='+'),
    )
    def print_requirements_hash(self) -> None:
        requirements_txts = self.args.requirements_txt

        print(build_requirements_hash(requirements_txts))

    #

    @argparse_cmd(
        argparse_arg('compose-file'),
        argparse_arg('service'),
    )
    def dump_compose_deps(self) -> None:
        compose_file = self.args.compose_file
        service = self.args.service

        print(get_compose_service_dependencies(
            compose_file,
            service,
        ))

    #

    @argparse_cmd(
        accepts_unknown=True,
    )
    def github(self) -> ta.Optional[int]:
        # Delegate all remaining args to the github sub-cli.
        return GithubCli(self.unknown_args).cli_run()

    #

    @argparse_cmd(
        argparse_arg('project-dir'),
        argparse_arg('service'),
        argparse_arg('--docker-file'),
        argparse_arg('--compose-file'),
        argparse_arg('-r', '--requirements-txt', action='append'),
        argparse_arg('--github-cache', action='store_true'),
        argparse_arg('--cache-dir'),
        argparse_arg('--always-pull', action='store_true'),
    )
    async def run(self) -> None:
        """Resolve inputs (applying conventional defaults) and execute a CI run."""

        project_dir = self.args.project_dir
        docker_file = self.args.docker_file
        compose_file = self.args.compose_file
        service = self.args.service
        requirements_txts = self.args.requirements_txt
        cache_dir = self.args.cache_dir
        always_pull = self.args.always_pull

        #

        check.state(os.path.isdir(project_dir))

        #

        def find_alt_file(*alts: str) -> ta.Optional[str]:
            # Return the first existing candidate under project_dir, if any.
            for alt in alts:
                alt_file = os.path.abspath(os.path.join(project_dir, alt))
                if os.path.isfile(alt_file):
                    return alt_file
            return None

        # Fall back through conventional Dockerfile locations.
        if docker_file is None:
            docker_file = find_alt_file(
                'docker/ci/Dockerfile',
                'docker/ci.Dockerfile',
                'ci.Dockerfile',
                'Dockerfile',
            )
        check.state(os.path.isfile(docker_file))

        # Fall back through conventional compose-file locations.
        if compose_file is None:
            compose_file = find_alt_file(
                'docker/compose.yml',
                'compose.yml',
            )
        check.state(os.path.isfile(compose_file))

        if not requirements_txts:
            # Default to whichever conventional requirements files exist.
            requirements_txts = []
            for rf in [
                'requirements.txt',
                'requirements-dev.txt',
                'requirements-ci.txt',
            ]:
                if os.path.exists(os.path.join(project_dir, rf)):
                    requirements_txts.append(rf)
        else:
            for rf in requirements_txts:
                check.state(os.path.isfile(rf))

        #

        # Wire up local (and optionally Github-backed) caches when a cache dir
        # was given.
        shell_cache: ta.Optional[ShellCache] = None
        file_cache: ta.Optional[FileCache] = None
        if cache_dir is not None:
            if not os.path.exists(cache_dir):
                os.makedirs(cache_dir)
            check.state(os.path.isdir(cache_dir))

            directory_file_cache = DirectoryFileCache(cache_dir)

            file_cache = directory_file_cache

            if self.args.github_cache:
                shell_cache = GithubShellCache(cache_dir)
            else:
                shell_cache = DirectoryShellCache(directory_file_cache)

        #

        with Ci(
                Ci.Config(
                    project_dir=project_dir,

                    docker_file=docker_file,

                    compose_file=compose_file,
                    service=service,

                    requirements_txts=requirements_txts,

                    cmd=ShellCmd(' && '.join([
                        'cd /project',
                        'python3 -m pytest -svv test.py',
                    ])),

                    always_pull=always_pull,
                ),
                file_cache=file_cache,
                shell_cache=shell_cache,
        ) as ci:
            ci.run()
3422
+
3423
+
3424
async def _async_main() -> ta.Optional[int]:
    """Run the CLI coroutine and return its exit code (possibly None)."""

    rc = await CiCli().async_cli_run()
    return rc
3426
+
3427
+
3428
def _main() -> None:
    """Synchronous entrypoint: configure logging, run the CLI, exit with its rc."""

    configure_standard_logging('DEBUG')

    rc = asyncio.run(_async_main())
    # A non-int result (e.g. None) maps to a clean zero exit.
    sys.exit(rc if isinstance(rc, int) else 0)
3432
+
3433
+
3434
+ if __name__ == '__main__':
3435
+ _main()