omdev 0.0.0.dev242__py3-none-any.whl → 0.0.0.dev243__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
omdev/ci/docker/cacheserved/cache.py CHANGED
@@ -6,6 +6,7 @@ import json
  import os.path
  import typing as ta

+ from omlish.asyncs.asyncio.sockets import asyncio_wait_until_can_connect
  from omlish.asyncs.asyncio.subprocesses import asyncio_subprocesses
  from omlish.lite.check import check
  from omlish.lite.json import json_dumps_compact
@@ -126,17 +127,14 @@ class CacheServedDockerCache(DockerCache):
          dds_run_task = asyncio.create_task(dds.run())
          try:
              timeout = Timeout.of(self._config.server_start_timeout)
-             while True:
-                 timeout()
-                 try:
-                     reader, writer = await asyncio.open_connection('localhost', self._config.port)
-                 except Exception as e:  # noqa
-                     log.exception('Failed to connect to cache server - will try again')
-                 else:
-                     writer.close()
-                     await asyncio.wait_for(writer.wait_closed(), timeout=timeout.remaining())
-                     break
-                 await asyncio.sleep(self._config.server_start_sleep)
+
+             await asyncio_wait_until_can_connect(
+                 'localhost',
+                 self._config.port,
+                 timeout=timeout,
+                 on_fail=lambda _: log.exception('Failed to connect to cache server - will try again'),
+                 sleep_s=self._config.server_start_sleep,
+             )

              if (prc := self._config.pull_run_cmd) is not None:
                  pull_cmd = [
omdev/oci/tars.py CHANGED
@@ -79,11 +79,7 @@ class OciDataTarWriter(ExitStacked):
              tar_sha256=self._tw.sha256(),
          )

-     def __enter__(self) -> 'OciDataTarWriter':
-         super().__enter__()
-
-         #
-
+     def _enter_contexts(self) -> None:
          self._cw = self._FileWrapper(self._f)

          if self._compression is OciCompression.GZIP:
@@ -112,16 +108,12 @@ class OciDataTarWriter(ExitStacked):
          self._tw = self._FileWrapper(self._cf)

          self._tf = self._enter_context(
-             tarfile.open(  # type: ignore
+             tarfile.open(  # type: ignore  # noqa
                  fileobj=self._tw,
                  mode='w',
              ),
          )

-         #
-
-         return self
-
      def tar_file(self) -> tarfile.TarFile:
          return self._tf

omdev/scripts/ci.py CHANGED
@@ -2607,6 +2607,48 @@ class ArgparseCli:
          return fn()


+ ########################################
+ # ../../../omlish/asyncs/asyncio/sockets.py
+
+
+ async def asyncio_wait_until_can_connect(
+         host: ta.Any = None,
+         port: ta.Any = None,
+         *,
+         timeout: ta.Optional[TimeoutLike] = None,
+         on_fail: ta.Optional[ta.Callable[[BaseException], None]] = None,
+         sleep_s: float = .1,
+         exception: ta.Union[ta.Type[BaseException], ta.Tuple[ta.Type[BaseException], ...]] = (Exception,),
+ ) -> None:
+     timeout = Timeout.of(timeout)
+
+     async def inner():
+         while True:
+             timeout()
+
+             try:
+                 reader, writer = await asyncio.open_connection(host, port)
+
+             except asyncio.CancelledError:
+                 raise
+
+             except exception as e:  # noqa
+                 if on_fail is not None:
+                     on_fail(e)
+
+             else:
+                 writer.close()
+                 await asyncio.wait_for(writer.wait_closed(), timeout=timeout.or_(None))
+                 break
+
+             await asyncio.sleep(min(sleep_s, timeout.remaining()))
+
+     if timeout() != float('inf'):
+         await asyncio.wait_for(inner(), timeout=timeout())
+     else:
+         await inner()
+
+
  ########################################
  # ../../../omlish/asyncs/asyncio/timeouts.py

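The asyncio_wait_until_can_connect helper added above (vendored into ci.py from omlish) is what cache.py now calls in place of its hand-rolled retry loop. A minimal usage sketch, assuming only the import path and signature shown in this diff; the port number and log message are illustrative:

import asyncio
import logging

from omlish.asyncs.asyncio.sockets import asyncio_wait_until_can_connect

log = logging.getLogger(__name__)


async def wait_for_local_server() -> None:
    # Retries asyncio.open_connection('localhost', 8080) until it succeeds,
    # sleeping 0.5s between attempts; with no timeout given it waits indefinitely.
    await asyncio_wait_until_can_connect(
        'localhost',
        8080,
        on_fail=lambda exc: log.warning('Server not accepting connections yet: %r', exc),
        sleep_s=.5,
    )


asyncio.run(wait_for_local_server())
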
@@ -3041,20 +3083,64 @@ class HttpRequestParser:


  class ExitStacked:
+     def __init_subclass__(cls, **kwargs: ta.Any) -> None:
+         super().__init_subclass__(**kwargs)
+
+         for a in ('__enter__', '__exit__'):
+             for b in cls.__bases__:
+                 if b is ExitStacked:
+                     continue
+                 try:
+                     fn = getattr(b, a)
+                 except AttributeError:
+                     pass
+                 else:
+                     if fn is not getattr(ExitStacked, a):
+                         raise TypeError(f'ExitStacked subclass {cls} must not not override {a} via {b}')
+
      _exit_stack: ta.Optional[contextlib.ExitStack] = None

+     @contextlib.contextmanager
+     def _exit_stacked_init_wrapper(self) -> ta.Iterator[None]:
+         """
+         Overridable wrapper around __enter__ which deliberately does not have access to an _exit_stack yet. Intended for
+         things like wrapping __enter__ in a lock.
+         """
+
+         yield
+
+     @ta.final
      def __enter__(self: ExitStackedT) -> ExitStackedT:
-         check.state(self._exit_stack is None)
-         es = self._exit_stack = contextlib.ExitStack()
-         es.__enter__()
-         return self
+         """
+         Final because any contexts entered during this init must be exited if any exception is thrown, and user
+         overriding would likely interfere with that. Override `_enter_contexts` for such init.
+         """
+
+         with self._exit_stacked_init_wrapper():
+             check.state(self._exit_stack is None)
+             es = self._exit_stack = contextlib.ExitStack()
+             es.__enter__()
+             try:
+                 self._enter_contexts()
+             except Exception:  # noqa
+                 es.__exit__(*sys.exc_info())
+                 raise
+             return self

+     @ta.final
      def __exit__(self, exc_type, exc_val, exc_tb):
          if (es := self._exit_stack) is None:
              return None
-         self._exit_contexts()
+         try:
+             self._exit_contexts()
+         except Exception:  # noqa
+             es.__exit__(*sys.exc_info())
+             raise
          return es.__exit__(exc_type, exc_val, exc_tb)

+     def _enter_contexts(self) -> None:
+         pass
+
      def _exit_contexts(self) -> None:
          pass

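The remaining hunks in this diff migrate ExitStacked subclasses (OciDataTarWriter here and in omdev/oci/tars.py, and the json StreamBuilder/StreamParser below) from overriding __enter__, which is now @ta.final, to the new _enter_contexts hook. A minimal sketch of the pattern, assuming the ExitStacked defined in the hunk above is in scope; the subclass itself is hypothetical and not part of the release:

import tempfile
import typing as ta


class TempDirHolder(ExitStacked):
    _tmp: ta.Optional['tempfile.TemporaryDirectory'] = None

    def _enter_contexts(self) -> None:
        # Anything registered via _enter_context here is unwound by the (final) __exit__,
        # including when _enter_contexts itself raises partway through.
        self._tmp = self._enter_context(tempfile.TemporaryDirectory())


with TempDirHolder() as tdh:
    print(tdh._tmp.name)  # noqa
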
@@ -3064,20 +3150,54 @@ class ExitStacked:


  class AsyncExitStacked:
+     def __init_subclass__(cls, **kwargs: ta.Any) -> None:
+         super().__init_subclass__(**kwargs)
+
+         for a in ('__aenter__', '__aexit__'):
+             for b in cls.__bases__:
+                 if b is AsyncExitStacked:
+                     continue
+                 try:
+                     fn = getattr(b, a)
+                 except AttributeError:
+                     pass
+                 else:
+                     if fn is not getattr(AsyncExitStacked, a):
+                         raise TypeError(f'AsyncExitStacked subclass {cls} must not not override {a} via {b}')
+
      _exit_stack: ta.Optional[contextlib.AsyncExitStack] = None

+     @contextlib.asynccontextmanager
+     async def _async_exit_stacked_init_wrapper(self) -> ta.AsyncGenerator[None, None]:
+         yield
+
+     @ta.final
      async def __aenter__(self: AsyncExitStackedT) -> AsyncExitStackedT:
-         check.state(self._exit_stack is None)
-         es = self._exit_stack = contextlib.AsyncExitStack()
-         await es.__aenter__()
-         return self
+         async with self._async_exit_stacked_init_wrapper():
+             check.state(self._exit_stack is None)
+             es = self._exit_stack = contextlib.AsyncExitStack()
+             await es.__aenter__()
+             try:
+                 await self._async_enter_contexts()
+             except Exception:  # noqa
+                 await es.__aexit__(*sys.exc_info())
+                 raise
+             return self

+     @ta.final
      async def __aexit__(self, exc_type, exc_val, exc_tb):
          if (es := self._exit_stack) is None:
              return None
-         await self._async_exit_contexts()
+         try:
+             await self._async_exit_contexts()
+         except Exception:  # noqa
+             await es.__aexit__(*sys.exc_info())
+             raise
          return await es.__aexit__(exc_type, exc_val, exc_tb)

+     async def _async_enter_contexts(self) -> None:
+         pass
+
      async def _async_exit_contexts(self) -> None:
          pass

@@ -6952,11 +7072,7 @@ class OciDataTarWriter(ExitStacked):
              tar_sha256=self._tw.sha256(),
          )

-     def __enter__(self) -> 'OciDataTarWriter':
-         super().__enter__()
-
-         #
-
+     def _enter_contexts(self) -> None:
          self._cw = self._FileWrapper(self._f)

          if self._compression is OciCompression.GZIP:
@@ -6985,16 +7101,12 @@ class OciDataTarWriter(ExitStacked):
          self._tw = self._FileWrapper(self._cf)

          self._tf = self._enter_context(
-             tarfile.open(  # type: ignore
+             tarfile.open(  # type: ignore  # noqa
                  fileobj=self._tw,
                  mode='w',
              ),
          )

-         #
-
-         return self
-
      def tar_file(self) -> tarfile.TarFile:
          return self._tf

@@ -8800,58 +8912,88 @@ class SocketServer(abc.ABC):

      #

-     @contextlib.contextmanager
-     def _listen_context(self) -> ta.Iterator[SelectorProtocol]:
-         with contextlib.ExitStack() as es:
-             es.enter_context(self._lock)
-             es.enter_context(self._binder)
+     class PollResult(enum.Enum):
+         TIMEOUT = enum.auto()
+         CONNECTION = enum.auto()
+         ERROR = enum.auto()
+         SHUTDOWN = enum.auto()

-             self._binder.listen()
+     class PollContext(ExitStacked, abc.ABC):
+         @abc.abstractmethod
+         def poll(self, timeout: ta.Optional[float] = None) -> 'SocketServer.PollResult':
+             raise NotImplementedError

-             self._is_shutdown.clear()
-             try:
-                 # XXX: Consider using another file descriptor or connecting to the socket to wake this up instead of
-                 # polling. Polling reduces our responsiveness to a shutdown request and wastes cpu at all other times.
-                 with self.Selector() as selector:
-                     selector.register(self._binder.fileno(), selectors.EVENT_READ)
+     class _PollContext(PollContext):
+         def __init__(self, server: 'SocketServer') -> None:
+             super().__init__()

-                     yield selector
+             self._server = server

-             finally:
-                 self._is_shutdown.set()
+         _selector: ta.Any = None

-     @contextlib.contextmanager
-     def loop_context(self, poll_interval: ta.Optional[float] = None) -> ta.Iterator[ta.Iterator[bool]]:
-         if poll_interval is None:
-             poll_interval = self._poll_interval
+         def _enter_contexts(self) -> None:
+             self._enter_context(self._server._lock)  # noqa: SLF001
+             self._enter_context(self._server._binder)  # noqa: SLF001

-         with self._listen_context() as selector:
-             def loop():
-                 while not self._should_shutdown:
-                     ready = selector.select(poll_interval)
+             self._server._binder.listen()  # noqa: SLF001
+
+             self._server._is_shutdown.clear()  # noqa: SLF001
+             self._enter_context(defer(self._server._is_shutdown.set))  # noqa

-                     # bpo-35017: shutdown() called during select(), exit immediately.
-                     if self._should_shutdown:
-                         break  # type: ignore[unreachable]
+             # XXX: Consider using another file descriptor or connecting to the socket to wake this up instead of
+             # polling. Polling reduces our responsiveness to a shutdown request and wastes cpu at all other times.
+             self._selector = self._enter_context(self._server.Selector())
+             self._selector.register(self._server._binder.fileno(), selectors.EVENT_READ)  # noqa: SLF001

-                     if ready:
-                         try:
-                             conn = self._binder.accept()
+         def poll(self, timeout: ta.Optional[float] = None) -> 'SocketServer.PollResult':
+             if self._server._should_shutdown:  # noqa: SLF001
+                 return SocketServer.PollResult.SHUTDOWN

-                         except OSError as exc:
-                             self._handle_error(exc)
+             ready = self._selector.select(timeout)

-                             return
+             # bpo-35017: shutdown() called during select(), exit immediately.
+             if self._server._should_shutdown:  # noqa: SLF001
+                 return SocketServer.PollResult.SHUTDOWN  # type: ignore[unreachable]

-                     try:
-                         self._handler(conn)
+             if not ready:
+                 return SocketServer.PollResult.TIMEOUT
+
+             try:
+                 conn = self._server._binder.accept()  # noqa: SLF001

-                     except Exception as exc:  # noqa
-                         self._handle_error(exc, conn)
+             except OSError as exc:
+                 self._server._handle_error(exc)  # noqa: SLF001
+
+                 return SocketServer.PollResult.ERROR
+
+             try:
+                 self._server._handler(conn)  # noqa: SLF001
+
+             except Exception as exc:  # noqa
+                 self._server._handle_error(exc, conn)  # noqa: SLF001
+
+             close_socket_immediately(conn.socket)
+
+             return SocketServer.PollResult.CONNECTION
+
+     def poll_context(self) -> PollContext:
+         return self._PollContext(self)
+
+     #

-                         close_socket_immediately(conn.socket)
+     @contextlib.contextmanager
+     def loop_context(self, poll_interval: ta.Optional[float] = None) -> ta.Iterator[ta.Iterator[bool]]:
+         if poll_interval is None:
+             poll_interval = self._poll_interval

-                     yield bool(ready)
+         with self.poll_context() as pc:
+             def loop():
+                 while True:
+                     res = pc.poll(poll_interval)
+                     if res in (SocketServer.PollResult.ERROR, SocketServer.PollResult.SHUTDOWN):
+                         return
+                     else:
+                         yield res == SocketServer.PollResult.CONNECTION

              yield loop()

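Callers that need finer-grained control than loop_context can drive the new poll_context/PollResult API directly. A small sketch, assuming server is an already-constructed SocketServer subclass instance:

def serve_until_shutdown(server: 'SocketServer', poll_interval: float = 0.5) -> None:
    # Mirrors what loop_context does above, but lets the caller own the loop.
    with server.poll_context() as pc:
        while True:
            res = pc.poll(poll_interval)
            if res in (SocketServer.PollResult.ERROR, SocketServer.PollResult.SHUTDOWN):
                break
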
@@ -11460,17 +11602,14 @@ class CacheServedDockerCache(DockerCache):
          dds_run_task = asyncio.create_task(dds.run())
          try:
              timeout = Timeout.of(self._config.server_start_timeout)
-             while True:
-                 timeout()
-                 try:
-                     reader, writer = await asyncio.open_connection('localhost', self._config.port)
-                 except Exception as e:  # noqa
-                     log.exception('Failed to connect to cache server - will try again')
-                 else:
-                     writer.close()
-                     await asyncio.wait_for(writer.wait_closed(), timeout=timeout.remaining())
-                     break
-                 await asyncio.sleep(self._config.server_start_sleep)
+
+             await asyncio_wait_until_can_connect(
+                 'localhost',
+                 self._config.port,
+                 timeout=timeout,
+                 on_fail=lambda _: log.exception('Failed to connect to cache server - will try again'),
+                 sleep_s=self._config.server_start_sleep,
+             )

              if (prc := self._config.pull_run_cmd) is not None:
                  pull_cmd = [
omdev/tools/json/parsing.py CHANGED
@@ -55,10 +55,8 @@ class DelimitingParser:
  class StreamBuilder(lang.ExitStacked):
      _builder: JsonObjectBuilder | None = None

-     def __enter__(self) -> ta.Self:
-         super().__enter__()
+     def _enter_contexts(self) -> None:
          self._builder = self._enter_context(JsonObjectBuilder())
-         return self

      def build(self, e: JsonStreamParserEvent) -> ta.Generator[ta.Any, None, None]:
          yield from check.not_none(self._builder)(e)
@@ -69,12 +67,10 @@ class StreamParser(lang.ExitStacked):
      _lex: JsonStreamLexer
      _parse: JsonStreamParser

-     def __enter__(self) -> ta.Self:
-         super().__enter__()
+     def _enter_contexts(self) -> None:
          self._decoder = codecs.getincrementaldecoder('utf-8')()
          self._lex = self._enter_context(JsonStreamLexer())
          self._parse = self._enter_context(JsonStreamParser())
-         return self

      def parse(self, b: bytes) -> ta.Generator[JsonStreamParserEvent, None, None]:
          for s in self._decoder.decode(b, not b):
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: omdev
- Version: 0.0.0.dev242
+ Version: 0.0.0.dev243
  Summary: omdev
  Author: wrmsr
  License: BSD-3-Clause
@@ -12,7 +12,7 @@ Classifier: Operating System :: OS Independent
  Classifier: Operating System :: POSIX
  Requires-Python: >=3.12
  License-File: LICENSE
- Requires-Dist: omlish==0.0.0.dev242
+ Requires-Dist: omlish==0.0.0.dev243
  Provides-Extra: all
  Requires-Dist: black~=25.1; extra == "all"
  Requires-Dist: pycparser~=2.22; extra == "all"
@@ -88,7 +88,7 @@ omdev/ci/docker/packing.py,sha256=Bl0aBkkdwES5ePGE3nmyg1CAQzmMDCVP2KTdZUITpQE,20
  omdev/ci/docker/repositories.py,sha256=ZWfObYdZXPn4BBeg8TsYeNVmH1EVEBadfRuxehAhxMM,1223
  omdev/ci/docker/utils.py,sha256=URioGRzqyqdJBZyOfzsrUwv5hSJ3WM23_sLHES9vamc,1129
  omdev/ci/docker/cacheserved/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- omdev/ci/docker/cacheserved/cache.py,sha256=hQdeRMRO0N3l-B0-2Ykua7CGDJiQ40HzZYnzPmFje9Q,7191
+ omdev/ci/docker/cacheserved/cache.py,sha256=9bHrVxBS0tSbpGV-tdANxn-vs3A6HF_WYmRH3CbnTKE,7019
  omdev/ci/docker/cacheserved/manifests.py,sha256=C8VPMJoXEEOoTdzWTZLRBtb-bNxJrT5gcaq_ucsvc0I,3584
  omdev/ci/github/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  omdev/ci/github/api.py,sha256=Vqza7Hm1OCSfZYgdXF4exkjneqNjFcdO1pl8qmODskU,5198
@@ -171,7 +171,7 @@ omdev/oci/dataserver.py,sha256=QIiJUZQPxL_yDomBuy9UdH3nL1OE2BkP7JxNz4Q3BxE,1863
  omdev/oci/loading.py,sha256=iDMr7JGYRS6CeVrc-LUVjvUA4a49tpgwpJ3y0IImW54,3855
  omdev/oci/media.py,sha256=PM2w1P3YxyvpfaHEDMD8iyBNJa18oVMOLF7KNb2R-DQ,5351
  omdev/oci/repositories.py,sha256=APNQoJxPlN6vaOmQg-MS6cHwGlpTCoVyG8WBjCIaEak,4513
- omdev/oci/tars.py,sha256=ijy27sJ0r3TtQGvEIk5ol4-kuHDktVQ88-75pvjSnds,3434
+ omdev/oci/tars.py,sha256=m2KwGYkvO-HjksFkYPjPdwTmIlryZDITDSeQC5UGAhc,3362
  omdev/oci/pack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  omdev/oci/pack/packing.py,sha256=K00GKBPGYYzFEK0CuQvoI7XRLULNld7_i7XcKdQ35zk,5413
  omdev/oci/pack/repositories.py,sha256=EwCJlngKjqpNgcEeszOOnj3WxlQTgLOKo_rwe2PW8JM,4948
@@ -212,7 +212,7 @@ omdev/pyproject/resources/docker-dev.sh,sha256=DHkz5D18jok_oDolfg2mqrvGRWFoCe9GQ
  omdev/pyproject/resources/python.sh,sha256=rFaN4SiJ9hdLDXXsDTwugI6zsw6EPkgYMmtacZeTbvw,749
  omdev/scripts/__init__.py,sha256=MKCvUAEQwsIvwLixwtPlpBqmkMXLCnjjXyAXvVpDwVk,91
  omdev/scripts/bumpversion.py,sha256=2NnfRsJiZNTg-LubIwXCm2vklG7-kIR8_xFUEZNxtiY,1119
- omdev/scripts/ci.py,sha256=qn0oXxQyXG6TpG8yBeTBkmiKCTkhhoHAN6XboExIR4c,328230
+ omdev/scripts/ci.py,sha256=ZmbSdJmiIefgfoRkoZYBqACECyMfZnf5aXpRz5UzfZo,333011
  omdev/scripts/execrss.py,sha256=mR0G0wERBYtQmVIn63lCIIFb5zkCM6X_XOENDFYDBKc,651
  omdev/scripts/exectime.py,sha256=S2O4MgtzTsFOY2IUJxsrnOIame9tEFc6aOlKP-F1JSg,1541
  omdev/scripts/importtrace.py,sha256=oa7CtcWJVMNDbyIEiRHej6ICfABfErMeo4_haIqe18Q,14041
@@ -248,15 +248,15 @@ omdev/tools/json/__main__.py,sha256=wqpkN_NsQyNwKW4qjVj8ADJ4_C98KhrFBtE-Z1UamfU,
  omdev/tools/json/cli.py,sha256=EubIMT-n2XsjWBZjSy2fWXqijlwrIhLsfbkg3SZzi28,9586
  omdev/tools/json/formats.py,sha256=RgtPdcs294o9n9czjafHppg1iSzD-olsIc3v8ApM9Os,1908
  omdev/tools/json/io.py,sha256=sfj2hJS9Hy3aUR8a_lLzOrYcmL9fSKyvOHiofdUASsI,1427
- omdev/tools/json/parsing.py,sha256=YOeTRY6Gd89EfcHvqXO5PRWJ3IgRCpNnI54Lb_N3v2k,2183
+ omdev/tools/json/parsing.py,sha256=BNCOcPs6eDvB6wePnjl9H0C-ngHxLRgzLM1EPetWnrQ,2093
  omdev/tools/json/processing.py,sha256=iFm5VqaxJ97WHaun2ed7NEjMxhFeJqf28bLNfoDJft0,1209
  omdev/tools/json/rendering.py,sha256=tMcjOW5edfozcMSTxxvF7WVTsbYLoe9bCKFh50qyaGw,2236
  omdev/tools/pawk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  omdev/tools/pawk/__main__.py,sha256=VCqeRVnqT1RPEoIrqHFSu4PXVMg4YEgF4qCQm90-eRI,66
  omdev/tools/pawk/pawk.py,sha256=zsEkfQX0jF5bn712uqPAyBSdJt2dno1LH2oeSMNfXQI,11424
- omdev-0.0.0.dev242.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
- omdev-0.0.0.dev242.dist-info/METADATA,sha256=-Au1sV0H8LhgFIXx279Cq3wL3EOg6QkIh6MgeNBZR3I,1636
- omdev-0.0.0.dev242.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
- omdev-0.0.0.dev242.dist-info/entry_points.txt,sha256=dHLXFmq5D9B8qUyhRtFqTGWGxlbx3t5ejedjrnXNYLU,33
- omdev-0.0.0.dev242.dist-info/top_level.txt,sha256=1nr7j30fEWgLYHW3lGR9pkdHkb7knv1U1ES1XRNVQ6k,6
- omdev-0.0.0.dev242.dist-info/RECORD,,
+ omdev-0.0.0.dev243.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+ omdev-0.0.0.dev243.dist-info/METADATA,sha256=RTZBTdIwM0qlSukFnKc503zmK9nPuDaJ67OuGFWudJ8,1636
+ omdev-0.0.0.dev243.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ omdev-0.0.0.dev243.dist-info/entry_points.txt,sha256=dHLXFmq5D9B8qUyhRtFqTGWGxlbx3t5ejedjrnXNYLU,33
+ omdev-0.0.0.dev243.dist-info/top_level.txt,sha256=1nr7j30fEWgLYHW3lGR9pkdHkb7knv1U1ES1XRNVQ6k,6
+ omdev-0.0.0.dev243.dist-info/RECORD,,