omdev-0.0.0.dev23-py3-none-any.whl → omdev-0.0.0.dev25-py3-none-any.whl

This diff compares the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two published versions.

Potentially problematic release: this version of omdev might be problematic.

omdev/scripts/interp.py CHANGED
@@ -1234,13 +1234,134 @@ class StandardLogFormatter(logging.Formatter):
 ##


+class ProxyLogFilterer(logging.Filterer):
+    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
+        self._underlying = underlying
+
+    @property
+    def underlying(self) -> logging.Filterer:
+        return self._underlying
+
+    @property
+    def filters(self):
+        return self._underlying.filters
+
+    @filters.setter
+    def filters(self, filters):
+        self._underlying.filters = filters
+
+    def addFilter(self, filter):  # noqa
+        self._underlying.addFilter(filter)
+
+    def removeFilter(self, filter):  # noqa
+        self._underlying.removeFilter(filter)
+
+    def filter(self, record):
+        return self._underlying.filter(record)
+
+
+class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
+    def __init__(self, underlying: logging.Handler) -> None:  # noqa
+        ProxyLogFilterer.__init__(self, underlying)
+
+    _underlying: logging.Handler
+
+    @property
+    def underlying(self) -> logging.Handler:
+        return self._underlying
+
+    def get_name(self):
+        return self._underlying.get_name()
+
+    def set_name(self, name):
+        self._underlying.set_name(name)
+
+    @property
+    def name(self):
+        return self._underlying.name
+
+    @property
+    def level(self):
+        return self._underlying.level
+
+    @level.setter
+    def level(self, level):
+        self._underlying.level = level
+
+    @property
+    def formatter(self):
+        return self._underlying.formatter
+
+    @formatter.setter
+    def formatter(self, formatter):
+        self._underlying.formatter = formatter
+
+    def createLock(self):
+        self._underlying.createLock()
+
+    def acquire(self):
+        self._underlying.acquire()
+
+    def release(self):
+        self._underlying.release()
+
+    def setLevel(self, level):
+        self._underlying.setLevel(level)
+
+    def format(self, record):
+        return self._underlying.format(record)
+
+    def emit(self, record):
+        self._underlying.emit(record)
+
+    def handle(self, record):
+        return self._underlying.handle(record)
+
+    def setFormatter(self, fmt):
+        self._underlying.setFormatter(fmt)
+
+    def flush(self):
+        self._underlying.flush()
+
+    def close(self):
+        self._underlying.close()
+
+    def handleError(self, record):
+        self._underlying.handleError(record)
+
+
+##
+
+
+class StandardLogHandler(ProxyLogHandler):
+    pass
+
+
+##
+
+
 def configure_standard_logging(
         level: ta.Union[int, str] = logging.INFO,
         *,
         json: bool = False,
-) -> logging.Handler:
+        target: ta.Optional[logging.Logger] = None,
+        no_check: bool = False,
+) -> ta.Optional[StandardLogHandler]:
+    if target is None:
+        target = logging.root
+
+    #
+
+    if not no_check:
+        if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
+            return None
+
+    #
+
     handler = logging.StreamHandler()

+    #
+
     formatter: logging.Formatter
     if json:
         formatter = JsonLogFormatter()
@@ -1248,14 +1369,22 @@ def configure_standard_logging(
         formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
     handler.setFormatter(formatter)

+    #
+
     handler.addFilter(TidLogFilter())

-    logging.root.addHandler(handler)
+    #
+
+    target.addHandler(handler)
+
+    #

     if level is not None:
-        logging.root.setLevel(level)
+        target.setLevel(level)
+
+    #

-    return handler
+    return StandardLogHandler(handler)


 ########################################
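
Note on the logging change above: configure_standard_logging() now installs its handler on an explicit target logger (defaulting to logging.root) and, unless no_check=True is passed, returns None instead of stacking a second handler when a StandardLogHandler is already present. A minimal usage sketch, assuming the function and StandardLogHandler come from this script:

    import logging

    # First call: installs a stream handler on the root logger and returns it
    # wrapped in a StandardLogHandler proxy.
    h = configure_standard_logging(logging.INFO)

    # Second call: an existing StandardLogHandler is detected on the target,
    # so nothing is added and None is returned (no_check=True would force it).
    assert configure_standard_logging(logging.INFO) is None
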
@@ -1705,6 +1834,14 @@ class Pyenv:
             ret.append(l)
         return ret

+    def update(self) -> bool:
+        if (root := self.root()) is None:
+            return False
+        if not os.path.isdir(os.path.join(root, '.git')):
+            return False
+        subprocess_check_call('git', 'pull', cwd=root)
+        return True
+

 ##

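
The new Pyenv.update() above only attempts a 'git pull' when a pyenv root can be resolved and that root is a git checkout; otherwise it reports False. A standalone sketch of the same guard, with a hypothetical helper name and an illustrative path:

    import os.path
    import subprocess

    def update_pyenv_checkout(root: str | None) -> bool:
        if root is None:
            return False  # no pyenv root resolved, nothing to update
        if not os.path.isdir(os.path.join(root, '.git')):
            return False  # e.g. a system-packaged pyenv, not a git clone
        subprocess.check_call(['git', 'pull'], cwd=root)
        return True

    # update_pyenv_checkout(os.path.expanduser('~/.pyenv'))
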
@@ -1905,6 +2042,10 @@ class PyenvInterpProvider(InterpProvider):

             inspect: bool = False,
             inspector: InterpInspector = INTERP_INSPECTOR,
+
+            *,
+
+            try_update: bool = False,
     ) -> None:
         super().__init__()

@@ -1913,6 +2054,8 @@ class PyenvInterpProvider(InterpProvider):
         self._inspect = inspect
         self._inspector = inspector

+        self._try_update = try_update
+
     #

     @staticmethod
@@ -1977,8 +2120,9 @@ class PyenvInterpProvider(InterpProvider):

     #

-    def get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
+    def _get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
         lst = []
+
         for vs in self._pyenv.installable_versions():
             if (iv := self.guess_version(vs)) is None:
                 continue
@@ -1986,6 +2130,16 @@ class PyenvInterpProvider(InterpProvider):
                 raise Exception('Pyenv installable versions not expected to have debug suffix')
             for d in [False, True]:
                 lst.append(dc.replace(iv, opts=dc.replace(iv.opts, debug=d)))
+
+        return lst
+
+    def get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
+        lst = self._get_installable_versions(spec)
+
+        if self._try_update and not any(v in spec for v in lst):
+            if self._pyenv.update():
+                lst = self._get_installable_versions(spec)
+
         return lst

     def install_version(self, version: InterpVersion) -> Interp:
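
With try_update enabled, get_installable_versions() now retries exactly once: if no candidate satisfies the spec, it pulls pyenv's version definitions via Pyenv.update() and rescans. A schematic of that control flow with stand-in callables (the real code works with InterpSpecifier/InterpVersion objects):

    import typing as ta

    def versions_with_retry(
            list_versions: ta.Callable[[], list[str]],  # stand-in for _get_installable_versions
            satisfies: ta.Callable[[str], bool],        # stand-in for 'version in spec'
            update: ta.Callable[[], bool],              # stand-in for Pyenv.update
            try_update: bool,
    ) -> list[str]:
        lst = list_versions()
        # Only pay for a 'git pull' when nothing matched and updating is allowed.
        if try_update and not any(satisfies(v) for v in lst) and update():
            lst = list_versions()
        return lst
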
@@ -2208,7 +2362,7 @@ class InterpResolver:

 DEFAULT_INTERP_RESOLVER = InterpResolver([(p.name, p) for p in [
     # pyenv is preferred to system interpreters as it tends to have more support for things like tkinter
-    PyenvInterpProvider(),
+    PyenvInterpProvider(try_update=True),

     RunningInterpProvider(),

@@ -115,6 +115,13 @@ def find_magic(
         *,
         py: bool = False,
 ) -> ta.Iterator[str]:
+    if isinstance(roots, str):
+        raise TypeError(roots)
+    if isinstance(magics, str):
+        raise TypeError(magics)
+    if isinstance(exts, str):
+        raise TypeError(exts)
+
     if not magics:
         raise Exception('Must specify magics')
     if not exts:
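
The new isinstance guards in find_magic() reject a bare str where a sequence of strings is expected; iterating a str would otherwise silently treat each character as an entry. A tiny standalone illustration of the failure mode being guarded against (the names and values are made up):

    def _require_str_seq(name: str, v) -> None:
        # Same idea as the new checks: a str is technically iterable, but almost
        # certainly not what the caller meant for roots/magics/exts.
        if isinstance(v, str):
            raise TypeError(f'{name} must be a sequence of strings, not {v!r}')

    _require_str_seq('roots', ['omdev'])    # ok
    # _require_str_seq('roots', 'omdev')    # raises TypeError
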
@@ -2580,13 +2587,134 @@ class StandardLogFormatter(logging.Formatter):
 ##


+class ProxyLogFilterer(logging.Filterer):
+    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
+        self._underlying = underlying
+
+    @property
+    def underlying(self) -> logging.Filterer:
+        return self._underlying
+
+    @property
+    def filters(self):
+        return self._underlying.filters
+
+    @filters.setter
+    def filters(self, filters):
+        self._underlying.filters = filters
+
+    def addFilter(self, filter):  # noqa
+        self._underlying.addFilter(filter)
+
+    def removeFilter(self, filter):  # noqa
+        self._underlying.removeFilter(filter)
+
+    def filter(self, record):
+        return self._underlying.filter(record)
+
+
+class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
+    def __init__(self, underlying: logging.Handler) -> None:  # noqa
+        ProxyLogFilterer.__init__(self, underlying)
+
+    _underlying: logging.Handler
+
+    @property
+    def underlying(self) -> logging.Handler:
+        return self._underlying
+
+    def get_name(self):
+        return self._underlying.get_name()
+
+    def set_name(self, name):
+        self._underlying.set_name(name)
+
+    @property
+    def name(self):
+        return self._underlying.name
+
+    @property
+    def level(self):
+        return self._underlying.level
+
+    @level.setter
+    def level(self, level):
+        self._underlying.level = level
+
+    @property
+    def formatter(self):
+        return self._underlying.formatter
+
+    @formatter.setter
+    def formatter(self, formatter):
+        self._underlying.formatter = formatter
+
+    def createLock(self):
+        self._underlying.createLock()
+
+    def acquire(self):
+        self._underlying.acquire()
+
+    def release(self):
+        self._underlying.release()
+
+    def setLevel(self, level):
+        self._underlying.setLevel(level)
+
+    def format(self, record):
+        return self._underlying.format(record)
+
+    def emit(self, record):
+        self._underlying.emit(record)
+
+    def handle(self, record):
+        return self._underlying.handle(record)
+
+    def setFormatter(self, fmt):
+        self._underlying.setFormatter(fmt)
+
+    def flush(self):
+        self._underlying.flush()
+
+    def close(self):
+        self._underlying.close()
+
+    def handleError(self, record):
+        self._underlying.handleError(record)
+
+
+##
+
+
+class StandardLogHandler(ProxyLogHandler):
+    pass
+
+
+##
+
+
 def configure_standard_logging(
         level: ta.Union[int, str] = logging.INFO,
         *,
         json: bool = False,
-) -> logging.Handler:
+        target: ta.Optional[logging.Logger] = None,
+        no_check: bool = False,
+) -> ta.Optional[StandardLogHandler]:
+    if target is None:
+        target = logging.root
+
+    #
+
+    if not no_check:
+        if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
+            return None
+
+    #
+
     handler = logging.StreamHandler()

+    #
+
     formatter: logging.Formatter
     if json:
         formatter = JsonLogFormatter()
@@ -2594,14 +2722,22 @@ def configure_standard_logging(
         formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
     handler.setFormatter(formatter)

+    #
+
     handler.addFilter(TidLogFilter())

-    logging.root.addHandler(handler)
+    #
+
+    target.addHandler(handler)
+
+    #

     if level is not None:
-        logging.root.setLevel(level)
+        target.setLevel(level)

-    return handler
+    #
+
+    return StandardLogHandler(handler)


 ########################################
@@ -3707,6 +3843,7 @@ class PyprojectPackageGenerator(BasePyprojectPackageGenerator):
         st.pop('cexts', None)

         self._move_dict_key(st, 'find_packages', pyp_dct, 'tool.setuptools.packages.find')
+        self._move_dict_key(st, 'package_data', pyp_dct, 'tool.setuptools.package-data')

         mani_in = st.pop('manifest_in', None)

@@ -3789,9 +3926,14 @@ class _PyprojectCextPackageGenerator(BasePyprojectPackageGenerator):
         st = specs.setuptools
         pyp_dct['tool.setuptools'] = st

-        st.pop('cexts', None)
-        st.pop('find_packages', None)
-        st.pop('manifest_in', None)
+        for k in [
+            'cexts',
+
+            'find_packages',
+            'package_data',
+            'manifest_in',
+        ]:
+            st.pop(k, None)

         pyp_dct['tool.setuptools.packages.find'] = {
             'include': [],
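
Both generators now account for 'package_data': PyprojectPackageGenerator forwards it into the generated [tool.setuptools.package-data] table, while the cext generator discards it along with the other keys it overrides. Roughly, the forwarded value ends up as a mapping like the following (the package name and globs are placeholders, not values from the package):

    pyp_dct = {}
    pyp_dct['tool.setuptools.package-data'] = {
        'somepkg': ['*.json', 'templates/*.html'],
    }
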
@@ -3973,6 +4115,14 @@ class Pyenv:
             ret.append(l)
         return ret

+    def update(self) -> bool:
+        if (root := self.root()) is None:
+            return False
+        if not os.path.isdir(os.path.join(root, '.git')):
+            return False
+        subprocess_check_call('git', 'pull', cwd=root)
+        return True
+

 ##

@@ -4173,6 +4323,10 @@ class PyenvInterpProvider(InterpProvider):

             inspect: bool = False,
             inspector: InterpInspector = INTERP_INSPECTOR,
+
+            *,
+
+            try_update: bool = False,
     ) -> None:
         super().__init__()

@@ -4181,6 +4335,8 @@ class PyenvInterpProvider(InterpProvider):
         self._inspect = inspect
         self._inspector = inspector

+        self._try_update = try_update
+
     #

     @staticmethod
@@ -4245,8 +4401,9 @@ class PyenvInterpProvider(InterpProvider):

     #

-    def get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
+    def _get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
         lst = []
+
         for vs in self._pyenv.installable_versions():
             if (iv := self.guess_version(vs)) is None:
                 continue
@@ -4254,6 +4411,16 @@ class PyenvInterpProvider(InterpProvider):
                 raise Exception('Pyenv installable versions not expected to have debug suffix')
             for d in [False, True]:
                 lst.append(dc.replace(iv, opts=dc.replace(iv.opts, debug=d)))
+
+        return lst
+
+    def get_installable_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
+        lst = self._get_installable_versions(spec)
+
+        if self._try_update and not any(v in spec for v in lst):
+            if self._pyenv.update():
+                lst = self._get_installable_versions(spec)
+
         return lst

     def install_version(self, version: InterpVersion) -> Interp:
@@ -4476,7 +4643,7 @@ class InterpResolver:

 DEFAULT_INTERP_RESOLVER = InterpResolver([(p.name, p) for p in [
     # pyenv is preferred to system interpreters as it tends to have more support for things like tkinter
-    PyenvInterpProvider(),
+    PyenvInterpProvider(try_update=True),

     RunningInterpProvider(),

@@ -4679,18 +4846,30 @@ def _venv_cmd(args) -> None:
             f'--_docker_container={shlex.quote(sd)}',
             *map(shlex.quote, sys.argv[1:]),
         ])
+
+        docker_env = {
+            'DOCKER_HOST_PLATFORM': os.environ.get('DOCKER_HOST_PLATFORM', sys.platform),
+        }
+        for e in args.docker_env or []:
+            if '=' in e:
+                k, _, v = e.split('=')
+                docker_env[k] = v
+            else:
+                docker_env[e] = os.environ.get(e, '')
+
         subprocess_check_call(
             'docker',
             'compose',
             '-f', 'docker/compose.yml',
             'exec',
             *itertools.chain.from_iterable(
-                ('-e', f'{e}={os.environ.get(e, "")}' if '=' not in e else e)
-                for e in (args.docker_env or [])
+                ('-e', f'{k}={v}')
+                for k, v in docker_env.items()
             ),
             '-it', sd,
             'bash', '--login', '-c', script,
         )
+
         return

     cmd = args.cmd
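
One detail worth noting in the env-forwarding block above: for a plain 'KEY=VALUE' argument, e.split('=') yields two items, so the three-name unpacking k, _, v = e.split('=') raises ValueError (str.partition('=') is the call that returns a (key, sep, value) triple). A standalone sketch of the intended parsing, using a hypothetical helper name:

    import os
    import sys

    def build_docker_env(docker_env_args: list[str] | None) -> dict[str, str]:
        env = {
            'DOCKER_HOST_PLATFORM': os.environ.get('DOCKER_HOST_PLATFORM', sys.platform),
        }
        for e in docker_env_args or []:
            if '=' in e:
                k, _, v = e.partition('=')  # splits on the first '=' only
                env[k] = v
            else:
                env[e] = os.environ.get(e, '')  # pass the value through from the host env
        return env

    # build_docker_env(['FOO=bar', 'PATH']) -> {'DOCKER_HOST_PLATFORM': ..., 'FOO': 'bar', 'PATH': ...}
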
@@ -0,0 +1,187 @@
+"""
+TODO:
+ - dump agg stats
+ - graphviz
+"""
+import argparse
+import dataclasses as dc
+import inspect
+import json
+import os
+import re
+import shlex
+import subprocess
+import sys
+import typing as ta
+
+from omlish import concurrent as cu
+from omlish import lang
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class Item:
+    spec: str
+
+    time: float
+    rss: int
+    imported: frozenset[str]
+
+
+##
+
+
+def _payload(specs) -> None:
+    import resource
+    import sys
+    import time
+
+    def get_rss() -> int:
+        return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
+
+    start_modules = frozenset(sys.modules)
+    start_rss = get_rss()
+    start_time = time.time()
+
+    for spec in specs:
+        exec(f'import {spec}')
+
+    end_time = time.time()
+    end_rss = get_rss()
+    end_modules = frozenset(sys.modules)
+
+    import json
+
+    def json_dumps(obj):
+        return json.dumps(obj, indent=None, separators=(',', ':'))
+
+    print(json_dumps({
+        'time': end_time - start_time,
+        'rss': end_rss - start_rss,
+        'imported': sorted(end_modules - start_modules),
+    }))
+
+
+@lang.cached_function
+def payload_src() -> str:
+    return inspect.getsource(_payload)
+
+
+def run_one(
+        spec: str,
+        *,
+        shell_wrap: bool = True,
+) -> Item:
+    spec_payload_src = '\n\n'.join([
+        payload_src(),
+        f'_payload([{spec!r}])',
+    ])
+
+    args = [
+        sys.executable,
+        '-c',
+        spec_payload_src,
+    ]
+    if shell_wrap:
+        args = ['sh', '-c', ' '.join(map(shlex.quote, args))]
+
+    output = subprocess.check_output(args)
+
+    output_lines = output.decode().strip().splitlines()
+    if not output_lines:
+        raise Exception(f'no output: {spec}')
+    if len(output_lines) > 1:
+        print(f'warning: unexpected output: {spec}')
+
+    dct = json.loads(output_lines[-1])
+    return Item(
+        spec=spec,
+        **dct,
+    )
+
+
+##
+
+
+def _find_specs(
+        *roots: str,
+        filters: ta.Iterable[str] | None = None,
+) -> ta.Sequence[str]:
+    filter_pats = [re.compile(f) for f in filters or []]
+
+    out: list[str] = []
+    stk: list[str] = list(reversed(roots))
+    while stk:
+        cur = stk.pop()
+        if os.sep in cur:
+            if os.path.isdir(cur):
+                stk.extend(reversed([
+                    os.path.join(cur, c)
+                    for c in os.listdir(cur)
+                ]))
+                continue
+
+            if not cur.endswith('.py'):
+                continue
+
+            spec = cur.rpartition('.')[0].replace(os.sep, '.')
+
+        else:
+            spec = cur
+
+        if any(p.fullmatch(spec) for p in filter_pats):
+            continue
+
+        out.append(spec)
+
+    return out
+
+
+def run(
+        *roots: str,
+        filters: ta.Iterable[str] | None = None,
+        num_threads: int | None = 0,
+        **kwargs: ta.Any,
+) -> ta.Mapping[str, Item]:
+    specs = _find_specs(*roots, filters=filters)
+
+    out: dict[str, Item] = {}
+    with cu.new_executor(num_threads) as ex:
+        futs = [ex.submit(run_one, spec, **kwargs) for spec in specs]
+        for fut in futs:
+            item = fut.result()
+            out[item.spec] = item
+
+    return out
+
+
+##
+
+
+def _main() -> None:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-j', '--jobs', type=int)
+    parser.add_argument('-f', '--filter', action='append')
+    parser.add_argument('-t', '--filter-tests', action='store_true')
+    parser.add_argument('root', nargs='+')
+    args = parser.parse_args()
+
+    filters = [*(args.filter or [])]
+    if args.filter_tests:
+        filters.extend([
+            r'.*\.conftest',
+            r'.*\.tests\..*',
+        ])
+
+    for item in run(
+            *args.root,
+            filters=filters,
+            num_threads=args.jobs,
+    ).values():
+        print(json.dumps(dc.asdict(item)))
+
+
+if __name__ == '__main__':
+    _main()
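
The new file above imports each discovered module spec in a fresh subprocess, measures wall-clock import time and the RUSAGE_SELF max-RSS delta, and prints one compact JSON object per spec. Assuming the module is importable, programmatic use would look roughly like this (the spec names and thread count are illustrative):

    # Measure a single import in an isolated interpreter.
    item = run_one('json')
    print(item.spec, round(item.time, 3), item.rss, len(item.imported))

    # Scan package roots the way _main() does, skipping tests, two workers at a time.
    for it in run('omdev', filters=[r'.*\.tests\..*'], num_threads=2).values():
        print(it.spec, it.time, it.rss)
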