atex 0.5-py3-none-any.whl → 0.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. atex/__init__.py +2 -12
  2. atex/cli/__init__.py +13 -13
  3. atex/cli/fmf.py +93 -0
  4. atex/cli/testingfarm.py +71 -61
  5. atex/connection/__init__.py +117 -0
  6. atex/connection/ssh.py +390 -0
  7. atex/executor/__init__.py +2 -0
  8. atex/executor/duration.py +60 -0
  9. atex/executor/executor.py +378 -0
  10. atex/executor/reporter.py +106 -0
  11. atex/executor/scripts.py +155 -0
  12. atex/executor/testcontrol.py +353 -0
  13. atex/fmf.py +217 -0
  14. atex/orchestrator/__init__.py +2 -0
  15. atex/orchestrator/aggregator.py +106 -0
  16. atex/orchestrator/orchestrator.py +324 -0
  17. atex/provision/__init__.py +101 -90
  18. atex/provision/libvirt/VM_PROVISION +8 -0
  19. atex/provision/libvirt/__init__.py +4 -4
  20. atex/provision/podman/README +59 -0
  21. atex/provision/podman/host_container.sh +74 -0
  22. atex/provision/testingfarm/__init__.py +2 -0
  23. atex/{testingfarm.py → provision/testingfarm/api.py} +170 -132
  24. atex/provision/testingfarm/testingfarm.py +236 -0
  25. atex/util/__init__.py +5 -10
  26. atex/util/dedent.py +1 -1
  27. atex/util/log.py +20 -12
  28. atex/util/path.py +16 -0
  29. atex/util/ssh_keygen.py +14 -0
  30. atex/util/subprocess.py +14 -13
  31. atex/util/threads.py +55 -0
  32. {atex-0.5.dist-info → atex-0.8.dist-info}/METADATA +97 -2
  33. atex-0.8.dist-info/RECORD +37 -0
  34. atex/cli/minitmt.py +0 -82
  35. atex/minitmt/__init__.py +0 -115
  36. atex/minitmt/fmf.py +0 -168
  37. atex/minitmt/report.py +0 -174
  38. atex/minitmt/scripts.py +0 -51
  39. atex/minitmt/testme.py +0 -3
  40. atex/orchestrator.py +0 -38
  41. atex/ssh.py +0 -320
  42. atex/util/lockable_class.py +0 -38
  43. atex-0.5.dist-info/RECORD +0 -26
  44. {atex-0.5.dist-info → atex-0.8.dist-info}/WHEEL +0 -0
  45. {atex-0.5.dist-info → atex-0.8.dist-info}/entry_points.txt +0 -0
  46. {atex-0.5.dist-info → atex-0.8.dist-info}/licenses/COPYING.txt +0 -0
atex/__init__.py CHANGED
@@ -18,18 +18,8 @@ def __dir__():
 
 # lazily import submodules
 def __getattr__(attr):
-    # # from mod import *
-    # if attr == '__all__':
-    #     print("importing all")
-    #     for mod in __all__:
-    #         _importlib.import_module(f'.{mod}', __name__)
-    #     return __all__
-    # # accessing __all__, __getattr__, etc. directly
-    # elif attr in globals():
-    #     print("importing globals")
-    #     return globals()[attr]
     # importing a module known to exist
     if attr in __all__:
-        return _importlib.import_module(f'.{attr}', __name__)
+        return _importlib.import_module(f".{attr}", __name__)
     else:
-        raise AttributeError(f'module {__name__} has no attribute {attr}')
+        raise AttributeError(f"module '{__name__}' has no attribute '{attr}'")
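
The hook above implements lazy submodule imports via module-level __getattr__ (PEP 562): names listed in __all__ are imported only on first attribute access. A minimal usage sketch, assuming "fmf" is listed in __all__ (the new atex/fmf.py in this release suggests it is):

    import atex
    fmf_mod = atex.fmf     # first access triggers importlib.import_module(".fmf", "atex")
    atex.no_such_module    # raises AttributeError: module 'atex' has no attribute 'no_such_module'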
atex/cli/__init__.py CHANGED
@@ -32,16 +32,16 @@ def setup_logging(level):
     logging.basicConfig(
         level=level,
         stream=sys.stderr,
-        format='%(asctime)s %(name)s: %(message)s',
-        datefmt='%Y-%m-%d %H:%M:%S',
+        format="%(asctime)s %(name)s: %(message)s",
+        datefmt="%Y-%m-%d %H:%M:%S",
     )
 
 
 def collect_modules():
     for info in pkgutil.iter_modules(__spec__.submodule_search_locations):
-        mod = importlib.import_module(f'.{info.name}', __name__)
-        if not hasattr(mod, 'CLI_SPEC'):
-            raise ValueError(f"CLI submodule {info.name} does not define CLI_SPEC")
+        mod = importlib.import_module(f".{info.name}", __name__)
+        if not hasattr(mod, "CLI_SPEC"):
+            raise ValueError(f"CLI submodule '{info.name}' does not define CLI_SPEC")
         yield (info.name, mod.CLI_SPEC)
 
 
@@ -50,28 +50,28 @@ def main():
 
     log_grp = parser.add_mutually_exclusive_group()
     log_grp.add_argument(
-        '--debug', '-d', action='store_const', dest='loglevel', const=logging.DEBUG,
+        "--debug", "-d", action="store_const", dest="loglevel", const=logging.DEBUG,
         help="enable extra debugging (logging.DEBUG)",
     )
     log_grp.add_argument(
-        '--quiet', '-q', action='store_const', dest='loglevel', const=logging.WARNING,
+        "--quiet", "-q", action="store_const", dest="loglevel", const=logging.WARNING,
         help="be quiet during normal operation (logging.WARNING)",
     )
     parser.set_defaults(loglevel=logging.INFO)
 
     mains = {}
-    subparsers = parser.add_subparsers(dest='_module', metavar='<module>', required=True)
+    subparsers = parser.add_subparsers(dest="_module", metavar="<module>", required=True)
     for name, spec in collect_modules():
-        aliases = spec['aliases'] if 'aliases' in spec else ()
+        aliases = spec["aliases"] if "aliases" in spec else ()
         subp = subparsers.add_parser(
             name,
             aliases=aliases,
-            help=spec['help'],
+            help=spec["help"],
         )
-        spec['args'](subp)
-        mains[name] = spec['main']
+        spec["args"](subp)
+        mains[name] = spec["main"]
         for alias in aliases:
-            mains[alias] = spec['main']
+            mains[alias] = spec["main"]
 
     args = parser.parse_args()
 
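
collect_modules() above turns every module under atex/cli/ into a subcommand, so a new subcommand only has to export a CLI_SPEC dict with "help", "args" and "main" keys (plus an optional "aliases" tuple). A hypothetical minimal submodule following that contract (the file name and behaviour are invented for illustration, not part of the wheel):

    # atex/cli/hello.py -- hypothetical example
    def parse_args(parser):
        parser.add_argument("name", help="who to greet")

    def main(args):
        print(f"hello, {args.name}")

    CLI_SPEC = {
        "aliases": ("hi",),           # optional
        "help": "print a greeting",
        "args": parse_args,           # called with this subcommand's ArgumentParser
        "main": main,                 # called with the parsed argparse.Namespace
    }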
atex/cli/fmf.py ADDED
@@ -0,0 +1,93 @@
+import sys
+import pprint
+
+from .. import fmf
+
+
+def _fatal(msg):
+    print(msg, file=sys.stderr)
+    sys.exit(1)
+
+
+def _get_context(args):
+    context = {}
+    if args.context:
+        for c in args.context:
+            key, value = c.split("=", 1)
+            context[key] = value
+    return context or None
+
+
+def discover(args):
+    result = fmf.FMFTests(args.root, args.plan, context=_get_context(args))
+    for name in result.tests:
+        print(name)
+
+
+def show(args):
+    result = fmf.FMFTests(args.root, args.plan, context=_get_context(args))
+    if tests := list(result.match(args.test)):
+        for test in tests:
+            print(f"\n--- {test.name} ---")
+            pprint.pprint(test.data)
+    else:
+        _fatal(f"Not reachable via {args.plan} discovery: {args.test}")
+
+
+def prepare(args):
+    result = fmf.FMFTests(args.root, args.plan, context=_get_context(args))
+    print("--- fmf root ---")
+    print(str(result.root))
+    print("--- prepare packages ---")
+    print("\n".join(result.prepare_pkgs))
+    print("--- plan environment ---")
+    print("\n".join("{k}={v}" for k,v in result.plan_env))
+    for script in result.prepare_scripts:
+        print("--- prepare script ---")
+        print(script)
+        print("----------------------")
+
+
+def parse_args(parser):
+    parser.add_argument("--root", help="path to directory with fmf tests", default=".")
+    parser.add_argument("--context", "-c", help="tmt style key=value context", action="append")
+    cmds = parser.add_subparsers(
+        dest="_cmd", help="executor feature", metavar="<cmd>", required=True,
+    )
+
+    cmd = cmds.add_parser(
+        "discover", aliases=("di",),
+        help="list tests, post-processed by tmt plans",
+    )
+    cmd.add_argument("plan", help="tmt plan to use for discovery")
+
+    cmd = cmds.add_parser(
+        "show",
+        help="show fmf data of a test",
+    )
+    cmd.add_argument("plan", help="tmt plan to use for discovery")
+    cmd.add_argument("test", help="fmf style test regex")
+
+    cmd = cmds.add_parser(
+        "prepare",
+        help="show prepare-related FMFTests details",
+    )
+    cmd.add_argument("plan", help="tmt plan to parse")
+
+
+def main(args):
+    if args._cmd in ("discover", "di"):
+        discover(args)
+    elif args._cmd == "show":
+        show(args)
+    elif args._cmd == "prepare":
+        prepare(args)
+    else:
+        raise RuntimeError(f"unknown args: {args}")
+
+
+CLI_SPEC = {
+    "help": "simple CLI interface to atex.fmf",
+    "args": parse_args,
+    "main": main,
+}
atex/cli/testingfarm.py CHANGED
@@ -1,16 +1,17 @@
 import sys
+import json
 import pprint
 
 from .. import util
-from .. import testingfarm as tf
+from ..provision.testingfarm import api as tf
 
 
 def _get_api(args):
     api_args = {}
     if args.url:
-        api_args['url'] = args.url
+        api_args["url"] = args.url
     if args.token:
-        api_args['token'] = args.token
+        api_args["token"] = args.token
     return tf.TestingFarmAPI(**api_args)
 
 
@@ -27,9 +28,9 @@ def whoami(args):
 def composes(args):
     api = _get_api(args)
     comps = api.composes(ranch=args.ranch)
-    comps_list = comps['composes']
+    comps_list = comps["composes"]
     for comp in comps_list:
-        print(comp['name'])
+        print(comp["name"])
 
 
 def get_request(args):
@@ -49,6 +50,8 @@ def search_requests(args):
     reply = api.search_requests(
         state=args.state,
         mine=not args.all,
+        user_id=args.user_id,
+        token_id=args.token_id,
         ranch=args.ranch,
         created_before=args.before,
         created_after=args.after,
@@ -56,27 +59,31 @@
     if not reply:
         return
 
-    for req in sorted(reply, key=lambda x: x['created']):
-        req_id = req['id']
-        created = req['created'].partition('.')[0]
+    if args.json:
+        for req in sorted(reply, key=lambda x: x["created"]):
+            print(json.dumps(req))
+    else:
+        for req in sorted(reply, key=lambda x: x["created"]):
+            req_id = req["id"]
+            created = req["created"].partition(".")[0]
 
-        envs = []
-        for env in req['environments_requested']:
-            if 'os' in env and env['os'] and 'compose' in env['os']:
-                compose = env['os']['compose']
-                arch = env['arch']
-                if compose and arch:
-                    envs.append(f'{compose}@{arch}')
-        envs_str = ', '.join(envs)
+            envs = []
+            for env in req["environments_requested"]:
+                if "os" in env and env["os"] and "compose" in env["os"]:
+                    compose = env["os"]["compose"]
+                    arch = env["arch"]
+                    if compose and arch:
+                        envs.append(f"{compose}@{arch}")
+            envs_str = ", ".join(envs)
 
-        print(f'{created} {req_id} : {envs_str}')
+            print(f"{created} {req_id} : {envs_str}")
 
 
 def reserve(args):
     util.info(f"Reserving {args.compose} on {args.arch} for {args.timeout} minutes")
 
     if args.hvm:
-        hardware = {'virtualization': {'is-supported': True}}
+        hardware = {"virtualization": {"is-supported": True}}
     else:
         hardware = None
 
@@ -96,12 +103,12 @@ def reserve(args):
         res.request.assert_alive()
     except tf.GoneAwayError as e:
         print(e)
-        raise SystemExit(1)
+        raise SystemExit(1) from None
 
     proc = util.subprocess_run([
-        'ssh', '-q', '-i', m.ssh_key,
-        '-oStrictHostKeyChecking=no', '-oUserKnownHostsFile=/dev/null',
-        f'{m.user}@{m.host}',
+        "ssh", "-q", "-i", m.ssh_key,
+        "-oStrictHostKeyChecking=no", "-oUserKnownHostsFile=/dev/null",
+        f"{m.user}@{m.host}",
     ])
     if proc.returncode != 0:
         print(
@@ -123,7 +130,7 @@ def watch_pipeline(args):
 
     util.info(f"Waiting for {args.request_id} to be 'running'")
     try:
-        request.wait_for_state('running')
+        request.wait_for_state("running")
     except tf.GoneAwayError:
         util.info(f"Request {args.request_id} already finished")
         return
@@ -132,96 +139,99 @@
     try:
         for line in tf.PipelineLogStreamer(request):
             sys.stdout.write(line)
-            sys.stdout.write('\n')
+            sys.stdout.write("\n")
     except tf.GoneAwayError:
         util.info(f"Request {args.request_id} finished, exiting")
 
 
 def parse_args(parser):
-    parser.add_argument('--url', help="Testing Farm API URL")
-    parser.add_argument('--token', help="Testing Farm API auth token")
+    parser.add_argument("--url", help="Testing Farm API URL")
+    parser.add_argument("--token", help="Testing Farm API auth token")
     cmds = parser.add_subparsers(
-        dest='_cmd', help="TF helper to run", metavar='<cmd>', required=True,
+        dest="_cmd", help="TF helper to run", metavar="<cmd>", required=True,
     )
 
     cmd = cmds.add_parser(
-        'whoami',
+        "whoami",
         help="print out details about active TF token",
     )
     cmd = cmds.add_parser(
-        'about',
+        "about",
         help="print out details about TF instance (url)",
     )
 
     cmd = cmds.add_parser(
-        'composes',
+        "composes",
         help="list all composes available on a given ranch",
     )
-    cmd.add_argument('ranch', nargs='?', help="Testing Farm ranch (autodetected if token)")
+    cmd.add_argument("ranch", nargs="?", help="Testing Farm ranch (autodetected if token)")
 
     cmd = cmds.add_parser(
-        'get-request', aliases=('gr',),
+        "get-request", aliases=("gr",),
         help="retrieve and print JSON of a Testing Farm request",
     )
-    cmd.add_argument('request_id', help="Testing Farm request UUID")
+    cmd.add_argument("request_id", help="Testing Farm request UUID")
 
     cmd = cmds.add_parser(
-        'cancel',
+        "cancel",
         help="cancel a Testing Farm request",
     )
-    cmd.add_argument('request_id', help="Testing Farm request UUID")
+    cmd.add_argument("request_id", help="Testing Farm request UUID")
 
     cmd = cmds.add_parser(
-        'search-requests', aliases=('sr',),
+        "search-requests", aliases=("sr",),
        help="return a list of requests matching the criteria",
     )
-    cmd.add_argument('--state', help="request state (running, etc.)", required=True)
-    cmd.add_argument('--all', help="all requests, not just owned by token", action='store_true')
-    cmd.add_argument('--ranch', help="Testing Farm ranch")
-    cmd.add_argument('--before', help="only requests created before ISO8601")
-    cmd.add_argument('--after', help="only requests created after ISO8601")
+    cmd.add_argument("--state", help="request state (running, etc.)", required=True)
+    cmd.add_argument("--all", help="all requests, not just owned by token", action="store_true")
+    cmd.add_argument("--ranch", help="Testing Farm ranch (detected from token)")
+    cmd.add_argument("--user-id", help="'user_id' request field (detected from token)")
+    cmd.add_argument("--token-id", help="'token_id' request field (detected from token)")
+    cmd.add_argument("--before", help="only requests created before ISO8601")
+    cmd.add_argument("--after", help="only requests created after ISO8601")
+    cmd.add_argument("--json", help="full details, one request per line", action="store_true")
 
     cmd = cmds.add_parser(
-        'reserve',
+        "reserve",
         help="reserve a system and ssh into it",
     )
-    cmd.add_argument('--compose', '-c', help="OS compose to install", required=True)
-    cmd.add_argument('--arch', '-a', help="system HW architecture", default='x86_64')
-    cmd.add_argument('--timeout', '-t', help="pipeline timeout (in minutes)", type=int, default=60)
-    cmd.add_argument('--ssh-key', help="path to a ssh private key file like 'id_rsa'")
-    cmd.add_argument('--hvm', help="request a HVM virtualization capable HW", action='store_true')
+    cmd.add_argument("--compose", "-c", help="OS compose to install", required=True)
+    cmd.add_argument("--arch", "-a", help="system HW architecture", default="x86_64")
+    cmd.add_argument("--timeout", "-t", help="pipeline timeout (in minutes)", type=int, default=60)
+    cmd.add_argument("--ssh-key", help="path to a ssh private key file like 'id_rsa'")
+    cmd.add_argument("--hvm", help="request a HVM virtualization capable HW", action="store_true")
 
     cmd = cmds.add_parser(
-        'watch-pipeline', aliases=('wp',),
+        "watch-pipeline", aliases=("wp",),
         help="continuously output pipeline.log like 'tail -f'",
     )
-    cmd.add_argument('request_id', help="Testing Farm request UUID")
+    cmd.add_argument("request_id", help="Testing Farm request UUID")
 
 
 def main(args):
-    if args._cmd == 'whoami':
+    if args._cmd == "whoami":
         whoami(args)
-    elif args._cmd == 'about':
+    elif args._cmd == "about":
         about(args)
-    elif args._cmd == 'composes':
+    elif args._cmd == "composes":
         composes(args)
-    elif args._cmd in ('get-request', 'gr'):
+    elif args._cmd in ("get-request", "gr"):
         get_request(args)
-    elif args._cmd == 'cancel':
+    elif args._cmd == "cancel":
         cancel(args)
-    elif args._cmd in ('search-requests', 'sr'):
+    elif args._cmd in ("search-requests", "sr"):
         search_requests(args)
-    elif args._cmd == 'reserve':
+    elif args._cmd == "reserve":
         reserve(args)
-    elif args._cmd in ('watch-pipeline', 'wp'):
+    elif args._cmd in ("watch-pipeline", "wp"):
         watch_pipeline(args)
     else:
         raise RuntimeError(f"unknown args: {args}")
 
 
 CLI_SPEC = {
-    'aliases': ('tf',),
-    'help': "various utils for Testing Farm",
-    'args': parse_args,
-    'main': main,
+    "aliases": ("tf",),
+    "help": "various utils for Testing Farm",
+    "args": parse_args,
+    "main": main,
 }
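
The subcommands above are thin wrappers over atex.provision.testingfarm.api, so the same calls work directly from Python. A rough sketch limited to the calls visible in this diff (the token and ranch values are placeholders, and treating the extra search filters as optional keyword arguments is an assumption):

    from atex.provision.testingfarm import api as tf

    api = tf.TestingFarmAPI(token="...")              # url/token optional, as in _get_api()
    for comp in api.composes(ranch="public")["composes"]:
        print(comp["name"])

    reply = api.search_requests(
        state="running", mine=True,
        user_id=None, token_id=None, ranch=None,
        created_before=None, created_after=None,
    )
    for req in sorted(reply, key=lambda x: x["created"]):
        print(req["id"], req["created"])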
atex/connection/__init__.py ADDED
@@ -0,0 +1,117 @@
+import importlib as _importlib
+import pkgutil as _pkgutil
+import threading as _threading
+
+from .. import util as _util
+
+
+class Connection:
+    """
+    A unified API for connecting to a remote system, running multiple commands,
+    rsyncing files to/from it and checking for connection state.
+
+        conn = Connection()
+        conn.connect()
+        proc = conn.cmd(["ls", "/"])
+        #proc = conn.cmd(["ls", "/"], func=subprocess.Popen)  # non-blocking
+        #output = conn.cmd(["ls", "/"], func=subprocess.check_output)  # stdout
+        conn.rsync("-v", "remote:/etc/passwd", "passwd")
+        conn.disconnect()
+
+        # or as try/except/finally
+        conn = Connection()
+        try:
+            conn.connect()
+            ...
+        finally:
+            conn.disconnect()
+
+        # or via Context Manager
+        with Connection() as conn:
+            ...
+
+    Note that internal connection handling must be implemented as thread-aware,
+    ie. disconnect() might be called from a different thread while connect()
+    or cmd() are still running.
+    Similarly, multiple threads may run cmd() or rsync() independently.
+    """
+
+    def __init__(self):
+        """
+        Initialize the connection instance.
+        If extending __init__, always call 'super().__init__()' at the top.
+        """
+        self.lock = _threading.RLock()
+
+    def __enter__(self):
+        self.connect()
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.disconnect()
+
+    def connect(self, block=True):
+        """
+        Establish a persistent connection to the remote.
+
+        If 'block' is True, wait for the connection to be up,
+        otherwise raise BlockingIOError if the connection is still down.
+        """
+        raise NotImplementedError(f"'connect' not implemented for {self.__class__.__name__}")
+
+    def disconnect(self):
+        """
+        Destroy the persistent connection to the remote.
+        """
+        raise NotImplementedError(f"'disconnect' not implemented for {self.__class__.__name__}")
+
+    def cmd(self, command, func=_util.subprocess_run, **func_args):
+        """
+        Execute a single command on the remote, using subprocess-like semantics.
+
+        'command' is the command with arguments, as a tuple/list.
+
+        'func' is the subprocess function to use (.run(), .Popen, etc.).
+
+        'func_args' are further keyword arguments to pass to 'func'.
+        """
+        raise NotImplementedError(f"'cmd' not implemented for {self.__class__.__name__}")
+
+    def rsync(self, *args, func=_util.subprocess_run, **func_args):
+        """
+        Synchronize local/remote files/directories via 'rsync'.
+
+        Pass *args like rsync(1) CLI arguments, incl. option arguments, ie.
+            .rsync("-vr", "local_path/", "remote:remote_path")
+            .rsync("-z", "remote:remote_file" ".")
+
+        To indicate remote path, use any string followed by a colon, the remote
+        name does not matter as an internally-handled '-e' option dictates all
+        the connection details.
+
+        'func' is a subprocess function to use (.run(), .Popen, etc.).
+
+        'func_args' are further keyword arguments to pass to 'func'.
+
+        The remote must have rsync(1) already installed.
+        """
+        raise NotImplementedError(f"'rsync' not implemented for {self.__class__.__name__}")
+
+
+_submodules = [
+    info.name for info in _pkgutil.iter_modules(__spec__.submodule_search_locations)
+]
+
+__all__ = [*_submodules, Connection.__name__]  # noqa: PLE0604
+
+
+def __dir__():
+    return __all__
+
+
+# lazily import submodules
+def __getattr__(attr):
+    if attr in _submodules:
+        return _importlib.import_module(f".{attr}", __name__)
+    else:
+        raise AttributeError(f"module '{__name__}' has no attribute '{attr}'")
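
Connection only defines the contract; the concrete transport added in this release is atex/connection/ssh.py. A hypothetical minimal subclass, shown here only to illustrate that contract (it runs commands on the local host instead of a remote and is not part of the package):

    import subprocess

    from atex.connection import Connection

    class LocalConnection(Connection):
        def connect(self, block=True):
            pass                            # nothing to establish locally

        def disconnect(self):
            pass

        def cmd(self, command, func=subprocess.run, **func_args):
            with self.lock:                 # self.lock is the RLock from Connection.__init__
                return func(command, **func_args)

    with LocalConnection() as conn:         # __enter__/__exit__ map to connect()/disconnect()
        proc = conn.cmd(["ls", "/"])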