devstack-cli 9.0.0__py3-none-any.whl → 10.0.157__py3-none-any.whl

cli.py CHANGED
@@ -1,42 +1,69 @@
1
1
  import argparse
2
2
  import asyncio
3
+ import configparser
3
4
  import contextlib
5
+ import datetime
4
6
  import functools
5
7
  import io
8
+ import itertools
9
+ import json
6
10
  import logging
11
+ import os
7
12
  import pathlib
13
+ import readline
8
14
  import shlex
15
+ import shutil
16
+ import signal
9
17
  import stat
10
18
  import string
11
19
  import sys
12
- import tempfile
13
20
  import termios
14
- import time
15
21
  import tty
16
22
  import typing
17
23
 
24
+ import aiofiles
25
+ import aiohttp
26
+ import asyncssh
18
27
  import paramiko
19
28
  import paramiko.sftp_client
20
29
  import rich.console
30
+ import rich.highlighter
31
+ import rich.json
21
32
  import rich.logging
22
33
  import rich.markup
34
+ import rich.pretty
23
35
  import rich.progress
24
- import version
25
36
  import watchdog.events
26
37
  import watchdog.observers
38
+ import yarl
39
+
40
+ import version
41
+
42
+
43
+ def sigint_handler(signum: int, frame, *, cli: 'Cli') -> None:
44
+ if cli.terminal_process is None:
45
+ return
46
+ #cli.terminal_process.send_signal(signal.SIGINT)
47
+ cli.terminal_process.stdin.write('\x03')
48
+
27
49
 
28
- REMOTE_USERNAME = 'devstack-user'
29
- REMOTE_SOURCE_DIRECTORY = '/home/devstack-user/starflows'
30
- REMOTE_OUTPUT_DIRECTORY = '/home/devstack-user/starflows-output'
31
50
  EVENT_DEBOUNCE_SECONDS = .5
51
+ RETRY_DELAY_SECONDS = 30
32
52
 
33
- logging.basicConfig(level=logging.INFO, handlers=[rich.logging.RichHandler()], format='%(message)s')
53
+ logging.basicConfig(level=logging.INFO, handlers=[], format='%(message)s')
34
54
  logger = logging.getLogger('cli')
55
+ logger.addHandler(rich.logging.RichHandler())
56
+ json_logger = logging.getLogger('cli-json')
57
+ json_logger.addHandler(rich.logging.RichHandler(highlighter=rich.highlighter.JSONHighlighter()))
35
58
 
36
59
  class SubprocessError(Exception):
37
60
  """A subprocess call returned with non-zero."""
38
61
 
39
62
 
63
+ class InitializationError(Exception):
64
+ """Initialization of devstack-cli failed"""
65
+
66
+
40
67
  class FileSystemEventHandlerToQueue(watchdog.events.FileSystemEventHandler):
41
68
  def __init__(
42
69
  self: 'FileSystemEventHandlerToQueue',
@@ -78,6 +105,7 @@ async def run_subprocess(
78
105
  print_stdout: bool = True,
79
106
  capture_stderr: bool = True,
80
107
  print_stderr: bool = True,
108
+ print_to_debug_log: bool = False,
81
109
  ) -> None:
82
110
  args_str = ' '.join(args)
83
111
  process = await asyncio.create_subprocess_exec(
@@ -110,7 +138,10 @@ async def run_subprocess(
110
138
  if capture_stdout and stdout_readline in done:
111
139
  stdout_line = await stdout_readline
112
140
  if print_stdout and stdout_line.decode().strip():
113
- logger.info('%s: %s', name, stdout_line.decode().strip())
141
+ if print_to_debug_log:
142
+ logger.debug('%s: %s', name, stdout_line.decode().strip())
143
+ else:
144
+ logger.info('%s: %s', name, stdout_line.decode().strip())
114
145
  stdout += stdout_line + b'\n'
115
146
  stdout_readline = asyncio.create_task(process.stdout.readline())
116
147
  pending.add(stdout_readline)
@@ -147,157 +178,856 @@ async def run_subprocess(
147
178
 
148
179
 
149
180
  def _get_event_significant_path(event: watchdog.events.FileSystemEvent) -> str:
150
- if hasattr(event, 'dest_path'):
181
+ if hasattr(event, 'dest_path') and event.dest_path != '':
151
182
  return event.dest_path
152
183
  return event.src_path
153
184
 
154
185
 
155
- def is_relative_to(self: pathlib.Path, other: pathlib.Path) -> bool:
156
- return other == self or other in self.parents
186
+ def _is_relative_to(self: pathlib.Path, other: pathlib.Path) -> bool:
187
+ return pathlib.Path(other) == pathlib.Path(self) or pathlib.Path(other) in pathlib.Path(self).parents
188
+
189
+
190
+ async def _create_temp_file(
191
+ *,
192
+ exit_stack: contextlib.AsyncExitStack,
193
+ content: typing.Union[bytes, str],
194
+ ) -> aiofiles.tempfile.NamedTemporaryFile:
195
+ temp_file = await exit_stack.enter_async_context(
196
+ aiofiles.tempfile.NamedTemporaryFile(
197
+ 'wb+',
198
+ delete=False,
199
+ ),
200
+ )
201
+ await temp_file.write(content.encode() if isinstance(content, str) else content)
202
+ await temp_file.close()
203
+ return temp_file
157
204
 
158
205
 
159
206
  class Cli:
160
- def __init__(self: 'Cli', args: argparse.Namespace) -> None:
161
- rich.print(f'Cloudomation devstack-cli {version.MAJOR}+{version.BRANCH_NAME}.{version.BUILD_DATE}.{version.SHORT_SHA}')
162
- self.hostname = args.hostname
163
- self.local_source_directory = pathlib.Path(args.source_directory)
164
-
165
- self.local_output_directory = pathlib.Path(args.output_directory) if args.output_directory else None
166
- if (
167
- self.local_output_directory is not None
168
- and (
169
- is_relative_to(self.local_source_directory, self.local_output_directory)
170
- or is_relative_to(self.local_output_directory, self.local_source_directory)
171
- )
172
- ):
173
- logger.error('Source-directory and output-directory must not overlap!')
174
- sys.exit(1)
175
- self.ssh_client = None
176
- self.sftp_client = None
177
- self.filesystem_watch_task = None
178
- self.known_hosts_file = None
207
+ def __init__(self: 'Cli') -> None:
208
+ self.config_file: typing.Optional[pathlib.Path] = None
209
+ self.args: typing.Optional[argparse.Namespace] = None
210
+ self.config: typing.Optional[configparser.ConfigParser] = None
211
+ self.password: typing.Optional[str] = None
212
+ self.session: typing.Optional[aiohttp.ClientSession] = None
213
+ self.workspace_url: typing.Optional[yarl.URL] = None
214
+ self.sync_task: typing.Optional[asyncio.Task] = None
215
+ self.port_forwarding_task: typing.Optional[asyncio.Task] = None
216
+ self.logs_task: typing.Optional[asyncio.Task] = None
217
+ self.exit_stack: typing.Optional[contextlib.AsyncExitStack] = None
218
+ self.cdes: typing.List[dict] = []
219
+ self.cde: typing.Optional[dict] = None
220
+ self.cde_type: typing.Optional[dict] = None
221
+ self.ssh_client: typing.Optional[paramiko.SSHClient] = None
222
+ self.sftp_client: typing.Optional[paramiko.sftp_client.SFTPClient] = None
223
+ self.known_hosts_file: typing.Optional[aiofiles.tempfile.NamedTemporaryFile] = None
179
224
  self.console = rich.console.Console()
180
- if args.verbose:
181
- logger.setLevel(logging.DEBUG)
225
+ self._fd = sys.stdin.fileno()
226
+ self._tcattr = termios.tcgetattr(self._fd)
227
+ self.terminal_process = None
228
+
229
+ @property
230
+ def cde_name(self: 'Cli') -> typing.Optional[str]:
231
+ return self.cde['name'] if self.cde is not None else None
232
+
233
+ @property
234
+ def is_cde_running(self: 'Cli') -> bool:
235
+ if self.cde is None:
236
+ return None
237
+ if not self.cde['exists_remotely']:
238
+ return False
239
+ return self.cde['value']['is-running'] and self.cde['provisioning_state'] == 'READY'
240
+
241
+ @property
242
+ def hostname(self: 'Cli') -> typing.Optional[str]:
243
+ return self.cde['value']['hostname'] if self.cde is not None else None
244
+
245
+ @property
246
+ def local_source_directory(self: 'Cli') -> typing.Optional[pathlib.Path]:
247
+ return pathlib.Path(os.path.expandvars(self.cde['source_directory'])) if self.cde else None
248
+
249
+ @property
250
+ def local_output_directory(self: 'Cli') -> typing.Optional[pathlib.Path]:
251
+ return pathlib.Path(os.path.expandvars(self.cde['output_directory'])) if self.cde and self.cde.get('output_directory') else None
182
252
 
183
253
  async def run(self: 'Cli') -> None:
184
- self.loop = asyncio.get_running_loop()
185
- key_queue = asyncio.Queue()
186
- await self._prepare_known_hosts()
187
254
  try:
188
- await self._connect_to_rde()
189
- await self._init_local_cache()
190
- sync_task = asyncio.create_task(self._start_sync())
191
- port_forwarding_task = asyncio.create_task(self._start_port_forwarding())
192
- logs_task = None
193
- await self._setup_keyboard(key_queue)
194
- try:
195
- logger.info('Ready!')
196
- key_queue.put_nowait('h')
255
+ self.loop = asyncio.get_running_loop()
256
+ self.loop.add_signal_handler(
257
+ signal.SIGWINCH,
258
+ self._window_resized,
259
+ )
260
+ self.key_queue = asyncio.Queue()
261
+ await self._parse_arguments()
262
+ # print version after parse_arguments to avoid duplication when using "--version"
263
+ rich.print(f'Cloudomation devstack-cli {version.MAJOR}+{version.BRANCH_NAME}.{version.BUILD_DATE}.{version.SHORT_SHA}')
264
+ rich.print('''[bold white on blue]
265
+ :=+********+=:
266
+ -+****************+-
267
+ =**********************=
268
+ :**************************:
269
+ -****************************-:=+****+=:
270
+ .**************=-*************************:
271
+ =**************. -************************-
272
+ *************** -********++*************
273
+ .**************= ::.. *************
274
+ .=****************: *************:
275
+ =**************=-. .**************+=:
276
+ .************+-. .*******************+:
277
+ **************=: +********************+
278
+ =*****************+=: +*********************
279
+ **********************: +********************=
280
+ **********************= .--::.. *********************
281
+ =********************** .=*******************************
282
+ **********************. =********************************=
283
+ .+********************+=*********************************+
284
+ -*****************************************************=
285
+ -+************************************************=.
286
+ :-=+**************************************+=-.
287
+ ''') # noqa: W291
288
+ async with self._key_press_to_queue(), \
289
+ aiohttp.ClientSession(trust_env=True) as self.session, \
290
+ contextlib.AsyncExitStack() as self.exit_stack:
291
+ await self._load_global_config()
292
+ await self._check_config()
293
+ await self._print_help()
294
+ await self._process_args()
197
295
  while True:
198
- key_press = await key_queue.get()
199
- # check status
200
- if sync_task is not None and sync_task.done():
201
- sync_task = None
202
- if port_forwarding_task is not None and port_forwarding_task.done():
203
- port_forwarding_task = None
204
- if logs_task is not None and logs_task.done():
205
- logs_task = None
206
-
207
- if key_press == 'h':
208
- table = rich.table.Table(title='Help')
209
- table.add_column('Key', style='cyan')
210
- table.add_column('Function')
211
- table.add_column('Status')
212
- table.add_row('h', 'Print help')
213
- table.add_row('v', 'Toggle debug logs', '[green]on' if logger.getEffectiveLevel() == logging.DEBUG else '[red]off')
214
- table.add_row('s', 'Toggle file sync', '[red]off' if sync_task is None else '[green]on')
215
- table.add_row('p', 'Toggle port forwarding', '[red]off' if port_forwarding_task is None else '[green]on')
216
- table.add_row('l', 'Toggle following logs', '[red]off' if logs_task is None else '[green]on')
217
- table.add_row('q', 'Quit')
218
- rich.print(table)
219
- elif key_press == 'v':
220
- if logger.getEffectiveLevel() == logging.INFO:
221
- logger.info('Enabling debug logs')
222
- logger.setLevel(logging.DEBUG)
223
- else:
224
- logger.info('Disabling debug logs')
225
- logger.setLevel(logging.INFO)
226
- elif key_press == 's':
227
- if sync_task is None:
228
- sync_task = asyncio.create_task(self._start_sync())
229
- else:
230
- sync_task.cancel()
231
- try:
232
- await sync_task
233
- except asyncio.CancelledError:
234
- pass
235
- except Exception:
236
- logger.exception('Error during file sync')
237
- sync_task = None
238
- elif key_press == 'p':
239
- if port_forwarding_task is None:
240
- port_forwarding_task = asyncio.create_task(self._start_port_forwarding())
241
- else:
242
- port_forwarding_task.cancel()
243
- try:
244
- await port_forwarding_task
245
- except asyncio.CancelledError:
246
- pass
247
- except Exception:
248
- logger.exception('Error during port forwarding')
249
- port_forwarding_task = None
250
- elif key_press == 'l':
251
- if logs_task is None:
252
- logs_task = asyncio.create_task(self._start_logs())
253
- else:
254
- logs_task.cancel()
255
- try:
256
- await logs_task
257
- except asyncio.CancelledError:
258
- pass
259
- except Exception:
260
- logger.exception('Error during logs')
261
- logs_task = None
262
- elif key_press == 'q':
263
- break
264
- elif ord(key_press) == 10: # return
265
- rich.print('')
266
- else:
267
- logger.debug('Unknown keypress "%s" (%d)', key_press if key_press in string.printable else '?', ord(key_press))
268
- finally:
269
- await self._reset_keyboard()
270
- if port_forwarding_task is not None:
271
- port_forwarding_task.cancel()
272
- with contextlib.suppress(asyncio.CancelledError):
273
- await port_forwarding_task
274
- if sync_task is not None:
275
- sync_task.cancel()
276
- with contextlib.suppress(asyncio.CancelledError):
277
- await sync_task
278
- if logs_task is not None:
279
- logs_task.cancel()
280
- with contextlib.suppress(asyncio.CancelledError):
281
- await logs_task
282
- await self._disconnect_from_rde()
283
- finally:
284
- await self._cleanup_known_hosts_file()
296
+ key_press = await self.key_queue.get()
297
+ await self._handle_key_press(key_press)
298
+ except InitializationError as ex:
299
+ logger.error(ex) # noqa: TRY400
300
+ except Exception:
301
+ logger.exception('Unhandled exception')
285
302
 
286
- async def _setup_keyboard(self: 'Cli', queue: asyncio.Queue) -> None:
303
+ def _window_resized(self: 'Cli', *args, **kwargs) -> None:
304
+ if self.terminal_process is None:
305
+ return
306
+ terminal_size = shutil.get_terminal_size()
307
+ self.terminal_process.change_terminal_size(terminal_size.columns, terminal_size.lines)
308
+
309
+ async def _parse_arguments(self: 'Cli') -> None:
310
+ config_home = os.environ.get('XDG_CONFIG_HOME', '$HOME/.config')
311
+ default_config_file = pathlib.Path(os.path.expandvars(config_home)) / 'devstack-cli.conf'
312
+ parser = argparse.ArgumentParser(
313
+ fromfile_prefix_chars='@',
314
+ #formatter_class=argparse.ArgumentDefaultsHelpFormatter,
315
+ )
316
+ parser.add_argument(
317
+ '-c', '--config-file',
318
+ type=str,
319
+ help='path to a devstack-cli configuration file',
320
+ default=str(default_config_file),
321
+ )
322
+ parser.add_argument(
323
+ '--workspace-url',
324
+ type=str,
325
+ help='the URL of your Cloudomation workspace',
326
+ )
327
+ parser.add_argument(
328
+ '-u', '--user-name',
329
+ type=str,
330
+ help='a user name to authenticate to the Cloudomation workspace',
331
+ )
332
+ parser.add_argument(
333
+ '--maximum-uptime-hours',
334
+ type=int,
335
+ help='the number of hours before a CDE is automatically stopped',
336
+ )
337
+ parser.add_argument(
338
+ '-n', '--cde-name',
339
+ type=str,
340
+ help='the name of the CDE',
341
+ )
342
+ parser.add_argument(
343
+ '-s', '--start',
344
+ action='store_true',
345
+ help='start CDE',
346
+ )
347
+ parser.add_argument(
348
+ '--stop',
349
+ action='store_true',
350
+ help='stop CDE',
351
+ )
352
+ parser.add_argument(
353
+ '-w', '--wait-running',
354
+ action='store_true',
355
+ help='wait until CDE is running. implies "--start".',
356
+ )
357
+ parser.add_argument(
358
+ '-o', '--connect',
359
+ action='store_true',
360
+ help='connect to CDE. implies "--start" and "--wait-running".',
361
+ )
362
+ parser.add_argument(
363
+ '-p', '--port-forwarding',
364
+ action='store_true',
365
+ help='enable port-forwarding. implies "--start", "--wait-running", and "--connect".',
366
+ )
367
+ parser.add_argument(
368
+ '-f', '--file-sync',
369
+ action='store_true',
370
+ help='enable file-sync. implies "--start", "--wait-running", and "--connect".',
371
+ )
372
+ parser.add_argument(
373
+ '-l', '--logs',
374
+ action='store_true',
375
+ help='enable following logs. implies "--start", "--wait-running", and "--connect".',
376
+ )
377
+ parser.add_argument(
378
+ '-t', '--terminal',
379
+ action='store_true',
380
+ help='open interactive terminal. implies "--start", "--wait-running", and "--connect".',
381
+ )
382
+ parser.add_argument(
383
+ '-q', '--quit',
384
+ action='store_true',
385
+ help='exit after processing command line arguments.',
386
+ )
387
+
388
+ # parser.add_argument(
389
+ # '-s', '--source-directory',
390
+ # type=str,
391
+ # help='a local directory where the sources of the CDE will be stored',
392
+ # )
393
+ # parser.add_argument(
394
+ # '-o', '--output-directory',
395
+ # type=str,
396
+ # help='a local directory where the outputs of the CDE will be stored',
397
+ # )
398
+ # parser.add_argument(
399
+ # '--remote-source-directory',
400
+ # type=str,
401
+ # help='a remote directory where the sources of the CDE are stored',
402
+ # )
403
+ # parser.add_argument(
404
+ # '--remote-output-directory',
405
+ # type=str,
406
+ # help='a remote directory where the outputs of the CDE are stored',
407
+ # )
408
+ # parser.add_argument(
409
+ # '--remote-username',
410
+ # type=str,
411
+ # help='the username on the CDE',
412
+ # )
413
+ parser.add_argument(
414
+ '-v', '--verbose',
415
+ action='store_true',
416
+ help='enable debug logging',
417
+ )
418
+ parser.add_argument(
419
+ '-V', '--version',
420
+ action='version',
421
+ version=f'Cloudomation devstack-cli {version.MAJOR}+{version.BRANCH_NAME}.{version.BUILD_DATE}.{version.SHORT_SHA}',
422
+ )
423
+ self.args = parser.parse_args()
424
+
425
+ if self.args.port_forwarding:
426
+ self.args.connect = True
427
+ if self.args.file_sync:
428
+ self.args.connect = True
429
+ if self.args.logs:
430
+ self.args.connect = True
431
+ if self.args.terminal:
432
+ self.args.connect = True
433
+ if self.args.connect:
434
+ self.args.wait_running = True
435
+ if self.args.wait_running:
436
+ self.args.start = True
437
+
438
+ if self.args.verbose:
439
+ logger.setLevel(logging.DEBUG)
440
+ json_logger.setLevel(logging.DEBUG)
441
+ asyncssh.set_log_level(logging.DEBUG)
442
+ else:
443
+ logger.setLevel(logging.INFO)
444
+ json_logger.setLevel(logging.INFO)
445
+ asyncssh.set_log_level(logging.WARNING)
446
+
447
+ @contextlib.asynccontextmanager
448
+ async def _key_press_to_queue(self: 'Cli'):
287
449
  self._fd = sys.stdin.fileno()
288
450
  self._tcattr = termios.tcgetattr(self._fd)
289
451
  tty.setcbreak(self._fd)
290
452
  def on_stdin() -> None:
291
- self.loop.call_soon_threadsafe(queue.put_nowait, sys.stdin.read(1))
453
+ self.loop.call_soon_threadsafe(self.key_queue.put_nowait, sys.stdin.buffer.raw.read(1).decode())
292
454
  self.loop.add_reader(sys.stdin, on_stdin)
455
+ try:
456
+ yield
457
+ finally:
458
+ self.loop.remove_reader(sys.stdin)
459
+ termios.tcsetattr(self._fd, termios.TCSADRAIN, self._tcattr)
293
460
 
294
- async def _reset_keyboard(self: 'Cli') -> None:
295
- termios.tcsetattr(self._fd, termios.TCSADRAIN, self._tcattr)
461
+ async def _load_global_config(self: 'Cli') -> None:
462
+ self.config_file = pathlib.Path(os.path.expandvars(self.args.config_file))
463
+ self.config_file.parent.mkdir(parents=True, exist_ok=True) # make sure the config directory exists
464
+ self.config = configparser.ConfigParser()
465
+ if not self.config_file.exists():
466
+ logger.info('No configuration file exists at "%s". Creating a new configuration.', self.config_file)
467
+ else:
468
+ logger.info('Loading configuration from %s', self.config_file)
469
+ async with aiofiles.open(self.config_file, mode='r') as f:
470
+ config_str = await f.read()
471
+ self.config.read_string(config_str, source=self.config_file)
472
+ self.config.setdefault('global', {})
473
+
474
+ workspace_url = self.args.workspace_url or self.config['global'].get('workspace_url')
475
+ if not workspace_url:
476
+ workspace_url = self._console_input('Enter the URL of your Cloudomation workspace: ', prefill='https://')
477
+ self.config['global']['workspace_url'] = workspace_url
478
+ self.workspace_url = yarl.URL(workspace_url)
479
+
480
+ user_name = self.args.user_name or self.config['global'].get('user_name')
481
+ if not user_name:
482
+ user_name = self._console_input(f'Enter your user-name to authenticate to {workspace_url}: ')
483
+ self.config['global']['user_name'] = user_name
484
+
485
+ self.password = os.environ.get('DEVSTACK_CLI_PASSWORD')
486
+ if not self.password:
487
+ self.password = self._console_input(f'Enter your password to authenticate "{user_name}" to {workspace_url}: ', password=True)
488
+
489
+ maximum_uptime_hours = self.args.maximum_uptime_hours or self.config['global'].get('maximum_uptime_hours')
490
+ if not maximum_uptime_hours:
491
+ while True:
492
+ maximum_uptime_hours = self._console_input('How many hours should a CDE remain started until it is automatically stopped: ', prefill='8')
493
+ try:
494
+ int(maximum_uptime_hours)
495
+ except ValueError:
496
+ logger.error('"%s" is not a valid number', maximum_uptime_hours) # noqa: TRY400
497
+ else:
498
+ break
499
+ self.config['global']['maximum_uptime_hours'] = maximum_uptime_hours
500
+
501
+ await self._write_config_file()
502
+
503
+ async def _write_config_file(self: 'Cli') -> None:
504
+ logger.debug('Writing configuration file %s', self.config_file)
505
+ config_str = io.StringIO()
506
+ self.config.write(config_str)
507
+ async with aiofiles.open(self.config_file, mode='w') as f:
508
+ await f.write(config_str.getvalue())
509
+
510
+ def _console_input(self: 'Cli', prompt: str, *, password: bool = False, prefill: str = '') -> str:
296
511
  self.loop.remove_reader(sys.stdin)
512
+ termios.tcsetattr(self._fd, termios.TCSADRAIN, self._tcattr)
513
+ readline.set_startup_hook(lambda: readline.insert_text(prefill))
514
+ try:
515
+ response = self.console.input(prompt, password=password)
516
+ finally:
517
+ readline.set_startup_hook()
518
+ tty.setcbreak(self._fd)
519
+ def on_stdin() -> None:
520
+ self.loop.call_soon_threadsafe(self.key_queue.put_nowait, sys.stdin.read(1))
521
+ self.loop.add_reader(sys.stdin, on_stdin)
522
+ return response
523
+
524
+ async def _check_config(self: 'Cli') -> None:
525
+ logger.debug('Checking if Cloudomation workspace at %s is alive', self.config['global']['workspace_url'])
526
+ try:
527
+ response = await self.session.get(
528
+ url=self.workspace_url / 'api/latest/alive',
529
+ )
530
+ except aiohttp.client_exceptions.ClientConnectorError as ex:
531
+ raise InitializationError(f'Failed to verify Cloudomation workspace alive: {ex!s}') from ex
532
+ if response.status != 200:
533
+ raise InitializationError(f'Failed to verify Cloudomation workspace alive: {response.reason} ({response.status}):\n{await response.text()}')
534
+ workspace_info = await response.json()
535
+ logger.info('Connected to Cloudomation workspace %s', self.workspace_url)
536
+ json_logger.debug(json.dumps(workspace_info, indent=4, sort_keys=True))
297
537
 
298
- async def _prepare_known_hosts(self: 'Cli') -> None:
299
- self.known_hosts_file = tempfile.NamedTemporaryFile(delete=False)
300
- logger.info('Writing temporary known_hosts file "%s"', self.known_hosts_file.name)
538
+ logger.debug('Logging in as "%s" to Cloudomation workspace at %s', self.config['global']['user_name'], self.config['global']['workspace_url'])
539
+ response = await self.session.post(
540
+ url=self.workspace_url / 'api/latest/auth/login',
541
+ json={
542
+ 'user_name': self.config['global']['user_name'],
543
+ 'password': self.password,
544
+ },
545
+ )
546
+ if response.status != 200:
547
+ raise InitializationError(f'Failed to login to Cloudomation workspace: {response.reason} ({response.status}):\n{await response.text()}')
548
+ self.user_info = await response.json()
549
+ logger.info('Logged in to Cloudomation workspace')
550
+ json_logger.debug(json.dumps(self.user_info, indent=4, sort_keys=True))
551
+
552
+ response = await self.session.get(
553
+ url=self.workspace_url / 'api/latest/object_template/cde-type',
554
+ params={
555
+ 'by': 'name',
556
+ },
557
+ )
558
+ if response.status != 200:
559
+ raise InitializationError(f'Failed to fetch "cde-type" object template: {response.reason} ({response.status}):\n{await response.text()}\nIs the "DevStack" bundle installed?')
560
+ self.cde_type_template = (await response.json())['object_template']
561
+ logger.debug('The "cde-type" object template')
562
+ json_logger.debug(json.dumps(self.cde_type_template, indent=4, sort_keys=True))
563
+
564
+ response = await self.session.get(
565
+ url=self.workspace_url / 'api/latest/object_template/cde',
566
+ params={
567
+ 'by': 'name',
568
+ },
569
+ )
570
+ if response.status != 200:
571
+ raise InitializationError(f'Failed to fetch "cde" object template: {response.reason} ({response.status}):\n{await response.text()}\nIs the "DevStack" bundle installed?')
572
+ self.cde_template = (await response.json())['object_template']
573
+ logger.debug('The "cde" object template')
574
+ json_logger.debug(json.dumps(self.cde_template, indent=4, sort_keys=True))
575
+
576
+ response = await self.session.get(
577
+ url=self.workspace_url / 'api/latest/custom_object',
578
+ params={
579
+ 'filter': json.dumps({
580
+ 'field': 'object_template_id',
581
+ 'op': 'eq',
582
+ 'value': self.cde_type_template['id'],
583
+ }),
584
+ 'plain': 'true',
585
+ },
586
+ )
587
+ if response.status != 200:
588
+ raise InitializationError(f'Failed to fetch "cde-type" custom objects: {response.reason} ({response.status}):\n{await response.text()}')
589
+ self.cde_types = await response.json()
590
+ logger.debug('The "cde-type" custom objects')
591
+ json_logger.debug(json.dumps(self.cde_types, indent=4, sort_keys=True))
592
+
593
+ # logger.info('Using configuration of CDE "%s"', self.cde_name)
594
+ # json_logger.debug(json.dumps(self.cde_config, indent=4, sort_keys=True))
595
+
596
+ async def _print_help(self: 'Cli') -> None:
597
+ await self._update_cde_list()
598
+ await self._check_background_tasks()
599
+ table = rich.table.Table(title='Help')
600
+ table.add_column('Key', style='cyan bold')
601
+ table.add_column('Function')
602
+ table.add_column('Status')
603
+
604
+ # global commands
605
+ table.add_row('h, [SPACE]', 'Print [cyan bold]h[/cyan bold]elp and status')
606
+ table.add_row('v', 'Toggle [cyan bold]v[/cyan bold]erbose debug logs', '[green]on' if logger.getEffectiveLevel() == logging.DEBUG else '[red]off')
607
+ table.add_row('q, [ESC]', '[cyan bold]Q[/cyan bold]uit')
608
+ table.add_row('#', 'DEBUG')
609
+ table.add_row('n', 'Create [cyan bold]n[/cyan bold]ew CDE')
610
+
611
+ # CDE selection
612
+ if self.cdes:
613
+ table.add_section()
614
+ table.add_row('', '== CDE selection ==')
615
+ for i, cde in enumerate(self.cdes.values(), start=1):
616
+ cde_type = await self._get_cde_type_of_cde(cde)
617
+ if not cde_type:
618
+ continue
619
+ cde_type_name = cde_type['name']
620
+ if self.cde and self.cde['name'] == cde['name']:
621
+ table.add_row(str(i), f"Select \"{cde['name']}\" ({cde_type_name}) CDE", f"[{cde['status_color']}]{cde['status']} [italic default](selected)")
622
+ else:
623
+ table.add_row(str(i), f"Select \"{cde['name']}\" ({cde_type_name}) CDE", f"[{cde['status_color']}]{cde['status']}")
624
+
625
+ # CDE operations
626
+ table.add_section()
627
+ table.add_row('', '== CDE operations ==')
628
+ if self.cde:
629
+ table.add_row('w', f'[cyan bold]W[/cyan bold]ait for "{self.cde_name}" CDE to be running')
630
+ if self.cde['status'] == 'running':
631
+ table.add_row('o', f"C[cyan bold]o[/cyan bold]nnect to \"{cde['name']}\" CDE")
632
+ elif self.cde['status'] == 'connected':
633
+ table.add_row('o', f"Disc[cyan bold]o[/cyan bold]nnect from \"{cde['name']}\" CDE")
634
+ else:
635
+ table.add_row('o', f"Connect to \"{cde['name']}\" CDE", 'N/A: CDE is not running', style='bright_black italic')
636
+ table.add_row('c', f"[cyan bold]C[/cyan bold]onfigure \"{cde['name']}\" CDE")
637
+ if self.cde['status'] in ('stopped', 'deleted'):
638
+ table.add_row('s', f'[cyan bold]S[/cyan bold]tart "{self.cde_name}" CDE')
639
+ elif self.cde['status'] in ('running', 'connected'):
640
+ table.add_row('s', f'[cyan bold]S[/cyan bold]top "{self.cde_name}" CDE')
641
+ else:
642
+ table.add_row('s', 'Start/stop CDE', 'N/A: CDE is transitioning', style='bright_black italic')
643
+ table.add_row('d', f'[cyan bold]D[/cyan bold]elete "{self.cde_name}" CDE')
644
+ else:
645
+ table.add_row('w', 'Wait for CDE to be running', 'N/A: no CDE selected', style='bright_black italic')
646
+ table.add_row('o', 'Connect to CDE', 'N/A: no CDE selected', style='bright_black italic')
647
+ table.add_row('c', 'Configure CDE', 'N/A: no CDE selected', style='bright_black italic')
648
+ table.add_row('s', 'Start/stop CDE', 'N/A: no CDE selected', style='bright_black italic')
649
+ table.add_row('d', 'Delete CDE', 'N/A: no CDE selected', style='bright_black italic')
650
+
651
+ # CDE connection
652
+ table.add_section()
653
+ table.add_row('', '== CDE connection ==')
654
+ if self.cde and self.cde['status'] == 'connected':
655
+ table.add_row('p', 'Toggle [cyan bold]p[/cyan bold]ort forwarding', '[red]off' if self.port_forwarding_task is None else '[green]on')
656
+ table.add_row('f', 'Toggle [cyan bold]f[/cyan bold]ile sync', '[red]off' if self.sync_task is None else '[green]on')
657
+ table.add_row('l', 'Toggle following [cyan bold]l[/cyan bold]ogs', '[red]off' if self.logs_task is None else '[green]on')
658
+ table.add_row('t', 'Open an interactive terminal session on the CDE')
659
+ else:
660
+ table.add_row('p', 'Toggle port forwarding', 'N/A: not connected', style='bright_black italic')
661
+ table.add_row('f', 'Toggle file sync', 'N/A: not connected', style='bright_black italic')
662
+ table.add_row('l', 'Toggle following logs', 'N/A: not connected', style='bright_black italic')
663
+ table.add_row('t', 'Open an interactive terminal session on the CDE', 'N/A: not connected', style='bright_black italic')
664
+ rich.print(table)
665
+
666
+ async def _update_cde_list(self: 'Cli') -> None:
667
+ logger.info('Fetching updated CDE list from Cloudomation workspace')
668
+ try:
669
+ response = await self.session.get(
670
+ url=self.workspace_url / 'api/latest/custom_object',
671
+ params={
672
+ 'filter': json.dumps({
673
+ 'and': [
674
+ {
675
+ 'field': 'object_template_id',
676
+ 'op': 'eq',
677
+ 'value': self.cde_template['id'],
678
+ },
679
+ {
680
+ 'field': 'created_by',
681
+ 'op': 'eq',
682
+ 'value': self.user_info['identity_id'],
683
+ },
684
+ ],
685
+ }),
686
+ 'plain': 'true',
687
+ },
688
+ )
689
+ except (aiohttp.ClientError, aiohttp.ClientResponseError) as ex:
690
+ logger.error('Failed to fetch CDE list: %s', str(ex)) # noqa: TRY400
691
+ return
692
+ if response.status != 200:
693
+ logger.error('Failed to fetch CDE list: %s (%s):\n%s', response.reason, response.status, await response.text())
694
+ return
695
+ response = await response.json()
696
+ self.cdes = {
697
+ cde['name']: {
698
+ **cde,
699
+ 'exists_remotely': True,
700
+ }
701
+ for cde
702
+ in response
703
+ }
704
+ # combine with CDE infos from local config file
705
+ for cde_config_key, cde_config_value in self.config.items():
706
+ if not cde_config_key.startswith('cde.'):
707
+ continue
708
+ cur_cde_name = cde_config_key[4:]
709
+ self.cdes.setdefault(cur_cde_name, {}).update({
710
+ **cde_config_value,
711
+ 'name': cur_cde_name,
712
+ 'exists_locally': True,
713
+ })
714
+ # enrich CDE infos with:
715
+ # - combined status: provisioning_state & is-running & exists locally only
716
+ # - exists_locally: cde name present in config file
717
+ # - exists_remotely: remote config exists
718
+ for cde in self.cdes.values():
719
+ cde.setdefault('exists_remotely', False)
720
+ cde.setdefault('exists_locally', False)
721
+ if not cde['exists_locally']:
722
+ cde['status'] = 'not configured'
723
+ cde['status_color'] = 'yellow'
724
+ elif not cde['exists_remotely']:
725
+ cde['status'] = 'deleted'
726
+ cde['status_color'] = 'red'
727
+ elif cde['provisioning_state'] == 'READY':
728
+ if cde['value']['is-running']:
729
+ if cde['value'].get('hostname'):
730
+ if self.ssh_client is None:
731
+ cde['status'] = 'running'
732
+ cde['status_color'] = 'green'
733
+ else:
734
+ cde['status'] = 'connected'
735
+ cde['status_color'] = 'green bold'
736
+ else:
737
+ cde['status'] = 'starting'
738
+ cde['status_color'] = 'blue'
739
+ else:
740
+ cde['status'] = 'stopped'
741
+ cde['status_color'] = 'red'
742
+ elif cde['provisioning_state'].endswith('_FAILED'):
743
+ cde['status'] = cde['provisioning_state'].lower()
744
+ cde['status_color'] = 'red'
745
+ else:
746
+ cde['status'] = cde['provisioning_state'].lower()
747
+ cde['status_color'] = 'blue'
748
+
749
+ logger.debug('Your CDEs')
750
+ json_logger.debug(json.dumps(self.cdes, indent=4, sort_keys=True))
751
+
752
+ if self.cde:
753
+ try:
754
+ # update selected cde info from fetched list
755
+ await self._select_cde(self.cde_name, quiet=True)
756
+ except KeyError:
757
+ logger.warning('Selected CDE "%s" does not exist any more. Unselecting.', self.cde_name)
758
+ self.cde = None
759
+
760
+ async def _check_background_tasks(self: 'Cli') -> None:
761
+ if self.sync_task is not None and self.sync_task.done():
762
+ self.sync_task = None
763
+ if self.port_forwarding_task is not None and self.port_forwarding_task.done():
764
+ self.port_forwarding_task = None
765
+ if self.logs_task is not None and self.logs_task.done():
766
+ self.logs_task = None
767
+ if self.ssh_client is not None:
768
+ transport = self.ssh_client.get_transport()
769
+ if transport.is_active():
770
+ try:
771
+ transport.send_ignore()
772
+ except EOFError:
773
+ # connection is closed
774
+ logger.warning('SSH connection is not alive, disconnecting.')
775
+ self.ssh_client.close()
776
+ self.ssh_client = None
777
+ else:
778
+ logger.warning('SSH connection is not alive, disconnecting.')
779
+ self.ssh_client.close()
780
+ self.ssh_client = None
781
+ if self.ssh_client is None:
782
+ # we are not connected to any cde. make sure background tasks are cancelled
783
+ if self.sync_task:
784
+ self.sync_task.cancel()
785
+ self.sync_task = None
786
+ if self.port_forwarding_task:
787
+ self.port_forwarding_task.cancel()
788
+ self.port_forwarding_task = None
789
+ if self.logs_task:
790
+ self.logs_task.cancel()
791
+ self.logs_task = None
792
+ if self.sftp_client is not None:
793
+ self.sftp_client.close()
794
+ self.sftp_client = None
795
+
796
+
797
+ async def _get_cde_type_of_cde(self: 'Cli', cde: dict) -> typing.Optional[dict]:
798
+ if cde['exists_remotely']:
799
+ try:
800
+ cde_type = next(cde_type for cde_type in self.cde_types if cde_type['id'] == cde['value']['cde-type'])
801
+ except StopIteration:
802
+ logger.error('CDE type ID "%s" not found', cde['value']['cde-type']) # noqa: TRY400
803
+ return None
804
+ elif cde['exists_locally']:
805
+ try:
806
+ cde_type = next(cde_type for cde_type in self.cde_types if cde_type['name'] == cde['cde_type'])
807
+ except StopIteration:
808
+ logger.error('CDE type "%s" not found', cde['cde_type']) # noqa: TRY400
809
+ return None
810
+ else:
811
+ logger.error('CDE does not exist')
812
+ return None
813
+ return cde_type
814
+
815
+ async def _process_args(self: 'Cli') -> None:
816
+ if self.args.cde_name:
817
+ await self._select_cde(self.args.cde_name)
818
+ elif 'last_cde_name' in self.config['global']:
819
+ await self._select_cde(self.config['global']['last_cde_name'])
820
+
821
+ if self.args.start:
822
+ await self._start_cde()
823
+ elif self.args.stop:
824
+ await self._stop_cde()
825
+
826
+ if self.args.wait_running and self.cde['status'] == 'not configured':
827
+ await self._configure_cde()
828
+
829
+ if self.args.wait_running and self.cde['status'] != 'running':
830
+ await self._wait_running()
831
+
832
+ if self.args.connect:
833
+ await self._connect_cde()
834
+
835
+ if self.args.port_forwarding:
836
+ await self._start_port_forwarding()
837
+
838
+ if self.args.file_sync:
839
+ await self._start_sync()
840
+
841
+ if self.args.logs:
842
+ await self._start_logs()
843
+
844
+ if self.args.terminal:
845
+ await self._open_terminal()
846
+
847
+ if self.args.quit:
848
+ raise KeyboardInterrupt
849
+
850
+ async def _handle_key_press(self: 'Cli', key_press: str) -> None:
851
+ if key_press in ('h', ' '):
852
+ await self._print_help()
853
+ elif key_press == 'v':
854
+ if logger.getEffectiveLevel() == logging.INFO:
855
+ logger.info('Enabling debug logs')
856
+ logger.setLevel(logging.DEBUG)
857
+ else:
858
+ logger.info('Disabling debug logs')
859
+ logger.setLevel(logging.INFO)
860
+ elif key_press == 'q':
861
+ raise asyncio.CancelledError
862
+ elif key_press == '\x1b': # escape
863
+ await asyncio.sleep(0) # event loop tick for queue.put_nowait to be handled
864
+ if self.key_queue.empty():
865
+ # single escape press
866
+ raise asyncio.CancelledError
867
+ # escape sequence
868
+ seq = ''
869
+ while not self.key_queue.empty():
870
+ seq += await self.key_queue.get()
871
+ await asyncio.sleep(0) # event loop tick for queue.put_nowait to be handled
872
+ logger.warning('Ignoring escape sequence "%s"', seq)
873
+ elif key_press == '#':
874
+ if self.cde:
875
+ logger.info('CDE config')
876
+ json_logger.info(json.dumps(self.cde, indent=4, sort_keys=True))
877
+ if self.cde_type:
878
+ logger.info('CDE type config')
879
+ json_logger.info(json.dumps(self.cde_type, indent=4, sort_keys=True))
880
+ elif key_press == 'n':
881
+ await self._create_cde()
882
+ elif key_press in (str(i) for i in range(1, len(self.cdes)+1)):
883
+ cde_name = list(self.cdes.values())[int(key_press)-1]['name']
884
+ await self._select_cde(cde_name)
885
+ elif key_press == 'w':
886
+ await self._wait_running()
887
+ elif key_press == 'o':
888
+ await self._connect_disconnect_cde()
889
+ elif key_press == 'c':
890
+ await self._configure_cde()
891
+ elif key_press == 's':
892
+ await self._start_stop_cde()
893
+ elif key_press == 'd':
894
+ await self._delete_cde()
895
+ elif key_press == 'p':
896
+ await self._toggle_port_forwarding()
897
+ elif key_press == 'f':
898
+ await self._toggle_sync()
899
+ elif key_press == 'l':
900
+ await self._toggle_logs()
901
+ elif key_press == 't':
902
+ await self._open_terminal()
903
+ elif key_press == '\x0a': # return
904
+ rich.print('')
905
+ else:
906
+ logger.warning('Unknown keypress "%s" (%d)', key_press if key_press in string.printable else '?', ord(key_press))
907
+
908
+ async def _create_cde(self: 'Cli') -> None:
909
+ logger.info('Creating new CDE')
910
+ table = rich.table.Table(title='CDE types')
911
+ table.add_column('Key', style='cyan bold')
912
+ table.add_column('Name')
913
+ table.add_column('Description')
914
+ for i, cde_type in enumerate(self.cde_types, start=1):
915
+ table.add_row(str(i), cde_type['name'], cde_type['description'])
916
+ table.add_row('ESC', 'Cancel')
917
+ rich.print(table)
918
+ logger.info('Choose a CDE type (1-%d):', len(self.cde_types))
919
+ key_press = await self.key_queue.get()
920
+ if key_press == chr(27):
921
+ logger.warning('Aborting')
922
+ return
923
+ try:
924
+ cde_type = self.cde_types[int(key_press)-1]
925
+ except (IndexError, ValueError):
926
+ logger.error('Invalid choice "%s"', key_press) # noqa: TRY400
927
+ return
928
+ cde_name = self._console_input('Choose a name for your CDE: ', prefill=f"{self.user_info['name']}-{cde_type['name']}")
929
+ await self._create_cde_api_call(cde_name, cde_type['id'])
930
+ await self._update_cde_list()
931
+ await self._select_cde(cde_name)
932
+
933
+ async def _create_cde_api_call(self: 'Cli', cde_name: str, cde_type_id: str) -> None:
934
+ maximum_uptime_hours = int(self.config['global'].get('maximum_uptime_hours', '8'))
935
+ stop_at = (datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=maximum_uptime_hours)).isoformat()
936
+ try:
937
+ response = await self.session.post(
938
+ url=self.workspace_url / 'api/latest/custom_object',
939
+ json={
940
+ 'name': cde_name,
941
+ 'object_template_id': self.cde_template['id'],
942
+ 'value': {
943
+ 'cde-type': cde_type_id,
944
+ 'user': self.user_info['identity_id'],
945
+ 'feature-branch-mapping': None,
946
+ 'stop-at': stop_at,
947
+ },
948
+ },
949
+ params={
950
+ 'plain': 'true',
951
+ },
952
+ )
953
+ except (aiohttp.ClientError, aiohttp.ClientResponseError) as ex:
954
+ logger.error('Failed to create CDE: %s', str(ex)) # noqa: TRY400
955
+ return
956
+ if response.status != 200:
957
+ logger.error('Failed to create CDE: %s (%s):\n%s', response.reason, response.status, await response.text())
958
+ return
959
+
960
+ async def _select_cde(self: 'Cli', cde_name: str, *, quiet: bool = False) -> None:
961
+ try:
962
+ self.cde = self.cdes[cde_name]
963
+ except IndexError:
964
+ logger.error('Cannot select CDE "%s". No such CDE', cde_name) # noqa: TRY400
965
+ return
966
+ if not quiet:
967
+ logger.info('Selecting "%s" CDE', self.cde_name)
968
+ self.cde_type = await self._get_cde_type_of_cde(self.cde)
969
+ self.config['global']['last_cde_name'] = self.cde_name
970
+ await self._write_config_file()
971
+
972
+ async def _wait_running(self: 'Cli') -> None:
973
+ logger.info('Waiting for CDE "%s" to reach status running...', self.cde_name)
974
+ while True:
975
+ await self._update_cde_list()
976
+ if self.cde['status'] == 'running':
977
+ break
978
+ if self.cde['status'].endswith('_failed') or self.cde['status'] in {'not configured', 'deleted', 'connected', 'stopped'}:
979
+ logger.error('CDE "%s" failed to reach status running and is now in status "%s".', self.cde_name, self.cde['status'])
980
+ return
981
+ await asyncio.sleep(10)
982
+ logger.info('CDE "%s" is now running', self.cde_name)
983
+
984
+ async def _connect_disconnect_cde(self: 'Cli') -> None:
985
+ await self._update_cde_list()
986
+ if not self.cde:
987
+ logger.error('No CDE is selected. Cannot connect.')
988
+ return
989
+ if self.cde['status'] == 'running':
990
+ await self._connect_cde()
991
+ elif self.cde['status'] == 'connected':
992
+ await self._disconnect_cde()
993
+ else:
994
+ logger.error('CDE is not running. Cannot connect.')
995
+ return
996
+
997
+ async def _connect_cde(self: 'Cli') -> None:
998
+ logger.info('Connecting to CDE')
999
+ known_hosts = await self._get_known_hosts()
1000
+ if known_hosts is None:
1001
+ return
1002
+ self.known_hosts_file = await _create_temp_file(exit_stack=self.exit_stack, content=known_hosts)
1003
+ self.ssh_client = paramiko.SSHClient()
1004
+ self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
1005
+ try:
1006
+ self.ssh_client.connect(
1007
+ hostname=self.hostname,
1008
+ username=self.cde_type['value']['remote-username'],
1009
+ timeout=30,
1010
+ )
1011
+ except TimeoutError:
1012
+ logger.error('Timeout while connecting to CDE. Is your CDE running?') # noqa: TRY400
1013
+ self.ssh_client = None
1014
+ return
1015
+ transport = self.ssh_client.get_transport()
1016
+ transport.set_keepalive(30)
1017
+ self.sftp_client = paramiko.sftp_client.SFTPClient.from_transport(transport)
1018
+ logger.info('Connected to CDE')
1019
+
1020
+ async def _get_known_hosts(self: 'Cli') -> typing.Optional[str]:
1021
+ if self.cde['value']['hostkey']:
1022
+ if self.cde['value']['hostkey'].startswith(self.cde['value']['hostname']):
1023
+ return self.cde['value']['hostkey']
1024
+ return f"{self.cde['value']['hostname']} {self.cde['value']['hostkey']}"
1025
+ if not self.cde:
1026
+ logger.error('No CDE is selected. Cannot fetch host-key.')
1027
+ return None
1028
+ if not self.is_cde_running:
1029
+ logger.error('CDE is not running. Cannot fetch host-key.')
1030
+ return None
301
1031
  logger.debug('Scanning hostkeys of "%s"', self.hostname)
302
1032
  try:
303
1033
  stdout, stderr = await run_subprocess(
@@ -310,47 +1040,366 @@ class Cli:
310
1040
  print_stderr=False,
311
1041
  )
312
1042
  except SubprocessError as ex:
313
- logger.error('%s Failed to fetch hostkeys. Is you RDE running?', ex) # noqa: TRY400
1043
+ logger.error('%s Failed to fetch hostkeys. Is your CDE running?', ex) # noqa: TRY400
314
1044
  sys.exit(1)
315
- self.known_hosts_file.write(stdout)
1045
+ known_hosts = stdout
316
1046
  with contextlib.suppress(FileNotFoundError):
317
- self.known_hosts_file.write(pathlib.Path('~/.ssh/known_hosts').expanduser().read_bytes())
318
- self.known_hosts_file.close()
1047
+ known_hosts += pathlib.Path(os.path.expandvars('$HOME/.ssh/known_hosts')).read_bytes()
1048
+ return known_hosts
1049
+
1050
+ async def _disconnect_cde(self: 'Cli') -> None:
1051
+ logger.info('Disconnecting from CDE')
1052
+ if self.sftp_client is not None:
1053
+ self.sftp_client.close()
1054
+ self.sftp_client = None
1055
+ if self.ssh_client is not None:
1056
+ self.ssh_client.close()
1057
+ self.ssh_client = None
1058
+ self.known_hosts_file = None
1059
+ logger.debug('Disconnected from CDE')
319
1060
 
320
- async def _cleanup_known_hosts_file(self: 'Cli') -> None:
321
- if self.known_hosts_file is None:
1061
+ async def _configure_cde(self: 'Cli') -> None:
1062
+ await self._update_cde_list()
1063
+ if not self.cde:
1064
+ logger.error('No CDE is selected. Cannot configure CDE.')
322
1065
  return
323
- pathlib.Path(self.known_hosts_file.name).unlink()
1066
+ cde_config_key = f'cde.{self.cde_name}'
1067
+ if cde_config_key not in self.config:
1068
+ logger.info('Creating new configuration for CDE "%s".', self.cde_name)
1069
+ self.config[cde_config_key] = {
1070
+ 'cde_type': self.cde_type['name'],
1071
+ }
1072
+ source_directory = self._console_input(
1073
+ f'Choose a local directory where the sources of the "{self.cde_name}" CDE will be stored: ',
1074
+ prefill=self.config[cde_config_key].get('source_directory', f'$HOME/{self.cde_type["name"].replace(" ", "-")}'),
1075
+ )
1076
+ self.config[cde_config_key]['source_directory'] = source_directory
1077
+ while True:
1078
+ output_directory = self._console_input(
1079
+ f'Choose a local directory where the outputs of the "{self.cde_name}" CDE will be stored: ',
1080
+ prefill=self.config[cde_config_key].get('output_directory', f'$HOME/{self.cde_type["name"].replace(" ", "-")}-output'),
1081
+ )
1082
+ if (
1083
+ _is_relative_to(source_directory, output_directory)
1084
+ or _is_relative_to(output_directory, source_directory)
1085
+ ):
1086
+ logger.error('Source-directory and output-directory must not overlap!')
1087
+ else:
1088
+ break
1089
+ self.config[cde_config_key]['output_directory'] = output_directory
1090
+ while True:
1091
+ maximum_uptime_hours = self._console_input(
1092
+ 'How many hours should this CDE remain started until it is automatically stopped: ',
1093
+ prefill=self.config['global'].get('maximum_uptime_hours', '8'),
1094
+ )
1095
+ try:
1096
+ int(maximum_uptime_hours)
1097
+ except ValueError:
1098
+ logger.error('"%s" is not a valid number', maximum_uptime_hours) # noqa: TRY400
1099
+ else:
1100
+ break
1101
+ self.config[cde_config_key]['maximum_uptime_hours'] = maximum_uptime_hours
324
1102
 
325
- async def _connect_to_rde(self: 'Cli') -> None:
326
- self.ssh_client = paramiko.SSHClient()
327
- self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
328
- logger.info('Connecting to RDE')
1103
+ await self._write_config_file()
1104
+ logger.info('CDE "%s" configured.', self.cde_name)
1105
+
1106
+ async def _start_stop_cde(self: 'Cli') -> None:
1107
+ await self._update_cde_list()
1108
+ if not self.cde:
1109
+ logger.error('No CDE is selected. Cannot start/stop CDE.')
1110
+ return
1111
+ if self.cde['status'] in ('stopped', 'deleted'):
1112
+ await self._start_cde()
1113
+ elif self.cde['status'] in ('running', 'connected'):
1114
+ await self._stop_cde()
1115
+
1116
+ async def _start_cde(self: 'Cli') -> None:
1117
+ logger.info('Start CDE')
1118
+ if not self.cde['exists_remotely']:
1119
+ await self._create_cde_api_call(self.cde['name'], self.cde_type['id'])
1120
+ else:
1121
+ await self._start_cde_api_call()
1122
+
1123
+ async def _stop_cde(self: 'Cli') -> None:
1124
+ logger.info('Stop CDE')
1125
+ await self._stop_cde_api_call()
1126
+ # cde was running, is now stopping
1127
+ if self.sync_task:
1128
+ self.sync_task.cancel()
1129
+ self.sync_task = None
1130
+ if self.port_forwarding_task:
1131
+ self.port_forwarding_task.cancel()
1132
+ self.port_forwarding_task = None
1133
+ if self.logs_task:
1134
+ self.logs_task.cancel()
1135
+ self.logs_task = None
1136
+ if self.ssh_client is not None:
1137
+ self.ssh_client.close()
1138
+ self.ssh_client = None
1139
+
1140
+ async def _start_cde_api_call(self: 'Cli') -> None:
1141
+ maximum_uptime_hours = int(self.config['global'].get('maximum_uptime_hours', '8'))
1142
+ stop_at = (datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=maximum_uptime_hours)).isoformat()
329
1143
  try:
330
- self.ssh_client.connect(
331
- hostname=self.hostname,
332
- username=REMOTE_USERNAME,
333
- timeout=30,
1144
+ response = await self.session.patch(
1145
+ url=self.workspace_url / 'api/latest/custom_object' / self.cde['id'],
1146
+ json={
1147
+ 'value': {
1148
+ 'is-running': True,
1149
+ 'stop-at': stop_at,
1150
+ },
1151
+ },
334
1152
  )
335
- except TimeoutError:
336
- logger.exception('Timeout while connecting to RDE. Is your RDE running?')
337
- sys.exit(1)
1153
+ except (aiohttp.ClientError, aiohttp.ClientResponseError) as ex:
1154
+ logger.error('Failed to start CDE: %s', str(ex)) # noqa: TRY400
1155
+ return
1156
+ if response.status != 200:
1157
+ logger.error('Failed to start CDE: %s (%s):\n%s', response.reason, response.status, await response.text())
338
1158
  return
339
- transport = self.ssh_client.get_transport()
340
- self.sftp_client = paramiko.sftp_client.SFTPClient.from_transport(transport)
341
1159
 
342
- async def _disconnect_from_rde(self: 'Cli') -> None:
343
- if self.sftp_client is not None:
344
- self.sftp_client.close()
345
- self.sftp_client = None
1160
+ async def _stop_cde_api_call(self: 'Cli') -> None:
1161
+ try:
1162
+ response = await self.session.patch(
1163
+ url=self.workspace_url / 'api/latest/custom_object' / self.cde['id'],
1164
+ json={
1165
+ 'value': {
1166
+ 'is-running': False,
1167
+ },
1168
+ },
1169
+ )
1170
+ except (aiohttp.ClientError, aiohttp.ClientResponseError) as ex:
1171
+ logger.error('Failed to stop CDE: %s', str(ex)) # noqa: TRY400
1172
+ return
1173
+ if response.status != 200:
1174
+ logger.error('Failed to stop CDE: %s (%s):\n%s', response.reason, response.status, await response.text())
1175
+ return
1176
+
1177
+ async def _delete_cde(self: 'Cli') -> None:
1178
+ await self._update_cde_list()
1179
+ if not self.cde:
1180
+ logger.error('No CDE is selected. Cannot delete CDE.')
1181
+ return
1182
+ logger.info('Deleting CDE "%s"', self.cde_name)
1183
+ try:
1184
+ response = await self.session.delete(
1185
+ url=self.workspace_url / 'api/latest/custom_object' / self.cde['id'],
1186
+ params={
1187
+ 'permanently': 'true',
1188
+ },
1189
+ )
1190
+ except (aiohttp.ClientError, aiohttp.ClientResponseError) as ex:
1191
+ logger.error('Failed to delete CDE: %s', str(ex)) # noqa: TRY400
1192
+ return
1193
+ if response.status != 204:
1194
+ logger.error('Failed to delete CDE: %s (%s)', response.reason, response.status)
1195
+ return
1196
+ if self.sync_task:
1197
+ self.sync_task.cancel()
1198
+ self.sync_task = None
1199
+ if self.port_forwarding_task:
1200
+ self.port_forwarding_task.cancel()
1201
+ self.port_forwarding_task = None
1202
+ if self.logs_task:
1203
+ self.logs_task.cancel()
1204
+ self.logs_task = None
346
1205
  if self.ssh_client is not None:
347
1206
  self.ssh_client.close()
348
1207
  self.ssh_client = None
349
1208
 
1209
+ #####
1210
+ ##### PORT FORWARDING
1211
+ #####
1212
+ async def _toggle_port_forwarding(self: 'Cli') -> None:
1213
+ await self._update_cde_list()
1214
+ if self.port_forwarding_task is None:
1215
+ await self._start_port_forwarding()
1216
+ else:
1217
+ await self._stop_port_forwarding()
1218
+
1219
+ async def _start_port_forwarding(self: 'Cli') -> None:
1220
+ if not self.cde:
1221
+ logger.error('No CDE is selected. Cannot start port forwarding.')
1222
+ return
1223
+ if not self.is_cde_running:
1224
+ logger.error('CDE is not running. Cannot start port forwarding.')
1225
+ return
1226
+ if self.ssh_client is None:
1227
+ logger.error('Not connected to CDE. Cannot start port forwarding.')
1228
+ return
1229
+ self.port_forwarding_task = asyncio.create_task(self._bg_port_forwarding())
1230
+
1231
+ async def _stop_port_forwarding(self: 'Cli') -> None:
1232
+ self.port_forwarding_task.cancel()
1233
+ self.port_forwarding_task = None
1234
+
1235
+ async def _bg_port_forwarding(self: 'Cli') -> None:
1236
+ service_ports = self.cde_type['value'].get('service-ports')
1237
+ if service_ports is None:
1238
+ service_ports = [
1239
+ '8443:443',
1240
+ 5678,
1241
+ 6678,
1242
+ 7678,
1243
+ 8678,
1244
+ 3000,
1245
+ 2022,
1246
+ ]
1247
+ service_ports = [
1248
+ (port, port) if isinstance(port, int) else tuple(map(int, port.split(':', 1)))
1249
+ for port
1250
+ in service_ports
1251
+ ]
1252
+ while True:
1253
+ logger.info('Starting port forwarding of %s', ', '.join(str(port[0]) for port in service_ports))
1254
+ try:
1255
+ await run_subprocess(
1256
+ 'ssh',
1257
+ [
1258
+ '-o', 'ConnectTimeout=10',
1259
+ '-o', f'UserKnownHostsFile={self.known_hosts_file.name}',
1260
+ '-NT',
1261
+ f"{self.cde_type['value']['remote-username']}@{self.hostname}",
1262
+ *itertools.chain.from_iterable([
1263
+ ('-L', f'{port[0]}:localhost:{port[1]}')
1264
+ for port
1265
+ in service_ports
1266
+ ]),
1267
+
1268
+ ],
1269
+ name='Port forwarding',
1270
+ capture_stdout=False,
1271
+ )
1272
+ except asyncio.CancelledError:
1273
+ logger.info('Port forwarding interrupted')
1274
+ raise
1275
+ except SubprocessError as ex:
1276
+ logger.error('Port forwarding failed:\n%s: %s', type(ex).__name__, str(ex)) # noqa: TRY400
1277
+ logger.info('Will retry port forwarding in %s seconds', RETRY_DELAY_SECONDS)
1278
+ await asyncio.sleep(RETRY_DELAY_SECONDS)
1279
+ await self._check_background_tasks()
1280
+ except Exception:
1281
+ logger.exception('Port forwarding failed')
1282
+ logger.info('Will retry port forwarding in %s seconds', RETRY_DELAY_SECONDS)
1283
+ await asyncio.sleep(RETRY_DELAY_SECONDS)
1284
+ await self._check_background_tasks()
1285
+ else:
1286
+ logger.info('Port forwarding done')
1287
+ break
1288
+
+ #####
+ ##### FILE SYNC
+ #####
+ async def _toggle_sync(self: 'Cli') -> None:
+ await self._update_cde_list()
+ if self.sync_task is None:
+ await self._start_sync()
+ else:
+ await self._stop_sync()
+
+ async def _start_sync(self: 'Cli') -> None:
+ if not self.cde:
+ logger.error('No CDE is selected. Cannot start file sync.')
+ return
+ if not self.is_cde_running:
+ logger.error('CDE is not running. Cannot start file sync.')
+ return
+ if self.sftp_client is None:
+ logger.error('Not connected to CDE. Cannot start file sync.')
+ return
+ self.sync_task = asyncio.create_task(self._bg_sync())
+
+ async def _stop_sync(self: 'Cli') -> None:
+ self.sync_task.cancel()
+ self.sync_task = None
+
+ async def _bg_sync(self: 'Cli') -> None:
+ while True:
+ logger.info('Starting file sync')
+ try:
+ await self._init_local_cache()
+ except OSError as ex:
+ logger.error('Failed to initialize local cache: %s', str(ex)) # noqa: TRY400
+ return
+ filesystem_event_queue = asyncio.Queue()
+ filesystem_watch_task = asyncio.create_task(
+ self._watch_filesystem(
+ queue=filesystem_event_queue,
+ ),
+ )
+ if self.local_output_directory:
+ remote_sync_task = asyncio.create_task(
+ self._remote_sync(),
+ )
+ else:
+ remote_sync_task = None
+ background_sync_task = None
+ try:
+ while True:
+ filesystem_events = []
+ if background_sync_task is not None:
+ background_sync_task.cancel()
+ with contextlib.suppress(asyncio.CancelledError):
+ await background_sync_task
+ background_sync_task = asyncio.create_task(self._background_sync())
+ filesystem_events.append(await filesystem_event_queue.get())
+ logger.debug('first event, debouncing...')
+ # debounce
+ await asyncio.sleep(EVENT_DEBOUNCE_SECONDS)
+ logger.debug('collecting changes')
+ while not filesystem_event_queue.empty():
+ filesystem_events.append(filesystem_event_queue.get_nowait())
+ for event in filesystem_events:
+ logger.debug('non-unique event: %s', event)
+ # remove duplicates
+ events = [
+ event
+ for i, event
+ in enumerate(filesystem_events)
+ if _get_event_significant_path(event) not in (
+ _get_event_significant_path(later_event)
+ for later_event
+ in filesystem_events[i+1:]
+ )
+ ]
+ for i, event in enumerate(events, start=1):
+ logger.debug('unique event [%d/%d]: %s', i, len(events), event)
+ await self._process_sync_event(event)
+ except asyncio.CancelledError:
+ logger.info('File sync interrupted')
+ raise
+ except OSError as ex:
+ logger.error('File sync failed: %s', str(ex)) # noqa: TRY400
+ logger.info('Will retry file sync in %s seconds', RETRY_DELAY_SECONDS)
+ await asyncio.sleep(RETRY_DELAY_SECONDS)
+ await self._check_background_tasks()
+ except Exception:
+ logger.exception('File sync failed')
+ logger.info('Will retry file sync in %s seconds', RETRY_DELAY_SECONDS)
+ await asyncio.sleep(RETRY_DELAY_SECONDS)
+ await self._check_background_tasks()
+ else:
+ logger.info('File sync stopped')
+ break
+ finally:
+ filesystem_watch_task.cancel()
+ with contextlib.suppress(asyncio.CancelledError):
+ await filesystem_watch_task
+ if remote_sync_task is not None:
+ remote_sync_task.cancel()
+ with contextlib.suppress(asyncio.CancelledError):
+ await remote_sync_task
+ if background_sync_task is not None:
+ background_sync_task.cancel()
+ with contextlib.suppress(asyncio.CancelledError):
+ await background_sync_task
+
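The _bg_sync loop above debounces filesystem events: it blocks until a first event arrives, sleeps for EVENT_DEBOUNCE_SECONDS, drains whatever accumulated in the queue, and keeps only the last event per significant path before handing each one to _process_sync_event. A reduced sketch of that debounce-and-deduplicate step, with plain dictionaries standing in for the watchdog events used here:

import asyncio

EVENT_DEBOUNCE_SECONDS = .5

async def collect_debounced(queue: asyncio.Queue) -> list:
    # Block until the first event, then give a short burst time to settle.
    events = [await queue.get()]
    await asyncio.sleep(EVENT_DEBOUNCE_SECONDS)
    while not queue.empty():
        events.append(queue.get_nowait())
    # Keep only the last event recorded for each path.
    return [
        event
        for i, event in enumerate(events)
        if event['path'] not in (later['path'] for later in events[i + 1:])
    ]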
  async def _init_local_cache(self: 'Cli') -> None:
  self.local_source_directory.mkdir(parents=True, exist_ok=True)
  logger.debug('Listing remote items')
- listing = self.sftp_client.listdir_attr(REMOTE_SOURCE_DIRECTORY)
+ try:
+ listing = self.sftp_client.listdir_attr(self.cde_type['value']['remote-source-directory'])
+ except FileNotFoundError as ex:
+ raise InitializationError(f"Remote source directory {self.cde_type['value']['remote-source-directory']} does not exist") from ex
 
  logger.info('Processing %d remote items...', len(listing))
  for file_info in rich.progress.track(
@@ -363,8 +1412,10 @@ class Cli:
  logger.info('Processing "%s"', file_info.filename)
  try:
  result = await self._process_remote_item(file_info)
- except SubprocessError:
- logger.exception('Failed')
+ except SubprocessError as ex:
+ logger.error('Processing of remote item failed:\n%s: %s', type(ex).__name__, str(ex)) # noqa: TRY400
+ except Exception:
+ logger.exception('Processing of remote item failed')
  else:
  logger.info(result)
 
@@ -374,7 +1425,7 @@ class Cli:
  if file_info.st_mode & stat.S_IFDIR:
  # check if .git exists
  try:
- git_stat = self.sftp_client.stat(f'{REMOTE_SOURCE_DIRECTORY}/{filename}/.git')
+ git_stat = self.sftp_client.stat(f"{self.cde_type['value']['remote-source-directory']}/{filename}/.git")
  except FileNotFoundError:
  pass
  else:
@@ -393,7 +1444,7 @@ class Cli:
  '-e', f'ssh -o ConnectTimeout=10 -o UserKnownHostsFile={self.known_hosts_file.name}',
  '--archive',
  '--checksum',
- f'{REMOTE_USERNAME}@{self.hostname}:{REMOTE_SOURCE_DIRECTORY}/{filename}/',
+ f"{self.cde_type['value']['remote-username']}@{self.hostname}:{self.cde_type['value']['remote-source-directory']}/{filename}/",
  str(self.local_source_directory / filename),
  ],
  name='Copy remote directory',
@@ -405,7 +1456,7 @@ class Cli:
  executor=None,
  func=functools.partial(
  self.sftp_client.get,
- remotepath=f'{REMOTE_SOURCE_DIRECTORY}/{filename}',
+ remotepath=f"{self.cde_type['value']['remote-source-directory']}/{filename}",
  localpath=str(self.local_source_directory / filename),
  ),
  )
@@ -417,7 +1468,7 @@ class Cli:
  [
  'clone',
  '-q',
- f'{REMOTE_USERNAME}@{self.hostname}:{REMOTE_SOURCE_DIRECTORY}/{filename}',
+ f"{self.cde_type['value']['remote-username']}@{self.hostname}:{self.cde_type['value']['remote-source-directory']}/{filename}",
  ],
  name='Git clone',
  cwd=self.local_source_directory,
@@ -432,7 +1483,7 @@ class Cli:
  shlex.join([
  'git',
  '-C',
- f'{REMOTE_SOURCE_DIRECTORY}/{filename}',
+ f"{self.cde_type['value']['remote-source-directory']}/{filename}",
  'config',
  '--get',
  'remote.origin.url',
@@ -456,77 +1507,40 @@ class Cli:
  )
  return f'Cloned repository "{filename}"'
 
- async def _start_sync(self: 'Cli') -> None:
- logger.info('Starting file sync')
- filesystem_event_queue = asyncio.Queue()
- filesystem_watch_task = asyncio.create_task(
- self._watch_filesystem(
- queue=filesystem_event_queue,
- ),
- )
- if self.local_output_directory:
- remote_sync_task = asyncio.create_task(
- self._remote_sync(),
- )
- else:
- remote_sync_task = None
- background_sync_task = None
- try:
- while True:
- filesystem_events = []
- if background_sync_task is not None:
- background_sync_task.cancel()
- with contextlib.suppress(asyncio.CancelledError):
- await background_sync_task
- background_sync_task = asyncio.create_task(self._background_sync())
- filesystem_events.append(await filesystem_event_queue.get())
- logger.debug('first event, debouncing...')
- # debounce
- await asyncio.sleep(EVENT_DEBOUNCE_SECONDS)
- logger.debug('collecting changes')
- while not filesystem_event_queue.empty():
- filesystem_events.append(filesystem_event_queue.get_nowait())
- for event in filesystem_events:
- logger.debug('non-unique event: %s', event)
- # remove duplicates
- events = [
- event
- for i, event
- in enumerate(filesystem_events)
- if _get_event_significant_path(event) not in (
- _get_event_significant_path(later_event)
- for later_event
- in filesystem_events[i+1:]
- )
- ]
- for i, event in enumerate(events, start=1):
- logger.debug('unique event [%d/%d]: %s', i, len(events), event)
- await self._process_sync_event(event)
- except asyncio.CancelledError:
- logger.info('File sync interrupted')
- raise
- except Exception:
- logger.exception('File sync failed')
- else:
- logger.info('File sync stopped')
- finally:
- filesystem_watch_task.cancel()
- with contextlib.suppress(asyncio.CancelledError):
- await filesystem_watch_task
- if remote_sync_task is not None:
- remote_sync_task.cancel()
- with contextlib.suppress(asyncio.CancelledError):
- await remote_sync_task
- if background_sync_task is not None:
- background_sync_task.cancel()
- with contextlib.suppress(asyncio.CancelledError):
- await background_sync_task
-
  async def _background_sync(self: 'Cli') -> None:
  logger.debug('Starting background sync')
  self.local_source_directory.mkdir(parents=True, exist_ok=True)
  with contextlib.suppress(OSError):
- self.sftp_client.mkdir(REMOTE_SOURCE_DIRECTORY)
+ self.sftp_client.mkdir(self.cde_type['value']['remote-source-directory'])
+ file_sync_exclusions = self.cde_type['value'].get('file-sync-exclusions')
+ if file_sync_exclusions is None:
+ file_sync_exclusions = [
+ 'build-cache-*', # TODO: make exclusions configurable
+ 'dev-tool/config',
+ 'alembic.ini',
+ 'cypress/screenshots',
+ 'cypress/videos',
+ 'flow_api',
+ '.git',
+ '__pycache__',
+ '.cache',
+ 'node_modules',
+ '.venv',
+ 'bundle-content', # until https://app.clickup.com/t/86bxn0exx
+ 'cloudomation-fe/build',
+ 'devstack-self-service-portal/vite-cache',
+ 'devstack-self-service-portal/dist',
+ 'documentation/generator/generated',
+ 'version.py',
+ 'instantclient-basic-linux.x64.zip',
+ 'msodbcsql.deb',
+ 'auth/report',
+ 'cloudomation-fe/.env',
+ 'cloudomation/tmp_git_task',
+ 'cloudomation/tmp',
+ 'cloudomation/notifications',
+ 'documentation/versioned_docs',
+ ]
  try:
  await run_subprocess(
  'rsync',
@@ -534,43 +1548,27 @@ class Cli:
  '-e', f'ssh -o ConnectTimeout=10 -o UserKnownHostsFile={self.known_hosts_file.name}',
  '--archive',
  '--delete',
- '--exclude', 'build-cache-*', # TODO: make exclusions configurable
- '--exclude', 'dev-tool/config',
- '--exclude', 'alembic.ini',
- '--exclude', 'cypress/screenshots',
- '--exclude', 'cypress/videos',
- '--exclude', 'flow_api',
- '--exclude', '.git',
- '--exclude', '__pycache__',
- '--exclude', '.cache',
- '--exclude', 'node_modules',
- '--exclude', '.venv',
- '--exclude', 'bundle-content', # until https://app.clickup.com/t/86bxn0exx
- '--exclude', 'cloudomation-fe/build',
- '--exclude', 'devstack-self-service-portal/vite-cache',
- '--exclude', 'devstack-self-service-portal/dist',
- '--exclude', 'documentation/generator/generated',
- '--exclude', 'version.py',
- '--exclude', 'instantclient-basic-linux.x64.zip',
- '--exclude', 'msodbcsql.deb',
- '--exclude', 'auth/report',
- '--exclude', 'cloudomation-fe/.env',
- '--exclude', 'cloudomation/tmp_git_task',
- '--exclude', 'cloudomation/tmp',
- '--exclude', 'cloudomation/notifications',
- '--exclude', 'documentation/versioned_docs',
+ '--checksum', # do not compare timestamps. new CDE template will have all timestamps new,
+ # but we only want to copy if the content is different
+ '--ignore-times', # we also use this to avoid syncing timestamps on all directories
+ *itertools.chain.from_iterable([
+ ('--exclude', exclusion)
+ for exclusion
+ in file_sync_exclusions
+ ]),
  '--human-readable',
- '--info=name1',
+ '--verbose',
  f'{self.local_source_directory}/',
- f'{REMOTE_USERNAME}@{self.hostname}:{REMOTE_SOURCE_DIRECTORY}',
+ f"{self.cde_type['value']['remote-username']}@{self.hostname}:{self.cde_type['value']['remote-source-directory']}",
  ],
  name='Background sync',
+ print_to_debug_log=True,
  )
  except asyncio.CancelledError:
  logger.debug('Background sync interrupted')
  raise
  except SubprocessError as ex:
- logger.error('Background sync failed: %s', ex) # noqa: TRY400
+ logger.error('Background sync failed:\n%s: %s', type(ex).__name__, str(ex)) # noqa: TRY400
  except Exception:
  logger.exception('Background sync failed')
  else:
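The rewritten rsync invocation expands the per-CDE exclusion list into repeated --exclude flags and compares file content (--checksum together with --ignore-times) instead of timestamps, so a freshly built CDE template whose files all have new mtimes does not trigger a full re-upload. A short sketch of assembling such a command line, with placeholder paths and exclusions:

import itertools
import shlex

exclusions = ['.git', '__pycache__', 'node_modules']  # placeholder list

rsync_args = [
    'rsync',
    '--archive',
    '--delete',
    '--checksum',      # decide by content hash, not size and mtime
    '--ignore-times',  # do not skip files just because timestamps match
    *itertools.chain.from_iterable(('--exclude', item) for item in exclusions),
    'local-src/',             # placeholder source directory
    'user@host:remote-src',   # placeholder destination
]
print(shlex.join(rsync_args))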
@@ -579,7 +1577,7 @@ class Cli:
  async def _reverse_background_sync(self: 'Cli') -> None:
  logger.debug('Starting reverse background sync')
  with contextlib.suppress(OSError):
- self.sftp_client.mkdir(REMOTE_OUTPUT_DIRECTORY)
+ self.sftp_client.mkdir(self.cde_type['value']['remote-output-directory'])
  self.local_output_directory.mkdir(parents=True, exist_ok=True)
  try:
  stdout, stderr = await run_subprocess(
@@ -589,8 +1587,7 @@ class Cli:
  '--archive',
  '--exclude', '__pycache__',
  '--human-readable',
- '--info=name1',
- f'{REMOTE_USERNAME}@{self.hostname}:{REMOTE_OUTPUT_DIRECTORY}/',
+ f"{self.cde_type['value']['remote-username']}@{self.hostname}:{self.cde_type['value']['remote-output-directory']}/",
  str(self.local_output_directory),
  ],
  name='Reverse background sync',
@@ -598,7 +1595,9 @@ class Cli:
  except asyncio.CancelledError:
  logger.debug('Reverse background sync interrupted')
  raise
- except SubprocessError:
+ except SubprocessError as ex:
+ logger.error('Reverse background sync failed:\n%s: %s', type(ex).__name__, str(ex)) # noqa: TRY400
+ except Exception:
  logger.exception('Reverse background sync failed')
  else:
  logger.debug('Reverse background sync done')
@@ -633,7 +1632,7 @@ class Cli:
  async def _process_sync_event(self: 'Cli', event: watchdog.events.FileSystemEvent) -> None:
  local_path = pathlib.Path(event.src_path)
  relative_path = local_path.relative_to(self.local_source_directory)
- remote_path = f'{REMOTE_SOURCE_DIRECTORY}/{relative_path}'
+ remote_path = f"{self.cde_type['value']['remote-source-directory']}/{relative_path}"
  if isinstance(event, watchdog.events.DirCreatedEvent):
  await self._remote_directory_create(remote_path)
  elif isinstance(event, watchdog.events.DirDeletedEvent):
@@ -649,7 +1648,7 @@ class Cli:
  elif isinstance(event, watchdog.events.FileMovedEvent):
  dest_local_path = pathlib.Path(event.dest_path)
  dest_relative_path = dest_local_path.relative_to(self.local_source_directory)
- dest_remote_path = f'{REMOTE_SOURCE_DIRECTORY}/{dest_relative_path}'
+ dest_remote_path = f"{self.cde_type['value']['remote-source-directory']}/{dest_relative_path}"
  stat = dest_local_path.stat()
  times = (stat.st_atime, stat.st_mtime)
  await self._remote_file_move(remote_path, dest_remote_path, times)
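_process_sync_event dispatches on the concrete watchdog event class to pick the matching remote operation, as the two hunks above show for directory creation and file moves. A condensed sketch of that dispatch, with the handlers reduced to descriptions; the file create/modify branch is an assumption, since its handler falls outside these hunks:

import watchdog.events

def describe(event: watchdog.events.FileSystemEvent) -> str:
    # Map a local filesystem event to the remote operation it should trigger.
    if isinstance(event, watchdog.events.DirCreatedEvent):
        return 'create remote directory'                 # mirrors _remote_directory_create
    if isinstance(event, watchdog.events.FileMovedEvent):
        return 'move remote file, preserving times'      # mirrors _remote_file_move
    if isinstance(event, (watchdog.events.FileCreatedEvent, watchdog.events.FileModifiedEvent)):
        return 'upload file contents'                    # assumed; handler not shown in this hunk
    return 'ignore'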
@@ -697,147 +1696,126 @@ class Cli:
  self.sftp_client.rename(remote_path, dest_remote_path)
  self.sftp_client.utime(dest_remote_path, times)
 
- async def _start_port_forwarding(self: 'Cli') -> None:
- logger.info('Starting port forwarding of ports 8443, 5678, 6678, 7678, 8678, 3000, 2022')
- try:
- await run_subprocess(
- 'ssh',
- [
- '-o', 'ConnectTimeout=10',
- '-o', f'UserKnownHostsFile={self.known_hosts_file.name}',
- '-NT',
- f'{REMOTE_USERNAME}@{self.hostname}',
- '-L', '8443:localhost:443', # TODO: make ports configurable
- '-L', '5678:localhost:5678',
- '-L', '6678:localhost:6678',
- '-L', '7678:localhost:7678',
- '-L', '8678:localhost:8678',
- '-L', '3000:localhost:3000',
- '-L', '2022:localhost:2022',
- ],
- name='Port forwarding',
- capture_stdout=False,
- )
- except asyncio.CancelledError:
- logger.info('Port forwarding interrupted')
- raise
- except SubprocessError:
- logger.exception('Port forwarding failed')
+ #####
+ ##### LOGS
+ #####
+ async def _toggle_logs(self: 'Cli') -> None:
+ await self._update_cde_list()
+ if self.logs_task is None:
+ await self._start_logs()
  else:
- logger.info('Port forwarding done')
-
+ await self._stop_logs()
 
  async def _start_logs(self: 'Cli') -> None:
- logger.info('Following logs')
- stdout_queue = asyncio.Queue()
- stderr_queue = asyncio.Queue()
- stream_task = self.loop.run_in_executor(
- executor=None,
- func=functools.partial(
- self._stream_logs,
- stdout_queue=stdout_queue,
- stderr_queue=stderr_queue,
- ),
- )
- try:
- stdout_get = asyncio.create_task(stdout_queue.get())
- stderr_get = asyncio.create_task(stderr_queue.get())
- while True:
- done, pending = await asyncio.wait(
- {stdout_get, stderr_get},
- return_when=asyncio.FIRST_COMPLETED,
- )
- if stdout_get in done:
- stdout = await stdout_get
- if stdout is not None:
- self.console.print(rich.markup.escape(stdout.strip()), style='default on grey23', justify='left')
- stdout_get = asyncio.create_task(stdout_queue.get())
- if stderr_get in done:
- stderr = await stderr_get
- if stderr is not None:
- self.console.print(rich.markup.escape(stderr.strip()), style='default on red', justify='left')
- stderr_get = asyncio.create_task(stderr_queue.get())
- except asyncio.CancelledError:
- logger.info('Following logs interrupted')
- raise
- except Exception:
- logger.exception('Following logs failed')
- else:
- logger.info('Stopped following logs')
- finally:
- stream_task.cancel()
- with contextlib.suppress(asyncio.CancelledError):
- await stream_task
+ if not self.cde:
+ logger.error('No CDE is selected. Cannot follow logs.')
+ return
+ if not self.is_cde_running:
+ logger.error('CDE is not running. Cannot follow logs.')
+ return
+ if self.ssh_client is None:
+ logger.error('Not connected to CDE. Cannot follow logs.')
+ return
+ self.logs_task = asyncio.create_task(self._bg_logs())
 
+ async def _stop_logs(self: 'Cli') -> None:
+ self.logs_task.cancel()
+ self.logs_task = None
 
- def _stream_logs(
- self: 'Cli',
- stdout_queue: asyncio.Queue,
- stderr_queue: asyncio.Queue,
- ) -> None:
- ssh_stdin, ssh_stdout, ssh_stderr = self.ssh_client.exec_command(
- 'cd /home/devstack-user/starflows/research/dev-tool && . dev.sh logs',
- get_pty=False,
- timeout=0,
- )
- ssh_stdin.close()
- have_stdout = False
- have_stderr = False
+ async def _bg_logs(self: 'Cli') -> None:
  while True:
+ logger.info('Following logs')
  try:
- stdout = ssh_stdout.readline(1024)
- except TimeoutError:
- have_stdout = False
+ async with asyncssh.connect(
+ self.hostname,
+ connect_timeout=10,
+ known_hosts=self.known_hosts_file.name,
+ username=self.cde_type['value']['remote-username'],
+ term_type=os.environ.get('TERM'),
+ ) as conn:
+ await conn.run(input='dev.sh logs\n', stdout=sys.stdout, stderr=sys.stderr, recv_eof=False)
+ except asyncio.CancelledError:
+ logger.info('Following logs interrupted')
+ raise
+ except Exception:
+ logger.exception('Following logs failed')
+ logger.info('Will retry following logs in %s seconds', RETRY_DELAY_SECONDS)
+ await asyncio.sleep(RETRY_DELAY_SECONDS)
+ await self._check_background_tasks()
  else:
- have_stdout = True
+ logger.info('Stopped following logs')
+ break
+
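Log following now opens an asyncssh connection, feeds 'dev.sh logs' to the remote shell via input=, and mirrors the remote output onto the local stdout/stderr, retrying after RETRY_DELAY_SECONDS if the connection drops. A reduced sketch of that call pattern; hostname, username and host-key handling are simplified placeholders, not the CLI's actual configuration:

import asyncio
import sys

import asyncssh

async def follow_logs(hostname: str, username: str) -> None:
    # Open an SSH session and stream the remote command's output locally.
    async with asyncssh.connect(
        hostname,
        username=username,
        connect_timeout=10,
        known_hosts=None,  # placeholder; the CLI points this at its own known_hosts file
    ) as conn:
        await conn.run('dev.sh logs', stdout=sys.stdout, stderr=sys.stderr)

# asyncio.run(follow_logs('cde.example.com', 'devstack-user'))  # hypothetical values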
+ #####
+ ##### TERMINAL
+ #####
+ async def _open_terminal(self: 'Cli') -> None:
+ await self._update_cde_list()
+ if not self.cde:
+ logger.error('No CDE is selected. Cannot open terminal.')
+ return
+ if not self.is_cde_running:
+ logger.error('CDE is not running. Cannot open terminal.')
+ return
+ if self.ssh_client is None:
+ logger.error('Not connected to CDE. Cannot open terminal.')
+ return
+ while True:
+ logger.info('Opening interactive terminal (press CTRL+D or enter "exit" to close)')
+ await self._reset_keyboard()
+ _fd = sys.stdin.fileno()
+ _tcattr = termios.tcgetattr(_fd)
+ tty.setcbreak(_fd)
  try:
- stderr = ssh_stderr.readline(1024)
- except TimeoutError:
- have_stderr = False
+ terminal_size = shutil.get_terminal_size()
+ async with asyncssh.connect(
+ self.hostname,
+ connect_timeout=10,
+ known_hosts=self.known_hosts_file.name,
+ username=self.cde_type['value']['remote-username'],
+ term_type=os.environ.get('TERM'),
+ term_size=(terminal_size.columns, terminal_size.lines),
+ ) as conn:
+ try:
+ async with conn.create_process(
+ stdin=os.dup(sys.stdin.fileno()),
+ stdout=os.dup(sys.stdout.fileno()),
+ stderr=os.dup(sys.stderr.fileno()),
+ ) as self.terminal_process:
+ await self.terminal_process.wait()
+ finally:
+ self.terminal_process = None
+ except asyncio.CancelledError:
+ logger.info('Interactive terminal interrupted')
+ raise
+ except Exception:
+ logger.exception('Interactive terminal failed')
+ logger.info('Will retry interactive terminal in %s seconds', RETRY_DELAY_SECONDS)
+ await asyncio.sleep(RETRY_DELAY_SECONDS)
+ await self._check_background_tasks()
  else:
- have_stderr = True
- if have_stdout and stdout:
- self.loop.call_soon_threadsafe(stdout_queue.put_nowait, stdout)
- if have_stderr and stderr:
- self.loop.call_soon_threadsafe(stderr_queue.put_nowait, stderr)
- if have_stdout and not stdout and have_stderr and not stderr:
+ logger.info('Interactive terminal closed')
  break
- if not have_stdout and not have_stderr:
- time.sleep(.5)
- self.loop.call_soon_threadsafe(stdout_queue.put_nowait, None)
- self.loop.call_soon_threadsafe(stderr_queue.put_nowait, None)
+ finally:
+ termios.tcsetattr(_fd, termios.TCSADRAIN, _tcattr)
+ await self._setup_keyboard()
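_open_terminal puts local stdin into cbreak mode, hands duplicated stdin/stdout/stderr descriptors to an asyncssh process so keystrokes reach the remote PTY directly, and restores the saved termios state in the finally block. The save/restore part in isolation looks roughly like this; the session body is a placeholder callable, not the actual SSH code:

import sys
import termios
import tty

def with_cbreak_terminal(session) -> None:
    # Run session() with per-keystroke input, always restoring the terminal afterwards.
    fd = sys.stdin.fileno()
    saved = termios.tcgetattr(fd)
    tty.setcbreak(fd)
    try:
        session()  # placeholder for the interactive work
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, saved)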
 
+ async def _setup_keyboard(self: 'Cli') -> None:
+ self._fd = sys.stdin.fileno()
+ self._tcattr = termios.tcgetattr(self._fd)
+ tty.setcbreak(self._fd)
+ def on_stdin() -> None:
+ self.loop.call_soon_threadsafe(self.key_queue.put_nowait, sys.stdin.read(1))
+ self.loop.add_reader(sys.stdin, on_stdin)
 
- def main() -> None:
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '-H', '--hostname',
- required=True,
- help='the IP or hostname of the RDE',
- )
- parser.add_argument(
- '-s', '--source-directory',
- required=True,
- help='a local directory where the sources from the RDE are cached',
- )
- parser.add_argument(
- '-o', '--output-directory',
- help='a local directory where artifacts created on the RDE are stored',
- )
- parser.add_argument(
- '-v', '--verbose',
- action='store_true',
- help='enable debug logging',
- )
- parser.add_argument(
- '-V', '--version',
- action='version',
- version=f'Cloudomation devstack-cli {version.MAJOR}+{version.BRANCH_NAME}.{version.BUILD_DATE}.{version.SHORT_SHA}',
- )
- args = parser.parse_args()
+ async def _reset_keyboard(self: 'Cli') -> None:
+ self.loop.remove_reader(sys.stdin)
+ termios.tcsetattr(self._fd, termios.TCSADRAIN, self._tcattr)
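_setup_keyboard registers stdin with the event loop so single keypresses are pushed onto a queue without blocking the loop, and _reset_keyboard removes the reader and restores the termios settings before something else, such as the interactive terminal, takes over stdin. A compact sketch of that add_reader pattern, assuming the terminal is already in cbreak mode and using a hypothetical quit key:

import asyncio
import sys

async def read_keys(stop_key: str = 'q') -> None:
    # Forward single characters from stdin into an asyncio queue via loop.add_reader.
    loop = asyncio.get_running_loop()
    key_queue: asyncio.Queue = asyncio.Queue()
    loop.add_reader(sys.stdin, lambda: key_queue.put_nowait(sys.stdin.read(1)))
    try:
        while True:
            key = await key_queue.get()
            if key == stop_key:  # hypothetical quit key
                break
    finally:
        loop.remove_reader(sys.stdin)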
 
- cli = Cli(args)
- with contextlib.suppress(KeyboardInterrupt):
+ def main() -> None:
+ cli = Cli()
+ signal.signal(signal.SIGINT, functools.partial(sigint_handler, cli=cli))
+ with contextlib.suppress(asyncio.CancelledError):
  asyncio.run(cli.run())
  logger.info('Bye!')