osism-0.20250514.0-py3-none-any.whl → osism-0.20250525.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
osism/api.py CHANGED
@@ -46,23 +46,23 @@ class LogConfig(BaseModel):
     LOG_LEVEL: str = "DEBUG"

     # Logging config
-    version = 1
-    disable_existing_loggers = False
-    formatters = {
+    version: int = 1
+    disable_existing_loggers: bool = False
+    formatters: dict = {
         "default": {
             "()": "uvicorn.logging.DefaultFormatter",
             "fmt": LOG_FORMAT,
             "datefmt": "%Y-%m-%d %H:%M:%S",
         },
     }
-    handlers = {
+    handlers: dict = {
         "default": {
             "formatter": "default",
             "class": "logging.StreamHandler",
             "stream": "ext://sys.stderr",
         },
     }
-    loggers = {
+    loggers: dict = {
         "api": {"handlers": ["default"], "level": LOG_LEVEL},
     }

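The only change in api.py is that the LogConfig attributes gain type annotations. A plausible motivation (an assumption, not stated in the diff) is pydantic v2 compatibility: the explicit pydantic 1.x pin disappears from the requirements below, and pydantic v2 only treats annotated class attributes as model fields. A minimal sketch of that behaviour:

# Sketch (assumption: pydantic v2 semantics; not code from the package).
from pydantic import BaseModel

class LogConfig(BaseModel):
    LOG_LEVEL: str = "DEBUG"
    version: int = 1                        # annotated -> becomes a model field
    disable_existing_loggers: bool = False

print(LogConfig().model_dump())
# {'LOG_LEVEL': 'DEBUG', 'version': 1, 'disable_existing_loggers': False}
# With a bare `version = 1` (no annotation), pydantic v2 rejects the model
# with "A non-annotated attribute was detected".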
osism/commands/compute.py CHANGED
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: Apache-2.0

 import time
+import datetime

 from cliff.command import Command
 from jc import parse
@@ -322,7 +323,13 @@ class ComputeMigrate(Command):
         parser.add_argument(
             "--no-wait",
             default=False,
-            help="Do not wait for completion of migration",
+            help="Do not wait for completion of migration (Resize of cold migrated instances will not be confirmed!)",
+            action="store_true",
+        )
+        parser.add_argument(
+            "--no-cold-migration",
+            default=False,
+            help="Do not cold migrate instances",
             action="store_true",
         )
         parser.add_argument(
@@ -368,6 +375,7 @@ class ComputeMigrate(Command):
         target = parsed_args.target
         force = parsed_args.force
         no_wait = parsed_args.no_wait
+        no_cold_migration = parsed_args.no_cold_migration
         yes = parsed_args.yes
         domain = parsed_args.domain
         project = parsed_args.project
@@ -393,9 +401,13 @@ class ComputeMigrate(Command):
             logger.info(f"No migratable instances found on node {host}")

         for server in result:
-            if server[2] not in ["ACTIVE", "PAUSED"]:
+            if server[2] in ["ACTIVE", "PAUSED"]:
+                migration_type = "live"
+            elif server[2] in ["SHUTOFF"] and not no_cold_migration:
+                migration_type = "cold"
+            else:
                 logger.info(
-                    f"{server[0]} ({server[1]}) in status {server[2]} cannot be live migrated"
+                    f"{server[0]} ({server[1]}) in status {server[2]} cannot be migrated"
                 )
                 continue

@@ -403,27 +415,248 @@ class ComputeMigrate(Command):
                 answer = "yes"
             else:
                 answer = prompt(
-                    f"Live migrate server {server[0]} ({server[1]}) [yes/no]: "
+                    f"{migration_type.capitalize()} migrate server {server[0]} ({server[1]}) [yes/no]: "
                 )

             if answer in ["yes", "y"]:
-                logger.info(f"Live migrating server {server[0]}")
-                conn.compute.live_migrate_server(
-                    server[0], host=target, block_migration="auto", force=force
+                logger.info(
+                    f"{migration_type.capitalize()} migrating server {server[0]}"
                 )
+                if migration_type == "live":
+                    conn.compute.live_migrate_server(
+                        server[0], host=target, block_migration="auto", force=force
+                    )
+                elif migration_type == "cold":
+                    conn.compute.migrate_server(server[0], host=target)

                 if not no_wait:
-                    inner_wait = True
-                    while inner_wait:
+                    while True:
                         time.sleep(2)
                         s = conn.compute.get_server(server[0])
-                        if s.status in ["MIGRATING"]:
+                        if (
+                            migration_type == "live"
+                            and s.status in ["MIGRATING"]
+                            or migration_type == "cold"
+                            and s.status in ["RESIZE"]
+                        ):
                             logger.info(
-                                f"Live migration of {server[0]} ({server[1]}) is still in progress"
+                                f"{migration_type.capitalize()} migration of {server[0]} ({server[1]}) is still in progress"
                             )
-                            inner_wait = True
+                        elif migration_type == "cold" and s.status in ["VERIFY_RESIZE"]:
+                            try:
+                                conn.compute.confirm_server_resize(s)
+                                logger.info(
+                                    f"{migration_type.capitalize()} migration of {server[0]} ({server[1]}) confirmed"
+                                )
+                            except Exception as exc:
+                                logger.error(
+                                    f"{migration_type.capitalize()} migration of {server[0]} ({server[1]}) could not be confirmed"
+                                )
+                                raise exc
+                            # NOTE: There seems to be no simple way to check whether the resize
+                            # has been confirmed. The state is still "VERIFY_RESIZE" afterwards.
+                            # Therefore we drop out without waiting for the "SHUTOFF" state
+                            break
                         else:
-                            inner_wait = False
+                            logger.info(
+                                f"{migration_type.capitalize()} migration of {server[0]} ({server[1]}) completed with status {s.status}"
+                            )
+                            break
+
+
+class ComputeMigrationList(Command):
+    def get_parser(self, prog_name):
+        parser = super(ComputeMigrationList, self).get_parser(prog_name)
+        parser.add_argument(
+            "--host",
+            default=None,
+            type=str,
+            help="Only list migrations with the given host as source or destination",
+        )
+        parser.add_argument(
+            "--server",
+            default=None,
+            type=str,
+            help="Only list migrations for the given instance (name or ID)",
+        )
+        parser.add_argument(
+            "--user",
+            default=None,
+            type=str,
+            help="Only list migrations for the given user (name or ID)",
+        )
+        parser.add_argument(
+            "--user-domain",
+            default=None,
+            type=str,
+            help="Domain the user belongs to (name or ID)",
+        )
+        parser.add_argument(
+            "--project",
+            default=None,
+            type=str,
+            help="Only list migrations for the given project (name or ID)",
+        )
+        parser.add_argument(
+            "--project-domain",
+            default=None,
+            type=str,
+            help="Domain the project belongs to (name or ID)",
+        )
+        parser.add_argument(
+            "--status",
+            default=None,
+            type=str,
+            help="Only list migrations with the given status",
+        )
+        parser.add_argument(
+            "--type",
+            default=None,
+            choices=["migration", "live-migration", "evacuation", "resize"],
+            type=str,
+            help="Only list migrations with the given type",
+        )
+        parser.add_argument(
+            "--changes-since",
+            default=None,
+            type=datetime.datetime.fromisoformat,
+            help="Only list migrations last changed since the given date in ISO 8601 format (CCYY-MM-DDThh:mm:ss±hh:mm)",
+        )
+        parser.add_argument(
+            "--changes-before",
+            default=None,
+            type=datetime.datetime.fromisoformat,
+            help="Only list migrations last changed before the given date in ISO 8601 format (CCYY-MM-DDThh:mm:ss±hh:mm)",
+        )
+        return parser
+
+    def take_action(self, parsed_args):
+        host = parsed_args.host
+        server = parsed_args.server
+        user = parsed_args.user
+        user_domain = parsed_args.user_domain
+        project = parsed_args.project
+        project_domain = parsed_args.project_domain
+        status = parsed_args.status
+        migration_type = parsed_args.type
+        changes_since = parsed_args.changes_since
+        changes_before = parsed_args.changes_before
+
+        if changes_before and changes_since:
+            if not changes_since <= changes_before:
+                logger.error(
+                    "changes-since needs to be less than or equal to changes-before"
+                )
+                return
+
+        conn = get_cloud_connection()
+
+        user_id = None
+        if user:
+            user_query = {}
+
+            if user_domain:
+                u_d = conn.identity.find_domain(user_domain, ignore_missing=True)
+                if u_d and "id" in u_d:
+                    user_query = dict(domain_id=u_d.id)
+                else:
+                    logger.error(f"No domain found for {user_domain}")
+                    return
+
+            u = conn.identity.find_user(user, ignore_missing=True, **user_query)
+            if u and "id" in u:
+                user_id = u.id
+            else:
+                logger.error(f"No user found for {user}")
+                return
+
+        project_id = None
+        if project:
+            project_query = {}
+
+            if project_domain:
+                p_d = conn.identity.find_domain(project_domain, ignore_missing=True)
+                if p_d and "id" in p_d:
+                    project_query = dict(domain_id=p_d.id)
+                else:
+                    logger.error(f"No domain found for {project_domain}")
+                    return
+
+            p = conn.identity.find_project(
+                project, ignore_missing=True, **project_query
+            )
+            if p and "id" in p:
+                project_id = p.id
+            else:
+                logger.error(f"No project found for {project}")
+                return
+
+        instance_uuid = None
+        if server:
+            try:
+                s = conn.compute.find_server(
+                    server, details=False, ignore_missing=False, all_projects=True
+                )
+                if s and "id" in s:
+                    instance_uuid = s.id
+                else:
+                    raise openstack.exceptions.NotFoundException
+            except openstack.exceptions.DuplicateResource:
+                logger.error(f"Multiple servers were found for {server}")
+                return
+            except openstack.exceptions.NotFoundException:
+                logger.error(f"No server found for {server}")
+                return
+
+        query = {}
+        if host:
+            query.update(dict(host=host))
+        if instance_uuid:
+            query.update(dict(instance_uuid=instance_uuid))
+        if status:
+            query.update(dict(status=status))
+        if migration_type:
+            query.update(dict(migration_type=migration_type))
+        if user_id:
+            query.update(dict(user_id=user_id))
+        if project_id:
+            query.update(dict(project_id=project_id))
+        if changes_since:
+            query.update(dict(changes_since=changes_since))
+        if changes_before:
+            query.update(dict(changes_before=changes_before))
+
+        migrations = conn.compute.migrations(**query)
+        result = [
+            [
+                m.source_compute,
+                m.dest_compute,
+                m.status,
+                m.migration_type,
+                m["instance_uuid"],
+                m.user_id,
+                m.created_at,
+                m.updated_at,
+            ]
+            for m in migrations
+        ]
+
+        print(
+            tabulate(
+                result,
+                headers=[
+                    "Source",
+                    "Destination",
+                    "Status",
+                    "Type",
+                    "Server UUID",
+                    "User",
+                    "Created At",
+                    "Updated At",
+                ],
+                tablefmt="psql",
+            )
+        )


 class ComputeStart(Command):
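Taken together, ComputeMigrate now picks a live or cold migration per instance and confirms the resulting resize, and the new ComputeMigrationList command (wired up as "manage compute migration list" in entry_points.txt below) lists migrations. A condensed sketch of the openstacksdk flow the migrate path implements (assumption: a cloud named "admin" in clouds.yaml; error handling and the wait loop omitted):

# Condensed sketch of the live/cold migration flow, not the full command.
import openstack

conn = openstack.connect(cloud="admin")  # assumption: configured cloud name

def migrate(server_id, target=None, live=True):
    if live:
        # live migration keeps the instance running
        conn.compute.live_migrate_server(server_id, host=target, block_migration="auto")
    else:
        # cold migration ends in VERIFY_RESIZE and must be confirmed explicitly
        conn.compute.migrate_server(server_id, host=target)
        server = conn.compute.wait_for_server(
            conn.compute.get_server(server_id), status="VERIFY_RESIZE"
        )
        conn.compute.confirm_server_resize(server)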
osism/commands/netbox.py CHANGED
@@ -28,9 +28,7 @@ class Ironic(Command):
     def take_action(self, parsed_args):
         wait = not parsed_args.no_wait

-        task = conductor.sync_netbox_with_ironic.delay(
-            force_update=parsed_args.force_update
-        )
+        task = conductor.sync_ironic.delay(force_update=parsed_args.force_update)
         if wait:
             logger.info(
                 f"Task {task.task_id} (sync ironic) is running. Wait. No more output."
@@ -38,6 +36,27 @@ class Ironic(Command):
             task.wait(timeout=None, interval=0.5)


+class Sync(Command):
+    def get_parser(self, prog_name):
+        parser = super(Sync, self).get_parser(prog_name)
+        parser.add_argument(
+            "--no-wait",
+            help="Do not wait until the sync has been completed",
+            action="store_true",
+        )
+        return parser
+
+    def take_action(self, parsed_args):
+        wait = not parsed_args.no_wait
+
+        task = conductor.sync_netbox.delay()
+        if wait:
+            logger.info(
+                f"Task {task.task_id} (sync netbox) is running. Wait. No more output."
+            )
+            task.wait(timeout=None, interval=0.5)
+
+
 class Manage(Command):
     def get_parser(self, prog_name):
         parser = super(Manage, self).get_parser(prog_name)
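The new Sync command follows the same enqueue-and-optionally-wait pattern as Ironic, only pointed at the new conductor.sync_netbox task (which, per the conductor.py change below, is still a stub that logs "Not implemented"). A minimal sketch of that pattern outside cliff (assumption: a Celery broker and worker are running and osism.tasks.conductor is importable):

# Sketch of the enqueue-and-wait pattern used by both commands.
from osism.tasks import conductor

task = conductor.sync_ironic.delay(force_update=False)  # returns an AsyncResult immediately
task.wait(timeout=None, interval=0.5)                   # poll every 0.5 s until the worker finishes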
osism/settings.py CHANGED
@@ -35,8 +35,8 @@ INVENTORY_RECONCILER_SCHEDULE = float(

 OSISM_API_URL = os.getenv("OSISM_API_URL", None)

-NETBOX_FILTER_LIST = os.getenv(
-    "NETBOX_FILTER_LIST",
+NETBOX_FILTER_CONDUCTOR = os.getenv(
+    "NETBOX_FILTER_CONDUCTOR",
     "[{'state': 'active', 'tag': ['managed-by-ironic']}]",
 )

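NETBOX_FILTER_LIST is renamed to NETBOX_FILTER_CONDUCTOR; the default value is unchanged and is still parsed as a YAML list of NetBox device filters (see the conductor.py change below). A small sketch of how such a value round-trips (assumption: yaml.safe_load, as used in conductor.py):

# Sketch: the environment value is YAML and parses into a list of filter mappings.
import os
import yaml

os.environ["NETBOX_FILTER_CONDUCTOR"] = "[{'state': 'active', 'tag': ['managed-by-ironic']}]"
filters = yaml.safe_load(os.environ["NETBOX_FILTER_CONDUCTOR"])
print(filters)  # [{'state': 'active', 'tag': ['managed-by-ironic']}]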
osism/tasks/ansible.py CHANGED
@@ -1,8 +1,9 @@
 # SPDX-License-Identifier: Apache-2.0

 from celery import Celery
+from pottery import Redlock

-from osism import settings
+from osism import settings, utils
 from osism.tasks import Config, run_ansible_in_environment

 app = Celery("ansible")
@@ -11,9 +12,14 @@ app.config_from_object(Config)

 @app.on_after_configure.connect
 def setup_periodic_tasks(sender, **kwargs):
-    sender.add_periodic_task(
-        settings.GATHER_FACTS_SCHEDULE, gather_facts.s(), expires=10
+    lock = Redlock(
+        key="lock_osism_tasks_ansible_setup_periodic_tasks",
+        masters={utils.redis},
     )
+    if settings.GATHER_FACTS_SCHEDULE > 0 and lock.acquire(timeout=10):
+        sender.add_periodic_task(
+            settings.GATHER_FACTS_SCHEDULE, gather_facts.s(), expires=10
+        )


 @app.task(bind=True, name="osism.tasks.ansible.gather_facts")
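Periodic-task registration is now skipped when GATHER_FACTS_SCHEDULE is 0 and is guarded by a Redlock, so that when several processes configure the Celery app only the first one registers the schedule. A standalone sketch of that guard (assumption: a reachable Redis instance; the key name is illustrative):

# Sketch of the Redlock guard pattern (pottery), independent of Celery.
from pottery import Redlock
from redis import Redis

redis = Redis.from_url("redis://localhost:6379/0")  # assumption: local Redis
lock = Redlock(key="lock_example_setup_periodic_tasks", masters={redis})

if lock.acquire(timeout=10):
    # only the process holding the lock registers the periodic task
    print("registering periodic task")
else:
    print("another process already registered it")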
osism/tasks/conductor.py CHANGED
@@ -33,7 +33,7 @@ def get_nb_device_query_list():
            "tag",
            "state",
        ]
-        nb_device_query_list = yaml.safe_load(settings.NETBOX_FILTER_LIST)
+        nb_device_query_list = yaml.safe_load(settings.NETBOX_FILTER_CONDUCTOR)
        if type(nb_device_query_list) is not list:
            raise TypeError
        for nb_device_query in nb_device_query_list:
@@ -55,11 +55,11 @@ def get_nb_device_query_list():
                    raise ValueError(f"Invalid name {value_name} for {key}")
    except (yaml.YAMLError, TypeError):
        logger.error(
-            f"Setting NETBOX_FILTER_LIST needs to be an array of mappings containing supported netbox device filters: {supported_nb_device_filters}"
+            f"Setting NETBOX_FILTER_CONDUCTOR needs to be an array of mappings containing supported netbox device filters: {supported_nb_device_filters}"
        )
        nb_device_query_list = []
    except ValueError as exc:
-        logger.error(f"Unknown value in NETBOX_FILTER_LIST: {exc}")
+        logger.error(f"Unknown value in NETBOX_FILTER_CONDUCTOR: {exc}")
        nb_device_query_list = []

    return nb_device_query_list
@@ -145,8 +145,13 @@ def get_ironic_parameters(self):
        return {}


-@app.task(bind=True, name="osism.tasks.conductor.sync_netbox_with_ironic")
-def sync_netbox_with_ironic(self, force_update=False):
+@app.task(bind=True, name="osism.tasks.conductor.sync_netbox")
+def sync_netbox(self, force_update=False):
+    logger.info("Not implemented")
+
+
+@app.task(bind=True, name="osism.tasks.conductor.sync_ironic")
+def sync_ironic(self, force_update=False):
    def deep_compare(a, b, updates):
        """
        Find items in a that do not exist in b or are different.
@@ -307,7 +312,7 @@ def sync_netbox_with_ironic(self, force_update=False):
        ]

        lock = Redlock(
-            key=f"lock_osism_tasks_conductor_sync_netbox_with_ironic-{device.name}",
+            key=f"lock_osism_tasks_conductor_sync_ironic-{device.name}",
            masters={utils.redis},
            auto_release_time=600,
        )
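The NetBox/Ironic sync task is split: sync_netbox_with_ironic is renamed to sync_ironic (including its per-device Redlock key), and a new sync_netbox task is added that currently only logs "Not implemented". Celery tasks are addressed by their registered names, so existing callers have to switch to the new name; a sketch (assumption: the Celery app object exported by osism.tasks.conductor and a running broker):

# Sketch: invoking the renamed task by its registered name.
from osism.tasks.conductor import app

app.send_task("osism.tasks.conductor.sync_ironic", kwargs={"force_update": True})
# "osism.tasks.conductor.sync_netbox_with_ironic" no longer exists in this release;
# "osism.tasks.conductor.sync_netbox" is registered but is only a stub for now.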
osism/tasks/reconciler.py CHANGED
@@ -6,6 +6,7 @@ import subprocess
 from celery import Celery
 from loguru import logger
 from pottery import Redlock
+
 from osism import settings, utils
 from osism.tasks import Config

@@ -15,9 +16,14 @@ app.config_from_object(Config)

 @app.on_after_configure.connect
 def setup_periodic_tasks(sender, **kwargs):
-    sender.add_periodic_task(
-        settings.INVENTORY_RECONCILER_SCHEDULE, run_on_change.s(), expires=10
+    lock = Redlock(
+        key="lock_osism_tasks_reconciler_setup_periodic_tasks",
+        masters={utils.redis},
     )
+    if settings.INVENTORY_RECONCILER_SCHEDULE > 0 and lock.acquire(timeout=10):
+        sender.add_periodic_task(
+            settings.INVENTORY_RECONCILER_SCHEDULE, run_on_change.s(), expires=10
+        )


 @app.task(bind=True, name="osism.tasks.reconciler.run")
osism-0.20250525.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: osism
-Version: 0.20250514.0
+Version: 0.20250525.0
 Summary: OSISM manager interface
 Home-page: https://github.com/osism/python-osism
 Author: OSISM GmbH
@@ -28,15 +28,15 @@ Requires-Dist: Jinja2==3.1.6
 Requires-Dist: PyYAML==6.0.2
 Requires-Dist: ara==1.7.2
 Requires-Dist: celery[redis]==5.5.2
-Requires-Dist: cliff==4.9.1
+Requires-Dist: cliff==4.10.0
 Requires-Dist: deepdiff==8.5.0
 Requires-Dist: docker==7.1.0
 Requires-Dist: dtrack-auditor==1.5.0
 Requires-Dist: fastapi==0.115.12
 Requires-Dist: flower==2.0.1
-Requires-Dist: hiredis==3.1.1
+Requires-Dist: hiredis==3.2.1
 Requires-Dist: jc==1.25.5
-Requires-Dist: keystoneauth1==5.10.0
+Requires-Dist: keystoneauth1==5.11.0
 Requires-Dist: kombu==5.5.3
 Requires-Dist: kubernetes==32.0.1
 Requires-Dist: loguru==0.7.3
@@ -45,20 +45,19 @@ Requires-Dist: netmiko==4.5.0
 Requires-Dist: openstacksdk==4.5.0
 Requires-Dist: pottery==3.0.1
 Requires-Dist: prompt-toolkit==3.0.51
-Requires-Dist: pydantic==1.10.22
-Requires-Dist: pynetbox==7.4.1
+Requires-Dist: pynetbox==7.5.0
 Requires-Dist: pytest-testinfra==10.2.2
 Requires-Dist: python-dateutil==2.9.0.post0
-Requires-Dist: setuptools==80.4.0
+Requires-Dist: setuptools==80.8.0
 Requires-Dist: sqlmodel==0.0.24
-Requires-Dist: sushy==5.5.0
+Requires-Dist: sushy==5.6.0
 Requires-Dist: tabulate==0.9.0
 Requires-Dist: transitions==0.9.2
 Requires-Dist: uvicorn[standard]==0.34.2
 Requires-Dist: watchdog==6.0.0
 Provides-Extra: ansible
 Requires-Dist: ansible-runner==2.4.1; extra == "ansible"
-Requires-Dist: ansible-core==2.18.5; extra == "ansible"
+Requires-Dist: ansible-core==2.18.6; extra == "ansible"
 Provides-Extra: openstack-image-manager
 Requires-Dist: openstack-image-manager==0.20250508.0; extra == "openstack-image-manager"
 Dynamic: author
osism-0.20250525.0.dist-info/RECORD CHANGED
@@ -1,20 +1,20 @@
 osism/__init__.py,sha256=1UiNTBus0V0f2AbZQzAtVtu6zkfCCrw0OTq--NwFAqY,341
 osism/__main__.py,sha256=ILe4gu61xEISiBsxanqTQIdSkV-YhpZXTRlguCYyssk,141
-osism/api.py,sha256=_d7KvjvTK3PA-ZcAJKD7kXiSZrEQ8YV8NF7MYYriW3U,4327
+osism/api.py,sha256=t3HebSzk6fyY7bLJD9P95oEL1qWYXzpX6Yk1o_nVkMo,4356
 osism/main.py,sha256=Dt2-9sLXcS-Ny4DAz7hrha-KRc7zd7BFUTRdfs_X8z4,893
-osism/settings.py,sha256=09nVwFNWrx2LY-kw8kDG_gMEG72o49BvU8eqoqgQvuc,1299
+osism/settings.py,sha256=mkvbxVQ64ZD7Ypk-bRePHn0gZ5j6Lcu2a578eLU0gQs,1309
 osism/actions/__init__.py,sha256=bG7Ffen4LvQtgnYPFEpFccsWs81t4zqqeqn9ZeirH6E,38
 osism/commands/__init__.py,sha256=Ag4wX_DCgXRdoLn6t069jqb3DdRylsX2nyYkiyCx4uk,456
 osism/commands/apply.py,sha256=mH3-NctgevVzP_1IW92FQeiYMCPB49K5hXbxmTY2vnA,16795
 osism/commands/compose.py,sha256=iqzG7mS9E1VWaLNN6yQowjOqiHn3BMdj-yfXb3Dc4Ok,1200
-osism/commands/compute.py,sha256=XqOdnTqQ5Yqz__4rXbr64ctfe9Qq342IVFLnKmvjVpI,17226
+osism/commands/compute.py,sha256=cgqXWJa5wAvn-7e3FWCgX6hie_aK0yrKRkcNzjLXwDY,25799
 osism/commands/configuration.py,sha256=sPe8b0dVKFRbr30xoeVdAnHbGwCwgUh0xa_Vzv5pSQQ,954
 osism/commands/console.py,sha256=8BPz1hio5Wi6kONVAWFuSqkDRrMcLEYeFIY8dbtN6e4,3218
 osism/commands/container.py,sha256=Fku2GaCM3Idq_FxExUtNqjrEM0XYjpVvXmueSVO8S_c,1601
 osism/commands/get.py,sha256=ryytjtXWmlMV0NucP5tGkMZu0nIlC4xVtjRk4iMZ06c,8967
 osism/commands/log.py,sha256=2IpYuosC7FZwwLvM8HmKSU1NRNIelVVYzqjjVMCrOJk,4072
 osism/commands/manage.py,sha256=WxUZEhylZj2IhydAe3BAr3S5ED6opG243skfSq5q41s,11971
-osism/commands/netbox.py,sha256=XDJVuoh2pk7XKGAElnxIkJ_v8bZ2glk0FYynVzNnSPE,6059
+osism/commands/netbox.py,sha256=70GjyPYSVH6dOYwEx1vuF5y9CtEE4vOiH_UNSYuqpjc,6646
 osism/commands/noset.py,sha256=7zDFuFMyNpo7DUOKcNiYV8nodtdMOYFp5LDPcuJhlZ8,1481
 osism/commands/reconciler.py,sha256=xOyPzQj66xwjdQd2ysCTHX2yBvmMVMppUDZTas6voXc,2882
 osism/commands/server.py,sha256=avmoOv5rjOi-fN2A-27cPwOtiy2Q2j6UFtCh3QrfWAI,7512
@@ -36,20 +36,20 @@ osism/plugins/__init__.py,sha256=bG7Ffen4LvQtgnYPFEpFccsWs81t4zqqeqn9ZeirH6E,38
 osism/services/__init__.py,sha256=bG7Ffen4LvQtgnYPFEpFccsWs81t4zqqeqn9ZeirH6E,38
 osism/services/listener.py,sha256=eEamlQsJqCuU9K2QFmk3yM9LAJZEanVcTLtGMsNCKjs,9783
 osism/tasks/__init__.py,sha256=ZEu_KYsapTYp0etr-rLqie_NT_LndHDDpx53xITru5Y,8691
-osism/tasks/ansible.py,sha256=RcLxLrjzL5_X6OjNHm3H0lZlmKKlYKIANB0M4_d4chE,1109
+osism/tasks/ansible.py,sha256=_2zrHwynwwEv9nDnX-LbNCzcwy9dTUGo_yyutt34HyQ,1346
 osism/tasks/ceph.py,sha256=eIQkah3Kj4INtOkF9kTjHbXJ3_J2lg48EWJKfHc-UYw,615
-osism/tasks/conductor.py,sha256=Rxx8LrHVMksVpywpzi-av8Nj8qiBTmOFG9P_ksUe6SE,19477
+osism/tasks/conductor.py,sha256=nUaP9lQyfqKbtt--uDVbyiJMbmNx_vI7lYqYzSbLH4E,19599
 osism/tasks/kolla.py,sha256=wJQpWn_01iWLkr7l7T7RNrQGfRgsgmYi4WQlTmNGvew,618
 osism/tasks/kubernetes.py,sha256=VzXq_VrYU_CLm4cOruqnE3Kq2ydfO9glZ3p0bp3OYoc,625
 osism/tasks/netbox.py,sha256=QVOLiTH2Su237YAS0QfXbQ86E-OA1JzrFDfyi9JBmvk,5658
 osism/tasks/openstack.py,sha256=g15tCll5vP1pC6ysxRCTZxplsdGmXbxaCH3k1Qdv5Xg,6367
-osism/tasks/reconciler.py,sha256=b6IRJBYvG_lRlQ6cQ46jSZJPNJeTI7igaCJ_7AzgIDQ,1767
+osism/tasks/reconciler.py,sha256=tnZEZZpveBCK4vHZkHE6wDcHfJAlsPcSjIVxB5ItSFM,1981
 osism/utils/__init__.py,sha256=_Y4qchR5yyI_JKhBWd_jcsvDLYZjxO0c3iMA_VRQl58,4304
-osism-0.20250514.0.dist-info/licenses/AUTHORS,sha256=oWotd63qsnNR945QLJP9mEXaXNtCMaesfo8ZNuLjwpU,39
-osism-0.20250514.0.dist-info/licenses/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
-osism-0.20250514.0.dist-info/METADATA,sha256=xH8NKJUuInnfv4yHUIA00DMY_SGxYjaqXvDt4_OW-Jo,2935
-osism-0.20250514.0.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
-osism-0.20250514.0.dist-info/entry_points.txt,sha256=IEmaQFJyKGN4mtNTLf9ogw7RXjEma4mSOUy_CEC4wzA,2975
-osism-0.20250514.0.dist-info/pbr.json,sha256=PlM5t0PTJqXXvUbF4gDXHHJ-HyaeT0oUE17z93Yqg7A,47
-osism-0.20250514.0.dist-info/top_level.txt,sha256=8L8dsI9hcaGHsdnR4k_LN9EM78EhwrXRFHyAryPXZtY,6
-osism-0.20250514.0.dist-info/RECORD,,
+osism-0.20250525.0.dist-info/licenses/AUTHORS,sha256=EKFIR9F27AvoEXp1cA6FkGbjEOFt4Rcbipr5RJc7jSs,64
+osism-0.20250525.0.dist-info/licenses/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
+osism-0.20250525.0.dist-info/METADATA,sha256=fLU5Yf-qJPuZHU_QTKcNWqzKUjISG7yOVM9qKonpO2A,2903
+osism-0.20250525.0.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+osism-0.20250525.0.dist-info/entry_points.txt,sha256=NR2buBhAPM7jc94gdtM_-kEwSIBz0l9x0nUg3fElybc,3051
+osism-0.20250525.0.dist-info/pbr.json,sha256=xuXy9_UHMyU8zzNeRX1aaXpOYUbOci-0c7GrNIbGlmE,47
+osism-0.20250525.0.dist-info/top_level.txt,sha256=8L8dsI9hcaGHsdnR4k_LN9EM78EhwrXRFHyAryPXZtY,6
+osism-0.20250525.0.dist-info/RECORD,,
osism-0.20250525.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.4.0)
+Generator: setuptools (80.8.0)
 Root-Is-Purelib: true
 Tag: py3-none-any

osism-0.20250525.0.dist-info/entry_points.txt CHANGED
@@ -31,6 +31,7 @@ manage compute enable = osism.commands.compute:ComputeEnable
 manage compute evacuate = osism.commands.compute:ComputeEvacuate
 manage compute list = osism.commands.compute:ComputeList
 manage compute migrate = osism.commands.compute:ComputeMigrate
+manage compute migration list = osism.commands.compute:ComputeMigrationList
 manage compute start = osism.commands.compute:ComputeStart
 manage compute stop = osism.commands.compute:ComputeStop
 manage flavors = osism.commands.manage:Flavors
osism-0.20250525.0.dist-info/licenses/AUTHORS ADDED
@@ -0,0 +1 @@
+renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

osism-0.20250525.0.dist-info/pbr.json ADDED
@@ -0,0 +1 @@
+{"git_version": "3c67308", "is_release": false}

osism-0.20250514.0.dist-info/licenses/AUTHORS REMOVED
@@ -1 +0,0 @@
-Christian Berendt <berendt@osism.tech>

osism-0.20250514.0.dist-info/pbr.json REMOVED
@@ -1 +0,0 @@
-{"git_version": "6bd0c0e", "is_release": false}