osi-dump 0.1 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- osi_dump/__init__.py +0 -0
- osi_dump/__main__.py +4 -0
- osi_dump/batch_handler/__init__.py +5 -0
- osi_dump/batch_handler/floating_ip_batch_handler.py +57 -0
- osi_dump/batch_handler/hypervisor_batch_handler.py +55 -0
- osi_dump/batch_handler/instance_batch_handler.py +54 -0
- osi_dump/batch_handler/project_batch_handler.py +51 -0
- osi_dump/batch_handler/volume_batch_handler.py +45 -0
- osi_dump/cli.py +156 -0
- osi_dump/exporter/__init__.py +0 -0
- osi_dump/exporter/floating_ip/__init__.py +0 -0
- osi_dump/exporter/floating_ip/excel_floating_ip_exporter.py +30 -0
- osi_dump/exporter/floating_ip/floating_ip_exporter.py +7 -0
- osi_dump/exporter/hypervisor/__init__.py +0 -0
- osi_dump/exporter/hypervisor/excel_hypervisor_exporter.py +30 -0
- osi_dump/exporter/hypervisor/hypervisor_exporter.py +7 -0
- osi_dump/exporter/instance/__init__.py +0 -0
- osi_dump/exporter/instance/excel_instance_exporter.py +29 -0
- osi_dump/exporter/instance/instance_exporter.py +7 -0
- osi_dump/exporter/project/__init__.py +0 -0
- osi_dump/exporter/project/excel_project_exporter.py +30 -0
- osi_dump/exporter/project/project_exporter.py +7 -0
- osi_dump/exporter/volume/__init__.py +0 -0
- osi_dump/exporter/volume/excel_volume_exporter.py +29 -0
- osi_dump/exporter/volume/volume_exporter.py +7 -0
- osi_dump/importer/floating_ip/__init__.py +0 -0
- osi_dump/importer/floating_ip/floating_ip_importer.py +9 -0
- osi_dump/importer/floating_ip/openstack_floating_ip_importer.py +68 -0
- osi_dump/importer/hypervisor/__init__.py +0 -0
- osi_dump/importer/hypervisor/hypervisor_importer.py +9 -0
- osi_dump/importer/hypervisor/openstack_hypervisor_importer.py +79 -0
- osi_dump/importer/instance/__init__.py +0 -0
- osi_dump/importer/instance/instance_importer.py +9 -0
- osi_dump/importer/instance/openstack_instance_importer.py +107 -0
- osi_dump/importer/project/__init__.py +0 -0
- osi_dump/importer/project/openstack_project_importer.py +60 -0
- osi_dump/importer/project/project_importer.py +9 -0
- osi_dump/importer/volume/__init__.py +0 -0
- osi_dump/importer/volume/openstack_volume_importer.py +64 -0
- osi_dump/importer/volume/volume_importer.py +9 -0
- osi_dump/model/__init__.py +0 -0
- osi_dump/model/authentication_info.py +12 -0
- osi_dump/model/floating_ip.py +24 -0
- osi_dump/model/hypervisor.py +12 -0
- osi_dump/model/instance.py +20 -0
- osi_dump/model/project.py +13 -0
- osi_dump/model/volume.py +21 -0
- osi_dump/os_connection/__init__.py +0 -0
- osi_dump/os_connection/get_connections.py +67 -0
- osi_dump/util/__init__.py +6 -0
- osi_dump/util/create_file.py +11 -0
- osi_dump/util/excel_autosize_column.py +39 -0
- osi_dump/util/excel_sort_sheet.py +35 -0
- osi_dump/util/export_data_excel.py +36 -0
- osi_dump/util/extract_hostname.py +5 -0
- osi_dump/util/validate_dir_path.py +20 -0
- osi_dump-0.1.dist-info/METADATA +39 -0
- osi_dump-0.1.dist-info/RECORD +61 -0
- osi_dump-0.1.dist-info/WHEEL +5 -0
- osi_dump-0.1.dist-info/entry_points.txt +2 -0
- osi_dump-0.1.dist-info/top_level.txt +1 -0
osi_dump/importer/floating_ip/openstack_floating_ip_importer.py
ADDED
@@ -0,0 +1,68 @@
```python
import logging

import concurrent

from openstack.connection import Connection
from openstack.network.v2.floating_ip import FloatingIP as OSFloatingIP

from osi_dump.importer.floating_ip.floating_ip_importer import FloatingIPImporter
from osi_dump.model.floating_ip import FloatingIP

logger = logging.getLogger(__name__)


class OpenStackFloatingIPImporter(FloatingIPImporter):
    def __init__(self, connection: Connection):
        self.connection = connection

    def import_floating_ips(self) -> list[FloatingIP]:
        """Import instances information from Openstack

        Raises:
            Exception: Raises exception if fetching server failed

        Returns:
            list[Instance]: _description_
        """

        logger.info(f"Importing floating ips for {self.connection.auth['auth_url']}")

        try:
            osfloating_ips: list[OSFloatingIP] = list(
                self.connection.list_floating_ips()
            )
        except Exception as e:
            raise Exception(
                f"Can not fetch floating IPs for {self.connection.auth['auth_url']}"
            ) from e

        floating_ips: list[FloatingIP] = []

        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = [
                executor.submit(self._get_floating_ip_info, osfloating_ip)
                for osfloating_ip in osfloating_ips
            ]
            for future in concurrent.futures.as_completed(futures):
                floating_ips.append(future.result())

        logger.info(f"Imported floating ips for {self.connection.auth['auth_url']}")

        return floating_ips

    def _get_floating_ip_info(self, floating_ip: OSFloatingIP) -> FloatingIP:

        ret_floating_ip = FloatingIP(
            floating_ip_id=floating_ip.id,
            project_id=floating_ip.project_id,
            floating_ip_address=floating_ip.floating_ip_address,
            floating_network=floating_ip.floating_network_id,
            fixed_ip_address=floating_ip.fixed_ip_address,
            router_id=floating_ip.router_id,
            port_id=floating_ip.port_id,
            status=floating_ip.status,
            created_at=floating_ip.created_at,
            updated_at=floating_ip.updated_at,
        )

        return ret_floating_ip
```
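For orientation, a minimal usage sketch of an importer on its own. This is an assumed workflow, not taken from the package's `cli.py` (which is not shown here); `"mycloud"` is a placeholder `clouds.yaml` entry.

```python
# Hypothetical usage sketch; the real wiring lives in osi_dump/cli.py.
import openstack

from osi_dump.importer.floating_ip.openstack_floating_ip_importer import (
    OpenStackFloatingIPImporter,
)

# openstack.connect() reads clouds.yaml / OS_* environment variables.
conn = openstack.connect(cloud="mycloud")  # placeholder cloud name

importer = OpenStackFloatingIPImporter(conn)
floating_ips = importer.import_floating_ips()  # list of pydantic FloatingIP models

for fip in floating_ips:
    print(fip.floating_ip_address, fip.status)
```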
osi_dump/importer/hypervisor/openstack_hypervisor_importer.py
ADDED
@@ -0,0 +1,79 @@
```python
import logging

import concurrent

from openstack.connection import Connection
from openstack.compute.v2.hypervisor import Hypervisor as OSHypervisor

from openstack.placement.v1._proxy import Proxy as PlacementProxy
from openstack.placement.v1.resource_provider_inventory import ResourceProviderInventory

from osi_dump.importer.hypervisor.hypervisor_importer import HypervisorImporter
from osi_dump.model.hypervisor import Hypervisor

logger = logging.getLogger(__name__)


class OpenStackHypervisorImporter(HypervisorImporter):
    def __init__(self, connection: Connection):
        self.connection = connection

    def import_hypervisors(self) -> list[Hypervisor]:
        """Import hypervisors information from Openstack

        Raises:
            Exception: Raises exception if fetching hypervisor failed

        Returns:
            list[Hypervisor]: _description_
        """

        logger.info(f"Importing hypervisors for {self.connection.auth['auth_url']}")

        try:
            oshypervisors: list[OSHypervisor] = list(self.connection.list_hypervisors())
        except Exception as e:
            raise Exception(
                f"Can not fetch hypervisor for {self.connection.auth['auth_url']}"
            ) from e

        hypervisors: list[Hypervisor] = []

        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = [
                executor.submit(self._get_hypervisor_info, hypervisor)
                for hypervisor in oshypervisors
            ]
            for future in concurrent.futures.as_completed(futures):
                hypervisors.append(future.result())

        logger.info(f"Imported hypervisors for {self.connection.auth['auth_url']}")

        return hypervisors

    def _get_hypervisor_info(self, hypervisor: OSHypervisor) -> Hypervisor:

        placement_proxy: PlacementProxy = self.connection.placement

        rpi: ResourceProviderInventory = list(
            placement_proxy.resource_provider_inventories(
                resource_provider=hypervisor.id
            )
        )

        vcpu = rpi[0]
        memory = rpi[1]
        disk = rpi[2]

        ret_hypervisor = Hypervisor(
            hypervisor_id=hypervisor.id,
            hypervisor_type=hypervisor.hypervisor_type,
            name=hypervisor.name,
            state=hypervisor.state,
            status=hypervisor.status,
            local_disk_size=disk["max_unit"],
            memory_size=memory["max_unit"] + memory["reserved"],
            vpus=vcpu["max_unit"],
        )

        return ret_hypervisor
```
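Note that `_get_hypervisor_info` indexes the placement inventories positionally (`rpi[0]` as VCPU, `rpi[1]` as MEMORY_MB, `rpi[2]` as DISK_GB). A sketch of an alternative that keys the records by resource class instead, assuming the SDK's `ResourceProviderInventory` objects expose a `resource_class` attribute:

```python
from openstack.connection import Connection


def inventories_by_class(connection: Connection, provider_id: str) -> dict:
    """Map placement inventories by resource class, e.g. 'VCPU', 'MEMORY_MB', 'DISK_GB'.

    Assumes each inventory record carries a resource_class attribute.
    """
    return {
        inv.resource_class: inv
        for inv in connection.placement.resource_provider_inventories(
            resource_provider=provider_id
        )
    }


# e.g. inventories_by_class(conn, hypervisor_id)["VCPU"]["max_unit"]
```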
osi_dump/importer/instance/openstack_instance_importer.py
ADDED
@@ -0,0 +1,107 @@
```python
import logging

import concurrent

from openstack.connection import Connection
from openstack.compute.v2.server import Server

from osi_dump.importer.instance.instance_importer import InstanceImporter
from osi_dump.model.instance import Instance

logger = logging.getLogger(__name__)


class OpenStackInstanceImporter(InstanceImporter):
    def __init__(self, connection: Connection):
        self.connection = connection

    def import_instances(self) -> list[Instance]:
        """Import instances information from Openstack

        Raises:
            Exception: Raises exception if fetching server failed

        Returns:
            list[Instance]: _description_
        """

        logger.info(f"Importing instances for {self.connection.auth['auth_url']}")

        try:
            servers: list[Server] = list(
                self.connection.compute.servers(details=True, all_projects=True)
            )
        except Exception as e:
            raise Exception(
                f"Can not fetch instances for {self.connection.auth['auth_url']}"
            ) from e

        instances: list[Instance] = []

        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = [
                executor.submit(self._get_instance_info, server) for server in servers
            ]
            for future in concurrent.futures.as_completed(futures):
                instances.append(future.result())

        logger.info(f"Imported instances for {self.connection.auth['auth_url']}")

        return instances

    def _get_instance_info(self, server: Server) -> Instance:

        # Get project information
        try:
            project = self.connection.identity.get_project(server.project_id)
            project_name = project.name
            project_id = project.id
        except Exception as e:
            logger.warn(
                f"Unable to obtain project name for instance: {server.name}: {e}"
            )
            project_name = None
            project_id = None

        # Get domain information
        try:
            domain = self.connection.identity.get_domain(project.domain_id)
            domain_name = domain.name
        except Exception as e:
            logger.warning(
                f"Unable to obtain domain name for instance {server.name}: {e}"
            )
            domain_name = None

        # Get private IPv4 information
        private_v4_ips = []
        floating_ip = None

        try:
            for ips in server.addresses.values():
                for ip in ips:
                    if ip["OS-EXT-IPS:type"] == "fixed":
                        private_v4_ips.append(ip["addr"])
                    elif ip["OS-EXT-IPS:type"] == "floating":
                        floating_ip = ip["addr"]
        except Exception as e:
            logger.warning(
                f"Unable to obtain IP address information for instance {server.name}: {e}"
            )

        instance = Instance(
            instance_id=server.id,
            instance_name=server.name,
            project_id=project_id,
            project_name=project_name,
            domain_name=domain_name,
            private_v4_ips=private_v4_ips,
            floating_ip=floating_ip,
            status=server.status,
            ram=server.flavor["ram"],
            vcpus=server.flavor["vcpus"],
            created_at=server.created_at,
            updated_at=server.updated_at,
        )

        return instance
```
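For reference, the `server.addresses` mapping walked in `_get_instance_info` has the shape below; the network name and IPs are illustrative values, not data from this package.

```python
# Illustrative shape of server.addresses as returned by the Compute API.
addresses = {
    "private-net": [
        {"addr": "10.0.0.5", "OS-EXT-IPS:type": "fixed", "version": 4},
        {"addr": "203.0.113.10", "OS-EXT-IPS:type": "floating", "version": 4},
    ],
}
```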
osi_dump/importer/project/openstack_project_importer.py
ADDED
@@ -0,0 +1,60 @@
```python
import logging

import concurrent

from openstack.connection import Connection
from openstack.identity.v3.project import Project as OSProject

from osi_dump.importer.project.project_importer import ProjectImporter
from osi_dump.model.project import Project

logger = logging.getLogger(__name__)


class OpenStackProjectImporter(ProjectImporter):
    def __init__(self, connection: Connection):
        self.connection = connection

    def import_projects(self) -> list[Project]:
        """Import projects information from Openstack

        Raises:
            Exception: Raises exception if fetching project failed

        Returns:
            list[Instance]: _description_
        """

        logger.info(f"Importing projects for {self.connection.auth['auth_url']}")

        try:
            osprojects: list[OSProject] = list(self.connection.identity.projects())
        except Exception as e:
            raise Exception(
                f"Can not fetch projects for {self.connection.auth['auth_url']}"
            ) from e

        projects: list[Project] = []

        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = [
                executor.submit(self._get_project_info, project)
                for project in osprojects
            ]
            for future in concurrent.futures.as_completed(futures):
                projects.append(future.result())

        logger.info(f"Imported projects for {self.connection.auth['auth_url']}")

        return projects

    def _get_project_info(self, project: OSProject) -> Project:
        project_ret = Project(
            project_id=project.id,
            project_name=project.name,
            domain_id=project.domain_id,
            enabled=project.is_enabled,
            parent_id=project.parent_id,
        )

        return project_ret
```
osi_dump/importer/volume/openstack_volume_importer.py
ADDED
@@ -0,0 +1,64 @@
```python
import logging

import concurrent

from openstack.connection import Connection

from openstack.block_storage.v3.volume import Volume as OSVolume

from osi_dump.importer.volume.volume_importer import VolumeImporter
from osi_dump.model.volume import Volume

logger = logging.getLogger(__name__)


class OpenStackVolumeImporter(VolumeImporter):
    def __init__(self, connection: Connection):
        self.connection = connection

    def import_volumes(self) -> list[Volume]:
        """Import hypervisors information from Openstack

        Raises:
            Exception: Raises exception if fetching hypervisor failed

        Returns:
            list[Hypervisor]: _description_
        """

        logger.info(f"Importing volumes for {self.connection.auth['auth_url']}")

        try:
            osvolumes: list[OSVolume] = list(self.connection.list_volumes())
        except Exception as e:
            raise Exception(
                f"Can not fetch volumes for {self.connection.auth['auth_url']}"
            ) from e

        volumes: list[Volume] = []

        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = [
                executor.submit(self._get_volume_info, volume) for volume in osvolumes
            ]
            for future in concurrent.futures.as_completed(futures):
                volumes.append(future.result())

        logger.info(f"Imported volumes for {self.connection.auth['auth_url']}")

        return volumes

    def _get_volume_info(self, volume: OSVolume) -> Volume:

        ret_volume = Volume(
            volume_id=volume.id,
            project_id=volume.project_id,
            status=volume.status,
            attachments=[att["server_id"] for att in volume.attachments],
            type=volume.volume_type,
            size=volume.size,
            updated_at=volume.updated_at,
            created_at=volume.created_at,
        )

        return ret_volume
```
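For reference, each entry of `volume.attachments` is a dict from the Block Storage API and `_get_volume_info` keeps only its `server_id`. The values below are made up for illustration.

```python
# Illustrative shape of volume.attachments; IDs and device are placeholders.
attachments = [
    {
        "server_id": "2f1f0d8a-1111-2222-3333-444444444444",
        "attachment_id": "c3a6a3ee-5555-6666-7777-888888888888",
        "device": "/dev/vdb",
    },
]
```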
osi_dump/model/floating_ip.py
ADDED
@@ -0,0 +1,24 @@
```python
from typing import Optional

from pydantic import BaseModel, ConfigDict, ValidationError


class FloatingIP(BaseModel):
    model_config = ConfigDict(strict=True)

    floating_ip_id: str
    project_id: Optional[str]

    floating_network: str

    floating_ip_address: str

    fixed_ip_address: Optional[str]

    router_id: Optional[str]

    port_id: Optional[str]

    status: str
    created_at: str
    updated_at: str
```
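Because the models set `ConfigDict(strict=True)`, pydantic v2 performs no type coercion: the importers must hand over values that already match the declared types (for example `created_at`/`updated_at` as strings). A small illustration with placeholder values:

```python
from pydantic import ValidationError

from osi_dump.model.floating_ip import FloatingIP

try:
    FloatingIP(
        floating_ip_id="fip-1",
        project_id=None,
        floating_network="net-1",
        floating_ip_address="203.0.113.10",
        fixed_ip_address=None,
        router_id=None,
        port_id=None,
        status="ACTIVE",
        created_at=1700000000,  # int where str is declared -> rejected in strict mode
        updated_at="2024-01-01T00:00:00Z",
    )
except ValidationError as exc:
    print(exc.error_count(), "validation error(s)")
```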
osi_dump/model/instance.py
ADDED
@@ -0,0 +1,20 @@
```python
from typing import Optional

from pydantic import BaseModel, ConfigDict, ValidationError


class Instance(BaseModel):
    model_config = ConfigDict(strict=True)

    instance_id: str
    instance_name: Optional[str]
    project_id: Optional[str]
    project_name: Optional[str]
    domain_name: Optional[str]
    private_v4_ips: Optional[list[str]]
    floating_ip: Optional[str]
    status: str
    ram: int
    vcpus: int
    created_at: str
    updated_at: str
```
osi_dump/model/project.py
ADDED
@@ -0,0 +1,13 @@
```python
from typing import Optional

from pydantic import BaseModel, ConfigDict, ValidationError


class Project(BaseModel):
    model_config = ConfigDict(strict=True)

    project_id: str
    project_name: Optional[str]
    domain_id: Optional[str]
    enabled: bool
    parent_id: Optional[str]
```
osi_dump/model/volume.py
ADDED
@@ -0,0 +1,21 @@
```python
from typing import Optional

from pydantic import BaseModel, ConfigDict, ValidationError


class Volume(BaseModel):
    model_config = ConfigDict(strict=True)

    volume_id: str

    project_id: Optional[str]

    attachments: Optional[list[str]]

    status: str

    type: str
    size: int

    updated_at: str
    created_at: str
```
osi_dump/os_connection/get_connections.py
ADDED
@@ -0,0 +1,67 @@
```python
import json
import logging

from pydantic import ValidationError

import typer

import openstack
from openstack.connection import Connection

from osi_dump.model.authentication_info import AuthenticationInfo


logger = logging.getLogger(__name__)

TIMEOUT_SECOND = 30


def get_connections(file_path) -> list[Connection]:
    auths = _parse_authentication_info(file_path=file_path)

    logger.info("Getting connections")

    connections = []
    auth_urls = []

    for auth in auths:
        try:
            connection = openstack.connect(
                auth_url=auth.auth_url,
                project_name=auth.project_name,
                username=auth.username,
                password=auth.password,
                project_domain_name=auth.project_domain_name,
                user_domain_name=auth.user_domain_name,
                timeout=TIMEOUT_SECOND,
            )

            connections.append(connection)
            auth_urls.append(auth.auth_url)
        except Exception as e:
            logger.warning(f"Skipping {auth.auth_url}... error: {e}")
            pass

    logger.info("Established connection success with: ")
    for auth_url in auth_urls:
        logger.info(f"{auth_url}")

    return connections


def _parse_authentication_info(file_path: str) -> list[AuthenticationInfo]:

    with open(file_path, "r") as file:
        objects = json.load(file)
        if not isinstance(objects, list) or len(objects) == 0:
            raise ValueError(
                "The JSON file must contain a list with at least one object."
            )

    try:
        ret = [AuthenticationInfo.model_validate(obj) for obj in objects]
    except ValidationError as e:
        logger.error(e.errors())
        raise typer.Exit(1)

    return ret
```
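The file consumed by `get_connections()` is a JSON list of credential objects. The field names below are inferred from the `openstack.connect()` call above; the `AuthenticationInfo` model itself (`osi_dump/model/authentication_info.py`) is not shown in this section, and all values are placeholders.

```python
# Placeholder credentials illustrating the expected file shape; written here as a
# Python literal, but on disk it is plain JSON parsed with json.load().
example_auth_file = [
    {
        "auth_url": "https://keystone.example.com:5000/v3",
        "project_name": "admin",
        "username": "admin",
        "password": "secret",
        "project_domain_name": "Default",
        "user_domain_name": "Default",
    }
]
```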
osi_dump/util/__init__.py
ADDED
@@ -0,0 +1,6 @@
```python
from .excel_autosize_column import excel_autosize_column
from .extract_hostname import extract_hostname
from .create_file import create_file
from .export_data_excel import export_data_excel
from .excel_sort_sheet import excel_sort_sheet
from .validate_dir_path import validate_dir_path
```
osi_dump/util/excel_autosize_column.py
ADDED
@@ -0,0 +1,39 @@
```python
from openpyxl import load_workbook


def excel_autosize_column(file_path: str):
    """Auto size column for all sheets in excel file

    Args:
        file_path (str): Path to excel file

    Raises:
        e: Exception when loading workbook
    """
    try:
        wb = load_workbook(file_path)
    except Exception as e:
        raise e

    # Iterate through all sheets
    for sheet_name in wb.sheetnames:
        ws = wb[sheet_name]

        # Auto-size columns
        for col in ws.columns:
            max_length = 0
            column = col[0].column_letter  # Get the column name (e.g., 'A')
            for cell in col:
                try:
                    if cell.value:
                        cell_length = len(str(cell.value))
                        if cell_length > max_length:
                            max_length = cell_length
                except:
                    pass
            adjusted_width = max_length + 2  # Adjust as needed for padding
            ws.column_dimensions[column].width = adjusted_width

    # Save the modified workbook

    wb.save(file_path)
```
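A usage sketch: the function rewrites the workbook in place, so it would typically be called after an export step. The path is a placeholder.

```python
# "report.xlsx" is a placeholder workbook path.
from osi_dump.util import excel_autosize_column

excel_autosize_column("report.xlsx")
```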