service-client-utils 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- service_client_utils-0.1.0/PKG-INFO +6 -0
- service_client_utils-0.1.0/README.md +66 -0
- service_client_utils-0.1.0/pyproject.toml +18 -0
- service_client_utils-0.1.0/service_client_utils/astra_sim.py +241 -0
- service_client_utils-0.1.0/service_client_utils/astra_sim_client.py +198 -0
- service_client_utils-0.1.0/service_client_utils/common.py +319 -0
- service_client_utils-0.1.0/service_client_utils/config_to_schema.py +411 -0
- service_client_utils-0.1.0/service_client_utils.egg-info/PKG-INFO +6 -0
- service_client_utils-0.1.0/service_client_utils.egg-info/SOURCES.txt +11 -0
- service_client_utils-0.1.0/service_client_utils.egg-info/dependency_links.txt +1 -0
- service_client_utils-0.1.0/service_client_utils.egg-info/requires.txt +3 -0
- service_client_utils-0.1.0/service_client_utils.egg-info/top_level.txt +1 -0
- service_client_utils-0.1.0/setup.cfg +4 -0
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
# Client Scripts
|
|
2
|
+
|
|
3
|
+
This directory contains the client-scripts, which include utility modules and sample Jupyter notebooks for interacting with different simulation backends.
|
|
4
|
+
|
|
5
|
+
## Directory Structure
|
|
6
|
+
|
|
7
|
+
- **notebooks/**
|
|
8
|
+
Contains sample notebooks for all supported backends. The notebooks are present in the notebooks folder and are as follows:
|
|
9
|
+
|
|
10
|
+
| Notebook Path | Description |
|
|
11
|
+
|----------------------------------------|-------------|
|
|
12
|
+
| analytical_congestion_aware_sample.ipynb | Simulation using analytical congestion-aware backend |
|
|
13
|
+
| analytical_congestion_unaware_sample.ipynb | Simulation using analytical congestion-unaware backend |
|
|
14
|
+
| config_to_schema_sample.ipynb | Config files to Astra-sim schema conversion |
|
|
15
|
+
| htsim_sample.ipynb | htsim backend simulation |
|
|
16
|
+
| load_existing_et_example.ipynb | Loads execution trace for ns-3 simulation |
|
|
17
|
+
| ns3_sample.ipynb | ns-3 backend simulation |
|
|
18
|
+
|
|
19
|
+
- **notebooks/infragraph/**
|
|
20
|
+
Contains the Infragraph notebook for the NS3 backend.
|
|
21
|
+
Users can build fabrics using Infragraph and execute corresponding NS3 simulations.
|
|
22
|
+
| Notebook Path | Description |
|
|
23
|
+
|----------------------------------------------|-------------|
|
|
24
|
+
| infragraph/htsim_clos_fabric_2tier.ipynb | htsim simulation: 2-tier Clos fabric |
|
|
25
|
+
| infragraph/htsim_clos_fabric_3tier.ipynb | htsim simulation: 3-tier Clos fabric |
|
|
26
|
+
| infragraph/ns3_clos_fabric_2tier.ipynb | ns-3 simulation: 2-tier Clos fabric |
|
|
27
|
+
| infragraph/ns3_clos_fabric_3tier.ipynb | ns-3 simulation: 3-tier Clos fabric |
|
|
28
|
+
| infragraph/ns3_infragraph_sample_dgx_device.ipynb | ns-3 simulation: single-tier fabric with DGX devices |
|
|
29
|
+
| infragraph/ns3_infragraph_sample_generic_devices.ipynb | ns-3 simulation: single-tier fabric with generic devices |
|
|
30
|
+
|
|
31
|
+
## Notebook Sections
|
|
32
|
+
|
|
33
|
+
Each notebook follows a structured workflow, divided into these main sections:
|
|
34
|
+
|
|
35
|
+
1. **Importing the utilities**
|
|
36
|
+
Load the helper modules required for client-side interactions.
|
|
37
|
+
|
|
38
|
+
2. **Creating the AstraSim object**
|
|
39
|
+
The user initializes an AstraSim object by connecting to the service using its IP address and port number and assigning a tag for identification.
|
|
40
|
+
|
|
41
|
+
3. **Creating configurations with the SDK**
|
|
42
|
+
The AstraSim object contains a configuration object that allows defining both the AstraSim and Infragraph configurations.
|
|
43
|
+
Users can either upload their existing execution traces or generate workloads by specifying:
|
|
44
|
+
- The target collective operation
|
|
45
|
+
- The data size
|
|
46
|
+
- The NPU range `[0, n]`
|
|
47
|
+
The repo uses MLCommons Chakra to create an execution trace for the specified NPU range.
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
This enables flexible workload generation tailored to various simulation setups.
|
|
51
|
+
|
|
52
|
+
4. **Running the simulation**
|
|
53
|
+
The simulation is triggered using a single function that abstracts multiple backend API calls.
|
|
54
|
+
This operation handles the following automatically:
|
|
55
|
+
- Uploading the workload
|
|
56
|
+
- Setting the configuration
|
|
57
|
+
- Running the simulation
|
|
58
|
+
- Polling the simulation status
|
|
59
|
+
|
|
60
|
+
Once the status is marked as completed, the tool downloads the generated result files.
|
|
61
|
+
At present, a basic NS3 translator is available for interpreting these output files.
|
|
62
|
+
|
|
63
|
+
## Notes on Tags
|
|
64
|
+
|
|
65
|
+
Tags are identifiers used to distinguish simulation runs or client instances.
|
|
66
|
+
They help organize configurations and manage simulation results efficiently.
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=77", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "service_client_utils"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
dependencies = [
|
|
9
|
+
"networkx",
|
|
10
|
+
"pandas",
|
|
11
|
+
"grpcio",
|
|
12
|
+
]
|
|
13
|
+
|
|
14
|
+
[tool.setuptools]
|
|
15
|
+
packages = ["service_client_utils",]
|
|
16
|
+
|
|
17
|
+
[tool.setuptools.package-dir]
|
|
18
|
+
service_client_utils = "service_client_utils"
|
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MIT License
|
|
3
|
+
|
|
4
|
+
Copyright (c) 2025 Keysight Technologies
|
|
5
|
+
|
|
6
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
7
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
8
|
+
in the Software without restriction, including without limitation the rights
|
|
9
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
10
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
11
|
+
furnished to do so, subject to the following conditions:
|
|
12
|
+
|
|
13
|
+
The above copyright notice and this permission notice shall be included in all
|
|
14
|
+
copies or substantial portions of the Software.
|
|
15
|
+
|
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
17
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
18
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
19
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
20
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
21
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
22
|
+
SOFTWARE.
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
import os
|
|
26
|
+
import time
|
|
27
|
+
import grpc
|
|
28
|
+
from enum import Enum
|
|
29
|
+
import pandas as pd
|
|
30
|
+
|
|
31
|
+
from chakra.schema.protobuf.et_def_pb2 import (
|
|
32
|
+
Node as ChakraNode,
|
|
33
|
+
BoolList,
|
|
34
|
+
GlobalMetadata,
|
|
35
|
+
AttributeProto as ChakraAttr,
|
|
36
|
+
COMM_COLL_NODE,
|
|
37
|
+
ALL_REDUCE,
|
|
38
|
+
ALL_TO_ALL,
|
|
39
|
+
BARRIER,
|
|
40
|
+
REDUCE,
|
|
41
|
+
REDUCE_SCATTER,
|
|
42
|
+
GATHER,
|
|
43
|
+
)
|
|
44
|
+
|
|
45
|
+
from service_client_utils.common import Utilities, FileFolderUtils, StatUtil
|
|
46
|
+
|
|
47
|
+
from service_client_utils.astra_sim_client import AstraSimClient
|
|
48
|
+
import astra_sim_sdk.astra_sim_sdk as astra_sim_sdk
|
|
49
|
+
|
|
50
|
+
pd.options.mode.chained_assignment = None
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class Collective(Enum):
    """
    Enum class that holds the collective name to string name.

    Each member maps a collective operation to the lowercase string
    identifier consumed by WorkloadConfiguration.generate_chakra_node.
    """

    ALLREDUCE = "allreduce"
    ALLTOALL = "alltoall"
    BARRIER = "barrier"
    REDUCE = "reduce"
    REDUCESCATTER = "reducescatter"
    GATHER = "gather"
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class NetworkBackend(Enum):
    """
    Enum class that holds the network backend name to string name.

    The string values are the uppercase identifiers compared against in
    AstraSimClient.run_simulation when selecting the simulation backend.
    """

    ANALYTICAL_CONGESTION_AWARE = "ANALYTICAL_CONGESTION_AWARE"
    ANALYTICAL_CONGESTION_UNAWARE = "ANALYTICAL_CONGESTION_UNAWARE"
    NS3 = "NS3"
    HTSIM = "HTSIM"
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class AstraSim:
    """
    Root class that is used to configure the server, configure astra-sim and
    run a simulation.

    Parameters
    ----------
    server_endpoint : str
        ``"<host>:<port>"`` of the astra-sim gRPC service; reachability is
        verified at construction time.
    tag : str
        Identifier used to namespace the per-run configuration and result
        directories (see FileFolderUtils).
    backend_name : str
        Simulation backend name (default ``"NS3"``); NS3 metric translation
        only runs when this is ``"NS3"``.
    """

    def __init__(
        self,
        server_endpoint,
        tag="",
        backend_name="NS3",
    ):
        self._server_endpoint = server_endpoint
        self._astra_sim_client = AstraSimClient()
        # Instantiating the (singleton) FileFolderUtils creates the per-tag
        # directory tree used by all later file operations.
        FileFolderUtils(tag)
        self.configuration = astra_sim_sdk.Config()
        self._backend_name = backend_name
        self.tag = tag
        self._validate_server_endpoint()

    def _validate_server_endpoint(self):
        """
        Validate that the gRPC server is reachable.

        Raises
        ------
        ConnectionError
            If the channel does not become ready within 2 seconds.
        """
        channel = grpc.insecure_channel(self._server_endpoint)
        try:
            grpc.channel_ready_future(channel).result(timeout=2)
            print(f"Successfully connected to gRPC server at {self._server_endpoint}")
        except grpc.FutureTimeoutError as exc:
            raise ConnectionError(
                f"Could not connect to gRPC server at {self._server_endpoint}. "
                "Ensure the server is running and reachable."
            ) from exc
        finally:
            # The probe channel is only needed for this check; close it so the
            # underlying connection is not leaked (the original never closed it).
            channel.close()

    def generate_collective(self, collective, coll_size, npu_range):
        """
        Wrapper on top of WorkloadConfiguration.generate_chakra_node.

        Returns the generated trace path prefix, or ``""`` when *npu_range*
        is empty or has fewer than two entries.
        """
        if not npu_range:
            print("NPU range is not defined")
            return ""

        if len(npu_range) < 2:
            print("NPU range not set correctly")
            return ""

        return WorkloadConfiguration.generate_chakra_node(
            collective=collective, coll_size=coll_size, npu_range=npu_range, tag=self.tag
        )

    def run_simulation(self, network_backend):
        """
        A wrapper over multiple operations: upload the workload, set the
        configuration, start the run, poll until it finishes, then download
        the result files (and translate NS3 metrics where applicable).
        """
        workload_dir = self.configuration.common_config.workload
        workload_folder = os.path.dirname(workload_dir)
        self._astra_sim_client.set_url(self._server_endpoint)  # type: ignore
        self._astra_sim_client.pack_zip(workload_folder)
        self._astra_sim_client.upload_config()
        self._astra_sim_client.set_config(self.configuration)
        self._astra_sim_client.run_simulation(network_backend.value)
        while True:
            status = self._astra_sim_client.get_status()
            if status in ("completed", "failed", "terminated"):
                break
            print(f"astra-sim server Status: {status}")
            time.sleep(2)
        self._astra_sim_client.get_file("simulation.log")
        # Reuse the final polled status; the original issued two extra
        # get_status() RPCs here, which could also race with server state.
        if status in ("failed", "terminated"):
            print("Simulation " + status)
        else:
            print("Downloading Output files....")
            self._astra_sim_client.download_files()
            print("All files downloaded Successfully")
            # Translate only when metrics will actually be produced; the
            # original printed "Translating Metrics..." for any NS3 run even
            # when _backend_name prevented translation from happening.
            if network_backend.value == "NS3" and self._backend_name == "NS3":
                print("Translating Metrics...")
                StatUtil.ns3_fct_csv()
                StatUtil.ns3_flow_statistics()
                print("All metrics translated successfully")
        print("Simulation completed")

    def download_configuration(self):
        """
        Download the configuration in zip format consisting of all the files
        required for running the simulation.

        Raises
        ------
        FileNotFoundError
            If the server could not return a configuration.
        """
        try:
            self._astra_sim_client.get_config()
        except FileNotFoundError as e:
            print(f"Configuration file not found: {e}")
            raise
        except Exception as e:
            print(f"Failed to download configuration: {e}")
            raise
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
class WorkloadConfiguration:
    """
    Static class that handles the chakra workload configuration.
    """

    @staticmethod
    def get_collectives():
        """
        Return a human-readable listing of the supported collectives.

        Note: the original listing named ALL_GATHER under both "Supported"
        and "Not Supported"; it is not in the Collective enum and has no
        comm_type mapping, so it is listed only as unsupported here.
        """
        return """
        Supported:
        ALL_REDUCE
        ALL_TO_ALL
        BARRIER
        REDUCE
        REDUCE_SCATTER_BLOCK
        GATHER
        Not Supported:
        REDUCE_SCATTER
        BROADCAST
        ALL_GATHER
        """

    @staticmethod
    def generate_chakra_node(collective, npu_range, coll_size, tag):
        """
        Generate one Chakra ET file per NPU for the range ``[m, n)`` inside
        the tag's workload directory.

        Parameters
        ----------
        collective : Collective
            The collective operation to encode.
        npu_range : sequence of int
            ``[start, stop)`` NPU ids; one trace file is written per id.
        coll_size : int
            Collective data size in bytes (written as ``comm_size``).
        tag : str
            Prefix used for the generated ``<tag>.<npu_id>.et`` files.

        Returns
        -------
        str
            ``os.path.join(workload_dir, tag)`` — the trace path prefix.

        Raises
        ------
        ValueError
            If the collective has no comm_type mapping.
        """
        collective = collective.value
        # Map the lowercase collective identifier to the Chakra comm_type
        # constant (built at call time; replaces the original if/elif chain).
        comm_types = {
            "allreduce": ALL_REDUCE,
            "alltoall": ALL_TO_ALL,
            "barrier": BARRIER,
            "reduce": REDUCE,
            "reducescatter": REDUCE_SCATTER,
            "gather": GATHER,
        }
        if collective not in comm_types:
            # Previously an unknown collective silently produced a trace with
            # no comm_type attribute; fail loudly instead.
            raise ValueError(f"Unsupported collective: {collective}")
        comm_type = comm_types[collective]

        collective_dir = os.path.join(FileFolderUtils().CONFIG_DIR, FileFolderUtils().WORKLOAD_DIR)
        Utilities.delete_folder(collective_dir)
        os.makedirs(collective_dir)

        for npu_id in range(npu_range[0], npu_range[1]):
            output_filename = f"{collective_dir}/{tag}.{npu_id}.et"
            with open(output_filename, "wb") as et:
                # Chakra metadata header.
                Utilities.encode_message(et, GlobalMetadata(version="0.0.4"))

                # Create the single collective-communication node.
                node = ChakraNode()
                node.id = 1
                node.name = collective
                node.type = COMM_COLL_NODE

                # Assign attributes.
                node.attr.append(ChakraAttr(name="is_cpu_op", bool_val=False))
                node.attr.append(ChakraAttr(name="comm_type", int64_val=comm_type))
                node.attr.append(ChakraAttr(name="comm_size", int64_val=coll_size))
                node.attr.append(ChakraAttr(name="involved_dim", bool_list=BoolList(values=[True])))

                # Store the Chakra ET file.
                Utilities.encode_message(et, node)
        print("Generated " + str(npu_range[1] - npu_range[0]) + " et in " + collective_dir)
        return os.path.join(collective_dir, tag)
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MIT License
|
|
3
|
+
|
|
4
|
+
Copyright (c) 2025 Keysight Technologies
|
|
5
|
+
|
|
6
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
7
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
8
|
+
in the Software without restriction, including without limitation the rights
|
|
9
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
10
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
11
|
+
furnished to do so, subject to the following conditions:
|
|
12
|
+
|
|
13
|
+
The above copyright notice and this permission notice shall be included in all
|
|
14
|
+
copies or substantial portions of the Software.
|
|
15
|
+
|
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
17
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
18
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
19
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
20
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
21
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
22
|
+
SOFTWARE.
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
import os
|
|
26
|
+
import json
|
|
27
|
+
import logging
|
|
28
|
+
import threading
|
|
29
|
+
|
|
30
|
+
import astra_sim_sdk.astra_sim_sdk as astra_sim
|
|
31
|
+
from service_client_utils.common import Utilities, FileFolderUtils
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class AstraSimClient:
    """
    Singleton wrapper around the astra_sim SDK gRPC api used to upload
    configurations, start simulations, poll status and download results.
    """

    _instance = None

    def __new__(cls, *args, **kwargs):
        # Singleton: always return the same instance. Do NOT forward
        # args/kwargs to object.__new__ -- it rejects extra arguments and
        # would raise TypeError (matches FileFolderUtils.__new__).
        if not cls._instance:
            cls._instance = super(AstraSimClient, cls).__new__(cls)
        return cls._instance

    def __init__(self):
        self._url = ""
        self._api = self._build_api("localhost:50051")
        self._stopping_event = threading.Event()
        self._backend = None

    @staticmethod
    def _build_api(location):
        """
        Construct an SDK api object with the client's standard settings
        (gRPC streaming enabled, 180s request timeout).
        """
        api = astra_sim.api(
            location=location,
            transport=astra_sim.Transport.GRPC,
            logger=None,
            loglevel=logging.ERROR,
        )
        api.enable_grpc_streaming = True  # type: ignore
        # api.chunk_size = 1
        api.request_timeout = 180  # type: ignore
        return api

    def set_url(self, url):
        """
        Set the service URL and rebuild the api object pointing at it.

        All standard settings are re-applied via _build_api; the original
        implementation silently dropped the 180s request timeout here.
        """
        self._url = url
        self._api = self._build_api(url)

    def get_api(self):
        """
        Returns the grpc api.
        """
        return self._api

    def get_api_stub(self):
        """
        Returns the grpc api stub.
        """
        return self.get_api()._get_stub()  # type: ignore

    def upload_config(self):
        """
        Upload the configuration zip (located inside the <tag> directory)
        to the service.
        """
        with open(FileFolderUtils().ZIP_PATH, "rb") as f:
            file_data = f.read()
        return self.get_api().upload_config(payload=file_data)

    def set_config(self, config):
        """
        Set the given configuration object on the service.
        """
        response = self.get_api().set_config(payload=config)
        print(response)

    def run_simulation(self, backend):
        """
        Trigger a simulation run on the service for the given backend name
        (case-insensitive; unknown names leave the backend field unset,
        matching the original if/elif behavior).
        """
        backends = {
            "ANALYTICAL_CONGESTION_AWARE": astra_sim.ControlStart.ANALYTICAL_CONGESTION_AWARE,
            "ANALYTICAL_CONGESTION_UNAWARE": astra_sim.ControlStart.ANALYTICAL_CONGESTION_UNAWARE,
            "NS3": astra_sim.ControlStart.NS3,
            "HTSIM": astra_sim.ControlStart.HTSIM,
        }
        control = astra_sim.Control(choice="start")
        selected = backends.get(backend.upper())
        if selected is not None:
            control.start.backend = selected
        response = self.get_api().set_control_action(control)
        print(response)

    def get_status(self):
        """
        Return the simulation status string reported by the service.
        """
        response = self.get_api().get_status()
        return response.status  # type: ignore

    def get_file(self, filename):
        """
        Download a single result file from the service into OUTPUT_DIR.

        A None response means the server has no content for *filename*; an
        empty placeholder file is created so downstream consumers still find
        the path. (The original contained an unreachable FileNotFoundError
        branch nested under ``response is not None``; removed.)
        """
        result = self.get_api().result()
        result.filename = filename

        response = self.get_api().get_result(result)
        target = os.path.join(FileFolderUtils().OUTPUT_DIR, filename)
        if response is None:
            open(target, "w", encoding="utf-8").close()
        else:
            bytes_data = response.read()  # type: ignore
            with open(target, "wb") as f:
                f.write(bytes_data)

    def get_metadata(self):
        """
        Download the result-files metadata from the service and return the
        list of available filenames.
        """
        result = self.get_api().result()
        result.choice = "metadata"
        response = self.get_api().get_result(result)
        # The metadata payload is a JSON array of {"filename": ...} records.
        bytes_data = response.read()  # type: ignore
        data_dict = json.loads(bytes_data.decode("utf-8"))
        return [filedata["filename"] for filedata in data_dict]

    def download_files(self):
        """
        Download all result files listed in the server's metadata.
        Returns the list of downloaded filenames (empty if none).
        """
        print("Transferring Files from ASTRA-sim server")
        metadata = self.get_metadata()
        if len(metadata) == 0:
            print("Result Files missing")
            return []

        for file in metadata:
            print(f"Downloading file: {file}")
            self.get_file(file)
        return metadata

    def pack_zip(self, workload_folder=""):
        """
        Zip the configuration directory (parent of *workload_folder*, or the
        tag's CONFIG_DIR when none is given) into ZIP_PATH.
        """
        print("Generating Configuration ZIP now")
        if workload_folder == "":
            workload_folder = FileFolderUtils().CONFIG_DIR
        Utilities.zip_folder(os.path.join(workload_folder, ".."), FileFolderUtils().ZIP_PATH)
        print("pack_zip complete")

    def get_config(self):
        """
        Download the configuration in zip format consisting of all the files
        required for running the simulation.

        Raises
        ------
        FileNotFoundError
            If the server returned no configuration.
        """
        config_response = self.get_api().get_config()
        if config_response is None:
            raise FileNotFoundError("Server couldn't return config")
        zip_bytes = config_response.read()
        # SERVER_CONFIG_ZIP is already an absolute path; the original joined
        # it onto OUTPUT_DIR, which os.path.join discards for absolute paths.
        output_path = FileFolderUtils().SERVER_CONFIG_ZIP
        with open(output_path, "wb") as f:
            f.write(zip_bytes)
        print(f"Downloaded all configuration in {FileFolderUtils().SERVER_CONFIG_ZIP}")
|
|
@@ -0,0 +1,319 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MIT License
|
|
3
|
+
|
|
4
|
+
Copyright (c) 2025 Keysight Technologies
|
|
5
|
+
|
|
6
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
7
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
8
|
+
in the Software without restriction, including without limitation the rights
|
|
9
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
10
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
11
|
+
furnished to do so, subject to the following conditions:
|
|
12
|
+
|
|
13
|
+
The above copyright notice and this permission notice shall be included in all
|
|
14
|
+
copies or substantial portions of the Software.
|
|
15
|
+
|
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
17
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
18
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
19
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
20
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
21
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
22
|
+
SOFTWARE.
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
import os
|
|
26
|
+
import shutil
|
|
27
|
+
import json
|
|
28
|
+
import zipfile
|
|
29
|
+
import struct
|
|
30
|
+
import pandas as pd
|
|
31
|
+
|
|
32
|
+
pd.options.mode.chained_assignment = None
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class FileFolderUtils:
    """
    Singleton that owns the per-tag directory layout (trial/<tag>/...) and
    the well-known file names used by the client utilities.
    """

    _instance = None
    _initialized = False

    # Anchor all paths relative to this module's location on disk.
    UTILS_PATH = os.path.abspath(__file__)
    SRC_DIR = os.path.dirname(UTILS_PATH)
    HOME_DIR = os.path.join(SRC_DIR, "..")

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    @classmethod
    def get_instance(cls):
        """
        Return the singleton instance (None if never constructed).
        """
        return cls._instance

    def __init__(self, tag=""):
        # Only the very first construction initializes state; subsequent
        # calls (even with a different tag) are no-ops.
        if self._initialized:
            return
        self._initialized = True
        if tag == "":
            tag = "new_run"
        self.tag_name = tag
        # Directory paths are filled in by set_tag below.
        self.TRIAL_DIR = ""
        self.TAG_DIR = ""
        self.INFRA_DIR = ""
        self.CONFIG_DIR = ""
        self.OUTPUT_DIR = ""
        self.ZIP_DIR = ""
        self.ZIP_FILE_NAME = r"config.zip"
        self.SERVER_ZIP_FILE_NAME = r"server_configuration.zip"
        self.INFRA_JSON_FILENAME = "infra.json"
        self.BIND_JSON_FILENAME = "bind.json"
        self.NETWORK_CONFIGURATION_FILENAME = "network_config.txt"
        self.NS3_TOPOLOGY_FILENAME = "nc-topology-file.txt"
        self.REMOTE_MEMORY_FILENAME = "RemoteMemory.json"
        self.SYSTEM_CONFIGURATION_FILENAME = "system.json"
        self.LOGICAL_TOPOLOGY_FILENAME = "logical.json"
        self.ZIP_PATH = ""
        self.WORKLOAD_DIR = "workload"
        self.set_tag(tag)

    def set_tag(self, tag):
        """
        Switch to *tag*: recompute every directory/zip path under
        trial/<tag>/ and reset those directories on disk.
        """
        self.tag_name = tag
        self.TRIAL_DIR = os.path.join(FileFolderUtils.HOME_DIR, "trial")
        self.TAG_DIR = os.path.join(self.TRIAL_DIR, self.tag_name)
        self.INFRA_DIR = os.path.join(self.TAG_DIR, "infrastructure")
        self.CONFIG_DIR = os.path.join(self.TAG_DIR, "configuration")
        self.OUTPUT_DIR = os.path.join(self.TAG_DIR, "output")
        self.ZIP_DIR = os.path.join(self.TAG_DIR, "zip_dir")
        self.ZIP_PATH = os.path.join(self.TAG_DIR, self.ZIP_FILE_NAME)
        self.SERVER_CONFIG_ZIP = os.path.join(self.TAG_DIR, self.SERVER_ZIP_FILE_NAME)
        self.reset_directories()

    def reset_directories(self):
        """
        Delete and recreate the tag's working directories
        (TAG_DIR and its infrastructure/configuration/output/zip subdirs).
        """
        print("Resetting test directory")
        managed = (self.TAG_DIR, self.INFRA_DIR, self.CONFIG_DIR, self.OUTPUT_DIR, self.ZIP_DIR)
        # Deleting TAG_DIR first removes the children too; the remaining
        # deletes are harmless no-ops on already-missing folders.
        for directory in managed:
            Utilities.delete_folder(directory)
        for directory in managed:
            Utilities.create_folder(directory)
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
class Utilities:
    """
    Grab-bag of static helpers: folder zip/delete/create, length-prefixed
    protobuf encoding, and ns3 configuration parsing.
    """

    @staticmethod
    def zip_folder(folder_path: str, output_path: str):
        """
        Recursively zip *folder_path* (files and subdirectories) into the
        archive at *output_path*. The directories in *output_path* must
        already exist; archive entries are stored relative to *folder_path*.
        """
        print("output_path: " + output_path)
        print("folder_path: " + folder_path)
        with zipfile.ZipFile(output_path, "w", zipfile.ZIP_DEFLATED) as archive:
            for dir_path, _, filenames in os.walk(folder_path):
                for name in filenames:
                    full_path = os.path.join(dir_path, name)
                    archive.write(full_path, os.path.relpath(full_path, folder_path))

    @staticmethod
    def delete_folder(folder):
        """
        Remove *folder* and everything inside it. Silently returns when the
        folder does not exist; failures on individual entries are printed
        (and may then cause the final rmdir to fail).
        """
        if not os.path.exists(folder):
            return

        for entry in os.listdir(folder):
            file_path = os.path.join(folder, entry)
            try:
                if os.path.isdir(file_path) and not os.path.islink(file_path):
                    shutil.rmtree(file_path)
                else:
                    os.unlink(file_path)
            except Exception as e:
                print(f"Failed to delete {file_path}. Reason: {e}")

        os.rmdir(folder)
        print("All contents of the folder " + folder + " have been deleted.")

    @staticmethod
    def create_folder(folder: str):
        """
        Create *folder* (including parents) if it is missing; print a notice
        when it already exists. Errors are printed, not raised.
        """
        try:
            if os.path.exists(folder):
                print(f"Folder already exists at: {folder}")
            else:
                os.makedirs(folder)
        except Exception as e:
            print(f"An error occurred: {e}")

    @staticmethod
    def encode_variant_32(out_file, value):
        """
        Write *value* to *out_file* as a protobuf Varint32.

        Re-implemented from google.protobuf.internal.encoder so we do not
        depend on the library's internal functions.
        """
        while True:
            low_bits = value & 0x7F
            value >>= 7
            if not value:
                # Final byte: continuation bit clear.
                out_file.write(struct.pack("<B", low_bits))
                return
            # More bytes follow: set the continuation bit.
            out_file.write(struct.pack("<B", 0x80 | low_bits))

    @staticmethod
    def encode_message(out_file, message):
        """
        Serialize *message* and write it with its length prepended as a
        32-bit varint.
        """
        payload = message.SerializeToString()
        Utilities.encode_variant_32(out_file, len(payload))
        out_file.write(payload)

    @staticmethod
    def serialize_ns3_configuration_to_dict(file_path: str) -> dict:
        """
        Parse a whitespace-delimited ns3 network config file into a dict.

        KMAX_MAP/KMIN_MAP/PMAX_MAP values are re-joined into a single
        space-separated string; other keys map to a scalar when one value is
        present, otherwise to the list of values.
        """
        config = {}
        special_keys = {"KMAX_MAP", "KMIN_MAP", "PMAX_MAP"}
        with open(file_path, "r", encoding="utf-8") as network_file:
            for raw_line in network_file:
                tokens = raw_line.split()
                if not tokens:
                    continue
                key, values = tokens[0], tokens[1:]
                if key in special_keys:
                    config[key] = " ".join(values)
                elif len(values) == 1:
                    config[key] = values[0]
                else:
                    config[key] = values
        return config

    @staticmethod
    def to_dict(obj):
        """Safe conversion: works whether serialize() returns str or dict."""
        serialized = obj.serialize()
        if isinstance(serialized, str):
            return json.loads(serialized)
        return serialized
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
class StatUtil:
    """
    Utilities for generating statistics from ns3 simulation output files.

    The file-reading methods look for ``fct.txt`` under
    ``FileFolderUtils().OUTPUT_DIR`` and write their CSV output there too.
    """

    @staticmethod
    def ns3_flow_statistics():
        """
        Build per-flow statistics from the ns3 backend's ``fct.txt``.

        Reads the whitespace-delimited ``fct.txt``, converts the hex source
        and destination addresses to dotted IPv4, derives byte and
        millisecond time columns, writes ``flow_stats.csv`` to the output
        directory and returns the resulting DataFrame.
        """
        out_dir = FileFolderUtils().OUTPUT_DIR
        # sep=r"\s+" replaces delim_whitespace=True, which is deprecated in
        # pandas 2.x and removed in 3.0; parsing behavior is identical.
        df = pd.read_csv(
            os.path.join(out_dir, "fct.txt"),
            sep=r"\s+",
            header=None,
        )

        df.columns = [
            "Source ip",
            "Destination ip",
            "Source Port",
            "Destination Port",
            "Data size (B)",
            "Start Time",
            "FCT",
            "Standalone FCT",
        ]
        df["Source ip"] = df["Source ip"].apply(StatUtil.hex_to_ip)
        df["Destination ip"] = df["Destination ip"].apply(StatUtil.hex_to_ip)
        df["Total Bytes Tx"] = df["Data size (B)"]
        # Rx mirrors Tx: fct.txt reports a single byte count per flow.
        df["Total Bytes Rx"] = df["Total Bytes Tx"]
        # FCT and Start Time are divided by 1e6 to get ms — presumably the
        # raw values are nanoseconds; TODO confirm against the ns3 backend.
        df["Completion time (ms)"] = df["FCT"] / 1000000
        df["Start (ms)"] = df["Start Time"] / 1000000
        df["End (ms)"] = df["Start (ms)"] + df["Completion time (ms)"]
        df["FCT"] = df["FCT"].astype(int)

        out_path = os.path.join(out_dir, "flow_stats.csv")
        df.to_csv(out_path, index=False)
        print(
            "Generated: flow_stats.csv at: ",
            out_path,
        )
        return df

    @staticmethod
    def ns3_fct_csv():
        """
        Convert the ns3 backend's ``fct.txt`` into ``fct.csv``.

        Column names are assigned but values are left untransformed. Writes
        ``fct.csv`` to the output directory and returns the DataFrame.
        """
        out_dir = FileFolderUtils().OUTPUT_DIR
        # sep=r"\s+" replaces the deprecated delim_whitespace=True.
        df = pd.read_csv(
            os.path.join(out_dir, "fct.txt"),
            sep=r"\s+",
            header=None,
        )
        df.columns = [
            "Source Hex ip",
            "Destination Hex ip",
            "Source Port",
            "Destination Port",
            "Data size (B)",
            "Start Time",
            "FCT",
            "Standalone FCT",
        ]

        out_path = os.path.join(out_dir, "fct.csv")
        df.to_csv(out_path, index=False)
        print("Generated fct.csv at: ", out_path)
        return df

    @staticmethod
    def hex_to_ip(hex_str):
        """
        Convert a hexadecimal string (e.g. ``"0b000001"``) to dotted IPv4.

        Returns the string ``"Invalid Hex"`` when the input is not valid
        hexadecimal.
        """
        try:
            # int(..., 16) is case-insensitive and accepts an optional 0x
            # prefix, so no case normalization is required beforehand.
            num = int(hex_str, 16)
        except ValueError:
            return "Invalid Hex"

        octet1 = (num >> 24) & 0xFF
        octet2 = (num >> 16) & 0xFF
        octet3 = (num >> 8) & 0xFF
        octet4 = num & 0xFF
        return f"{octet1}.{octet2}.{octet3}.{octet4}"
|
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MIT License
|
|
3
|
+
|
|
4
|
+
Copyright (c) 2025 Keysight Technologies
|
|
5
|
+
|
|
6
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
7
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
8
|
+
in the Software without restriction, including without limitation the rights
|
|
9
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
10
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
11
|
+
furnished to do so, subject to the following conditions:
|
|
12
|
+
|
|
13
|
+
The above copyright notice and this permission notice shall be included in all
|
|
14
|
+
copies or substantial portions of the Software.
|
|
15
|
+
|
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
17
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
18
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
19
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
20
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
21
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
22
|
+
SOFTWARE.
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
import json
|
|
26
|
+
import yaml
|
|
27
|
+
import toml
|
|
28
|
+
import traceback
|
|
29
|
+
import astra_sim_sdk.astra_sim_sdk as astra_sim
|
|
30
|
+
|
|
31
|
+
try:
    # Running as a flat script: common.py sits next to this file.
    from common import Utilities
except ImportError:
    # Installed as a package: fall back to the package-relative import.
    # (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
    # and masked unrelated failures.)
    from .common import Utilities
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class TranslateConfig:
|
|
38
|
+
"""
|
|
39
|
+
Class that holds translators that convert a file configuration to an astra-sim object config
|
|
40
|
+
"""
|
|
41
|
+
|
|
42
|
+
@staticmethod
def translate_remote_memory(remote_memory_file_path: str, configuration: astra_sim.Config):
    """
    Translate a remote-memory JSON file into the astra-sim config.

    Args:
        remote_memory_file_path: full path of the remote memory JSON file.
        configuration: the astra-sim config; values land in
            ``configuration.common_config.remote_memory``.

    Raises:
        RuntimeError: if the file cannot be read or translated.
    """
    try:
        with open(remote_memory_file_path, "r", encoding="utf-8") as rm_file:
            rm = json.load(rm_file)
        # The schema uses underscores where the file format uses dashes.
        normalized = {name.replace("-", "_"): val for name, val in rm.items()}
        configuration.common_config.remote_memory.deserialize(normalized)
    except Exception as e:
        print(f"Error translating remote memory configuration from file '{remote_memory_file_path}': {e}")
        traceback.print_exc()
        raise RuntimeError("Failed to translate remote memory file configuration.") from e
|
|
62
|
+
|
|
63
|
+
@staticmethod
def translate_system_configuration(system_config_file_path: str, configuration: astra_sim.Config):
    """
    Translate a system configuration JSON file into the astra-sim config.

    Args:
        system_config_file_path: full path of the system configuration JSON file.
        configuration: the astra-sim config; values land in
            ``configuration.common_config.system``.

    Raises:
        RuntimeError: if the file cannot be read or translated.
    """
    # Single-letter keys used by the file format and their schema names.
    short_keys = (("L", "latency"), ("o", "overhead"), ("g", "gap"), ("G", "global_memory"))
    try:
        with open(system_config_file_path, "r", encoding="utf-8") as sc_file:
            sc = json.load(sc_file)
        # The schema uses underscores where the file format uses dashes.
        new_conf = {name.replace("-", "_"): val for name, val in sc.items()}
        # Expand the short names after the dash replacement, same as before.
        for short, full in short_keys:
            if short in new_conf:
                new_conf[full] = new_conf.pop(short)
        configuration.common_config.system.deserialize(new_conf)
    except Exception as e:
        print(f"Error translating system config configuration from file '{system_config_file_path}': {e}")
        traceback.print_exc()
        raise RuntimeError("Failed to translate system config file configuration.") from e
|
|
98
|
+
|
|
99
|
+
@staticmethod
def translate_communicator_configuration(
    communicator_config_file_path: str, configuration: astra_sim.Config
):
    """
    Translate a communicator-group JSON file into the astra-sim config.

    Args:
        communicator_config_file_path: full path of the communicator group
            JSON file.
        configuration: the astra-sim config; each entry is added to
            ``configuration.common_config.communicator_group``.

    Raises:
        RuntimeError: if the file cannot be read or translated.
    """
    try:
        with open(communicator_config_file_path, "r", encoding="utf-8") as cg_file:
            groups = json.load(cg_file)
        for group_name, group_value in groups.items():
            configuration.common_config.communicator_group.add(group_name, group_value)
    except Exception as e:
        print(
            f"Error translating communicator configuration from file '{communicator_config_file_path}': {e}"
        )
        traceback.print_exc()
        raise RuntimeError("Failed to translate communicator file configuration.") from e
|
|
121
|
+
|
|
122
|
+
@staticmethod
def translate_ns3_nc_topology_configuration(
    nc_topology_config_file_path: str, configuration: astra_sim.Config
):
    """
    Translate an ns3 "nc topology" text file into the astra-sim config.

    Expected file layout:
        line 0: ``<total_nodes> <total_switches> <total_links>``
        line 1: whitespace-separated switch ids
        rest:   ``<src> <dst> <bandwidth> <latency> <error_rate>`` per link

    Args:
        nc_topology_config_file_path: full path of the nc topology txt file.
        configuration: the astra-sim config; values land in
            ``configuration.network_backend.ns3.topology.nc_topology``.

    Raises:
        RuntimeError: if the file cannot be read or translated.
    """
    try:
        config_topo = configuration.network_backend.ns3.topology.nc_topology
        with open(nc_topology_config_file_path, "r", encoding="utf-8") as nc_topo_file:
            for line_number, raw_line in enumerate(nc_topo_file):
                fields = raw_line.split()
                if not fields:
                    continue  # e.g. the switch line may be empty
                if line_number == 0:
                    config_topo.total_nodes = int(fields[0])
                    config_topo.total_switches = int(fields[1])
                    config_topo.total_links = int(fields[2])
                elif line_number == 1:
                    config_topo.switch_ids = [int(switch_id) for switch_id in fields]
                else:
                    # bandwidth / latency / error_rate stay as strings.
                    config_topo.connections.add(
                        int(fields[0]), int(fields[1]), fields[2], fields[3], fields[4]
                    )
    except Exception as e:
        print(
            f"Error translating nc topology configuration from file '{nc_topology_config_file_path}': {e}"
        )
        traceback.print_exc()
        raise RuntimeError("Failed to translate nc topology file configuration.") from e
|
|
166
|
+
|
|
167
|
+
@staticmethod
def translate_ns3_network_configuration(network_config_file_path: str, configuration: astra_sim.Config):
    """
    Translate an ns3 network configuration file into the astra-sim config.

    Args:
        network_config_file_path: full path of the ns3 network configuration
            file (whitespace-delimited ``KEY value`` lines).
        configuration: the astra-sim config; values land in
            ``configuration.network_backend.ns3.network``.

    Raises:
        RuntimeError: if the file cannot be read, a required key is missing,
            or a value cannot be converted.
    """
    try:
        # Utilities.serialize_ns3_configuration_to_dict maps each KEY to a
        # string, a list of strings, or (for the *_MAP keys) a joined string.
        network_dict = Utilities.serialize_ns3_configuration_to_dict(network_config_file_path)
        config_network = configuration.network_backend.ns3.network
        # Flags / sizes (int-typed in the schema).
        config_network.enable_qcn = int(network_dict["ENABLE_QCN"])
        config_network.use_dynamic_pfc_threshold = int(network_dict["USE_DYNAMIC_PFC_THRESHOLD"])
        config_network.packet_payload_size = int(network_dict["PACKET_PAYLOAD_SIZE"])
        # File paths (kept as strings).
        config_network.topology_file = network_dict["TOPOLOGY_FILE"]
        config_network.flow_file = network_dict["FLOW_FILE"]
        config_network.trace_file = network_dict["TRACE_FILE"]
        config_network.trace_output_file = network_dict["TRACE_OUTPUT_FILE"]
        config_network.fct_output_file = network_dict["FCT_OUTPUT_FILE"]
        config_network.pfc_output_file = network_dict["PFC_OUTPUT_FILE"]
        config_network.qlen_mon_file = network_dict["QLEN_MON_FILE"]
        config_network.qlen_mon_start = int(network_dict["QLEN_MON_START"])
        config_network.qlen_mon_end = int(network_dict["QLEN_MON_END"])
        config_network.simulator_stop_time = float(network_dict["SIMULATOR_STOP_TIME"])
        # Congestion-control parameters.
        config_network.cc_mode = int(network_dict["CC_MODE"])
        config_network.alpha_resume_interval = int(network_dict["ALPHA_RESUME_INTERVAL"])
        config_network.rate_decrease_interval = int(network_dict["RATE_DECREASE_INTERVAL"])
        config_network.clamp_target_rate = int(network_dict["CLAMP_TARGET_RATE"])
        config_network.rp_timer = int(network_dict["RP_TIMER"])
        config_network.ewma_gain = float(network_dict["EWMA_GAIN"])
        config_network.fast_recovery_times = int(network_dict["FAST_RECOVERY_TIMES"])
        # Rates carry units (e.g. "Mb/s") and are kept as strings.
        config_network.rate_ai = network_dict["RATE_AI"]
        config_network.rate_hai = network_dict["RATE_HAI"]
        config_network.min_rate = network_dict["MIN_RATE"]
        config_network.dctcp_rate_ai = network_dict["DCTCP_RATE_AI"]
        config_network.error_rate_per_link = float(network_dict["ERROR_RATE_PER_LINK"])
        config_network.l2_chunk_size = int(network_dict["L2_CHUNK_SIZE"])
        config_network.l2_ack_interval = int(network_dict["L2_ACK_INTERVAL"])
        config_network.l2_back_to_zero = int(network_dict["L2_BACK_TO_ZERO"])
        config_network.has_win = int(network_dict["HAS_WIN"])
        config_network.global_t = int(network_dict["GLOBAL_T"])
        config_network.var_win = int(network_dict["VAR_WIN"])
        config_network.fast_react = int(network_dict["FAST_REACT"])
        config_network.u_target = float(network_dict["U_TARGET"])
        config_network.mi_thresh = int(network_dict["MI_THRESH"])
        config_network.int_multi = int(network_dict["INT_MULTI"])
        config_network.multi_rate = int(network_dict["MULTI_RATE"])
        config_network.sample_feedback = int(network_dict["SAMPLE_FEEDBACK"])
        config_network.pint_log_base = float(network_dict["PINT_LOG_BASE"])
        config_network.pint_prob = float(network_dict["PINT_PROB"])
        config_network.rate_bound = int(network_dict["RATE_BOUND"])
        config_network.ack_high_prio = int(network_dict["ACK_HIGH_PRIO"])
        # LINK_DOWN is a multi-value key, parsed as a list of strings.
        config_network.link_down = [int(x) for x in network_dict["LINK_DOWN"]]
        config_network.enable_trace = int(network_dict["ENABLE_TRACE"])
        # *_MAP values are kept as the original space-joined strings.
        config_network.kmax_map = network_dict["KMAX_MAP"]
        config_network.kmin_map = network_dict["KMIN_MAP"]
        config_network.pmax_map = network_dict["PMAX_MAP"]
        config_network.buffer_size = int(network_dict["BUFFER_SIZE"])
        config_network.nic_total_pause_time = int(network_dict["NIC_TOTAL_PAUSE_TIME"])
    except Exception as e:
        print(
            f"Error translating ns3 network file configuration from file '{network_config_file_path}': {e}"
        )
        traceback.print_exc()
        raise RuntimeError("Failed to translate ns3 network file configuration.") from e
|
|
233
|
+
|
|
234
|
+
@staticmethod
def translate_ns3_logical_configuration(logical_config_file_path: str, configuration: astra_sim.Config):
    """
    Translate an ns3 logical-topology JSON file into the astra-sim config.

    Args:
        logical_config_file_path: full path of the logical topology JSON file.
        configuration: the astra-sim config; values land in
            ``configuration.network_backend.ns3.logical_topology``.

    Raises:
        RuntimeError: if the file cannot be read, has no "logical-dims"
            entry, or cannot be translated.
    """
    try:
        with open(logical_config_file_path, "r", encoding="utf-8") as lc_file:
            lc = json.load(lc_file)
        logical_dims = lc.get("logical-dims")
        if not logical_dims:
            raise ValueError("no value for logical_dims")
        logical_topology = configuration.network_backend.ns3.logical_topology
        logical_topology.deserialize({"logical_dims": logical_dims})
        logical_topology.logical_dimensions = logical_dims
    except Exception as e:
        print(f"Error translating ns3 logical configuration from file '{logical_config_file_path}': {e}")
        traceback.print_exc()
        raise RuntimeError("Failed to translate ns3 logical configuration.") from e
|
|
260
|
+
|
|
261
|
+
@staticmethod
def translate_analytical_network(
    network_config_file_path: str, configuration: astra_sim.Config, backend_name: str
):
    """
    Translate an analytical/htsim network YAML file into the astra-sim config.

    Args:
        network_config_file_path: full path of the network YAML file.
        configuration: the astra-sim config; one topology dimension is added
            per entry of the YAML's parallel lists.
        backend_name: one of "analytical_congestion_aware",
            "analytical_congestion_unaware" or "htsim".

    Returns:
        None on success; an advisory message string for an unsupported
        *backend_name* (kept for backward compatibility).

    Raises:
        RuntimeError: if the file cannot be read or translated.
    """
    try:
        backend_list = ["analytical_congestion_aware", "analytical_congestion_unaware", "htsim"]
        if backend_name not in backend_list:
            # NOTE(review): returning a message instead of raising mirrors
            # the historical behavior; callers may depend on it.
            return f"please select backend_name from {backend_list}"
        config_topo = getattr(configuration.network_backend, backend_name).topology.network
        with open(network_config_file_path, "r", encoding="utf-8") as network_file:
            network_yaml = yaml.safe_load(network_file)
        # The four lists are indexed in lockstep, one entry per dimension.
        for dim in range(len(network_yaml["topology"])):
            config_topo.add(
                network_yaml["topology"][dim].lower(),
                network_yaml["npus_count"][dim],
                network_yaml["bandwidth"][dim],
                network_yaml["latency"][dim],
            )
    except Exception as e:
        print(
            f"Error translating analytical network configuration from file '{network_config_file_path}': {e}"
        )
        traceback.print_exc()
        raise RuntimeError("Failed to translate analytical network configuration.") from e
|
|
294
|
+
|
|
295
|
+
@staticmethod
def translate_htsim_fat_tree_topology(htsim_fat_tree_path: str, configuration: astra_sim.Config):
    """
    Translate an htsim fat-tree topology text file into the astra-sim config.

    Args:
        htsim_fat_tree_path: full path of the htsim fat-tree topology file.
        configuration: the astra-sim config; values land in
            ``configuration.network_backend.htsim.topology
            .network_topology_configuration.htsim_topology.fat_tree``.

    Raises:
        RuntimeError: if the file cannot be read or translated.
    """
    # File key -> schema attribute for the per-tier link settings.
    tier_field_map = {
        "Downlink_speed_Gbps": "downlink_speed_gbps",
        "Downlink_Latency_ns": "downlink_latency_ns",
        "Radix_Down": "radix_down",
        "Radix_Up": "radix_up",
        "Bundle": "bundle",
    }
    try:
        topo_dict = TranslateConfig._parse_htsim_topo(htsim_fat_tree_path)
        fat_tree = (
            configuration.network_backend.htsim.topology.network_topology_configuration.htsim_topology.fat_tree
        )
        fat_tree.nodes = topo_dict["Nodes"]
        fat_tree.tiers = topo_dict["Tiers"]
        fat_tree.podsize = topo_dict["Podsize"]

        for section_name, section_values in topo_dict.items():
            if not section_name.startswith("tier_"):
                continue  # only the per-tier sections carry link settings
            tier_obj = getattr(fat_tree, section_name)
            for file_key, attr_name in tier_field_map.items():
                if file_key in section_values:
                    setattr(tier_obj, attr_name, section_values[file_key])
    except Exception as e:
        print(f"Error translating htsim fat tree configuration from file '{htsim_fat_tree_path}': {e}")
        traceback.print_exc()
        raise RuntimeError("Failed to translate htsim fat tree network configuration.") from e
|
|
333
|
+
|
|
334
|
+
@staticmethod
def translate_ns3_trace_file_to_schema(ns3_trace_file_path: str, configuration: astra_sim.Config):
    """
    Translate an ns3 trace text file into the astra-sim config.

    Args:
        ns3_trace_file_path: full path of the ns3 trace txt file (first line
            is the npu count, second line the trace ids).
        configuration: the astra-sim config; values land in
            ``configuration.network_backend.ns3.trace.trace_ids``.

    Raises:
        RuntimeError: if the file cannot be read or translated.
    """
    try:
        with open(ns3_trace_file_path, "r", encoding="utf-8") as trace_file:
            # next() keeps the original behavior of failing on an empty file.
            next(trace_file)
            trace_ids = [int(token) for token in trace_file.readline().split()]
        configuration.network_backend.ns3.trace.trace_ids = trace_ids
    except Exception as e:
        print(f"Error translating ns3 trace file configuration from file '{ns3_trace_file_path}': {e}")
        traceback.print_exc()
        raise RuntimeError("Failed to translate ns3 trace file configuration.") from e
|
|
353
|
+
|
|
354
|
+
@staticmethod
def translate_logging_file_to_schema(logging_file_path: str, configruation: astra_sim.Config):
    """
    Translate a TOML logging configuration into the astra-sim config.

    Args:
        logging_file_path: full path of the logging TOML file.
        configruation: the astra-sim config (NOTE(review): the parameter name
            is misspelled but is part of the public interface and kept for
            backward compatibility); values land in
            ``configruation.common_config.logging``.

    Raises:
        RuntimeError: if the file cannot be read or translated.
    """
    try:
        with open(logging_file_path, "r", encoding="utf-8") as toml_file:
            toml_data = toml.load(toml_file)

        logging_config = configruation.common_config.logging
        for sink_conf in toml_data.get("sink", []):
            logging_config.sink.add(**sink_conf)
        for logger_conf in toml_data.get("logger", []):
            logging_config.logger.add(**logger_conf)
    except Exception as e:
        print(f"Error translating logging configuration from file '{logging_file_path}': {e}")
        traceback.print_exc()
        raise RuntimeError("Failed to translate logging configuration.") from e
|
|
375
|
+
|
|
376
|
+
@staticmethod
|
|
377
|
+
def _parse_htsim_topo(file_path: str) -> dict:
|
|
378
|
+
"""
|
|
379
|
+
static private function used to parse htsim topology
|
|
380
|
+
"""
|
|
381
|
+
result = {}
|
|
382
|
+
current_tier = None
|
|
383
|
+
with open(file_path, "r", encoding="utf-8") as f:
|
|
384
|
+
for line in f:
|
|
385
|
+
line = line.strip()
|
|
386
|
+
if not line:
|
|
387
|
+
continue
|
|
388
|
+
parts = line.split()
|
|
389
|
+
key = parts[0]
|
|
390
|
+
|
|
391
|
+
# check for Tier Section
|
|
392
|
+
if key == "Tier":
|
|
393
|
+
tier_num = parts[1]
|
|
394
|
+
current_tier = f"tier_{tier_num}"
|
|
395
|
+
result[current_tier] = {}
|
|
396
|
+
continue
|
|
397
|
+
|
|
398
|
+
value = parts[1] if len(parts) > 1 else None
|
|
399
|
+
if value is not None:
|
|
400
|
+
try:
|
|
401
|
+
value = int(value)
|
|
402
|
+
except ValueError:
|
|
403
|
+
try:
|
|
404
|
+
value = float(value)
|
|
405
|
+
except ValueError:
|
|
406
|
+
pass
|
|
407
|
+
if current_tier:
|
|
408
|
+
result[current_tier][key] = value
|
|
409
|
+
else:
|
|
410
|
+
result[key] = value
|
|
411
|
+
return result
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
service_client_utils/astra_sim.py
|
|
4
|
+
service_client_utils/astra_sim_client.py
|
|
5
|
+
service_client_utils/common.py
|
|
6
|
+
service_client_utils/config_to_schema.py
|
|
7
|
+
service_client_utils.egg-info/PKG-INFO
|
|
8
|
+
service_client_utils.egg-info/SOURCES.txt
|
|
9
|
+
service_client_utils.egg-info/dependency_links.txt
|
|
10
|
+
service_client_utils.egg-info/requires.txt
|
|
11
|
+
service_client_utils.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
service_client_utils
|