idmtools-platform-general 0.0.0.dev0__py3-none-any.whl → 0.0.2__py3-none-any.whl
This diff shows the changes between publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
- idmtools_platform_file/__init__.py +18 -0
- idmtools_platform_file/assets/__init__.py +77 -0
- idmtools_platform_file/assets/_run.sh.jinja2 +47 -0
- idmtools_platform_file/assets/batch.sh.jinja2 +24 -0
- idmtools_platform_file/assets/run_simulation.sh +8 -0
- idmtools_platform_file/cli/__init__.py +5 -0
- idmtools_platform_file/cli/file.py +185 -0
- idmtools_platform_file/file_operations/__init__.py +4 -0
- idmtools_platform_file/file_operations/file_operations.py +298 -0
- idmtools_platform_file/file_operations/operations_interface.py +74 -0
- idmtools_platform_file/file_platform.py +288 -0
- idmtools_platform_file/platform_operations/__init__.py +5 -0
- idmtools_platform_file/platform_operations/asset_collection_operations.py +172 -0
- idmtools_platform_file/platform_operations/experiment_operations.py +314 -0
- idmtools_platform_file/platform_operations/json_metadata_operations.py +320 -0
- idmtools_platform_file/platform_operations/simulation_operations.py +212 -0
- idmtools_platform_file/platform_operations/suite_operations.py +243 -0
- idmtools_platform_file/platform_operations/utils.py +461 -0
- idmtools_platform_file/plugin_info.py +82 -0
- idmtools_platform_file/tools/__init__.py +4 -0
- idmtools_platform_file/tools/job_history.py +334 -0
- idmtools_platform_file/tools/status_report/__init__.py +4 -0
- idmtools_platform_file/tools/status_report/status_report.py +222 -0
- idmtools_platform_file/tools/status_report/utils.py +159 -0
- idmtools_platform_general-0.0.2.dist-info/METADATA +81 -0
- idmtools_platform_general-0.0.2.dist-info/RECORD +36 -0
- idmtools_platform_general-0.0.2.dist-info/entry_points.txt +6 -0
- idmtools_platform_general-0.0.2.dist-info/licenses/LICENSE.TXT +3 -0
- idmtools_platform_general-0.0.2.dist-info/top_level.txt +3 -0
- idmtools_platform_process/__init__.py +17 -0
- idmtools_platform_process/platform_operations/__init__.py +5 -0
- idmtools_platform_process/platform_operations/experiment_operations.py +53 -0
- idmtools_platform_process/plugin_info.py +80 -0
- idmtools_platform_process/process_platform.py +52 -0
- tests/input/hello.sh +2 -0
- idmtools_platform_general/__init__.py +0 -8
- idmtools_platform_general-0.0.0.dev0.dist-info/METADATA +0 -41
- idmtools_platform_general-0.0.0.dev0.dist-info/RECORD +0 -5
- idmtools_platform_general-0.0.0.dev0.dist-info/top_level.txt +0 -1
- {idmtools_platform_general-0.0.0.dev0.dist-info → idmtools_platform_general-0.0.2.dist-info}/WHEEL +0 -0
idmtools_platform_file/tools/job_history.py
@@ -0,0 +1,334 @@
+"""
+idmtools JobHistory Utility.
+
+Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
+"""
+import diskcache
+from pathlib import Path
+from datetime import datetime
+from typing import NoReturn, Dict, Tuple, List
+from idmtools.core import ItemType
+from idmtools.core.platform_factory import Platform
+from idmtools.entities.experiment import Experiment
+from idmtools_platform_container.utils.general import normalize_path, is_valid_uuid
+from logging import getLogger
+
+logger = getLogger(__name__)
+user_logger = getLogger('user')
+
+JOB_HISTORY_DIR = "idmtools_experiment_history"
+
+
+def initialize():
+    """
+    Initialization decorator for JobHistory.
+    Returns:
+        Wrapper function
+    """
+
+    def wrap(func):
+        def wrapped_f(*args, **kwargs):
+            JobHistory.initialization()
+            value = func(*args, **kwargs)
+            return value
+
+        return wrapped_f
+
+    return wrap
+
+
+class JobHistory:
+    """Job History Utility for idmtools Container Platform."""
+    history = None
+    history_path = Path.home().joinpath(".idmtools").joinpath(JOB_HISTORY_DIR)
+
+    @classmethod
+    def initialization(cls):
+        """Initialize JobHistory."""
+        if cls.history is None:
+            cls.history_path.mkdir(parents=True, exist_ok=True)
+            cls.history = diskcache.Cache(str(cls.history_path))
+
+    @classmethod
+    @initialize()
+    def save_job(cls, job_dir: str, container_id: str, experiment: Experiment, platform=None) -> NoReturn:
+        """
+        Save job to history.
+        Args:
+            job_dir: job directory
+            container_id: container id
+            experiment: Experiment
+            platform: Platform
+        Returns:
+            NoReturn
+        """
+        cache = cls.history
+
+        if platform is None:
+            platform = Platform("File", job_directory=job_dir)
+
+        # Get current datetime
+        current_datetime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        if experiment.parent:
+            new_item = {"JOB_DIRECTORY": normalize_path(job_dir),
+                        "SUITE_NAME": experiment.parent.name,
+                        "SUITE_ID": experiment.parent_id,
+                        "EXPERIMENT_DIR": normalize_path(platform.get_directory(experiment)),
+                        "EXPERIMENT_NAME": experiment.name,
+                        "EXPERIMENT_ID": experiment.id,
+                        "CONTAINER": container_id,
+                        "CREATED": current_datetime}
+        else:
+            new_item = {"JOB_DIRECTORY": normalize_path(job_dir),
+                        "EXPERIMENT_DIR": normalize_path(platform.get_directory(experiment)),
+                        "EXPERIMENT_NAME": experiment.name,
+                        "EXPERIMENT_ID": experiment.id,
+                        "CONTAINER": container_id,
+                        "CREATED": current_datetime}
+        cache.set(experiment.id, new_item)
+        cache.close()
+
+    @classmethod
+    @initialize()
+    def get_job(cls, exp_id: str) -> Dict:
+        """
+        Get job from history.
+        Args:
+            exp_id: Experiment ID
+        Returns:
+            job data in dict
+        """
+        if not is_valid_uuid(exp_id):
+            return None
+
+        cache = cls.history
+        value, expire_time = cache.get(exp_id, expire_time=True)
+        if value is None:
+            if exp_id in list(cache):
+                logger.debug(f"Item {exp_id} expired.")
+            else:
+                logger.debug(f"Item {exp_id} not found.")
+        else:
+            local_expire_time = datetime.fromtimestamp(expire_time) if expire_time else None
+            expire_time_str = local_expire_time.strftime('%Y-%m-%d %H:%M:%S') if local_expire_time else None
+            if expire_time_str:
+                value['EXPIRE'] = expire_time_str
+
+        return value
+
+    @classmethod
+    def get_job_dir(cls, exp_id: str) -> str:
+        """
+        Get job directory from history.
+        Args:
+            exp_id: Experiment ID
+        Returns:
+            job directory
+        """
+        if not is_valid_uuid(exp_id):
+            user_logger.info(f"Invalid item id: {exp_id}")
+            return None
+
+        data = cls.get_job(exp_id)
+        if data is None:
+            return None
+        return data['JOB_DIRECTORY']
+
+    @classmethod
+    @initialize()
+    def get_item_path(cls, item_id: str) -> Tuple:
+        """
+        Get item path from history.
+        Args:
+            item_id: Suite/Experiment/Simulation ID
+        Returns:
+            item path, item type
+        """
+        if not is_valid_uuid(item_id):
+            logger.debug(f"Invalid item id: {item_id}")
+            return
+
+        cache = cls.history
+        item = cache.get(item_id)
+        # Consider Experiment case
+        if item:
+            return Path(item['EXPERIMENT_DIR']), ItemType.EXPERIMENT
+
+        for key in cache:
+            value = cache.get(key)
+            suite_id = value.get('SUITE_ID')
+            exp_dir = value.get('EXPERIMENT_DIR')
+
+            # Consider Suite case
+            if suite_id == item_id:
+                return Path(exp_dir).parent, ItemType.SUITE
+
+            # Consider Simulation case
+            pattern = f'*{item_id}/metadata.json'
+            for meta_file in Path(exp_dir).glob(pattern=pattern):
+                sim_dir = meta_file.parent
+                return sim_dir, ItemType.SIMULATION
+
+        return None
+
+    @classmethod
+    @initialize()
+    def view_history(cls, container_id: str = None) -> List:
+        """
+        View job history.
+        Args:
+            container_id: Container ID
+        Returns:
+            list of job data
+        """
+        cache = cls.history
+        data = []
+        for key in cache:
+            value, expire_time = cache.get(key, expire_time=True)
+            if value is None:
+                if key in list(cache):
+                    user_logger.info(f"Item {key} expired.")
+                else:
+                    user_logger.info(f"Item {key} not found.")
+                continue
+
+            local_expire_time = datetime.fromtimestamp(expire_time) if expire_time else None
+            expire_time_str = local_expire_time.strftime('%Y-%m-%d %H:%M:%S') if local_expire_time else None
+            if expire_time_str:
+                value['EXPIRE'] = expire_time_str
+
+            if container_id is not None:
+                if value['CONTAINER'] == container_id:
+                    data.append(value)
+            else:
+                data.append(value)
+
+        # Sort data by datetime
+        sorted_data = sorted(data, key=lambda x: datetime.strptime(x["CREATED"], "%Y-%m-%d %H:%M:%S"), reverse=True)
+        return sorted_data
+
+    @classmethod
+    @initialize()
+    def delete(cls, exp_id: str) -> NoReturn:
+        """
+        Delete job from history.
+        Args:
+            exp_id: Experiment ID
+        Returns:
+            NoReturn
+        """
+        cache = cls.history
+        cache.pop(exp_id)
+        cache.close()

+    @classmethod
+    @initialize()
+    def expire_history(cls, dt: str = None) -> NoReturn:
+        """
+        Expire job history based on the input expiration time.
+        Args:
+            dt: datetime to expire (format like "2024-07-30 15:12:05")
+        Returns:
+            NoReturn
+        """
+        from datetime import datetime
+        # Parse the datetime string into a datetime object
+        dt_object = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S') if dt else None
+
+        # Convert the datetime object to a timestamp (seconds since epoch)
+        timestamp = dt_object.timestamp() if dt_object else None
+
+        cache = cls.history
+        cache.expire(now=timestamp)
+        cache.close()
+
+    @classmethod
+    @initialize()
+    def clear(cls, container_id: str = None) -> NoReturn:
+        """
+        Clear job history.
+        Args:
+            container_id: Container ID
+        Returns:
+            NoReturn
+        """
+        cache = cls.history
+        if container_id is None:
+            cache.clear()
+        else:
+            for key in cache:
+                value = cache.get(key)
+                if value is None:
+                    user_logger.info(f"key {key} not found in cache")
+                    continue
+                if value['CONTAINER'] == container_id:
+                    cache.delete(key)
+
+        cache.close()
+
+    @classmethod
+    @initialize()
+    def volume(cls) -> NoReturn:
+        """Clear job history."""
+        cache = cls.history
+        return cache.volume()
+
+    @classmethod
+    @initialize()
+    def sync(cls) -> NoReturn:
+        """Sync job history."""
+        cache = cls.history
+        for key in cache:
+            value = cache.get(key)
+            exp_dir = value.get('EXPERIMENT_DIR')
+
+            root = Path(exp_dir)
+            if not root.exists():
+                cache.pop(key)
+                logger.debug(f"Remove job {key} from job history.")
+
+        cache.close()
+
+    @classmethod
+    @initialize()
+    def count(cls, container_id: str = None) -> int:
+        """
+        Count job history.
+        Args:
+            container_id: Container ID
+        Returns:
+            job history count
+        """
+        if container_id is None:
+            return len(cls.history)
+        else:
+            jobs = [key for key in cls.history if cls.history[key]['CONTAINER'] == container_id]
+            return len(jobs)
+
+    @classmethod
+    @initialize()
+    def container_history(cls) -> List:
+        """List of job containers."""
+        cache = cls.history
+        data = {}
+
+        for key in cache:
+            value = cache[key]
+            container_id = value['CONTAINER']
+            if container_id not in data:
+                data[container_id] = []
+            data[container_id].append(key)
+
+        return data
+
+    @classmethod
+    @initialize()
+    def verify_container(cls, container_id) -> bool:
+        """Verify history container."""
+        cache = cls.history
+
+        for key in cache:
+            value = cache[key]
+            if container_id.startswith(value['CONTAINER']):
+                return True
+        return False
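The class above is essentially a thin wrapper around a diskcache.Cache keyed by experiment id. As a point of reference only (not part of this diff), here is a minimal sketch of how JobHistory might be driven from user code; the job directory, container id, and demo task below are hypothetical placeholders:

    from idmtools.core.platform_factory import Platform
    from idmtools.entities.command_task import CommandTask
    from idmtools.entities.experiment import Experiment
    from idmtools_platform_file.tools.job_history import JobHistory

    # Hypothetical job directory and container id, for illustration only
    job_dir = "/data/example_jobs"
    platform = Platform("File", job_directory=job_dir)
    experiment = Experiment.from_task(CommandTask(command="python --version"), name="demo")
    experiment.run(platform=platform)

    # Record the run in the local history cache, then query it back by experiment id
    JobHistory.save_job(job_dir=job_dir, container_id="demo-container", experiment=experiment, platform=platform)
    print(JobHistory.get_job_dir(experiment.id))
    for entry in JobHistory.view_history():
        print(entry["EXPERIMENT_ID"], entry["CREATED"])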
idmtools_platform_file/tools/status_report/status_report.py
@@ -0,0 +1,222 @@
+"""
+This is a FilePlatform simulation status utility.
+
+Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
+"""
+import copy
+import json
+from logging import getLogger
+from collections import Counter
+from dataclasses import dataclass, field
+from typing import Dict, List, Tuple, TYPE_CHECKING
+from idmtools.core import ItemType, EntityStatus
+from idmtools.entities.experiment import Experiment
+from idmtools_platform_file.tools.status_report.utils import get_latest_experiment
+from idmtools_platform_file.platform_operations.utils import FILE_MAPS
+
+if TYPE_CHECKING:  # pragma: no cover
+    from idmtools.entities.iplatform import IPlatform
+
+user_logger = getLogger('user')
+
+
+@dataclass(repr=False)
+class StatusReporter:
+    """
+    A class to wrap the functions involved in retrieving simulations status.
+    """
+    platform: 'IPlatform'  # noqa F821
+    scope: Tuple[str, ItemType] = field(default=None)
+
+    _exp: Experiment = field(default=None, init=False, compare=False)
+    _summary: Dict = field(default_factory=dict, init=False, compare=False)
+    _report: Dict = field(default_factory=dict, init=False, compare=False)
+    _pending: List = field(default_factory=list, init=False, compare=False)
+
+    def __post_init__(self):
+        self.initialize()
+
+    def initialize(self) -> None:
+        """
+        Determine the experiment and build dictionary with basic info.
+        Returns:
+            None
+        """
+        if self.scope is not None:
+            item = self.platform.get_item(self.scope[0], self.scope[1])
+            if self.scope[1] == ItemType.SUITE:
+                # Only consider the first experiment
+                self._exp = item.experiments[0]
+            elif self.scope[1] == ItemType.EXPERIMENT:
+                self._exp = item
+            else:
+                raise RuntimeError('Only support Suite/Experiment.')
+        else:
+            exp_dic = get_latest_experiment(self.platform)
+            self._exp = self.platform.get_item(exp_dic['experiment_id'], ItemType.EXPERIMENT)
+            exp_dir = self.platform.get_directory_by_id(exp_dic['experiment_id'], ItemType.EXPERIMENT)
+            last_suite_dir = exp_dir.parent
+
+            user_logger.info('------------------------------')
+            user_logger.info(f'last suite dir: {last_suite_dir}')
+            user_logger.info(f'last experiment dir: {exp_dir}')
+            user_logger.info('------------------------------')
+
+        if self._exp.parent is not None:
+            self._summary = dict(suite=self._exp.parent.id, experiment=self._exp.id,
+                                 job_directory=self.platform.job_directory)
+        else:
+            self._summary = dict(suite=None, experiment=self._exp.id,
+                                 job_directory=self.platform.job_directory)
+
+    def apply_filters(self, status_filter: Tuple[str] = None, sim_filter: Tuple[str] = None,
+                      verbose: bool = True) -> None:
+        """
+        Filter simulations.
+        Args:
+            status_filter: tuple with target status
+            sim_filter: tuple with simulation id
+            verbose: True/False to include simulation directory
+        Returns:
+            None
+        """
+        # Make sure we get the latest status
+        self.platform.refresh_status(self._exp)
+
+        # Filter simulations and format the results
+        _simulations = self._exp.simulations
+        for sim in _simulations:
+            # Apply simulation filter
+            if sim_filter is not None and sim.id not in sim_filter:
+                continue
+
+            sim_dir = self.platform.get_directory(sim)
+            job_status_path = sim_dir.joinpath("job_status.txt")
+            if not job_status_path.exists():
+                self._pending.append(f" {sim.id}")
+                continue
+
+            status = open(job_status_path).read().strip()
+            # Apply status filter
+            if status_filter is not None and status not in status_filter:
+                continue
+
+            # Format the results
+            d = dict(status=status)
+            if verbose:
+                d["WorkDir"] = str(self.platform.get_directory(sim))
+            self._report[sim.id] = d
+
+    @staticmethod
+    def output_definition() -> None:
+        """
+        Output the status definition.
+        Returns:
+            None
+        """
+        file_map = copy.deepcopy(FILE_MAPS)
+        file_map.pop('None', None)
+        user_logger.info('------------------------------')
+        user_logger.info("STATUS DEFINITION")
+        user_logger.info(f"{'0: '.ljust(20)} {file_map['0'].name}")
+        user_logger.info(f"{'-1: '.ljust(20)} {file_map['-1'].name}")
+        user_logger.info(f"{'100: '.ljust(20)} {file_map['100'].name}")
+        user_logger.info('------------------------------')
+
+    def output_summary(self) -> None:
+        """
+        Output suite/experiment id and job directory.
+        Returns:
+            None
+        """
+        if self._summary:
+            if self._summary['suite'] is not None:
+                user_logger.info(f"{'suite: '.ljust(20)} {self._summary['suite']}")
+            user_logger.info(f"{'experiment: '.ljust(20)} {self._summary['experiment']}")
+            user_logger.info(f"{'job directory: '.ljust(20)} {self._summary['job_directory']}")
+
+    def output_results(self, status_filter: Tuple[str] = None, sim_filter: Tuple[str] = None, verbose: bool = True,
+                       display: bool = True):
+        """
+        Output search results.
+        Args:
+            status_filter: status filter
+            sim_filter: simulation filter
+            verbose: True/False
+            display: True/False
+
+        Returns:
+            None
+        """
+        _status_list = [v["status"] for k, v in self._report.items()]
+        _sim_not_run_list = [sim for sim in self._exp.simulations if sim.status == EntityStatus.CREATED]
+        _simulation_count = len(self._exp.simulations)
+
+        # print report
+        user_logger.info(f"{'status filter: '.ljust(20)} {status_filter}")
+        user_logger.info(f"{'sim filter: '.ljust(20)} {sim_filter}")
+        user_logger.info(f"{'verbose: '.ljust(20)} {verbose}")
+        user_logger.info(f"{'display: '.ljust(20)} {display}")
+        user_logger.info(f"{'Simulation Count: '.ljust(20)} {_simulation_count}")
+        user_logger.info(f"{'Match Count: '.ljust(20)} {len(self._report)} ({dict(Counter(_status_list))})")
+        user_logger.info(f"{'Not Running Count: '.ljust(20)} {len(_sim_not_run_list)}")
+
+        if self._exp.status is None:
+            user_logger.info(f'\nExperiment Status: {None}')
+        else:
+            user_logger.info(f'\nExperiment Status: {self._exp.status.name}')
+
+    def output_status_report(self, status_filter: Tuple[str] = None, sim_filter: Tuple[str] = None,
+                             verbose: bool = True, display: bool = True, display_count: int = 20) -> None:
+        """
+        Output simulations status with possible override parameters.
+        Args:
+            status_filter: tuple with target status
+            sim_filter: tuple with simulation id
+            verbose: True/False to include simulation directory
+            display: True/False to print the searched results
+            display_count: how many to print
+        Returns:
+            None
+        """
+        if status_filter is None:
+            status_filter = ('0', '-1', '100')
+
+        self.apply_filters(status_filter, sim_filter, verbose)
+
+        self.output_summary()
+
+        if display:
+            if display_count is None or len(self._report) <= display_count:
+                report_view_dict = self._report
+            else:
+                report_view_dict = dict(list(self._report.items())[0:display_count])
+            user_logger.info(json.dumps(report_view_dict, indent=3))
+
+        self.output_definition()
+
+        if display and len(self._report) > display_count:
+            user_logger.info(f"ONLY DISPLAY {display_count} ITEMS")
+
+        self.output_results(status_filter, sim_filter, verbose, display)
+
+
+def generate_status_report(platform: 'IPlatform', scope: Tuple[str, ItemType] = None, status_filter: Tuple[str] = None,
+                           sim_filter: Tuple[str] = None, verbose: bool = True, display: bool = True,
+                           display_count: int = 20) -> None:
+    """
+    The entry point of status viewer.
+    Args:
+        platform: idmtools Platform
+        scope: the search base
+        status_filter: tuple with target status
+        sim_filter: tuple with simulation id
+        verbose: True/False to include simulation directory
+        display: True/False to print the search results
+        display_count: how many to print
+    Returns:
+        None
+    """
+    sr = StatusReporter(scope=scope, platform=platform)
+    sr.output_status_report(status_filter=status_filter, sim_filter=sim_filter, verbose=verbose, display=display,
+                            display_count=display_count)