hpcflow-new2 0.2.0a158__py3-none-any.whl → 0.2.0a160__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. hpcflow/_version.py +1 -1
  2. hpcflow/app.py +0 -3
  3. hpcflow/sdk/__init__.py +2 -0
  4. hpcflow/sdk/app.py +91 -18
  5. hpcflow/sdk/cli.py +18 -0
  6. hpcflow/sdk/cli_common.py +16 -0
  7. hpcflow/sdk/config/config.py +0 -4
  8. hpcflow/sdk/core/actions.py +20 -7
  9. hpcflow/sdk/core/command_files.py +4 -4
  10. hpcflow/sdk/core/element.py +15 -16
  11. hpcflow/sdk/core/rule.py +2 -0
  12. hpcflow/sdk/core/run_dir_files.py +63 -0
  13. hpcflow/sdk/core/task.py +34 -35
  14. hpcflow/sdk/core/utils.py +37 -15
  15. hpcflow/sdk/core/workflow.py +147 -49
  16. hpcflow/sdk/data/config_schema.yaml +0 -6
  17. hpcflow/sdk/demo/cli.py +12 -0
  18. hpcflow/sdk/log.py +2 -2
  19. hpcflow/sdk/persistence/base.py +142 -12
  20. hpcflow/sdk/persistence/json.py +84 -63
  21. hpcflow/sdk/persistence/pending.py +21 -7
  22. hpcflow/sdk/persistence/utils.py +2 -1
  23. hpcflow/sdk/persistence/zarr.py +143 -108
  24. hpcflow/sdk/runtime.py +0 -12
  25. hpcflow/sdk/submission/jobscript.py +25 -4
  26. hpcflow/sdk/submission/schedulers/sge.py +3 -0
  27. hpcflow/sdk/submission/schedulers/slurm.py +3 -0
  28. hpcflow/sdk/submission/shells/bash.py +2 -2
  29. hpcflow/sdk/submission/shells/powershell.py +2 -2
  30. hpcflow/sdk/submission/submission.py +24 -7
  31. hpcflow/tests/scripts/test_main_scripts.py +40 -0
  32. hpcflow/tests/unit/test_utils.py +28 -0
  33. {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/METADATA +1 -2
  34. {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/RECORD +36 -35
  35. {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/WHEEL +0 -0
  36. {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/workflow.py CHANGED
@@ -492,6 +492,7 @@ class Workflow:
         ts_fmt: Optional[str] = None,
         ts_name_fmt: Optional[str] = None,
         store_kwargs: Optional[Dict] = None,
+        status: Optional[Any] = None,
     ) -> app.Workflow:
         """Generate from a `WorkflowTemplate` object.
 
@@ -521,22 +522,38 @@ class Workflow:
         store_kwargs
             Keyword arguments to pass to the store's `write_empty_workflow` method.
         """
-        wk = cls._write_empty_workflow(
-            template=template,
-            path=path,
-            name=name,
-            overwrite=overwrite,
-            store=store,
-            ts_fmt=ts_fmt,
-            ts_name_fmt=ts_name_fmt,
-            store_kwargs=store_kwargs,
-        )
-        with wk._store.cached_load():
-            with wk.batch_update(is_workflow_creation=True):
-                for task in template.tasks:
-                    wk._add_task(task)
-                for loop in template.loops:
-                    wk._add_loop(loop)
+        if status:
+            status.update("Generating empty workflow...")
+        try:
+            wk = cls._write_empty_workflow(
+                template=template,
+                path=path,
+                name=name,
+                overwrite=overwrite,
+                store=store,
+                ts_fmt=ts_fmt,
+                ts_name_fmt=ts_name_fmt,
+                store_kwargs=store_kwargs,
+            )
+            with wk._store.cached_load():
+                with wk.batch_update(is_workflow_creation=True):
+                    for idx, task in enumerate(template.tasks):
+                        if status:
+                            status.update(
+                                f"Adding task {idx + 1}/{len(template.tasks)} "
+                                f"({task.name!r})..."
+                            )
+                        wk._add_task(task)
+                    for idx, loop in enumerate(template.loops):
+                        if status:
+                            status.update(
+                                f"Adding loop {idx + 1}/" f"{len(template.loops)}..."
+                            )
+                        wk._add_loop(loop)
+        except Exception:
+            if status:
+                status.stop()
+            raise
         return wk
 
     @classmethod
@@ -668,6 +685,7 @@ class Workflow:
         ts_name_fmt: Optional[str] = None,
         store_kwargs: Optional[Dict] = None,
         variables: Optional[Dict[str, str]] = None,
+        status: Optional[Any] = None,
     ) -> app.Workflow:
         """Generate from a JSON file.
 
@@ -712,6 +730,7 @@ class Workflow:
             ts_fmt,
             ts_name_fmt,
             store_kwargs,
+            status,
         )
 
     @classmethod
@@ -726,6 +745,7 @@ class Workflow:
         ts_name_fmt: Optional[str] = None,
         store_kwargs: Optional[Dict] = None,
         variables: Optional[Dict[str, str]] = None,
+        status: Optional[Any] = None,
     ) -> app.Workflow:
         """Generate from a JSON string.
 
@@ -770,6 +790,7 @@ class Workflow:
             ts_fmt,
             ts_name_fmt,
             store_kwargs,
+            status,
         )
 
     @classmethod
@@ -786,6 +807,7 @@ class Workflow:
         ts_name_fmt: Optional[str] = None,
         store_kwargs: Optional[Dict] = None,
         variables: Optional[Dict[str, str]] = None,
+        status: Optional[Any] = None,
     ) -> app.Workflow:
         """Generate from either a YAML or JSON file, depending on the file extension.
 
@@ -821,11 +843,16 @@ class Workflow:
         variables
             String variables to substitute in the file given by `template_path`.
         """
-        template = cls.app.WorkflowTemplate.from_file(
-            template_path,
-            template_format,
-            variables=variables,
-        )
+        try:
+            template = cls.app.WorkflowTemplate.from_file(
+                template_path,
+                template_format,
+                variables=variables,
+            )
+        except Exception:
+            if status:
+                status.stop()
+            raise
         return cls.from_template(
             template,
             path,
@@ -835,6 +862,7 @@ class Workflow:
             ts_fmt,
             ts_name_fmt,
             store_kwargs,
+            status,
         )
 
     @classmethod
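The `status` argument threaded through the `from_*` constructors above accepts any object exposing `update()` and `stop()`, such as a `rich` console status. A minimal usage sketch, assuming `hf` is an application object built on the hpcflow SDK and "workflow.yaml" is a hypothetical template file:

import rich.console

console = rich.console.Console()
status = console.status("Making workflow...")
status.start()
try:
    # `status` is forwarded down to `from_template`, which calls `status.update(...)`
    # as each task and loop is added, and `status.stop()` before re-raising on error.
    wk = hf.Workflow.from_file("workflow.yaml", status=status)  # `hf` is assumed
finally:
    status.stop()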
@@ -1262,7 +1290,6 @@ class Workflow:
     @TimeIt.decorator
     def get_EARs_from_IDs(self, id_lst: Iterable[int]) -> List[app.ElementActionRun]:
         """Return element action run objects from a list of IDs."""
-
         self.app.persistence_logger.debug(f"get_EARs_from_IDs: id_lst={id_lst!r}")
 
         store_EARs = self._store.get_EARs(id_lst)
@@ -1276,6 +1303,10 @@ class Workflow:
         task_IDs = [i.task_ID for i in store_elems]
         store_tasks = self._store.get_tasks_by_IDs(task_IDs)
 
+        # to allow for bulk retrieval of elements/iterations
+        element_idx_by_task = defaultdict(set)
+        iter_idx_by_task_elem = defaultdict(lambda: defaultdict(set))
+
         index_paths = []
         for rn, it, el, tk in zip(store_EARs, store_iters, store_elems, store_tasks):
             act_idx = rn.action_idx
@@ -1291,12 +1322,25 @@ class Workflow:
                     "task_idx": tk.index,
                 }
             )
+            element_idx_by_task[tk.index].add(elem_idx)
+            iter_idx_by_task_elem[tk.index][elem_idx].add(iter_idx)
+
+        # retrieve elements/iterations:
+        iters_by_task_elem = defaultdict(lambda: defaultdict(dict))
+        for task_idx, elem_idx in element_idx_by_task.items():
+            elements = self.tasks[task_idx].elements[list(elem_idx)]
+            for elem_i in elements:
+                elem_i_iters_idx = iter_idx_by_task_elem[task_idx][elem_i.index]
+                elem_iters = [elem_i.iterations[j] for j in elem_i_iters_idx]
+                iters_by_task_elem[task_idx][elem_i.index].update(
+                    dict(zip(elem_i_iters_idx, elem_iters))
+                )
 
         objs = []
         for idx_dat in index_paths:
-            task = self.tasks[idx_dat["task_idx"]]
-            elem = task.elements[idx_dat["elem_idx"]]
-            iter_ = elem.iterations[idx_dat["iter_idx"]]
+            iter_ = iters_by_task_elem[idx_dat["task_idx"]][idx_dat["elem_idx"]][
+                idx_dat["iter_idx"]
+            ]
             run = iter_.actions[idx_dat["action_idx"]].runs[idx_dat["run_idx"]]
             objs.append(run)
 
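The reworked `get_EARs_from_IDs` above replaces one store lookup per run with grouped, bulk retrieval: element indices are collected per task and iteration indices per element, so each group is fetched in a single call. A standalone sketch of that grouping step, using made-up index triples rather than the hpcflow store API:

from collections import defaultdict

# Hypothetical (task_idx, elem_idx, iter_idx) triples, standing in for the values
# collected into `index_paths` in the method above.
index_paths = [(0, 3, 0), (0, 3, 1), (0, 7, 0), (1, 2, 0)]

element_idx_by_task = defaultdict(set)
iter_idx_by_task_elem = defaultdict(lambda: defaultdict(set))

for task_idx, elem_idx, iter_idx in index_paths:
    element_idx_by_task[task_idx].add(elem_idx)
    iter_idx_by_task_elem[task_idx][elem_idx].add(iter_idx)

# One bulk element lookup per task, and one bulk iteration lookup per element,
# instead of one lookup per run:
for task_idx, elem_indices in element_idx_by_task.items():
    print(task_idx, sorted(elem_indices), dict(iter_idx_by_task_elem[task_idx]))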
@@ -1580,6 +1624,13 @@ class Workflow:
         id_lst = list(range(num_params))
         return self._store.get_parameters(id_lst, **kwargs)
 
+    @TimeIt.decorator
+    def get_all_parameter_sources(self, **kwargs: Dict) -> List[Dict]:
+        """Retrieve all store parameters."""
+        num_params = self._store._get_num_total_parameters()
+        id_lst = list(range(num_params))
+        return self._store.get_parameter_sources(id_lst, **kwargs)
+
     @TimeIt.decorator
     def get_all_parameter_data(self, **kwargs: Dict) -> Dict[int, Any]:
         """Retrieve all workflow parameter data."""
@@ -1729,6 +1780,7 @@ class Workflow:
1729
1780
  return self._store._get_num_total_elem_iters()
1730
1781
 
1731
1782
  @property
1783
+ @TimeIt.decorator
1732
1784
  def num_EARs(self):
1733
1785
  return self._store._get_num_total_EARs()
1734
1786
 
@@ -1758,10 +1810,14 @@ class Workflow:
1758
1810
  return Path(self.path) / self._exec_dir_name
1759
1811
 
1760
1812
  @TimeIt.decorator
1761
- def get_task_elements(self, task: app.Task, selection: slice) -> List[app.Element]:
1813
+ def get_task_elements(
1814
+ self,
1815
+ task: app.Task,
1816
+ idx_lst: Optional[List[int]] = None,
1817
+ ) -> List[app.Element]:
1762
1818
  return [
1763
1819
  self.app.Element(task=task, **{k: v for k, v in i.items() if k != "task_ID"})
1764
- for i in self._store.get_task_elements(task.insert_ID, selection)
1820
+ for i in self._store.get_task_elements(task.insert_ID, idx_lst)
1765
1821
  ]
1766
1822
 
1767
1823
  def set_EAR_submission_index(self, EAR_ID: int, sub_idx: int) -> None:
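With the signature change above, `get_task_elements` now takes an explicit list of element indices rather than a slice, with `None` meaning all elements. A brief sketch, assuming `wk` is an existing workflow with at least one task:

task = wk.tasks[0]

# Fetch specific elements of the task by index...
some_elements = wk.get_task_elements(task, idx_lst=[0, 2, 5])

# ...or omit `idx_lst` to fetch every element of the task.
all_elements = wk.get_task_elements(task)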
@@ -1928,6 +1984,7 @@ class Workflow:
1928
1984
  for element in task.elements[:]:
1929
1985
  yield element
1930
1986
 
1987
+ @TimeIt.decorator
1931
1988
  def get_iteration_task_pathway(self):
1932
1989
  pathway = []
1933
1990
  for task in self.tasks:
@@ -1951,9 +2008,10 @@ class Workflow:
1951
2008
 
1952
2009
  return pathway
1953
2010
 
2011
+ @TimeIt.decorator
1954
2012
  def _submit(
1955
2013
  self,
1956
- status,
2014
+ status: Optional[Any] = None,
1957
2015
  ignore_errors: Optional[bool] = False,
1958
2016
  JS_parallelism: Optional[bool] = None,
1959
2017
  print_stdout: Optional[bool] = False,
@@ -1965,10 +2023,12 @@ class Workflow:
1965
2023
  # generate a new submission if there are no pending submissions:
1966
2024
  pending = [i for i in self.submissions if i.needs_submit]
1967
2025
  if not pending:
1968
- status.update("Adding new submission...")
2026
+ if status:
2027
+ status.update("Adding new submission...")
1969
2028
  new_sub = self._add_submission(tasks=tasks, JS_parallelism=JS_parallelism)
1970
2029
  if not new_sub:
1971
- status.stop()
2030
+ if status:
2031
+ status.stop()
1972
2032
  raise ValueError("No pending element action runs to submit!")
1973
2033
  pending = [new_sub]
1974
2034
 
@@ -1978,7 +2038,8 @@ class Workflow:
1978
2038
 
1979
2039
  # for direct execution the submission must be persistent at submit-time, because
1980
2040
  # it will be read by a new instance of the app:
1981
- status.update("Committing to the store...")
2041
+ if status:
2042
+ status.update("Committing to the store...")
1982
2043
  self._store._pending.commit_all()
1983
2044
 
1984
2045
  # submit all pending submissions:
@@ -1986,7 +2047,8 @@ class Workflow:
1986
2047
  submitted_js = {}
1987
2048
  for sub in pending:
1988
2049
  try:
1989
- status.update(f"Preparing submission {sub.index}...")
2050
+ if status:
2051
+ status.update(f"Preparing submission {sub.index}...")
1990
2052
  sub_js_idx = sub.submit(
1991
2053
  status=status,
1992
2054
  ignore_errors=ignore_errors,
@@ -2008,6 +2070,8 @@ class Workflow:
2008
2070
  add_to_known: Optional[bool] = True,
2009
2071
  return_idx: Optional[bool] = False,
2010
2072
  tasks: Optional[List[int]] = None,
2073
+ cancel: Optional[bool] = False,
2074
+ status: Optional[bool] = True,
2011
2075
  ) -> Dict[int, int]:
2012
2076
  """Submit the workflow for execution.
2013
2077
 
@@ -2034,39 +2098,52 @@ class Workflow:
2034
2098
  List of task indices to include in the new submission if no submissions
2035
2099
  already exist. By default all tasks are included if a new submission is
2036
2100
  created.
2101
+ cancel
2102
+ Immediately cancel the submission. Useful for testing and benchmarking.
2103
+ status
2104
+ If True, display a live status to track submission progress.
2037
2105
  """
2038
2106
 
2039
- console = rich.console.Console()
2040
- status = console.status("Submitting workflow...")
2041
- status.start()
2107
+ if status:
2108
+ console = rich.console.Console()
2109
+ status = console.status("Submitting workflow...")
2110
+ status.start()
2042
2111
 
2043
2112
  with self._store.cached_load():
2044
2113
  if not self._store.is_submittable:
2045
- status.stop()
2114
+ if status:
2115
+ status.stop()
2046
2116
  raise NotImplementedError("The workflow is not submittable.")
2047
2117
  with self.batch_update():
2048
2118
  # commit updates before raising exception:
2049
2119
  try:
2050
- exceptions, submitted_js = self._submit(
2051
- ignore_errors=ignore_errors,
2052
- JS_parallelism=JS_parallelism,
2053
- print_stdout=print_stdout,
2054
- status=status,
2055
- add_to_known=add_to_known,
2056
- tasks=tasks,
2057
- )
2120
+ with self._store.cache_ctx():
2121
+ exceptions, submitted_js = self._submit(
2122
+ ignore_errors=ignore_errors,
2123
+ JS_parallelism=JS_parallelism,
2124
+ print_stdout=print_stdout,
2125
+ status=status,
2126
+ add_to_known=add_to_known,
2127
+ tasks=tasks,
2128
+ )
2058
2129
  except Exception:
2059
- status.stop()
2130
+ if status:
2131
+ status.stop()
2060
2132
  raise
2061
2133
 
2062
2134
  if exceptions:
2063
2135
  msg = "\n" + "\n\n".join([i.message for i in exceptions])
2064
- status.stop()
2136
+ if status:
2137
+ status.stop()
2065
2138
  raise WorkflowSubmissionFailure(msg)
2066
2139
 
2067
- status.stop()
2140
+ if status:
2141
+ status.stop()
2068
2142
 
2069
- if wait:
2143
+ if cancel:
2144
+ self.cancel()
2145
+
2146
+ elif wait:
2070
2147
  self.wait(submitted_js)
2071
2148
 
2072
2149
  if return_idx:
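The new `cancel` and `status` keyword arguments documented above can be combined, for example to measure submission overhead without leaving jobs running and without the live spinner. A sketch assuming `wk` is an existing, submittable workflow:

# Suppress the rich status display and cancel immediately after submitting
# (useful for testing and benchmarking, per the docstring above).
wk.submit(status=False, cancel=True)

# Default behaviour: show a live status and leave the submission running.
wk.submit()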
@@ -2279,6 +2356,7 @@ class Workflow:
2279
2356
  )
2280
2357
  self._abort_run_ID(submission_idx, run.id_)
2281
2358
 
2359
+ @TimeIt.decorator
2282
2360
  def cancel(self, hard=False):
2283
2361
  """Cancel any running jobscripts."""
2284
2362
  for sub in self.submissions:
@@ -2291,6 +2369,7 @@ class Workflow:
2291
2369
  with self.batch_update():
2292
2370
  return self._add_submission(tasks, JS_parallelism)
2293
2371
 
2372
+ @TimeIt.decorator
2294
2373
  def _add_submission(
2295
2374
  self, tasks: Optional[List[int]] = None, JS_parallelism: Optional[bool] = None
2296
2375
  ) -> app.Submission:
@@ -2325,6 +2404,7 @@ class Workflow:
2325
2404
 
2326
2405
  return self.submissions[new_idx]
2327
2406
 
2407
+ @TimeIt.decorator
2328
2408
  def resolve_jobscripts(
2329
2409
  self, tasks: Optional[List[int]] = None
2330
2410
  ) -> List[app.Jobscript]:
@@ -2341,6 +2421,7 @@ class Workflow:
2341
2421
 
2342
2422
  return js_objs
2343
2423
 
2424
+ @TimeIt.decorator
2344
2425
  def _resolve_singular_jobscripts(
2345
2426
  self, tasks: Optional[List[int]] = None
2346
2427
  ) -> Tuple[Dict[int, Dict], Dict]:
@@ -2359,6 +2440,10 @@ class Workflow:
2359
2440
  if not tasks:
2360
2441
  tasks = list(range(self.num_tasks))
2361
2442
 
2443
+ if self._store.use_cache:
2444
+ # pre-cache parameter sources (used in `EAR.get_EAR_dependencies`):
2445
+ self.get_all_parameter_sources()
2446
+
2362
2447
  submission_jobscripts = {}
2363
2448
  all_element_deps = {}
2364
2449
 
@@ -2405,6 +2490,19 @@ class Workflow:
2405
2490
  "resource_hash": res_hash[js_dat["resources"]],
2406
2491
  "dependencies": {},
2407
2492
  }
2493
+
2494
+ all_EAR_IDs = []
2495
+ for js_elem_idx, (elem_idx, act_indices) in enumerate(
2496
+ js_dat["elements"].items()
2497
+ ):
2498
+ for act_idx in act_indices:
2499
+ EAR_ID_i = EAR_map[act_idx, elem_idx].item()
2500
+ all_EAR_IDs.append(EAR_ID_i)
2501
+ js_act_idx = task_actions.index([task.insert_ID, act_idx, 0])
2502
+ js_i["EAR_ID"][js_act_idx][js_elem_idx] = EAR_ID_i
2503
+
2504
+ all_EAR_objs = dict(zip(all_EAR_IDs, self.get_EARs_from_IDs(all_EAR_IDs)))
2505
+
2408
2506
  for js_elem_idx, (elem_idx, act_indices) in enumerate(
2409
2507
  js_dat["elements"].items()
2410
2508
  ):
@@ -2416,7 +2514,7 @@ class Workflow:
2416
2514
  js_i["EAR_ID"][js_act_idx][js_elem_idx] = EAR_ID_i
2417
2515
 
2418
2516
  # get indices of EARs that this element depends on:
2419
- EAR_objs = self.get_EARs_from_IDs(all_EAR_IDs)
2517
+ EAR_objs = [all_EAR_objs[k] for k in all_EAR_IDs]
2420
2518
  EAR_deps = [i.get_EAR_dependencies() for i in EAR_objs]
2421
2519
  EAR_deps_flat = [j for i in EAR_deps for j in i]
2422
2520
  EAR_deps_EAR_idx = [
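The jobscript-resolution change above applies the same fetch-once, index-many idea: every EAR ID needed by the jobscript is gathered first, retrieved with a single `get_EARs_from_IDs` call, and subsequent per-element loops read from the resulting dict. A standalone sketch of the pattern, with a stand-in bulk fetch function:

from typing import Dict, List

def fetch_many(ids: List[int]) -> List[str]:
    # Stand-in for a bulk store read such as `get_EARs_from_IDs`.
    return [f"object-{i}" for i in ids]

all_ids = [10, 11, 15, 10, 11]  # IDs gathered in a first pass
objs_by_id: Dict[int, str] = dict(zip(all_ids, fetch_many(all_ids)))

# Second pass: cheap dictionary lookups instead of repeated store reads.
per_element = [objs_by_id[k] for k in all_ids]
print(per_element)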
hpcflow/sdk/data/config_schema.yaml CHANGED
@@ -2,7 +2,6 @@ rules:
   - path: []
     condition:
       value.allowed_keys:
-        - telemetry
         - machine
         - user_name
         - user_orcid
@@ -24,7 +23,6 @@ rules:
   - path: []
     condition:
       value.required_keys:
-        - telemetry
         - machine
         - task_schema_sources
         - parameter_sources
@@ -54,10 +52,6 @@ rules:
         - save # Config method
         - reload # Config method
 
-  - path: [telemetry]
-    condition:
-      value.type.equal_to: bool
-
   - path: [machine]
     doc: >
       A label that references the current machine. By default, this uses the return from
hpcflow/sdk/demo/cli.py CHANGED
@@ -17,6 +17,9 @@ from hpcflow.sdk.cli_common import (
     add_to_known_opt,
     print_idx_opt,
     tasks_opt,
+    cancel_opt,
+    submit_status_opt,
+    make_status_opt,
 )
 
 
@@ -100,6 +103,7 @@ def get_demo_workflow_CLI(app):
     @ts_fmt_option
     @ts_name_fmt_option
     @variables_option
+    @make_status_opt
     def make_demo_workflow(
         workflow_name,
         format,
@@ -110,6 +114,7 @@ def get_demo_workflow_CLI(app):
         ts_fmt=None,
         ts_name_fmt=None,
         variables=None,
+        status=True,
     ):
         wk = app.make_demo_workflow(
             workflow_name=workflow_name,
@@ -121,6 +126,7 @@ def get_demo_workflow_CLI(app):
             ts_fmt=ts_fmt,
             ts_name_fmt=ts_name_fmt,
             variables=dict(variables),
+            status=status,
         )
         click.echo(wk.path)
 
@@ -139,6 +145,8 @@ def get_demo_workflow_CLI(app):
     @add_to_known_opt
     @print_idx_opt
     @tasks_opt
+    @cancel_opt
+    @submit_status_opt
     def make_and_submit_demo_workflow(
         workflow_name,
         format,
@@ -154,6 +162,8 @@ def get_demo_workflow_CLI(app):
         add_to_known=True,
         print_idx=False,
         tasks=None,
+        cancel=False,
+        status=True,
     ):
         out = app.make_and_submit_demo_workflow(
             workflow_name=workflow_name,
@@ -170,6 +180,8 @@ def get_demo_workflow_CLI(app):
             add_to_known=add_to_known,
             return_idx=print_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )
         if print_idx:
             click.echo(out)
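At the Python level, the two new CLI options map onto the `cancel` and `status` keyword arguments forwarded above. A sketch, assuming `app` is an application object exposing `make_and_submit_demo_workflow` and "demo_workflow" is a hypothetical demo workflow name:

out = app.make_and_submit_demo_workflow(
    workflow_name="demo_workflow",  # hypothetical name, for illustration only
    cancel=True,   # cancel straight after submission (testing/benchmarking)
    status=False,  # suppress the live rich status display
)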
hpcflow/sdk/log.py CHANGED
@@ -97,7 +97,7 @@ class TimeIt:
            max_str = f"{v['max']:10.6f}" if number > 1 else f"{f'-':^12s}"
            stddev_str = f"({v['stddev']:8.6f})" if number > 1 else f"{f' ':^10s}"
            out.append(
-                f"{k_str:.<60s} {v['sum']:12.6f} "
+                f"{k_str:.<80s} {v['sum']:12.6f} "
                f"{v['mean']:10.6f} {stddev_str} {number:8d} "
                f"{min_str} {max_str} "
            )
@@ -114,7 +114,7 @@ class TimeIt:
        summary = cls.summarise()
 
        out = [
-            f"{'function':^60s} {'sum /s':^12s} {'mean (stddev) /s':^20s} {'N':^8s} "
+            f"{'function':^80s} {'sum /s':^12s} {'mean (stddev) /s':^20s} {'N':^8s} "
            f"{'min /s':^12s} {'max /s':^12s}"
        ]
        out += _format_nodes(summary)
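The only change here widens the function-name column of the `TimeIt` summary from 60 to 80 characters, so longer dotted function paths no longer push the numeric columns out of alignment. A self-contained illustration of the `:.<80s` format specifier used above:

name = "hpcflow.sdk.core.workflow.Workflow._resolve_singular_jobscripts"

# `.<80s` left-aligns and pads with dots to a minimum width of 80 characters,
# keeping the numeric columns aligned even for long dotted names.
print(f"{name:.<80s} {1.234567:12.6f}")
print(f"{'short.name':.<80s} {0.000123:12.6f}")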