hpcflow-new2 0.2.0a160__py3-none-any.whl → 0.2.0a161__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hpcflow/_version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.2.0a160"
+ __version__ = "0.2.0a161"
hpcflow/sdk/app.py CHANGED
@@ -940,7 +940,15 @@ class BaseApp(metaclass=Singleton):
  return self.user_data_hostname_dir / self.known_subs_file_name

  def _format_known_submissions_line(
- self, local_id, workflow_id, submit_time, sub_idx, is_active, wk_path
+ self,
+ local_id,
+ workflow_id,
+ submit_time,
+ sub_idx,
+ is_active,
+ wk_path,
+ start_time,
+ end_time,
  ):
  line = [
  str(local_id),
@@ -949,20 +957,31 @@ class BaseApp(metaclass=Singleton):
  str(sub_idx),
  submit_time,
  str(wk_path),
+ start_time,
+ end_time,
  ]
  return self._known_subs_file_sep.join(line) + "\n"

  def _parse_known_submissions_line(self, line: str) -> Dict:
- local_id, workflow_id, is_active, sub_idx, submit_time, path_i = line.split(
- self._known_subs_file_sep, maxsplit=5
- )
+ (
+ local_id,
+ workflow_id,
+ is_active,
+ sub_idx,
+ submit_time,
+ path_i,
+ start_time,
+ end_time,
+ ) = line.split(self._known_subs_file_sep, maxsplit=7)
  item = {
  "local_id": int(local_id),
  "workflow_id": workflow_id,
  "is_active": bool(int(is_active)),
- "submit_time": submit_time,
  "sub_idx": int(sub_idx),
- "path": path_i.strip(),
+ "submit_time": submit_time,
+ "path": path_i,
+ "start_time": start_time,
+ "end_time": end_time.strip(),
  }
  return item

@@ -1016,6 +1035,8 @@ class BaseApp(metaclass=Singleton):
  submit_time=sub_time,
  sub_idx=sub_idx,
  wk_path=wk_path,
+ start_time="",
+ end_time="",
  )
  with self.known_subs_file_path.open("at", newline="\n") as fh:
  # TODO: check wk_path is an absolute path? what about if a remote fsspec path?
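A known-submissions record therefore now carries eight separator-delimited fields (local ID, workflow ID, active flag, submission index, submit time, workflow path, start time, end time), with the start/end fields written empty at registration, as in the hunk above. The following is a minimal, self-contained sketch of the round trip implied by the format/parse methods earlier in this file; the separator value and the example field values are assumptions for illustration only (the real separator is `BaseApp._known_subs_file_sep`, which is not shown in this diff), and the field order follows the on-disk record implied by the parse method, not the real method's argument order.

KNOWN_SUBS_SEP = "\t"  # assumed separator, purely for this example

def format_line(local_id, workflow_id, is_active, sub_idx, submit_time,
                path, start_time="", end_time=""):
    # serialise in on-disk field order; the active flag is stored as "0"/"1"
    fields = [str(local_id), workflow_id, str(int(is_active)), str(sub_idx),
              submit_time, str(path), start_time, end_time]
    return KNOWN_SUBS_SEP.join(fields) + "\n"

def parse_line(line):
    (local_id, workflow_id, is_active, sub_idx, submit_time,
     path, start_time, end_time) = line.split(KNOWN_SUBS_SEP, maxsplit=7)
    return {
        "local_id": int(local_id),
        "workflow_id": workflow_id,
        "is_active": bool(int(is_active)),
        "sub_idx": int(sub_idx),
        "submit_time": submit_time,
        "path": path,
        "start_time": start_time,
        "end_time": end_time.strip(),  # the final field carries the trailing newline
    }

line = format_line(0, "wf_abc123", True, 0, "2023-05-01 12:00:00", "/path/to/workflow")
assert parse_line(line)["is_active"] is True
assert parse_line(line)["end_time"] == ""  # not yet recorded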
@@ -1027,16 +1048,21 @@ class BaseApp(metaclass=Singleton):
  return next_id

  @TimeIt.decorator
- def set_inactive_in_known_subs_file(self, inactive_IDs: List[int]):
- """Set workflows in the known-submissions file to the non-running state.
+ def update_known_subs_file(
+ self,
+ inactive_IDs: List[int],
+ start_times: Dict[int, str],
+ end_times: Dict[int, str],
+ ):
+ """Update submission records in the known-submission file.

  Note we aim for atomicity to help with the scenario where a new workflow
  submission is adding itself to the file at the same time as we have decided an
  existing workflow should no longer be part of this file. Ideally, such a scenario
  should not arise because both operations should only ever be interactively
- initiated by the single user (`Workflow.submit` and `App.get_known_submissions`). If this
- operation is atomic, then at least the known-submissions file should be left in a
- usable (but inaccurate) state.
+ initiated by the single user (`Workflow.submit` and `App.get_known_submissions`).
+ If this operation is atomic, then at least the known-submissions file should be
+ left in a usable (but inaccurate) state.

  Returns
  -------
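The docstring above stresses that the rewrite should be atomic, so a concurrent `Workflow.submit` appending to the file cannot leave it unusable. The rewrite mechanism itself lies outside this hunk; purely as an illustration (not necessarily what `BaseApp` actually does), a common way to achieve an atomic whole-file rewrite is to write the new contents to a temporary file in the same directory and then replace the original in a single step:

import os
from pathlib import Path
from tempfile import NamedTemporaryFile

def atomic_rewrite(path: Path, new_lines):
    # Illustrative only: write to a temporary file alongside the target, then
    # os.replace it over the original (atomic on POSIX, and on Windows when the
    # source and destination are on the same volume).
    with NamedTemporaryFile("w", dir=path.parent, delete=False, newline="\n") as tmp:
        tmp.writelines(new_lines)
        tmp_name = tmp.name
    os.replace(tmp_name, path)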
@@ -1066,30 +1092,41 @@ class BaseApp(metaclass=Singleton):
  continue
  item = self._parse_known_submissions_line(line)
  line_IDs.append(item["local_id"])
- is_active = item["is_active"]
+ shows_as_active = item["is_active"]
+ is_inactive = item["local_id"] in inactive_IDs
+ start_time = item["start_time"] or start_times.get(item["local_id"], "")
+ end_time = item["end_time"] or end_times.get(item["local_id"], "")
+
+ update_inactive = is_inactive and shows_as_active
+ update_start = item["local_id"] in start_times
+ update_end = item["local_id"] in end_times

- if item["local_id"] in inactive_IDs and is_active:
- # need to modify to set as inactive:
- non_run_line = self._format_known_submissions_line(
+ if update_inactive or update_start or update_end:
+
+ updated = self._format_known_submissions_line(
  local_id=item["local_id"],
  workflow_id=item["workflow_id"],
- is_active=False,
+ is_active=not is_inactive,
  submit_time=item["submit_time"],
  sub_idx=item["sub_idx"],
  wk_path=item["path"],
+ start_time=start_time,
+ end_time=end_time,
  )
- new_lines.append(non_run_line)
- is_active = False
+ new_lines.append(updated)
+
  self.submission_logger.debug(
- f"will set the following (workflow, submission) from the "
- f"known-submissions file to inactive: "
+ f"Updating (workflow, submission) from the known-submissions file: "
+ f"{'set to inactive; ' if update_inactive else ''}"
+ f"{f'set start_time: {start_time!r}; ' if update_start else ''}"
+ f"{f'set end_time: {end_time!r}; ' if update_end else ''}"
  f"({item['path']}, {item['sub_idx']})"
  )
  else:
  # leave this one alone:
  new_lines.append(line + "\n")

- if not is_active:
+ if is_inactive:
  line_date[ln_idx] = item["submit_time"]

  ld_srt_idx = list(dict(sorted(line_date.items(), key=lambda i: i[1])).keys())
@@ -1644,6 +1681,10 @@ class BaseApp(metaclass=Singleton):

  out = []
  inactive_IDs = []
+ start_times = {}
+ end_times = {}
+
+ ts_fmt = self._submission_ts_fmt

  try:
  if status:
@@ -1658,14 +1699,31 @@ class BaseApp(metaclass=Singleton):
  # loop in reverse so we process more-recent submissions first:
  for file_dat_i in known_subs[::-1]:
  submit_time_str = file_dat_i["submit_time"]
- submit_time_obj = datetime.strptime(submit_time_str, self._submission_ts_fmt)
+ submit_time_obj = datetime.strptime(submit_time_str, ts_fmt)
  submit_time_obj = submit_time_obj.replace(tzinfo=timezone.utc).astimezone()
+
+ start_time_str = file_dat_i["start_time"]
+ start_time_obj = None
+ if start_time_str:
+ start_time_obj = datetime.strptime(start_time_str, ts_fmt)
+ start_time_obj = start_time_obj.replace(tzinfo=timezone.utc).astimezone()
+
+ end_time_str = file_dat_i["end_time"]
+ end_time_obj = None
+ if end_time_str:
+ end_time_obj = datetime.strptime(end_time_str, ts_fmt)
+ end_time_obj = end_time_obj.replace(tzinfo=timezone.utc).astimezone()
+
  out_item = {
  "local_id": file_dat_i["local_id"],
  "workflow_id": file_dat_i["workflow_id"],
  "workflow_path": file_dat_i["path"],
  "submit_time": submit_time_str,
  "submit_time_obj": submit_time_obj,
+ "start_time": start_time_str,
+ "start_time_obj": start_time_obj,
+ "end_time": end_time_str,
+ "end_time_obj": end_time_obj,
  "sub_idx": file_dat_i["sub_idx"],
  "jobscripts": [],
  "active_jobscripts": {},
@@ -1683,6 +1741,8 @@ class BaseApp(metaclass=Singleton):
  if status:
  status.update(f"Inspecting workflow {file_dat_i['path']!r}.")
  wk_i = self.Workflow(file_dat_i["path"])
+ except KeyboardInterrupt:
+ raise
  except Exception:
  wk_i = None
  self.submission_logger.info(
@@ -1720,10 +1780,24 @@ class BaseApp(metaclass=Singleton):
  {
  "jobscripts": all_jobscripts,
  "submission": sub,
- "sub_start_time": sub.start_time,
- "sub_end_time": sub.end_time,
  }
  )
+ if not out_item["start_time"]:
+ start_time_obj = sub.start_time
+ if start_time_obj:
+ start_time = datetime.strftime(start_time_obj, ts_fmt)
+ out_item["start_time"] = start_time
+ start_times[file_dat_i["local_id"]] = start_time
+ out_item["start_time_obj"] = start_time_obj
+
+ if not out_item["end_time"]:
+ end_time_obj = sub.end_time
+ if end_time_obj:
+ end_time = datetime.strftime(end_time_obj, ts_fmt)
+ out_item["end_time"] = end_time
+ end_times[file_dat_i["local_id"]] = end_time
+ out_item["end_time_obj"] = end_time_obj
+
  if file_dat_i["is_active"]:
  # check it really is active:
  run_key = (file_dat_i["path"], file_dat_i["sub_idx"])
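Timestamps in the known-submissions file are stored as naive UTC strings: they are produced with `datetime.strftime(..., ts_fmt)` when first recorded (as in the hunk above), and parsed back with `strptime` then localised via `replace(tzinfo=timezone.utc).astimezone()` in the earlier hunk. A small standalone sketch of that round trip, using `"%Y-%m-%d %H:%M:%S"` purely as a stand-in for `_submission_ts_fmt`, whose actual value is not shown in this diff:

from datetime import datetime, timezone

TS_FMT = "%Y-%m-%d %H:%M:%S"  # assumed stand-in for BaseApp._submission_ts_fmt

def store_utc(dt_obj, ts_fmt=TS_FMT):
    # format an (assumed UTC) datetime for storage, as done with sub.start_time above
    return datetime.strftime(dt_obj, ts_fmt)

def load_local(ts_str, ts_fmt=TS_FMT):
    # parse a stored naive-UTC timestamp and convert to the local timezone;
    # an empty field means the event has not been recorded yet
    if not ts_str:
        return None
    obj = datetime.strptime(ts_str, ts_fmt)
    return obj.replace(tzinfo=timezone.utc).astimezone()

stored = store_utc(datetime(2023, 5, 1, 12, 0, 0))  # "2023-05-01 12:00:00"
print(load_local(stored))  # e.g. 2023-05-01 13:00:00+01:00 when local time is BST
print(load_local(""))      # None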
@@ -1732,6 +1806,8 @@ class BaseApp(metaclass=Singleton):
  else:
  try:
  act_i_js = sub.get_active_jobscripts(as_json=as_json)
+ except KeyboardInterrupt:
+ raise
  except Exception:
  self.submission_logger.info(
  f"failed to retrieve active jobscripts from workflow "
@@ -1754,8 +1830,10 @@ class BaseApp(metaclass=Singleton):

  out.append(out_item)

- if inactive_IDs and not no_update:
- removed_IDs = self.set_inactive_in_known_subs_file(inactive_IDs)
+ if (inactive_IDs or start_times or end_times) and not no_update:
+ removed_IDs = self.update_known_subs_file(
+ inactive_IDs, start_times, end_times
+ )
  # remove these from the output, to avoid confusion (if kept, they would not
  # appear in the next invocation of this method):
  out = [i for i in out if i["local_id"] not in removed_IDs]
@@ -1769,7 +1847,7 @@ class BaseApp(metaclass=Singleton):
  out_access = sorted(
  out_access,
  key=lambda i: (
- i["sub_end_time"] or i["sub_start_time"] or i["submit_time_obj"]
+ i["end_time_obj"] or i["start_time_obj"] or i["submit_time_obj"]
  ),
  reverse=True,
  )
@@ -1829,6 +1907,7 @@ class BaseApp(metaclass=Singleton):
  )
  rich_print(group)

+ @TimeIt.decorator
  def _show(
  self,
  max_recent: int = 3,
@@ -1901,7 +1980,7 @@ class BaseApp(metaclass=Singleton):
  no_update=no_update,
  status=status,
  )
- except Exception:
+ except (Exception, KeyboardInterrupt):
  status.stop()
  raise
  else:
@@ -1952,8 +2031,8 @@ class BaseApp(metaclass=Singleton):

  start_time, end_time = None, None
  if not no_access:
- start_time = dat_i["submission"].start_time
- end_time = dat_i["submission"].end_time if not act_js else None
+ start_time = dat_i["start_time_obj"]
+ end_time = dat_i["end_time_obj"]

  if "actions" in columns:
  if not no_access:
hpcflow/sdk/core/command_files.py CHANGED
@@ -144,10 +144,9 @@ class InputFileGenerator(JSONLike):
  self.app.ActionRule.check_missing(f"input_files.{self.input_file.label}")
  ] + self.rules

- def compose_source(self, action) -> str:
+ def compose_source(self, snip_path) -> str:
  """Generate the file contents of this input file generator source."""

- snip_path = action.get_snippet_script_path(self.script)
  script_main_func = snip_path.stem
  with snip_path.open("rt") as fp:
  script_str = fp.read()
@@ -190,9 +189,14 @@ class InputFileGenerator(JSONLike):
  return out

  def write_source(self, action):
- script_path = action.get_script_name(self.script)
- with Path(script_path).open("wt", newline="\n") as fp:
- fp.write(self.compose_source(action))
+
+ # write the script if it is specified as a snippet script, otherwise we assume
+ # the script already exists in the working directory:
+ snip_path = action.get_snippet_script_path(self.script)
+ if snip_path:
+ source_str = self.compose_source(snip_path)
+ with Path(snip_path.name).open("wt", newline="\n") as fp:
+ fp.write(source_str)


  @dataclass
@@ -284,14 +288,13 @@ class OutputFileParser(JSONLike):
  for i in self.output_files
  ] + self.rules

- def compose_source(self, action) -> str:
+ def compose_source(self, snip_path) -> str:
  """Generate the file contents of this output file parser source."""

  if self.output is None:
  # might be used just for saving files:
  return

- snip_path = action.get_snippet_script_path(self.script)
  script_main_func = snip_path.stem
  with snip_path.open("rt") as fp:
  script_str = fp.read()
@@ -343,9 +346,14 @@ class OutputFileParser(JSONLike):
  if self.output is None:
  # might be used just for saving files:
  return
- script_path = action.get_script_name(self.script)
- with Path(script_path).open("wt", newline="\n") as fp:
- fp.write(self.compose_source(action))
+
+ # write the script if it is specified as a snippet script, otherwise we assume
+ # the script already exists in the working directory:
+ snip_path = action.get_snippet_script_path(self.script)
+ if snip_path:
+ source_str = self.compose_source(snip_path)
+ with Path(snip_path.name).open("wt", newline="\n") as fp:
+ fp.write(source_str)


  class _FileContentsSpecifier(JSONLike):
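In both `InputFileGenerator` and `OutputFileParser`, `write_source` now writes a file only when `action.get_snippet_script_path` resolves the script to a snippet path; a falsy result means the script is assumed to already exist in the working directory, and nothing is written. The sketch below is a minimal, hedged illustration of that shared control flow with a stubbed action object; the stub, its snippet directory, and the trivial `compose_source` are illustrative only (only `get_snippet_script_path` mirrors the real API used above, and the real source composition is more involved).

from pathlib import Path

class StubAction:
    # Illustrative stand-in for the real Action: resolves a snippet script to a
    # path under a snippet directory, or returns None when the script is not a
    # snippet (i.e. it is assumed to already be in the working directory).
    def __init__(self, snippet_dir):
        self.snippet_dir = Path(snippet_dir)

    def get_snippet_script_path(self, script):
        return self.snippet_dir / script if script else None

def compose_source(snip_path):
    # trivial stand-in for the real compose_source, which builds a wrapper
    # around the snippet's main function (named after the file stem)
    return snip_path.read_text() + f"\n# main function: {snip_path.stem}\n"

def write_source(action, script):
    # mirror of the new behaviour: only a resolved snippet script is written,
    # into the working directory, under just the snippet's file name
    snip_path = action.get_snippet_script_path(script)
    if snip_path:
        with Path(snip_path.name).open("wt", newline="\n") as fp:
            fp.write(compose_source(snip_path))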
hpcflow/tests/unit/test_submission.py CHANGED
@@ -537,6 +537,7 @@ def test_scheduler_config_defaults(new_null_config, tmp_path):
  wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
  wk = hf.Workflow.from_template(
  template=wkt,
+ path=tmp_path,
  )
  sub = wk.add_submission()
  assert sub.jobscripts[0].resources.scheduler_args == {"options": {"a": "c"}}
{hpcflow_new2-0.2.0a160.dist-info → hpcflow_new2-0.2.0a161.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: hpcflow-new2
- Version: 0.2.0a160
+ Version: 0.2.0a161
  Summary: Computational workflow management
  License: MIT
  Author: aplowman
{hpcflow_new2-0.2.0a160.dist-info → hpcflow_new2-0.2.0a161.dist-info}/RECORD RENAMED
@@ -1,7 +1,7 @@
  hpcflow/__init__.py,sha256=WIETuRHeOp2SqUqHUzpjQ-lk9acbYv-6aWOhZPRdlhs,64
  hpcflow/__pyinstaller/__init__.py,sha256=YOzBlPSck6slucv6lJM9K80JtsJWxXRL00cv6tRj3oc,98
  hpcflow/__pyinstaller/hook-hpcflow.py,sha256=SeMopsPkhCyd9gqIrzwFNRj3ZlkUlUYl-74QYz61mo4,1089
- hpcflow/_version.py,sha256=SLYvEGJeZwqDNbRaoPhsYZ7OzpYUUDy4We22ieDRFfQ,26
+ hpcflow/_version.py,sha256=f61tdocu5S9MzLcsCrrQ4yVYVvCmCf9Z86QwA-46gcI,26
  hpcflow/app.py,sha256=d-kgfnZNlqlCi2H8bK26714brD_u3ibN3FaEZgjF9aA,1332
  hpcflow/cli.py,sha256=G2J3D9v6MnMWOWMMWK6UEKLn_6wnV9lT_qygEBBxg-I,66
  hpcflow/data/demo_data_manifest/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -34,7 +34,7 @@ hpcflow/data/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3
  hpcflow/data/workflows/workflow_1.yaml,sha256=lF7Re2SVc_5gQk5AwB0gXaq-n-T5ia4su3zNQ9oMRV0,220
  hpcflow/examples.ipynb,sha256=cLKp4QsxwwMXRanDnfWY9kqsV23q6G4raOpu6IZXnMw,28553
  hpcflow/sdk/__init__.py,sha256=SdzVm7dydLv2kmr2tqrH14Gf1GEAEhsEuAuqiGBLHhM,5700
- hpcflow/sdk/app.py,sha256=tDcEaT_YWLnFy4Wi4UIwTxwvHO3F80Gxc1yd1tnmDc0,92880
+ hpcflow/sdk/app.py,sha256=7f7jR3eX9iVQ69YwPOoOmeJaAFmwJcj24UIVkshzHpA,95754
  hpcflow/sdk/cli.py,sha256=y6eRg36D_IzIljc9TMvmm69NqX2zZoKUnBufX9UbN1U,35025
  hpcflow/sdk/cli_common.py,sha256=kDSIe90mxD37lQqvIXDyRauLtTRRnNueSljcRPF_w0M,4738
  hpcflow/sdk/config/__init__.py,sha256=qJrrxcAN4f1u_RyTtXgz-xlTLwNafE9v0VEMP1x6-bU,70
@@ -45,7 +45,7 @@ hpcflow/sdk/config/config_file.py,sha256=JlMcprj0aujFVk8552ahP2f8EXB0tglMaHwzbcG
  hpcflow/sdk/config/errors.py,sha256=2D7HJ1dbyeoD3xk4MuaGSsbJsUyQzyw8kaThEBZfP2I,6876
  hpcflow/sdk/core/__init__.py,sha256=GcIklEsXy3M5PWpmxyhd2KoI0u6HjXRIjD_aR1bgRjo,215
  hpcflow/sdk/core/actions.py,sha256=539vu9ts5u5Poah6-ZGOEANrb58NPEbhhoVlS7ajayE,74544
- hpcflow/sdk/core/command_files.py,sha256=yxTQnxVO0q3qfp1nEy-FSSWzHFSdhnbtQ6YvcjJX5UQ,18181
+ hpcflow/sdk/core/command_files.py,sha256=mo7JzKko2WQ_DOFbJFyuvnnRN3pj1hiqPe7tRNSNDV4,18515
  hpcflow/sdk/core/commands.py,sha256=-Tiu7zVVwWr1xiTXVB9oH3E4g09ebRRtHSRrMdFDCRY,12060
  hpcflow/sdk/core/element.py,sha256=ksNVQcQX_-IdofAsc4Us3Mw_rEMLO8D5zOM8Cmajaw8,45669
  hpcflow/sdk/core/environment.py,sha256=DGUz1NvliKh6opP0IueGHD69rn_8wFLhDsq6kAmEgM4,4849
@@ -137,7 +137,7 @@ hpcflow/tests/unit/test_runtime.py,sha256=HjHPTS3UkX1LcwheFgpp4px_VlRis8KAE2Hoeq
  hpcflow/tests/unit/test_schema_input.py,sha256=spkTtvNuheh-y29Tsx7YRX6y3dV80vXx0hcg0jVfMp4,12084
  hpcflow/tests/unit/test_shell.py,sha256=FDtQ9fHRhSKiVtxMJ8BRisoeSvvk8zmJndTB4LlhqGc,3442
  hpcflow/tests/unit/test_slurm.py,sha256=ewfNuXXUEEelAxcd7MBbAQ-RCvU8xBenHTAyfXYF-R0,1064
- hpcflow/tests/unit/test_submission.py,sha256=fPemvWs7rMelKW_2ctEUUjnckGQFXgDzlFRYVY19eJs,16659
+ hpcflow/tests/unit/test_submission.py,sha256=kQ3ksjGlfp47AYuwTA27RDX2XxRU3YxKlKC1ACTbXw8,16682
  hpcflow/tests/unit/test_task.py,sha256=94TwyjlhKMRRXTQjys2a1PiK7A-rCzhnvrkk4vRz39I,70000
  hpcflow/tests/unit/test_task_schema.py,sha256=7a7o42gQhrZPMXfH0a6sGzFCJnuFrbDEl9u3u_bFsgw,3624
  hpcflow/tests/unit/test_utils.py,sha256=JMhSRZFqmI9ZhREJet9en_y3aRVlQlWE7OKpkdt8SVI,14172
@@ -147,7 +147,7 @@ hpcflow/tests/unit/test_workflow_template.py,sha256=EItRqUyXpU2z_z1rvpRqa848YOkX
  hpcflow/tests/workflows/test_jobscript.py,sha256=9sp1o0g72JZbv2QlOl5v7wCZEFjotxiIKGNUxVaFgaA,724
  hpcflow/tests/workflows/test_workflows.py,sha256=xai6FRtGqG4lStJk6KmsqPUSuvqs9FrsBOxMVALshIs,13400
  hpcflow/viz_demo.ipynb,sha256=1QdnVsk72vihv2L6hOGyk318uEa22ZSgGxQCa7hW2oo,6238
- hpcflow_new2-0.2.0a160.dist-info/METADATA,sha256=PiPRoSuisn-l79v7FIky-jd5I2RpwzBE2eJ9kzPNuR0,2473
- hpcflow_new2-0.2.0a160.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
- hpcflow_new2-0.2.0a160.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
- hpcflow_new2-0.2.0a160.dist-info/RECORD,,
+ hpcflow_new2-0.2.0a161.dist-info/METADATA,sha256=Fq-m5tYcROogOvrwnQds7tMF1xN_r_fza4kYOoK9O0Q,2473
+ hpcflow_new2-0.2.0a161.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
+ hpcflow_new2-0.2.0a161.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
+ hpcflow_new2-0.2.0a161.dist-info/RECORD,,