fractal-server 2.14.4a0__py3-none-any.whl → 2.14.5__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (25)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/runner/exceptions.py +1 -6
  3. fractal_server/app/runner/executors/base_runner.py +16 -4
  4. fractal_server/app/runner/executors/call_command_wrapper.py +52 -0
  5. fractal_server/app/runner/executors/local/runner.py +52 -13
  6. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +87 -52
  7. fractal_server/app/runner/executors/slurm_common/remote.py +47 -92
  8. fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +20 -19
  9. fractal_server/app/runner/executors/slurm_ssh/runner.py +1 -2
  10. fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py +1 -4
  11. fractal_server/app/runner/executors/slurm_sudo/runner.py +3 -11
  12. fractal_server/app/runner/task_files.py +0 -8
  13. fractal_server/app/runner/v2/_slurm_ssh.py +1 -2
  14. fractal_server/app/runner/v2/_slurm_sudo.py +1 -2
  15. fractal_server/app/runner/v2/runner_functions.py +16 -30
  16. fractal_server/app/runner/versions.py +2 -11
  17. fractal_server/config.py +0 -9
  18. fractal_server/ssh/_fabric.py +4 -1
  19. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/METADATA +1 -7
  20. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/RECORD +23 -24
  21. fractal_server/app/runner/executors/slurm_common/utils_executors.py +0 -58
  22. fractal_server/app/runner/v2/runner_functions_low_level.py +0 -122
  23. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/LICENSE +0 -0
  24. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/WHEEL +0 -0
  25. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/executors/slurm_common/remote.py CHANGED
@@ -1,27 +1,10 @@
-# This adapts clusterfutures <https://github.com/sampsyo/clusterfutures>
-# Original Copyright
-# Copyright 2021 Adrian Sampson <asampson@cs.washington.edu>
-# License: MIT
-#
-# Modified by:
-# Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
-# Tommaso Comparin <tommaso.comparin@exact-lab.it>
-#
-# Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
-# University of Zurich
-"""
-This module provides a simple self-standing script that executes arbitrary
-python code received via pickled files on a cluster node.
-"""
 import argparse
+import json
 import logging
 import os
 import sys
-from typing import Literal
-from typing import Union
-
-import cloudpickle
 
+from ..call_command_wrapper import call_command_wrapper
 from fractal_server import __VERSION__
 
 
@@ -33,59 +16,6 @@ class FractalVersionMismatch(RuntimeError):
     pass
 
 
-def _check_versions_mismatch(
-    server_versions: dict[
-        Literal["python", "fractal_server", "cloudpickle"],
-        Union[str, tuple[int]],
-    ]
-):
-    """
-    Compare the server {python,cloudpickle,fractal_server} versions with the
-    ones available to the current worker
-
-    Arguments:
-        server_versions:
-            The version used in the fractal-server instance that created the
-            cloudpickle file
-
-    Raises:
-        FractalVersionMismatch: If the cloudpickle or fractal_server versions
-            do not match with the ones on the server
-    """
-
-    server_python_version = list(server_versions["python"])
-    worker_python_version = list(sys.version_info[:3])
-    if worker_python_version != server_python_version:
-        if worker_python_version[:2] != server_python_version[:2]:
-            # FIXME: Turn this into an error, in some version post 2.14.
-            logging.error(
-                f"{server_python_version=} but {worker_python_version=}. "
-                "This configuration will be deprecated in a future version, "
-                "please contact the admin of this Fractal instance."
-            )
-        else:
-            # Major.minor versions match, patch versions differ
-            logging.warning(
-                f"{server_python_version=} but {worker_python_version=}."
-            )
-
-    server_cloudpickle_version = server_versions["cloudpickle"]
-    worker_cloudpickle_version = cloudpickle.__version__
-    if worker_cloudpickle_version != server_cloudpickle_version:
-        raise FractalVersionMismatch(
-            f"{server_cloudpickle_version=} but "
-            f"{worker_cloudpickle_version=}"
-        )
-
-    server_fractal_server_version = server_versions["fractal_server"]
-    worker_fractal_server_version = __VERSION__
-    if worker_fractal_server_version != server_fractal_server_version:
-        raise FractalVersionMismatch(
-            f"{server_fractal_server_version=} but "
-            f"{worker_fractal_server_version=}"
-        )
-
-
 def worker(
     *,
     in_fname: str,
@@ -95,8 +25,8 @@ def worker(
     Execute a job, possibly on a remote node.
 
     Arguments:
-        in_fname: Absolute path to the input pickle file (must be readable).
-        out_fname: Absolute path of the output pickle file (must be writeable).
+        in_fname: Absolute path to the input file (must be readable).
+        out_fname: Absolute path of the output file (must be writeable).
     """
 
     # Create output folder, if missing
@@ -107,19 +37,49 @@ def worker(
 
     # Execute the job and capture exceptions
     try:
-        with open(in_fname, "rb") as f:
-            indata = f.read()
-        server_versions, fun, args, kwargs = cloudpickle.loads(indata)
-        _check_versions_mismatch(server_versions)
+        with open(in_fname, "r") as f:
+            input_data = json.load(f)
+
+        server_python_version = input_data["python_version"]
+        server_fractal_server_version = input_data["fractal_server_version"]
+
+        # Fractal-server version must be identical
+        worker_fractal_server_version = __VERSION__
+        if worker_fractal_server_version != server_fractal_server_version:
+            raise FractalVersionMismatch(
+                f"{server_fractal_server_version=} but "
+                f"{worker_fractal_server_version=}"
+            )
+
+        # Python version mismatch only raises a warning
+        worker_python_version = tuple(sys.version_info[:3])
+        if worker_python_version != server_python_version:
+            if worker_python_version[:2] != server_python_version[:2]:
+                logging.warning(
+                    f"{server_python_version=} but {worker_python_version=}."
+                )
+
+        # Extract some useful paths
+        metadiff_file_remote = input_data["metadiff_file_remote"]
+        log_path = input_data["log_file_remote"]
+
+        # Execute command
+        full_command = input_data["full_command"]
+        call_command_wrapper(cmd=full_command, log_path=log_path)
+
+        try:
+            with open(metadiff_file_remote, "r") as f:
+                out_meta = json.load(f)
+            result = (True, out_meta)
+        except FileNotFoundError:
+            # Command completed, but it produced no metadiff file
+            result = (True, None)
 
-        result = (True, fun(*args, **kwargs))
-        out = cloudpickle.dumps(result)
     except Exception as e:
         # Exception objects are not serialisable. Here we save the relevant
         # exception contents in a serializable dictionary. Note that whenever
         # the task failed "properly", the exception is a `TaskExecutionError`
         # and it has additional attributes.
-
         import traceback
 
         exc_type, exc_value, traceback_obj = sys.exc_info()
@@ -131,33 +91,28 @@ def worker(
         )
         traceback_string = "".join(traceback_list)
         exc_proxy = dict(
-            exc_type_name=exc_type.__name__,
+            exc_type_name=type(e).__name__,
             traceback_string=traceback_string,
-            workflow_task_order=getattr(e, "workflow_task_order", None),
-            workflow_task_id=getattr(e, "workflow_task_id", None),
-            task_name=getattr(e, "task_name", None),
        )
         result = (False, exc_proxy)
-        out = cloudpickle.dumps(result)
 
-    # Write the output pickle file
-    with open(out_fname, "wb") as f:
-        f.write(out)
+    # Write output file
+    with open(out_fname, "w") as f:
+        json.dump(result, f, indent=2)
 
 
 if __name__ == "__main__":
-
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--input-file",
         type=str,
-        help="Path of input pickle file",
+        help="Path of input JSON file",
         required=True,
     )
    parser.add_argument(
         "--output-file",
         type=str,
-        help="Path of output pickle file",
+        help="Path of output JSON file",
         required=True,
    )
    parsed_args = parser.parse_args()
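With pickled callables gone, everything the worker needs now travels as plain JSON. A sketch of the input payload consumed by `worker()` — the keys are exactly the ones read in the new code, while all values below are hypothetical:

```python
# Hypothetical input payload for `worker()`: keys match the reads in the new
# code; every value is illustrative only.
example_input = {
    "python_version": [3, 11, 7],
    "fractal_server_version": "2.14.5",
    "full_command": "/some/venv/bin/python /some/task.py --args-json /some/args.json",
    "log_file_remote": "/some/workdir/0-000000-log.txt",
    "metadiff_file_remote": "/some/workdir/0-000000-metadiff.json",
}
```

Note that the `(success, payload)` result tuple is written with `json.dump`, so it comes back as a two-element list when the output file is read. The new `call_command_wrapper` helper (+52 lines, not shown in this diff) is visible here only through its call site; a minimal sketch of what such a wrapper could look like, under that assumption:

```python
import shlex
import subprocess


def call_command_wrapper(cmd: str, log_path: str) -> None:
    # Hypothetical sketch based only on the call site above: run `cmd`,
    # redirect stdout/stderr to `log_path`, and fail loudly on non-zero exit.
    with open(log_path, "w") as fp_log:
        proc = subprocess.run(  # nosec
            shlex.split(cmd),
            stdout=fp_log,
            stderr=subprocess.STDOUT,
        )
    if proc.returncode != 0:
        raise RuntimeError(f"Command {cmd!r} exited with {proc.returncode}.")
```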
fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py CHANGED
@@ -19,48 +19,49 @@ class SlurmTask(BaseModel):
     task_files: TaskFiles
     index: int
 
+    workflow_task_order: int
+    workflow_task_id: int
+    task_name: str
+
     @property
-    def input_pickle_file_local_path(self) -> Path:
+    def input_file_local_path(self) -> Path:
         return (
-            self.workdir_local / f"{self.prefix}-{self.component}-input.pickle"
+            self.workdir_local / f"{self.prefix}-{self.component}-input.json"
         )
 
     @property
-    def input_pickle_file_remote_path(self) -> Path:
+    def input_file_remote_path(self) -> Path:
         return (
-            self.workdir_remote
-            / f"{self.prefix}-{self.component}-input.pickle"
+            self.workdir_remote / f"{self.prefix}-{self.component}-input.json"
         )
 
     @property
-    def output_pickle_file_local_path(self) -> Path:
+    def output_file_local_path(self) -> Path:
         return (
-            self.workdir_local
-            / f"{self.prefix}-{self.component}-output.pickle"
+            self.workdir_local / f"{self.prefix}-{self.component}-output.json"
         )
 
     @property
-    def output_pickle_file_remote_path(self) -> Path:
+    def output_file_remote_path(self) -> Path:
         return (
-            self.workdir_remote
-            / f"{self.prefix}-{self.component}-output.pickle"
+            self.workdir_remote / f"{self.prefix}-{self.component}-output.json"
         )
 
     @property
-    def input_pickle_file_local(self) -> str:
-        return self.input_pickle_file_local_path.as_posix()
+    def input_file_local(self) -> str:
+        return self.input_file_local_path.as_posix()
 
     @property
-    def input_pickle_file_remote(self) -> str:
-        return self.input_pickle_file_remote_path.as_posix()
+    def input_file_remote(self) -> str:
+        return self.input_file_remote_path.as_posix()
 
     @property
-    def output_pickle_file_local(self) -> str:
-        return self.output_pickle_file_local_path.as_posix()
+    def output_file_local(self) -> str:
+        return self.output_file_local_path.as_posix()
 
     @property
-    def output_pickle_file_remote(self) -> str:
-        return self.output_pickle_file_remote_path.as_posix()
+    def output_file_remote(self) -> str:
+        return self.output_file_remote_path.as_posix()
 
 
 class SlurmJob(BaseModel):
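The renamed properties keep the `{prefix}-{component}` naming scheme and switch the suffix from `.pickle` to `.json`; the three new fields carry the task identity that previously rode along as exception attributes in `remote.py`. A standalone illustration of the naming scheme, with hypothetical values:

```python
from pathlib import Path

# Hypothetical values, purely to illustrate the naming scheme above.
workdir_local = Path("/data/fractal/job-42")
prefix = "0"
component = "000000"

input_file_local = workdir_local / f"{prefix}-{component}-input.json"
output_file_local = workdir_local / f"{prefix}-{component}-output.json"

print(input_file_local.as_posix())
# /data/fractal/job-42/0-000000-input.json
print(output_file_local.as_posix())
# /data/fractal/job-42/0-000000-output.json
```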
fractal_server/app/runner/executors/slurm_ssh/runner.py CHANGED
@@ -99,9 +99,8 @@ class SlurmSSHRunner(BaseSlurmRunner):
         for task in _slurm_job.tasks:
             _single_job_filelist.extend(
                 [
-                    task.output_pickle_file_remote_path.name,
+                    task.output_file_remote_path.name,
                     task.task_files.log_file_remote_path.name,
-                    task.task_files.args_file_remote_path.name,
                     task.task_files.metadiff_file_remote_path.name,
                 ]
             )
fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py CHANGED
@@ -29,7 +29,6 @@ def _run_command_as_user(
     *,
     cmd: str,
     user: Optional[str] = None,
-    encoding: Optional[str] = "utf-8",
     check: bool = False,
 ) -> subprocess.CompletedProcess:
     """
@@ -38,8 +37,6 @@ def _run_command_as_user(
     Arguments:
         cmd: Command to be run
         user: User to be impersonated
-        encoding: Argument for `subprocess.run`. Note that this must be `None`
-            to have stdout/stderr as bytes.
         check: If `True`, check that `returncode=0` and fail otherwise.
 
     Raises:
@@ -57,7 +54,7 @@ def _run_command_as_user(
     res = subprocess.run(  # nosec
         shlex.split(new_cmd),
         capture_output=True,
-        encoding=encoding,
+        encoding="utf-8",
     )
     logger.debug(f"[_run_command_as_user] {res.returncode=}")
     logger.debug(f"[_run_command_as_user] {res.stdout=}")
fractal_server/app/runner/executors/slurm_sudo/runner.py CHANGED
@@ -102,17 +102,13 @@ class SudoSlurmRunner(BaseSlurmRunner):
             source_target_list.extend(
                 [
                     (
-                        task.output_pickle_file_remote,
-                        task.output_pickle_file_local,
+                        task.output_file_remote,
+                        task.output_file_local,
                     ),
                     (
                         task.task_files.log_file_remote,
                         task.task_files.log_file_local,
                     ),
-                    (
-                        task.task_files.args_file_remote,
-                        task.task_files.args_file_local,
-                    ),
                     (
                         task.task_files.metadiff_file_remote,
                         task.task_files.metadiff_file_local,
@@ -121,17 +117,14 @@ class SudoSlurmRunner(BaseSlurmRunner):
             )
 
         for source, target in source_target_list:
-            # NOTE: By setting encoding=None, we read/write bytes instead
-            # of strings; this is needed to also handle pickle files.
             try:
                 res = _run_command_as_user(
                     cmd=f"cat {source}",
                     user=self.slurm_user,
-                    encoding=None,
                     check=True,
                 )
                 # Write local file
-                with open(target, "wb") as f:
+                with open(target, "w") as f:
                     f.write(res.stdout)
                 logger.debug(
                     f"[_fetch_artifacts_single_job] Copied {source} into "
@@ -171,7 +164,6 @@ class SudoSlurmRunner(BaseSlurmRunner):
         res = _run_command_as_user(
             cmd=cmd,
             user=self.slurm_user,
-            encoding="utf-8",
             check=True,
         )
         return res.stdout
fractal_server/app/runner/task_files.py CHANGED
@@ -134,14 +134,6 @@ class TaskFiles(BaseModel):
     def metadiff_file_remote(self) -> str:
         return self.metadiff_file_remote_path.as_posix()
 
-    @property
-    def remote_files_dict(self) -> dict[str, str]:
-        return dict(
-            args_file_remote=self.args_file_remote,
-            metadiff_file_remote=self.metadiff_file_remote,
-            log_file_remote=self.log_file_remote,
-        )
-
 
 def enrich_task_files_multisubmit(
     *,
fractal_server/app/runner/v2/_slurm_ssh.py CHANGED
@@ -13,8 +13,7 @@
 """
 Slurm Backend
 
-This backend runs fractal workflows in a SLURM cluster using Clusterfutures
-Executor objects.
+This backend runs fractal workflows in a SLURM cluster.
 """
 from pathlib import Path
 from typing import Optional
fractal_server/app/runner/v2/_slurm_sudo.py CHANGED
@@ -13,8 +13,7 @@
 """
 Slurm Backend
 
-This backend runs fractal workflows in a SLURM cluster using Clusterfutures
-Executor objects.
+This backend runs fractal workflows in a SLURM cluster.
 """
 from pathlib import Path
 from typing import Optional
fractal_server/app/runner/v2/runner_functions.py CHANGED
@@ -1,4 +1,3 @@
-import functools
 from pathlib import Path
 from typing import Any
 from typing import Callable
@@ -12,7 +11,6 @@ from ..exceptions import JobExecutionError
 from ..exceptions import TaskOutputValidationError
 from .db_tools import update_status_of_history_unit
 from .deduplicate_list import deduplicate_list
-from .runner_functions_low_level import run_single_task
 from .task_interface import InitTaskOutput
 from .task_interface import TaskOutput
 from fractal_server.app.db import get_sync_db
@@ -207,13 +205,10 @@ def run_v2_task_non_parallel(
     )
 
     result, exception = runner.submit(
-        functools.partial(
-            run_single_task,
-            command=task.command_non_parallel,
-            workflow_task_order=wftask.order,
-            workflow_task_id=wftask.task_id,
-            task_name=wftask.task.name,
-        ),
+        base_command=task.command_non_parallel,
+        workflow_task_order=wftask.order,
+        workflow_task_id=wftask.task_id,
+        task_name=wftask.task.name,
         parameters=function_kwargs,
         task_type=task_type,
         task_files=task_files,
@@ -335,13 +330,10 @@ def run_v2_task_parallel(
     )
 
     results, exceptions = runner.multisubmit(
-        functools.partial(
-            run_single_task,
-            command=task.command_parallel,
-            workflow_task_order=wftask.order,
-            workflow_task_id=wftask.task_id,
-            task_name=wftask.task.name,
-        ),
+        base_command=task.command_parallel,
+        workflow_task_order=wftask.order,
+        workflow_task_id=wftask.task_id,
+        task_name=wftask.task.name,
         list_parameters=list_function_kwargs,
         task_type="parallel",
         list_task_files=list_task_files,
@@ -456,13 +448,10 @@ def run_v2_task_compound(
         ],
     )
     result, exception = runner.submit(
-        functools.partial(
-            run_single_task,
-            command=task.command_non_parallel,
-            workflow_task_order=wftask.order,
-            workflow_task_id=wftask.task_id,
-            task_name=wftask.task.name,
-        ),
+        base_command=task.command_non_parallel,
+        workflow_task_order=wftask.order,
+        workflow_task_id=wftask.task_id,
+        task_name=wftask.task.name,
         parameters=function_kwargs,
         task_type=task_type,
         task_files=task_files_init,
@@ -559,13 +548,10 @@ def run_v2_task_compound(
     history_unit_ids = [history_unit.id for history_unit in history_units]
 
     results, exceptions = runner.multisubmit(
-        functools.partial(
-            run_single_task,
-            command=task.command_parallel,
-            workflow_task_order=wftask.order,
-            workflow_task_id=wftask.task_id,
-            task_name=wftask.task.name,
-        ),
+        base_command=task.command_parallel,
+        workflow_task_order=wftask.order,
+        workflow_task_id=wftask.task_id,
+        task_name=wftask.task.name,
         list_parameters=list_function_kwargs,
         task_type=task_type,
         list_task_files=list_task_files,
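All four call sites now pass `base_command` and the task-identity fields directly to `submit`/`multisubmit`, instead of baking them into a `functools.partial` around `run_single_task` (deleted in this release). A hedged sketch of the interface these calls imply — the real signatures live in `base_runner.py` (+16 −4, not shown here), so the parameter names below are copied from the call sites while types and return shapes are assumptions:

```python
from typing import Any, Optional


class RunnerInterfaceSketch:
    # Hypothetical: inferred from the call sites above, not from base_runner.py.
    def submit(
        self,
        *,
        base_command: str,
        workflow_task_order: int,
        workflow_task_id: int,
        task_name: str,
        parameters: dict[str, Any],
        task_type: str,
        task_files: Any,
        **kwargs: Any,
    ) -> tuple[Any, Optional[BaseException]]:
        raise NotImplementedError

    def multisubmit(
        self,
        *,
        base_command: str,
        workflow_task_order: int,
        workflow_task_id: int,
        task_name: str,
        list_parameters: list[dict[str, Any]],
        task_type: str,
        list_task_files: list[Any],
        **kwargs: Any,
    ) -> tuple[list[Any], list[Optional[BaseException]]]:
        raise NotImplementedError
```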
fractal_server/app/runner/versions.py CHANGED
@@ -2,24 +2,15 @@ import json
 import sys
 from typing import Union
 
-import cloudpickle
-
 import fractal_server
 
 
 def get_versions() -> dict[str, Union[list[int], str]]:
     """
-    Extract versions of Python, fractal-server and cloudpickle.
-
-    This information is useful to check compatibility of two Python
-    interpreters when running tasks: the current interpreter (which prepares
-    the input pickles and orchestrates workflow execution) and a remote
-    interpreter (e.g. the one defined in the `FRACTAL_SLURM_WORKER_PYTHON`
-    configuration variable) that executes the tasks.
+    Extract versions of Python and fractal-server.
     """
     versions = dict(
-        python=list(sys.version_info[:3]),
-        cloudpickle=cloudpickle.__version__,
+        python=tuple(sys.version_info[:3]),
         fractal_server=fractal_server.__VERSION__,
     )
     return versions
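For reference, a sketch of the value `get_versions()` now produces (numbers illustrative). The `python` entry is a tuple in memory; if the dict is later serialized to JSON, that tuple comes back as a list on load:

```python
import sys

import fractal_server

# Illustrative only; actual values depend on the running interpreter.
versions = dict(
    python=tuple(sys.version_info[:3]),  # e.g. (3, 11, 7)
    fractal_server=fractal_server.__VERSION__,  # e.g. "2.14.5"
)
print(versions)
```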
fractal_server/config.py CHANGED
@@ -460,15 +460,6 @@ class Settings(BaseSettings):
     running a task that produces multiple SLURM jobs.
     """
 
-    FRACTAL_SLURM_ERROR_HANDLING_INTERVAL: int = 5
-    """
-    Interval to wait (in seconds) when the SLURM backend does not find an
-    output pickle file - which could be due to several reasons (e.g. the SLURM
-    job was cancelled or failed, or writing the file is taking long). If the
-    file is still missing after this time interval, this leads to a
-    `JobExecutionError`.
-    """
-
     FRACTAL_PIP_CACHE_DIR: Optional[AbsolutePathStr] = None
     """
     Absolute path to the cache directory for `pip`; if unset,
fractal_server/ssh/_fabric.py CHANGED
@@ -56,6 +56,7 @@ def _acquire_lock_with_timeout(
     """
     logger = get_logger(logger_name)
     logger.info(f"Trying to acquire lock for '{label}', with {timeout=}")
+    t_start_lock_acquire = time.perf_counter()
     result = lock.acquire(timeout=timeout)
     try:
         if not result:
@@ -64,7 +65,9 @@
                 f"Failed to acquire lock for '{label}' within "
                 f"{timeout} seconds"
             )
-        logger.info(f"Lock for '{label}' was acquired.")
+        t_end_lock_acquire = time.perf_counter()
+        elapsed = t_end_lock_acquire - t_start_lock_acquire
+        logger.info(f"Lock for '{label}' was acquired - {elapsed=:.4f} s")
         yield result
     finally:
         if result:
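The added instrumentation is a standard `time.perf_counter()` bracket around the blocking `acquire` call (no new import appears in the hunk, so `time` is presumably already imported in `_fabric.py`). A self-contained sketch of the same timed-acquire pattern, using a plain `threading.Lock`:

```python
import threading
import time
from contextlib import contextmanager


@contextmanager
def acquire_lock_with_timeout(lock: threading.Lock, timeout: float, label: str):
    # Self-contained sketch of the pattern above: time how long the blocking
    # acquire takes, fail if it exceeds the timeout, always release on exit.
    t_start = time.perf_counter()
    acquired = lock.acquire(timeout=timeout)
    try:
        if not acquired:
            raise TimeoutError(
                f"Failed to acquire lock for '{label}' within {timeout} seconds"
            )
        elapsed = time.perf_counter() - t_start
        print(f"Lock for '{label}' was acquired - {elapsed=:.4f} s")
        yield acquired
    finally:
        if acquired:
            lock.release()


# Usage:
lock = threading.Lock()
with acquire_lock_with_timeout(lock, timeout=5.0, label="demo"):
    pass
```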
{fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: fractal-server
-Version: 2.14.4a0
+Version: 2.14.5
 Summary: Backend component of the Fractal analytics platform
 License: BSD-3-Clause
 Author: Tommaso Comparin
@@ -12,7 +12,6 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: alembic (>=1.13.1,<2.0.0)
-Requires-Dist: cloudpickle (>=3.1.0,<3.2.0)
 Requires-Dist: cryptography (>=44.0.1,<44.1.0)
 Requires-Dist: fabric (>=3.2.2,<3.3.0)
 Requires-Dist: fastapi (>=0.115.0,<0.116.0)
@@ -65,8 +64,3 @@ Fractal was conceived in the Liberali Lab at the Friedrich Miescher Institute fo
 
 Unless otherwise specified, Fractal components are released under the BSD 3-Clause License, and copyright is with the BioVisionCenter at the University of Zurich.
 
-The SLURM compatibility layer is based on
-[`clusterfutures`](https://github.com/sampsyo/clusterfutures), by
-[@sampsyo](https://github.com/sampsyo) and collaborators, and it is released
-under the terms of the MIT license.
-