earthkit-workflows 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. cascade/benchmarks/anemoi.py +1 -1
  2. cascade/benchmarks/dask.py +4 -4
  3. cascade/benchmarks/dist.py +3 -3
  4. cascade/benchmarks/job1.py +4 -5
  5. cascade/benchmarks/matmul.py +4 -4
  6. cascade/benchmarks/tests.py +3 -3
  7. cascade/benchmarks/util.py +22 -19
  8. cascade/controller/act.py +7 -0
  9. cascade/controller/core.py +31 -4
  10. cascade/controller/impl.py +5 -4
  11. cascade/controller/notify.py +4 -1
  12. cascade/executor/bridge.py +17 -4
  13. cascade/executor/checkpoints.py +42 -0
  14. cascade/executor/data_server.py +38 -5
  15. cascade/executor/executor.py +3 -1
  16. cascade/executor/msg.py +21 -2
  17. cascade/executor/platform.py +1 -1
  18. cascade/executor/runner/entrypoint.py +2 -2
  19. cascade/executor/runner/memory.py +1 -1
  20. cascade/gateway/api.py +2 -7
  21. cascade/gateway/client.py +1 -1
  22. cascade/gateway/router.py +9 -170
  23. cascade/gateway/server.py +5 -4
  24. cascade/gateway/spawning.py +163 -0
  25. cascade/low/builders.py +2 -2
  26. cascade/low/core.py +30 -1
  27. cascade/low/dask.py +1 -1
  28. cascade/low/execution_context.py +15 -5
  29. cascade/low/func.py +1 -1
  30. cascade/low/into.py +9 -3
  31. cascade/scheduler/assign.py +11 -11
  32. cascade/shm/api.py +4 -4
  33. cascade/shm/client.py +1 -0
  34. cascade/shm/disk.py +2 -2
  35. earthkit/workflows/_version.py +1 -1
  36. earthkit/workflows/backends/__init__.py +0 -1
  37. earthkit/workflows/backends/earthkit.py +1 -1
  38. earthkit/workflows/fluent.py +14 -11
  39. earthkit_workflows-0.6.0.dist-info/METADATA +132 -0
  40. {earthkit_workflows-0.5.0.dist-info → earthkit_workflows-0.6.0.dist-info}/RECORD +43 -41
  41. {earthkit_workflows-0.5.0.dist-info → earthkit_workflows-0.6.0.dist-info}/WHEEL +1 -1
  42. earthkit_workflows-0.5.0.dist-info/METADATA +0 -44
  43. {earthkit_workflows-0.5.0.dist-info → earthkit_workflows-0.6.0.dist-info}/licenses/LICENSE +0 -0
  44. {earthkit_workflows-0.5.0.dist-info → earthkit_workflows-0.6.0.dist-info}/top_level.txt +0 -0
cascade/low/func.py CHANGED
@@ -170,7 +170,7 @@ def pydantic_recursive_collect(
 
     # NOTE a bit ugly, instead of attr it would be better to accept a signature/protocol type
 
-    results: list[str] = []
+    results: list[tuple[str, Any]] = []
     if hasattr(base, attr):
         results.extend((prefix, e) for e in getattr(base, attr)())
     generator: Iterable[tuple[Any, Any]]
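Note: the annotation fix above brings the declared type in line with what the collector actually accumulates, namely (prefix, value) pairs. A minimal sketch of that pattern, with hypothetical `Leaf`/`Tree` models and a simplified `collect` that stands in for (but is not) the real `pydantic_recursive_collect`:

```
from typing import Any

from pydantic import BaseModel


class Leaf(BaseModel):
    def summaries(self) -> list[str]:
        # hypothetical method that plays the role of the `attr` being collected
        return ["leaf-summary"]


class Tree(BaseModel):
    left: Leaf
    right: Leaf


def collect(base: BaseModel, attr: str, prefix: str = "") -> list[tuple[str, Any]]:
    # Each collected value is paired with the prefix it was found under,
    # hence list[tuple[str, Any]] rather than list[str].
    results: list[tuple[str, Any]] = []
    if hasattr(base, attr):
        results.extend((prefix, e) for e in getattr(base, attr)())
    for name, value in base:  # pydantic models iterate as (field_name, value) pairs
        if isinstance(value, BaseModel):
            child_prefix = f"{prefix}.{name}" if prefix else name
            results.extend(collect(value, attr, child_prefix))
    return results


print(collect(Tree(left=Leaf(), right=Leaf()), "summaries"))
# [('left', 'leaf-summary'), ('right', 'leaf-summary')]
```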
cascade/low/into.py CHANGED
@@ -31,7 +31,14 @@ def node2task(name: str, node: dict) -> tuple[TaskInstance, list[Task2TaskEdge]]
    elif isinstance(node["payload"], tuple):
        payload_tuple = node["payload"]
 
-        func = cast(Callable, payload_tuple[0])
+        func_def: dict[str, Any] = (
+            {"entrypoint": payload_tuple[0], "func": None}
+            if isinstance(payload_tuple[0], str)
+            else {
+                "func": TaskDefinition.func_enc(cast(Callable, payload_tuple[0])),
+                "entrypoint": "",
+            }
+        )
        args = cast(list[Any], payload_tuple[1])
        kwargs = cast(dict[str, Any], payload_tuple[2])
        metadata: dict[str, Any] = {}
@@ -72,9 +79,8 @@ def node2task(name: str, node: dict) -> tuple[TaskInstance, list[Task2TaskEdge]]
    outputs = node["outputs"] if node["outputs"] else [Node.DEFAULT_OUTPUT]
 
    definition = TaskDefinition(
-        func=TaskDefinition.func_enc(func),
+        **func_def,
        environment=cast(list[str], metadata.get("environment", [])),
-        entrypoint="",
        input_schema=input_schema,
        output_schema=[(e, "Any") for e in outputs],
        needs_gpu=cast(bool, metadata.get("needs_gpu", False)),
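Note: the net effect of this change is that the first element of a node's payload tuple may now be either a live callable or a string entrypoint; callables are encoded via `TaskDefinition.func_enc`, strings are passed through as `entrypoint`. A rough, self-contained illustration of the two shapes (the entrypoint string below is made up, and the encoding call is replaced by a stand-in):

```
import operator

# Callable payload: the function object itself gets encoded, entrypoint stays empty.
callable_payload = (operator.add, [1, 2], {})

# String payload: interpreted as an entrypoint reference, nothing to encode
# (the exact entrypoint format cascade expects is an assumption here).
entrypoint_payload = ("my_package.my_module.run", [], {"step": 6})

for payload in (callable_payload, entrypoint_payload):
    head = payload[0]
    func_def = (
        {"entrypoint": head, "func": None}
        if isinstance(head, str)
        # stand-in for TaskDefinition.func_enc
        else {"entrypoint": "", "func": f"<encoded {head.__name__}>"}
    )
    print(func_def)
```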
cascade/scheduler/assign.py CHANGED
@@ -61,7 +61,7 @@ def build_assignment(
        for host, status in context.ds2host[dataset].items()
        if status in eligible_transmit
    ):
-        prep.append((dataset, candidate))
+        prep.append((dataset, candidate))  # ty: ignore[unresolved-reference] # candidate walrus
        context.dataset_preparing(dataset, worker)
    else:
        # if we are dealing with the first task to assign, we don't expect to be here!
@@ -83,14 +83,14 @@ def build_assignment(
        raise ValueError(f"double assignment to {head} in fusing opportunities!")
    core.fusing_opportunities[head] = tasks
 
-    # trim for only the necessary ones -- that is, having any edge outside of this current assignment
+    # trim for only the necessary ones: 1/ having any edge outside of this assignment 2/ global output 3/ persistable
    all_outputs = {ds for task in assigned for ds in context.task_o[task]}
    assigned_tasks = set(assigned)
    trimmed_outputs = {
        ds
        for ds in all_outputs
        if (context.edge_o[ds] - assigned_tasks)
-        or (ds in context.job_instance.ext_outputs)
+        or context.publication_mandatory(ds)
    }
 
    return Assignment(
@@ -98,7 +98,7 @@ def build_assignment(
        tasks=assigned,
        prep=prep,
        outputs=trimmed_outputs,
-        extra_env={},
+        extra_env=[],
    )
 
 
@@ -130,7 +130,7 @@ gang_port = 12355
 
 def _try_assign_gang(
    schedule: Schedule,
-    gang: list[frozenset[TaskId]],
+    gang: frozenset[TaskId],
    workers: list[WorkerId],
    component_id: ComponentId,
    context: JobExecutionContext,
@@ -188,9 +188,9 @@ def _try_assign_gang(
            coordinator = (
                f"{context.environment.host_url_base[worker.host]}:{gang_port}"
            )
-            assignment.extra_env["CASCADE_GANG_WORLD_SIZE"] = str(world_size)
-            assignment.extra_env["CASCADE_GANG_RANK"] = str(rank)
-            assignment.extra_env["CASCADE_GANG_COORDINATOR"] = coordinator
+            assignment.extra_env.append(("CASCADE_GANG_WORLD_SIZE", str(world_size)))
+            assignment.extra_env.append(("CASCADE_GANG_RANK", str(rank)))
+            assignment.extra_env.append(("CASCADE_GANG_COORDINATOR", coordinator))
            rank += 1
            yield assignment
            start = perf_counter_ns()
@@ -221,9 +221,9 @@ def _try_assign_gang(
            coordinator = (
                f"{context.environment.host_url_base[worker.host]}:{gang_port}"
            )
-            assignment.extra_env["CASCADE_GANG_WORLD_SIZE"] = str(world_size)
-            assignment.extra_env["CASCADE_GANG_RANK"] = str(rank)
-            assignment.extra_env["CASCADE_GANG_COORDINATOR"] = coordinator
+            assignment.extra_env.append(("CASCADE_GANG_WORLD_SIZE", str(world_size)))
+            assignment.extra_env.append(("CASCADE_GANG_RANK", str(rank)))
+            assignment.extra_env.append(("CASCADE_GANG_COORDINATOR", coordinator))
            rank += 1
            yield assignment
            start = perf_counter_ns()
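Note: `extra_env` is now a list of `(name, value)` pairs instead of a dict, populated with the gang coordination variables shown above. A minimal sketch of how such a list can be folded into a child process environment (the `spawn_gang_member` helper, the command, and the coordinator URL are hypothetical, not cascade API):

```
import os
import subprocess


def spawn_gang_member(extra_env: list[tuple[str, str]], cmd: list[str]) -> subprocess.Popen:
    # dict.update accepts an iterable of pairs; later pairs win on duplicate names,
    # mirroring the effect of repeated .append() calls above.
    env = dict(os.environ)
    env.update(extra_env)
    return subprocess.Popen(cmd, env=env)


extra_env = [
    ("CASCADE_GANG_WORLD_SIZE", "4"),
    ("CASCADE_GANG_RANK", "0"),
    ("CASCADE_GANG_COORDINATOR", "node01:12355"),  # illustrative host:port only
]
proc = spawn_gang_member(
    extra_env,
    ["python", "-c", "import os; print(os.environ['CASCADE_GANG_RANK'])"],
)
proc.wait()
```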
cascade/shm/api.py CHANGED
@@ -11,7 +11,7 @@ import os
 import socket
 from dataclasses import dataclass
 from enum import Enum, auto
-from typing import Protocol, Type, runtime_checkable
+from typing import Protocol, Type, cast, runtime_checkable
 
 from typing_extensions import Self
 
@@ -28,7 +28,7 @@ def ser_str(s: str) -> bytes:
 
 def deser_str(b: memoryview) -> tuple[str, memoryview]:
    l = int.from_bytes(b[:4], "big")
-    return str(b[4 : 4 + l], "ascii"), b[4 + l :]
+    return str(cast(bytes, b[4 : 4 + l]), "ascii"), b[4 + l :]
 
 
 @runtime_checkable
@@ -244,8 +244,8 @@ def ser(comm: Comm) -> bytes:
 
 
 def deser(data: bytes) -> Comm:
-    data = memoryview(data)
-    return b2c[data[:1]].deser(data[1:])
+    mv_data = memoryview(data)
+    return b2c[cast(bytes, mv_data[:1])].deser(mv_data[1:])
 
 
 client_socket_envvar = "CASCADE_SHM_SOCKET"
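Note: `deser_str` reads a 4-byte big-endian length prefix followed by an ASCII payload and hands back the remaining buffer, so several fields can be peeled off one buffer in sequence. The `ser_str` body is not part of this diff, so the version below is an assumed counterpart, shown only to make the round trip concrete:

```
def ser_str(s: str) -> bytes:
    # Assumed counterpart of deser_str: 4-byte big-endian length, then ASCII payload.
    encoded = s.encode("ascii")
    return len(encoded).to_bytes(4, "big") + encoded


def deser_str(b: memoryview) -> tuple[str, memoryview]:
    # Mirrors the version in the diff: read the length prefix, slice out the string,
    # and return the rest of the buffer for the next field.
    l = int.from_bytes(b[:4], "big")
    return str(bytes(b[4 : 4 + l]), "ascii"), b[4 + l :]


buf = memoryview(ser_str("dataset-1") + ser_str("worker-7"))
first, rest = deser_str(buf)
second, rest = deser_str(rest)
print(first, second)  # dataset-1 worker-7
```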
cascade/shm/client.py CHANGED
@@ -63,6 +63,7 @@ class AllocatedBuffer:
    def view(self) -> memoryview:
        if not self.shm:
            raise ValueError("shm already closed!")
+        assert self.shm.buf is not None
        mv = self.shm.buf[: self.l]
        if self.readonly:
            mv = mv.toreadonly()
cascade/shm/disk.py CHANGED
@@ -27,7 +27,7 @@ class Disk:
    def _page_in(self, shmid: str, size: int, callback: Callable[[bool], None]) -> None:
        try:
            chunk_size = 4096
-            shm = SharedMemory(shmid, create=True, size=size)
+            shm = SharedMemory(shmid, create=True, size=size); assert shm.buf is not None
            with open(f"{self.root.name}/{shmid}", "rb") as f:
                i = 0
                while True:
@@ -51,7 +51,7 @@ class Disk:
 
    def _page_out(self, shmid: str, callback: Callable[[bool], None]) -> None:
        try:
-            shm = SharedMemory(shmid, create=False)
+            shm = SharedMemory(shmid, create=False); assert shm.buf is not None
            with open(f"{self.root.name}/{shmid}", "wb") as f:
                f.write(shm.buf[:])
                shm.unlink()
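Note: the page-out / page-in paths above amount to copying between a `SharedMemory` buffer and a file, reading back in 4096-byte chunks. A standalone sketch of that round trip, with made-up names, no error handling, and no completion callback:

```
import tempfile
from multiprocessing.shared_memory import SharedMemory

root = tempfile.mkdtemp()
shmid = "example-segment"

# populate a segment
shm = SharedMemory(shmid, create=True, size=16)
assert shm.buf is not None
shm.buf[:5] = b"hello"

# page out: persist the whole buffer to disk, then release the segment
with open(f"{root}/{shmid}", "wb") as f:
    f.write(shm.buf[:])
shm.close()
shm.unlink()

# page in: recreate a segment of the right size and stream the file back in chunks
chunk_size = 4096
shm = SharedMemory(shmid, create=True, size=16)
assert shm.buf is not None
with open(f"{root}/{shmid}", "rb") as f:
    i = 0
    while chunk := f.read(chunk_size):
        shm.buf[i : i + len(chunk)] = chunk
        i += len(chunk)
print(bytes(shm.buf[:5]))  # b'hello'
shm.close()
shm.unlink()
```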
earthkit/workflows/_version.py CHANGED
@@ -1,2 +1,2 @@
 # Do not change! Do not track in version control!
-__version__ = "0.5.0"
+__version__ = "0.6.0"
earthkit/workflows/backends/__init__.py CHANGED
@@ -211,7 +211,6 @@ class Backend:
 
 try:
    from earthkit.data import FieldList, SimpleFieldList
-
    from earthkit.workflows.backends.earthkit import FieldListBackend
 
    BACKENDS[SimpleFieldList] = FieldListBackend
earthkit/workflows/backends/earthkit.py CHANGED
@@ -9,9 +9,9 @@
 from typing import TypeAlias
 
 import array_api_compat
+
 from earthkit.data import FieldList
 from earthkit.data.core.metadata import Metadata as ekdMetadata
-
 from earthkit.workflows.backends import num_args
 
 
earthkit/workflows/fluent.py CHANGED
@@ -25,9 +25,10 @@ import numpy as np
 import xarray as xr
 
 from . import backends
-from .graph import Graph
+from .graph import Graph, Output
 from .graph import Node as BaseNode
-from .graph import Output
+
+PayloadFunc = Callable | str
 
 
 class Payload:
@@ -35,7 +36,7 @@ class Payload:
 
    def __init__(
        self,
-        func: Callable,
+        func: PayloadFunc,
        args: Iterable | None = None,
        kwargs: dict | None = None,
        metadata: dict[str, Any] | None = None,
@@ -68,6 +69,8 @@
            str, name of function, or if a partial function, the function name and partial
            arguments
        """
+        if isinstance(self.func, str):
+            return self.func
        if hasattr(self.func, "__name__"):
            return self.func.__name__
        return ""
@@ -122,7 +125,7 @@ def capture_payload_metadata(func: Callable[P, R]) -> Callable[P, R]:
 class Node(BaseNode):
    def __init__(
        self,
-        payload: Callable | Payload,
+        payload: PayloadFunc | Payload,
        inputs: Input | Sequence[Input] = [],
        num_outputs: int = 1,
        name: str | None = None,
@@ -393,7 +396,7 @@ class Action:
 
    def map(
        self,
-        payload: Callable | Payload | np.ndarray[Any, Any],
+        payload: PayloadFunc | Payload | np.ndarray[Any, Any],
        yields: Coord | None = None,
    ) -> "Action":
        """Apply specified payload on all nodes. If argument is an array of payloads,
@@ -417,7 +420,7 @@
            array of nodes
        """
        # NOTE this method is really not mypy friendly, just ignore everything
-        if not isinstance(payload, Callable | Payload):  # type: ignore
+        if not isinstance(payload, PayloadFunc | Payload):  # type: ignore
            payload = np.asarray(payload)
            assert payload.shape == self.nodes.shape, (
                f"For unique payloads for each node, payload shape {payload.shape}"
@@ -430,7 +433,7 @@
        it = np.nditer(self.nodes, flags=["multi_index", "refs_ok"])
        node_payload = payload
        for node in it:
-            if not isinstance(payload, Callable | Payload):  # type: ignore
+            if not isinstance(payload, PayloadFunc | Payload):  # type: ignore
                node_payload = payload[it.multi_index]  # type: ignore
            new_nodes[it.multi_index] = Node(
                node_payload,  # type: ignore
@@ -449,7 +452,7 @@
 
    def reduce(
        self,
-        payload: Callable | Payload,
+        payload: PayloadFunc | Payload,
        yields: Coord | None = None,
        dim: str = "",
        batch_size: int = 0,
@@ -491,7 +494,7 @@
        if batch_size > 1 and batch_size < batched.nodes.sizes[dim]:
            if not getattr(payload.func, "batchable", False):
                raise ValueError(
-                    f"Function {payload.func.__name__} is not batchable, but batch_size {batch_size} is specified"
+                    f"Function {payload.func.name()} is not batchable, but batch_size {batch_size} is specified"
                )
 
            while batch_size < batched.nodes.sizes[dim]:
@@ -839,7 +842,7 @@ class RegisteredAction:
 
 
 def _batch_transform(
-    action: Action, selection: dict, payload: Callable | Payload
+    action: Action, selection: dict, payload: PayloadFunc | Payload
 ) -> Action:
    selected = action.select(selection, drop=True)
    dim = list(selection.keys())[0]
@@ -886,7 +889,7 @@ def _combine_nodes(
 
 
 def from_source(
-    payloads_list: np.ndarray[Any, Any],  # values are Callables
+    payloads_list: np.ndarray[Any, Any],  # values are PayloadFunc
    yields: Coord | None = None,
    dims: list | None = None,
    coords: dict | None = None,
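Note: the new `PayloadFunc = Callable | str` alias is what lets a payload's function be given as a plain string throughout the fluent API, with the name lookup returning the string as-is. A tiny self-contained illustration of that dispatch (the `DemoPayload` class is a stand-in, not the real `fluent.Payload`):

```
from collections.abc import Callable

PayloadFunc = Callable | str


class DemoPayload:
    """Stand-in for fluent.Payload, only to illustrate the str handling."""

    def __init__(self, func: PayloadFunc):
        self.func = func

    def name(self) -> str:
        # Mirrors the change above: a string payload is already its own name.
        if isinstance(self.func, str):
            return self.func
        if hasattr(self.func, "__name__"):
            return self.func.__name__
        return ""


for func in (len, "my_pkg.tasks.regrid"):
    # X | Y unions support isinstance checks on Python 3.10+, which is what
    # the `isinstance(payload, PayloadFunc | Payload)` checks rely on.
    assert isinstance(func, PayloadFunc)
    print(DemoPayload(func).name())
# len
# my_pkg.tasks.regrid
```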
earthkit_workflows-0.6.0.dist-info/METADATA ADDED
@@ -0,0 +1,132 @@
+Metadata-Version: 2.4
+Name: earthkit-workflows
+Version: 0.6.0
+Summary: Earthkit Workflows is a Python library for declaring earthkit task DAGs, as well as scheduling and executing them on heterogeneous computing systems.
+Author-email: "European Centre for Medium-Range Weather Forecasts (ECMWF)" <software.support@ecmwf.int>
+License-Expression: Apache-2.0
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: earthkit-data
+Requires-Dist: cloudpickle
+Requires-Dist: numpy
+Requires-Dist: xarray
+Requires-Dist: networkx
+Requires-Dist: array-api-compat
+Requires-Dist: sortedcontainers
+Requires-Dist: pyvis
+Requires-Dist: dill
+Requires-Dist: pyrsistent
+Requires-Dist: pydantic
+Requires-Dist: pyzmq
+Requires-Dist: fire
+Requires-Dist: orjson
+Dynamic: license-file
+
+<p align="center">
+  <picture>
+    <source srcset="https://github.com/ecmwf/logos/raw/refs/heads/main/logos/earthkit/earthkit-workflows-dark.svg" media="(prefers-color-scheme: dark)">
+    <img src="https://github.com/ecmwf/logos/raw/refs/heads/main/logos/earthkit/earthkit-workflows-light.svg" height="120">
+  </picture>
+</p>
+
+<p align="center">
+  <a href="https://github.com/ecmwf/codex/raw/refs/heads/main/ESEE">
+    <img src="https://github.com/ecmwf/codex/raw/refs/heads/main/ESEE/production_chain_badge.svg" alt="ECMWF Software EnginE">
+  </a>
+  <a href="https://github.com/ecmwf/codex/raw/refs/heads/main/Project Maturity">
+    <img src="https://github.com/ecmwf/codex/raw/refs/heads/main/Project Maturity/emerging_badge.svg" alt="Maturity Level">
+  </a>
+  <a href="https://opensource.org/licenses/apache-2-0">
+    <img src="https://img.shields.io/badge/Licence-Apache 2.0-blue.svg" alt="Licence">
+  </a>
+  <a href="https://github.com/ecmwf/earthkit-workflows/tags">
+    <img src="https://img.shields.io/github/v/tag/ecmwf/earthkit-workflows?color=purple&label=Release" alt="Latest Release">
+  </a>
+</p>
+
+<p align="center">
+  <a href="#installation">Installation</a>
+
+  <a href="#quick-start">Quick Start</a>
+
+  <a href="#documentation">Documentation</a>
+</p>
+
+> \[!IMPORTANT\]
+> This software is **Emerging** and subject to ECMWF's guidelines on [Software Maturity](https://github.com/ecmwf/codex/raw/refs/heads/main/Project%20Maturity).
+
+**earthkit-workflows** is a Python library for declaring earthkit tasks as DAGs.
+It contains an internal `cascade` engine for scheduling and executing task graphs almost optimally across heterogeneous platforms with complex network technologies and topologies.
+It effectively performs task-based parallelism across CPUs, GPUs, distributed systems (HPC), and any combination thereof.
+It is designed for a no-IO approach, where expensive storage of intermediate data is minimised whilst making the most of all available transport technologies between different hardware.
+
+Cascade is designed to work on well-profiled task graphs, where:
+* the task graph is a static DAG,
+* the DAG nodes are defined by tasks with well-known execution times,
+* the DAG edges are defined by data dependencies with well-known data sizes,
+* the characteristics of the hardware (processors, network connections) are known.
+
+earthkit-workflows allows for declaring such task graphs using a neat fluent API, and interoperates pleasantly with the rest of the [earthkit](https://github.com/ecmwf/earthkit) ecosystem.
+
+## Installation
+
+Install via `pip` with:
+
+```
+$ pip install 'earthkit-workflows[all]'
+```
+
+For development, you can use `pip install -e .`, though there is currently an issue with earthkit masking. Additionally, you may want to install pre-commit hooks via
+```
+$ pip install pre-commit
+$ pre-commit install
+```
+
+## Quick Start
+
+*Note*: this section is moderately outdated.
+
+We support two regimes for cascade execution -- local mode (ideal for developing and debugging small graphs) and distributed mode (assumed for slurm & HPC).
+
+To launch in local mode, in your Python REPL / Jupyter:
+```
+import cascade.benchmarks.job1 as j1
+import cascade.benchmarks.distributed as di
+import cloudpickle
+
+spec = di.ZmqClusterSpec.local(j1.get_prob())
+print(spec.controller.outputs)
+# prints out:
+# {DatasetId(task='mean:dc9d90 ...
+# defaults to all "sinks", but can be overridden
+
+rv = di.launch_from_specs(spec, None)
+
+for key, value in rv.outputs.items():
+    deser = cloudpickle.loads(value)
+    print(f"output {key} is of type {type(deser)}")
+```
+
+For distributed mode, launch
+```
+./scripts/launch_slurm.sh ./localConfigs/<your_config.sh>
+```
+Inside the `<your_config.sh>`, you define the size of the cluster, the logging output directory, which job to run, and so on. Pay special attention to the definitions of your `venv`, `LD_LIBRARY_PATH`, etc. -- this is not automated.
+
+Both of these examples hardcode a particular job, `"job1"`, which is a benchmarking workload.
+Most likely, you want to define your own -- for local mode, just pass a `cascade.Graph` instance to the call; in distributed mode, you need to provide that instance in the `cascade.benchmarks.__main__` module instead (ideally by extending the `get_job` function).
+
+There is also `python -m cascade.benchmarks local <..>` -- you may use that as an alternative path to local mode, for your own e2e tests.
+
+## Documentation
+
+Not yet available.
+
+## Contributions and Support
+Due to the maturity and status of the project, there is no support provided -- unless the usage of this project happens within some higher-status initiative that ECMWF participates in.
+External contributions and reported issues will be looked at, but are not guaranteed to be accepted or responded to.
+In general, follow ECMWF's guidelines for [external contributions](https://github.com/ecmwf/codex/tree/main/External%20Contributions).
+
+## License
+See [license](./LICENSE).
{earthkit_workflows-0.5.0.dist-info → earthkit_workflows-0.6.0.dist-info}/RECORD CHANGED
@@ -2,75 +2,77 @@ cascade/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cascade/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cascade/benchmarks/__init__.py,sha256=Gu8kEApmJ2zsIhT2zpm1-6n84-OwWnz-0vO8UHYtBzo,528
 cascade/benchmarks/__main__.py,sha256=z3Ib0NlIgMrn2zjrJhqqnJkjCIb4xKDSpO5vF9j-Onc,966
-cascade/benchmarks/anemoi.py,sha256=qtAI03HdtAmcksCgjIEZyNyUNzMp370KF4lAh5g4cOk,1077
-cascade/benchmarks/dask.py,sha256=U0B0jpLIeIs4Zl0SX_opMypXQXOIS5ER6mGdtPCgqkQ,953
-cascade/benchmarks/dist.py,sha256=ngXJJzegnMUVwDFPvGMG6997lamB-aSEHi74oBbayrE,4116
+cascade/benchmarks/anemoi.py,sha256=-AFw7MUG0VpKh18Nh_qGPGxDCfPd3wukSA1BuNm9dtg,1110
+cascade/benchmarks/dask.py,sha256=T88t0YDHUDGN7B5L0ZdbgQnsKEYQgfUr8mLeRH1Hr-w,973
+cascade/benchmarks/dist.py,sha256=FIrH6vSfUbHGZXFicrjGsOf2o05Z2VD1w5OoZzVr_hw,4215
 cascade/benchmarks/generators.py,sha256=NK4fFisWsZdMkA2Auzrn-P7G5D9AKpo2JVnqXE44YT8,2169
-cascade/benchmarks/job1.py,sha256=MOcZZYgf36MzHCjtby0lQyenM1ODUlagG8wtt2CbpnI,4640
-cascade/benchmarks/matmul.py,sha256=5STuvPY6Q37E2pKRCde9dQjL5M6tx7tkES9cBLZ6eK4,1972
+cascade/benchmarks/job1.py,sha256=vj9DCCx8Wz7IR1iloCDYmCcsq8R60wdpXnwUrBHDPY4,4693
+cascade/benchmarks/matmul.py,sha256=5XzZ5X-4bvhiGEQ6tK4Rbh9fnavJSFpfHFzoN-qdSSg,2099
 cascade/benchmarks/plotting.py,sha256=vSz9HHbqZwMXHpBUS-In6xsXGgK7QIoQTTiYfSwYwZs,4428
 cascade/benchmarks/reporting.py,sha256=MejaM-eekbMYLAnuBxGv_t4dR1ODJs4Rpc0fiZSGjyw,5410
-cascade/benchmarks/tests.py,sha256=eeQE0YR4FKi5k9BMJaTcXjKF5eIu3xXJsHc099P0Jio,5537
-cascade/benchmarks/util.py,sha256=wP7lDI6v9ATIF96uagVB-23EiagCTVYJhUUy-_CfqQ8,9892
+cascade/benchmarks/tests.py,sha256=Ggrr1iYG_DJQjUslMCr45ZKbSmTYvy_W1WSBIZdZrKI,5615
+cascade/benchmarks/util.py,sha256=c71xz2VCroGcNLj7EM1P4SsVrqBEimMRb8idFvEstf8,10178
 cascade/controller/__init__.py,sha256=p4C2p3S_0nUGamP9Mi6cSa5bvpiWbI6sVWtGhFnNqjw,1278
-cascade/controller/act.py,sha256=WHIsk4H-Bbyl_DABX2VWhyKy_cNnp12x1nilatPCL8I,2981
-cascade/controller/core.py,sha256=NqvZ5g5GNphwOpzdXbCI0_fxIzzmO97_n2xZKswK72Q,3589
-cascade/controller/impl.py,sha256=9jdTikYO8OkaNIfzatyr3Mhai5EfEhaeii9GaF9cQw4,3526
-cascade/controller/notify.py,sha256=5eSPKcxqrv9kHy7St-iIm1NttsyzcvwLhZI5dvr4cEY,5881
+cascade/controller/act.py,sha256=b9GYnv5Qod7Avngy-j95WGXr5GIOsuPGhEewB6V9HbQ,3405
+cascade/controller/core.py,sha256=iD16fvQEnb5v-hZK_-RIWjvr7m31l__w6L3feDRCwhE,4761
+cascade/controller/impl.py,sha256=OvY0l3llTCC-TbVs4nui5AJEFrI717QvGXm9Nl7oPl0,3668
+cascade/controller/notify.py,sha256=mikxMOmIwtXkj_CPY7pULlZ83k2t27DtdXzsqvlOWGw,6055
 cascade/controller/report.py,sha256=rKGYmq4nIiDqKuP_C7YSwpEAUOPdjILlDcbKkdUt30s,3772
-cascade/executor/bridge.py,sha256=WDE-GM2Bv7nUk1-nV-otMGuaRYw1-Vmd7PWploXBp6Y,8267
+cascade/executor/bridge.py,sha256=2D_gKtn8nkGH4iS95VO-gKLFW1fumABBJcAiVSRE5pg,8839
+cascade/executor/checkpoints.py,sha256=FIMpbdC5I58hiUlJ44NQMrFS4A-zF1jUqxwyN_xEOFs,1760
 cascade/executor/comms.py,sha256=-9qrKwva6WXkHRQtzSnLFy5gB3bOWuxYJP5fL6Uavw8,8736
 cascade/executor/config.py,sha256=8azy_sXdvDGO0zTNqA0pdtkXsyihM4FQ4U1W_3Dhua0,1571
-cascade/executor/data_server.py,sha256=TSFJdSR9PtKSvvLTosHt0ITQlqtGGAl5N_io6wtvL0A,13569
-cascade/executor/executor.py,sha256=OwLrhSLm4bIHsWdnjXlnQntxGIOHgrIPSSVZ5nbNWvQ,13686
-cascade/executor/msg.py,sha256=7HI0rKeCRaV1ONR4HWEa64nHbu-p6-QdBwJNitmst48,4340
-cascade/executor/platform.py,sha256=mRUauodvRle9rAbtFr5n9toKzIgt_pecNlhOjon4dvY,2348
+cascade/executor/data_server.py,sha256=oPfPv1a0AplAyCeO2_mDD0xKGwnZFy0NxrlPh1V63cU,15184
+cascade/executor/executor.py,sha256=RmLfgH2AmLU0jS0f6wRKnvdlFrpMDEs0cIive3Uxedw,13784
+cascade/executor/msg.py,sha256=Nz6coPQwNZhWBhogWjbV-Xd3YGBI761r8wljpyEAMLg,4825
+cascade/executor/platform.py,sha256=ALFXCZ6TiKLBuopHCRgF3x15ZLD4qYkes3yG7-mk7Ks,2372
 cascade/executor/serde.py,sha256=z6klTOZqW_BVGrbIRNz4FN0_XTfRiKBRQuvgsQIuyAo,2827
 cascade/executor/runner/__init__.py,sha256=30BM80ZyA7w3IrGiKKLSFuhRehbR2Mm99OJ8q5PJ63c,1547
-cascade/executor/runner/entrypoint.py,sha256=WyxOFGAYDQD_fXsM4H9_6xBrnAmQrCTUnljfcW6-BoM,7918
-cascade/executor/runner/memory.py,sha256=VEOrYfFNGNBM7vMY05wjbX3L0U-RJZWpm_Ud4bMUR5g,6486
+cascade/executor/runner/entrypoint.py,sha256=-sGjfYRMhGQLY6QkmIf3v4BkvA2chSk_-mCJMMd7Ocw,7906
+cascade/executor/runner/memory.py,sha256=yI9j4drpXy7_tg8cuNCYDYvLLhLDZX39-tRroU5NkyQ,6519
 cascade/executor/runner/packages.py,sha256=lic5ItjyDpcQVRBFOZssvnco9bmxWpq_JRFDeShVR8k,4150
 cascade/executor/runner/runner.py,sha256=zqpkvxdWLbwyUFaUbZmSj0KQEBNRpmF8gwVotiaamhc,4870
 cascade/gateway/__init__.py,sha256=1EzMKdLFXEucj0YWOlyVqLx4suOntitwM03T_rRubIk,829
 cascade/gateway/__main__.py,sha256=kmfklSeA7v5ie75SBHOql-eHuY6x4eTHlItMYqCQ1Pg,969
-cascade/gateway/api.py,sha256=vPYfiuEjBeddFnCPZpr4_9ovuhGdZ3_migzKTUtvF98,3050
-cascade/gateway/client.py,sha256=1p4Tvrf-BH0LQHOES5rY1z3JNIfmXcqWG2kYl4rpcE0,4061
-cascade/gateway/router.py,sha256=9oTkqssb3dHF24TIaAn_7oQoNfm4qkOvriufbOJxnyE,11582
-cascade/gateway/server.py,sha256=BfUKpU2nCEB_zI4BdZU_9zHYHX1WoQaLARCTxMSP0Nk,3971
+cascade/gateway/api.py,sha256=3OV8hlgfRQA6Yru5T8S9mHENKQEEFFJfRGj6REDe9DI,2821
+cascade/gateway/client.py,sha256=M_v5tT6tEp2dni6fviBsrEF8_SInI-EtntX3BjgjLIY,4067
+cascade/gateway/router.py,sha256=_0qWehccKRJlnVwV-yQKOi5YeL7dgmGyATiG6Qdv5zo,5861
+cascade/gateway/server.py,sha256=oyEuZIViIVvCcWoQ_xo9FRXe2e4aSZGoLs8c9jING-s,4076
+cascade/gateway/spawning.py,sha256=gpOoa99th5VC_MFYh4NFFW7loVJHJMeFYNSua1Nl0mE,5447
 cascade/low/__init__.py,sha256=5cw2taOGITK_gFbICftzK2YLdEAnLUY5OzblFzdHss4,769
-cascade/low/builders.py,sha256=7TG3lPjO_iB7iqHkRiG-iWqyg6cZQL01cjH47wIP1qM,8408
-cascade/low/core.py,sha256=_3x4ka_pmCgZbfwFeyhq8S4M6wmh0s24VRCLhk5yQFM,6444
-cascade/low/dask.py,sha256=xToT_vyfkgUUxSFN7dS7qLttxzuBbBZfDylPzGg7sPg,3319
-cascade/low/execution_context.py,sha256=cdDJLYhreo4T7t4qXgFBosncubZpTrm0hELo7q4miqo,6640
-cascade/low/func.py,sha256=ihL5n3cK-IJnATgP4Dub2m-Mp_jHMxJzCA1v4uMEsi8,5211
-cascade/low/into.py,sha256=lDOpO4gX-154BgLJWonVQZiGRbUqv-GhYy8qWBqJ1QQ,3402
+cascade/low/builders.py,sha256=ifqIrITlXKmBIvGpfa0pa8yeFSOi2sx9frDuOgpgMc0,8465
+cascade/low/core.py,sha256=qY_tGsuf_sTj4oqVbkldnFyW4pGUrareW72JOr77XVA,7489
+cascade/low/dask.py,sha256=eY4nYLRgffeiZKaQHJWdZzXLYcdk-2MszXacTD7j4ys,3314
+cascade/low/execution_context.py,sha256=ook7-8v3GP0EhseChVWeqeQPjsoLreRK8qu77M30ST4,7098
+cascade/low/func.py,sha256=vm-YU7xKKe8MpCL4LIhJ1m6rDuuIxbzrr1MuZN1t_gA,5223
+cascade/low/into.py,sha256=UKdYry9x4-FCP_wdhNX4CU2CnyUJbrYevXj4rzBmgQw,3584
 cascade/low/tracing.py,sha256=qvGVKB1huwcYoyvMYN-2wQ92pLQTErocTjpIjWv9glA,4511
 cascade/low/views.py,sha256=UwafO2EQHre17GjG8hdzO8b6qBRtTRtDlhOc1pTf8Io,1822
 cascade/scheduler/__init__.py,sha256=VT2qQ0gOQWHC4-T0FcCs59w8WZ94j2nUn7tiGm5XepA,1148
 cascade/scheduler/api.py,sha256=UuomWS2ISuDw-ngFFUKLyucygpTWF0EBW8ZuF91EUBU,7778
-cascade/scheduler/assign.py,sha256=gpOLL22-k3ah4gihiztIGMX2uF0RdJ5AtJ8fOCJUviE,18362
+cascade/scheduler/assign.py,sha256=vFE3d8WpymTAdT4QsIHe7fgN9YvkR5xRBThM9mM3aiY,18467
 cascade/scheduler/core.py,sha256=umORLC6SDeOyS4z8nQuVFkDukBJ96JfH4hdLSj6Km20,3378
 cascade/scheduler/precompute.py,sha256=AhTn8RgnU4XuV_WAgbVXz9z0YRpNS6LCY1dJeHdTfCc,8709
 cascade/shm/__init__.py,sha256=R9QgGSnsl_YDjFjAUQkoleM_5yGM37ce9S8a4ReA1mE,3854
 cascade/shm/algorithms.py,sha256=SGxnJF4ovUaywTunMJWkG77l5DN-jXx7HgABt3sRJXM,2356
-cascade/shm/api.py,sha256=TFK0ioKJpJ2-rTxwk_O5BtB6AKjgwfM8CIl-VZaUIZo,7180
-cascade/shm/client.py,sha256=7rUG0bra7XTJRumywQ-Gos4pWeZoXpTZqseh36uNWFg,6312
+cascade/shm/api.py,sha256=WfbOnXaWhPvW8G85tOcD8tfIrvw8csYofFVWkJEEgvg,7221
+cascade/shm/client.py,sha256=Kuhhtu7iepewFDWCu4rT6p0aGTrjbbKpx-3eT9OGf4w,6352
 cascade/shm/dataset.py,sha256=QAALiWK0fyMLet9XFXmATm-c9gTuF77cifGbjP3WjXo,13155
-cascade/shm/disk.py,sha256=Fdl_pKOseaXroRp01OwqWVsdI-sSmiFizIFCdxBuMWM,2653
+cascade/shm/disk.py,sha256=dWWnOc1Sue4LqwgjUgZ7Ln7OMJo1ySB9J60Tt9znU40,2709
 cascade/shm/func.py,sha256=ZWikgnSLCmbSoW2LDRJwtjxdwTxkR00OUHAsIRQ-ChE,638
 cascade/shm/server.py,sha256=geWo2BuF8sa_BqY8akh6ardWFfKfDJktWujrdDHn624,5648
 earthkit/workflows/__init__.py,sha256=-p4anEn0YQbYWM2tbXb0Vc3wq4-m6kFhcNEgAVu5Jis,1948
-earthkit/workflows/_version.py,sha256=rhUN01Io6mJEPnWEuUb8vhmzNoYjbuT0JcvPlDVCEcE,72
+earthkit/workflows/_version.py,sha256=e4A-4swOeKRurBJN2p8bRPY9Cpbvh1cTZi8iwsjxoxs,72
 earthkit/workflows/decorators.py,sha256=YK6AN-Ta9cAOX__DjZbn_vNYdpRL98N6dbF31E6Vu1c,1478
-earthkit/workflows/fluent.py,sha256=3CvZfdLjXCoGR0VJDTB8_PDFgR7n-UhGLdKo7E5zuvM,30161
+earthkit/workflows/fluent.py,sha256=Y6dAiQmuOo_H6iV2OzjDzBbZObAa1JrKiEkYCdWxYQY,30262
 earthkit/workflows/mark.py,sha256=otgR6ar_9R7q5VRFD6RlLUROfjhyiaMIsgcleW2icKI,1322
 earthkit/workflows/taskgraph.py,sha256=RsT1Qlng1uPZSaSBNqE8vFsoI5J8DDcQl468YPX-kCY,4460
 earthkit/workflows/transformers.py,sha256=BsUUvnG-UyerT3XUYcHc1qJkSsLc0ZX3Zxqq70tJWLU,2105
 earthkit/workflows/utility.py,sha256=ygqn1s846WQbo7HGY46Z8N1AXrDFGwyygSgsv4YnGJ8,1344
 earthkit/workflows/visualise.py,sha256=WbqJWvn648B7Qo3VCKJyoJzU6Mgvv0p3UWZb0lf01m8,2290
-earthkit/workflows/backends/__init__.py,sha256=6ONg-EdNODiqeBZqyosI5iq1UfZfaOLqhAo8l8_wn9o,6519
+earthkit/workflows/backends/__init__.py,sha256=4-An7MneAn7Gkk57MaJNfhat1480Vou33tFZu2P_sjA,6518
 earthkit/workflows/backends/arrayapi.py,sha256=QfUsTlYuFH3CroWdcf_XBcLnt2znMcS1HwNNEe8J0qU,2279
-earthkit/workflows/backends/earthkit.py,sha256=rZURJf6FLKcCjJkyWgOf6NqKjPZjSNX09dV_SicIlss,8958
+earthkit/workflows/backends/earthkit.py,sha256=Rm9iItLjGho9DEu8yTJbt_hqvYRwi0IrqEN_M0KTI_M,8958
 earthkit/workflows/backends/xarray.py,sha256=4pnnPgIug4DmvhigkU0JsituvdvspuVA_vxbIsrq8-A,6762
 earthkit/workflows/graph/__init__.py,sha256=3svepPVOeG7LVKPjFNH4z2NIqOeDc_wGN9NRQ6Hg6zI,1099
 earthkit/workflows/graph/copy.py,sha256=gST073P-sbaP1FAY1jo2bZPtvBf6TISLMu6Crnw-hws,954
@@ -89,8 +91,8 @@ earthkit/workflows/graph/split.py,sha256=t-Sji5eZb01QO1szqmDNTodDDALqdo-0R0x1ESs
 earthkit/workflows/graph/transform.py,sha256=BZ8n7ePUnuGgoHkMqZC3SLzifu4oq6q6t6vka0khFtg,3842
 earthkit/workflows/graph/visit.py,sha256=MP-aFSqOl7aqJY2i7QTgY4epqb6yM7_lK3ofvOqfahw,1755
 earthkit/workflows/plugins/__init__.py,sha256=nhMAC0eMLxoJamjqB5Ns0OWy0OuxEJ_YvaDFGEQITls,129
-earthkit_workflows-0.5.0.dist-info/licenses/LICENSE,sha256=73MJ7twXMKnWwmzmrMiFwUeY7c6JTvxphVggeUq9Sq4,11381
-earthkit_workflows-0.5.0.dist-info/METADATA,sha256=eM5GF8HLO3dT9XDSMar9gYKveLjH1-XxMAXB7CHr8ag,1571
-earthkit_workflows-0.5.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-earthkit_workflows-0.5.0.dist-info/top_level.txt,sha256=oNrH3Km3hK5kDkTOiM-8G8OQglvZcy-gUKy7rlooWXs,17
-earthkit_workflows-0.5.0.dist-info/RECORD,,
+earthkit_workflows-0.6.0.dist-info/licenses/LICENSE,sha256=73MJ7twXMKnWwmzmrMiFwUeY7c6JTvxphVggeUq9Sq4,11381
+earthkit_workflows-0.6.0.dist-info/METADATA,sha256=RqoF7hs8DPKBPNXVImg1K4ysoc41lg7YSnv0r9402Mo,5724
+earthkit_workflows-0.6.0.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+earthkit_workflows-0.6.0.dist-info/top_level.txt,sha256=oNrH3Km3hK5kDkTOiM-8G8OQglvZcy-gUKy7rlooWXs,17
+earthkit_workflows-0.6.0.dist-info/RECORD,,
{earthkit_workflows-0.5.0.dist-info → earthkit_workflows-0.6.0.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.9.0)
+Generator: setuptools (80.10.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
earthkit_workflows-0.5.0.dist-info/METADATA DELETED
@@ -1,44 +0,0 @@
-Metadata-Version: 2.4
-Name: earthkit-workflows
-Version: 0.5.0
-Summary: Earthkit Workflows is a Python library for declaring earthkit task DAGs, as well as scheduling and executing them on heterogeneous computing systems.
-Author-email: "European Centre for Medium-Range Weather Forecasts (ECMWF)" <software.support@ecmwf.int>
-License-Expression: Apache-2.0
-Requires-Python: >=3.10
-Description-Content-Type: text/markdown
-License-File: LICENSE
-Requires-Dist: earthkit-data
-Requires-Dist: cloudpickle
-Requires-Dist: numpy
-Requires-Dist: xarray
-Requires-Dist: networkx
-Requires-Dist: array-api-compat
-Requires-Dist: sortedcontainers
-Requires-Dist: pyvis
-Requires-Dist: dill
-Requires-Dist: pyrsistent
-Requires-Dist: pydantic
-Requires-Dist: zmq
-Requires-Dist: fire
-Requires-Dist: orjson
-Provides-Extra: tests
-Requires-Dist: pytest; extra == "tests"
-Requires-Dist: pytest-xdist; extra == "tests"
-Requires-Dist: earthkit-data; extra == "tests"
-Provides-Extra: lint
-Requires-Dist: black; extra == "lint"
-Requires-Dist: isort; extra == "lint"
-Requires-Dist: flake8; extra == "lint"
-Provides-Extra: gpu
-Requires-Dist: jax[cpu]; extra == "gpu"
-Requires-Dist: jax[cuda11_pip]; extra == "gpu"
-Requires-Dist: cupy-cuda11x; extra == "gpu"
-Requires-Dist: numba; extra == "gpu"
-Provides-Extra: examples
-Requires-Dist: cftime; extra == "examples"
-Requires-Dist: bokeh; extra == "examples"
-Provides-Extra: earthkit
-Requires-Dist: earthkit-data; extra == "earthkit"
-Provides-Extra: all
-Requires-Dist: cascade[earthkit,examples,gpu,kubernetes,lint,tests]; extra == "all"
-Dynamic: license-file