temporal-workdir 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- temporal_workdir/__init__.py +20 -0
- temporal_workdir/_archive.py +43 -0
- temporal_workdir/_temporal.py +112 -0
- temporal_workdir/_workspace.py +130 -0
- temporal_workdir-0.1.0.dist-info/METADATA +113 -0
- temporal_workdir-0.1.0.dist-info/RECORD +8 -0
- temporal_workdir-0.1.0.dist-info/WHEEL +4 -0
- temporal_workdir-0.1.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
"""Remote workspace sync for Temporal activities.
|
|
2
|
+
|
|
3
|
+
This package provides a :class:`Workspace` that syncs a local directory with
|
|
4
|
+
remote storage (GCS, S3, Azure, local, etc.) before and after a Temporal
|
|
5
|
+
activity executes. This enables file-based activities to work correctly across
|
|
6
|
+
distributed workers where disk state is not shared.
|
|
7
|
+
|
|
8
|
+
The storage backend is auto-detected from the URL scheme via `fsspec`_.
|
|
9
|
+
|
|
10
|
+
.. _fsspec: https://filesystem-spec.readthedocs.io/
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from temporal_workdir._temporal import get_workspace_path, workspace
|
|
14
|
+
from temporal_workdir._workspace import Workspace
|
|
15
|
+
|
|
16
|
+
__all__ = [
|
|
17
|
+
"Workspace",
|
|
18
|
+
"get_workspace_path",
|
|
19
|
+
"workspace",
|
|
20
|
+
]
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"""Archive utilities for packing/unpacking workspace directories."""
|
|
2
|
+
|
|
3
|
+
import io
|
|
4
|
+
import tarfile
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def pack(directory: Path) -> bytes:
    """Serialize *directory* into an in-memory gzipped tarball.

    Only regular files are archived (empty directories are not preserved);
    entries are added in sorted order so archives are reproducible apart
    from gzip metadata.

    Args:
        directory: Local directory to archive. Must exist.

    Returns:
        The tar.gz archive as bytes.
    """
    stream = io.BytesIO()
    with tarfile.open(fileobj=stream, mode="w:gz") as archive:
        for item in sorted(directory.rglob("*")):
            if not item.is_file():
                continue
            archive.add(str(item), arcname=str(item.relative_to(directory)))
    return stream.getvalue()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def unpack(data: bytes, directory: Path) -> None:
|
|
27
|
+
"""Unpack a gzipped tar archive into a directory.
|
|
28
|
+
|
|
29
|
+
Args:
|
|
30
|
+
data: The tar.gz archive bytes.
|
|
31
|
+
directory: Target directory. Created if it doesn't exist.
|
|
32
|
+
"""
|
|
33
|
+
directory.mkdir(parents=True, exist_ok=True)
|
|
34
|
+
buf = io.BytesIO(data)
|
|
35
|
+
with tarfile.open(fileobj=buf, mode="r:gz") as tar:
|
|
36
|
+
# Security: prevent path traversal
|
|
37
|
+
for member in tar.getmembers():
|
|
38
|
+
member_path = Path(directory / member.name).resolve()
|
|
39
|
+
if not str(member_path).startswith(str(directory.resolve())):
|
|
40
|
+
raise ValueError(
|
|
41
|
+
f"Archive member {member.name!r} would escape target directory"
|
|
42
|
+
)
|
|
43
|
+
tar.extractall(path=str(directory), filter="data")
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
"""Temporal-specific integration for Workspace."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import contextvars
|
|
6
|
+
import functools
|
|
7
|
+
from collections.abc import Callable
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any, TypeVar
|
|
10
|
+
|
|
11
|
+
import temporalio.activity
|
|
12
|
+
|
|
13
|
+
from temporal_workdir._workspace import Workspace
|
|
14
|
+
|
|
15
|
+
# TypeVar that lets the ``workspace`` decorator preserve the decorated
# activity's callable type for static checkers.
F = TypeVar("F", bound=Callable[..., Any])

# Local workspace path of the activity currently executing in this context.
# A ContextVar (rather than a module global) keeps concurrent activities on
# the same worker isolated from each other.
_current_workspace_path: contextvars.ContextVar[Path | None] = contextvars.ContextVar(
    "_current_workspace_path", default=None
)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def workspace(
    remote_url_template: str,
    key_fn: Callable[..., dict[str, str]] | None = None,
    **workspace_kwargs: Any,
) -> Callable[[F], F]:
    """Decorator that wraps a Temporal activity in a :class:`Workspace`.

    Remote state is pulled into a local directory before the activity body
    runs and pushed back after it completes successfully. Inside the body,
    :func:`get_workspace_path` returns the local directory.

    Placeholders in ``remote_url_template`` are filled from
    :func:`temporalio.activity.info` and, optionally, from ``key_fn``.

    Built-in template variables (from ``activity.info()``):

    - ``{workflow_id}`` — stable across retries
    - ``{activity_id}`` — unique per scheduling within a workflow
    - ``{activity_type}`` — the activity name
    - ``{task_queue}``

    Example::

        @workspace("gs://bucket/{workflow_id}/{activity_type}")
        @activity.defn
        async def process(input: ProcessInput) -> Output:
            ws = get_workspace_path()
            data = json.loads((ws / "config.json").read_text())
            ...

        # With key_fn for custom template vars:
        @workspace(
            "gs://bucket/{workflow_id}/{component}",
            key_fn=lambda input: {"component": input.component_name},
        )
        @activity.defn
        async def process(input: ProcessInput) -> Output:
            ...

    Args:
        remote_url_template: URL template with ``{var}`` placeholders.
        key_fn: Optional function that receives the activity's positional
            arguments and returns a dict of additional template variables.
        **workspace_kwargs: Extra keyword arguments forwarded to
            :class:`Workspace` (e.g., ``cleanup="keep"``).
    """

    def decorator(fn: F) -> F:
        @functools.wraps(fn)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            info = temporalio.activity.info()
            # Built-in variables come from the activity context; key_fn may
            # add to or override them.
            variables: dict[str, str] = {
                "workflow_id": info.workflow_id or "",
                "activity_id": info.activity_id,
                "activity_type": info.activity_type,
                "task_queue": info.task_queue,
            }
            if key_fn is not None:
                variables |= key_fn(*args)

            resolved_url = remote_url_template.format(**variables)

            async with Workspace(resolved_url, **workspace_kwargs) as ws:
                # Expose the path via a ContextVar so the activity body can
                # fetch it without a signature change; always reset so the
                # context never leaks across activity invocations.
                token = _current_workspace_path.set(ws.path)
                try:
                    return await fn(*args, **kwargs)
                finally:
                    _current_workspace_path.reset(token)

        return wrapper  # type: ignore[return-value]

    return decorator
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def get_workspace_path() -> Path:
    """Return the local workspace path of the currently executing activity.

    Only valid inside an activity decorated with :func:`workspace`.

    Returns:
        The local workspace :class:`~pathlib.Path`.

    Raises:
        RuntimeError: If called outside a workspace-decorated activity.
    """
    current = _current_workspace_path.get()
    if current is not None:
        return current
    raise RuntimeError(
        "get_workspace_path() called outside a workspace-decorated activity"
    )
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
"""Core Workspace class for syncing file trees with remote storage."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import shutil
|
|
6
|
+
import tempfile
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Literal
|
|
9
|
+
from urllib.parse import urlparse
|
|
10
|
+
|
|
11
|
+
import fsspec
|
|
12
|
+
|
|
13
|
+
from temporal_workdir._archive import pack, unpack
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Workspace:
    """Sync a local directory with a remote storage location.

    A Workspace maps a remote URL (the "key") to a local directory. On entry,
    the remote archive is downloaded and unpacked. On clean exit, the local
    directory is packed and uploaded back.

    Works with any storage backend supported by fsspec (GCS, S3, Azure, local
    filesystem, etc.). The backend is auto-detected from the URL scheme.

    Usage::

        async with Workspace("gs://bucket/state/component-x") as ws:
            data = json.loads((ws.path / "component.json").read_text())
            (ws.path / "output.csv").write_text("a,b\\n1,2")
        # On clean exit: local dir archived and uploaded to remote

    Args:
        remote_url: Remote storage URL. The scheme determines the fsspec
            backend (``gs://`` for GCS, ``s3://`` for S3, ``file://`` for
            local, etc.). A ``.tar.gz`` suffix is appended automatically
            for the archive file.
        local_path: Local directory to use as the working copy. If ``None``,
            a temporary directory is created.
        cleanup: What to do with the local directory after push.
            ``"auto"`` deletes it, ``"keep"`` leaves it in place.
        storage_options: Extra keyword arguments passed to
            ``fsspec.filesystem()``. Use for authentication, project IDs, etc.

    NOTE(review): the fsspec calls in :meth:`pull`/:meth:`push` are the
    synchronous filesystem API, so they block the running event loop while
    transferring data — confirm this is acceptable for large archives.
    """

    def __init__(
        self,
        remote_url: str,
        local_path: Path | None = None,
        cleanup: Literal["auto", "keep"] = "auto",
        **storage_options: object,
    ) -> None:
        # Normalize away any trailing slash so the archive name is stable.
        self._remote_url = remote_url.rstrip("/")
        self._archive_url = self._remote_url + ".tar.gz"
        self._cleanup = cleanup
        self._storage_options = storage_options

        parsed = urlparse(self._archive_url)
        # A bare path with no scheme falls back to the local filesystem.
        self._protocol = parsed.scheme or "file"
        # fsspec expects path without scheme for most backends; keep the
        # netloc (e.g. the bucket name) as the leading path component.
        self._remote_path = (
            parsed.netloc + parsed.path if parsed.netloc else parsed.path
        )

        self._fs = fsspec.filesystem(self._protocol, **storage_options)

        # Only directories created by this instance are ever deleted by
        # cleanup="auto"; caller-supplied paths are left untouched.
        if local_path is not None:
            self._local_path = local_path
            self._owns_tempdir = False
        else:
            self._local_path = Path(tempfile.mkdtemp(prefix="temporal-workdir-"))
            self._owns_tempdir = True

    @property
    def path(self) -> Path:
        """The local working directory.

        Read and write files here freely. Changes are pushed to remote storage
        when the context manager exits cleanly.
        """
        return self._local_path

    async def pull(self) -> None:
        """Download and unpack the remote archive to the local directory.

        If no archive exists at the remote URL, the local directory is left
        empty (first run). Existing local files are removed before unpacking.
        """
        if not self._fs.exists(self._remote_path):
            # First run: nothing remote yet, just ensure the dir exists.
            self._local_path.mkdir(parents=True, exist_ok=True)
            return

        data = self._fs.cat_file(self._remote_path)
        # Clear local dir before unpacking to avoid stale files
        if self._local_path.exists():
            shutil.rmtree(self._local_path)
        unpack(data, self._local_path)

    async def push(self) -> None:
        """Pack the local directory and upload to remote storage.

        If the local directory is empty, the remote archive is deleted
        (if it exists) to keep storage clean.
        """
        files = list(self._local_path.rglob("*"))
        # "Empty" means no regular files; directories alone don't count,
        # matching pack(), which archives only files.
        if not any(f.is_file() for f in files):
            # Empty workspace — remove remote archive if it exists
            if self._fs.exists(self._remote_path):
                self._fs.rm(self._remote_path)
            return

        data = pack(self._local_path)
        self._fs.pipe_file(self._remote_path, data)

    async def __aenter__(self) -> Workspace:
        """Pull remote state and return the workspace."""
        await self.pull()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: object,
    ) -> None:
        """Push local state on clean exit, then optionally clean up."""
        # On exception, both push and cleanup are skipped: remote state stays
        # untouched and local files remain for post-mortem inspection.
        # NOTE(review): an owned temp dir therefore persists after a failed
        # run — confirm this accumulation is acceptable on long-lived workers.
        if exc_type is None:
            await self.push()
            if self._cleanup == "auto" and self._owns_tempdir:
                shutil.rmtree(self._local_path, ignore_errors=True)
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: temporal-workdir
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Remote-backed workspace sync for Temporal activities
|
|
5
|
+
Project-URL: Homepage, https://github.com/saeedseyfi/temporal-workdir
|
|
6
|
+
Project-URL: Repository, https://github.com/saeedseyfi/temporal-workdir
|
|
7
|
+
Author-email: Saeed Seyfi <me@saeedseyfi.com>
|
|
8
|
+
License-Expression: MIT
|
|
9
|
+
License-File: LICENSE
|
|
10
|
+
Keywords: distributed,fsspec,temporal,workflow,workspace
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
16
|
+
Requires-Python: >=3.10
|
|
17
|
+
Requires-Dist: fsspec>=2024.1.0
|
|
18
|
+
Requires-Dist: temporalio>=1.0.0
|
|
19
|
+
Provides-Extra: dev
|
|
20
|
+
Requires-Dist: pyright>=1.1; extra == 'dev'
|
|
21
|
+
Requires-Dist: pytest-asyncio>=0.21; extra == 'dev'
|
|
22
|
+
Requires-Dist: pytest>=7.0; extra == 'dev'
|
|
23
|
+
Requires-Dist: ruff>=0.5.0; extra == 'dev'
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
|
|
26
|
+
# Workspace Sync for Temporal Activities
|
|
27
|
+
|
|
28
|
+
Sync a local directory with remote storage before and after a Temporal activity. Enables file-based activities to work across distributed workers where disk is not shared.
|
|
29
|
+
|
|
30
|
+
## Problem
|
|
31
|
+
|
|
32
|
+
Temporal activities that read/write files on local disk break when you scale to multiple worker instances. Each worker has its own disk. This module syncs a remote storage location to a local temp directory before the activity runs, and pushes changes back after.
|
|
33
|
+
|
|
34
|
+
## Install
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
pip install temporal-workdir
|
|
38
|
+
|
|
39
|
+
# With a specific cloud backend:
|
|
40
|
+
pip install temporal-workdir gcsfs # Google Cloud Storage
|
|
41
|
+
pip install temporal-workdir s3fs # Amazon S3
|
|
42
|
+
pip install temporal-workdir adlfs # Azure Blob Storage
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
## Usage
|
|
46
|
+
|
|
47
|
+
### As a context manager (generic, works anywhere)
|
|
48
|
+
|
|
49
|
+
```python
|
|
50
|
+
from temporal_workdir import Workspace
|
|
51
|
+
|
|
52
|
+
async with Workspace("gs://my-bucket/pipeline/component-x") as ws:
|
|
53
|
+
# ws.path is a local Path — read and write files normally
|
|
54
|
+
data = json.loads((ws.path / "component.json").read_text())
|
|
55
|
+
(ws.path / "result.csv").write_text("col1,col2\nval1,val2")
|
|
56
|
+
# On clean exit: local dir is archived and uploaded
|
|
57
|
+
# On exception: no upload (remote state unchanged)
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
### As a Temporal activity decorator
|
|
61
|
+
|
|
62
|
+
```python
|
|
63
|
+
from temporalio import activity
|
|
64
|
+
from temporal_workdir import workspace, get_workspace_path
|
|
65
|
+
|
|
66
|
+
@workspace("gs://my-bucket/{workflow_id}/{activity_type}")
|
|
67
|
+
@activity.defn
|
|
68
|
+
async def extract(input: ExtractInput) -> ExtractOutput:
|
|
69
|
+
ws = get_workspace_path()
|
|
70
|
+
# Template vars resolved from activity.info()
|
|
71
|
+
source = (ws / "source.json").read_text()
|
|
72
|
+
(ws / "output.csv").write_text(process(source))
|
|
73
|
+
return ExtractOutput(success=True)
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
### Custom template variables
|
|
77
|
+
|
|
78
|
+
```python
|
|
79
|
+
@workspace(
|
|
80
|
+
"gs://my-bucket/{workflow_id}/components/{component}",
|
|
81
|
+
key_fn=lambda input: {"component": input.component_name},
|
|
82
|
+
)
|
|
83
|
+
@activity.defn
|
|
84
|
+
async def register(input: RegisterInput) -> RegisterOutput:
|
|
85
|
+
ws = get_workspace_path()
|
|
86
|
+
...
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
## How It Works
|
|
90
|
+
|
|
91
|
+
1. **Pull**: On entry, downloads `{remote_url}.tar.gz` and unpacks to a temp directory
|
|
92
|
+
2. **Execute**: Your activity reads/writes files in the local directory
|
|
93
|
+
3. **Push**: On clean exit, packs the directory into `tar.gz` and uploads
|
|
94
|
+
|
|
95
|
+
If the archive doesn't exist yet (first run), the local directory starts empty. If the activity raises an exception, no push happens. Remote state is untouched.
|
|
96
|
+
|
|
97
|
+
## Storage Backends
|
|
98
|
+
|
|
99
|
+
Any backend supported by [fsspec](https://filesystem-spec.readthedocs.io/):
|
|
100
|
+
|
|
101
|
+
| Scheme | Backend | Extra package |
|
|
102
|
+
|--------|---------|--------------|
|
|
103
|
+
| `gs://` | Google Cloud Storage | `gcsfs` |
|
|
104
|
+
| `s3://` | Amazon S3 | `s3fs` |
|
|
105
|
+
| `az://` | Azure Blob Storage | `adlfs` |
|
|
106
|
+
| `file://` | Local filesystem | (none) |
|
|
107
|
+
| `memory://` | In-memory (testing) | (none) |
|
|
108
|
+
|
|
109
|
+
Pass backend-specific options as keyword arguments:
|
|
110
|
+
|
|
111
|
+
```python
|
|
112
|
+
Workspace("gs://bucket/key", project="my-gcp-project", token="cloud")
|
|
113
|
+
```
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
temporal_workdir/__init__.py,sha256=XZIJNC0RxXmy6wUsO5ovYP10dilUPDNyqQ960GT2etw,657
|
|
2
|
+
temporal_workdir/_archive.py,sha256=NDMj_3_yLW93ul_nxFXxZwsNHQK9daVasJCY3v0XyJM,1421
|
|
3
|
+
temporal_workdir/_temporal.py,sha256=6AGuYJ4Q4YPuszKx-lgDJwa2EKGKVH3fWhvsY7FmiZ8,3714
|
|
4
|
+
temporal_workdir/_workspace.py,sha256=-6wCo4BjeJ7SaHacDqaqvoJ7IoY7cTLWj5XWHjY2K5Y,4734
|
|
5
|
+
temporal_workdir-0.1.0.dist-info/METADATA,sha256=JZYZLEdNU8Crv84t1P2SxvZHODNwZrJb3ETZxrrFLiI,3956
|
|
6
|
+
temporal_workdir-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
7
|
+
temporal_workdir-0.1.0.dist-info/licenses/LICENSE,sha256=vifq4N7-y4oruUiHcZ67R5Z6VguCHMul7LGv8n3ZrcI,1068
|
|
8
|
+
temporal_workdir-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Saeed Seyfi
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|