torchx-nightly 2025.9.15__py3-none-any.whl → 2025.9.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- torchx/cli/cmd_run.py +6 -3
- torchx/runner/api.py +30 -25
- torchx/runner/events/__init__.py +20 -10
- torchx/runner/events/api.py +1 -1
- torchx/schedulers/api.py +14 -14
- torchx/specs/finder.py +3 -22
- torchx/workspace/__init__.py +1 -1
- torchx/workspace/api.py +121 -2
- {torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/METADATA +1 -1
- {torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/RECORD +14 -14
- {torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/LICENSE +0 -0
- {torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/WHEEL +0 -0
- {torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/entry_points.txt +0 -0
- {torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/top_level.txt +0 -0
torchx/cli/cmd_run.py
CHANGED
@@ -36,6 +36,7 @@ from torchx.specs.finder import (
 )
 from torchx.util.log_tee_helpers import tee_logs
 from torchx.util.types import none_throws
+from torchx.workspace import Workspace
 
 
 MISSING_COMPONENT_ERROR_MSG = (
@@ -92,7 +93,7 @@ def torchx_run_args_from_json(json_data: Dict[str, Any]) -> TorchXRunArgs:
 
     torchx_args = TorchXRunArgs(**filtered_json_data)
     if torchx_args.workspace == "":
-        torchx_args.workspace = f"
+        torchx_args.workspace = f"{Path.cwd()}"
     return torchx_args
 
 
@@ -250,7 +251,7 @@ class CmdRun(SubCommand):
         subparser.add_argument(
             "--workspace",
             "--buck-target",
-            default=f"
+            default=f"{Path.cwd()}",
             action=torchxconfig_run,
             help="local workspace to build/patch (buck-target of main binary if using buck)",
         )
@@ -289,12 +290,14 @@ class CmdRun(SubCommand):
             else args.component_args
        )
        try:
+            workspace = Workspace.from_str(args.workspace) if args.workspace else None
+
            if args.dryrun:
                dryrun_info = runner.dryrun_component(
                    args.component_name,
                    component_args,
                    args.scheduler,
-                    workspace=
+                    workspace=workspace,
                    cfg=args.scheduler_cfg,
                    parent_run_id=args.parent_run_id,
                )
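With this change the CLI converts the raw ``--workspace`` value into a ``Workspace`` before handing it to the runner. A rough illustration of what ``Workspace.from_str`` (added in torchx/workspace/api.py further down) would produce for typical values, assuming standard PyYAML parsing; the paths are placeholders:

from torchx.workspace import Workspace

# Default value (f"{Path.cwd()}"): a single unmapped project.
ws = Workspace.from_str("/home/me/project")
assert ws.projects == {"/home/me/project": ""}
assert ws.is_unmapped_single_project()

# A YAML mapping selects multiple projects; empty/None targets become "".
ws = Workspace.from_str("{/home/me/lib_a: lib_a, /home/me/project: }")
assert ws.projects == {"/home/me/lib_a": "lib_a", "/home/me/project": ""}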
torchx/runner/api.py
CHANGED
@@ -54,7 +54,7 @@ from torchx.tracker.api import (
 from torchx.util.session import get_session_id_or_create_new, TORCHX_INTERNAL_SESSION_ID
 
 from torchx.util.types import none_throws
-from torchx.workspace.api import WorkspaceMixin
+from torchx.workspace.api import Workspace, WorkspaceMixin
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -171,7 +171,7 @@ class Runner:
         component_args: Union[list[str], dict[str, Any]],
         scheduler: str,
         cfg: Optional[Mapping[str, CfgVal]] = None,
-        workspace: Optional[str] = None,
+        workspace: Optional[Union[Workspace, str]] = None,
         parent_run_id: Optional[str] = None,
     ) -> AppHandle:
         """
@@ -206,7 +206,7 @@ class Runner:
            ComponentNotFoundException: if the ``component_path`` is failed to resolve.
        """
 
-        with log_event("run_component"
+        with log_event("run_component") as ctx:
            dryrun_info = self.dryrun_component(
                component,
                component_args,
@@ -217,7 +217,8 @@ class Runner:
            )
            handle = self.schedule(dryrun_info)
            app = none_throws(dryrun_info._app)
-
+
+            ctx._torchx_event.workspace = str(workspace)
            ctx._torchx_event.scheduler = none_throws(dryrun_info._scheduler)
            ctx._torchx_event.app_image = app.roles[0].image
            ctx._torchx_event.app_id = parse_app_handle(handle)[2]
@@ -230,7 +231,7 @@ class Runner:
         component_args: Union[list[str], dict[str, Any]],
         scheduler: str,
         cfg: Optional[Mapping[str, CfgVal]] = None,
-        workspace: Optional[str] = None,
+        workspace: Optional[Union[Workspace, str]] = None,
         parent_run_id: Optional[str] = None,
     ) -> AppDryRunInfo:
         """
@@ -259,7 +260,7 @@ class Runner:
         app: AppDef,
         scheduler: str,
         cfg: Optional[Mapping[str, CfgVal]] = None,
-        workspace: Optional[str] = None,
+        workspace: Optional[Union[Workspace, str]] = None,
         parent_run_id: Optional[str] = None,
     ) -> AppHandle:
         """
@@ -272,9 +273,7 @@ class Runner:
            An application handle that is used to call other action APIs on the app.
        """
 
-        with log_event(
-            api="run", runcfg=json.dumps(cfg) if cfg else None, workspace=workspace
-        ) as ctx:
+        with log_event(api="run") as ctx:
            dryrun_info = self.dryrun(
                app,
                scheduler,
@@ -283,10 +282,15 @@ class Runner:
                parent_run_id=parent_run_id,
            )
            handle = self.schedule(dryrun_info)
-
-
-
-
+
+            event = ctx._torchx_event
+            event.scheduler = scheduler
+            event.runcfg = json.dumps(cfg) if cfg else None
+            event.workspace = str(workspace)
+            event.app_id = parse_app_handle(handle)[2]
+            event.app_image = none_throws(dryrun_info._app).roles[0].image
+            event.app_metadata = app.metadata
+
            return handle
 
    def schedule(self, dryrun_info: AppDryRunInfo) -> AppHandle:
@@ -320,21 +324,22 @@ class Runner:
 
        """
        scheduler = none_throws(dryrun_info._scheduler)
-        app_image = none_throws(dryrun_info._app).roles[0].image
        cfg = dryrun_info._cfg
-        with log_event(
-            "schedule",
-            scheduler,
-            app_image=app_image,
-            runcfg=json.dumps(cfg) if cfg else None,
-        ) as ctx:
+        with log_event("schedule") as ctx:
            sched = self._scheduler(scheduler)
            app_id = sched.schedule(dryrun_info)
            app_handle = make_app_handle(scheduler, self._name, app_id)
+
            app = none_throws(dryrun_info._app)
            self._apps[app_handle] = app
-
-            ctx._torchx_event
+
+            event = ctx._torchx_event
+            event.scheduler = scheduler
+            event.runcfg = json.dumps(cfg) if cfg else None
+            event.app_id = app_id
+            event.app_image = none_throws(dryrun_info._app).roles[0].image
+            event.app_metadata = app.metadata
+
            return app_handle
 
    def name(self) -> str:
@@ -345,7 +350,7 @@ class Runner:
         app: AppDef,
         scheduler: str,
         cfg: Optional[Mapping[str, CfgVal]] = None,
-        workspace: Optional[str] = None,
+        workspace: Optional[Union[Workspace, str]] = None,
         parent_run_id: Optional[str] = None,
     ) -> AppDryRunInfo:
         """
@@ -414,7 +419,7 @@ class Runner:
            "dryrun",
            scheduler,
            runcfg=json.dumps(cfg) if cfg else None,
-            workspace=workspace,
+            workspace=str(workspace),
        ):
            sched = self._scheduler(scheduler)
            resolved_cfg = sched.run_opts().resolve(cfg)
@@ -429,7 +434,7 @@ class Runner:
                logger.info(
                    'To disable workspaces pass: --workspace="" from CLI or workspace=None programmatically.'
                )
-                sched.
+                sched.build_workspace_and_update_role2(role, workspace, resolved_cfg)
 
                if old_img != role.image:
                    logger.info(
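The runner-facing effect of these signature changes: run(), dryrun(), run_component() and dryrun_component() now accept either a plain workspace path or a Workspace, and the value is only stringified (str(workspace)) for the telemetry event. A hypothetical caller sketch; the scheduler name, paths and app definition are placeholders, and the actual submission calls are left commented out:

from torchx.runner import get_runner
from torchx.specs import AppDef, Role
from torchx.workspace import Workspace

app = AppDef(name="demo", roles=[Role(name="worker", image="/tmp/app", entrypoint="echo")])
runner = get_runner()

# Backwards compatible: a single project path still works.
# handle = runner.run(app, scheduler="local_cwd", workspace="/home/me/project")

# New: a multi-project Workspace can be passed straight through.
ws = Workspace(projects={"/home/me/lib_a": "lib_a", "/home/me/project": ""})
# handle = runner.run(app, scheduler="local_cwd", workspace=ws)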
torchx/runner/events/__init__.py
CHANGED
@@ -33,8 +33,9 @@ from torchx.util.session import get_session_id_or_create_new
 
 from .api import SourceType, TorchxEvent  # noqa F401
 
-
-
+_events_logger: Optional[logging.Logger] = None
+
+log: logging.Logger = logging.getLogger(__name__)
 
 
 def _get_or_create_logger(destination: str = "null") -> logging.Logger:
@@ -51,19 +52,28 @@ def _get_or_create_logger(destination: str = "null") -> logging.Logger:
        a new logger if None provided.
    """
    global _events_logger
+
    if _events_logger:
        return _events_logger
-
-
-
-
-
-
-
+    else:
+        logging_handler = get_logging_handler(destination)
+        logging_handler.setLevel(logging.DEBUG)
+        _events_logger = logging.getLogger(f"torchx-events-{destination}")
+        # Do not propagate message to the root logger
+        _events_logger.propagate = False
+        _events_logger.addHandler(logging_handler)
+
+    assert _events_logger  # make type-checker happy
+    return _events_logger
 
 
 def record(event: TorchxEvent, destination: str = "null") -> None:
-
+    try:
+        serialized_event = event.serialize()
+    except Exception:
+        log.exception("failed to serialize event, will not record event")
+    else:
+        _get_or_create_logger(destination).info(serialized_event)
 
 
 class log_event:
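The reworked record() swallows serialization failures instead of letting them abort the run: the exception goes to the new module-level log logger, and the event is emitted only if serialize() succeeded. A stdlib-only sketch of the same try/except/else pattern (not the torchx implementation; a plain dict stands in for a TorchxEvent):

import json
import logging

log = logging.getLogger(__name__)

def record_sketch(event: dict, events_logger: logging.Logger) -> None:
    try:
        serialized = json.dumps(event)  # stands in for event.serialize()
    except Exception:
        # A bad event must not break the actual job submission.
        log.exception("failed to serialize event, will not record event")
    else:
        events_logger.info(serialized)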
torchx/runner/events/api.py
CHANGED
@@ -29,7 +29,7 @@ class TorchxEvent:
        scheduler: Scheduler that is used to execute request
        api: Api name
        app_id: Unique id that is set by the underlying scheduler
-
+        app_image: Image/container bundle that is used to execute request.
        app_metadata: metadata to the app (treatment of metadata is scheduler dependent)
        runcfg: Run config that was used to schedule app.
        source: Type of source the event is generated.
torchx/schedulers/api.py
CHANGED
@@ -12,7 +12,7 @@ import re
 from dataclasses import dataclass, field
 from datetime import datetime
 from enum import Enum
-from typing import Generic, Iterable, List, Optional, TypeVar
+from typing import Generic, Iterable, List, Optional, TypeVar, Union
 
 from torchx.specs import (
     AppDef,
@@ -23,7 +23,7 @@ from torchx.specs import (
     RoleStatus,
     runopts,
 )
-from torchx.workspace.api import WorkspaceMixin
+from torchx.workspace.api import Workspace, WorkspaceMixin
 
 
 DAYS_IN_2_WEEKS = 14
@@ -131,7 +131,7 @@ class Scheduler(abc.ABC, Generic[T, A, D]):
        self,
        app: A,
        cfg: T,
-        workspace: Optional[str] = None,
+        workspace: Optional[Union[Workspace, str]] = None,
    ) -> str:
        """
        Submits the application to be run by the scheduler.
@@ -144,10 +144,9 @@ class Scheduler(abc.ABC, Generic[T, A, D]):
        # pyre-fixme: Generic cfg type passed to resolve
        resolved_cfg = self.run_opts().resolve(cfg)
        if workspace:
-
-
-
-            sched.build_workspace_and_update_role(role, workspace, resolved_cfg)
+            assert isinstance(self, WorkspaceMixin)
+            self.build_workspace_and_update_role2(app.roles[0], workspace, resolved_cfg)
+
        # pyre-fixme: submit_dryrun takes Generic type for resolved_cfg
        dryrun_info = self.submit_dryrun(app, resolved_cfg)
        return self.schedule(dryrun_info)
@@ -356,13 +355,14 @@ class Scheduler(abc.ABC, Generic[T, A, D]):
 
        Raises error if application is not compatible with scheduler
        """
-        if isinstance(app, AppDef):
-
-
-
-
-
-
+        if not isinstance(app, AppDef):
+            return
+
+        for role in app.roles:
+            if role.resource == NULL_RESOURCE:
+                raise ValueError(
+                    f"No resource for role: {role.image}. Did you forget to attach resource to the role"
+                )
 
 
 def filter_regex(regex: str, data: Iterable[str]) -> Iterable[str]:
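Illustrative only: the reshaped validation raises for roles that never had a resource attached, assuming Role.resource defaults to the NULL_RESOURCE sentinel as in torchx.specs.api; the app below is a placeholder:

from torchx.specs import AppDef, Role
from torchx.specs.api import NULL_RESOURCE

app = AppDef(name="demo", roles=[Role(name="worker", image="demo/image:1")])
assert app.roles[0].resource == NULL_RESOURCE  # this role would trip the new check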
torchx/specs/finder.py
CHANGED
@@ -278,25 +278,12 @@ class CustomComponentsFinder(ComponentsFinder):
        linter_errors = validate(path, function_name, validators)
        return [linter_error.description for linter_error in linter_errors]
 
-    def _get_path_to_function_decl(
-        self, function: Callable[..., Any]  # pyre-ignore[2]
-    ) -> str:
-        """
-        Attempts to return the path to the file where the function is implemented.
-        This can be different from the path where the function is looked up, for example if we have:
-        my_component defined in some_file.py, imported in other_file.py
-        and the component is invoked as other_file.py:my_component
-        """
-        # Unwrap decorated functions to get the original function
-        unwrapped_function = inspect.unwrap(function)
-        path_to_function_decl = inspect.getabsfile(unwrapped_function)
-        if path_to_function_decl is None or not os.path.isfile(path_to_function_decl):
-            return self._filepath
-        return path_to_function_decl
-
    def find(
        self, validators: Optional[List[ComponentFunctionValidator]]
    ) -> List[_Component]:
+        validation_errors = self._get_validation_errors(
+            self._filepath, self._function_name, validators
+        )
 
        file_source = read_conf_file(self._filepath)
        namespace = copy.copy(globals())
@@ -309,12 +296,6 @@ class CustomComponentsFinder(ComponentsFinder):
        )
        app_fn = namespace[self._function_name]
        fn_desc, _ = get_fn_docstring(app_fn)
-
-        func_path = self._get_path_to_function_decl(app_fn)
-        validation_errors = self._get_validation_errors(
-            func_path, self._function_name, validators
-        )
-
        return [
            _Component(
                name=f"{self._filepath}:{self._function_name}",
torchx/workspace/__init__.py
CHANGED
@@ -22,4 +22,4 @@ Example workspace paths:
 * ``memory://foo-bar/`` an in-memory workspace for notebook/programmatic usage
 """
 
-from torchx.workspace.api import walk_workspace, WorkspaceMixin  # noqa: F401
+from torchx.workspace.api import walk_workspace, Workspace, WorkspaceMixin  # noqa: F401
torchx/workspace/api.py
CHANGED
@@ -9,9 +9,22 @@
 import abc
 import fnmatch
 import posixpath
+import shutil
+import tempfile
 import warnings
 from dataclasses import dataclass
-from
+from pathlib import Path
+from typing import (
+    Any,
+    Dict,
+    Generic,
+    Iterable,
+    Mapping,
+    Tuple,
+    TYPE_CHECKING,
+    TypeVar,
+    Union,
+)
 
 from torchx.specs import AppDef, CfgVal, Role, runopts
 
@@ -75,6 +88,71 @@ class WorkspaceBuilder(Generic[PackageType, WorkspaceConfigType]):
     pass
 
 
+@dataclass
+class Workspace:
+    """
+    Specifies a local "workspace" (a set of directories). Workspaces are ad-hoc built
+    into an (usually ephemeral) image. This effectively mirrors the local code changes
+    at job submission time.
+
+    For example:
+
+    1. ``projects={"~/github/torch": "torch"}`` copies ``~/github/torch/**`` into ``$REMOTE_WORKSPACE_ROOT/torch/**``
+    2. ``projects={"~/github/torch": ""}`` copies ``~/github/torch/**`` into ``$REMOTE_WORKSPACE_ROOT/**``
+
+    The exact location of ``$REMOTE_WORKSPACE_ROOT`` is implementation dependent and varies between
+    different implementations of :py:class:`~torchx.workspace.api.WorkspaceMixin`.
+    Check the scheduler documentation for details on which workspace it supports.
+
+    Note: ``projects`` maps the location of the local project to a sub-directory in the remote workspace root directory.
+    Typically the local project location is a directory path (e.g. ``/home/foo/github/torch``).
+
+
+    Attributes:
+        projects: mapping of local project to the sub-dir in the remote workspace dir.
+    """
+
+    projects: dict[str, str]
+
+    def is_unmapped_single_project(self) -> bool:
+        """
+        Returns ``True`` if this workspace only has 1 project
+        and its target mapping is an empty string.
+        """
+        return len(self.projects) == 1 and not next(iter(self.projects.values()))
+
+    @staticmethod
+    def from_str(workspace: str) -> "Workspace":
+        import yaml
+
+        projects = yaml.safe_load(workspace)
+        if isinstance(projects, str):  # single project workspace
+            projects = {projects: ""}
+        else:  # multi-project workspace
+            # Replace None mappings with "" (empty string)
+            projects = {k: ("" if v is None else v) for k, v in projects.items()}
+
+        return Workspace(projects)
+
+    def __str__(self) -> str:
+        """
+        Returns a string representation of the Workspace by concatenating
+        the project mappings using ';' as a delimiter and ':' between key and value.
+        If the single-project workspace with no target mapping, then simply
+        returns the src (local project dir)
+
+        NOTE: meant to be used for logging purposes not serde.
+        Therefore not symmetric with :py:func:`Workspace.from_str`.
+
+        """
+        if self.is_unmapped_single_project():
+            return next(iter(self.projects))
+        else:
+            return ";".join(
+                k if not v else f"{k}:{v}" for k, v in self.projects.items()
+            )
+
+
 class WorkspaceMixin(abc.ABC, Generic[T]):
     """
     Note: (Prototype) this interface may change without notice!
@@ -100,9 +178,50 @@ class WorkspaceMixin(abc.ABC, Generic[T]):
        """
        return runopts()
 
+    def build_workspace_and_update_role2(
+        self,
+        role: Role,
+        workspace: Union[Workspace, str],
+        cfg: Mapping[str, CfgVal],
+    ) -> None:
+        """
+        Same as :py:meth:`build_workspace_and_update_role` but operates
+        on :py:class:`Workspace` (supports multi-project workspaces)
+        as well as ``str`` (for backwards compatibility).
+
+        If ``workspace`` is a ``str`` this method simply calls
+        :py:meth:`build_workspace_and_update_role`.
+
+        If ``workspace`` is :py:class:`Workspace` then the default
+        impl copies all the projects into a tmp directory and passes the tmp dir to
+        :py:meth:`build_workspace_and_update_role`
+
+        Subclasses can override this method to customize multi-project
+        workspace building logic.
+        """
+        if isinstance(workspace, Workspace):
+            if not workspace.is_unmapped_single_project():
+                with tempfile.TemporaryDirectory(suffix="torchx_workspace_") as outdir:
+                    for src, dst in workspace.projects.items():
+                        dst_path = Path(outdir) / dst
+                        if Path(src).is_file():
+                            shutil.copy2(src, dst_path)
+                        else:  # src is dir
+                            shutil.copytree(src, dst_path, dirs_exist_ok=True)
+
+                    self.build_workspace_and_update_role(role, outdir, cfg)
+                    return
+            else:  # single project workspace with no target mapping (treat like a str workspace)
+                workspace = str(workspace)
+
+        self.build_workspace_and_update_role(role, workspace, cfg)
+
    @abc.abstractmethod
    def build_workspace_and_update_role(
-        self,
+        self,
+        role: Role,
+        workspace: str,
+        cfg: Mapping[str, CfgVal],
    ) -> None:
        """
        Builds the specified ``workspace`` with respect to ``img``
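A small usage sketch of the new Workspace dataclass, based on the code added above; the paths are illustrative. Note that __str__ is intended for logging and is not a serialization format:

from torchx.workspace import Workspace

multi = Workspace(projects={"/home/me/lib_a": "lib_a", "/home/me/project": ""})
assert not multi.is_unmapped_single_project()
assert str(multi) == "/home/me/lib_a:lib_a;/home/me/project"

# An unmapped single project behaves like the legacy str workspace: the default
# build_workspace_and_update_role2() forwards it to build_workspace_and_update_role()
# without the copy-to-tempdir step.
single = Workspace({"/home/me/project": ""})
assert single.is_unmapped_single_project()
assert str(single) == "/home/me/project"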
{torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/RECORD
CHANGED
@@ -16,7 +16,7 @@ torchx/cli/cmd_configure.py,sha256=1kTv0qbsbV44So74plAySwWu56pQrqjhfW_kbfdC3Rw,1
 torchx/cli/cmd_describe.py,sha256=E5disbHoKTsqYKp2s3DaFW9GDLCCOgdOc3pQoHKoyCs,1283
 torchx/cli/cmd_list.py,sha256=4Y1ZOq-kqJbztoBt56hAW_InJEaJuDAjpKWgMhBw4II,1507
 torchx/cli/cmd_log.py,sha256=v-EZYUDOcG95rEgTnrsmPJMUyxM9Mk8YFAJtUxtgViE,5475
-torchx/cli/cmd_run.py,sha256=
+torchx/cli/cmd_run.py,sha256=TshvEMTxMRj5O0KhetzHepZUaAFq8R5nFgY8GC_Gl6g,18576
 torchx/cli/cmd_runopts.py,sha256=NWZiP8XpQjfTDJgays2c6MgL_8wxFoeDge6NstaZdKk,1302
 torchx/cli/cmd_status.py,sha256=22IAEmKs0qkG6kJi83u9dRX2Q-ntT7yehVx7FxtY-vQ,2114
 torchx/cli/cmd_tracker.py,sha256=RfLxE4Cq1wfk7k051RtZ8RPJp0pEKSCa3KmTeRs3LF8,5218
@@ -56,16 +56,16 @@ torchx/pipelines/kfp/__init__.py,sha256=8iJ8lql_fxwuk9VCYSxXnX6tPL228fB5mDZpOs-k
 torchx/pipelines/kfp/adapter.py,sha256=5GeHULjb1kxG6wJtYVLpNkgdzUi4iYEaR42VFOwT6fY,9045
 torchx/pipelines/kfp/version.py,sha256=mYBxd6bm4MeR34D--xo-JLQ9wHeAl_ZQLwbItCf9tr0,539
 torchx/runner/__init__.py,sha256=x8Sz7s_tLxPgJgvWIhK4ju9BNZU61uBFywGwDY6CqJs,315
-torchx/runner/api.py,sha256=
+torchx/runner/api.py,sha256=0kDyOgmAcq0X-bTWiBIqX3BJOCYSa5-TZ7o2Hrqxzdw,30053
 torchx/runner/config.py,sha256=20X-vveAJVjb1AjjDSC6x_BVcdrTj9_ZLt_CHTykiFo,18266
-torchx/runner/events/__init__.py,sha256=
-torchx/runner/events/api.py,sha256=
+torchx/runner/events/__init__.py,sha256=cMiNjnr4eUNQ2Nxxtu4nsvN5lu56b-a6nJ-ct3i7DQk,5536
+torchx/runner/events/api.py,sha256=bvxKBAYK8LzbrBNaNLgL1x0aivtfANmWo1EMGOrSR8k,2668
 torchx/runner/events/handlers.py,sha256=ThHCIJW21BfBgB7b6ftyjASJmD1KdizpjuTtsyqnvJs,522
 torchx/runtime/__init__.py,sha256=Wxje2BryzeQneFu5r6P9JJiEKG-_C9W1CcZ_JNrKT6g,593
 torchx/runtime/tracking/__init__.py,sha256=dYnAPnrXYREfPXkpHhdOFkcYIODWEbA13PdD-wLQYBo,3055
 torchx/runtime/tracking/api.py,sha256=SmUQyUKZqG3KlAhT7CJOGqRz1O274E4m63wQeOVq3CU,5472
 torchx/schedulers/__init__.py,sha256=igIBdxGhkuzH7oYVFXIA9xwjkSn3QzWZ_9dhfdl_M0I,2299
-torchx/schedulers/api.py,sha256=
+torchx/schedulers/api.py,sha256=lfxNhrEO6eYYqVuQzzj9sTXrZShuZkyYxJ1jPE-Lvpo,14561
 torchx/schedulers/aws_batch_scheduler.py,sha256=hFxYzSZEK2SVS5sEyQC5YvNI0JJUJUQsWORlYpj_h3M,28105
 torchx/schedulers/aws_sagemaker_scheduler.py,sha256=flN8GumKE2Dz4X_foAt6Jnvt-ZVojWs6pcyrHwB0hz0,20921
 torchx/schedulers/devices.py,sha256=RjVcu22ZRl_9OKtOtmA1A3vNXgu2qD6A9ST0L0Hsg4I,1734
@@ -86,7 +86,7 @@ torchx/specs/__init__.py,sha256=Gw_2actqR_oWFtxEkGXCxGk_yrWK5JDZzwysyyqmXao,6438
 torchx/specs/api.py,sha256=wkhHOxeWH_tFO3npKqPhNg4VX2NH5gPIFEylkPBo3AU,41315
 torchx/specs/builders.py,sha256=aozVl4q3h0mY5DDJCY1M1CyLC9SW66KJy8JIih8bZJo,13810
 torchx/specs/file_linter.py,sha256=6_aoeuS5d9UwXseKKfPgWNTwxj-f7G1i3uO9mQepti4,14402
-torchx/specs/finder.py,sha256=
+torchx/specs/finder.py,sha256=FcB6jQTNwnpc4OMV0F349kk0leu6O7JYjH_GW2d6GXE,17503
 torchx/specs/named_resources_aws.py,sha256=ISjHtifRJqB8u7PeAMiyLyO_S0WCaZiK-CFF3qe6JDU,11415
 torchx/specs/named_resources_generic.py,sha256=Sg4tAdqiiWDrDz2Lj_pnfsjzGIXKTou73wPseh6j55w,2646
 torchx/specs/test/components/__init__.py,sha256=J8qjUOysmcMAek2KFN13mViOXZxTYc5vCrF02t3VuFU,223
@@ -111,13 +111,13 @@ torchx/util/session.py,sha256=r6M_nyzXgcbk1GgYGZ324F_ehRGCqjjdVk4YgKxMj8M,1214
 torchx/util/shlex.py,sha256=eXEKu8KC3zIcd8tEy9_s8Ds5oma8BORr-0VGWNpG2dk,463
 torchx/util/strings.py,sha256=GkLWCmYS89Uv6bWc5hH0XwvHy7oQmprv2U7axC4A2e8,678
 torchx/util/types.py,sha256=xelu9gOUQ540GvvzDqk1wYb4csB09OgYQJwlVz62O5o,8889
-torchx/workspace/__init__.py,sha256=
-torchx/workspace/api.py,sha256=
+torchx/workspace/__init__.py,sha256=cZsKVvUWwDYcGhe6SCXQGBQfbk_yTnKEImOkI6xmu30,809
+torchx/workspace/api.py,sha256=Ct_75VU94fsH9Rf1WRe-wJGpVgl5O05S_Dq_t2ArJWA,11348
 torchx/workspace/dir_workspace.py,sha256=npNW_IjUZm_yS5r-8hrRkH46ndDd9a_eApT64m1S1T4,2268
 torchx/workspace/docker_workspace.py,sha256=PFu2KQNVC-0p2aKJ-W_BKA9ZOmXdCY2ABEkCExp3udQ,10269
-torchx_nightly-2025.9.
-torchx_nightly-2025.9.
-torchx_nightly-2025.9.
-torchx_nightly-2025.9.
-torchx_nightly-2025.9.
-torchx_nightly-2025.9.
+torchx_nightly-2025.9.17.dist-info/LICENSE,sha256=WVHfXhFC0Ia8LTKt_nJVYobdqTJVg_4J3Crrfm2A8KQ,1721
+torchx_nightly-2025.9.17.dist-info/METADATA,sha256=yrw7Nu_XLhDmm0HDXN59bbVIADlRARi0yOUl6VnJnyU,6104
+torchx_nightly-2025.9.17.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+torchx_nightly-2025.9.17.dist-info/entry_points.txt,sha256=T328AMXeKI3JZnnxfkEew2ZcMN1oQDtkXjMz7lkV-P4,169
+torchx_nightly-2025.9.17.dist-info/top_level.txt,sha256=pxew3bc2gsiViS0zADs0jb6kC5v8o_Yy_85fhHj_J1A,7
+torchx_nightly-2025.9.17.dist-info/RECORD,,
{torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/LICENSE
File without changes
{torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/WHEEL
File without changes
{torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/entry_points.txt
File without changes
{torchx_nightly-2025.9.15.dist-info → torchx_nightly-2025.9.17.dist-info}/top_level.txt
File without changes