xpk 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80) hide show
  1. xpk/api/__init__.py +15 -0
  2. xpk/api/storage_crd.yaml +52 -0
  3. xpk/commands/batch.py +27 -5
  4. xpk/commands/cluster.py +104 -80
  5. xpk/commands/cluster_gcluster.py +94 -10
  6. xpk/commands/common.py +44 -0
  7. xpk/commands/config.py +29 -0
  8. xpk/commands/info.py +8 -10
  9. xpk/commands/inspector.py +5 -11
  10. xpk/commands/job.py +9 -7
  11. xpk/commands/kind.py +34 -4
  12. xpk/commands/kjob_common.py +44 -0
  13. xpk/commands/run.py +128 -0
  14. xpk/commands/shell.py +27 -7
  15. xpk/commands/storage.py +267 -0
  16. xpk/commands/version.py +6 -18
  17. xpk/commands/workload.py +381 -184
  18. xpk/core/blueprint/blueprint_definitions.py +1 -0
  19. xpk/core/blueprint/blueprint_generator.py +132 -76
  20. xpk/core/capacity.py +185 -0
  21. xpk/core/cluster.py +564 -0
  22. xpk/core/cluster_private.py +6 -3
  23. xpk/core/commands.py +18 -14
  24. xpk/core/config.py +179 -0
  25. xpk/core/docker_container.py +225 -0
  26. xpk/core/docker_image.py +210 -0
  27. xpk/core/docker_resources.py +350 -0
  28. xpk/core/filestore.py +251 -0
  29. xpk/core/gcloud_context.py +196 -0
  30. xpk/core/gcluster_manager.py +20 -2
  31. xpk/core/gcsfuse.py +50 -0
  32. xpk/core/kjob.py +257 -18
  33. xpk/core/kueue.py +12 -6
  34. xpk/core/monitoring.py +134 -0
  35. xpk/core/nap.py +32 -20
  36. xpk/core/network.py +377 -0
  37. xpk/core/nodepool.py +581 -0
  38. xpk/core/pathways.py +124 -45
  39. xpk/core/remote_state/__init__.py +15 -0
  40. xpk/core/remote_state/fuse_remote_state.py +99 -0
  41. xpk/core/remote_state/remote_state_client.py +38 -0
  42. xpk/core/resources.py +238 -0
  43. xpk/core/scheduling.py +253 -0
  44. xpk/core/storage.py +581 -0
  45. xpk/core/system_characteristics.py +38 -1
  46. xpk/core/vertex.py +105 -0
  47. xpk/core/workload.py +209 -1
  48. xpk/core/workload_decorators/rdma_decorator.py +25 -5
  49. xpk/core/workload_decorators/storage_decorator.py +52 -0
  50. xpk/core/workload_decorators/tcpxo_decorator.py +70 -37
  51. xpk/main.py +3 -1
  52. xpk/parser/batch.py +10 -151
  53. xpk/parser/cluster.py +49 -8
  54. xpk/parser/common.py +189 -1
  55. xpk/parser/config.py +49 -0
  56. xpk/parser/core.py +27 -1
  57. xpk/parser/info.py +2 -1
  58. xpk/parser/inspector.py +3 -3
  59. xpk/parser/job.py +25 -4
  60. xpk/parser/kind.py +3 -2
  61. xpk/parser/run.py +47 -0
  62. xpk/parser/shell.py +10 -1
  63. xpk/parser/storage.py +316 -0
  64. xpk/parser/validators.py +3 -3
  65. xpk/parser/workload.py +118 -76
  66. xpk/templates/__init__.py +15 -0
  67. xpk/templates/storage.yaml +13 -0
  68. xpk/utils/gcs_utils.py +125 -0
  69. xpk/utils/kubectl.py +57 -0
  70. xpk/utils/objects.py +8 -5
  71. xpk/utils/templates.py +28 -0
  72. xpk/utils/validation.py +80 -0
  73. {xpk-0.6.0.dist-info → xpk-0.7.0.dist-info}/METADATA +165 -14
  74. xpk-0.7.0.dist-info/RECORD +92 -0
  75. {xpk-0.6.0.dist-info → xpk-0.7.0.dist-info}/WHEEL +1 -1
  76. xpk/core/core.py +0 -2824
  77. xpk-0.6.0.dist-info/RECORD +0 -57
  78. {xpk-0.6.0.dist-info → xpk-0.7.0.dist-info}/LICENSE +0 -0
  79. {xpk-0.6.0.dist-info → xpk-0.7.0.dist-info}/entry_points.txt +0 -0
  80. {xpk-0.6.0.dist-info → xpk-0.7.0.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: xpk
3
- Version: 0.6.0
3
+ Version: 0.7.0
4
4
  Summary: xpk helps Cloud developers to orchestrate training jobs on accelerators on GKE.
5
5
  Author-email: XPK team <xpk-code-reviewers@google.com>
6
6
  License: Apache-2.0
@@ -11,18 +11,23 @@ Classifier: Programming Language :: Python :: 3.11
11
11
  Requires-Python: >=3.10
12
12
  Description-Content-Type: text/markdown
13
13
  License-File: LICENSE
14
- Requires-Dist: cloud-accelerator-diagnostics
15
- Requires-Dist: tabulate
16
- Requires-Dist: ruamel.yaml
17
- Requires-Dist: pyyaml
18
- Requires-Dist: docker
19
- Requires-Dist: packaging
14
+ Requires-Dist: cloud-accelerator-diagnostics==0.1.1
15
+ Requires-Dist: tabulate==0.9.0
16
+ Requires-Dist: ruamel.yaml==0.18.10
17
+ Requires-Dist: pyyaml==6.0.2
18
+ Requires-Dist: docker==7.1.0
19
+ Requires-Dist: kubernetes==31.0.0
20
+ Requires-Dist: google-cloud==0.34.0
21
+ Requires-Dist: google-api-core==2.24.1
22
+ Requires-Dist: packaging==24.2
23
+ Requires-Dist: google-cloud-filestore==1.12.0
24
+ Requires-Dist: google-cloud-storage==2.19.0
20
25
  Provides-Extra: dev
21
26
  Requires-Dist: pyink==24.3.0; extra == "dev"
22
27
  Requires-Dist: pylint>=2.6.0; extra == "dev"
23
28
  Requires-Dist: pre-commit; extra == "dev"
24
29
  Requires-Dist: pytest; extra == "dev"
25
- Requires-Dist: docker; extra == "dev"
30
+ Requires-Dist: docker==7.1.0; extra == "dev"
26
31
 
27
32
  <!--
28
33
  Copyright 2023 Google LLC
@@ -42,6 +47,8 @@ Requires-Dist: docker; extra == "dev"
42
47
 
43
48
  [![Build Tests](https://github.com/google/xpk/actions/workflows/build_tests.yaml/badge.svg)](https://github.com/google/xpk/actions/workflows/build_tests.yaml)
44
49
  [![Nightly Tests](https://github.com/google/xpk/actions/workflows/nightly_tests.yaml/badge.svg)](https://github.com/google/xpk/actions/workflows/nightly_tests.yaml)
50
+ [![Develop Tests](https://github.com/AI-Hypercomputer/xpk/actions/workflows/build_tests.yaml/badge.svg?branch=develop)](https://github.com/AI-Hypercomputer/xpk/actions/workflows/build_tests.yaml)
51
+ [![Develop Nightly Tests](https://github.com/AI-Hypercomputer/xpk/actions/workflows/nightly_tests.yaml/badge.svg?branch=develop)](https://github.com/AI-Hypercomputer/xpk/actions/workflows/nightly_tests.yaml)
45
52
 
46
53
  # Overview
47
54
 
@@ -80,7 +87,11 @@ and the following GPU types:
80
87
  and the following CPU types:
81
88
  * n2-standard-32
82
89
 
83
- # Cloud Console Permissions on the user or service account needed to run XPK:
90
+ xpk also supports Google Cloud Storage solutions:
91
+ * [Cloud Storage FUSE](#fuse)
92
+ * [Filestore](#filestore)
93
+
94
+ # Permissions needed on Cloud Console:
84
95
 
85
96
  * Artifact Registry Writer
86
97
  * Compute Admin
@@ -90,6 +101,7 @@ and the following CPU types:
90
101
  * Service Account User
91
102
  * Storage Admin
92
103
  * Vertex AI Administrator
104
+ * Filestore Editor (This role is necessary if you want to run the `storage create` command with `--type=gcpfilestore`)
93
105
 
94
106
  # Prerequisites
95
107
 
@@ -111,17 +123,28 @@ Following tools must be installed:
111
123
  # sudo may be required
112
124
  apt-get -y install make
113
125
  ```
114
- In addition, below dependencies will be installed with `make install` command:
126
+ In addition, below dependencies can be installed either using provided links or using `make install` command, if xpk is downloaded via `git clone` command:
115
127
  - kueuectl (install from [here](https://kueue.sigs.k8s.io/docs/reference/kubectl-kueue/installation/))
116
128
  - kjob (installation instructions [here](https://github.com/kubernetes-sigs/kjob/blob/main/docs/installation.md))
117
129
 
118
130
  # Installation
119
- To install xpk, run the following command and install additional tools, mentioned in [prerequisites](#prerequisites). [Makefile](https://github.com/AI-Hypercomputer/xpk/blob/main/Makefile) provides a way to install all neccessary tools:
131
+ To install xpk, install required tools mentioned in [prerequisites](#prerequisites). [Makefile](https://github.com/AI-Hypercomputer/xpk/blob/main/Makefile) provides a way to install all necessary tools. XPK can be installed via pip:
120
132
 
121
133
  ```shell
122
134
  pip install xpk
123
135
  ```
124
136
 
137
+ If you see an error saying: `This environment is externally managed`, please use a virtual environment.
138
+
139
+ ```shell
140
+ ## One time step of creating the venv
141
+ VENV_DIR=~/venvp3
142
+ python3 -m venv $VENV_DIR
143
+ ## Enter your venv.
144
+ source $VENV_DIR/bin/activate
145
+ ## Install xpk and its dependencies.
146
+ pip install xpk
147
+ ```
125
148
 
126
149
  If you are running XPK by cloning GitHub repository, first run the
127
150
  following commands to begin using XPK commands:
@@ -174,6 +197,8 @@ cleanup with a `Cluster Delete`.
174
197
  If you have failures with workloads not running, use `xpk inspector` to investigate
175
198
  more.
176
199
 
200
+ If you need your Workloads to have persistent storage, use `xpk storage` to find out more.
201
+
177
202
  ## Cluster Create
178
203
 
179
204
  First set the project and zone through gcloud config or xpk arguments.
@@ -448,6 +473,101 @@ Currently, the below flags/arguments are supported for A3-Mega and A3-Ultra mach
448
473
  * --on-demand (only A3-Mega)
449
474
 
450
475
 
476
+ ## Storage
477
+ Currently XPK supports two types of storages: Cloud Storage FUSE and Google Cloud Filestore.
478
+
479
+ ### FUSE
480
+ A FUSE adapter lets you mount and access Cloud Storage buckets as local file systems, so applications can read and write objects in your bucket using standard file system semantics.
481
+
482
+ To use the GCS FUSE with XPK you need to create a [Storage Bucket](https://console.cloud.google.com/storage/).
483
+
484
+ Once it's ready you can use `xpk storage attach` with `--type=gcsfuse` command to attach a FUSE storage instance to your cluster:
485
+
486
+ ```shell
487
+ python3 xpk.py storage attach test-fuse-storage --type=gcsfuse \
488
+ --project=$PROJECT --cluster=$CLUSTER --zone=$ZONE
489
+ --mount-point='/test-mount-point' --readonly=false \
490
+ --bucket=test-bucket --size=1 --auto-mount=false
491
+ ```
492
+
493
+ Parameters:
494
+
495
+ - `--type` - type of the storage, currently xpk supports `gcsfuse` and `gcpfilestore` only.
496
+ - `--auto-mount` - if set to true all workloads will have this storage mounted by default.
497
+ - `--mount-point` - the path on which this storage should be mounted for a workload.
498
+ - `--readonly` - if set to true, workload can only read from storage.
499
+ - `--size` - size of the storage in Gb.
500
+ - `--bucket` - name of the storage bucket. If not set then the name of the storage is used as a bucket name.
501
+
502
+ ### Filestore
503
+
504
+ A Filestore adapter lets you mount and access [Filestore instances](https://cloud.google.com/filestore/) as local file systems, so applications can read and write objects in your volumes using standard file system semantics.
505
+
506
+ To create and attach a GCP Filestore instance to your cluster use `xpk storage create` command with `--type=gcpfilestore`:
507
+
508
+ ```shell
509
+ python3 xpk.py storage create test-fs-storage --type=gcpfilestore \
510
+ --auto-mount=false --mount-point=/data-fs --readonly=false \
511
+ --size=1024 --tier=BASIC_HDD --access_mode=ReadWriteMany --vol=default \
512
+ --project=$PROJECT --cluster=$CLUSTER --zone=$ZONE
513
+ ```
514
+
515
+ You can also attach an existing Filestore instance to your cluster using `xpk storage attach` command:
516
+
517
+ ```shell
518
+ python3 xpk.py storage attach test-fs-storage --type=gcpfilestore \
519
+ --auto-mount=false --mount-point=/data-fs --readonly=false \
520
+ --size=1024 --tier=BASIC_HDD --access_mode=ReadWriteMany --vol=default \
521
+ --project=$PROJECT --cluster=$CLUSTER --zone=$ZONE
522
+ ```
523
+
524
+ The command above is also useful when attaching multiple volumes from the same Filestore instance.
525
+
526
+ Commands `xpk storage create` and `xpk storage attach` with `--type=gcpfilestore` accept following arguments:
527
+ - `--type` - type of the storage.
528
+ - `--auto-mount` - if set to true all workloads will have this storage mounted by default.
529
+ - `--mount-point` - the path on which this storage should be mounted for a workload.
530
+ - `--readonly` - if set to true, workload can only read from storage.
531
+ - `--size` - size of the Filestore instance that will be created in Gb.
532
+ - `--tier` - tier of the Filestore instance that will be created. Possible options are: `[BASIC_HDD, BASIC_SSD, ZONAL, REGIONAL, ENTERPRISE]`
533
+ - `--access-mode` - access mode of the Filestore instance that will be created. Possible values are: `[ReadWriteOnce, ReadOnlyMany, ReadWriteMany]`
534
+ - `--vol` - file share name of the Filestore instance that will be created.
535
+ - `--instance` - the name of the Filestore instance. If not set then the name parameter is used as an instance name. Useful when connecting multiple volumes from the same Filestore instance.
536
+
537
+ ### List attached storages
538
+
539
+ ```shell
540
+ python3 xpk.py storage list \
541
+ --project=$PROJECT --cluster $CLUSTER --zone=$ZONE
542
+ ```
543
+
544
+ ### Running workloads with storage
545
+
546
+ If you specified `--auto-mount=true` when creating or attaching a storage, then all workloads deployed on the cluster will have the volume attached by default. Otherwise, in order to have the storage attached, you have to add `--storage` parameter to `workload create` command:
547
+
548
+ ```shell
549
+ python3 xpk.py workload create \
550
+ --workload xpk-test-workload --command "echo goodbye" \
551
+ --project=$PROJECT --cluster=$CLUSTER --zone=$ZONE \
552
+ --tpu-type=v5litepod-16 --storage=test-storage
553
+ ```
554
+
555
+ ### Detaching storage
556
+
557
+ ```shell
558
+ python3 xpk.py storage detach $STORAGE_NAME \
559
+ --project=$PROJECT --cluster=$CLUSTER --zone=$ZONE
560
+ ```
561
+
562
+ ### Deleting storage
563
+
564
+ XPK allows you to remove Filestore instances easily with `xpk storage delete` command. **Warning:** this deletes all data contained in the Filestore!
565
+
566
+ ```shell
567
+ python3 xpk.py storage delete test-fs-instance \
568
+ --project=$PROJECT --cluster=$CLUSTER --zone=$ZONE
569
+ ```
570
+
451
571
  ## Workload Create
452
572
  * Workload Create (submit training job):
453
573
 
@@ -455,7 +575,7 @@ Currently, the below flags/arguments are supported for A3-Mega and A3-Ultra mach
455
575
  python3 xpk.py workload create \
456
576
  --workload xpk-test-workload --command "echo goodbye" \
457
577
  --cluster xpk-test \
458
- --tpu-type=v5litepod-16
578
+ --tpu-type=v5litepod-16 --project=$PROJECT
459
579
  ```
460
580
 
461
581
  * Workload Create for Pathways:
@@ -528,6 +648,8 @@ To submit jobs on a cluster with A3 machines, run the below command. To create a
528
648
  ```
529
649
  > The docker image flags/arguments introduced in [workloads section](#workload-create) can be used with A3 machines as well.
530
650
 
651
+ In order to run NCCL test on A3 Ultra machines check out [this guide](/examples/nccl/nccl.md).
652
+
531
653
  ### Workload Priority and Preemption
532
654
  * Set the priority level of your workload with `--priority=LEVEL`
533
655
 
@@ -666,8 +788,6 @@ Check out [MaxText example](https://github.com/google/maxtext/pull/570) on how t
666
788
  ```
667
789
 
668
790
  * Workload List supports waiting for the completion of a specific job. XPK will follow an existing job until it has finished or the `timeout`, if provided, has been reached and then list the job. If no `timeout` is specified, the default value is set to the max value, 1 week. You may also set `timeout=0` to poll the job once.
669
- (Note: `restart-on-user-code-failure` must be set
670
- when creating the workload otherwise the workload will always finish with `Completed` status.)
671
791
 
672
792
  Wait for a job to complete.
673
793
 
@@ -759,6 +879,35 @@ Inspector output is saved to a file.
759
879
  [XPK] Exiting XPK cleanly
760
880
  ```
761
881
 
882
+ ## Run
883
+ * `xpk run` lets you execute scripts on a cluster with ease. It automates task execution, handles interruptions, and streams job output to your console.
884
+
885
+ ```shell
886
+ python xpk.py run --kind-cluster -n 2 -t 0-2 examples/job.sh
887
+ ```
888
+
889
+ * Example Output:
890
+
891
+ ```shell
892
+ [XPK] Starting xpk
893
+ [XPK] Task: `get current-context` is implemented by `kubectl config current-context`, hiding output unless there is an error.
894
+ [XPK] No local cluster name specified. Using current-context `kind-kind`
895
+ [XPK] Task: `run task` is implemented by `kubectl kjob create slurm --profile xpk-def-app-profile --localqueue multislice-queue --wait --rm -- examples/job.sh --partition multislice-queue --ntasks 2 --time 0-2`. Streaming output and input live.
896
+ job.batch/xpk-def-app-profile-slurm-g4vr6 created
897
+ configmap/xpk-def-app-profile-slurm-g4vr6 created
898
+ service/xpk-def-app-profile-slurm-g4vr6 created
899
+ Starting log streaming for pod xpk-def-app-profile-slurm-g4vr6-1-4rmgk...
900
+ Now processing task ID: 3
901
+ Starting log streaming for pod xpk-def-app-profile-slurm-g4vr6-0-bg6dm...
902
+ Now processing task ID: 1
903
+ exit
904
+ exit
905
+ Now processing task ID: 2
906
+ exit
907
+ Job logs streaming finished.[XPK] Task: `run task` terminated with code `0`
908
+ [XPK] XPK Done.
909
+ ```
910
+
762
911
  ## GPU usage
763
912
 
764
913
  In order to use XPK for GPU, you can do so by using `device-type` flag.
@@ -1241,6 +1390,8 @@ gcloud beta compute reservations describe $RESERVATION --project=$PROJECT_ID --z
1241
1390
 
1242
1391
  ## 403 error on workload create when using `--base-docker-image` flag
1243
1392
  You need authority to push to the registry from your local machine. Try running `gcloud auth configure-docker`.
1393
+ ## `Kubernetes API exception` - 404 error
1394
+ If this kind of error appears after updating the xpk version, you may need to rerun the `cluster create` command to update resource definitions.
1244
1395
 
1245
1396
  # TPU Workload Debugging
1246
1397
 
@@ -0,0 +1,92 @@
1
+ xpk/__init__.py,sha256=7mu-VQDQMyxM5To0KOhuYe4y2TYGsEkfV7hXZmUyih4,561
2
+ xpk/main.py,sha256=wFc_kIM7kALGIY-JOcoa8m4BCWNRjl5tQ6ZDpv7HpSU,2350
3
+ xpk/api/__init__.py,sha256=YPwWBbgLAu7L-YlTVGB2r8ZV4TzypURMRBcehSHHlLY,561
4
+ xpk/api/storage_crd.yaml,sha256=r4WFXnSJJ25EUF-t4Ljfbl-cJoSaiFiZkP8451eTub4,1260
5
+ xpk/commands/__init__.py,sha256=YPwWBbgLAu7L-YlTVGB2r8ZV4TzypURMRBcehSHHlLY,561
6
+ xpk/commands/batch.py,sha256=OZoH2WsHaff2tZNU5bRqqnQGfmC_U0CZDIECpanwH8A,3862
7
+ xpk/commands/cluster.py,sha256=wF8pWeCwf6TtYxYaiaI1icDKXnGIDVYgi28FouciYQs,25097
8
+ xpk/commands/cluster_gcluster.py,sha256=-4vcxnOyd2GMKHYR1LBUYS7zQR3uJr5l5NFgu9Z33yI,9179
9
+ xpk/commands/common.py,sha256=ycvmnHoiM2gsY1DPDb2cwEB0YhDeAFCpHmd0jyvWGBo,1448
10
+ xpk/commands/config.py,sha256=gFNkf3ibsvZmcPpkpKXe-KJmHO5IKucNwLCXNgKvaDc,836
11
+ xpk/commands/info.py,sha256=ee_kwRLaLD4Hvw8155uK3oCdF9wQmoGsWwu7M1SjPkU,7338
12
+ xpk/commands/inspector.py,sha256=bwbZW-cLtiBw2V0zvoMprHWhgMbAYm0ez0GjjEqeUR8,12097
13
+ xpk/commands/job.py,sha256=luzLV7CSgXPUM8i1ZPh6n-YPj3w_O5dDoqUjWfdFvbc,5507
14
+ xpk/commands/kind.py,sha256=Vl3RT47kHCR0ORX9dK37HCiYtbmXJUCIAaq-QEbIclU,7578
15
+ xpk/commands/kjob_common.py,sha256=aR6k_6yacr76QZDQmdoPO0M4Tg6H7ZPooKUTnOVZwXY,1596
16
+ xpk/commands/run.py,sha256=-W32sfobmwxLNEQzBKFWgPs_UOWljRKjFyH-Unm9zsA,3853
17
+ xpk/commands/shell.py,sha256=ZODaPNSmWHOpW48eHEt35IoM4x-0GQUGaLjOxQ63QSY,4235
18
+ xpk/commands/storage.py,sha256=C1UOn1EBb-2ZvvqdL2I3qAVmoghVzaAWXrPPLG7R7dQ,8307
19
+ xpk/commands/version.py,sha256=CU4mb71r66U28krnPAopC6vBpdK-IGclsy5uNaQcgRY,824
20
+ xpk/commands/workload.py,sha256=N3hqe3tWuQMjGuk4DaiDoehgejYGYKwWXRygzJ58h-c,31710
21
+ xpk/core/__init__.py,sha256=YPwWBbgLAu7L-YlTVGB2r8ZV4TzypURMRBcehSHHlLY,561
22
+ xpk/core/capacity.py,sha256=pli6McSdPgGJxsBfJNVk5lCjehp1s1WI82ETAvJT1_I,5365
23
+ xpk/core/cluster.py,sha256=GPuasSTadvgmIb9Iw7giqw7FJDg0jEzIwbQLEkzjuvE,18352
24
+ xpk/core/cluster_private.py,sha256=J2-UZ6t5f-UNdcSkuUr2_f4xk6xyrMH9Muk56trBh4M,6657
25
+ xpk/core/commands.py,sha256=JiS4vJqWSLu8MKFBIKPBea9MKD2ZdpaQrziVQBqiDr4,10719
26
+ xpk/core/config.py,sha256=KqNSVWzXb5pUXId1TeG2mydpnBzF7ryzduW1h586fAU,5659
27
+ xpk/core/docker_container.py,sha256=GvkCJ2S5UKn8uh3pZhRd3X7iS0-PsQpRO8l7QhywVGc,7604
28
+ xpk/core/docker_image.py,sha256=fEdpLQg1C205mMbLREy48WnhvNv2Nm4KQFX87B2CuiA,6624
29
+ xpk/core/docker_manager.py,sha256=_fE27tDCJPd9dUfswYoQMzZRMAMfxq6SxdFdOT-gzIQ,10566
30
+ xpk/core/docker_resources.py,sha256=D4xqdBj7-ezSDNrb1DNVh4n8bzdBGSDfcDtqzXD84D8,11452
31
+ xpk/core/filestore.py,sha256=mCyZ4K1ggUAMWSopLeeb3yBS2dluF8GrrRry1HdiACU,7997
32
+ xpk/core/gcloud_context.py,sha256=p_LhWHo7GZonear2oupvTO-DpKqEkL0St7PnfxieRDY,5866
33
+ xpk/core/gcluster_manager.py,sha256=JFip2hInFczFP2h5AXa70IPIuTaJ475TG6GxkQjKOI8,6337
34
+ xpk/core/gcsfuse.py,sha256=rYeylcVylqV8UfnVe1keJ2ZT70TtE13wHWV2sHMKsgQ,1591
35
+ xpk/core/kjob.py,sha256=hI6A3ezW7AX_iQSI_CsdmCMTyW9FD_0Q7kut964xIzE,13859
36
+ xpk/core/kueue.py,sha256=krmpMNFpLd5refP1xvrqWO3RXblohpwThoWxCNKG5IA,10097
37
+ xpk/core/monitoring.py,sha256=v9MvLzNfvJAVby_ehSlPe6PaO0_pf3shkXg5gd-UWm8,4338
38
+ xpk/core/nap.py,sha256=BNO0fnTpza310cAVwITYktj1SN9tXVT_kCnsufKzYOE,12136
39
+ xpk/core/network.py,sha256=kfvOJREHAm9JtGYdi6csnJeZNg81cjf5-5ECweZ6sWw,10478
40
+ xpk/core/nodepool.py,sha256=1aBZXvaXWEXf2YJXj7w3NDQiPTLJ8b6cmizVPzeoVSY,22002
41
+ xpk/core/pathways.py,sha256=KgpptvcYb21cwYWGuwW3HXd8teYUAK3EhZIuYulVdqs,11587
42
+ xpk/core/ray.py,sha256=UxOpIc2enHi1fQ4h3KO8FH8bIyEMtYzGtPoeqJKGG4o,6337
43
+ xpk/core/resources.py,sha256=IXzvuA8saK6Xvv4MHTWYVeWJDR3MbH_RScd-Dp_qxlM,7669
44
+ xpk/core/scheduling.py,sha256=8BAg8YyftJULHeq-A5nmgpPYVjyEjbVjSG6cWYCAcX0,8348
45
+ xpk/core/storage.py,sha256=oduGqythFOGIZhN9H-nixLn0Zt-aEZunyLG15XCSpqs,18100
46
+ xpk/core/system_characteristics.py,sha256=6CwanJZ3jJCJAiVIr9QArBFIcYitt_YiJvb-K5nYjjk,31657
47
+ xpk/core/vertex.py,sha256=pD9UBL62xHomuqdNu7xKccfD2KCbjgohMk3AhX-CXSw,3644
48
+ xpk/core/workload.py,sha256=-lWKkQHaMgc8lBlI-pVnNdz9k5KhuMWL53RDVP9mXl8,11611
49
+ xpk/core/blueprint/__init__.py,sha256=YPwWBbgLAu7L-YlTVGB2r8ZV4TzypURMRBcehSHHlLY,561
50
+ xpk/core/blueprint/blueprint_definitions.py,sha256=tz2cL8mtRxQroa_EKvW5S6PZRuSezRqwrFcK0MaFyrg,1704
51
+ xpk/core/blueprint/blueprint_generator.py,sha256=OpQ2vwUGDO73MRrUUg6td-tXg2mZHx7MmeWNUkRbN9k,24893
52
+ xpk/core/remote_state/__init__.py,sha256=PkV8D9WOtlJHH5AIxsQaKeIBcmupT_Ol_bwJgN6G2I8,561
53
+ xpk/core/remote_state/fuse_remote_state.py,sha256=3Dx4ZZd0NFF5-MlqGWHzz8H4bjYiPOWdF_YSEnKUPQ8,3246
54
+ xpk/core/remote_state/remote_state_client.py,sha256=6PcR92Xy_RMjlF4AscanQ1jXNHnewLWGNC2v53jbzD4,1077
55
+ xpk/core/workload_decorators/__init__.py,sha256=YPwWBbgLAu7L-YlTVGB2r8ZV4TzypURMRBcehSHHlLY,561
56
+ xpk/core/workload_decorators/rdma_decorator.py,sha256=7Ps8QKtDpjgQ04-ZLfNNKFv4wdYdZhjL5NWeZcsgL8E,3977
57
+ xpk/core/workload_decorators/storage_decorator.py,sha256=KBt7zpcftczDZ_8a5Sy2MISrYcaH6Zknfbtro0Bmn_I,1737
58
+ xpk/core/workload_decorators/tcpxo_decorator.py,sha256=pj-sTUgVcRTv_BvymeVBVV6SvPSKD4vSVop4o5FklpI,6156
59
+ xpk/parser/__init__.py,sha256=YPwWBbgLAu7L-YlTVGB2r8ZV4TzypURMRBcehSHHlLY,561
60
+ xpk/parser/batch.py,sha256=mJU-Cp1yTLje59vD-B1IiBcUeD-ZmEsoeB4xhj9cflc,1406
61
+ xpk/parser/cluster.py,sha256=kEHq1zIfNCOnmf4cNTGCY0na7bylTmRZDAjjuRj7TkI,22196
62
+ xpk/parser/common.py,sha256=_F2rwsZka15difkvPA1yPARWr9I9ewx8PMzgwMLTvjM,7220
63
+ xpk/parser/config.py,sha256=-XnWx9aFsBW4Uzo_hpOMD2ZQ0bdZLvq1ksv83_5jqSM,1633
64
+ xpk/parser/core.py,sha256=VRJerlS92ufoQbG1mZv7B04DAP4qGkBHa4pRXgcbAs0,4761
65
+ xpk/parser/info.py,sha256=UJohxVVWdt9IgUXoPsrVae2DN1BjAVGWrSN2ajrB8RQ,1860
66
+ xpk/parser/inspector.py,sha256=hAPAZ2k9iSJgC1mjnz3rMleInsAQ8PmkyyUKFyBmsgY,1997
67
+ xpk/parser/job.py,sha256=5RdE70rucGfrsn65l7Ho6RmO06mag1S0AO-3saVuXyw,4328
68
+ xpk/parser/kind.py,sha256=sgPCqNVrgmFLcOBEbhlaphwVXxMh_opP9ntCq4KPePE,2682
69
+ xpk/parser/run.py,sha256=oi_ksSyJ8Ooffe2EgoV_ecpmXEmNGVotjpIQH-HjufE,1481
70
+ xpk/parser/shell.py,sha256=VC8p-kz9XjJZW9DXZ-rnv41XnRDRpQRFywHpB5j7tfc,1970
71
+ xpk/parser/storage.py,sha256=-0xN2OaDtQZpdFQskm-v2Bd1yLiBA-AGSNZxs6UOB7A,8996
72
+ xpk/parser/validators.py,sha256=-NBZelvfwZRzjz-YUCreD8EzMLHll8PZM-d-MVm2PG4,1192
73
+ xpk/parser/version.py,sha256=eJo4PAbbmRQZulgKBs_ytbVgV9zAaaXeNzMMxmgFMVY,769
74
+ xpk/parser/workload.py,sha256=GNcJEOvldVHKZPIO6cXAIXMpyHq2M9kdOJ7CZP86saU,24177
75
+ xpk/templates/__init__.py,sha256=7mu-VQDQMyxM5To0KOhuYe4y2TYGsEkfV7hXZmUyih4,561
76
+ xpk/templates/storage.yaml,sha256=AykdyMtDnKZF8Y_0BYxoYP03hEIzEk6iNalXAQHgAls,163
77
+ xpk/utils/__init__.py,sha256=YPwWBbgLAu7L-YlTVGB2r8ZV4TzypURMRBcehSHHlLY,561
78
+ xpk/utils/console.py,sha256=bKibWIswcB1aWGZp0ZpL-NEhvTrxJMy7wWD4-3BVTKI,1479
79
+ xpk/utils/file.py,sha256=jlv2o4ah9UmWJ7NuOCnTwtMZFLerOATBIMQeQ03-kIw,2142
80
+ xpk/utils/gcs_utils.py,sha256=zg-XSTv4G4TFjeT2bNBm2WLdDXPrOZi0rNv_JdppNg4,4113
81
+ xpk/utils/kubectl.py,sha256=-CyxSMTXMq05S0D53tp2Ue9j0UIpWgyEv8p7QJ2b1Ic,1758
82
+ xpk/utils/network.py,sha256=AAm9qGGFAEfAh1FK39muBheXAo7tdBlxR0A8Tg0TyYQ,4205
83
+ xpk/utils/objects.py,sha256=OwMNxB4TGX21qnJPdZo2YBMPMbQPqOtHMh19QhoRNRY,2498
84
+ xpk/utils/templates.py,sha256=g8zgR1MxyJmTmzM_wnvH30FmcbgQMC47UQwBtLj8B9k,807
85
+ xpk/utils/validation.py,sha256=bSJApIY0Lk48I4EEQP08ZUvolXt_APpYXVGJXFQ_YLA,2711
86
+ xpk/utils/yaml.py,sha256=j8xuAJ9yAAwnQi6ozwZ-nMnDyDnc3xWkeBZMtSuP4RU,844
87
+ xpk-0.7.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
88
+ xpk-0.7.0.dist-info/METADATA,sha256=9BSoO9HYN_txLxZVtxBdl2ZObOzWsHBKNyn-sUHJoA0,63339
89
+ xpk-0.7.0.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
90
+ xpk-0.7.0.dist-info/entry_points.txt,sha256=mzEtiIesFkT1kmcTUVDA1o3uOhiniX6tIz2wmOlMu1M,38
91
+ xpk-0.7.0.dist-info/top_level.txt,sha256=aDe4N0jicmuWExx_6w0TxWQJaEuPSs9BnLU-3aF1GLo,4
92
+ xpk-0.7.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.0)
2
+ Generator: setuptools (76.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5