luminarycloud-0.19.0-py3-none-any.whl → luminarycloud-0.22.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- luminarycloud/__init__.py +5 -1
- luminarycloud/_client/client.py +7 -0
- luminarycloud/_client/http_client.py +10 -8
- luminarycloud/_feature_flag.py +22 -0
- luminarycloud/_helpers/_create_simulation.py +7 -2
- luminarycloud/_helpers/_upload_mesh.py +1 -0
- luminarycloud/_helpers/_wait_for_mesh.py +6 -5
- luminarycloud/_helpers/_wait_for_simulation.py +3 -3
- luminarycloud/_helpers/download.py +3 -1
- luminarycloud/_helpers/pagination.py +62 -0
- luminarycloud/_helpers/proto_decorator.py +13 -5
- luminarycloud/_helpers/upload.py +18 -12
- luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2.py +55 -0
- luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2.pyi +52 -0
- luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2_grpc.py +72 -0
- luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2_grpc.pyi +35 -0
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +168 -124
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +133 -4
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +66 -0
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +20 -0
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +8 -8
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +5 -5
- luminarycloud/_proto/api/v0/luminarycloud/mesh/mesh_pb2.py +74 -73
- luminarycloud/_proto/api/v0/luminarycloud/mesh/mesh_pb2.pyi +17 -3
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +96 -25
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +235 -1
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +34 -0
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/api/v0/luminarycloud/project/project_pb2.py +16 -16
- luminarycloud/_proto/api/v0/luminarycloud/project/project_pb2.pyi +7 -3
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +97 -61
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +77 -4
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +34 -0
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/api/v0/luminarycloud/simulation_template/simulation_template_pb2.py +33 -31
- luminarycloud/_proto/api/v0/luminarycloud/simulation_template/simulation_template_pb2.pyi +23 -2
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +126 -27
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +183 -0
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2_grpc.py +99 -0
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2_grpc.pyi +30 -0
- luminarycloud/_proto/assistant/assistant_pb2.py +74 -41
- luminarycloud/_proto/assistant/assistant_pb2.pyi +64 -2
- luminarycloud/_proto/assistant/assistant_pb2_grpc.py +33 -0
- luminarycloud/_proto/assistant/assistant_pb2_grpc.pyi +10 -0
- luminarycloud/_proto/base/base_pb2.py +20 -7
- luminarycloud/_proto/base/base_pb2.pyi +38 -0
- luminarycloud/_proto/cad/shape_pb2.py +39 -19
- luminarycloud/_proto/cad/shape_pb2.pyi +86 -34
- luminarycloud/_proto/cad/transformation_pb2.py +60 -16
- luminarycloud/_proto/cad/transformation_pb2.pyi +138 -32
- luminarycloud/_proto/client/simulation_pb2.py +501 -348
- luminarycloud/_proto/client/simulation_pb2.pyi +607 -11
- luminarycloud/_proto/geometry/geometry_pb2.py +77 -63
- luminarycloud/_proto/geometry/geometry_pb2.pyi +42 -3
- luminarycloud/_proto/hexmesh/hexmesh_pb2.py +24 -18
- luminarycloud/_proto/hexmesh/hexmesh_pb2.pyi +23 -2
- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +10 -10
- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.pyi +5 -5
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +29 -0
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.pyi +7 -0
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +70 -0
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +30 -0
- luminarycloud/_proto/quantity/quantity_options_pb2.py +6 -6
- luminarycloud/_proto/quantity/quantity_options_pb2.pyi +10 -1
- luminarycloud/_proto/quantity/quantity_pb2.py +176 -167
- luminarycloud/_proto/quantity/quantity_pb2.pyi +11 -5
- luminarycloud/enum/__init__.py +1 -0
- luminarycloud/enum/gpu_type.py +2 -0
- luminarycloud/enum/quantity_type.py +9 -0
- luminarycloud/enum/vis_enums.py +23 -3
- luminarycloud/exceptions.py +7 -1
- luminarycloud/feature_modification.py +45 -35
- luminarycloud/geometry.py +107 -9
- luminarycloud/geometry_version.py +57 -3
- luminarycloud/mesh.py +1 -2
- luminarycloud/meshing/mesh_generation_params.py +8 -8
- luminarycloud/params/enum/_enum_wrappers.py +562 -30
- luminarycloud/params/simulation/adaptive_mesh_refinement_.py +4 -0
- luminarycloud/params/simulation/material/material_solid_.py +15 -1
- luminarycloud/params/simulation/physics/__init__.py +0 -1
- luminarycloud/params/simulation/physics/periodic_pair_.py +12 -31
- luminarycloud/physics_ai/architectures.py +58 -0
- luminarycloud/physics_ai/inference.py +13 -13
- luminarycloud/physics_ai/solution.py +3 -1
- luminarycloud/physics_ai/training_jobs.py +37 -0
- luminarycloud/pipelines/__init__.py +11 -3
- luminarycloud/pipelines/api.py +248 -16
- luminarycloud/pipelines/arguments.py +15 -0
- luminarycloud/pipelines/core.py +113 -96
- luminarycloud/pipelines/{operators.py → stages.py} +96 -39
- luminarycloud/project.py +15 -47
- luminarycloud/simulation.py +69 -5
- luminarycloud/simulation_param.py +0 -9
- luminarycloud/simulation_template.py +2 -1
- luminarycloud/types/matrix3.py +12 -0
- luminarycloud/vis/__init__.py +17 -0
- luminarycloud/vis/data_extraction.py +20 -4
- luminarycloud/vis/interactive_report.py +110 -0
- luminarycloud/vis/interactive_scene.py +29 -2
- luminarycloud/vis/report.py +252 -0
- luminarycloud/vis/visualization.py +127 -5
- luminarycloud/volume_selection.py +132 -69
- {luminarycloud-0.19.0.dist-info → luminarycloud-0.22.0.dist-info}/METADATA +1 -1
- {luminarycloud-0.19.0.dist-info → luminarycloud-0.22.0.dist-info}/RECORD +105 -97
- luminarycloud/params/simulation/physics/periodic_pair/__init__.py +0 -2
- luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/__init__.py +0 -2
- luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/rotational_periodicity_.py +0 -31
- luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/translational_periodicity_.py +0 -29
- luminarycloud/params/simulation/physics/periodic_pair/periodicity_type_.py +0 -25
- {luminarycloud-0.19.0.dist-info → luminarycloud-0.22.0.dist-info}/WHEEL +0 -0
luminarycloud/pipelines/api.py
CHANGED
@@ -1,15 +1,39 @@
 # Copyright 2023-2024 Luminary Cloud, Inc. All Rights Reserved.
-from typing import Literal
+from typing import Any, Literal
 from dataclasses import dataclass

 from datetime import datetime
+from time import time, sleep
+import logging

+from .arguments import PipelineArgValueType
 from ..pipelines import Pipeline, PipelineArgs
 from .._client import get_default_client

+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class LogLine:
+    timestamp: datetime
+    level: int
+    message: str
+
+    @classmethod
+    def from_json(cls, json: dict) -> "LogLine":
+        return cls(
+            timestamp=datetime.fromisoformat(json["timestamp"]),
+            level=json["level"],
+            message=json["message"],
+        )
+

 @dataclass
 class PipelineRecord:
+    """
+    A PipelineRecord represents a persisted pipeline.
+    """
+
     id: str
     name: str
     description: str | None
@@ -17,8 +41,10 @@ class PipelineRecord:
     created_at: datetime
     updated_at: datetime

-
-
+    # I don't think users need to get the Pipeline object from a PipelineRecord, but if they did,
+    # it would be done like this.
+    # def pipeline(self) -> Pipeline:
+    #     return Pipeline._from_yaml(self.definition_yaml)

     @classmethod
     def from_json(cls, json: dict) -> "PipelineRecord":
@@ -31,15 +57,30 @@ class PipelineRecord:
             updated_at=datetime.fromisoformat(json["updated_at"]),
         )

+    def pipeline_jobs(self) -> "list[PipelineJobRecord]":
+        """
+        Returns a list of pipeline jobs that were created from this pipeline.
+
+        Returns
+        -------
+        list[PipelineJobRecord]
+            A list of PipelineJobRecord objects.
+        """
+        res = get_default_client().http.get(f"/rest/v0/pipelines/{self.id}/pipeline_jobs")
+        return [PipelineJobRecord.from_json(p) for p in res["data"]]
+

 @dataclass
 class PipelineJobRecord:
+    """
+    A PipelineJobRecord represents a persisted pipeline job.
+    """
+
     id: str
     pipeline_id: str
-    project_id: str
     name: str
     description: str | None
-    status: Literal["pending", "running", "completed", "failed"
+    status: Literal["pending", "running", "completed", "failed"]
     created_at: datetime
     updated_at: datetime
     started_at: datetime | None
@@ -50,7 +91,6 @@ class PipelineJobRecord:
         return cls(
             id=json["id"],
             pipeline_id=json["pipeline_id"],
-            project_id=json["project_id"],
             name=json["name"],
             description=json["description"],
             status=json["status"],
@@ -62,6 +102,200 @@ class PipelineJobRecord:
             ),
         )

+    def pipeline(self) -> PipelineRecord:
+        """
+        Returns the pipeline that this pipeline job was created from.
+
+        Returns
+        -------
+        PipelineRecord
+            The PipelineRecord for the pipeline that this pipeline job was created from.
+        """
+        return get_pipeline(self.pipeline_id)
+
+    def runs(self) -> "list[PipelineJobRunRecord]":
+        """
+        Returns a list of runs for this pipeline job.
+
+        Returns
+        -------
+        list[PipelineJobRunRecord]
+            A list of PipelineJobRunRecord objects.
+        """
+        res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/runs")
+        return [PipelineJobRunRecord.from_json(r) for r in res["data"]]
+
+    def logs(self) -> list[LogLine]:
+        """
+        Returns a list of log lines for this pipeline job.
+
+        Each log line is a LogLine object, which has a timestamp, level, and message.
+
+        Returns
+        -------
+        list[LogLine]
+            A list of LogLine objects.
+        """
+        res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/logs")
+        return [LogLine.from_json(l) for l in res["data"]]
+
+    def artifacts(self) -> list[dict]:
+        """
+        Returns a list of artifacts that were produced by this pipeline job.
+
+        Artifacts are things like Geometries, Meshes, and Simulations. Each artifact is a dictionary
+        with an "id" key, which is an identifier for the artifact.
+
+        .. warning:: This feature is experimental and may change or be removed in the future.
+
+        Returns
+        -------
+        list[dict]
+            A list of artifact dictionaries.
+        """
+        res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/artifacts")
+        return res["data"]
+
+    def wait(
+        self,
+        *,
+        interval_seconds: float = 5,
+        timeout_seconds: float = float("inf"),
+        print_logs: bool = False,
+    ) -> Literal["completed", "failed"]:
+        """
+        Wait for the pipeline job to complete or fail.
+
+        This method polls the pipeline job status at regular intervals until it reaches
+        a terminal state (completed or failed).
+
+        Parameters
+        ----------
+        interval_seconds : float
+            Number of seconds between status polls. Default is 5 seconds.
+        timeout_seconds : float
+            Number of seconds before the operation times out. Default is infinity.
+        print_logs : bool
+            If True, prints new log lines as they become available. Default is False.
+
+        Returns
+        -------
+        Literal["completed", "failed"]
+            The final status of the pipeline job.
+
+        Raises
+        ------
+        TimeoutError
+            If the pipeline job does not complete within the specified timeout.
+
+        Examples
+        --------
+        >>> pipeline_job = pipelines.create_pipeline_job(pipeline.id, args, "My Job")
+        >>> final_status = pipeline_job.wait(timeout_seconds=3600)
+        >>> print(f"Pipeline job finished with status: {final_status}")
+        """
+        deadline = time() + timeout_seconds
+        last_log_count = 0
+
+        while True:
+            # Refresh the pipeline job status
+            updated_job = get_pipeline_job(self.id)
+
+            # Print new logs if requested
+            if print_logs:
+                logs = updated_job.logs()
+                if len(logs) > last_log_count:
+                    for log_line in logs[last_log_count:]:
+                        print(f"[{log_line.timestamp}] {log_line.message}")
+                    last_log_count = len(logs)
+
+            # Check if we've reached a terminal state
+            if updated_job.status == "completed":
+                logger.info(f"Pipeline job {self.id} completed successfully")
+                return "completed"
+            elif updated_job.status == "failed":
+                logger.warning(f"Pipeline job {self.id} failed")
+                return "failed"
+
+            # Check timeout
+            if time() >= deadline:
+                raise TimeoutError(
+                    f"Timed out waiting for pipeline job {self.id} to complete. "
+                    f"Current status: {updated_job.status}"
+                )
+
+            # Wait before next poll
+            sleep(max(0, min(interval_seconds, deadline - time())))
+
+            # Update self with the latest status
+            self.status = updated_job.status
+            self.updated_at = updated_job.updated_at
+            self.started_at = updated_job.started_at
+            self.completed_at = updated_job.completed_at
+
+
+@dataclass
+class PipelineJobRunRecord:
+    pipeline_job_id: str
+    idx: int
+    arguments: list[PipelineArgValueType]
+    status: Literal["pending", "running", "completed", "failed"]
+
+    @classmethod
+    def from_json(cls, json: dict) -> "PipelineJobRunRecord":
+        return cls(
+            pipeline_job_id=json["pipeline_job_id"],
+            idx=json["idx"],
+            arguments=json["arguments"],
+            status=json["status"],
+        )
+
+    def pipeline_job(self) -> PipelineJobRecord:
+        """
+        Returns the pipeline job that this pipeline job run was created from.
+
+        Returns
+        -------
+        PipelineJobRecord
+            The PipelineJobRecord for the pipeline job that this pipeline job run was created from.
+        """
+        return get_pipeline_job(self.pipeline_job_id)
+
+    def logs(self) -> list[LogLine]:
+        """
+        Returns a list of log lines for this pipeline job run.
+
+        Each log line is a LogLine object, which has a timestamp, level, and message.
+
+        Returns
+        -------
+        list[LogLine]
+            A list of LogLine objects.
+        """
+        res = get_default_client().http.get(
+            f"/rest/v0/pipeline_jobs/{self.pipeline_job_id}/runs/{self.idx}/logs"
+        )
+        return [LogLine.from_json(l) for l in res["data"]]
+
+    def artifacts(self) -> list[dict]:
+        """
+        Returns a list of artifacts that were produced by this pipeline job run.
+
+        Artifacts are things like Geometries, Meshes, and Simulations. Each artifact is a dictionary
+        with an "id" key, which is an identifier for the artifact.
+
+        .. warning:: This feature is experimental and may change or be removed in the future.
+
+        Returns
+        -------
+        list[dict]
+            A list of artifact dictionaries.
+        """
+        res = get_default_client().http.get(
+            f"/rest/v0/pipeline_jobs/{self.pipeline_job_id}/runs/{self.idx}/artifacts"
+        )
+        return res["data"]
+

 def create_pipeline(
     name: str, pipeline: Pipeline | str, description: str | None = None
@@ -88,7 +322,7 @@ def create_pipeline(
         "description": description,
     }
     res = get_default_client().http.post("/rest/v0/pipelines", body)
-    return PipelineRecord.from_json(res)
+    return PipelineRecord.from_json(res["data"])


 def list_pipelines() -> list[PipelineRecord]:
@@ -96,7 +330,7 @@ def list_pipelines() -> list[PipelineRecord]:
     List all pipelines.
     """
     res = get_default_client().http.get("/rest/v0/pipelines")
-    return [PipelineRecord.from_json(p) for p in res]
+    return [PipelineRecord.from_json(p) for p in res["data"]]


 def get_pipeline(id: str) -> PipelineRecord:
@@ -109,11 +343,11 @@ def get_pipeline(id: str) -> PipelineRecord:
         ID of the pipeline to fetch.
     """
     res = get_default_client().http.get(f"/rest/v0/pipelines/{id}")
-    return PipelineRecord.from_json(res)
+    return PipelineRecord.from_json(res["data"])


 def create_pipeline_job(
-    pipeline_id: str, args: PipelineArgs,
+    pipeline_id: str, args: PipelineArgs, name: str, description: str | None = None
 ) -> PipelineJobRecord:
     """
     Create a new pipeline job.
@@ -124,8 +358,6 @@ def create_pipeline_job(
         ID of the pipeline to invoke.
     args : PipelineArgs
         Arguments to pass to the pipeline.
-    project_id : str
-        ID of the project to run the pipeline job in.
     name : str
         Name of the pipeline job.
     description : str, optional
@@ -135,13 +367,13 @@ def create_pipeline_job(
    arg_rows = [row.row_values for row in args.rows]
     body = {
         "name": name,
-        "
+        "description": description,
         "argument_names": [p.name for p in args.params],
         "argument_rows": arg_rows,
     }

     res = get_default_client().http.post(f"/rest/v0/pipelines/{pipeline_id}/pipeline_jobs", body)
-    return PipelineJobRecord.from_json(res)
+    return PipelineJobRecord.from_json(res["data"])


 def get_pipeline_job(id: str) -> PipelineJobRecord:
@@ -149,7 +381,7 @@ def get_pipeline_job(id: str) -> PipelineJobRecord:
     Get a pipeline job by ID.
     """
     res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{id}")
-    return PipelineJobRecord.from_json(res)
+    return PipelineJobRecord.from_json(res["data"])


 def list_pipeline_jobs() -> list[PipelineJobRecord]:
@@ -157,4 +389,4 @@ def list_pipeline_jobs() -> list[PipelineJobRecord]:
     List all pipeline jobs.
     """
     res = get_default_client().http.get("/rest/v0/pipeline_jobs")
-    return [PipelineJobRecord.from_json(p) for p in res]
+    return [PipelineJobRecord.from_json(p) for p in res["data"]]
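Taken together, the api.py changes above add client-side tracking for pipeline jobs: LogLine, PipelineJobRunRecord, the pipeline_jobs()/runs()/logs()/artifacts() accessors, a blocking wait() helper, and REST responses that are now unwrapped from a "data" envelope. The sketch below is a minimal, hypothetical usage example inferred only from the signatures shown in this diff, not taken from the package's documentation; the import alias and the pre-built `pipeline` and `args` objects are assumptions.

# Hypothetical usage of the pipeline-job API added above (0.22.0); a sketch only.
# `pipeline` is an already-built Pipeline and `args` an already-populated
# PipelineArgs, both constructed elsewhere with the pipelines package.
from luminarycloud.pipelines import api as pipelines

# Persist the pipeline definition, then submit a job with a table of argument rows.
record = pipelines.create_pipeline("wing-study", pipeline, description="demo")
job = pipelines.create_pipeline_job(record.id, args, "nightly-run")

# Block until the job reaches a terminal state, echoing log lines as they arrive.
final_status = job.wait(interval_seconds=10, print_logs=True)

if final_status == "completed":
    # Artifacts (geometries, meshes, simulations) come back as dicts with an "id" key.
    for artifact in job.artifacts():
        print(artifact["id"])
else:
    # Inspect the individual runs of a failed job.
    for run in job.runs():
        if run.status == "failed":
            for line in run.logs():
                print(f"[{line.timestamp}] {line.message}")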
@@ -103,3 +103,18 @@ class PipelineArgs:
         return (
             f"PipelineArgs(param_names={[p.name for p in self.params]}, row_count={len(self.rows)})"
         )
+
+    def print_as_table(self) -> None:
+        headers = [p.name for p in self.params]
+        row_strs = [[str(v) for v in row.row_values] for row in self.rows]
+        col_widths = [
+            max(len(headers[i]), *(len(r[i]) for r in row_strs)) for i in range(len(headers))
+        ]
+
+        def format_row(values: list[str]) -> str:
+            return " | ".join(val.ljust(col_widths[i]) for i, val in enumerate(values))
+
+        print(format_row(headers))
+        print("-+-".join("-" * w for w in col_widths))
+        for r in row_strs:
+            print(format_row(r))
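The new print_as_table helper on PipelineArgs renders the argument rows as a padded text table. Below is a hypothetical illustration of its output based on the implementation above; the parameter names and values are invented, and `args` is an already-populated PipelineArgs.

# Hypothetical illustration only; "velocity" and "aoa" are invented column names.
args.print_as_table()
# velocity | aoa
# ---------+----
# 10.0     | 2.0
# 20.0     | 4.0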