luminarycloud 0.20.0__py3-none-any.whl → 0.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. luminarycloud/__init__.py +5 -1
  2. luminarycloud/_client/client.py +5 -0
  3. luminarycloud/_client/http_client.py +10 -8
  4. luminarycloud/_feature_flag.py +22 -0
  5. luminarycloud/_helpers/_create_simulation.py +7 -2
  6. luminarycloud/_helpers/_upload_mesh.py +1 -0
  7. luminarycloud/_helpers/download.py +3 -1
  8. luminarycloud/_helpers/pagination.py +62 -0
  9. luminarycloud/_helpers/proto_decorator.py +13 -5
  10. luminarycloud/_helpers/upload.py +18 -12
  11. luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2.py +55 -0
  12. luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2.pyi +52 -0
  13. luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2_grpc.py +72 -0
  14. luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2_grpc.pyi +35 -0
  15. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +168 -124
  16. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +133 -4
  17. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +66 -0
  18. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +20 -0
  19. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +8 -8
  20. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +5 -5
  21. luminarycloud/_proto/api/v0/luminarycloud/mesh/mesh_pb2.py +74 -73
  22. luminarycloud/_proto/api/v0/luminarycloud/mesh/mesh_pb2.pyi +17 -3
  23. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +33 -20
  24. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +21 -1
  25. luminarycloud/_proto/api/v0/luminarycloud/project/project_pb2.py +16 -16
  26. luminarycloud/_proto/api/v0/luminarycloud/project/project_pb2.pyi +7 -3
  27. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +97 -61
  28. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +72 -3
  29. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +34 -0
  30. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +12 -0
  31. luminarycloud/_proto/api/v0/luminarycloud/simulation_template/simulation_template_pb2.py +33 -31
  32. luminarycloud/_proto/api/v0/luminarycloud/simulation_template/simulation_template_pb2.pyi +23 -2
  33. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +68 -19
  34. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +98 -0
  35. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2_grpc.py +33 -0
  36. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2_grpc.pyi +10 -0
  37. luminarycloud/_proto/assistant/assistant_pb2.py +74 -41
  38. luminarycloud/_proto/assistant/assistant_pb2.pyi +64 -2
  39. luminarycloud/_proto/assistant/assistant_pb2_grpc.py +33 -0
  40. luminarycloud/_proto/assistant/assistant_pb2_grpc.pyi +10 -0
  41. luminarycloud/_proto/base/base_pb2.py +20 -7
  42. luminarycloud/_proto/base/base_pb2.pyi +38 -0
  43. luminarycloud/_proto/cad/shape_pb2.py +39 -19
  44. luminarycloud/_proto/cad/shape_pb2.pyi +86 -34
  45. luminarycloud/_proto/cad/transformation_pb2.py +60 -16
  46. luminarycloud/_proto/cad/transformation_pb2.pyi +138 -32
  47. luminarycloud/_proto/client/simulation_pb2.py +490 -348
  48. luminarycloud/_proto/client/simulation_pb2.pyi +570 -8
  49. luminarycloud/_proto/geometry/geometry_pb2.py +77 -63
  50. luminarycloud/_proto/geometry/geometry_pb2.pyi +42 -3
  51. luminarycloud/_proto/hexmesh/hexmesh_pb2.py +24 -18
  52. luminarycloud/_proto/hexmesh/hexmesh_pb2.pyi +23 -2
  53. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +10 -10
  54. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.pyi +5 -5
  55. luminarycloud/_proto/quantity/quantity_options_pb2.py +6 -6
  56. luminarycloud/_proto/quantity/quantity_options_pb2.pyi +10 -1
  57. luminarycloud/_proto/quantity/quantity_pb2.py +176 -167
  58. luminarycloud/_proto/quantity/quantity_pb2.pyi +11 -5
  59. luminarycloud/enum/__init__.py +1 -0
  60. luminarycloud/enum/gpu_type.py +2 -0
  61. luminarycloud/enum/quantity_type.py +9 -0
  62. luminarycloud/enum/vis_enums.py +23 -3
  63. luminarycloud/feature_modification.py +45 -35
  64. luminarycloud/geometry.py +104 -8
  65. luminarycloud/geometry_version.py +57 -3
  66. luminarycloud/meshing/mesh_generation_params.py +8 -8
  67. luminarycloud/params/enum/_enum_wrappers.py +537 -30
  68. luminarycloud/params/simulation/adaptive_mesh_refinement_.py +4 -0
  69. luminarycloud/params/simulation/physics/__init__.py +0 -1
  70. luminarycloud/params/simulation/physics/periodic_pair_.py +12 -31
  71. luminarycloud/physics_ai/architectures.py +5 -5
  72. luminarycloud/physics_ai/inference.py +13 -13
  73. luminarycloud/physics_ai/solution.py +3 -1
  74. luminarycloud/pipelines/__init__.py +11 -3
  75. luminarycloud/pipelines/api.py +240 -4
  76. luminarycloud/pipelines/arguments.py +15 -0
  77. luminarycloud/pipelines/core.py +113 -96
  78. luminarycloud/pipelines/{operators.py → stages.py} +96 -39
  79. luminarycloud/project.py +15 -47
  80. luminarycloud/simulation.py +66 -3
  81. luminarycloud/simulation_param.py +0 -9
  82. luminarycloud/types/matrix3.py +12 -0
  83. luminarycloud/vis/__init__.py +2 -0
  84. luminarycloud/vis/interactive_report.py +79 -93
  85. luminarycloud/vis/report.py +219 -65
  86. luminarycloud/vis/visualization.py +60 -0
  87. luminarycloud/volume_selection.py +132 -69
  88. {luminarycloud-0.20.0.dist-info → luminarycloud-0.22.0.dist-info}/METADATA +1 -1
  89. {luminarycloud-0.20.0.dist-info → luminarycloud-0.22.0.dist-info}/RECORD +90 -89
  90. luminarycloud/params/simulation/physics/periodic_pair/__init__.py +0 -2
  91. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/__init__.py +0 -2
  92. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/rotational_periodicity_.py +0 -31
  93. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/translational_periodicity_.py +0 -29
  94. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type_.py +0 -25
  95. {luminarycloud-0.20.0.dist-info → luminarycloud-0.22.0.dist-info}/WHEEL +0 -0
@@ -41,6 +41,8 @@ class AdaptiveMeshRefinement(CodeRepr, ParamGroupWrapper[clientpb.AdaptiveMeshRe
     "The method to generate the computational mesh."
     target_cv_millions: int = 10
     "User-requested mesh size in millions of control volumes."
+    all_tet: enum.AllTet = enum.AllTet.ALL_TET_ON
+    "Automatically inserts high aspect ratio mesh elements in the boundary layer and ignores all adaptation boundary layer settings."
     user_scaling: LcFloat = 1.0
     "Scale factor between the geometry and the mesh."
     boundary_layer_profile: list[BoundaryLayerProfile] = field(default_factory=list)
@@ -55,6 +57,7 @@ class AdaptiveMeshRefinement(CodeRepr, ParamGroupWrapper[clientpb.AdaptiveMeshRe
         _proto.final_target_complexity.CopyFrom(_to_ad_proto(self.final_target_complexity))
         _proto.meshing_method = self.meshing_method.value
         _proto.target_cv_millions.value = self.target_cv_millions
+        _proto.all_tet = self.all_tet.value
         _proto.user_scaling.CopyFrom(_to_ad_proto(self.user_scaling))
         if self.boundary_layer_profile is not None:
             _proto.boundary_layer_profile.extend(v._to_proto() for v in self.boundary_layer_profile)
@@ -68,6 +71,7 @@ class AdaptiveMeshRefinement(CodeRepr, ParamGroupWrapper[clientpb.AdaptiveMeshRe
         self.final_target_complexity = _from_ad_proto(proto.final_target_complexity)
         self.meshing_method = enum.MeshingMethod(proto.meshing_method)
         self.target_cv_millions = proto.target_cv_millions.value
+        self.all_tet = enum.AllTet(proto.all_tet)
         self.user_scaling = _from_ad_proto(proto.user_scaling)
         self.boundary_layer_profile = [
             BoundaryLayerProfile.from_proto(v) for v in proto.boundary_layer_profile
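
Taken together, these three hunks add a single new `all_tet` field to `AdaptiveMeshRefinement` and wire it through the proto round trip. A minimal sketch of how a user would toggle it (the import paths follow the file layout above; an `ALL_TET_OFF` variant is assumed to exist alongside the `ALL_TET_ON` default shown in the diff):

    import luminarycloud.params.enum._enum_wrappers as enum
    from luminarycloud.params.simulation.adaptive_mesh_refinement_ import AdaptiveMeshRefinement

    # ALL_TET_ON is the new default; switching it off is assumed to look like this.
    amr = AdaptiveMeshRefinement(target_cv_millions=20)
    amr.all_tet = enum.AllTet.ALL_TET_OFF
    proto = amr._to_proto()  # the new field round-trips via proto.all_tet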
@@ -1,6 +1,5 @@
 from . import fluid
 from . import heat
-from . import periodic_pair
 from . import solution_controls
 from .fluid_ import Fluid
 from .heat_ import Heat
@@ -17,17 +17,6 @@ import luminarycloud.params.enum._enum_wrappers as enum
 
 from luminarycloud.params.simulation._lib import ParamGroupWrapper, create_unique_id
 
-from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type_ import PeriodicityType
-from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type_ import *
-from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type.rotational_periodicity_ import (
-    RotationalPeriodicity,
-)
-from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type.rotational_periodicity_ import *
-from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type.translational_periodicity_ import (
-    TranslationalPeriodicity,
-)
-from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type.translational_periodicity_ import *
-
 
 @dataclass(kw_only=True)
 class PeriodicPair(CodeRepr, ParamGroupWrapper[clientpb.PeriodicPair]):
@@ -39,8 +28,12 @@ class PeriodicPair(CodeRepr, ParamGroupWrapper[clientpb.PeriodicPair]):
     ""
     surfaces_side_b: list[str] = field(default_factory=list)
     ""
-    periodicity_type: PeriodicityType = field(default_factory=TranslationalPeriodicity)
-    " Possible types: ``TranslationalPeriodicity``, ``RotationalPeriodicity`` from the ``periodicity_type`` module."
+    translation: Vector3 = field(default_factory=lambda: Vector3(0.0, 0.0, 0.0))
+    "Holds the translation vector in the x,y,z directions from each point on the first periodic boundary to its matching point on the second periodic boundary."
+    center: Vector3 = field(default_factory=lambda: Vector3(0.0, 0.0, 0.0))
+    "Origin for the rotational periodicity transformation."
+    rotation_vector: Vector3 = field(default_factory=lambda: Vector3(0.0, 0.0, 0.0))
+    "Holds the rotation vector needed to transform each point on the first periodic boundary to its matching point on the second periodic boundary. The vector direction defines the periodicity axis and its magnitude defines the periodicity angle, in degrees, from A to B according to the right-hand rule."
 
     def _to_proto(self) -> clientpb.PeriodicPair:
         _proto = clientpb.PeriodicPair()
@@ -50,28 +43,16 @@ class PeriodicPair(CodeRepr, ParamGroupWrapper[clientpb.PeriodicPair]):
         _proto.bound_a.extend(self.surfaces_side_a)
         if self.surfaces_side_b is not None:
             _proto.bound_b.extend(self.surfaces_side_b)
-        if isinstance(self.periodicity_type, TranslationalPeriodicity):
-            _proto.periodic_bc_type = clientpb.TRANSLATIONAL
-            _proto.periodic_translation.CopyFrom(self.periodicity_type.translation._to_ad_proto())
-        if isinstance(self.periodicity_type, RotationalPeriodicity):
-            _proto.periodic_bc_type = clientpb.ROTATIONAL
-            _proto.periodic_center_of_rotation.CopyFrom(self.periodicity_type.center._to_ad_proto())
-            _proto.periodic_rotation_angles.CopyFrom(
-                self.periodicity_type.rotation_vector._to_ad_proto()
-            )
+        _proto.periodic_translation.CopyFrom(self.translation._to_ad_proto())
+        _proto.periodic_center_of_rotation.CopyFrom(self.center._to_ad_proto())
+        _proto.periodic_rotation_angles.CopyFrom(self.rotation_vector._to_ad_proto())
         return _proto
 
     def _from_proto(self, proto: clientpb.PeriodicPair) -> None:
         self.name = proto.periodic_pair_name
         self.surfaces_side_a.extend(proto.bound_a)
         self.surfaces_side_b.extend(proto.bound_b)
-        if proto.periodic_bc_type == clientpb.INVALID_PERIODIC_BC_TYPE:
-            raise ValueError("Invalid periodicity_type")
-        elif proto.periodic_bc_type == clientpb.TRANSLATIONAL:
-            self.periodicity_type = TranslationalPeriodicity()
-            self.periodicity_type.translation._from_ad_proto(proto.periodic_translation)
-        elif proto.periodic_bc_type == clientpb.ROTATIONAL:
-            self.periodicity_type = RotationalPeriodicity()
-            self.periodicity_type.center._from_ad_proto(proto.periodic_center_of_rotation)
-            self.periodicity_type.rotation_vector._from_ad_proto(proto.periodic_rotation_angles)
+        self.translation._from_ad_proto(proto.periodic_translation)
+        self.center._from_ad_proto(proto.periodic_center_of_rotation)
+        self.rotation_vector._from_ad_proto(proto.periodic_rotation_angles)
         return None
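
With the periodicity-type subclasses removed, a `PeriodicPair` now carries `translation`, `center`, and `rotation_vector` directly; `_to_proto` populates all three proto fields unconditionally and no longer writes `periodic_bc_type`. A hedged migration sketch (the `Vector3` import location is assumed, not shown in this diff):

    from luminarycloud.types import Vector3  # assumed location of Vector3
    from luminarycloud.params.simulation.physics.periodic_pair_ import PeriodicPair

    # 0.20.0: pair.periodicity_type = TranslationalPeriodicity(translation=...)
    # 0.22.0: set the translation vector directly; the rotational fields stay zero.
    pair = PeriodicPair(
        surfaces_side_a=["side_a"],
        surfaces_side_b=["side_b"],
        translation=Vector3(0.0, 0.0, 0.1),
    )

    # Rotational periodicity: the vector's direction is the axis, its magnitude
    # the angle in degrees from A to B (right-hand rule).
    rot = PeriodicPair(
        surfaces_side_a=["blade_a"],
        surfaces_side_b=["blade_b"],
        center=Vector3(0.0, 0.0, 0.0),
        rotation_vector=Vector3(0.0, 0.0, 30.0),
    )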
@@ -56,14 +56,14 @@ class PhysicsAiArchitectureVersion(ProtoWrapperBase):
         config["priority_class"] = "prod-batch-priority"
         if "resources" not in config:
             config["resources"] = {}
-        if "process_gpus" not in config["resources"]:
-            config["resources"]["process_gpus"] = 8
+        if "process_cpus" not in config["resources"]:
+            config["resources"]["process_cpus"] = 8
         if "train_gpus" not in config["resources"]:
-            config["resources"]["train_gpus"] = 8
+            config["resources"]["train_gpus"] = 1
         if "test_gpus" not in config["resources"]:
-            config["resources"]["test_gpus"] = 8
+            config["resources"]["test_gpus"] = 1
         if "mode" not in config:
-            config["mode"] = "full-gpu"
+            config["mode"] = "full"
 
         training_config_json = json.dumps(config, indent=2)
         external_dataset_uri = f"gs://training-data/architecture-{self.id}"
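
For reference, an empty training config run through the defaulting above now comes out as follows (values straight from the hunk):

    config = {
        "priority_class": "prod-batch-priority",
        "resources": {
            "process_cpus": 8,  # key renamed from process_gpus
            "train_gpus": 1,    # was 8
            "test_gpus": 1,     # was 8
        },
        "mode": "full",         # was "full-gpu"
    }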
@@ -17,6 +17,7 @@ from ..project import Project
 from ..project import Project
 from .._helpers import upload_file
 from .._proto.upload import upload_pb2 as uploadpb
+from ..types.ids import PhysicsAiModelVersionID
 
 
 @dataclass
@@ -50,7 +51,7 @@ class ExtAeroInferenceResult:
 def external_aero_inference(
     project: Project,
     stl_file: str,
-    artifact_url: str,
+    model_version_id: PhysicsAiModelVersionID,
     conditions: Optional[Dict[str, Any]] = None,
     settings: Optional[Dict[str, Any]] = None,
     write_visualization_data=False,
@@ -62,8 +63,8 @@ def external_aero_inference(
         The project to which the inference files will be added.
     stl_file : str
         Fullpath the STL file to be used for inference.
-    artifact_url : str
-        Fullpath of the model artifact directory to be used for inference.
+    model_version_id : PhysicsAiModelVersionID
+        The ID of the trained model version to use for inference.
     conditions : Dict[str, Any], optional
         Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
     settings : Dict[str, Any], optional
@@ -80,7 +81,7 @@ def external_aero_inference(
     """
 
     result = perform_inference(
-        project, stl_file, artifact_url, conditions, settings, write_visualization_data
+        project, stl_file, model_version_id, conditions, settings, write_visualization_data
     )
     return ExtAeroInferenceResult(result)
 
@@ -88,7 +89,7 @@ def external_aero_inference(
 def perform_inference(
     project: Project,
     stl_file: str,
-    artifact_url: str,
+    model_version_id: PhysicsAiModelVersionID,
     conditions: Optional[Dict[str, Any]] = None,
     settings: Optional[Dict[str, Any]] = None,
     write_visualization_data=False,
@@ -100,8 +101,8 @@ def perform_inference(
         The project to which the inference files will be added.
     stl_file : str
         Fullpath the STL file to be used for inference.
-    artifact_url : str
-        Fullpath of the model artifact directory to be used for inference.
+    model_version_id : PhysicsAiModelVersionID
+        The ID of the trained model version to use for inference.
     conditions : Dict[str, Any], optional
         Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
     settings : Dict[str, Any], optional
@@ -142,7 +143,7 @@ def perform_inference(
     stl_url = upload_if_file(stl_file)
 
     raw = start_inference_job(
-        project, stl_url, artifact_url, conditions, settings, write_visualization_data
+        project, stl_url, model_version_id, conditions, settings, write_visualization_data
     )
     currated: dict[str, Any] = {}
     for k, v in raw.items():
@@ -162,7 +163,7 @@ def perform_inference(
 def start_inference_job(
     project: Project,
     stl_url: str,
-    artifact_url: str,
+    model_version_id: PhysicsAiModelVersionID,
     conditions: Optional[Dict[str, Any]] = None,
     settings: Optional[Dict[str, Any]] = None,
     write_visualization_data=False,
@@ -174,8 +175,8 @@ def start_inference_job(
         Reference to a project.
     stl_url : str
         URL of the STL file to be used for inference.
-    artifact_url : str
-        URL of the model artifact directory to be used for inference.
+    model_version_id : PhysicsAiModelVersionID
+        The ID of the trained model version to use for inference.
     conditions : Dict[str, Any], optional
         Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
     settings : Dict[str, Any], optional
@@ -203,13 +204,12 @@ def start_inference_job(
 
     req = inferencepb.CreateInferenceServiceJobRequest(
         stl_url=stl_url,
-        artifact_url=artifact_url,
+        model_version_id=str(model_version_id),
         conditions=conditions_bytes,
         settings=settings_bytes,
         project_id=project.id,
         write_visualization_data=write_visualization_data,
     )
-
     res: inferencepb.CreateInferenceServiceJobResponse = (
         get_default_client().CreateInferenceServiceJob(req)
     )
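
All four call sites swap `artifact_url: str` for a typed `model_version_id`, which is stringified only at the request boundary. A hedged end-to-end sketch (the project lookup and both ID values are illustrative placeholders, not from this diff):

    import luminarycloud as lc
    from luminarycloud.physics_ai.inference import external_aero_inference

    project = lc.get_project("your-project-id")  # illustrative lookup
    result = external_aero_inference(
        project,
        stl_file="/path/to/body.stl",
        model_version_id="your-model-version-id",  # replaces the old artifact_url
        conditions={"alpha": 2.0, "beta": 0.0},
    )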
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import tarfile
-from typing import List, Optional, BinaryIO, cast
+from typing import List, Optional, BinaryIO, cast, Dict
 
 from .._client import get_default_client
 from .._helpers.download import download_solution_physics_ai as _download_solution_physics_ai
@@ -16,6 +16,7 @@ def _download_processed_solution_physics_ai( # noqa: F841
     volume_fields_to_keep: Optional[List[QuantityType]] = None,
     process_volume: bool = False,
     single_precision: bool = True,
+    internal_options: Optional[Dict[str, str]] = None,
 ) -> tarfile.TarFile:
     """
     Download solution data with physics AI processing applied.
@@ -50,6 +51,7 @@ def _download_processed_solution_physics_ai( # noqa: F841
         volume_fields_to_keep=volume_fields_to_keep,
         process_volume=process_volume,
         single_precision=single_precision,
+        internal_options=internal_options,
     )
 
     assert stream is not None, "Failed to download solution data"
@@ -2,6 +2,8 @@
 from .core import (
     Pipeline as Pipeline,
     PipelineParameter as PipelineParameter,
+    # Stage base class, mainly exported for testing
+    Stage as Stage,
 )
 
 from .parameters import (
@@ -11,9 +13,7 @@ from .parameters import (
     BoolPipelineParameter as BoolPipelineParameter,
 )
 
-from .operators import (
-    # Operator base class, mainly exported for testing
-    Operator as Operator,
+from .stages import (
     # PipelineOutputs, i.e. things that "flow" in a Pipeline
     PipelineOutputGeometry as PipelineOutputGeometry,
     PipelineOutputMesh as PipelineOutputMesh,
@@ -21,6 +21,8 @@ from .operators import (
     # Concrete operators and their output types
     ReadGeometry as ReadGeometry,
     ReadGeometryOutputs as ReadGeometryOutputs,
+    ReadMesh as ReadMesh,
+    ReadMeshOutputs as ReadMeshOutputs,
     ModifyGeometry as ModifyGeometry,
     ModifyGeometryOutputs as ModifyGeometryOutputs,
     Mesh as Mesh,
@@ -39,4 +41,10 @@ from .api import (
     list_pipelines as list_pipelines,
     get_pipeline as get_pipeline,
     create_pipeline_job as create_pipeline_job,
+    get_pipeline_job as get_pipeline_job,
+    list_pipeline_jobs as list_pipeline_jobs,
+    PipelineJobRecord as PipelineJobRecord,
+    PipelineRecord as PipelineRecord,
+    PipelineJobRunRecord as PipelineJobRunRecord,
+    LogLine as LogLine,
 )
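
Net effect on the public import surface: `operators.py` becomes `stages.py`, the `Operator` base class is renamed `Stage`, and the new job-record helpers are re-exported from the package root. A sketch of imports that should resolve after this change:

    # 0.20.0: from luminarycloud.pipelines import Operator
    # 0.22.0:
    from luminarycloud.pipelines import (
        Stage,               # renamed base class, exported mainly for testing
        ReadMesh,            # new stage alongside ReadGeometry
        get_pipeline_job,
        list_pipeline_jobs,
        PipelineJobRecord,
        LogLine,
    )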
@@ -1,15 +1,39 @@
 # Copyright 2023-2024 Luminary Cloud, Inc. All Rights Reserved.
-from typing import Literal
+from typing import Any, Literal
 from dataclasses import dataclass
 
 from datetime import datetime
+from time import time, sleep
+import logging
 
+from .arguments import PipelineArgValueType
 from ..pipelines import Pipeline, PipelineArgs
 from .._client import get_default_client
 
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class LogLine:
+    timestamp: datetime
+    level: int
+    message: str
+
+    @classmethod
+    def from_json(cls, json: dict) -> "LogLine":
+        return cls(
+            timestamp=datetime.fromisoformat(json["timestamp"]),
+            level=json["level"],
+            message=json["message"],
+        )
+
 
 @dataclass
 class PipelineRecord:
+    """
+    A PipelineRecord represents a persisted pipeline.
+    """
+
     id: str
     name: str
     description: str | None
@@ -17,8 +41,10 @@ class PipelineRecord:
     created_at: datetime
     updated_at: datetime
 
-    def pipeline(self) -> Pipeline:
-        return Pipeline._from_yaml(self.definition_yaml)
+    # I don't think users need to get the Pipeline object from a PipelineRecord, but if they did,
+    # it would be done like this.
+    # def pipeline(self) -> Pipeline:
+    #     return Pipeline._from_yaml(self.definition_yaml)
 
     @classmethod
     def from_json(cls, json: dict) -> "PipelineRecord":
@@ -31,14 +57,30 @@ class PipelineRecord:
             updated_at=datetime.fromisoformat(json["updated_at"]),
         )
 
+    def pipeline_jobs(self) -> "list[PipelineJobRecord]":
+        """
+        Returns a list of pipeline jobs that were created from this pipeline.
+
+        Returns
+        -------
+        list[PipelineJobRecord]
+            A list of PipelineJobRecord objects.
+        """
+        res = get_default_client().http.get(f"/rest/v0/pipelines/{self.id}/pipeline_jobs")
+        return [PipelineJobRecord.from_json(p) for p in res["data"]]
+
 
 @dataclass
 class PipelineJobRecord:
+    """
+    A PipelineJobRecord represents a persisted pipeline job.
+    """
+
     id: str
     pipeline_id: str
     name: str
     description: str | None
-    status: Literal["pending", "running", "completed", "failed", "cancelled"]
+    status: Literal["pending", "running", "completed", "failed"]
     created_at: datetime
     updated_at: datetime
     started_at: datetime | None
@@ -60,6 +102,200 @@ class PipelineJobRecord:
             ),
         )
 
+    def pipeline(self) -> PipelineRecord:
+        """
+        Returns the pipeline that this pipeline job was created from.
+
+        Returns
+        -------
+        PipelineRecord
+            The PipelineRecord for the pipeline that this pipeline job was created from.
+        """
+        return get_pipeline(self.pipeline_id)
+
+    def runs(self) -> "list[PipelineJobRunRecord]":
+        """
+        Returns a list of runs for this pipeline job.
+
+        Returns
+        -------
+        list[PipelineJobRunRecord]
+            A list of PipelineJobRunRecord objects.
+        """
+        res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/runs")
+        return [PipelineJobRunRecord.from_json(r) for r in res["data"]]
+
+    def logs(self) -> list[LogLine]:
+        """
+        Returns a list of log lines for this pipeline job.
+
+        Each log line is a LogLine object, which has a timestamp, level, and message.
+
+        Returns
+        -------
+        list[LogLine]
+            A list of LogLine objects.
+        """
+        res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/logs")
+        return [LogLine.from_json(l) for l in res["data"]]
+
+    def artifacts(self) -> list[dict]:
+        """
+        Returns a list of artifacts that were produced by this pipeline job.
+
+        Artifacts are things like Geometries, Meshes, and Simulations. Each artifact is a dictionary
+        with an "id" key, which is an identifier for the artifact.
+
+        .. warning:: This feature is experimental and may change or be removed in the future.
+
+        Returns
+        -------
+        list[dict]
+            A list of artifact dictionaries.
+        """
+        res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/artifacts")
+        return res["data"]
+
+    def wait(
+        self,
+        *,
+        interval_seconds: float = 5,
+        timeout_seconds: float = float("inf"),
+        print_logs: bool = False,
+    ) -> Literal["completed", "failed"]:
+        """
+        Wait for the pipeline job to complete or fail.
+
+        This method polls the pipeline job status at regular intervals until it reaches
+        a terminal state (completed or failed).
+
+        Parameters
+        ----------
+        interval_seconds : float
+            Number of seconds between status polls. Default is 5 seconds.
+        timeout_seconds : float
+            Number of seconds before the operation times out. Default is infinity.
+        print_logs : bool
+            If True, prints new log lines as they become available. Default is False.
+
+        Returns
+        -------
+        Literal["completed", "failed"]
+            The final status of the pipeline job.
+
+        Raises
+        ------
+        TimeoutError
+            If the pipeline job does not complete within the specified timeout.
+
+        Examples
+        --------
+        >>> pipeline_job = pipelines.create_pipeline_job(pipeline.id, args, "My Job")
+        >>> final_status = pipeline_job.wait(timeout_seconds=3600)
+        >>> print(f"Pipeline job finished with status: {final_status}")
+        """
+        deadline = time() + timeout_seconds
+        last_log_count = 0
+
+        while True:
+            # Refresh the pipeline job status
+            updated_job = get_pipeline_job(self.id)
+
+            # Print new logs if requested
+            if print_logs:
+                logs = updated_job.logs()
+                if len(logs) > last_log_count:
+                    for log_line in logs[last_log_count:]:
+                        print(f"[{log_line.timestamp}] {log_line.message}")
+                    last_log_count = len(logs)
+
+            # Check if we've reached a terminal state
+            if updated_job.status == "completed":
+                logger.info(f"Pipeline job {self.id} completed successfully")
+                return "completed"
+            elif updated_job.status == "failed":
+                logger.warning(f"Pipeline job {self.id} failed")
+                return "failed"
+
+            # Check timeout
+            if time() >= deadline:
+                raise TimeoutError(
+                    f"Timed out waiting for pipeline job {self.id} to complete. "
+                    f"Current status: {updated_job.status}"
+                )
+
+            # Wait before next poll
+            sleep(max(0, min(interval_seconds, deadline - time())))
+
+            # Update self with the latest status
+            self.status = updated_job.status
+            self.updated_at = updated_job.updated_at
+            self.started_at = updated_job.started_at
+            self.completed_at = updated_job.completed_at
+
+
+@dataclass
+class PipelineJobRunRecord:
+    pipeline_job_id: str
+    idx: int
+    arguments: list[PipelineArgValueType]
+    status: Literal["pending", "running", "completed", "failed"]
+
+    @classmethod
+    def from_json(cls, json: dict) -> "PipelineJobRunRecord":
+        return cls(
+            pipeline_job_id=json["pipeline_job_id"],
+            idx=json["idx"],
+            arguments=json["arguments"],
+            status=json["status"],
+        )
+
+    def pipeline_job(self) -> PipelineJobRecord:
+        """
+        Returns the pipeline job that this pipeline job run was created from.
+
+        Returns
+        -------
+        PipelineJobRecord
+            The PipelineJobRecord for the pipeline job that this pipeline job run was created from.
+        """
+        return get_pipeline_job(self.pipeline_job_id)
+
+    def logs(self) -> list[LogLine]:
+        """
+        Returns a list of log lines for this pipeline job run.
+
+        Each log line is a LogLine object, which has a timestamp, level, and message.
+
+        Returns
+        -------
+        list[LogLine]
+            A list of LogLine objects.
+        """
+        res = get_default_client().http.get(
+            f"/rest/v0/pipeline_jobs/{self.pipeline_job_id}/runs/{self.idx}/logs"
+        )
+        return [LogLine.from_json(l) for l in res["data"]]
+
+    def artifacts(self) -> list[dict]:
+        """
+        Returns a list of artifacts that were produced by this pipeline job run.
+
+        Artifacts are things like Geometries, Meshes, and Simulations. Each artifact is a dictionary
+        with an "id" key, which is an identifier for the artifact.
+
+        .. warning:: This feature is experimental and may change or be removed in the future.
+
+        Returns
+        -------
+        list[dict]
+            A list of artifact dictionaries.
+        """
+        res = get_default_client().http.get(
+            f"/rest/v0/pipeline_jobs/{self.pipeline_job_id}/runs/{self.idx}/artifacts"
+        )
+        return res["data"]
+
 
 def create_pipeline(
     name: str, pipeline: Pipeline | str, description: str | None = None
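
The new records form a small navigable hierarchy: `PipelineRecord.pipeline_jobs()` → `PipelineJobRecord.runs()` → `PipelineJobRunRecord`, each level with its own `logs()` and experimental `artifacts()`. A hedged usage sketch (the module alias and job ID are placeholders):

    from luminarycloud import pipelines  # assumed top-level module alias

    job = pipelines.get_pipeline_job("your-pipeline-job-id")
    final = job.wait(print_logs=True, timeout_seconds=3600)  # "completed" or "failed"
    for run in job.runs():
        print(run.idx, run.status)
        for line in run.logs():
            print(f"[{line.timestamp}] {line.message}")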
@@ -103,3 +103,18 @@ class PipelineArgs:
         return (
             f"PipelineArgs(param_names={[p.name for p in self.params]}, row_count={len(self.rows)})"
         )
+
+    def print_as_table(self) -> None:
+        headers = [p.name for p in self.params]
+        row_strs = [[str(v) for v in row.row_values] for row in self.rows]
+        col_widths = [
+            max(len(headers[i]), *(len(r[i]) for r in row_strs)) for i in range(len(headers))
+        ]
+
+        def format_row(values: list[str]) -> str:
+            return " | ".join(val.ljust(col_widths[i]) for i, val in enumerate(values))
+
+        print(format_row(headers))
+        print("-+-".join("-" * w for w in col_widths))
+        for r in row_strs:
+            print(format_row(r))
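
Given the column-width math above, `print_as_table()` renders a header row, a separator, and left-justified value rows. Illustrative output for a two-parameter args table (parameter names and values invented for the example):

    # >>> args.print_as_table()   # args: PipelineArgs with params "alpha", "mach"
    # alpha | mach
    # ------+-----
    # 2.0   | 0.3
    # 4.0   | 0.3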