luminarycloud 0.16.1__py3-none-any.whl → 0.16.2__py3-none-any.whl

This diff covers publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Files changed (30)
  1. luminarycloud/_client/client.py +1 -1
  2. luminarycloud/_helpers/warnings/__init__.py +0 -1
  3. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +88 -34
  4. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +96 -6
  5. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.py +68 -0
  6. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.pyi +24 -0
  7. luminarycloud/_proto/cad/shape_pb2.py +78 -19
  8. luminarycloud/_proto/cad/transformation_pb2.py +34 -15
  9. luminarycloud/_proto/upload/upload_pb2.py +25 -15
  10. luminarycloud/_proto/upload/upload_pb2.pyi +31 -2
  11. luminarycloud/geometry.py +2 -2
  12. luminarycloud/named_variable_set.py +3 -4
  13. luminarycloud/outputs/stopping_conditions.py +0 -3
  14. luminarycloud/physics_ai/architectures.py +0 -4
  15. luminarycloud/physics_ai/inference.py +0 -4
  16. luminarycloud/physics_ai/models.py +0 -4
  17. luminarycloud/physics_ai/solution.py +2 -2
  18. luminarycloud/pipelines/__init__.py +6 -0
  19. luminarycloud/pipelines/arguments.py +105 -0
  20. luminarycloud/pipelines/core.py +204 -20
  21. luminarycloud/pipelines/operators.py +11 -9
  22. luminarycloud/pipelines/parameters.py +25 -4
  23. luminarycloud/project.py +4 -5
  24. luminarycloud/simulation_param.py +0 -2
  25. luminarycloud/simulation_template.py +1 -3
  26. luminarycloud/solution.py +1 -3
  27. {luminarycloud-0.16.1.dist-info → luminarycloud-0.16.2.dist-info}/METADATA +1 -1
  28. {luminarycloud-0.16.1.dist-info → luminarycloud-0.16.2.dist-info}/RECORD +29 -29
  29. luminarycloud/_helpers/warnings/experimental.py +0 -48
  30. {luminarycloud-0.16.1.dist-info → luminarycloud-0.16.2.dist-info}/WHEEL +0 -0
luminarycloud/pipelines/arguments.py ADDED
@@ -0,0 +1,105 @@
+ # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
+ from typing import Any, Type
+
+ from .core import PipelineParameter
+
+
+ class _NVS(PipelineParameter):
+     @classmethod
+     def _represented_type(cls) -> Type:
+         return str
+
+     @classmethod
+     def _type_name(cls) -> str:
+         return "Named Variable Set"
+
+     def _validate(self) -> None:
+         if self.name != "$named-variable-set":
+             raise ValueError(
+                 "The Named Variable Set PipelineParameter must be named '$named-variable-set'"
+             )
+
+     def _add_to_params(self, params: dict) -> None:
+         raise ValueError(
+             "The NamedVariableSet parameter cannot be used explicitly in a Pipeline. It can only be used in PipelineArgs."
+         )
+
+     def _is_valid_value(self, value: Any) -> bool:
+         return isinstance(value, str) and value.startswith("namedvarset-")
+
+
+ ArgNamedVariableSet = _NVS("$named-variable-set")
+ """
+ This can be used in a PipelineArgs params list to add a Named Variable Set column to the args table.
+ There must be zero or one of these in a PipelineArgs params list.
+ """
+
+ # The types that are allowed as PipelineArgs values. This is a union of all concrete
+ # PipelineParameters' "represented types".
+ PipelineArgValueType = str | int | float | bool
+
+
+ class PipelineArgsRow:
+     def __init__(self, args: "PipelineArgs", row_values: list[PipelineArgValueType]):
+         self.args = args
+         self.row_values = row_values
+         self._validate()
+
+     def _validate(self) -> None:
+         if len(self.row_values) != len(self.args.params):
+             raise ValueError(
+                 f"PipelineArgs row wrong size. Expected {len(self.args.params)}, got {len(self.row_values)}"
+             )
+         for i, v in enumerate(self.row_values):
+             param = self.args.params[i]
+             if not param._is_valid_value(v):
+                 raise ValueError(f"PipelineArgs value {v} is invalid for parameter {param}")
+
+     def value_for(self, param_name: str) -> PipelineArgValueType:
+         return self.row_values[self.args.column_for(param_name)]
+
+     def has_column_for(self, param_name: str) -> bool:
+         return self.args.has_column_for(param_name)
+
+     def __str__(self) -> str:
+         s = "PipelineArgsRow("
+         for i, v in enumerate(self.row_values):
+             s += f"{self.args.params[i].name}={repr(v)}, "
+         s += ")"
+         return s
+
+
+ class PipelineArgs:
+     def __init__(self, params: list[PipelineParameter], args: list[list[PipelineArgValueType]]):
+         self.params = params
+         self._param_index_by_name = {p.name: i for i, p in enumerate(params)}
+         self._validate_params()
+         self.rows = [PipelineArgsRow(self, arg) for arg in args]
+
+     def has_column_for(self, param_name: str) -> bool:
+         return param_name in self._param_index_by_name
+
+     def column_for(self, param_name: str) -> int:
+         if not self.has_column_for(param_name):
+             raise ValueError(f'Parameter "{param_name}" not found')
+         return self._param_index_by_name[param_name]
+
+     def _validate_params(self) -> None:
+         has_nvs = False
+         seen_param_names = set()
+         for p in self.params:
+             if isinstance(p, _NVS):
+                 if has_nvs:
+                     raise ValueError(
+                         "There can be at most one Named Variable Set column in a PipelineArgs"
+                     )
+                 has_nvs = True
+             else:
+                 if p.name in seen_param_names:
+                     raise ValueError(f'There is more than one parameter named "{p.name}"')
+                 seen_param_names.add(p.name)
+
+     def __str__(self) -> str:
+         return (
+             f"PipelineArgs(param_names={[p.name for p in self.params]}, row_count={len(self.rows)})"
+         )
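
A minimal usage sketch of the new module (not part of the diff; the parameter name and Named Variable Set ids are illustrative, and the one-argument constructor is the one implied by `_NVS("$named-variable-set")` above):

    from luminarycloud.pipelines.arguments import ArgNamedVariableSet, PipelineArgs
    from luminarycloud.pipelines.parameters import FloatPipelineParameter

    # Two columns: a float parameter plus the (at most one) Named Variable Set column.
    args = PipelineArgs(
        params=[FloatPipelineParameter("inlet_velocity"), ArgNamedVariableSet],
        args=[
            [10.0, "namedvarset-111"],  # each row is validated against the params list
            [20.0, "namedvarset-222"],
        ],
    )
    args.rows[0].value_for("inlet_velocity")  # -> 10.0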
luminarycloud/pipelines/core.py CHANGED
@@ -1,14 +1,27 @@
  # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
  from abc import ABC, abstractmethod
  from dataclasses import is_dataclass, fields
- from typing import Type, TypeVar, Generic
+ from typing import Any, Type, TypeVar, Generic
+ from typing_extensions import Self
  import re
  import yaml

- from .._helpers.warnings import experimental
  from ..pipeline_util.yaml import ensure_yamlizable


+ class PipelineParameterRegistry:
+     def __init__(self):
+         self.parameters = {}
+
+     def register(self, parameter_class: Type["PipelineParameter"]) -> None:
+         self.parameters[parameter_class._type_name()] = parameter_class
+
+     def get(self, type_name: str) -> Type["PipelineParameter"]:
+         if type_name not in self.parameters:
+             raise ValueError(f"Unknown parameter type: {type_name}")
+         return self.parameters[type_name]
+
+
  class PipelineParameter(ABC):
      """
      Base class for all concrete PipelineParameters.
@@ -20,10 +33,16 @@ class PipelineParameter(ABC):

      @property
      def type(self) -> str:
-         return self._type()
+         return self.__class__._type_name()
+
+     @classmethod
+     @abstractmethod
+     def _represented_type(cls) -> Type:
+         pass

+     @classmethod
      @abstractmethod
-     def _type(self) -> str:
+     def _type_name(cls) -> str:
          pass

      def _validate(self) -> None:
@@ -42,6 +61,30 @@ class PipelineParameter(ABC):
      def _to_pipeline_dict(self) -> tuple[dict, list["PipelineParameter"]]:
          return {"$pipeline_param": self.name}, [self]

+     def __str__(self) -> str:
+         return f'{self.__class__.__name__}(name="{self.name}")'
+
+     _registry = PipelineParameterRegistry()
+
+     def __init_subclass__(cls, **kwargs):
+         super().__init_subclass__(**kwargs)
+         PipelineParameter._registry.register(cls)
+
+     @classmethod
+     def _get_subclass(cls, parameter_type: str) -> Type["PipelineParameter"]:
+         return cls._registry.get(parameter_type)
+
+     def _is_valid_value(self, value: Any) -> bool:
+         return isinstance(value, self._represented_type())
+
+     def __hash__(self) -> int:
+         return hash((self.type, self.name))
+
+     def __eq__(self, other: object) -> bool:
+         if not isinstance(other, PipelineParameter):
+             return False
+         return self.__hash__() == other.__hash__()
+

  class PipelineInput:
      """
@@ -138,6 +181,25 @@ class OperatorOutputs(ABC):
              outputs[field.name] = field.type(owner, field.name)
          return cls(**outputs)

+     def downstream_inputs(self) -> list[PipelineInput]:
+         inputs = []
+         for field in fields(self):
+             inputs.extend(getattr(self, field.name).downstream_inputs)
+         return inputs
+
+
+ class OperatorRegistry:
+     def __init__(self):
+         self.operators = {}
+
+     def register(self, operator_class: Type["Operator"]) -> None:
+         self.operators[operator_class.__name__] = operator_class
+
+     def get(self, operator_name: str) -> Type["Operator"]:
+         if operator_name not in self.operators:
+             raise ValueError(f"Unknown operator: {operator_name}")
+         return self.operators[operator_name]
+

  TOutputs = TypeVar("TOutputs", bound=OperatorOutputs)

@@ -157,53 +219,92 @@ class Operator(Generic[TOutputs], ABC):
          self.outputs = outputs
          ensure_yamlizable(self._params_dict()[0], "Operator parameters")

-     def _to_dict(self, id_for_task: dict) -> tuple[dict, list[PipelineParameter]]:
-         params, params_list = self._params_dict()
+     def is_source(self) -> bool:
+         return len(self._inputs.inputs) == 0
+
+     def inputs_dict(self) -> dict[str, tuple["Operator", str]]:
+         inputs = {}
+         for pipeline_input in self._inputs.inputs:
+             inputs[pipeline_input.name] = (
+                 pipeline_input.upstream_output.owner,
+                 pipeline_input.upstream_output.name,
+             )
+         return inputs
+
+     def downstream_tasks(self) -> list["Operator"]:
+         return [input.owner for input in self.outputs.downstream_inputs()]
+
+     def _to_dict(self, id_for_task: dict) -> tuple[dict, set[PipelineParameter]]:
+         params, pipeline_params_set = self._params_dict()
          d = {
              "name": self._task_name,
              "operator": self._operator_name,
              "params": params,
              "inputs": self._inputs._to_dict(id_for_task),
          }
-         return d, params_list
+         return d, pipeline_params_set

-     def _params_dict(self) -> tuple[dict, list[PipelineParameter]]:
+     def _params_dict(self) -> tuple[dict, set[PipelineParameter]]:
          d = {}
-         params = []
+         pipeline_params = set()
          for name, value in self._params.items():
              if hasattr(value, "_to_pipeline_dict"):
                  d[name], downstream_params = value._to_pipeline_dict()
-                 params.extend(downstream_params)
+                 pipeline_params.update(downstream_params)
              else:
                  d[name] = value
-         return d, params
+         return d, pipeline_params

      def __str__(self) -> str:
          return f'{self._operator_name}(name="{self._task_name}")'

+     _registry = OperatorRegistry()
+
+     def __init_subclass__(cls, **kwargs):
+         super().__init_subclass__(**kwargs)
+         Operator._registry.register(cls)
+
+     @classmethod
+     def _get_subclass(cls, operator_name: str) -> Type["Operator"]:
+         return cls._registry.get(operator_name)
+
+     @classmethod
+     def _parse_params(cls, params: dict) -> dict:
+         # Operators with params that are just primitives or PipelineParams have no parsing to do.
+         # Operators with more complicated params should override this method.
+         return params
+

- @experimental
  class Pipeline:
-     def __init__(self, name: str, tasks: list[Operator]):
-         self.name = name
+     def __init__(self, tasks: list[Operator]):
          self.tasks = tasks
+         self._task_ids = self._assign_ids_to_tasks()

      def to_yaml(self) -> str:
          return yaml.safe_dump(self._to_dict())

-     def _to_dict(self) -> dict:
-         id_for_task = self._assign_ids_to_tasks()
+     def pipeline_params(self) -> set[PipelineParameter]:
+         return self._tasks_dict_and_params()[1]
+
+     def _get_task_id(self, task: Operator) -> str:
+         return self._task_ids[task]
+
+     def _tasks_dict_and_params(self) -> tuple[dict, set[PipelineParameter]]:
+         id_for_task = self._task_ids
          tasks = {}
-         params = []
+         params = set()
          for task in id_for_task.keys():
              task_dict, referenced_params = task._to_dict(id_for_task)
              tasks[id_for_task[task]] = task_dict
-             params.extend(referenced_params)
+             params.update(referenced_params)
+         return tasks, params
+
+     def _to_dict(self) -> dict:
+         tasks, params = self._tasks_dict_and_params()

          d = {
              "lc_pipeline": {
                  "schema_version": 1,
-                 "name": self.name,
                  "params": self._pipeline_params_dict(params),
                  "tasks": tasks,
              }
@@ -214,7 +315,7 @@ class Pipeline:
      def _assign_ids_to_tasks(self) -> dict[Operator, str]:
          return {task: f"t{i + 1}-{task._operator_name}" for i, task in enumerate(self.tasks)}

-     def _pipeline_params_dict(self, params: list[PipelineParameter]) -> dict:
+     def _pipeline_params_dict(self, params: set[PipelineParameter]) -> dict:
          d: dict[str, dict] = {}
          for p in params:
              if p.name in d and d[p.name]["type"] != p.type:
@@ -223,3 +324,86 @@ class Pipeline:
              )
          d[p.name] = {"type": p.type}
          return d
+
+     @classmethod
+     def _from_yaml(cls, yaml_str: str) -> Self:
+         d = yaml.safe_load(yaml_str)
+         if "lc_pipeline" not in d:
+             raise ValueError("Invalid pipeline YAML: missing 'lc_pipeline' key")
+
+         d = d["lc_pipeline"]
+         if "schema_version" not in d:
+             raise ValueError("Invalid pipeline YAML: missing 'schema_version' key")
+         if "tasks" not in d:
+             raise ValueError("Invalid pipeline YAML: missing 'tasks' key")
+
+         if d["schema_version"] != 1:
+             raise ValueError(f"Unsupported schema version: {d['schema_version']}")
+
+         # first, parse the pipeline parameters...
+         parsed_params = {}
+         for param_name, param_metadata in d.get("params", {}).items():
+             parsed_params[param_name] = PipelineParameter._get_subclass(param_metadata["type"])(
+                 param_name
+             )
+
+         # ...and use them as replacements for any references in the tasks' parameters
+         for task_dict in d["tasks"].values():
+             task_dict["params"] = _recursive_replace_pipeline_params(
+                 task_dict["params"], parsed_params
+             )
+
+         # then, finish parsing the tasks
+         parsed_tasks = {}
+         for task_id in d["tasks"]:
+             _parse_task(d, task_id, parsed_tasks)
+
+         return cls(list(parsed_tasks.values()))
+
+
+ def _recursive_replace_pipeline_params(d: Any, parsed_params: dict) -> Any:
+     if isinstance(d, dict):
+         if "$pipeline_param" in d:
+             # d is a dict representation of a PipelineParameter, so return the actual PipelineParameter
+             pp_name = d["$pipeline_param"]
+             if pp_name not in parsed_params:
+                 raise ValueError(
+                     f'Pipeline parameter "{pp_name}" referenced in a pipeline task, but not found in pipeline\'s declared parameters'
+                 )
+             return parsed_params[pp_name]
+         else:
+             return {
+                 key: _recursive_replace_pipeline_params(value, parsed_params)
+                 for key, value in d.items()
+             }
+     elif isinstance(d, list):
+         return [_recursive_replace_pipeline_params(item, parsed_params) for item in d]
+     else:
+         return d
+
+
+ def _parse_task(pipeline_dict: dict, task_id: str, all_tasks: dict[str, Operator]) -> Operator:
+     all_tasks_dict = pipeline_dict["tasks"]
+     if task_id in all_tasks:
+         return all_tasks[task_id]
+     task_dict = all_tasks_dict[task_id]
+     operator_name = task_dict["operator"]
+     operator_class = Operator._get_subclass(operator_name)
+
+     parsed_inputs = {}
+     for input_name, input_value in task_dict["inputs"].items():
+         source_task_id, source_output_name = input_value.split(".")
+         source_task = _parse_task(pipeline_dict, source_task_id, all_tasks)
+         source_output = getattr(source_task.outputs, source_output_name)
+         parsed_inputs[input_name] = source_output
+
+     parsed_params = operator_class._parse_params(task_dict["params"])
+
+     op_params = {
+         "task_name": task_dict["name"],
+         **parsed_params,
+         **parsed_inputs,
+     }
+     operator = operator_class(**op_params)
+     all_tasks[task_id] = operator
+     return operator
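
Taken together, the registries make the new `_from_yaml` an inverse of `to_yaml`: every `PipelineParameter` and `Operator` subclass self-registers via `__init_subclass__`, so the loader can map the `type` and `operator` strings found in the YAML back to classes. A hedged round-trip sketch (the task variables are illustrative; the methods are the ones defined above):

    pipeline = Pipeline([read_geometry, mesh, simulate])  # ids assigned as t1-..., t2-..., t3-...
    doc = pipeline.to_yaml()                              # params + tasks under the "lc_pipeline" key

    restored = Pipeline._from_yaml(doc)                   # rebuilds tasks via Operator._get_subclass
    restored.pipeline_params()                            # set of parameters, deduped by (type, name)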
luminarycloud/pipelines/operators.py CHANGED
@@ -1,7 +1,6 @@
  # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
  from dataclasses import dataclass

- from .._helpers.warnings import experimental
  from .core import Operator, OperatorInputs, OperatorOutputs, PipelineOutput
  from .parameters import StringPipelineParameter
  from ..meshing import MeshGenerationParams
@@ -40,7 +39,6 @@ class ReadGeometryOutputs(OperatorOutputs):
      """


- @experimental
  class ReadGeometry(Operator[ReadGeometryOutputs]):
      """
      Reads a Geometry into the Pipeline.
@@ -79,7 +77,6 @@ class ModifyGeometryOutputs(OperatorOutputs):


  # TODO: figure out what `mods` actually is. What does the non-pipeline geo mod interface look like?
- @experimental
  class ModifyGeometry(Operator[ModifyGeometryOutputs]):
      """
      Modifies a Geometry.
@@ -121,15 +118,14 @@ class MeshOutputs(OperatorOutputs):
      """The Mesh generated from the given Geometry."""


- @experimental
  class Mesh(Operator[MeshOutputs]):
      """
      Generates a Mesh from a Geometry.

      Parameters
      ----------
-     mesh_gen_params : MeshGenerationParams
-         The parameters to use for mesh generation.
+     max_cv_count : int
+         The maximum number of control volumes to generate.
      geometry : PipelineOutputGeometry
          The Geometry to mesh.

@@ -145,16 +141,23 @@ class Mesh(Operator[MeshOutputs]):
          self,
          *,
          task_name: str | None = None,
-         mesh_gen_params: MeshGenerationParams,
+         max_cv_count: int,
          geometry: PipelineOutputGeometry,
      ):
          super().__init__(
              task_name,
-             {"mesh_gen_params": mesh_gen_params},
+             {"max_cv_count": max_cv_count},
              OperatorInputs(self, geometry=(PipelineOutputGeometry, geometry)),
              MeshOutputs._instantiate_for(self),
          )

+     # TODO: bring back the full MeshGenerationParams, but we need to be able to hydrate it from the
+     # pipeline YAML. I can probably bake that logic into PipelineDictable, `from_pipeline_dict` or
+     # something.
+     # @classmethod
+     # def _parse_params(cls, params: dict) -> dict:
+     #     return {"mesh_gen_params": MeshGenerationParams.from_pipeline_dict(**params["mesh_gen_params"])}
+

  @dataclass
  class SimulateOutputs(OperatorOutputs):
@@ -162,7 +165,6 @@ class SimulateOutputs(OperatorOutputs):
      """The Simulation."""


- @experimental
  class Simulate(Operator[SimulateOutputs]):
      """
      Runs a Simulation.
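
A sketch of the narrowed `Mesh` signature in use (illustrative only: `ReadGeometry`'s arguments are elided, and the `geometry` output attribute name is assumed from the docstrings above):

    read = ReadGeometry(...)               # parameters elided; see the ReadGeometry hunk
    mesh = Mesh(
        task_name="mesh-1",
        max_cv_count=1_000_000,            # replaces the old mesh_gen_params argument
        geometry=read.outputs.geometry,    # a PipelineOutputGeometry from the upstream task
    )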
luminarycloud/pipelines/parameters.py CHANGED
@@ -1,5 +1,6 @@
  # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
  from .core import PipelineParameter
+ from typing import Type


  class StringPipelineParameter(PipelineParameter):
@@ -8,7 +9,12 @@ class StringPipelineParameter(PipelineParameter):
      allow its value to be set when the Pipeline is invoked.
      """

-     def _type(self) -> str:
+     @classmethod
+     def _represented_type(cls) -> Type:
+         return str
+
+     @classmethod
+     def _type_name(cls) -> str:
          return "string"


@@ -18,7 +24,12 @@ class FloatPipelineParameter(PipelineParameter):
      allow its value to be set when the Pipeline is invoked.
      """

-     def _type(self) -> str:
+     @classmethod
+     def _represented_type(cls) -> Type:
+         return float
+
+     @classmethod
+     def _type_name(cls) -> str:
          return "float"


@@ -28,7 +39,12 @@ class IntPipelineParameter(PipelineParameter):
      allow its value to be set when the Pipeline is invoked.
      """

-     def _type(self) -> str:
+     @classmethod
+     def _represented_type(cls) -> Type:
+         return int
+
+     @classmethod
+     def _type_name(cls) -> str:
          return "int"


@@ -38,5 +54,10 @@ class BoolPipelineParameter(PipelineParameter):
      allow its value to be set when the Pipeline is invoked.
      """

-     def _type(self) -> str:
+     @classmethod
+     def _represented_type(cls) -> Type:
+         return bool
+
+     @classmethod
+     def _type_name(cls) -> str:
          return "bool"
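
Each concrete parameter now answers two class-level questions: which Python type its values must have (`_represented_type`, backing `_is_valid_value` in core.py) and which string names it in pipeline YAML (`_type_name`). A small sketch, assuming the one-argument name constructor used elsewhere in this diff:

    p = IntPipelineParameter("iterations")
    p.type                    # "int"   (routed through _type_name)
    p._is_valid_value(100)    # True    (isinstance(100, int))
    p._is_valid_value("100")  # False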
luminarycloud/project.py CHANGED
@@ -27,7 +27,7 @@ from ._helpers import (
      upload_mesh,
      upload_table_as_json,
  )
- from ._helpers.warnings import deprecated, experimental
+ from ._helpers.warnings import deprecated
  from ._proto.api.v0.luminarycloud.geometry import geometry_pb2 as geometrypb
  from ._proto.api.v0.luminarycloud.mesh import mesh_pb2 as meshpb
  from ._proto.api.v0.luminarycloud.named_variable_set import (
@@ -450,7 +450,6 @@ class Project(ProtoWrapperBase):
          # The name is lost in to/from proto conversions so make it equal to the id for consistency.
          return RectilinearTable(id=name, name=name, table_type=table_type)

-     @experimental
      def set_surface_deformation(
          self,
          file_path: PathLike | str,
@@ -610,14 +609,13 @@ class Project(ProtoWrapperBase):
          res = get_default_client().CreateSimulationTemplate(req)
          return lc.SimulationTemplate(res.simulation_template)

-     @experimental
      def create_named_variable_set(
          self, name: str, named_variables: dict[str, LcFloat]
      ) -> NamedVariableSet:
          """
          Create a new named variable set.

-         Note: This feature is experimental and may change or be removed without notice.
+         .. warning:: This feature is experimental and may change or be removed without notice.
          """
          req = namedvariablepb.CreateNamedVariableSetRequest(
              project_id=self.id,
@@ -638,7 +636,6 @@ class Project(ProtoWrapperBase):
          return [lc.NamedVariableSet(n) for n in res.named_variable_sets]


- @experimental
  def add_named_variables_from_csv(project: Project, csv_path: str) -> list[NamedVariableSet]:
      """
      This function reads named variables from a CSV file and creates corresponding NamedVariableSets in the given project.
@@ -646,6 +643,8 @@ def add_named_variables_from_csv(project: Project, csv_path: str) -> list[NamedV
      name, var1, var2, ...
      name1, val1, val2, ...
      name2, val1, val2, ...
+
+     .. warning:: This feature is experimental and may change or be removed without notice.
      """
      import csv

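For reference, a CSV in the shape the docstring describes (the variable names and values are made up); each row becomes one NamedVariableSet named by its first column:

    name, velocity, angle_of_attack
    case-a, 10.0, 2.5
    case-b, 20.0, 5.0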
luminarycloud/simulation_param.py CHANGED
@@ -10,7 +10,6 @@ from luminarycloud._helpers._simulation_params_from_json import (
      simulation_params_from_json_path,
      simulation_params_from_json_dict,
  )
- from luminarycloud._helpers.warnings import experimental
  from luminarycloud._proto.client import simulation_pb2 as clientpb
  from luminarycloud._proto.client.entity_pb2 import EntityIdentifier
  from luminarycloud._proto.output import output_pb2 as outputpb
@@ -257,7 +256,6 @@ class SimulationParam(_SimulationParam):
              )
          )

-     @experimental
      def configure_adjoint_surface_output(
          self,
          quantity_type: QuantityType,
luminarycloud/simulation_template.py CHANGED
@@ -8,7 +8,7 @@ from difflib import Differ
  from .enum import (
      TableType,
  )
- from ._helpers.warnings import experimental, deprecated
+ from ._helpers.warnings import deprecated
  from ._client import get_default_client
  from ._helpers._simulation_params_from_json import simulation_params_from_json_path
  from ._helpers._timestamp_to_datetime import timestamp_to_datetime
@@ -430,7 +430,6 @@ class SimulationTemplate(ProtoWrapperBase):
          """
          return delete_stopping_condition(self.id, id)

-     @experimental
      def get_general_stopping_conditions(self) -> GeneralStoppingConditions:
          """
          Get the general stopping conditions for this simulation template.
@@ -439,7 +438,6 @@ class SimulationTemplate(ProtoWrapperBase):
          """
          return get_general_stopping_conditions(self.id)

-     @experimental
      def update_general_stopping_conditions(
          self,
          max_iterations: int | None = None,
luminarycloud/solution.py CHANGED
@@ -6,7 +6,6 @@ from os import PathLike
  import luminarycloud as lc

  from ._client import get_default_client
- from ._helpers.warnings import experimental
  from ._helpers.download import (
      download_surface_solution,
      download_volume_solution,
@@ -146,14 +145,13 @@ class Solution(ProtoWrapperBase):
          stream = download_surface_sensitivity_data(get_default_client(), self.id)
          _handle_surface_data_stream(stream, dst)

-     @experimental
      def download_parameter_sensitivity_data(self, dst: Optional[PathLike] = None) -> None:
          """
          Download the parameter sensitivity data associated with an adjoint solution into the
          destination file or into a default-named file. The data consists of parameter names and
          sensitivity values (d "adjoint output" / d "SimulationParam parameter").

-         NOTE: This is a very experimental feature, likely to change in the future in favor of
+         .. warning:: This is a very experimental feature, likely to change in the future in favor of
          including the sensitivities in a SimulationParam object directly.
          """
          stream = download_parameter_sensitivity_data(get_default_client(), self.id)
{luminarycloud-0.16.1.dist-info → luminarycloud-0.16.2.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: luminarycloud
- Version: 0.16.1
+ Version: 0.16.2
  Summary: Luminary Cloud SDK
  Project-URL: Homepage, https://www.luminarycloud.com/
  Project-URL: Documentation, https://app.luminarycloud.com/docs/api/