luminarycloud 0.22.1__py3-none-any.whl → 0.22.3__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
Files changed (78)
  1. luminarycloud/_client/client.py +5 -3
  2. luminarycloud/_client/retry_interceptor.py +13 -2
  3. luminarycloud/_helpers/__init__.py +9 -0
  4. luminarycloud/_helpers/_inference_jobs.py +227 -0
  5. luminarycloud/_helpers/_parse_iso_datetime.py +54 -0
  6. luminarycloud/_helpers/proto_decorator.py +38 -7
  7. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +45 -25
  8. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +30 -0
  9. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +34 -0
  10. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +12 -0
  11. luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2.py +25 -3
  12. luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2.pyi +30 -0
  13. luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2_grpc.py +34 -0
  14. luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2_grpc.pyi +12 -0
  15. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +140 -45
  16. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +322 -8
  17. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +68 -0
  18. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +24 -0
  19. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.py +93 -33
  20. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.pyi +105 -0
  21. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.py +70 -0
  22. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.pyi +29 -0
  23. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +29 -7
  24. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +39 -0
  25. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +36 -0
  26. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +18 -0
  27. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +70 -70
  28. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +5 -5
  29. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +163 -153
  30. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +37 -3
  31. luminarycloud/_proto/client/simulation_pb2.py +356 -337
  32. luminarycloud/_proto/client/simulation_pb2.pyi +89 -3
  33. luminarycloud/_proto/lcstatus/details/geometry/geometry_pb2.py +256 -0
  34. luminarycloud/_proto/lcstatus/details/geometry/geometry_pb2.pyi +472 -0
  35. luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.py +9 -4
  36. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +6 -3
  37. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +68 -0
  38. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +24 -0
  39. luminarycloud/_proto/quantity/quantity_pb2.pyi +1 -1
  40. luminarycloud/_wrapper.py +53 -7
  41. luminarycloud/feature_modification.py +25 -32
  42. luminarycloud/geometry.py +6 -6
  43. luminarycloud/outputs/__init__.py +2 -0
  44. luminarycloud/outputs/output_definitions.py +3 -3
  45. luminarycloud/outputs/stopping_conditions.py +94 -0
  46. luminarycloud/params/enum/_enum_wrappers.py +16 -0
  47. luminarycloud/params/geometry/shapes.py +33 -33
  48. luminarycloud/params/simulation/adaptive_mesh_refinement/__init__.py +1 -0
  49. luminarycloud/params/simulation/adaptive_mesh_refinement/active_region_.py +83 -0
  50. luminarycloud/params/simulation/adaptive_mesh_refinement/boundary_layer_profile_.py +1 -1
  51. luminarycloud/params/simulation/adaptive_mesh_refinement_.py +8 -1
  52. luminarycloud/physics_ai/__init__.py +15 -0
  53. luminarycloud/physics_ai/architectures.py +1 -1
  54. luminarycloud/physics_ai/datasets.py +246 -0
  55. luminarycloud/physics_ai/inference.py +166 -199
  56. luminarycloud/physics_ai/models.py +22 -0
  57. luminarycloud/pipelines/__init__.py +11 -0
  58. luminarycloud/pipelines/api.py +106 -9
  59. luminarycloud/pipelines/core.py +358 -45
  60. luminarycloud/pipelines/flowables.py +138 -0
  61. luminarycloud/pipelines/stages.py +7 -31
  62. luminarycloud/project.py +56 -2
  63. luminarycloud/simulation.py +25 -0
  64. luminarycloud/types/__init__.py +2 -0
  65. luminarycloud/types/ids.py +2 -0
  66. luminarycloud/vis/__init__.py +1 -0
  67. luminarycloud/vis/filters.py +97 -0
  68. luminarycloud/vis/visualization.py +3 -0
  69. luminarycloud/volume_selection.py +6 -6
  70. luminarycloud/workflow_utils.py +149 -0
  71. {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.3.dist-info}/METADATA +1 -1
  72. {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.3.dist-info}/RECORD +73 -70
  73. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +0 -61
  74. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +0 -85
  75. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.py +0 -67
  76. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.pyi +0 -26
  77. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +0 -69
  78. {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.3.dist-info}/WHEEL +0 -0
luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi
@@ -8,6 +8,7 @@ import google.protobuf.descriptor
  import google.protobuf.internal.containers
  import google.protobuf.internal.enum_type_wrapper
  import google.protobuf.message
+ import google.protobuf.struct_pb2
  import google.protobuf.timestamp_pb2
  import luminarycloud._proto.api.v0.luminarycloud.common.common_pb2
  import luminarycloud._proto.base.base_pb2
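The new google.protobuf.struct_pb2 import backs the Struct-typed params, source_metadata, and parameter_schema fields introduced further down in this stub. A minimal sketch of filling a Struct with case parameters (the parameter names are made up for illustration):

```python
from google.protobuf import struct_pb2

# Hypothetical case parameters; any JSON-compatible dict works with Struct.update().
params = struct_pb2.Struct()
params.update({"angle_of_attack_deg": 4.0, "mach": 0.2, "tags": ["baseline"]})
```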
@@ -303,7 +304,11 @@ class SurfaceGroup(google.protobuf.message.Message):
  global___SurfaceGroup = SurfaceGroup

  class GetSolutionDataPhysicsAIRequest(google.protobuf.message.Message):
- """Request message for download and process solutions for physics ai"""
+ """Request message for download and process solutions for physics ai.
+
+ Surfaces from exclude_tags are merged with exclude_surfaces to form the final
+ list of excluded surfaces.
+ """

  DESCRIPTOR: google.protobuf.descriptor.Descriptor

@@ -331,11 +336,15 @@ class GetSolutionDataPhysicsAIRequest(google.protobuf.message.Message):
  SINGLE_PRECISION_FIELD_NUMBER: builtins.int
  INTERNAL_OPTIONS_FIELD_NUMBER: builtins.int
  EXPORT_SURFACE_GROUPS_FIELD_NUMBER: builtins.int
+ SKIP_TAR_CREATION_FIELD_NUMBER: builtins.int
+ EXCLUDE_TAGS_FIELD_NUMBER: builtins.int
  solution_id: builtins.str
  """Required. The globally unique identifier for the solution."""
  @property
  def exclude_surfaces(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
- """List of surfaces to exclude from surface solution during physics AI processing"""
+ """List of mesh boundary names to exclude from surface solution during physics AI processing.
+ See message-level comment for how this interacts with exclude_tags.
+ """
  fill_holes: builtins.float
  """Size threshold for filling holes in the surface mesh (0 or negative value means no filling)"""
  @property
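The comments above describe how exclude_surfaces and exclude_tags interact: geometry tags are resolved to mesh boundary names on the server, merged with the explicit exclusion list, and unknown tags are skipped. A purely illustrative sketch of that merge, with resolve_tag_to_boundaries standing in for the server-side tag lookup (not the actual implementation):

```python
def final_excluded(exclude_surfaces, exclude_tags, resolve_tag_to_boundaries):
    # Illustrative only: the server performs this resolution.
    excluded = set(exclude_surfaces)
    for tag in exclude_tags:
        # Missing tags resolve to an empty list, i.e. they are skipped gracefully.
        excluded |= set(resolve_tag_to_boundaries(tag))
    return sorted(excluded)
```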
@@ -353,7 +362,21 @@ class GetSolutionDataPhysicsAIRequest(google.protobuf.message.Message):
  """Internal options for physics AI processing only available for staff users."""
  @property
  def export_surface_groups(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SurfaceGroup]:
- """List of surface groups to export as individual STL files"""
+ """List of surface groups to export as individual STL files
+ Excluded surfaces will not appear in surface groups.
+ """
+ skip_tar_creation: builtins.bool
+ """If true, skip tar.gz creation and only produce folder output with individual files.
+ Internal use only - for dataset export where tar.gz is not needed.
+ """
+ @property
+ def exclude_tags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+ """List of geometry tag names (e.g., "Farfield") to exclude from surface solution.
+ Tags are resolved to mesh boundary names server-side. Returns an error if
+ exclude_tags is used on a simulation without geometry tags. Individual tags
+ that are not found are skipped gracefully to support heterogeneous datasets.
+ See message-level comment for how this interacts with exclude_surfaces.
+ """
  def __init__(
  self,
  *,
@@ -366,8 +389,10 @@ class GetSolutionDataPhysicsAIRequest(google.protobuf.message.Message):
  single_precision: builtins.bool = ...,
  internal_options: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
  export_surface_groups: collections.abc.Iterable[global___SurfaceGroup] | None = ...,
+ skip_tar_creation: builtins.bool = ...,
+ exclude_tags: collections.abc.Iterable[builtins.str] | None = ...,
  ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["exclude_surfaces", b"exclude_surfaces", "export_surface_groups", b"export_surface_groups", "fill_holes", b"fill_holes", "internal_options", b"internal_options", "process_volume", b"process_volume", "single_precision", b"single_precision", "solution_id", b"solution_id", "surface_fields_to_keep", b"surface_fields_to_keep", "volume_fields_to_keep", b"volume_fields_to_keep"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["exclude_surfaces", b"exclude_surfaces", "exclude_tags", b"exclude_tags", "export_surface_groups", b"export_surface_groups", "fill_holes", b"fill_holes", "internal_options", b"internal_options", "process_volume", b"process_volume", "single_precision", b"single_precision", "skip_tar_creation", b"skip_tar_creation", "solution_id", b"solution_id", "surface_fields_to_keep", b"surface_fields_to_keep", "volume_fields_to_keep", b"volume_fields_to_keep"]) -> None: ...

  global___GetSolutionDataPhysicsAIRequest = GetSolutionDataPhysicsAIRequest

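Given the constructor above, a request that combines an explicit boundary exclusion with a tag-based one might look like the sketch below; the solution ID and the surface/tag names are placeholders:

```python
from luminarycloud._proto.api.v0.luminarycloud.physics_ai import physics_ai_pb2

request = physics_ai_pb2.GetSolutionDataPhysicsAIRequest(
    solution_id="sol-0123456789",              # placeholder ID
    exclude_surfaces=["wheel_contact_patch"],  # explicit mesh boundary names
    exclude_tags=["Farfield"],                 # geometry tags, resolved server-side
    skip_tar_creation=True,                    # folder output only (internal use)
)
```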
@@ -377,16 +402,35 @@ class GetSolutionDataPhysicsAIResponse(google.protobuf.message.Message):
  DESCRIPTOR: google.protobuf.descriptor.Descriptor

  FILE_FIELD_NUMBER: builtins.int
+ PHYSICS_AI_FILES_FIELD_NUMBER: builtins.int
+ PHYSICS_AI_FOLDER_URL_FIELD_NUMBER: builtins.int
  @property
  def file(self) -> luminarycloud._proto.api.v0.luminarycloud.common.common_pb2.File:
  """The processed solution data. Either a signed URL or a file ID."""
+ @property
+ def physics_ai_files(self) -> global___PhysicsAiDatasetCaseFiles:
+ """TODO: Need to implement operations/operation IDs for long-running processes.
+ Users should be able to poll for progress and then request the files/their location
+ rather than having synchronous blocking calls for potentially long-running exports.
+
+ Physics AI processed files. Contains individual files (merged_surfaces.vtp,
+ merged_surfaces.stl, etc.) that were generated during processing.
+ Internal use only - for server-side file operations.
+ """
+ physics_ai_folder_url: builtins.str
+ """GCS folder URL containing the physics AI processed files (e.g., "gs://bucket/path/").
+ Internal use only - for server-side file operations. Use with physics_ai_files
+ to construct full file paths: folder_url + file.name + "." + file.file_type
+ """
  def __init__(
  self,
  *,
  file: luminarycloud._proto.api.v0.luminarycloud.common.common_pb2.File | None = ...,
+ physics_ai_files: global___PhysicsAiDatasetCaseFiles | None = ...,
+ physics_ai_folder_url: builtins.str = ...,
  ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["file", b"file"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["file", b"file"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["file", b"file", "physics_ai_files", b"physics_ai_files"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["file", b"file", "physics_ai_files", b"physics_ai_files", "physics_ai_folder_url", b"physics_ai_folder_url"]) -> None: ...

  global___GetSolutionDataPhysicsAIResponse = GetSolutionDataPhysicsAIResponse

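The physics_ai_folder_url comment spells out how full object paths are assembled from the folder URL plus each file's name and type. A small sketch, using hypothetical response contents in place of an actual service reply:

```python
from luminarycloud._proto.api.v0.luminarycloud.physics_ai import physics_ai_pb2

# Hypothetical response contents; in practice this comes back from the service.
response = physics_ai_pb2.GetSolutionDataPhysicsAIResponse(
    physics_ai_folder_url="gs://bucket/path/",
    physics_ai_files=physics_ai_pb2.PhysicsAiDatasetCaseFiles(
        files=[physics_ai_pb2.PhysicsAiDatasetCaseFile(name="merged_surfaces", file_type="vtp")]
    ),
)

# folder_url + file.name + "." + file.file_type, per the field comment above.
paths = [
    f"{response.physics_ai_folder_url}{f.name}.{f.file_type}"
    for f in response.physics_ai_files.files
]
# -> ["gs://bucket/path/merged_surfaces.vtp"]
```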
@@ -431,6 +475,7 @@ class PhysicsAiTrainingJob(google.protobuf.message.Message):
  CREATION_TIME_FIELD_NUMBER: builtins.int
  UPDATE_TIME_FIELD_NUMBER: builtins.int
  COMPLETION_TIME_FIELD_NUMBER: builtins.int
+ DATASET_ID_FIELD_NUMBER: builtins.int
  id: builtins.str
  """Unique identifier for the training job."""
  architecture_version_id: builtins.str
@@ -465,6 +510,8 @@ class PhysicsAiTrainingJob(google.protobuf.message.Message):
  @property
  def completion_time(self) -> google.protobuf.timestamp_pb2.Timestamp:
  """Job completion time, set upon job completion. (mutable)"""
+ dataset_id: builtins.str
+ """Dataset ID used for training (if using dataset-based training)."""
  def __init__(
  self,
  *,
@@ -483,9 +530,10 @@ class PhysicsAiTrainingJob(google.protobuf.message.Message):
  creation_time: google.protobuf.timestamp_pb2.Timestamp | None = ...,
  update_time: google.protobuf.timestamp_pb2.Timestamp | None = ...,
  completion_time: google.protobuf.timestamp_pb2.Timestamp | None = ...,
+ dataset_id: builtins.str = ...,
  ) -> None: ...
  def HasField(self, field_name: typing_extensions.Literal["completion_time", b"completion_time", "creation_time", b"creation_time", "status", b"status", "update_time", b"update_time"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["architecture_version_id", b"architecture_version_id", "base_model_version_id", b"base_model_version_id", "completion_time", b"completion_time", "creation_time", b"creation_time", "error_message", b"error_message", "external_dataset_uri", b"external_dataset_uri", "id", b"id", "initialization_type", b"initialization_type", "output_model_version_id", b"output_model_version_id", "status", b"status", "training_config", b"training_config", "training_data_source_type", b"training_data_source_type", "training_description", b"training_description", "update_time", b"update_time", "user_id", b"user_id"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["architecture_version_id", b"architecture_version_id", "base_model_version_id", b"base_model_version_id", "completion_time", b"completion_time", "creation_time", b"creation_time", "dataset_id", b"dataset_id", "error_message", b"error_message", "external_dataset_uri", b"external_dataset_uri", "id", b"id", "initialization_type", b"initialization_type", "output_model_version_id", b"output_model_version_id", "status", b"status", "training_config", b"training_config", "training_data_source_type", b"training_data_source_type", "training_description", b"training_description", "update_time", b"update_time", "user_id", b"user_id"]) -> None: ...

  global___PhysicsAiTrainingJob = PhysicsAiTrainingJob

@@ -501,6 +549,7 @@ class SubmitTrainingJobRequest(google.protobuf.message.Message):
  TRAINING_CONFIG_FIELD_NUMBER: builtins.int
  INITIALIZATION_TYPE_FIELD_NUMBER: builtins.int
  BASE_MODEL_VERSION_ID_FIELD_NUMBER: builtins.int
+ DATASET_ID_FIELD_NUMBER: builtins.int
  architecture_version_id: builtins.str
  """Architecture version ID to train."""
  training_description: builtins.str
@@ -516,6 +565,8 @@ class SubmitTrainingJobRequest(google.protobuf.message.Message):
  """Model initialization type."""
  base_model_version_id: builtins.str
  """Base model version ID (if initializing from existing model)."""
+ dataset_id: builtins.str
+ """Dataset ID to use for training (if using dataset-based training)."""
  def __init__(
  self,
  *,
@@ -526,8 +577,9 @@ class SubmitTrainingJobRequest(google.protobuf.message.Message):
  training_config: builtins.str = ...,
  initialization_type: global___ModelInitializationType.ValueType = ...,
  base_model_version_id: builtins.str = ...,
+ dataset_id: builtins.str = ...,
  ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["architecture_version_id", b"architecture_version_id", "base_model_version_id", b"base_model_version_id", "external_dataset_uri", b"external_dataset_uri", "initialization_type", b"initialization_type", "training_config", b"training_config", "training_description", b"training_description", "training_solutions", b"training_solutions"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["architecture_version_id", b"architecture_version_id", "base_model_version_id", b"base_model_version_id", "dataset_id", b"dataset_id", "external_dataset_uri", b"external_dataset_uri", "initialization_type", b"initialization_type", "training_config", b"training_config", "training_description", b"training_description", "training_solutions", b"training_solutions"]) -> None: ...

  global___SubmitTrainingJobRequest = SubmitTrainingJobRequest

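With the new dataset_id field, a dataset-based training submission might look like the following sketch; the IDs and description are placeholders, and every other field keeps its default:

```python
from luminarycloud._proto.api.v0.luminarycloud.physics_ai import physics_ai_pb2

request = physics_ai_pb2.SubmitTrainingJobRequest(
    architecture_version_id="arch-version-123",   # placeholder ID
    training_description="fine-tune on locked dataset",
    dataset_id="dataset-456",                     # replaces per-solution training sources
)
```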
@@ -577,3 +629,265 @@ class CancelTrainingJobResponse(google.protobuf.message.Message):
  ) -> None: ...

  global___CancelTrainingJobResponse = CancelTrainingJobResponse
+
+ class PhysicsAiDataset(google.protobuf.message.Message):
+ """A Physics AI dataset"""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ ID_FIELD_NUMBER: builtins.int
+ CREATED_BY_FIELD_NUMBER: builtins.int
+ NAME_FIELD_NUMBER: builtins.int
+ DESCRIPTION_FIELD_NUMBER: builtins.int
+ IS_LOCKED_FIELD_NUMBER: builtins.int
+ CREATION_TIME_FIELD_NUMBER: builtins.int
+ UPDATE_TIME_FIELD_NUMBER: builtins.int
+ LOCKED_AT_FIELD_NUMBER: builtins.int
+ EXPORT_CONFIG_FIELD_NUMBER: builtins.int
+ id: builtins.str
+ """Unique identifier for the dataset."""
+ created_by: builtins.str
+ """User ID who created the dataset (unset for platform-curated datasets)."""
+ name: builtins.str
+ """Name of the dataset."""
+ description: builtins.str
+ """Description of the dataset."""
+ is_locked: builtins.bool
+ """Whether the dataset is locked (immutable after first training)."""
+ @property
+ def creation_time(self) -> google.protobuf.timestamp_pb2.Timestamp:
+ """Dataset creation time."""
+ @property
+ def update_time(self) -> google.protobuf.timestamp_pb2.Timestamp:
+ """Dataset last update time."""
+ @property
+ def locked_at(self) -> google.protobuf.timestamp_pb2.Timestamp:
+ """Dataset locked time (if locked)."""
+ @property
+ def export_config(self) -> global___GetSolutionDataPhysicsAIRequest:
+ """Export configuration for extracting solution data for cases. The field
+ solution_id is ignored.
+ """
+ def __init__(
+ self,
+ *,
+ id: builtins.str = ...,
+ created_by: builtins.str | None = ...,
+ name: builtins.str = ...,
+ description: builtins.str | None = ...,
+ is_locked: builtins.bool = ...,
+ creation_time: google.protobuf.timestamp_pb2.Timestamp | None = ...,
+ update_time: google.protobuf.timestamp_pb2.Timestamp | None = ...,
+ locked_at: google.protobuf.timestamp_pb2.Timestamp | None = ...,
+ export_config: global___GetSolutionDataPhysicsAIRequest | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["_created_by", b"_created_by", "_description", b"_description", "created_by", b"created_by", "creation_time", b"creation_time", "description", b"description", "export_config", b"export_config", "locked_at", b"locked_at", "update_time", b"update_time"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["_created_by", b"_created_by", "_description", b"_description", "created_by", b"created_by", "creation_time", b"creation_time", "description", b"description", "export_config", b"export_config", "id", b"id", "is_locked", b"is_locked", "locked_at", b"locked_at", "name", b"name", "update_time", b"update_time"]) -> None: ...
+ @typing.overload
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["_created_by", b"_created_by"]) -> typing_extensions.Literal["created_by"] | None: ...
+ @typing.overload
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["_description", b"_description"]) -> typing_extensions.Literal["description"] | None: ...
+
+ global___PhysicsAiDataset = PhysicsAiDataset
+
+ class PhysicsAiDatasetCaseFile(google.protobuf.message.Message):
+ """A file in a Physics AI dataset case"""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ FILE_TYPE_FIELD_NUMBER: builtins.int
+ NAME_FIELD_NUMBER: builtins.int
+ file_type: builtins.str
+ """File type (e.g., "vtp", "stl", "vtu")."""
+ name: builtins.str
+ """File name (e.g., "merged_surfaces")."""
+ def __init__(
+ self,
+ *,
+ file_type: builtins.str = ...,
+ name: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["file_type", b"file_type", "name", b"name"]) -> None: ...
+
+ global___PhysicsAiDatasetCaseFile = PhysicsAiDatasetCaseFile
+
+ class PhysicsAiDatasetCaseFiles(google.protobuf.message.Message):
+ """Wrapper message for storing repeated PhysicsAiDatasetCaseFile in JSONB."""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ FILES_FIELD_NUMBER: builtins.int
+ @property
+ def files(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PhysicsAiDatasetCaseFile]: ...
+ def __init__(
+ self,
+ *,
+ files: collections.abc.Iterable[global___PhysicsAiDatasetCaseFile] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["files", b"files"]) -> None: ...
+
+ global___PhysicsAiDatasetCaseFiles = PhysicsAiDatasetCaseFiles
+
+ class PhysicsAiDatasetCase(google.protobuf.message.Message):
+ """A case in a Physics AI dataset"""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ CASE_ID_FIELD_NUMBER: builtins.int
+ SOLUTION_ID_FIELD_NUMBER: builtins.int
+ SIMULATION_ID_FIELD_NUMBER: builtins.int
+ PARAMS_FIELD_NUMBER: builtins.int
+ FILES_FIELD_NUMBER: builtins.int
+ SOURCE_METADATA_FIELD_NUMBER: builtins.int
+ case_id: builtins.str
+ """Unique identifier for this case within the dataset (server-generated)."""
+ solution_id: builtins.str
+ """Solution ID if this case was created from a simulation solution."""
+ simulation_id: builtins.str
+ """Simulation ID if this case was created from a simulation."""
+ @property
+ def params(self) -> google.protobuf.struct_pb2.Struct:
+ """Parameters for this case as JSON string."""
+ @property
+ def files(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PhysicsAiDatasetCaseFile]:
+ """Files associated with this case."""
+ @property
+ def source_metadata(self) -> google.protobuf.struct_pb2.Struct:
+ """Source metadata as JSON string."""
+ def __init__(
+ self,
+ *,
+ case_id: builtins.str = ...,
+ solution_id: builtins.str = ...,
+ simulation_id: builtins.str = ...,
+ params: google.protobuf.struct_pb2.Struct | None = ...,
+ files: collections.abc.Iterable[global___PhysicsAiDatasetCaseFile] | None = ...,
+ source_metadata: google.protobuf.struct_pb2.Struct | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["params", b"params", "source_metadata", b"source_metadata"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["case_id", b"case_id", "files", b"files", "params", b"params", "simulation_id", b"simulation_id", "solution_id", b"solution_id", "source_metadata", b"source_metadata"]) -> None: ...
+
+ global___PhysicsAiDatasetCase = PhysicsAiDatasetCase
+
+ class CreatePhysicsAiDatasetCaseInput(google.protobuf.message.Message):
+ """Input for creating a case in a Physics AI dataset (case_id is server-generated)"""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ SOLUTION_ID_FIELD_NUMBER: builtins.int
+ SIMULATION_ID_FIELD_NUMBER: builtins.int
+ PARAMS_FIELD_NUMBER: builtins.int
+ FILES_FIELD_NUMBER: builtins.int
+ SOURCE_METADATA_FIELD_NUMBER: builtins.int
+ solution_id: builtins.str
+ """Solution ID if this case was created from a simulation solution."""
+ simulation_id: builtins.str
+ """Simulation ID if this case was created from a simulation."""
+ @property
+ def params(self) -> google.protobuf.struct_pb2.Struct:
+ """Parameters for this case as JSON string."""
+ @property
+ def files(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PhysicsAiDatasetCaseFile]:
+ """Files associated with this case."""
+ @property
+ def source_metadata(self) -> google.protobuf.struct_pb2.Struct:
+ """Source metadata as JSON string."""
+ def __init__(
+ self,
+ *,
+ solution_id: builtins.str = ...,
+ simulation_id: builtins.str = ...,
+ params: google.protobuf.struct_pb2.Struct | None = ...,
+ files: collections.abc.Iterable[global___PhysicsAiDatasetCaseFile] | None = ...,
+ source_metadata: google.protobuf.struct_pb2.Struct | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["params", b"params", "source_metadata", b"source_metadata"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["files", b"files", "params", b"params", "simulation_id", b"simulation_id", "solution_id", b"solution_id", "source_metadata", b"source_metadata"]) -> None: ...
+
+ global___CreatePhysicsAiDatasetCaseInput = CreatePhysicsAiDatasetCaseInput
+
+ class CreateDatasetRequest(google.protobuf.message.Message):
+ """Request message for creating a dataset"""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ NAME_FIELD_NUMBER: builtins.int
+ DESCRIPTION_FIELD_NUMBER: builtins.int
+ CASES_FIELD_NUMBER: builtins.int
+ PARAMETER_SCHEMA_FIELD_NUMBER: builtins.int
+ EXPORT_CONFIG_FIELD_NUMBER: builtins.int
+ name: builtins.str
+ """Name of the dataset."""
+ description: builtins.str
+ """Optional description of the dataset."""
+ @property
+ def cases(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CreatePhysicsAiDatasetCaseInput]:
+ """Cases to add to the dataset (case_id will be server-generated)."""
+ @property
+ def parameter_schema(self) -> google.protobuf.struct_pb2.Struct:
+ """JSON schema defining allowed parameters for cases in this dataset."""
+ @property
+ def export_config(self) -> global___GetSolutionDataPhysicsAIRequest:
+ """Export configuration for extracting solution data forthe cases. The
+ solution_id field is ignored.
+ """
+ def __init__(
+ self,
+ *,
+ name: builtins.str = ...,
+ description: builtins.str = ...,
+ cases: collections.abc.Iterable[global___CreatePhysicsAiDatasetCaseInput] | None = ...,
+ parameter_schema: google.protobuf.struct_pb2.Struct | None = ...,
+ export_config: global___GetSolutionDataPhysicsAIRequest | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["export_config", b"export_config", "parameter_schema", b"parameter_schema"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["cases", b"cases", "description", b"description", "export_config", b"export_config", "name", b"name", "parameter_schema", b"parameter_schema"]) -> None: ...
+
+ global___CreateDatasetRequest = CreateDatasetRequest
+
+ class CreateDatasetResponse(google.protobuf.message.Message):
+ """Response message for creating a dataset"""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ DATASET_FIELD_NUMBER: builtins.int
+ @property
+ def dataset(self) -> global___PhysicsAiDataset:
+ """The created dataset."""
+ def __init__(
+ self,
+ *,
+ dataset: global___PhysicsAiDataset | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["dataset", b"dataset"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["dataset", b"dataset"]) -> None: ...
+
+ global___CreateDatasetResponse = CreateDatasetResponse
+
+ class ListDatasetsRequest(google.protobuf.message.Message):
+ """Request message for listing datasets for the current user"""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+ global___ListDatasetsRequest = ListDatasetsRequest
+
+ class ListDatasetsResponse(google.protobuf.message.Message):
+ """Response message for listing datasets"""
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ DATASETS_FIELD_NUMBER: builtins.int
+ @property
+ def datasets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PhysicsAiDataset]: ...
+ def __init__(
+ self,
+ *,
+ datasets: collections.abc.Iterable[global___PhysicsAiDataset] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["datasets", b"datasets"]) -> None: ...
+
+ global___ListDatasetsResponse = ListDatasetsResponse
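Putting the new messages together, a CreateDatasetRequest with one case and a tag-based export configuration might be built as sketched below; the names, IDs, and parameters are illustrative only, and export_config's solution_id is ignored per the field comment:

```python
from google.protobuf import struct_pb2
from luminarycloud._proto.api.v0.luminarycloud.physics_ai import physics_ai_pb2

case_params = struct_pb2.Struct()
case_params.update({"angle_of_attack_deg": 4.0})  # hypothetical case parameter

request = physics_ai_pb2.CreateDatasetRequest(
    name="wing-sweep",                           # placeholder name
    description="AoA sweep exported for training",
    cases=[
        physics_ai_pb2.CreatePhysicsAiDatasetCaseInput(
            solution_id="sol-0123456789",        # placeholder ID
            params=case_params,
        )
    ],
    export_config=physics_ai_pb2.GetSolutionDataPhysicsAIRequest(
        exclude_tags=["Farfield"],               # solution_id is ignored here
    ),
)
```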
luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py
@@ -35,6 +35,16 @@ class PhysicsAiServiceStub(object):
  request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobRequest.SerializeToString,
  response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobResponse.FromString,
  )
+ self.CreateDataset = channel.unary_unary(
+ '/luminary.proto.api.v0.luminarycloud.physics_ai.PhysicsAiService/CreateDataset',
+ request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CreateDatasetRequest.SerializeToString,
+ response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CreateDatasetResponse.FromString,
+ )
+ self.ListDatasets = channel.unary_unary(
+ '/luminary.proto.api.v0.luminarycloud.physics_ai.PhysicsAiService/ListDatasets',
+ request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.ListDatasetsRequest.SerializeToString,
+ response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.ListDatasetsResponse.FromString,
+ )


  class PhysicsAiServiceServicer(object):
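The stub constructor above registers the two new unary-unary callables on the channel. A minimal client-side sketch, assuming a hypothetical endpoint and an insecure channel (the SDK's own client normally handles channel setup and authentication):

```python
import grpc
from luminarycloud._proto.api.v0.luminarycloud.physics_ai import (
    physics_ai_pb2,
    physics_ai_pb2_grpc,
)

# Hypothetical local endpoint for illustration only.
channel = grpc.insecure_channel("localhost:50051")
stub = physics_ai_pb2_grpc.PhysicsAiServiceStub(channel)

# List the caller's datasets via the new RPC.
datasets = stub.ListDatasets(physics_ai_pb2.ListDatasetsRequest()).datasets
```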
@@ -69,6 +79,20 @@ class PhysicsAiServiceServicer(object):
  context.set_details('Method not implemented!')
  raise NotImplementedError('Method not implemented!')

+ def CreateDataset(self, request, context):
+ """Creates a new Physics AI dataset
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def ListDatasets(self, request, context):
+ """Lists datasets for the current user
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+

  def add_PhysicsAiServiceServicer_to_server(servicer, server):
  rpc_method_handlers = {
@@ -92,6 +116,16 @@ def add_PhysicsAiServiceServicer_to_server(servicer, server):
  request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobRequest.FromString,
  response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobResponse.SerializeToString,
  ),
+ 'CreateDataset': grpc.unary_unary_rpc_method_handler(
+ servicer.CreateDataset,
+ request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CreateDatasetRequest.FromString,
+ response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CreateDatasetResponse.SerializeToString,
+ ),
+ 'ListDatasets': grpc.unary_unary_rpc_method_handler(
+ servicer.ListDatasets,
+ request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.ListDatasetsRequest.FromString,
+ response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.ListDatasetsResponse.SerializeToString,
+ ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
  'luminary.proto.api.v0.luminarycloud.physics_ai.PhysicsAiService', rpc_method_handlers)
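The handler registration above is what add_PhysicsAiServiceServicer_to_server wires up. A toy server-side sketch that overrides only the two new RPCs and registers the servicer (inherited methods still answer UNIMPLEMENTED); the in-memory storage and port are purely illustrative:

```python
from concurrent import futures

import grpc
from luminarycloud._proto.api.v0.luminarycloud.physics_ai import (
    physics_ai_pb2,
    physics_ai_pb2_grpc,
)


class InMemoryPhysicsAiService(physics_ai_pb2_grpc.PhysicsAiServiceServicer):
    """Toy servicer that only implements the two dataset RPCs added in this release."""

    def __init__(self):
        self._datasets = []

    def CreateDataset(self, request, context):
        dataset = physics_ai_pb2.PhysicsAiDataset(
            id=f"dataset-{len(self._datasets)}", name=request.name
        )
        self._datasets.append(dataset)
        return physics_ai_pb2.CreateDatasetResponse(dataset=dataset)

    def ListDatasets(self, request, context):
        return physics_ai_pb2.ListDatasetsResponse(datasets=self._datasets)


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
physics_ai_pb2_grpc.add_PhysicsAiServiceServicer_to_server(InMemoryPhysicsAiService(), server)
server.add_insecure_port("[::]:50051")  # hypothetical local port
server.start()
```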
@@ -170,3 +204,37 @@ class PhysicsAiService(object):
  proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobResponse.FromString,
  options, channel_credentials,
  insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def CreateDataset(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/luminary.proto.api.v0.luminarycloud.physics_ai.PhysicsAiService/CreateDataset',
+ proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CreateDatasetRequest.SerializeToString,
+ proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CreateDatasetResponse.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def ListDatasets(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/luminary.proto.api.v0.luminarycloud.physics_ai.PhysicsAiService/ListDatasets',
+ proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.ListDatasetsRequest.SerializeToString,
+ proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.ListDatasetsResponse.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
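These module-level static methods use grpc.experimental to issue one-shot calls without constructing a stub. A brief sketch, assuming a hypothetical TLS endpoint:

```python
import grpc
from luminarycloud._proto.api.v0.luminarycloud.physics_ai import (
    physics_ai_pb2,
    physics_ai_pb2_grpc,
)

# "apis.example.com:443" is a placeholder target; grpc.experimental manages the
# channel for each call when credentials are supplied this way.
response = physics_ai_pb2_grpc.PhysicsAiService.ListDatasets(
    physics_ai_pb2.ListDatasetsRequest(),
    "apis.example.com:443",
    channel_credentials=grpc.ssl_channel_credentials(),
)
```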
luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi
@@ -30,6 +30,16 @@ class PhysicsAiServiceStub:
  luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.SubmitTrainingJobResponse,
  ]
  """Submits a training job for a Physics AI architecture"""
+ CreateDataset: grpc.UnaryUnaryMultiCallable[
+ luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.CreateDatasetRequest,
+ luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.CreateDatasetResponse,
+ ]
+ """Creates a new Physics AI dataset"""
+ ListDatasets: grpc.UnaryUnaryMultiCallable[
+ luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.ListDatasetsRequest,
+ luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.ListDatasetsResponse,
+ ]
+ """Lists datasets for the current user"""

  class PhysicsAiServiceServicer(metaclass=abc.ABCMeta):
  """Manages physics ai architectures."""
@@ -62,5 +72,19 @@ class PhysicsAiServiceServicer(metaclass=abc.ABCMeta):
  context: grpc.ServicerContext,
  ) -> luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.SubmitTrainingJobResponse:
  """Submits a training job for a Physics AI architecture"""
+ @abc.abstractmethod
+ def CreateDataset(
+ self,
+ request: luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.CreateDatasetRequest,
+ context: grpc.ServicerContext,
+ ) -> luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.CreateDatasetResponse:
+ """Creates a new Physics AI dataset"""
+ @abc.abstractmethod
+ def ListDatasets(
+ self,
+ request: luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.ListDatasetsRequest,
+ context: grpc.ServicerContext,
+ ) -> luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.ListDatasetsResponse:
+ """Lists datasets for the current user"""

  def add_PhysicsAiServiceServicer_to_server(servicer: PhysicsAiServiceServicer, server: grpc.Server) -> None: ...