ansys-fluent-core 0.34.dev0__py3-none-any.whl → 0.35.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published.
Potentially problematic release.
This version of ansys-fluent-core might be problematic.
- ansys/fluent/core/__init__.py +48 -84
- ansys/fluent/core/codegen/allapigen.py +2 -2
- ansys/fluent/core/codegen/builtin_settingsgen.py +41 -13
- ansys/fluent/core/codegen/datamodelgen.py +3 -1
- ansys/fluent/core/codegen/print_fluent_version.py +2 -2
- ansys/fluent/core/codegen/settingsgen.py +18 -6
- ansys/fluent/core/codegen/tuigen.py +6 -5
- ansys/fluent/core/data_model_cache.py +2 -2
- ansys/fluent/core/docker/docker_compose.py +11 -9
- ansys/fluent/core/docker/utils.py +35 -0
- ansys/fluent/core/examples/downloads.py +8 -11
- ansys/fluent/core/exceptions.py +13 -1
- ansys/fluent/core/field_data_interfaces.py +239 -38
- ansys/fluent/core/file_session.py +167 -61
- ansys/fluent/core/fluent_connection.py +41 -26
- ansys/fluent/core/generated/api_tree/api_objects.json +1 -1
- ansys/fluent/core/generated/datamodel_231/flicing.py +40 -40
- ansys/fluent/core/generated/datamodel_231/meshing.py +231 -231
- ansys/fluent/core/generated/datamodel_232/flicing.py +50 -50
- ansys/fluent/core/generated/datamodel_232/meshing.py +189 -189
- ansys/fluent/core/generated/datamodel_241/flicing.py +30 -30
- ansys/fluent/core/generated/datamodel_241/meshing.py +290 -290
- ansys/fluent/core/generated/datamodel_242/flicing.py +50 -50
- ansys/fluent/core/generated/datamodel_242/meshing.py +331 -331
- ansys/fluent/core/generated/datamodel_242/part_management.py +6 -6
- ansys/fluent/core/generated/datamodel_251/flicing.py +65 -65
- ansys/fluent/core/generated/datamodel_251/meshing.py +300 -300
- ansys/fluent/core/generated/datamodel_251/part_management.py +6 -6
- ansys/fluent/core/generated/datamodel_252/flicing.py +25 -25
- ansys/fluent/core/generated/datamodel_252/meshing.py +382 -382
- ansys/fluent/core/generated/datamodel_252/part_management.py +10 -10
- ansys/fluent/core/generated/datamodel_261/flicing.py +45 -45
- ansys/fluent/core/generated/datamodel_261/meshing.py +454 -435
- ansys/fluent/core/generated/datamodel_261/part_management.py +5 -5
- ansys/fluent/core/generated/datamodel_261/preferences.py +7 -0
- ansys/fluent/core/generated/fluent_version_252.py +1 -1
- ansys/fluent/core/generated/fluent_version_261.py +3 -3
- ansys/fluent/core/generated/meshing/tui_261.py +54 -3
- ansys/fluent/core/generated/solver/settings_231.py +1 -0
- ansys/fluent/core/generated/solver/settings_231.pyi +3025 -1
- ansys/fluent/core/generated/solver/settings_232.py +1 -0
- ansys/fluent/core/generated/solver/settings_232.pyi +3425 -1
- ansys/fluent/core/generated/solver/settings_241.py +1 -0
- ansys/fluent/core/generated/solver/settings_241.pyi +4423 -1
- ansys/fluent/core/generated/solver/settings_242.py +1 -0
- ansys/fluent/core/generated/solver/settings_242.pyi +5474 -1
- ansys/fluent/core/generated/solver/settings_251.py +11 -0
- ansys/fluent/core/generated/solver/settings_251.pyi +6006 -1
- ansys/fluent/core/generated/solver/settings_252.py +11 -1
- ansys/fluent/core/generated/solver/settings_252.pyi +6782 -2
- ansys/fluent/core/generated/solver/settings_261.py +5592 -2740
- ansys/fluent/core/generated/solver/settings_261.pyi +10335 -1994
- ansys/fluent/core/generated/solver/settings_builtin.py +560 -38
- ansys/fluent/core/generated/solver/settings_builtin.pyi +24 -18
- ansys/fluent/core/generated/solver/tui_261.py +409 -285
- ansys/fluent/core/launcher/container_launcher.py +25 -6
- ansys/fluent/core/launcher/error_handler.py +1 -1
- ansys/fluent/core/launcher/fluent_container.py +97 -45
- ansys/fluent/core/launcher/launch_options.py +5 -4
- ansys/fluent/core/launcher/launcher.py +18 -2
- ansys/fluent/core/launcher/launcher_utils.py +63 -15
- ansys/fluent/core/launcher/pim_launcher.py +17 -3
- ansys/fluent/core/launcher/process_launch_string.py +3 -2
- ansys/fluent/core/launcher/server_info.py +7 -3
- ansys/fluent/core/launcher/slurm_launcher.py +4 -2
- ansys/fluent/core/launcher/standalone_launcher.py +6 -3
- ansys/fluent/core/launcher/watchdog.py +6 -6
- ansys/fluent/core/launcher/watchdog_exec +1 -1
- ansys/fluent/core/logger.py +3 -1
- ansys/fluent/core/module_config.py +358 -0
- ansys/fluent/core/pyfluent_warnings.py +9 -3
- ansys/fluent/core/report.py +2 -2
- ansys/fluent/core/search.py +34 -13
- ansys/fluent/core/services/__init__.py +2 -2
- ansys/fluent/core/services/api_upgrade.py +3 -2
- ansys/fluent/core/services/app_utilities.py +39 -0
- ansys/fluent/core/services/datamodel_se.py +4 -2
- ansys/fluent/core/services/deprecated_field_data.py +4 -4
- ansys/fluent/core/services/field_data.py +185 -49
- ansys/fluent/core/services/health_check.py +3 -1
- ansys/fluent/core/services/interceptors.py +8 -6
- ansys/fluent/core/services/reduction.py +16 -5
- ansys/fluent/core/services/settings.py +1 -0
- ansys/fluent/core/session.py +47 -4
- ansys/fluent/core/session_pure_meshing.py +6 -6
- ansys/fluent/core/session_pure_meshing.pyi +1 -0
- ansys/fluent/core/session_shared.py +4 -4
- ansys/fluent/core/session_solver.py +41 -10
- ansys/fluent/core/session_solver.pyi +1 -0
- ansys/fluent/core/session_utilities.py +7 -0
- ansys/fluent/core/solver/error_message.py +2 -2
- ansys/fluent/core/solver/flobject.py +192 -123
- ansys/fluent/core/solver/function/reduction.py +37 -9
- ansys/fluent/core/solver/settings_builtin_bases.py +3 -3
- ansys/fluent/core/solver/settings_builtin_data.py +7 -17
- ansys/fluent/core/streaming_services/datamodel_event_streaming.py +3 -2
- ansys/fluent/core/streaming_services/datamodel_streaming.py +3 -1
- ansys/fluent/core/streaming_services/events_streaming.py +2 -18
- ansys/fluent/core/system_coupling.py +3 -1
- ansys/fluent/core/utils/__init__.py +0 -7
- ansys/fluent/core/utils/data_transfer.py +3 -3
- ansys/fluent/core/utils/file_transfer_service.py +24 -15
- ansys/fluent/core/utils/fluent_version.py +4 -6
- ansys/fluent/core/utils/networking.py +21 -11
- {ansys_fluent_core-0.34.dev0.dist-info → ansys_fluent_core-0.35.dev0.dist-info}/METADATA +10 -11
- {ansys_fluent_core-0.34.dev0.dist-info → ansys_fluent_core-0.35.dev0.dist-info}/RECORD +108 -106
- {ansys_fluent_core-0.34.dev0.dist-info → ansys_fluent_core-0.35.dev0.dist-info}/WHEEL +1 -1
- {ansys_fluent_core-0.34.dev0.dist-info/licenses → ansys_fluent_core-0.35.dev0.dist-info}/LICENSE +0 -0
ansys/fluent/core/file_session.py

```diff
@@ -25,14 +25,15 @@
 from typing import Dict, List
 import warnings
 
+from deprecated.sphinx import deprecated
 import numpy as np
 
 from ansys.api.fluent.v0.field_data_pb2 import DataLocation
 from ansys.fluent.core import PyFluentDeprecationWarning
 from ansys.fluent.core.field_data_interfaces import (
     BaseFieldInfo,
+    FieldBatch,
     FieldDataSource,
-    FieldTransaction,
     PathlinesFieldDataRequest,
     ScalarFieldDataRequest,
     SurfaceDataType,
@@ -41,6 +42,11 @@ from ansys.fluent.core.field_data_interfaces import (
     _AllowedScalarFieldNames,
     _AllowedSurfaceNames,
     _ReturnFieldData,
+    _ScalarFields,
+    _SurfaceIds,
+    _SurfaceNames,
+    _transform_faces_connectivity_data,
+    _VectorFields,
 )
 from ansys.fluent.core.filereader.case_file import CaseFile
 from ansys.fluent.core.filereader.data_file import (
```
```diff
@@ -81,8 +87,8 @@ def _data_type_convertor(args_dict):
     return args_dict
 
 
-class TransactionFieldData:
-    """Provides access to Fluent field data on surfaces collected via transactions."""
+class BatchFieldData:
+    """Provides access to Fluent field data on surfaces collected via batches."""
 
     def __init__(
         self,
@@ -91,7 +97,7 @@ class TransactionFieldData:
         allowed_surface_names,
         allowed_scalar_field_names,
     ):
-        """__init__ method of TransactionFieldData class."""
+        """__init__ method of BatchFieldData class."""
         self.data = data
         self._field_info = field_info
         self._allowed_surface_names = allowed_surface_names
@@ -133,6 +139,7 @@ class TransactionFieldData:
             kwargs.get("surfaces"),
             self.get_surface_ids(kwargs.get("surfaces")),
             surface_data,
+            flatten_connectivity=kwargs.get("flatten_connectivity"),
         )
 
     def _get_vector_field_data(
```
```diff
@@ -179,7 +186,20 @@ class TransactionFieldData:
             | PathlinesFieldDataRequest
         ),
     ) -> Dict[int | str, Dict | np.array]:
-        """Get the surface, scalar, vector or path-lines field data on a surface."""
+        """Get the surface, scalar, vector or path-lines field data on a surface.
+
+        Returns
+        -------
+        Dict[int | str, Dict | np.array]
+            Field data for the requested surface. If field data is unavailable for the surface,
+            an empty array is returned and a warning is issued. Users should always check
+            the array size before using the data.
+
+        Example:
+            data = get_field_data(field_data_request)[surface_id]
+            if data.size == 0:
+                # Handle missing data
+        """
         if isinstance(obj, SurfaceFieldDataRequest):
             return self._get_surface_data(**obj._asdict())
         elif isinstance(obj, ScalarFieldDataRequest):
```
```diff
@@ -196,7 +216,7 @@ class TransactionFieldData:
         return self.data
 
 
-class Transaction(FieldTransaction):
+class Batch(FieldBatch):
     """Populates field data on surfaces."""
 
     class _SurfaceTransaction:
@@ -218,10 +238,10 @@ class Transaction(FieldTransaction):
             self.surface_ids = surface_ids
 
     def __init__(self, file_session, field_info):
-        """__init__ method of Transaction class."""
-        self._surface_transactions = []
-        self._scalar_field_transactions = []
-        self._vector_field_transactions = []
+        """__init__ method of Batch class."""
+        self._surface_batches = []
+        self._scalar_field_batches = []
+        self._vector_field_batches = []
         self._file_session = file_session
         self._field_info = field_info
         self._cache_requests = []
@@ -287,10 +307,8 @@ class Transaction(FieldTransaction):
         provide_vertices = SurfaceDataType.Vertices in data_types
         provide_faces = SurfaceDataType.FacesConnectivity in data_types
         for surface_id in self.get_surface_ids(surfaces):
-            self._surface_transactions.append(
-                Transaction._SurfaceTransaction(
-                    surface_id, provide_vertices, provide_faces
-                )
+            self._surface_batches.append(
+                Batch._SurfaceTransaction(surface_id, provide_vertices, provide_faces)
             )
 
     @all_deprecators(
```
```diff
@@ -360,14 +378,14 @@ class Transaction(FieldTransaction):
         if len(self._file_session._data_file.get_phases()) > 1:
             if not field_name.startswith("phase-"):
                 raise InvalidMultiPhaseFieldName()
-            self._scalar_field_transactions.append(
-                Transaction._ScalarFieldTransaction(
+            self._scalar_field_batches.append(
+                Batch._ScalarFieldTransaction(
                     field_name, surface_ids, field_name.split(":")[0]
                 )
             )
         else:
-            self._scalar_field_transactions.append(
-                Transaction._ScalarFieldTransaction(field_name, surface_ids)
+            self._scalar_field_batches.append(
+                Batch._ScalarFieldTransaction(field_name, surface_ids)
             )
 
     @all_deprecators(
@@ -422,14 +440,14 @@ class Transaction(FieldTransaction):
         if len(self._file_session._data_file.get_phases()) > 1:
             if not field_name.startswith("phase-"):
                 raise InvalidMultiPhaseFieldName()
-            self._vector_field_transactions.append(
-                Transaction._VectorFieldTransaction(
+            self._vector_field_batches.append(
+                Batch._VectorFieldTransaction(
                     field_name, surface_ids, field_name.split(":")[0]
                 )
             )
         else:
-            self._vector_field_transactions.append(
-                Transaction._VectorFieldTransaction(field_name, surface_ids)
+            self._vector_field_batches.append(
+                Batch._VectorFieldTransaction(field_name, surface_ids)
             )
 
     @all_deprecators(
```
```diff
@@ -503,14 +521,14 @@ class Transaction(FieldTransaction):
             )
         elif isinstance(req, ScalarFieldDataRequest):
             self._add_scalar_fields_request(
-                field_name=req.field_name,
+                field_name=_to_scalar_field_name(req.field_name),
                 surfaces=req.surfaces,
                 node_value=req.node_value,
                 boundary_value=req.boundary_value,
             )
         elif isinstance(req, VectorFieldDataRequest):
             self._add_vector_fields_request(
-                field_name=req.field_name,
+                field_name=_to_vector_field_name(req.field_name),
                 surfaces=req.surfaces,
             )
         elif isinstance(req, PathlinesFieldDataRequest):
```
```diff
@@ -555,47 +573,47 @@ class Transaction(FieldTransaction):
             ("boundaryValues", False),
         )
 
-        for transaction in self._scalar_field_transactions:
+        for batch in self._scalar_field_batches:
             if scalar_field_tag not in field_data:
                 field_data[scalar_field_tag] = {}
             field_data_surface = field_data[scalar_field_tag]
-            for surface_id in transaction.surface_ids:
+            for surface_id in batch.surface_ids:
                 field_data_surface[surface_id] = {}
-                field_data_surface[surface_id][transaction.field_name] = (
+                field_data_surface[surface_id][batch.field_name] = (
                     self._file_session._data_file.get_face_scalar_field_data(
-                        transaction.phase_name, transaction.field_name, surface_id
+                        batch.phase_name, batch.field_name, surface_id
                     )
                 )
 
         vector_field_tag = (("type", "vector-field"),)
 
-        for transaction in self._vector_field_transactions:
-            if "velocity" not in transaction.field_name:
+        for batch in self._vector_field_batches:
+            if "velocity" not in batch.field_name:
                 raise InvalidFieldName()
             if vector_field_tag not in field_data:
                 field_data[vector_field_tag] = {}
             field_data_surface = field_data[vector_field_tag]
-            for surface_id in transaction.surface_ids:
+            for surface_id in batch.surface_ids:
                 field_data_surface[surface_id] = {}
-                field_data_surface[surface_id][transaction.field_name] = (
+                field_data_surface[surface_id][batch.field_name] = (
                     self._file_session._data_file.get_face_vector_field_data(
-                        transaction.phase_name, surface_id
+                        batch.phase_name, surface_id
                     )
                 )
                 field_data_surface[surface_id]["vector-scale"] = np.array([0.1])
 
-        for transaction in self._surface_transactions:
+        for batch in self._surface_batches:
             if (("type", "surface-data"),) not in field_data:
                 field_data[(("type", "surface-data"),)] = {}
             field_data_surface = field_data[(("type", "surface-data"),)]
-            field_data_surface[transaction.surface_id] = {}
-            field_data_surface[transaction.surface_id]["faces"] = mesh.get_connectivity(
-                transaction.surface_id
+            field_data_surface[batch.surface_id] = {}
+            field_data_surface[batch.surface_id]["faces"] = mesh.get_connectivity(
+                batch.surface_id
             )
-            field_data_surface[transaction.surface_id]["vertices"] = mesh.get_vertices(
-                transaction.surface_id
+            field_data_surface[batch.surface_id]["vertices"] = mesh.get_vertices(
+                batch.surface_id
             )
-        return TransactionFieldData(
+        return BatchFieldData(
             field_data,
             self._field_info,
             _AllowedSurfaceNames(self._field_info),
```
```diff
@@ -610,10 +628,29 @@ class FileFieldData(FieldDataSource):
         """Initialize FileFieldData."""
         self._file_session = file_session
         self._field_info = field_info
+        self.scalar_fields = _ScalarFields(
+            self._field_info._get_scalar_fields_info, self._field_info
+        )
+        self.vector_fields = _VectorFields(self._field_info._get_vector_fields_info)
+        self.surfaces = _SurfaceNames(self._field_info._get_surfaces_info)
+
+    @property
+    def surface_ids(self):
+        """Get the surface ids."""
+        return _SurfaceIds(
+            _get_surface_ids(
+                self._field_info, list(self._field_info._get_surfaces_info())
+            )
+        )
+
+    def new_batch(self):
+        """Create a new field batch."""
+        return Batch(self._file_session, self._field_info)
 
+    @deprecated(version="0.34", reason="Use `new_batch` instead.")
     def new_transaction(self):
         """Create a new field transaction."""
-        return Transaction(self._file_session, self._field_info)
+        return self.new_batch()
 
     def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
         """Get a list of surface ids based on surfaces provided as inputs."""
```
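The hunk above adds `new_batch` plus the `scalar_fields`, `vector_fields`, `surfaces`, and `surface_ids` accessors to `FileFieldData`, and turns `new_transaction` into a deprecated alias. A minimal migration sketch, assuming only that a `FileFieldData`-like object is available; the helper function and its `hasattr` fallback are illustrative, not part of the package:

```python
def get_batch(field_data):
    """Return a field batch from a FileFieldData-like object.

    Prefers the new_batch() method added in this release and falls back to the
    deprecated new_transaction() alias, which now simply forwards to new_batch().
    """
    if hasattr(field_data, "new_batch"):
        return field_data.new_batch()
    return field_data.new_transaction()
```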
```diff
@@ -650,6 +687,7 @@ class FileFieldData(FieldDataSource):
         data_types: List[SurfaceDataType] | List[str],
         surfaces: List[int | str],
         overset_mesh: bool | None = False,
+        flatten_connectivity: bool = False,
     ):
         """Get surface data (vertices and faces connectivity).
 
@@ -661,6 +699,8 @@ class FileFieldData(FieldDataSource):
             List of surface IDS or surface names for the surface data.
         overset_mesh : bool, optional
             Whether to provide the overset method. The default is ``False``.
+        flatten_connectivity: bool, optional
+            Whether to provide faces connectivity data in flattened format.
 
         Returns
         -------
@@ -673,6 +713,7 @@ class FileFieldData(FieldDataSource):
             data_types=data_types,
             surfaces=surfaces,
             overset_mesh=overset_mesh,
+            flatten_connectivity=flatten_connectivity,
         )
 
     def _get_surface_data(
@@ -680,6 +721,7 @@ class FileFieldData(FieldDataSource):
         data_types: List[SurfaceDataType] | List[str],
         surfaces: List[int | str],
         overset_mesh: bool | None = False,
+        flatten_connectivity: bool = False,
     ):
         for d_type in data_types:
             if isinstance(d_type, str):
```
```diff
@@ -697,24 +739,28 @@ class FileFieldData(FieldDataSource):
             }
 
         if SurfaceDataType.FacesConnectivity in data_types:
-
-
-                self._file_session._case_file.get_mesh().get_connectivity(
+            if flatten_connectivity:
+                return {
+                    surface: self._file_session._case_file.get_mesh().get_connectivity(
                         surface_ids[count]
                     )
+                    for count, surface in enumerate(surfaces)
+                }
+            else:
+                warnings.warn(
+                    "Structured face connectivity output is deprecated and will be replaced by the flat format "
+                    "in a future release. In the current release, pass 'flatten_connectivity=True' argument while creating the "
+                    "'SurfaceFieldDataRequest' to request data in the flat format.",
+                    PyFluentDeprecationWarning,
                 )
-
-
-
-
-
-
-
-
-                end = i + 1 + data[i]
-                faces_data.append(data[i + 1 : end])
-                i = end
-            return faces_data
+                return {
+                    surface: _transform_faces_connectivity_data(
+                        self._file_session._case_file.get_mesh().get_connectivity(
+                            surface_ids[count]
+                        )
+                    )
+                    for count, surface in enumerate(surfaces)
+                }
 
     @all_deprecators(
         deprecate_arg_mappings=[
```
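The new branch above either returns the mesh's flat connectivity array directly (`flatten_connectivity=True`) or warns and falls back to the nested per-face format via `_transform_faces_connectivity_data`. The removed lines (`end = i + 1 + data[i]`, `faces_data.append(data[i + 1 : end])`) suggest the flat layout encodes each face as a node count followed by that many node indices; a small standalone sketch of that reading (the `unflatten` helper is illustrative, not part of the package):

```python
import numpy as np


def unflatten(flat: np.ndarray) -> list[np.ndarray]:
    """Split a flat connectivity array [n0, i, j, ..., n1, k, l, ...] into one
    index array per face, mirroring the nested format produced by the
    deprecated code path removed above."""
    faces = []
    i = 0
    while i < len(flat):
        count = int(flat[i])
        faces.append(flat[i + 1 : i + 1 + count])
        i += 1 + count
    return faces


# Two triangles sharing an edge, in the assumed flat encoding.
flat = np.array([3, 0, 1, 2, 3, 1, 2, 3])
print([f.tolist() for f in unflatten(flat)])  # [[0, 1, 2], [1, 2, 3]]
```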
```diff
@@ -940,7 +986,20 @@ class FileFieldData(FieldDataSource):
             | PathlinesFieldDataRequest
         ),
     ) -> Dict[int | str, Dict | np.array]:
-        """Get the surface, scalar, vector or path-lines field data on a surface."""
+        """Get the surface, scalar, vector or path-lines field data on a surface.
+
+        Returns
+        -------
+        Dict[int | str, Dict | np.array]
+            Field data for the requested surface. If field data is unavailable for the surface,
+            an empty array is returned and a warning is issued. Users should always check
+            the array size before using the data.
+
+        Example:
+            data = get_field_data(field_data_request)[surface_id]
+            if data.size == 0:
+                # Handle missing data
+        """
         if isinstance(obj, SurfaceFieldDataRequest):
             return self._get_surface_data(**obj._asdict())
         elif isinstance(obj, ScalarFieldDataRequest):
```
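The expanded docstring above tells callers that missing surface data now comes back as an empty array plus a warning rather than an exception. A defensive-access sketch; `field_data` and `request` are assumed placeholders and `safe_scalar_data` is not a PyFluent API:

```python
def safe_scalar_data(field_data, request, surface):
    """Return the array for one surface, or None if the source had no data for
    it (signalled by the empty array described in the docstring above)."""
    data = field_data.get_field_data(request)[surface]
    if getattr(data, "size", 0) == 0:
        return None
    return data
```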
```diff
@@ -951,11 +1010,11 @@ class FileFieldData(FieldDataSource):
             return self._get_pathlines_field_data(**obj._asdict())
 
 
-class FileFieldInfo((BaseFieldInfo)):
+class _FileFieldInfo(BaseFieldInfo):
     """File field info."""
 
     def __init__(self, file_session):
-        """Initialize FileFieldInfo."""
+        """Initialize _FileFieldInfo."""
         self._file_session = file_session
 
     def get_scalar_field_range(
@@ -975,6 +1034,16 @@ class FileFieldInfo((BaseFieldInfo)):
         -------
         List[float]
         """
+        warnings.warn(
+            "This usage is deprecated and will be removed in a future release. "
+            f"Please use 'field_data.scalar_fields.range({field}, {node_value}, {surface_ids})' instead",
+            PyFluentDeprecationWarning,
+        )
+        return self._get_scalar_field_range(field, node_value, surface_ids)
+
+    def _get_scalar_field_range(
+        self, field: str, node_value: bool = False, surface_ids: List[int] = None
+    ) -> List[float]:
         minimum = None
         maximum = None
         if not surface_ids:
```
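The deprecation message above names the replacement call exactly. A small compatibility wrapper built only from the two call shapes shown in this hunk (the wrapper function and its argument names are illustrative):

```python
def scalar_range(field_data, field_info, field, node_value=False, surface_ids=None):
    """Prefer field_data.scalar_fields.range(...) as suggested by the warning,
    falling back to the older FieldInfo.get_scalar_field_range(...) call."""
    if hasattr(field_data, "scalar_fields"):
        return field_data.scalar_fields.range(field, node_value, surface_ids)
    return field_info.get_scalar_field_range(field, node_value, surface_ids)
```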
```diff
@@ -997,6 +1066,14 @@ class FileFieldInfo((BaseFieldInfo)):
         -------
         Dict
         """
+        warnings.warn(
+            "This usage is deprecated and will be removed in a future release. "
+            "Please use 'field_data.scalar_fields()' instead",
+            PyFluentDeprecationWarning,
+        )
+        return self._get_scalar_fields_info()
+
+    def _get_scalar_fields_info(self):
         phases = self._file_session._data_file.get_phases()
 
         scalar_field_info = {}
@@ -1032,6 +1109,14 @@ class FileFieldInfo((BaseFieldInfo)):
         -------
         Dict
         """
+        warnings.warn(
+            "This usage is deprecated and will be removed in a future release. "
+            "Please use 'field_data.vector_fields()' instead",
+            PyFluentDeprecationWarning,
+        )
+        return self._get_vector_fields_info()
+
+    def _get_vector_fields_info(self):
         phases = self._file_session._data_file.get_phases()
 
         if len(phases) > 1:
@@ -1061,6 +1146,14 @@ class FileFieldInfo((BaseFieldInfo)):
         -------
         Dict
         """
+        warnings.warn(
+            "This usage is deprecated and will be removed in a future release. "
+            "Please use 'field_data.surfaces()' instead",
+            PyFluentDeprecationWarning,
+        )
+        return self._get_surfaces_info()
+
+    def _get_surfaces_info(self):
         mesh = self._file_session._case_file.get_mesh()
         surface_names = mesh.get_surface_names()
         surface_ids = mesh.get_surface_ids()
```
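The three warnings above route the old metadata getters to callables on the field-data object. A sketch that gathers the same information through the replacements named in those messages (the wrapper function is an assumption drawn from this diff, not confirmed API documentation):

```python
def field_metadata(field_data):
    """Collect field and surface metadata via the accessors named in the warnings."""
    return {
        "scalar_fields": field_data.scalar_fields(),  # replaces the deprecated scalar-fields-info getter
        "vector_fields": field_data.vector_fields(),  # replaces the deprecated vector-fields-info getter
        "surfaces": field_data.surfaces(),            # replaces the deprecated surfaces-info getter
    }
```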
```diff
@@ -1076,6 +1169,19 @@ class FileFieldInfo((BaseFieldInfo)):
         return info
 
 
+class FileFieldInfo(_FileFieldInfo):
+    """File field info."""
+
+    def __init__(self, file_session):
+        """Initialize FileFieldInfo"""
+        warnings.warn(
+            "'FieldInfo' is deprecated and will be removed in a future release. "
+            "Please use relevant methods from 'FieldData' instead",
+            PyFluentDeprecationWarning,
+        )
+        super().__init__(file_session)
+
+
 class FileSession:
     """File session to read case and data file."""
 
```
```diff
@@ -1141,7 +1247,7 @@ def _get_surface_ids(
     surface_ids = []
     for surf in surfaces:
         if isinstance(surf, str):
-            surface_ids.extend(field_info.get_surfaces_info()[surf]["surface_id"])
+            surface_ids.extend(field_info._get_surfaces_info()[surf]["surface_id"])
         else:
             surface_ids.append(surf)
     return surface_ids
@@ -1152,5 +1258,5 @@ class Fields:
 
     def __init__(self, _session: FileSession):
         """Initialize Fields."""
-        self.field_info = FileFieldInfo(_session)
+        self.field_info = _FileFieldInfo(_session)
         self.field_data = FileFieldData(_session, self.field_info)
```
ansys/fluent/core/fluent_connection.py

```diff
@@ -42,11 +42,12 @@ from deprecated.sphinx import deprecated
 import grpc
 
 import ansys.fluent.core as pyfluent
-from ansys.fluent.core.launcher.launcher_utils import is_compose
+from ansys.fluent.core.launcher.launcher_utils import ComposeConfig
 from ansys.fluent.core.services import service_creator
 from ansys.fluent.core.services.app_utilities import (
     AppUtilitiesOld,
     AppUtilitiesService,
+    AppUtilitiesV252,
 )
 from ansys.fluent.core.services.scheme_eval import SchemeEvalService
 from ansys.fluent.core.utils.execution import timeout_exec, timeout_loop
```
```diff
@@ -251,9 +252,9 @@ class FluentConnectionProperties:
 
 def _get_ip_and_port(ip: str | None = None, port: int | None = None) -> (str, int):
     if not ip:
-        ip =
+        ip = pyfluent.config.launch_fluent_ip or "127.0.0.1"
     if not port:
-        port =
+        port = pyfluent.config.launch_fluent_port
     if not port:
         raise PortNotProvided()
     return ip, port
```
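The replacement lines read defaults from the new `pyfluent.config` object (`launch_fluent_ip`, `launch_fluent_port`) instead of the removed module-level settings. A standalone sketch of the same fallback order; `resolve_address` and the generic `ValueError` are illustrative stand-ins:

```python
def resolve_address(config, ip=None, port=None):
    """Mirror _get_ip_and_port above: an explicit argument wins, then the
    configured value, then a local default (for the IP only); a missing port
    is an error."""
    ip = ip or config.launch_fluent_ip or "127.0.0.1"
    port = port or config.launch_fluent_port
    if not port:
        raise ValueError("no Fluent port provided")  # PortNotProvided in the real code
    return ip, port
```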
```diff
@@ -277,18 +278,22 @@ class _ConnectionInterface:
         self.scheme_eval = service_creator("scheme_eval").create(
             self._scheme_eval_service
         )
-
-
-
-        ):
-
-
-
-
-
-
-
-
+        self._app_utilities_service = create_grpc_service(
+            AppUtilitiesService, error_state
+        )
+        match pyfluent.FluentVersion(self.scheme_eval.version):
+            case v if v < pyfluent.FluentVersion.v252:
+                self._app_utilities = AppUtilitiesOld(self.scheme_eval)
+
+            case pyfluent.FluentVersion.v252:
+                self._app_utilities = AppUtilitiesV252(
+                    self._app_utilities_service, self.scheme_eval
+                )
+
+            case _:
+                self._app_utilities = service_creator("app_utilities").create(
+                    self._app_utilities_service
+                )
 
     @property
     def product_build_info(self) -> str:
```
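The new block picks an `AppUtilities` implementation with a `match` statement: a guarded capture pattern for everything older than 25.2, an exact value pattern for 25.2, and a wildcard for newer servers. A generic sketch of that dispatch shape with a stand-in enum (nothing here is PyFluent API; only the pattern structure mirrors the diff):

```python
from enum import IntEnum


class Version(IntEnum):  # stand-in for pyfluent.FluentVersion
    V251 = 251
    V252 = 252
    V261 = 261


def pick_backend(version: Version) -> str:
    match version:
        case v if v < Version.V252:  # guard: any release older than 25.2
            return "scheme-eval based utilities"
        case Version.V252:           # exact match for the transition release
            return "25.2 hybrid utilities"
        case _:                      # everything newer uses the gRPC service
            return "gRPC app-utilities service"


print(pick_backend(Version.V261))  # -> gRPC app-utilities service
```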
```diff
@@ -376,6 +381,7 @@ class FluentConnection:
         slurm_job_id: str | None = None,
         inside_container: bool | None = None,
         container: ContainerT | None = None,
+        compose_config: ComposeConfig | None = None,
     ):
         """Initialize a Session.
 
@@ -415,12 +421,15 @@ class FluentConnection:
         container: ContainerT, optional
             The container instance if the Fluent session is running inside
             a container.
+        compose_config: ComposeConfig, optional
+            Configuration for Docker Compose or Podman Compose.
 
         Raises
         ------
         PortNotProvided
             If port is not provided.
         """
+        self._compose_config = compose_config if compose_config else ComposeConfig()
         self._error_state = ErrorState()
         self._data_valid = False
         self._channel_str = None
```
```diff
@@ -442,7 +451,7 @@ class FluentConnection:
         # At this point, the server must be running. If the following check_health()
         # throws, we should not proceed.
         # TODO: Show user-friendly error message.
-        if pyfluent.
+        if pyfluent.config.check_health:
             self._health_check.check_health()
 
         self._slurm_job_id = slurm_job_id
@@ -467,7 +476,7 @@ class FluentConnection:
             and cortex_host is not None
         ):
             logger.info("Checking if Fluent is running inside a container.")
-            if not is_compose
+            if not self._compose_config.is_compose:
                 inside_container = get_container(cortex_host)
                 logger.debug(f"get_container({cortex_host}): {inside_container}")
                 if inside_container is False:
```
```diff
@@ -544,7 +553,10 @@ class FluentConnection:
         >>> session = pyfluent.launch_fluent()
         >>> session.force_exit()
         """
-        if
+        if (
+            self.connection_properties.inside_container
+            or self._compose_config.is_compose
+        ):
             self._force_exit_container()
         elif self._remote_instance is not None:
             logger.error("Cannot execute cleanup script, Fluent running remotely.")
@@ -596,7 +608,7 @@ class FluentConnection:
     def _force_exit_container(self):
         """Immediately terminates the Fluent client running inside a container, losing
         unsaved progress and data."""
-        if is_compose
+        if self._compose_config.is_compose and self._container:
             self._container.stop()
         else:
             container = self.connection_properties.inside_container
```
```diff
@@ -674,7 +686,10 @@ class FluentConnection:
         else:
             raise WaitTypeError()
 
-        if
+        if (
+            self.connection_properties.inside_container
+            and not self._compose_config.is_compose
+        ):
             _response = timeout_loop(
                 get_container,
                 wait,
```
```diff
@@ -743,16 +758,16 @@ class FluentConnection:
         )
 
         if timeout is None:
-
-
-
-                logger.debug("Found PYFLUENT_TIMEOUT_FORCE_EXIT env var")
+            config_timeout = pyfluent.config.force_exit_timeout
+            if config_timeout is not None:
+                logger.debug(f"Found force_exit_timeout config: '{config_timeout}'")
                 try:
-                    timeout = float(
+                    timeout = float(config_timeout)
                     logger.debug(f"Setting TIMEOUT_FORCE_EXIT to {timeout}")
                 except ValueError:
                     logger.debug(
-                        "
+                        "Invalid force_exit_timeout config. Must be a float or int. "
+                        "Timeout forced exit is disabled."
                     )
 
         if timeout is None:
```
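The final hunk reads the forced-exit timeout from `pyfluent.config.force_exit_timeout` instead of the old `PYFLUENT_TIMEOUT_FORCE_EXIT` environment variable, keeping the tolerant float conversion. A standalone sketch of that conversion step (`parse_timeout` is illustrative):

```python
def parse_timeout(raw) -> float | None:
    """Coerce a configured timeout to float; invalid values disable the
    forced-exit timeout, mirroring the try/except ValueError above."""
    if raw is None:
        return None
    try:
        return float(raw)
    except (TypeError, ValueError):
        return None


print(parse_timeout("30"))   # 30.0
print(parse_timeout("abc"))  # None -> forced exit timeout disabled
```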
|