ansys-fluent-core 0.33.0__py3-none-any.whl → 0.34.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ansys-fluent-core might be problematic.
- ansys/fluent/core/__init__.py +3 -3
- ansys/fluent/core/codegen/builtin_settingsgen.py +25 -19
- ansys/fluent/core/codegen/settingsgen.py +17 -5
- ansys/fluent/core/codegen/tuigen.py +2 -1
- ansys/fluent/core/docker/docker_compose.py +4 -1
- ansys/fluent/core/docker/utils.py +35 -0
- ansys/fluent/core/exceptions.py +13 -1
- ansys/fluent/core/field_data_interfaces.py +239 -38
- ansys/fluent/core/file_session.py +139 -59
- ansys/fluent/core/fluent_connection.py +23 -16
- ansys/fluent/core/generated/api_tree/api_objects.json +1 -1
- ansys/fluent/core/generated/datamodel_231/flicing.py +30 -30
- ansys/fluent/core/generated/datamodel_231/meshing.py +171 -171
- ansys/fluent/core/generated/datamodel_232/flicing.py +35 -35
- ansys/fluent/core/generated/datamodel_232/meshing.py +223 -223
- ansys/fluent/core/generated/datamodel_241/flicing.py +35 -35
- ansys/fluent/core/generated/datamodel_241/meshing.py +264 -264
- ansys/fluent/core/generated/datamodel_242/flicing.py +30 -30
- ansys/fluent/core/generated/datamodel_242/meshing.py +369 -369
- ansys/fluent/core/generated/datamodel_251/flicing.py +35 -35
- ansys/fluent/core/generated/datamodel_251/meshing.py +331 -331
- ansys/fluent/core/generated/datamodel_251/part_management.py +3 -3
- ansys/fluent/core/generated/datamodel_252/flicing.py +50 -50
- ansys/fluent/core/generated/datamodel_252/meshing.py +398 -398
- ansys/fluent/core/generated/datamodel_252/part_management.py +5 -5
- ansys/fluent/core/generated/datamodel_261/flicing.py +40 -40
- ansys/fluent/core/generated/datamodel_261/meshing.py +416 -397
- ansys/fluent/core/generated/datamodel_261/part_management.py +10 -10
- ansys/fluent/core/generated/datamodel_261/preferences.py +7 -0
- ansys/fluent/core/generated/fluent_version_261.py +3 -3
- ansys/fluent/core/generated/meshing/tui_261.py +1186 -1180
- ansys/fluent/core/generated/solver/settings_231.py +1 -0
- ansys/fluent/core/generated/solver/settings_231.pyi +3025 -1
- ansys/fluent/core/generated/solver/settings_232.py +1 -0
- ansys/fluent/core/generated/solver/settings_232.pyi +3425 -1
- ansys/fluent/core/generated/solver/settings_241.py +1 -0
- ansys/fluent/core/generated/solver/settings_241.pyi +4423 -1
- ansys/fluent/core/generated/solver/settings_242.py +1 -0
- ansys/fluent/core/generated/solver/settings_242.pyi +5474 -1
- ansys/fluent/core/generated/solver/settings_251.py +11 -0
- ansys/fluent/core/generated/solver/settings_251.pyi +6006 -1
- ansys/fluent/core/generated/solver/settings_252.py +11 -1
- ansys/fluent/core/generated/solver/settings_252.pyi +6782 -2
- ansys/fluent/core/generated/solver/settings_261.py +5592 -2740
- ansys/fluent/core/generated/solver/settings_261.pyi +10335 -1994
- ansys/fluent/core/generated/solver/settings_builtin.py +56 -22
- ansys/fluent/core/generated/solver/settings_builtin.pyi +22 -0
- ansys/fluent/core/generated/solver/tui_261.py +2445 -2281
- ansys/fluent/core/launcher/container_launcher.py +6 -2
- ansys/fluent/core/launcher/error_handler.py +1 -1
- ansys/fluent/core/launcher/fluent_container.py +53 -10
- ansys/fluent/core/launcher/launcher.py +3 -0
- ansys/fluent/core/launcher/watchdog.py +6 -6
- ansys/fluent/core/launcher/watchdog_exec +1 -1
- ansys/fluent/core/pyfluent_warnings.py +7 -1
- ansys/fluent/core/report.py +2 -0
- ansys/fluent/core/search.py +11 -3
- ansys/fluent/core/services/__init__.py +2 -2
- ansys/fluent/core/services/app_utilities.py +39 -0
- ansys/fluent/core/services/deprecated_field_data.py +4 -4
- ansys/fluent/core/services/field_data.py +158 -41
- ansys/fluent/core/services/reduction.py +16 -5
- ansys/fluent/core/services/settings.py +1 -0
- ansys/fluent/core/session.py +16 -1
- ansys/fluent/core/session_pure_meshing.py +5 -5
- ansys/fluent/core/session_pure_meshing.pyi +1 -0
- ansys/fluent/core/session_solver.py +33 -8
- ansys/fluent/core/session_solver.pyi +1 -0
- ansys/fluent/core/solver/error_message.py +2 -2
- ansys/fluent/core/solver/flobject.py +187 -120
- ansys/fluent/core/solver/function/reduction.py +37 -9
- ansys/fluent/core/solver/settings_builtin_data.py +5 -3
- ansys/fluent/core/utils/fluent_version.py +1 -3
- ansys/fluent/core/utils/networking.py +18 -8
- {ansys_fluent_core-0.33.0.dist-info → ansys_fluent_core-0.34.0.dist-info}/METADATA +10 -11
- {ansys_fluent_core-0.33.0.dist-info → ansys_fluent_core-0.34.0.dist-info}/RECORD +78 -77
- {ansys_fluent_core-0.33.0.dist-info → ansys_fluent_core-0.34.0.dist-info}/WHEEL +1 -1
- {ansys_fluent_core-0.33.0.dist-info/licenses → ansys_fluent_core-0.34.0.dist-info}/LICENSE +0 -0
ansys/fluent/core/__init__.py
CHANGED
@@ -77,9 +77,9 @@ from ansys.fluent.core.utils import fldoc, get_examples_download_dir
 from ansys.fluent.core.utils.fluent_version import FluentVersion # noqa: F401
 from ansys.fluent.core.utils.setup_for_fluent import setup_for_fluent # noqa: F401
 
-__version__ = "0.33.0"
+__version__ = "0.34.0"
 
-_VERSION_INFO = "Build date:
+_VERSION_INFO = "Build date: July 24, 2025 15:00 UTC ShaID: 7de106f"
 """
 Global variable indicating the version info of the PyFluent package.
 Build timestamp and commit hash are added to this variable during packaging.

@@ -155,7 +155,7 @@ CODEGEN_OUTDIR = os.getenv(
 FLUENT_SHOW_MESH_AFTER_CASE_READ = False
 
 # Whether to write the automatic transcript in Fluent
-FLUENT_AUTOMATIC_TRANSCRIPT =
+FLUENT_AUTOMATIC_TRANSCRIPT = os.getenv("PYFLUENT_FLUENT_AUTOMATIC_TRANSCRIPT") == "1"
 
 # Whether to interrupt Fluent solver from PyFluent
 SUPPORT_SOLVER_INTERRUPT = False
ansys/fluent/core/codegen/builtin_settingsgen.py
CHANGED
@@ -88,26 +88,32 @@ def generate(version: str):
 f.write(f"class {name}(_{kind}Setting):\n")
 doc_kind = "command" if kind == "Command" else "setting"
 f.write(f'    """{name} {doc_kind}."""\n\n')
-f.write("    def __init__(self")
-for named_object in named_objects:
-    f.write(f", {named_object}: str")
-f.write(", settings_source: SettingsBase | Solver | None = None")
-if kind == "NonCreatableNamedObject":
-    f.write(", name: str = None")
-elif kind == "CreatableNamedObject":
-    f.write(", name: str = None, new_instance_name: str = None")
 if kind == "Command":
-    f.write(
-    f.write(
+    f.write(
+        "    def __new__(cls, settings_source: SettingsBase | Solver | None = None, **kwargs):\n"
+    )
+    f.write("        instance = super().__new__(cls)\n")
+    f.write(
+        "        instance.__init__(settings_source=settings_source, **kwargs)\n"
+    )
+    f.write("        return instance(**kwargs")
+else:
+    f.write("    def __init__(self")
+    for named_object in named_objects:
+        f.write(f", {named_object}: str")
+    f.write(", settings_source: SettingsBase | Solver | None = None")
+    if kind == "NonCreatableNamedObject":
+        f.write(", name: str = None")
+    elif kind == "CreatableNamedObject":
+        f.write(", name: str = None, new_instance_name: str = None")
+    f.write("):\n")
+    f.write("        super().__init__(settings_source=settings_source")
+    if kind == "NonCreatableNamedObject":
+        f.write(", name=name")
+    elif kind == "CreatableNamedObject":
+        f.write(", name=name, new_instance_name=new_instance_name")
+    for named_object in named_objects:
+        f.write(f", {named_object}={named_object}")
 f.write(")\n\n")
 
 with open(_PYI_FILE, "w") as f:
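For orientation, a sketch of what these writes emit for the two kinds involved; the class names below are hypothetical stand-ins, not names taken from the generated module:

    class ExampleCommand(_CommandSetting):
        """ExampleCommand command."""

        def __new__(cls, settings_source: SettingsBase | Solver | None = None, **kwargs):
            instance = super().__new__(cls)
            instance.__init__(settings_source=settings_source, **kwargs)
            return instance(**kwargs)

    class ExampleObject(_CreatableNamedObjectSetting):
        """ExampleObject setting."""

        def __init__(self, settings_source: SettingsBase | Solver | None = None, name: str = None, new_instance_name: str = None):
            super().__init__(settings_source=settings_source, name=name, new_instance_name=new_instance_name)

Command-kind builtins thus gain a __new__ that constructs the wrapper and immediately calls it with the supplied keyword arguments, while named-object kinds keep the explicit __init__ forwarding.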
ansys/fluent/core/codegen/settingsgen.py
CHANGED
@@ -38,6 +38,7 @@ from ansys.fluent.core.solver.flobject import (
     ListObject,
     NamedObject,
     get_cls,
+    to_constant_name,
     to_python_name,
 )
 from ansys.fluent.core.utils.fix_doc import fix_settings_doc

@@ -107,10 +108,11 @@ def strip_parameters(docstring: str) -> str:
 
 
 def _populate_data(cls, api_tree: dict, version: str) -> dict:
-    data = {
+    data = {
+        "version": version,
+        "name": cls.__name__,
+        "bases": [base.__name__ for base in cls.__bases__],
+    }
     if "command" in cls.__doc__:
         data["doc"] = strip_parameters(cls.__doc__)
     else:

@@ -150,6 +152,7 @@ def _populate_data(cls, api_tree: dict, version: str) -> dict:
         data["child_object_type"]["doc"] = f"'child_object_type' of {cls.__name__}."
     else:
         data["child_object_type"] = None
+    data["allowed_values"] = getattr(cls, "_allowed_values", [])
     return data
 
 

@@ -306,7 +309,15 @@ def _write_data(cls_name: str, python_name: str, data: dict, f: IO, f_stub: IO |
     if return_type:
         s.write(f"    return_type = {return_type!r}\n")
         s_stub.write("    return_type: str\n")
+    for allowed_value in data["allowed_values"]:
+        s.write(
+            f"    {to_constant_name(allowed_value)} = _FlStringConstant({allowed_value!r})\n"
+        )
+        s_stub.write(
+            f"    {to_constant_name(allowed_value)}: Final[str] = {allowed_value!r}\n"
+        )
     s.write("\n")
+    s_stub.write("\n")
     for name, (python_name, data, hash_, should_write_stub) in classes_to_write.items():
         if name not in _CLASS_WRITTEN:
             _write_data(

@@ -370,10 +381,11 @@ def generate(version: str, static_infos: dict, verbose: bool = False) -> None:
     header.write("    _InputFile,\n")
     header.write("    _OutputFile,\n")
     header.write("    _InOutFile,\n")
+    header.write("    _FlStringConstant,\n")
     header.write(")\n\n")
     f.write(header.getvalue())
     f_stub.write(header.getvalue())
-    f_stub.write("from typing import Any\n\n")
+    f_stub.write("from typing import Any, Final\n\n")
     f.write(f'SHASH = "{shash}"\n\n')
     name = data["name"]
     _NAME_BY_HASH[_gethash(data)] = name
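For context, the allowed_values plumbing above means a generated settings class whose static info lists allowed string values now carries class-level constants. A rough sketch of the emitted output, assuming a parameter with the allowed value "k-epsilon" and that to_constant_name maps it to K_EPSILON (both assumptions, not taken from the generated files):

    # settings_<version>.py (sketch)
    class model(String):
        K_EPSILON = _FlStringConstant("k-epsilon")

    # settings_<version>.pyi (sketch)
    class model(String):
        K_EPSILON: Final[str] = "k-epsilon"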
ansys/fluent/core/codegen/tuigen.py
CHANGED
@@ -52,6 +52,7 @@ import ansys.fluent.core as pyfluent
 from ansys.fluent.core import FluentMode, launch_fluent
 from ansys.fluent.core.codegen import StaticInfoType
 from ansys.fluent.core.codegen.data.fluent_gui_help_patch import XML_HELP_PATCH
+from ansys.fluent.core.docker.utils import get_ghcr_fluent_image_name
 from ansys.fluent.core.services.datamodel_tui import (
     convert_path_to_grpc_path,
     convert_tui_menu_to_func_name,

@@ -97,7 +98,7 @@ _XML_HELPSTRINGS = {}
 def _copy_tui_help_xml_file(version: str):
     if os.getenv("PYFLUENT_LAUNCH_CONTAINER") == "1":
         image_tag = os.getenv("FLUENT_IMAGE_TAG", "v25.1.0")
-        image_name = f"
+        image_name = f"{get_ghcr_fluent_image_name(image_tag)}:{image_tag}"
         container_name = uuid.uuid4().hex
         is_linux = platform.system() == "Linux"
         subprocess.run(
ansys/fluent/core/docker/docker_compose.py
CHANGED
@@ -26,6 +26,8 @@ import os
 import subprocess
 import uuid
 
+from .utils import get_ghcr_fluent_image_name
+
 
 class ComposeBasedLauncher:
     """Launch Fluent through docker or Podman compose."""

@@ -33,9 +35,10 @@
     def __init__(self, *, container_dict):
         self._compose_name = f"pyfluent_compose_{uuid.uuid4().hex}"
         self._container_dict = container_dict
+        image_tag = os.getenv("FLUENT_IMAGE_TAG")
         self._image_name = (
             container_dict.get("fluent_image")
-            or f"
+            or f"{get_ghcr_fluent_image_name(image_tag)}:{image_tag}"
         )
         self._container_source = self._set_compose_cmds()
         self._container_source.remove("compose")
ansys/fluent/core/docker/utils.py
ADDED
@@ -0,0 +1,35 @@
+# Copyright (C) 2021 - 2025 ANSYS, Inc. and/or its affiliates.
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+Utility functions for working with Fluent Docker images.
+"""
+
+
+def get_ghcr_fluent_image_name(image_tag: str):
+    """
+    Get the Fluent image name from GitHub registry based on the image tag.
+    """
+    if image_tag >= "v26.1":
+        return "ghcr.io/ansys/fluent"
+    else:
+        return "ghcr.io/ansys/pyfluent"
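A small usage sketch of the new helper; the tags below are illustrative, and the check is a plain string comparison against "v26.1":

    from ansys.fluent.core.docker.utils import get_ghcr_fluent_image_name

    get_ghcr_fluent_image_name("v25.2.0")  # "ghcr.io/ansys/pyfluent"
    get_ghcr_fluent_image_name("v26.1.0")  # "ghcr.io/ansys/fluent"

    # tuigen.py and docker_compose.py build the full image reference the same way:
    image_tag = "v26.1.0"
    image_name = f"{get_ghcr_fluent_image_name(image_tag)}:{image_tag}"  # ghcr.io/ansys/fluent:v26.1.0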
ansys/fluent/core/exceptions.py
CHANGED
@@ -34,7 +34,7 @@ class DisallowedValuesError(ValueError):
         context: str | None = None,
         name: Any | None = None,
         allowed_values: Iterable[Any] | None = None,
-    ):
+    ) -> None:
         """Initialize DisallowedValuesError."""
         super().__init__(
             allowed_name_error_message(

@@ -50,3 +50,15 @@ class InvalidArgument(ValueError):
     """Raised when an argument value is inappropriate."""
 
     pass
+
+
+class BetaFeaturesNotEnabled(RuntimeError):
+    """Raised when a beta feature is accessed without enabling beta features."""
+
+    def __init__(self, feature_name: str | None = None) -> None:
+        message = (
+            f"The feature '{feature_name}' is not available until 'enable_beta_features()' has been called."
+            if feature_name
+            else "This feature is not available until 'enable_beta_features()' has been called."
+        )
+        super().__init__(message)
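A minimal sketch of how the new exception reads when raised; the feature name passed here is illustrative:

    from ansys.fluent.core.exceptions import BetaFeaturesNotEnabled

    try:
        raise BetaFeaturesNotEnabled("new_batch")
    except BetaFeaturesNotEnabled as err:
        print(err)
        # The feature 'new_batch' is not available until 'enable_beta_features()' has been called.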
ansys/fluent/core/field_data_interfaces.py
CHANGED
@@ -21,15 +21,16 @@
 # SOFTWARE.
 
 """Common interfaces for field data."""
-
 from abc import ABC, abstractmethod
 from enum import Enum
 from typing import Callable, Dict, List, NamedTuple
+import warnings
 
 import numpy as np
 import numpy.typing as npt
 
 from ansys.fluent.core.exceptions import DisallowedValuesError
+from ansys.fluent.core.pyfluent_warnings import PyFluentDeprecationWarning
 from ansys.fluent.core.variable_strategies import (
     FluentFieldDataNamingStrategy as naming_strategy,
 )

@@ -50,15 +51,16 @@ class SurfaceFieldDataRequest(NamedTuple):
     """Container storing parameters for surface data request."""
 
     data_types: List[SurfaceDataType] | List[str]
-    surfaces: List[int | str]
+    surfaces: List[int | str | object]
     overset_mesh: bool | None = False
+    flatten_connectivity: bool = False
 
 
 class ScalarFieldDataRequest(NamedTuple):
     """Container storing parameters for scalar field data request."""
 
     field_name: str
-    surfaces: List[int | str]
+    surfaces: List[int | str | object]
     node_value: bool | None = True
     boundary_value: bool | None = True
 

@@ -67,14 +69,14 @@ class VectorFieldDataRequest(NamedTuple):
     """Container storing parameters for vector field data request."""
 
     field_name: str
-    surfaces: List[int | str]
+    surfaces: List[int | str | object]
 
 
 class PathlinesFieldDataRequest(NamedTuple):
     """Container storing parameters for path-lines field data request."""
 
     field_name: str
-    surfaces: List[int | str]
+    surfaces: List[int | str | object]
     additional_field_name: str = ""
     provide_particle_time_field: bool | None = False
     node_value: bool | None = True

@@ -87,6 +89,7 @@ class PathlinesFieldDataRequest(NamedTuple):
     coarsen: int | None = 1
     velocity_domain: str | None = "all-phases"
     zones: list | None = None
+    flatten_connectivity: bool = False
 
 
 class BaseFieldInfo(ABC):
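A short sketch of building requests with the widened surfaces type and the new flatten_connectivity flag; the surface and field names are illustrative:

    from ansys.fluent.core.field_data_interfaces import (
        ScalarFieldDataRequest,
        SurfaceDataType,
        SurfaceFieldDataRequest,
    )

    surface_request = SurfaceFieldDataRequest(
        data_types=[SurfaceDataType.Vertices, SurfaceDataType.FacesConnectivity],
        surfaces=["inlet", "outlet"],
        flatten_connectivity=True,  # opt in to the flat connectivity format
    )
    scalar_request = ScalarFieldDataRequest(field_name="temperature", surfaces=["inlet"])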
@@ -215,25 +218,25 @@ class FieldDataSource(BaseFieldDataSource, ABC):
 
     This class defines the interface for retrieving field data based on user requests.
     In addition to the methods in `BaseFieldDataSource` it provides a method to create
-    new field
+    new field batch objects.
 
     Implementing classes should define:
     - A method to obtain surface IDs from user-provided surface names or numerical identifiers.
     - A method to retrieve field data based on a given request.
-    - A method to create new field
+    - A method to create new field batch.
 
     Subclasses must provide concrete implementations for all abstract methods.
     """
 
     @abstractmethod
-    def
-    """Create a new field
+    def new_batch(self):
+        """Create a new field batch."""
         pass
 
 
-class
+class FieldBatch(ABC):
     """
-    Abstract base class for handling field data
+    Abstract base class for handling field data batches.
 
     This class defines the interface for requesting field data based on user inputs
     and retrieving responses from the server. It provides abstract methods that allow
@@ -287,6 +290,120 @@ class FieldTransaction(ABC):
         pass
 
 
+class _SurfaceNames:
+    def __init__(self, allowed_surface_names):
+        self._allowed_surface_names = allowed_surface_names
+
+    def allowed_values(self):
+        """Lists available surface names."""
+        return list(self._allowed_surface_names())
+
+    def validate(self, surfaces: List[str]) -> bool:
+        """
+        Validate that the given surfaces are in the list of allowed surface names.
+
+        Parameters
+        ----------
+        surfaces : List[int]
+            A list of surface name strings to validate.
+
+        Returns
+        -------
+        bool
+            True if all surfaces are valid, False otherwise.
+            If any name is invalid, a warning is issued and validation stops early.
+        """
+        for surf in surfaces:
+            if surf not in self._allowed_surface_names():
+                warnings.warn(f"'{surf}' is not a valid surface name.")
+                return False
+        return True
+
+    def __call__(self):
+        return self._allowed_surface_names()
+
+
+class _SurfaceIds:
+    def __init__(self, allowed_surface_ids):
+        self._allowed_surface_ids = allowed_surface_ids
+
+    def allowed_values(self):
+        """Lists available surface ids."""
+        return self._allowed_surface_ids()
+
+    def validate(self, surface_ids: List[int]) -> bool:
+        """
+        Validate that the given surface IDs are in the list of allowed surface IDs.
+
+        Parameters
+        ----------
+        surface_ids : List[int]
+            A list of surface ID integers to validate.
+
+        Returns
+        -------
+        bool
+            True if all surface IDs are valid, False otherwise.
+            If any ID is invalid, a warning is issued and validation stops early.
+        """
+        for surf in surface_ids:
+            if surf not in self._allowed_surface_ids():
+                warnings.warn(f"'{surf}' is not a valid surface id.")
+                return False
+        return True
+
+    def __call__(self):
+        return self._allowed_surface_ids()
+
+
+class _Fields:
+    def __init__(self, available_field_names):
+        self._available_field_names = available_field_names
+
+    def is_active(self, field_name):
+        """Check whether a field is active in the given context."""
+        if _to_field_name_str(field_name) in self._available_field_names():
+            return True
+        return False
+
+    def allowed_values(self):
+        """Lists available scalar or vector field names."""
+        return list(self._available_field_names())
+
+    def __call__(self):
+        return self._available_field_names()
+
+
+class _ScalarFields(_Fields):
+    def __init__(self, available_field_names, field_info):
+        super().__init__(available_field_names)
+        self._field_info = field_info
+
+    def range(
+        self, field: str, node_value: bool = False, surface_ids: list[int] = None
+    ) -> list[float]:
+        """Get the range (minimum and maximum values) of the field.
+
+        Parameters
+        ----------
+        field: str
+            Field name
+        node_value: bool
+        surface_ids : List[int], optional
+            List of surface IDS for the surface data.
+
+        Returns
+        -------
+        List[float]
+        """
+        return self._field_info._get_scalar_field_range(field, node_value, surface_ids)
+
+
+class _VectorFields(_Fields):
+    def __init__(self, available_field_names):
+        super().__init__(available_field_names)
+
+
 class _AllowedNames:
     def __init__(
         self, field_info: BaseFieldInfo | None = None, info: dict | None = None
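A minimal sketch of the validation behaviour these helpers add, using a stand-in callable in place of a live field-info source:

    def allowed_names():
        # stand-in for the callable normally backed by a Fluent session
        return {"inlet": {}, "outlet": {}}

    surfaces = _SurfaceNames(allowed_names)
    surfaces.allowed_values()      # ['inlet', 'outlet']
    surfaces.validate(["inlet"])   # True
    surfaces.validate(["wall-7"])  # issues a warning and returns False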
@@ -334,7 +451,7 @@ class _AllowedFieldNames(_AllowedNames):
 
 class _AllowedSurfaceNames(_AllowedNames):
     def __call__(self, respect_data_valid: bool = True) -> List[str]:
-        return self._info if self._info else self._field_info.
+        return self._info if self._info else self._field_info._get_surfaces_info()
 
     def valid_name(self, surface_name: str) -> str:
         """Returns valid names.

@@ -362,7 +479,7 @@ class _AllowedSurfaceIDs(_AllowedNames):
         try:
             return [
                 info["surface_id"][0]
-                for _, info in self._field_info.
+                for _, info in self._field_info._get_surfaces_info().items()
             ]
         except (KeyError, IndexError):
             pass

@@ -380,7 +497,7 @@ class _AllowedScalarFieldNames(_AllowedFieldNames):
 
     def __call__(self, respect_data_valid: bool = True) -> List[str]:
         field_dict = (
-            self._info if self._info else self._field_info.
+            self._info if self._info else self._field_info._get_scalar_fields_info()
         )
         return (
             field_dict

@@ -402,7 +519,7 @@ class _AllowedVectorFieldNames(_AllowedFieldNames):
             self._info
             if self._info
             else (
-                self._field_info.
+                self._field_info._get_vector_fields_info()
                 if (not respect_data_valid or self._is_data_valid())
                 else []
             )
@@ -482,23 +599,14 @@ class PathlinesData:
 
 class _ReturnFieldData:
 
-    @staticmethod
-    def _get_faces_connectivity_data(data):
-        faces_data = []
-        i = 0
-        while i < len(data):
-            end = i + 1 + data[i]
-            faces_data.append(data[i + 1 : end])
-            i = end
-        return faces_data
-
     @staticmethod
     def _scalar_data(
         field_name: str,
-        surfaces: List[int | str],
+        surfaces: List[int | str | object],
         surface_ids: List[int],
         scalar_field_data: np.array,
     ) -> Dict[int | str, np.array]:
+        surfaces = get_surfaces_from_objects(surfaces)
         return {
             surface: scalar_field_data[surface_ids[count]][field_name]
             for count, surface in enumerate(surfaces)
@@ -507,23 +615,36 @@ class _ReturnFieldData:
     @staticmethod
     def _surface_data(
         data_types: List[SurfaceDataType],
-        surfaces: List[int | str],
+        surfaces: List[int | str | object],
         surface_ids: List[int],
         surface_data: np.array | List[np.array],
         deprecated_flag: bool | None = False,
+        flatten_connectivity: bool = False,
     ) -> Dict[int | str, Dict[SurfaceDataType, np.array | List[np.array]]]:
+        surfaces = get_surfaces_from_objects(surfaces)
         ret_surf_data = {}
        for count, surface in enumerate(surfaces):
             ret_surf_data[surface] = {}
             for data_type in data_types:
                 if data_type == SurfaceDataType.FacesConnectivity:
+                    if flatten_connectivity:
+                        ret_surf_data[surface][data_type] = surface_data[
+                            surface_ids[count]
+                        ][SurfaceDataType.FacesConnectivity.value]
+                    else:
+                        warnings.warn(
+                            "Structured face connectivity output is deprecated and will be replaced by the flat format "
+                            "in a future release. In the current release, pass 'flatten_connectivity=True' argument while creating the "
+                            "'SurfaceFieldDataRequest' to request data in the flat format.",
+                            PyFluentDeprecationWarning,
+                        )
+                        ret_surf_data[surface][data_type] = (
+                            _transform_faces_connectivity_data(
+                                surface_data[surface_ids[count]][
+                                    SurfaceDataType.FacesConnectivity.value
+                                ]
+                            )
                         )
-                    )
                 else:
                     ret_surf_data[surface][data_type] = surface_data[
                         surface_ids[count]
@@ -535,10 +656,11 @@ class _ReturnFieldData:
     @staticmethod
     def _vector_data(
         field_name: str,
-        surfaces: List[int | str],
+        surfaces: List[int | str | object],
         surface_ids: List[int],
         vector_field_data: np.array,
     ) -> Dict[int | str, np.array]:
+        surfaces = get_surfaces_from_objects(surfaces)
         return {
             surface: vector_field_data[surface_ids[count]][field_name].reshape(-1, 3)
             for count, surface in enumerate(surfaces)
@@ -547,20 +669,32 @@ class _ReturnFieldData:
     @staticmethod
     def _pathlines_data(
         field_name: str,
-        surfaces: List[int | str],
+        surfaces: List[int | str | object],
         surface_ids: List[int],
         pathlines_data: Dict,
         deprecated_flag: bool | None = False,
+        flatten_connectivity: bool = False,
     ) -> Dict:
+        surfaces = get_surfaces_from_objects(surfaces)
         path_lines_dict = {}
         for count, surface in enumerate(surfaces):
+            if flatten_connectivity:
+                lines_data = pathlines_data[surface_ids[count]]["lines"]
+            else:
+                warnings.warn(
+                    "Structured face connectivity output is deprecated and will be replaced by the flat format "
+                    "in a future release. In the current release, pass 'flatten_connectivity=True' argument while creating the "
+                    "'SurfaceFieldDataRequest' to request data in the flat format.",
+                    PyFluentDeprecationWarning,
+                )
+                lines_data = _transform_faces_connectivity_data(
+                    pathlines_data[surface_ids[count]]["lines"]
+                )
             temp_dict = {
                 "vertices": pathlines_data[surface_ids[count]]["vertices"].reshape(
                     -1, 3
                 ),
-                "lines":
-                pathlines_data[surface_ids[count]]["lines"]
-                ),
+                "lines": lines_data,
                 field_name: pathlines_data[surface_ids[count]][field_name],
                 "pathlines-count": pathlines_data[surface_ids[count]][
                     "pathlines-count"
@@ -575,3 +709,70 @@ class _ReturnFieldData:
         else:
             path_lines_dict[surface] = temp_dict
         return path_lines_dict
+
+
+def get_surfaces_from_objects(surfaces: List[int | str | object]):
+    """
+    Extract surface names or identifiers from a list of surfaces.
+
+    Parameters
+    ----------
+    surfaces : List[int | str | object]
+        A list of surface identifiers, which may include:
+        - integers or strings representing surface names/IDs,
+        - objects with a callable `name()` method,
+        - or iterables (e.g., lists or tuples) containing such elements.
+
+    Returns
+    -------
+    List
+        A flattened list of surface names/identifiers:
+        - If an element has a `name()` method, the result of `surface.name()` is used.
+        - Otherwise, the element itself is returned as-is.
+    """
+    updated_surfaces = []
+    for surface in surfaces:
+        if hasattr(surface, "name"):
+            updated_surfaces.append(surface.name())
+        else:
+            updated_surfaces.append(surface)
+    return updated_surfaces
+
+
+def _transform_faces_connectivity_data(data):
+    """
+    Transform flat face connectivity data into structured face-wise format.
+
+    Each face in the flat array is represented by:
+    [N, v0, v1, ..., vN], where:
+    - N is the number of vertices in the face
+    - v0...vN are the vertex indices
+
+    This function parses such a flat array and returns a list of vertex index arrays,
+    each representing a face.
+
+    Parameters
+    ----------
+    data : array-like of int
+        Flat array containing face connectivity data, typically returned from
+        `faces_connectivity_data["inlet"].connectivity`.
+
+    Returns
+    -------
+    faces_data : list of ndarray
+        List of 1D NumPy arrays, where each array contains the vertex indices
+        of a face.
+
+    Examples
+    --------
+    >>> flat_data = np.array([4, 4, 5, 12, 11, 3, 1, 2, 3], dtype=np.int32)
+    >>> _transform_faces_connectivity_data(flat_data)
+    [array([ 4, 5, 12, 11]), array([1, 2, 3])]
+    """
+    faces_data = []
+    i = 0
+    while i < len(data):
+        end = i + 1 + data[i]
+        faces_data.append(data[i + 1 : end])
+        i = end
+    return faces_data