ansys-fluent-core 0.33.dev0__py3-none-any.whl → 0.33.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ansys-fluent-core might be problematic.
- ansys/fluent/core/__init__.py +11 -4
- ansys/fluent/core/codegen/builtin_settingsgen.py +17 -9
- ansys/fluent/core/codegen/datamodelgen.py +2 -2
- ansys/fluent/core/codegen/print_fluent_version.py +4 -4
- ansys/fluent/core/codegen/settingsgen.py +1 -2
- ansys/fluent/core/exceptions.py +0 -1
- ansys/fluent/core/file_session.py +9 -9
- ansys/fluent/core/filereader/case_file.py +2 -2
- ansys/fluent/core/fluent_connection.py +5 -5
- ansys/fluent/core/generated/api_tree/api_objects.json +1 -1
- ansys/fluent/core/generated/datamodel_231/flicing.py +40 -40
- ansys/fluent/core/generated/datamodel_231/meshing.py +182 -182
- ansys/fluent/core/generated/datamodel_232/flicing.py +55 -55
- ansys/fluent/core/generated/datamodel_232/meshing.py +174 -174
- ansys/fluent/core/generated/datamodel_241/flicing.py +20 -20
- ansys/fluent/core/generated/datamodel_241/meshing.py +283 -283
- ansys/fluent/core/generated/datamodel_242/flicing.py +40 -40
- ansys/fluent/core/generated/datamodel_242/meshing.py +308 -308
- ansys/fluent/core/generated/datamodel_251/flicing.py +35 -35
- ansys/fluent/core/generated/datamodel_251/meshing.py +295 -295
- ansys/fluent/core/generated/datamodel_251/part_management.py +6 -6
- ansys/fluent/core/generated/datamodel_252/flicing.py +25 -25
- ansys/fluent/core/generated/datamodel_252/meshing.py +424 -424
- ansys/fluent/core/generated/datamodel_252/part_management.py +5 -5
- ansys/fluent/core/generated/datamodel_261/flicing.py +40 -40
- ansys/fluent/core/generated/datamodel_261/meshing.py +443 -425
- ansys/fluent/core/generated/datamodel_261/meshing_utilities.py +296 -616
- ansys/fluent/core/generated/datamodel_261/part_management.py +10 -10
- ansys/fluent/core/generated/datamodel_261/preferences.py +7 -0
- ansys/fluent/core/generated/datamodel_261/solver_workflow.py +7 -0
- ansys/fluent/core/generated/fluent_version_252.py +1 -1
- ansys/fluent/core/generated/fluent_version_261.py +3 -3
- ansys/fluent/core/generated/meshing/tui_261.py +1137 -1177
- ansys/fluent/core/generated/solver/settings_252.py +55 -55
- ansys/fluent/core/generated/solver/settings_261.py +4147 -4698
- ansys/fluent/core/generated/solver/settings_261.pyi +3467 -5578
- ansys/fluent/core/generated/solver/settings_builtin.py +57 -1
- ansys/fluent/core/generated/solver/settings_builtin.pyi +79 -0
- ansys/fluent/core/generated/solver/tui_261.py +2625 -2771
- ansys/fluent/core/launcher/container_launcher.py +7 -9
- ansys/fluent/core/launcher/fluent_container.py +107 -75
- ansys/fluent/core/launcher/launch_options.py +22 -1
- ansys/fluent/core/launcher/launcher.py +5 -4
- ansys/fluent/core/launcher/slurm_launcher.py +1 -0
- ansys/fluent/core/pyfluent_warnings.py +13 -0
- ansys/fluent/core/search.py +170 -83
- ansys/fluent/core/services/app_utilities.py +52 -32
- ansys/fluent/core/services/datamodel_se.py +6 -3
- ansys/fluent/core/services/scheme_eval.py +2 -0
- ansys/fluent/core/services/solution_variables.py +64 -49
- ansys/fluent/core/session.py +36 -30
- ansys/fluent/core/session_base_meshing.py +2 -24
- ansys/fluent/core/session_shared.py +5 -2
- ansys/fluent/core/session_solver.py +15 -9
- ansys/fluent/core/solver/__init__.py +1 -1
- ansys/fluent/core/solver/flobject.py +62 -64
- ansys/fluent/core/solver/settings_builtin_bases.py +14 -7
- ansys/fluent/core/solver/settings_builtin_data.py +121 -540
- ansys/fluent/core/utils/context_managers.py +0 -17
- ansys/fluent/core/utils/fluent_version.py +173 -0
- {ansys_fluent_core-0.33.dev0.dist-info → ansys_fluent_core-0.33.1.dist-info}/METADATA +8 -8
- {ansys_fluent_core-0.33.dev0.dist-info → ansys_fluent_core-0.33.1.dist-info}/RECORD +64 -82
- {ansys_fluent_core-0.33.dev0.dist-info → ansys_fluent_core-0.33.1.dist-info}/WHEEL +1 -1
- ansys/fluent/core/generated/datamodel_222/meshing.py +0 -6332
- ansys/fluent/core/generated/datamodel_222/part_management.py +0 -2072
- ansys/fluent/core/generated/datamodel_222/pm_file_management.py +0 -290
- ansys/fluent/core/generated/datamodel_222/preferences.py +0 -2449
- ansys/fluent/core/generated/datamodel_222/workflow.py +0 -651
- ansys/fluent/core/generated/fluent_version_222.py +0 -5
- ansys/fluent/core/generated/meshing/tui_222.py +0 -9649
- ansys/fluent/core/generated/solver/settings_222.py +0 -29473
- ansys/fluent/core/generated/solver/settings_222.pyi +0 -19590
- ansys/fluent/core/generated/solver/tui_222.py +0 -43451
- ansys/fluent/core/post_objects/__init__.py +0 -21
- ansys/fluent/core/post_objects/check_in_notebook.py +0 -35
- ansys/fluent/core/post_objects/meta.py +0 -926
- ansys/fluent/core/post_objects/post_helper.py +0 -178
- ansys/fluent/core/post_objects/post_object_definitions.py +0 -693
- ansys/fluent/core/post_objects/post_objects_container.py +0 -248
- ansys/fluent/core/post_objects/singleton_meta.py +0 -42
- ansys/fluent/core/post_objects/timing_decorator.py +0 -38
- {ansys_fluent_core-0.33.dev0.dist-info → ansys_fluent_core-0.33.1.dist-info/licenses}/LICENSE +0 -0
ansys/fluent/core/search.py
CHANGED
@@ -30,7 +30,6 @@ import os
 from pathlib import Path
 import pickle
 import re
-import warnings

 import ansys.fluent.core as pyfluent
 from ansys.fluent.core.solver.error_message import closest_allowed_names
@@ -54,16 +53,6 @@ def get_api_tree_file_name(version: str) -> Path:
     return (CODEGEN_OUTDIR / f"api_tree_{version}.pickle").resolve()


-def _match(source: str, word: str, match_whole_word: bool, match_case: bool):
-    if not match_case:
-        source = source.lower()
-        word = word.lower()
-    if match_whole_word:
-        return source == word
-    else:
-        return word in source
-
-
 def _remove_suffix(input: str, suffix):
     if hasattr(input, "removesuffix"):
         return input.removesuffix(suffix)
@@ -73,9 +62,6 @@ def _remove_suffix(input: str, suffix):
     return input


-_meshing_rules = ["workflow", "meshing", "PartManagement", "PMFileManagement"]
-
-
 def _generate_api_data(
     version: str | None = None,
 ):
@@ -118,6 +104,11 @@ def _generate_api_data(
         next_path = f"{path}.{k}"
         type_ = "Object" if isinstance(v, Mapping) else v
         api_object_names.add(k)
+        next_path = (
+            next_path.replace("MeshingUtilities", "meshing_utilities")
+            if "MeshingUtilities" in next_path
+            else next_path
+        )
         if "tui" in next_path:
             api_tui_objects.add(f"{next_path} ({type_})")
         else:
@@ -172,37 +163,70 @@ def _get_api_tree_data():
     return api_tree_data


-def _print_search_results(
-
+def _print_search_results(
+    queries: list, api_tree_data: dict | None = None, api_path: str | None = None
+):
+    """
+    Print search results.

     Parameters
     ----------
-    queries: list
-        List of search string to match API object names.
-    api_tree_data: dict
-
+    queries : list
+        List of search strings or (string, score) tuples to match against API object names.
+    api_tree_data : dict, optional
+        The full API tree data, containing 'api_objects' and 'api_tui_objects'.
+        If None, it is retrieved using _get_api_tree_data().
+    api_path : str, optional
+        Specific path to restrict the search to. If None, searches the entire object hierarchy.
     """
-
-
-
+    api_tree_data = api_tree_data or _get_api_tree_data()
+    api_sources = [api_tree_data["api_objects"], api_tree_data["api_tui_objects"]]
+
+    def has_query(query, substrings):
+        """Check if query is present via dot or underscore notation."""
+        return any(
+            s.startswith(query)
+            or ("_" in s and (f"_{query}" in s or f"_{query}_" in s))
+            for s in substrings
+        )

-    def
-    results =
-
-
-
-
-
+    def extract_results(api_data):
+        results = set()
+
+        for api_object in api_data:
+            target = api_object
+            if api_path:
+                start = api_object.find(api_path)
+                if start == -1:
+                    continue
+                target = api_object[start:]
+
+            first_token = target.split()[0]
+            substrings = first_token.split(".")
+
+            for query in queries:
+                if isinstance(query, tuple):
+                    name, score = query
+                    if name in first_token and has_query(name, substrings):
+                        results.add((api_object, round(score, 2)))
+                else:
+                    if query in first_token and has_query(query, substrings):
+                        results.add(api_object)

-
-    tui_results = _get_results(api_tree_datas[1])
+        return sorted(results)

-
-
+    all_results = []
+    final_results = []
+    for source in api_sources:
+        all_results.extend(extract_results(source))

-
-
+    if all_results and isinstance(queries[0], tuple):
+        all_results = sorted(all_results, key=lambda item: item[1], reverse=True)
+        final_results.extend(
+            [f"{api_name} (similarity: {score}%)" for api_name, score in all_results]
+        )

+    results = final_results or all_results
     if pyfluent.PRINT_SEARCH_RESULTS:
         for result in results:
             print(result)
@@ -230,26 +254,49 @@ def _get_wildcard_matches_for_word_from_names(word: str, names: list):
     return [name for name in names if regex.match(name)]


-def _search_wildcard(
+def _search_wildcard(
+    search_string: str | list[tuple[str, float]],
+    api_tree_data: dict,
+    api_path: str | None = None,
+):
     """Perform wildcard search for a word through the Fluent's object hierarchy.

     Parameters
     ----------
-    search_string: str
+    search_string: str | list[(str, float)]
         Word to search for. Semantic search is default.
     api_tree_data: dict
         All API object data.
+    api_path: str, optional
+        The API path to search in. The default is ``None``. If ``None``, it searches in the whole
+        Fluent's object hierarchy.

     Returns
     -------
     List of search string matches.
     """
-    api_tree_data = api_tree_data
-
-
-
+    api_tree_data = api_tree_data or _get_api_tree_data()
+    all_names = api_tree_data["all_api_object_names"]
+    queries = []
+
+    def add_matches(word: str, score: float | None = None):
+        matches = _get_wildcard_matches_for_word_from_names(word, names=all_names)
+        if matches:
+            if score is not None:
+                queries.extend((match, score) for match in matches)
+            else:
+                queries.extend(matches)
+
+    if isinstance(search_string, str):
+        add_matches(search_string)
+    elif isinstance(search_string, list):
+        for word, score in search_string:
+            add_matches(word, score)
+
     if queries:
-        return _print_search_results(
+        return _print_search_results(
+            queries, api_tree_data=api_tree_data, api_path=api_path
+        )


 def _get_exact_match_for_word_from_names(
@@ -343,6 +390,7 @@ def _search_whole_word(
     match_case: bool = False,
     match_whole_word: bool = True,
     api_tree_data: dict = None,
+    api_path: str | None = None,
 ):
     """Perform exact search for a word through the Fluent's object hierarchy.

@@ -358,6 +406,9 @@ def _search_whole_word(
         If ``True``, it matches the given word, and it's capitalize case.
     api_tree_data: dict
         All API object data.
+    api_path: str, optional
+        The API path to search in. The default is ``None``. If ``None``, it searches in the whole
+        Fluent's object hierarchy.

     Returns
     -------
@@ -401,7 +452,9 @@ def _search_whole_word(
             )
         )
     if queries:
-        return _print_search_results(
+        return _print_search_results(
+            queries, api_tree_data=api_tree_data, api_path=api_path
+        )


 def _download_nltk_data():
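For reference, here is a standalone sketch (not part of the package) of the dot/underscore matching rule that the new has_query helper above applies to each dotted API path. The helper body mirrors the hunk; the sample path and the printed results are illustrative only.

def has_query(query: str, substrings: list[str]) -> bool:
    # A query matches if any dotted component starts with it, or if it appears
    # as an underscore-delimited token inside a component.
    return any(
        s.startswith(query) or ("_" in s and (f"_{query}" in s or f"_{query}_" in s))
        for s in substrings
    )

path = "<solver_session>.setup.dynamic_mesh.methods.smoothing"  # made-up example path
substrings = path.split(".")
print(has_query("smoothing", substrings))  # True: a component starts with the query
print(has_query("mesh", substrings))       # True: "_mesh" occurs inside "dynamic_mesh"
print(has_query("font", substrings))       # False: no component matches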
@@ -426,7 +479,41 @@ def _download_nltk_data():
         )


-def
+def _are_words_semantically_close(query, api_name, language="eng"):
+    from nltk.corpus import wordnet as wn
+
+    similarity_threshold = (
+        3.2 if language == "eng" else 0.8
+    )  # Max values are 3.7 and 1.0 respectively
+    max_similarity = 0.0
+
+    synsets1 = wn.synsets(query, lang=language)
+    synsets2 = wn.synsets(api_name, lang="eng")
+
+    for syn1 in synsets1:
+        for syn2 in synsets2:
+            if syn1.pos() == syn2.pos():
+                similarity = (
+                    syn1.lch_similarity(syn2)  # Leacock–Chodorow similarity
+                    if language == "eng"
+                    else syn1.wup_similarity(syn2)  # Wu–Palmer similarity
+                )
+                if similarity is not None:
+                    max_similarity = max(max_similarity, similarity)
+                    if similarity >= similarity_threshold:
+                        score = (
+                            (similarity / 3.7) * 100
+                            if language == "eng"
+                            else similarity * 100
+                        )
+                        return True, score
+
+    return False, 0
+
+
+def _search_semantic(
+    search_string: str, language: str, api_tree_data: dict, api_path: str | None = None
+):
     """Perform semantic search for a word through the Fluent's object hierarchy.

     Parameters
@@ -445,26 +532,26 @@ def _search_semantic(search_string: str, language: str, api_tree_data: dict):
     queries: list
         List of search string matches.
     """
-    from nltk.corpus import wordnet as wn
-
     api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data()
     similar_keys = set()
-
-    for api_object_name
-
-
-
-
-
-
-
-
+    api_object_names = api_tree_data["all_api_object_names"]
+    for api_object_name in api_object_names:
+        api_obj_name = (
+            api_object_name.replace("_", " ")
+            if "_" in api_object_name
+            else api_object_name
+        )
+        is_similar, score = _are_words_semantically_close(
+            search_string, api_obj_name, language=language
+        )
+        if is_similar:
+            similar_keys.add((api_object_name + "*", score))
     if similar_keys:
+        sorted_similar_keys = sorted(similar_keys)
         results = []
-
-
-
-        results.extend(result)
+        result = _search_wildcard(sorted_similar_keys, api_tree_data, api_path=api_path)
+        if result:
+            results.extend(result)
     if results:
         return results
     else:
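A minimal sketch of the WordNet scoring idea used by _are_words_semantically_close, assuming the NLTK wordnet corpus is already downloaded. The 3.2 threshold and the 3.7 maximum come from the hunk above; the helper name best_lch_similarity and the sample words are illustrative only.

from nltk.corpus import wordnet as wn

def best_lch_similarity(word_a: str, word_b: str) -> float:
    # Take the highest Leacock-Chodorow similarity over all same-POS synset pairs.
    best = 0.0
    for syn1 in wn.synsets(word_a):
        for syn2 in wn.synsets(word_b):
            if syn1.pos() == syn2.pos():  # lch_similarity requires matching POS
                sim = syn1.lch_similarity(syn2)
                if sim is not None:
                    best = max(best, sim)
    return best

score = best_lch_similarity("read", "import")  # sample words
print(score, "close" if score >= 3.2 else "not close")  # 3.2 is the English threshold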
@@ -473,15 +560,17 @@ def _search_semantic(search_string: str, language: str, api_tree_data: dict):
             names=api_tree_data["all_api_object_names"],
         )
         if queries:
-            return _print_search_results(
+            return _print_search_results(
+                queries, api_tree_data=api_tree_data, api_path=api_path
+            )


 def search(
     search_string: str,
     language: str | None = "eng",
-    wildcard: bool | None = False,
     match_whole_word: bool = False,
     match_case: bool | None = True,
+    api_path: str | None = None,
 ):
     """Search for a word through the Fluent's object hierarchy.

@@ -493,52 +582,49 @@ def search(
         ISO 639-3 code for the language to use for the semantic search.
         The default is ``eng`` for English. For the list of supported languages,
         see `OMW Version 1 <https://omwn.org/omw1.html>`_.
-    wildcard: bool, optional
-        Whether to use the wildcard pattern. The default is ``False``. If ``True``, the
-        wildcard pattern is based on the ``fnmatch`` module and semantic matching
-        is turned off.
     match_whole_word: bool, optional
         Whether to find only exact matches. The default is ``False``. If ``True``,
         only exact matches are found and semantic matching is turned off.
     match_case: bool, optional
         Whether to match case. The default is ``True``. If ``False``, the search is case-insensitive.
+    api_path: str, optional
+        The API path to search in. The default is ``None``. If ``None``, it searches in the whole
+        Fluent's object hierarchy.

     Examples
     --------
     >>> import ansys.fluent.core as pyfluent
     >>> pyfluent.search("font", match_whole_word=True)
     >>> pyfluent.search("Font")
-    >>> pyfluent.search("
+    >>> pyfluent.search("local*", api_path="<solver_session>.setup")
+    <solver_session>.setup.dynamic_mesh.methods.smoothing.radial_settings.local_smoothing (Parameter)
+    <solver_session>.setup.mesh_interfaces.interface["<name>"].local_absolute_mapped_tolerance (Parameter)
+    <solver_session>.setup.mesh_interfaces.interface["<name>"].local_relative_mapped_tolerance (Parameter)
     >>> pyfluent.search("读", language="cmn")  # search 'read' in Chinese
-    The most similar API objects are:
     <solver_session>.file.read (Command)
     <solver_session>.file.import_.read (Command)
     <solver_session>.mesh.surface_mesh.read (Command)
     <solver_session>.tui.display.display_states.read (Command)
     <meshing_session>.tui.display.display_states.read (Command)
     """
-    if (wildcard and match_whole_word) or (wildcard and match_case):
-        warnings.warn(
-            "``wildcard=True`` matches wildcard pattern.",
-            UserWarning,
-        )
-    elif language and wildcard:
-        warnings.warn(
-            "``wildcard=True`` matches wildcard pattern.",
-            UserWarning,
-        )

     api_tree_data = _get_api_tree_data()

-
+    wildcard_pattern = re.compile(r"[*?\[\]]")
+
+    if bool(wildcard_pattern.search(search_string)):
         return _search_wildcard(
             search_string,
             api_tree_data=api_tree_data,
+            api_path=api_path,
         )
     elif match_whole_word:
         if not match_case:
             return _search_whole_word(
-                search_string,
+                search_string,
+                match_whole_word=True,
+                api_tree_data=api_tree_data,
+                api_path=api_path,
             )
         else:
             return _search_whole_word(
@@ -546,16 +632,17 @@ def search(
                 match_case=True,
                 match_whole_word=True,
                 api_tree_data=api_tree_data,
+                api_path=api_path,
             )
     else:
         try:
             return _search_semantic(
-                search_string, language, api_tree_data=api_tree_data
+                search_string, language, api_tree_data=api_tree_data, api_path=api_path
             )
         except ModuleNotFoundError:
            pass
         except LookupError:
             _download_nltk_data()
             return _search_semantic(
-                search_string, language, api_tree_data=api_tree_data
+                search_string, language, api_tree_data=api_tree_data, api_path=api_path
             )
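A hedged usage sketch of the updated search() signature, based on the docstring examples in the hunks above; actual output depends on the Fluent version and the generated API tree.

import ansys.fluent.core as pyfluent

# Wildcard query limited to the settings branch given via the new api_path argument
pyfluent.search("local*", api_path="<solver_session>.setup")

# Whole-word, case-insensitive lookup across the full hierarchy
pyfluent.search("font", match_whole_word=True, match_case=False)

# Semantic search in another language (ISO 639-3 code), e.g. 'read' in Chinese
pyfluent.search("读", language="cmn")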
ansys/fluent/core/services/app_utilities.py
CHANGED
@@ -22,6 +22,7 @@

 """Wrappers over AppUtilities gRPC service of Fluent."""

+from dataclasses import dataclass
 from enum import Enum
 from typing import List, Tuple

@@ -150,6 +151,25 @@ class AppUtilitiesService:
         return self._stub.SetWorkingDirectory(request, metadata=self._metadata)


+@dataclass
+class ProcessInfo:
+    """ProcessInfo dataclass to hold process information."""
+
+    process_id: int
+    hostname: str
+    working_directory: str
+
+
+@dataclass
+class BuildInfo:
+    """BuildInfo dataclass to hold build information."""
+
+    build_time: str
+    build_id: str
+    vcs_revision: str
+    vcs_branch: str
+
+
 class AppUtilitiesOld:
     """AppUtilitiesOld."""

@@ -167,34 +187,34 @@ class AppUtilitiesOld:
         build_id = self.scheme.eval("(inquire-build-id)")
         vcs_revision = self.scheme.eval("(inquire-src-vcs-id)")
         vcs_branch = self.scheme.eval("(inquire-src-vcs-branch)")
-        return
-
-
-
-
-
+        return BuildInfo(
+            build_time=build_time,
+            build_id=build_id,
+            vcs_revision=vcs_revision,
+            vcs_branch=vcs_branch,
+        )

     def get_controller_process_info(self) -> dict:
         """Get controller process info."""
         cortex_host = self.scheme.eval("(cx-cortex-host)")
         cortex_pid = self.scheme.eval("(cx-cortex-id)")
         cortex_pwd = self.scheme.eval("(cortex-pwd)")
-        return
-
-
-
-
+        return ProcessInfo(
+            process_id=cortex_pid,
+            hostname=cortex_host,
+            working_directory=cortex_pwd,
+        )

     def get_solver_process_info(self) -> dict:
         """Get solver process info."""
         fluent_host = self.scheme.eval("(cx-client-host)")
         fluent_pid = self.scheme.eval("(cx-client-id)")
         fluent_pwd = self.scheme.eval("(cx-send '(cx-client-pwd))")
-        return
-
-
-
-
+        return ProcessInfo(
+            process_id=fluent_pid,
+            hostname=fluent_host,
+            working_directory=fluent_pwd,
+        )

     def get_app_mode(self) -> Enum:
         """Get app mode."""
@@ -312,32 +332,32 @@ class AppUtilities:
         """Get build info."""
         request = AppUtilitiesProtoModule.GetBuildInfoRequest()
         response = self.service.get_build_info(request)
-        return
-
-
-
-
-
+        return BuildInfo(
+            build_time=response.build_time,
+            build_id=response.build_id,
+            vcs_revision=response.vcs_revision,
+            vcs_branch=response.vcs_branch,
+        )

     def get_controller_process_info(self) -> dict:
         """Get controller process info."""
         request = AppUtilitiesProtoModule.GetControllerProcessInfoRequest()
         response = self.service.get_controller_process_info(request)
-        return
-
-
-
-
+        return ProcessInfo(
+            process_id=response.process_id,
+            hostname=response.hostname,
+            working_directory=response.working_directory,
+        )

     def get_solver_process_info(self) -> dict:
         """Get solver process info."""
         request = AppUtilitiesProtoModule.GetSolverProcessInfoRequest()
         response = self.service.get_solver_process_info(request)
-        return
-
-
-
-
+        return ProcessInfo(
+            process_id=response.process_id,
+            hostname=response.hostname,
+            working_directory=response.working_directory,
+        )

     def get_app_mode(self) -> Enum:
         """Get app mode.
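For illustration, a local restatement of the BuildInfo dataclass added above, showing why attribute access replaces the old dict-style returns; the field values below are placeholders, not real build metadata.

from dataclasses import asdict, dataclass

@dataclass
class BuildInfo:
    """Mirror of the dataclass added in app_utilities.py (illustrative copy)."""

    build_time: str
    build_id: str
    vcs_revision: str
    vcs_branch: str

# Dataclass instances replace the plain dicts returned previously, so fields are
# reached by attribute rather than by key.
info = BuildInfo("placeholder-time", "placeholder-id", "placeholder-rev", "main")
print(info.build_id)             # attribute access
print(asdict(info)["build_id"])  # still convertible to a dict when needed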
ansys/fluent/core/services/datamodel_se.py
CHANGED
@@ -1970,9 +1970,12 @@ class PyCommand:
                 id,
             ]
         # Possible error thrown from the grpc layer
-        except (RuntimeError, ValueError):
+        except (RuntimeError, ValueError) as e:
             logger.warning(
-                "
+                "datamodels_se.PyCommand was unable to construct command arguments. "
+                "This may be due to gRPC issues or unsupported Fluent version (23.1+ required). "
+                "Error details: %s",
+                e,
             )

     def create_instance(self) -> "PyCommandArguments":
@@ -2225,7 +2228,7 @@ arg_class_by_type = {
         ["String", "ListString", "String List"], PyTextualCommandArgumentsSubItem
     ),
     **dict.fromkeys(
-        ["Real", "Int", "ListReal", "Real List", "Integer", "ListInt"],
+        ["Real", "Int", "ListReal", "Real List", "Integer", "ListInt", "Integer List"],
         PyNumericalCommandArgumentsSubItem,
     ),
     "Dict": PyDictionaryCommandArgumentsSubItem,
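A small standalone sketch of the dict.fromkeys multi-key mapping pattern used by arg_class_by_type above, where every listed type name resolves to the same handler class. The stand-in classes are hypothetical; only the type-name lists mirror the hunk.

class TextualArg: ...     # hypothetical stand-in for PyTextualCommandArgumentsSubItem
class NumericalArg: ...   # hypothetical stand-in for PyNumericalCommandArgumentsSubItem

arg_class_by_type = {
    **dict.fromkeys(["String", "ListString", "String List"], TextualArg),
    **dict.fromkeys(
        ["Real", "Int", "ListReal", "Real List", "Integer", "ListInt", "Integer List"],
        NumericalArg,
    ),
}
# The newly added "Integer List" alias now maps to the numerical handler too.
assert arg_class_by_type["Integer List"] is NumericalArg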
ansys/fluent/core/services/scheme_eval.py
CHANGED
@@ -43,6 +43,7 @@ from ansys.api.fluent.v0.scheme_pointer_pb2 import SchemePointer
 from ansys.fluent.core.services.interceptors import (
     BatchInterceptor,
     ErrorStateInterceptor,
+    GrpcErrorInterceptor,
     TracingInterceptor,
 )
 from ansys.fluent.core.utils.fluent_version import FluentVersion
@@ -60,6 +61,7 @@ class SchemeEvalService:
         """__init__ method of SchemeEvalService class."""
         intercept_channel = grpc.intercept_channel(
             channel,
+            GrpcErrorInterceptor(),
             ErrorStateInterceptor(fluent_error_state),
             TracingInterceptor(),
             BatchInterceptor(),