ansys-fluent-core 0.28.dev0__py3-none-any.whl → 0.28.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
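For readers who want to reproduce such a wheel-to-wheel comparison locally, the sketch below is a minimal illustration (not the tool that produced this page): it opens both wheels with the Python standard library and prints a unified diff of the files they share. The wheel file names are assumed to follow the usual ansys_fluent_core-<version>-py3-none-any.whl pattern and can be fetched beforehand, for example with pip download ansys-fluent-core==0.28.1 --no-deps.

# Minimal sketch: compare two downloaded wheels of ansys-fluent-core.
# File names below are assumptions; adjust them to the wheels actually downloaded.
import difflib
import zipfile

OLD = "ansys_fluent_core-0.28.dev0-py3-none-any.whl"
NEW = "ansys_fluent_core-0.28.1-py3-none-any.whl"

with zipfile.ZipFile(OLD) as old_whl, zipfile.ZipFile(NEW) as new_whl:
    old_names, new_names = set(old_whl.namelist()), set(new_whl.namelist())
    print("added:", sorted(new_names - old_names))
    print("removed:", sorted(old_names - new_names))
    # Unified diff for files present in both archives.
    for name in sorted(old_names & new_names):
        old_lines = old_whl.read(name).decode("utf-8", errors="replace").splitlines()
        new_lines = new_whl.read(name).decode("utf-8", errors="replace").splitlines()
        for line in difflib.unified_diff(old_lines, new_lines, fromfile=name, tofile=name, lineterm=""):
            print(line)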

Potentially problematic release: this version of ansys-fluent-core might be problematic.

Files changed (63)
  1. ansys/fluent/core/__init__.py +15 -16
  2. ansys/fluent/core/_version.py +1 -1
  3. ansys/fluent/core/codegen/allapigen.py +0 -3
  4. ansys/fluent/core/codegen/builtin_settingsgen.py +5 -20
  5. ansys/fluent/core/codegen/print_fluent_version.py +9 -14
  6. ansys/fluent/core/codegen/walk_api.py +57 -0
  7. ansys/fluent/core/fluent_connection.py +26 -22
  8. ansys/fluent/core/generated/api_tree/api_objects.json +1 -1
  9. ansys/fluent/core/generated/datamodel_252/meshing.py +21 -0
  10. ansys/fluent/core/generated/datamodel_252/preferences.py +7 -0
  11. ansys/fluent/core/generated/fluent_version_252.py +3 -3
  12. ansys/fluent/core/generated/meshing/tui_252.py +1183 -1133
  13. ansys/fluent/core/generated/solver/settings_252.py +8944 -6572
  14. ansys/fluent/core/generated/solver/settings_252.pyi +6357 -5352
  15. ansys/fluent/core/generated/solver/tui_252.py +3039 -2473
  16. ansys/fluent/core/journaling.py +4 -4
  17. ansys/fluent/core/launcher/fluent_container.py +31 -7
  18. ansys/fluent/core/launcher/launcher.py +3 -2
  19. ansys/fluent/core/launcher/launcher_utils.py +9 -0
  20. ansys/fluent/core/launcher/process_launch_string.py +8 -6
  21. ansys/fluent/core/launcher/pyfluent_enums.py +6 -3
  22. ansys/fluent/core/launcher/server_info.py +25 -2
  23. ansys/fluent/core/launcher/slurm_launcher.py +6 -3
  24. ansys/fluent/core/launcher/standalone_launcher.py +11 -9
  25. ansys/fluent/core/post_objects/post_helper.py +16 -10
  26. ansys/fluent/core/services/__init__.py +2 -0
  27. ansys/fluent/core/services/api_upgrade.py +11 -9
  28. ansys/fluent/core/services/app_utilities.py +408 -0
  29. ansys/fluent/core/services/datamodel_se.py +42 -5
  30. ansys/fluent/core/services/datamodel_tui.py +5 -2
  31. ansys/fluent/core/services/field_data.py +1 -0
  32. ansys/fluent/core/services/reduction.py +2 -0
  33. ansys/fluent/core/services/settings.py +5 -2
  34. ansys/fluent/core/session.py +27 -4
  35. ansys/fluent/core/session_pure_meshing.py +1 -1
  36. ansys/fluent/core/session_solver.py +0 -1
  37. ansys/fluent/core/solver/__init__.py +6 -0
  38. ansys/fluent/core/solver/flobject.py +15 -27
  39. ansys/fluent/core/solver/function/reduction.py +3 -0
  40. ansys/fluent/core/solver/settings_builtin_data.py +1 -1
  41. ansys/fluent/core/streaming_services/datamodel_event_streaming.py +5 -5
  42. ansys/fluent/core/streaming_services/events_streaming.py +336 -52
  43. ansys/fluent/tests/conftest.py +30 -0
  44. ansys/fluent/tests/test_builtin_settings.py +1 -1
  45. ansys/fluent/tests/test_codegen.py +0 -410
  46. ansys/fluent/tests/test_datamodel_api.py +449 -0
  47. ansys/fluent/tests/test_datamodel_service.py +64 -64
  48. ansys/fluent/tests/test_events_manager.py +24 -6
  49. ansys/fluent/tests/test_field_data.py +32 -0
  50. ansys/fluent/tests/test_launcher.py +30 -2
  51. ansys/fluent/tests/test_mapped_api.py +774 -0
  52. ansys/fluent/tests/test_reduction.py +30 -0
  53. ansys/fluent/tests/test_session.py +16 -1
  54. ansys/fluent/tests/test_settings_api.py +21 -0
  55. ansys/fluent/tests/test_solution_variables.py +27 -0
  56. ansys/fluent/tests/util/__init__.py +36 -0
  57. {ansys_fluent_core-0.28.dev0.dist-info → ansys_fluent_core-0.28.1.dist-info}/METADATA +4 -3
  58. {ansys_fluent_core-0.28.dev0.dist-info → ansys_fluent_core-0.28.1.dist-info}/RECORD +61 -58
  59. {ansys_fluent_core-0.28.dev0.dist-info → ansys_fluent_core-0.28.1.dist-info}/WHEEL +1 -1
  60. ansys/fluent/core/codegen/settingsgen_old.py +0 -535
  61. ansys/fluent/tests/fluent/test_version/test.py +0 -2
  62. {ansys_fluent_core-0.28.dev0.dist-info → ansys_fluent_core-0.28.1.dist-info}/AUTHORS +0 -0
  63. {ansys_fluent_core-0.28.dev0.dist-info → ansys_fluent_core-0.28.1.dist-info}/LICENSE +0 -0
@@ -1,535 +0,0 @@
- """Provide a module to generate the Fluent settings tree.
-
- Running this module generates a Python module with the definition of the Fluent
- settings classes. The output is placed at:
-
- - src/ansys/fluent/core/solver/settings.py
-
- Running this module requires Fluent to be installed.
-
- Process
- -------
- - Launch Fluent and get static info. Parse the class with flobject.get_cls()
- - Generate a dictionary of unique classes with their hash as a key and a tuple of cls, children hash, commands hash, arguments hash, child object type hash as value.
- - - This eliminates redundancy and only unique classes are written.
- - Generate .py files for the classes in hash dictionary. Resolve name conflicts with an integer suffix.
- - - Populate files dictionary with hash as key and file name as value.
- - - child_object_type is handled specially to avoid many files with the same name and to provide more insight into the child.
- - Populate the classes.
- - - For writing the import statements, get the hash of the child/command/argument/named object stored in the hash dict tuple value.
- - - Use that hash to locate the corresponding child file name in the files dict.
-
- Usage
- -----
- python <path to settingsgen.py>
- """
-
- import hashlib
- import io
- import os
- from pathlib import Path
- import pickle
- import pprint
- import shutil
-
- import ansys.fluent.core as pyfluent
- from ansys.fluent.core import launch_fluent
- from ansys.fluent.core.codegen import StaticInfoType
- from ansys.fluent.core.solver import flobject
- from ansys.fluent.core.utils.fix_doc import fix_settings_doc
- from ansys.fluent.core.utils.fluent_version import get_version_for_file_name
-
- hash_dict = {}
- files_dict = {}
- root_class_path = ""
-
-
- def _gethash(obj_info):
-     dhash = hashlib.sha256()
-     dhash.update(pickle.dumps(obj_info))
-     return dhash.hexdigest()
-
-
- def _get_indent_str(indent):
-     return f"{' '*indent*4}"
-
-
- def _populate_hash_dict(name, info, cls, api_tree):
-     children = info.get("children")
-     if children:
-         children_hash = []
-         for cname, cinfo in children.items():
-             for child in getattr(cls, "child_names", None):
-                 child_cls = cls._child_classes[child]
-                 if cname == child_cls.fluent_name:
-                     api_tree[child] = {}
-                     children_hash.append(
-                         _populate_hash_dict(cname, cinfo, child_cls, api_tree[child])
-                     )
-                     okey = f"{child}:<name>"
-                     if okey in api_tree[child]:
-                         api_tree[child].update(api_tree[child][okey])
-                         del api_tree[child][okey]
-                         api_tree[okey] = api_tree.pop(child)
-                     else:
-                         api_tree[child] = api_tree[child] or "Parameter"
-                     break
-     else:
-         children_hash = None
-
-     commands = info.get("commands")
-     if commands:
-         commands_hash = []
-         for cname, cinfo in commands.items():
-             for command in getattr(cls, "command_names", None):
-                 command_cls = cls._child_classes[command]
-                 if cname == command_cls.fluent_name:
-                     api_tree[command] = "Command"
-                     commands_hash.append(
-                         _populate_hash_dict(cname, cinfo, command_cls, {})
-                     )
-                     break
-     else:
-         commands_hash = None
-
-     queries = info.get("queries")
-     if queries:
-         queries_hash = []
-         for qname, qinfo in queries.items():
-             for query in getattr(cls, "query_names", None):
-                 query_cls = cls._child_classes[query]
-                 if qname == query_cls.fluent_name:
-                     api_tree[query] = "Query"
-                     queries_hash.append(
-                         _populate_hash_dict(qname, qinfo, query_cls, {})
-                     )
-                     break
-     else:
-         queries_hash = None
-
-     arguments = info.get("arguments")
-     if arguments:
-         arguments_hash = []
-         for aname, ainfo in arguments.items():
-             for argument in getattr(cls, "argument_names", None):
-                 argument_cls = cls._child_classes[argument]
-                 if aname == argument_cls.fluent_name:
-                     arguments_hash.append(
-                         _populate_hash_dict(aname, ainfo, argument_cls, {})
-                     )
-                     break
-     else:
-         arguments_hash = None
-
-     object_type = info.get("object-type")
-     if object_type:
-         key = f"{cls.__name__}:<name>"
-         api_tree[key] = {}
-         object_hash = _populate_hash_dict(
-             "child-object-type",
-             object_type,
-             getattr(cls, "child_object_type", None),
-             api_tree[key],
-         )
-     else:
-         object_hash = None
-
-     cls_tuple = (
-         name,
-         cls.__name__,
-         cls.__bases__,
-         info["type"],
-         info.get("help"),
-         children_hash,
-         commands_hash,
-         queries_hash,
-         arguments_hash,
-         object_hash,
-     )
-     hash = _gethash(cls_tuple)
-     if not hash_dict.get(hash):
-         hash_dict[hash] = (
-             cls,
-             children_hash,
-             commands_hash,
-             queries_hash,
-             arguments_hash,
-             object_hash,
-         )
-     return hash
-
-
- class _CommandInfo:
-     def __init__(self, doc, args_info):
-         self.doc = doc
-         self.args_info = args_info
-
-
- _arg_type_strings = {
-     flobject.Boolean: "bool",
-     flobject.Integer: "int",
-     flobject.Real: "float | str",
-     flobject.String: "str",
-     flobject.Filename: "str",
-     flobject.BooleanList: "List[bool]",
-     flobject.IntegerList: "List[int]",
-     flobject.RealVector: "Tuple[float | str, float | str, float | str]",
-     flobject.RealList: "List[float | str]",
-     flobject.StringList: "List[str]",
-     flobject.FilenameList: "List[str]",
- }
-
-
- def _get_commands_info(commands_hash):
-     commands_info = {}
-     for command_hash in commands_hash:
-         command_hash_info = hash_dict.get(command_hash)
-         command_cls = command_hash_info[0]
-         command_name = command_cls.__name__
-         command_info = _CommandInfo(command_cls.__doc__, [])
-         if command_hash_info[4]:
-             for arg_hash in command_hash_info[4]:
-                 arg_hash_info = hash_dict.get(arg_hash)
-                 arg_cls = arg_hash_info[0]
-                 arg_name = arg_cls.__name__
-                 arg_type = _arg_type_strings[arg_cls.__bases__[0]]
-                 command_info.args_info.append(f"{arg_name}: {arg_type}")
-         commands_info[command_name] = command_info
-     return commands_info
-
-
- def _write_doc_string(doc, indent, writer):
-     doc = ("\n" + indent).join(doc.split("\n"))
-     writer.write(f'{indent}"""\n')
-     writer.write(f"{indent}{doc}")
-     writer.write(f'\n{indent}"""\n\n')
-
-
- def _populate_classes(parent_dir):
-     istr = _get_indent_str(0)
-     istr1 = _get_indent_str(1)
-     istr2 = _get_indent_str(2)
-     files = []
-     # generate files
-     for key, (
-         cls,
-         children_hash,
-         commands_hash,
-         queries_hash,
-         arguments_hash,
-         object_hash,
-     ) in hash_dict.items():
-         cls_name = file_name = cls.__name__
-         if cls_name == "child_object_type":
-             # Get the first parent for this class.
-             for (
-                 cls1,
-                 children_hash1,
-                 commands_hash1,
-                 queries_hash1,
-                 arguments_hash1,
-                 object_hash1,
-             ) in hash_dict.values():
-                 if key == object_hash1:
-                     cls.__name__ = file_name = cls1.__name__ + "_child"
-                     break
-         i = 0
-         while file_name in files:
-             if i > 0:
-                 file_name = file_name[: file_name.rfind("_")]
-             i += 1
-             file_name += "_" + str(i)
-         files.append(file_name)
-         files_dict[key] = file_name
-
-         # Store root class path for __init__.py
-         if cls_name == "root":
-             global root_class_path
-             root_class_path = file_name
-
-         file_name += ".py"
-         file_name = os.path.normpath(os.path.join(parent_dir, file_name))
-         with open(file_name, "w") as f:
-             f.write(f"name: {cls_name}")
-
-     # populate files
-     for key, (
-         cls,
-         children_hash,
-         commands_hash,
-         queries_hash,
-         arguments_hash,
-         object_hash,
-     ) in hash_dict.items():
-         file_name = files_dict.get(key)
-         cls_name = cls.__name__
-         file_name = os.path.normpath(os.path.join(parent_dir, file_name + ".py"))
-         stub_f = None
-         if not pyfluent.CODEGEN_ZIP_SETTINGS:
-             stub_file_name = file_name + "i"
-             stub_f = open(stub_file_name, "w")
-         with open(file_name, "w") as f:
-             # disclaimer to py file
-             f.write("#\n")
-             f.write("# This is an auto-generated file. DO NOT EDIT!\n")
-             f.write("#\n")
-             f.write("\n")
-             if stub_f:
-                 stub_f.write("#\n")
-                 stub_f.write("# This is an auto-generated file. DO NOT EDIT!\n")
-                 stub_f.write("#\n")
-                 stub_f.write("\n\n")
-
-             # write imports to py file
-             import_str = (
-                 "from ansys.fluent.core.solver.flobject import *\n\n"
-                 "from ansys.fluent.core.solver.flobject import (\n"
-                 f"{istr1}_ChildNamedObjectAccessorMixin,\n"
-                 f"{istr1}CreatableNamedObjectMixin,\n"
-                 f"{istr1}_NonCreatableNamedObjectMixin,\n"
-                 f"{istr1}AllowedValuesMixin,\n"
-                 f"{istr1}_InputFile,\n"
-                 f"{istr1}_OutputFile,\n"
-                 f"{istr1}_InOutFile,\n"
-                 ")\n\n"
-             )
-             f.write(import_str)
-             if stub_f:
-                 stub_f.write(import_str)
-                 stub_f.write("from typing import Union, List, Tuple\n\n")
-
-             if children_hash:
-                 for child in children_hash:
-                     pchild_name = hash_dict.get(child)[0].__name__
-                     import_str = f"from .{files_dict.get(child)} import {pchild_name} as {pchild_name}_cls\n"
-                     f.write(import_str)
-                     if stub_f:
-                         stub_f.write(import_str)
-
-             if commands_hash:
-                 for child in commands_hash:
-                     pchild_name = hash_dict.get(child)[0].__name__
-                     import_str = f"from .{files_dict.get(child)} import {pchild_name} as {pchild_name}_cls\n"
-                     f.write(import_str)
-                     if stub_f:
-                         stub_f.write(import_str)
-
-             if queries_hash:
-                 for child in queries_hash:
-                     pchild_name = hash_dict.get(child)[0].__name__
-                     import_str = f"from .{files_dict.get(child)} import {pchild_name} as {pchild_name}_cls\n"
-                     f.write(import_str)
-                     if stub_f:
-                         stub_f.write(import_str)
-
-             if arguments_hash:
-                 for child in arguments_hash:
-                     pchild_name = hash_dict.get(child)[0].__name__
-                     import_str = f"from .{files_dict.get(child)} import {pchild_name} as {pchild_name}_cls\n"
-                     f.write(import_str)
-                     if stub_f:
-                         stub_f.write(import_str)
-
-             if object_hash:
-                 pchild_name = hash_dict.get(object_hash)[0].__name__
-                 import_str = (
-                     f"from .{files_dict.get(object_hash)} import {pchild_name}\n\n"
-                 )
-                 f.write(import_str)
-                 if stub_f:
-                     stub_f.write(import_str)
-
-             # class name
-             class_def_str = (
-                 f"\n{istr}class {cls_name}"
-                 f'({", ".join(f"{c.__name__}[{hash_dict.get(object_hash)[0].__name__}]" if object_hash else c.__name__ for c in cls.__bases__)}):\n'
-             )
-             f.write(class_def_str)
-             if stub_f:
-                 stub_f.write(class_def_str)
-
-             doc = fix_settings_doc(cls.__doc__)
-             # Custom doc for child object type
-             if cls.fluent_name == "child-object-type":
-                 parent_name = Path(file_name).stem[
-                     0 : Path(file_name).stem.find("_child")
-                 ]
-                 doc = f"'child_object_type' of {parent_name}."
-
-             _write_doc_string(doc, istr1, f)
-             f.write(f'{istr1}fluent_name = "{cls.fluent_name}"\n')
-             f.write(f'{istr1}version = "{cls.version}"\n\n')
-             if stub_f:
-                 stub_f.write(f"{istr1}fluent_name = ...\n")
-                 stub_f.write(f"{istr1}version = ...\n\n")
-
-             child_class_strings = []
-
-             # write children objects
-             child_names = getattr(cls, "child_names", None)
-             if child_names:
-                 f.write(f"{istr1}child_names = \\\n")
-                 strout = io.StringIO()
-                 pprint.pprint(child_names, stream=strout, compact=True, width=70)
-                 mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n"))
-                 f.write(f"{istr2}{mn}\n\n")
-                 if stub_f:
-                     stub_f.write(f"{istr1}child_names = ...\n")
-
-                 for child in child_names:
-                     child_cls = cls._child_classes[child]
-                     child_class_strings.append(f"{child}={child_cls.__name__}_cls")
-                     if stub_f:
-                         stub_f.write(
-                             f"{istr1}{child}: {child_cls.__name__}_cls = ...\n"
-                         )
-
-             # write command objects
-             command_names = getattr(cls, "command_names", None)
-             if command_names:
-                 f.write(f"{istr1}command_names = \\\n")
-                 strout = io.StringIO()
-                 pprint.pprint(command_names, stream=strout, compact=True, width=70)
-                 mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n"))
-                 f.write(f"{istr2}{mn}\n\n")
-                 if stub_f:
-                     stub_f.write(f"{istr1}command_names = ...\n\n")
-
-                 commands_info = _get_commands_info(commands_hash)
-                 for command in command_names:
-                     command_cls = cls._child_classes[command]
-                     child_class_strings.append(f"{command}={command_cls.__name__}_cls")
-                     # function annotation for commands
-                     command_info = commands_info[command]
-                     if stub_f:
-                         stub_f.write(f"{istr1}def {command}(self, ")
-                         stub_f.write(", ".join(command_info.args_info))
-                         stub_f.write("):\n")
-                         _write_doc_string(command_info.doc, istr2, stub_f)
-
-             # write query objects
-             query_names = getattr(cls, "query_names", None)
-             if query_names:
-                 f.write(f"{istr1}query_names = \\\n")
-                 strout = io.StringIO()
-                 pprint.pprint(query_names, stream=strout, compact=True, width=70)
-                 mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n"))
-                 f.write(f"{istr2}{mn}\n\n")
-                 if stub_f:
-                     stub_f.write(f"{istr1}query_names = ...\n\n")
-
-                 queries_info = _get_commands_info(queries_hash)
-                 for query in query_names:
-                     query_cls = cls._child_classes[query]
-                     child_class_strings.append(f"{query}={query_cls.__name__}_cls")
-                     # function annotation for queries
-                     query_info = queries_info[query]
-                     if stub_f:
-                         stub_f.write(f"{istr1}def {query}(self, ")
-                         stub_f.write(", ".join(query_info.args_info))
-                         stub_f.write("):\n")
-                         _write_doc_string(query_info.doc, istr2, stub_f)
-
-             # write arguments
-             arguments = getattr(cls, "argument_names", None)
-             if arguments:
-                 f.write(f"{istr1}argument_names = \\\n")
-                 strout = io.StringIO()
-                 pprint.pprint(arguments, stream=strout, compact=True, width=70)
-                 mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n"))
-                 f.write(f"{istr2}{mn}\n\n")
-                 if stub_f:
-                     stub_f.write(f"{istr1}argument_names = ...\n")
-
-                 for argument in arguments:
-                     argument_cls = cls._child_classes[argument]
-                     child_class_strings.append(
-                         f"{argument}={argument_cls.__name__}_cls"
-                     )
-                     if stub_f:
-                         stub_f.write(
-                             f"{istr1}{argument}: {argument_cls.__name__}_cls = ...\n"
-                         )
-
-             if child_class_strings:
-                 f.write(f"{istr1}_child_classes = dict(\n")
-                 f.writelines(
-                     [f"{istr2}{cls_str},\n" for cls_str in child_class_strings]
-                 )
-                 f.write(f"{istr1})\n\n")
-
-             child_aliases = getattr(cls, "_child_aliases", None)
-             if child_aliases:
-                 f.write(f"{istr1}_child_aliases = dict(\n")
-                 f.writelines([f"{istr2}{k}={v!r},\n" for k, v in child_aliases.items()])
-                 f.write(f"{istr1})\n\n")
-
-             # write object type
-             child_object_type = getattr(cls, "child_object_type", None)
-             if child_object_type:
-                 f.write(f"{istr1}child_object_type: {pchild_name} = {pchild_name}\n")
-                 f.write(f'{istr1}"""\n')
-                 f.write(f"{istr1}child_object_type of {cls_name}.")
-                 f.write(f'\n{istr1}"""\n')
-                 if stub_f:
-                     stub_f.write(f"{istr1}child_object_type: {pchild_name} = ...\n")
-
-             return_type = getattr(cls, "return_type", None)
-             if return_type:
-                 f.write(f'{istr1}return_type = "{return_type}"\n')
-                 if stub_f:
-                     stub_f.write(f"{istr1}return_type = ...\n")
-         if stub_f:
-             stub_f.close()
-
-
- def _populate_init(parent_dir, hash):
-     file_name = os.path.normpath(os.path.join(parent_dir, "__init__.py"))
-     with open(file_name, "w") as f:
-         f.write("#\n")
-         f.write("# This is an auto-generated file. DO NOT EDIT!\n")
-         f.write("#\n")
-         f.write("\n")
-         f.write('"""A package providing Fluent\'s Settings Objects in Python."""')
-         f.write("\n")
-         f.write("from ansys.fluent.core.solver.flobject import *\n\n")
-         f.write(f'SHASH = "{hash}"\n')
-         f.write(f"from .{root_class_path} import root")
-
-
- def generate(version, static_infos: dict):
-     """Generate settings API classes."""
-     parent_dir = (pyfluent.CODEGEN_OUTDIR / "solver" / f"settings_{version}").resolve()
-     api_tree = {}
-     sinfo = static_infos.get(StaticInfoType.SETTINGS)
-
-     # Clear previously generated data
-     if os.path.exists(parent_dir):
-         shutil.rmtree(parent_dir)
-
-     if sinfo:
-         hash = _gethash(sinfo)
-         os.makedirs(parent_dir)
-
-         if pyfluent.CODEGEN_ZIP_SETTINGS:
-             parent_dir = parent_dir / "settings"
-             os.makedirs(parent_dir)
-
-         cls, _ = flobject.get_cls("", sinfo, version=version)
-
-         _populate_hash_dict("", sinfo, cls, api_tree)
-         _populate_classes(parent_dir)
-         _populate_init(parent_dir, hash)
-
-         if pyfluent.CODEGEN_ZIP_SETTINGS:
-             shutil.make_archive(parent_dir.parent, "zip", parent_dir.parent)
-             shutil.rmtree(parent_dir.parent)
-
-     return {"<solver_session>": api_tree}
-
-
- if __name__ == "__main__":
-     solver = launch_fluent()
-     version = get_version_for_file_name(session=solver)
-     static_infos = {StaticInfoType.SETTINGS: solver._settings_service.get_static_info()}
-     generate(version, static_infos)
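
The docstring of the removed settingsgen_old.py above describes deduplicating generated settings classes by hashing a tuple of class metadata, so that each unique class is written only once. A minimal, self-contained sketch of that idea follows; the helper names and sample data are illustrative, not code from the package.

# Illustrative sketch of the hash-based deduplication described above: identical
# metadata tuples hash to the same SHA-256 key, so a definition is stored (and
# would later be written to disk) only the first time it is seen.
import hashlib
import pickle

hash_dict = {}

def _gethash(obj_info) -> str:
    return hashlib.sha256(pickle.dumps(obj_info)).hexdigest()

def register(cls_tuple, payload) -> str:
    key = _gethash(cls_tuple)
    hash_dict.setdefault(key, payload)  # keep only the first occurrence
    return key

first = register(("viscous", "viscous", ("Group",), "group", "Viscous model."), "write file A")
second = register(("viscous", "viscous", ("Group",), "group", "Viscous model."), "write file B")
assert first == second and hash_dict[first] == "write file A"
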
@@ -1,2 +0,0 @@
- assert ansys.fluent.core.__version__ == "0.28.dev0"  # noqa: F821
- exit()