ansys-systemcoupling-core 0.9.0__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (49)
  1. ansys/systemcoupling/core/__init__.py +1 -1
  2. ansys/systemcoupling/core/adaptor/api_25_2/add_data_transfer.py +9 -9
  3. ansys/systemcoupling/core/adaptor/api_25_2/add_interface.py +1 -1
  4. ansys/systemcoupling/core/adaptor/api_25_2/add_ordered_data_transfers.py +12 -3
  5. ansys/systemcoupling/core/adaptor/api_25_2/add_participant.py +27 -12
  6. ansys/systemcoupling/core/adaptor/api_25_2/add_transformation.py +14 -14
  7. ansys/systemcoupling/core/adaptor/api_25_2/analysis_control.py +30 -0
  8. ansys/systemcoupling/core/adaptor/api_25_2/attribute.py +1 -1
  9. ansys/systemcoupling/core/adaptor/api_25_2/attribute_child.py +12 -2
  10. ansys/systemcoupling/core/adaptor/api_25_2/case_root.py +1 -1
  11. ansys/systemcoupling/core/adaptor/api_25_2/create_restart_point.py +4 -5
  12. ansys/systemcoupling/core/adaptor/api_25_2/data_transfer_child.py +9 -1
  13. ansys/systemcoupling/core/adaptor/api_25_2/delete_snapshot.py +3 -1
  14. ansys/systemcoupling/core/adaptor/api_25_2/execution_control.py +10 -0
  15. ansys/systemcoupling/core/adaptor/api_25_2/generate_input_file.py +12 -0
  16. ansys/systemcoupling/core/adaptor/api_25_2/get_execution_command.py +1 -1
  17. ansys/systemcoupling/core/adaptor/api_25_2/get_machines.py +22 -1
  18. ansys/systemcoupling/core/adaptor/api_25_2/get_region_names_for_participant.py +6 -5
  19. ansys/systemcoupling/core/adaptor/api_25_2/mapping_control.py +24 -0
  20. ansys/systemcoupling/core/adaptor/api_25_2/open.py +19 -14
  21. ansys/systemcoupling/core/adaptor/api_25_2/partition_participants.py +48 -38
  22. ansys/systemcoupling/core/adaptor/api_25_2/save.py +17 -15
  23. ansys/systemcoupling/core/adaptor/api_25_2/setup_root.py +1 -17
  24. ansys/systemcoupling/core/adaptor/api_25_2/solution_control.py +10 -0
  25. ansys/systemcoupling/core/adaptor/api_25_2/solution_root.py +1 -1
  26. ansys/systemcoupling/core/adaptor/api_25_2/solve.py +5 -5
  27. ansys/systemcoupling/core/adaptor/api_25_2/update_participant.py +4 -4
  28. ansys/systemcoupling/core/adaptor/impl/get_syc_version.py +4 -1
  29. ansys/systemcoupling/core/adaptor/impl/injected_commands.py +3 -1
  30. ansys/systemcoupling/core/adaptor/impl/syc_proxy.py +34 -4
  31. ansys/systemcoupling/core/charts/csv_chartdata.py +4 -3
  32. ansys/systemcoupling/core/charts/plot_functions.py +12 -4
  33. ansys/systemcoupling/core/charts/plotter.py +3 -2
  34. ansys/systemcoupling/core/client/grpc_client.py +11 -2
  35. ansys/systemcoupling/core/client/syc_container.py +40 -35
  36. ansys/systemcoupling/core/client/syc_process.py +5 -2
  37. ansys/systemcoupling/core/native_api/datamodel_metadata.py +3 -2
  38. ansys/systemcoupling/core/native_api/object_path.py +2 -1
  39. ansys/systemcoupling/core/syc_version.py +1 -1
  40. ansys/systemcoupling/core/util/assertion.py +38 -0
  41. ansys/systemcoupling/core/util/file_transfer.py +2 -1
  42. ansys/systemcoupling/core/util/state_keys.py +11 -2
  43. ansys/systemcoupling/core/util/yaml_helper.py +5 -5
  44. {ansys_systemcoupling_core-0.9.0.dist-info → ansys_systemcoupling_core-0.10.0.dist-info}/METADATA +14 -8
  45. {ansys_systemcoupling_core-0.9.0.dist-info → ansys_systemcoupling_core-0.10.0.dist-info}/RECORD +47 -48
  46. {ansys_systemcoupling_core-0.9.0.dist-info → ansys_systemcoupling_core-0.10.0.dist-info}/WHEEL +1 -1
  47. ansys/systemcoupling/core/adaptor/api_25_2/add_data_transfer_by_display_names.py +0 -191
  48. ansys/systemcoupling/core/adaptor/api_25_2/add_interface_by_display_names.py +0 -78
  49. {ansys_systemcoupling_core-0.9.0.dist-info → ansys_systemcoupling_core-0.10.0.dist-info}/licenses/LICENSE +0 -0
ansys/systemcoupling/core/adaptor/api_25_2/partition_participants.py

@@ -13,16 +13,32 @@ class partition_participants(Command):
  At least one participant must be defined for this command to be used. Use
  of this command is not recommended if participants are already running.

+ Note:
+ Ansys recommends only using this command for the Custom partitioning algorithm.
+
+ When other algorithms are used (SharedAllocateMachines,
+ DistributedAllocateCores, etc.) the algorithm should be specified directly,
+ for example:
+
+ ``setup.analysis_control.partitioning_algorithm = "SharedAllocateMachines"``
+
+ The participant parallel fractions can also be specified directly, for example:
+
+ ``setup.coupling_participant["FLUENT-1"].execution_control.parallel_fraction = 0.5``
+
+ The machine list should be specified via System Coupling command line arguments,
+ for example:
+
+ ``<v252>/SystemCoupling/binsystemcoupling --cnf="hostA:4,hostB:4"``
+
  Parameters
  ----------
  algorithm_name : str, optional
  Name of the partitioning algorithm. Available algorithms are:
-
- - \"SharedAllocateMachines\" (default)
- - \"SharedAllocateCores\"
- - \"DistributedAllocateMachines\"
- - \"DistributedAllocateCores\"
- - \"Custom\" (see ``partitioning_info`` for more details)
+ 'SharedAllocateMachines'(default), 'SharedAllocateCores',
+ 'DistributedAllocateMachines', 'DistributedAllocateCores',
+ and 'Custom' (please see ``partitioning_info`` section below for more details
+ for this algorithm)

  The algorithms allow for both shared and distributed execution and for
  the allocation of machines or cores. The default value is generally the
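The Note added above recommends setting these values directly on the data model for the non-Custom algorithms. A minimal sketch of that style, assuming a locally launched session and a hypothetical participant named "FLUENT-1":

    import ansys.systemcoupling.core as pysyc

    syc = pysyc.launch()  # assumes a local System Coupling installation
    setup = syc.setup

    # Select the algorithm directly rather than via partition_participants.
    setup.analysis_control.partitioning_algorithm = "SharedAllocateMachines"

    # Assign this (hypothetical) participant half of the available cores.
    setup.coupling_participant["FLUENT-1"].execution_control.parallel_fraction = 0.5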
@@ -37,29 +53,27 @@ class partition_participants(Command):
  List of tuples specifying the fractions of core count applied for
  each participant

- Each tuple must have the participant name as its first item and the
+ Each tuple must have the ParticipantName as its first item and the
  associated fraction as its second item. If this parameter is omitted,
  then cores will be allocated for all participants set in the
  data model.
  machine_list : List, optional
  List of dictionaries specifying machines available for distributed run.
- Each dictionary must have a key \"machine-name\" with machine name as its
- value, and key \"core-count\" with number of cores for that machine as
- its value. Providing this argument will override any machine list
+ Each dictionary must have a key 'machine-name' with machine name as its
+ value, and key 'core-count' with number of cores for that machine as
+ its value. Providing this argument will over-ride any machine-list
  information detected from the scheduler environment and any information
- provided by the ``--cnf`` command-line argument.
+ provided by the --cnf command-line argument.
  partitioning_info : Dict, optional
  Dictionary specifying machines resources assigned to each participant by user.
- Dictionary must have participant names as keys and machine lists containing
- machine resources as values. The value of a ``partitioning_info`` machine list is
+ Dictionary must have participant names as keys and machineLists containing
+ machine resources as values. The value of ``partitioning_info`` machineList is
  a list of dictionaries specifying machines assigned to corresponding participants.
- Each dictionary of the machine list must have a key \"machine-name\" with the
- machine name as its value, and key \"core-count\" with number of cores for that
- machine as its value.
-
+ Each dictionary of machine mist must have a key 'machine-name' with machine name
+ as its value, and key 'core-count' with number of cores for that machine as its value.
  Providing this argument will disallow other arguments except ``algorithm_name``,
- which must set as \"Custom\" if provided. Otherwise, ``algorithm_name`` will be
- set as \"Custom\" internally if ``partitioning_info`` is provided.
+ which must set as 'Custom' if provided. Otherwise, ``algorithm_name`` will be set as
+ 'Custom' internally if ``partitioning_info`` is provided.

  """

@@ -75,12 +89,10 @@ class partition_participants(Command):
  class algorithm_name(String):
  """
  Name of the partitioning algorithm. Available algorithms are:
-
- - \"SharedAllocateMachines\" (default)
- - \"SharedAllocateCores\"
- - \"DistributedAllocateMachines\"
- - \"DistributedAllocateCores\"
- - \"Custom\" (see ``partitioning_info`` for more details)
+ 'SharedAllocateMachines'(default), 'SharedAllocateCores',
+ 'DistributedAllocateMachines', 'DistributedAllocateCores',
+ and 'Custom' (please see ``partitioning_info`` section below for more details
+ for this algorithm)

  The algorithms allow for both shared and distributed execution and for
  the allocation of machines or cores. The default value is generally the
@@ -100,7 +112,7 @@ class partition_participants(Command):
  List of tuples specifying the fractions of core count applied for
  each participant

- Each tuple must have the participant name as its first item and the
+ Each tuple must have the ParticipantName as its first item and the
  associated fraction as its second item. If this parameter is omitted,
  then cores will be allocated for all participants set in the
  data model.
@@ -111,11 +123,11 @@ class partition_participants(Command):
  class machine_list(StrOrIntDictList):
  """
  List of dictionaries specifying machines available for distributed run.
- Each dictionary must have a key \"machine-name\" with machine name as its
- value, and key \"core-count\" with number of cores for that machine as
- its value. Providing this argument will override any machine list
+ Each dictionary must have a key 'machine-name' with machine name as its
+ value, and key 'core-count' with number of cores for that machine as
+ its value. Providing this argument will over-ride any machine-list
  information detected from the scheduler environment and any information
- provided by the ``--cnf`` command-line argument.
+ provided by the --cnf command-line argument.
  """

  syc_name = "MachineList"
@@ -123,16 +135,14 @@ class partition_participants(Command):
  class partitioning_info(StrOrIntDictListDict):
  """
  Dictionary specifying machines resources assigned to each participant by user.
- Dictionary must have participant names as keys and machine lists containing
- machine resources as values. The value of a ``partitioning_info`` machine list is
+ Dictionary must have participant names as keys and machineLists containing
+ machine resources as values. The value of ``partitioning_info`` machineList is
  a list of dictionaries specifying machines assigned to corresponding participants.
- Each dictionary of the machine list must have a key \"machine-name\" with the
- machine name as its value, and key \"core-count\" with number of cores for that
- machine as its value.
-
+ Each dictionary of machine mist must have a key 'machine-name' with machine name
+ as its value, and key 'core-count' with number of cores for that machine as its value.
  Providing this argument will disallow other arguments except ``algorithm_name``,
- which must set as \"Custom\" if provided. Otherwise, ``algorithm_name`` will be
- set as \"Custom\" internally if ``partitioning_info`` is provided.
+ which must set as 'Custom' if provided. Otherwise, ``algorithm_name`` will be set as
+ 'Custom' internally if ``partitioning_info`` is provided.
  """

  syc_name = "PartitioningInfo"
ansys/systemcoupling/core/adaptor/api_25_2/save.py

@@ -9,32 +9,33 @@ class save(Command):
  """
  Saves the state of the coupled analysis data model.

- - Analysis settings are written to a single Settings.h5 file which
- can be used to reload analysis settings.
+ -- Analysis settings are written to a single Settings.h5 file which
+ can be used to reload analysis settings.

- - Restart files for all restart points in the current co-simulation will
- be written when this command is called. Existing restart files from
- previous System Coupling versions will be renamed to conform to the new
- naming scheme.
+ -- Restart files for all restart points in the current co-simulation will
+ be written when this command is called. Existing restart files from
+ previous System Coupling versions will be renamed to conform to the new
+ naming scheme.

- - Restart files are named according to the convention
- ``Restart_step#.h5`` or ``Restart_iter#.h5``, where ``#`` is the index of
- the corresponding coupling step or iteration.
+ -- Restart files are named according to the convention
+ Restart_step#.h5 or Restart_iter#.h5, where "#" is the index of
+ the corresponding coupling step or iteration.

- Returns a Boolean value of ``True`` if the files were saved successfully;
- otherwise, returns a value of ``False``.
+ Returns a Boolean value of 'True' if the files were saved successfully;
+ otherwise, returns a value of 'False'.

  Note that this command will raise an exception if another instance of
  System Coupling is solving in the current working directory.

- By default, writes to the ``SyC`` sub-directory of the current working
- directory. This behavior may be modified by providing ``file_path``.
+ If given optional arguments, then behaves as described below in "Optional
+ Keyword Arguments."

  Parameters
  ----------
  file_path : str, optional
  Writeable directory to which the SyC directory is added. (Settings and
- results .h5 files will be written to the SyC directory.)
+ Results.h5 files will be written to the SyC directory.). Ansys does
+ not recommend changing the default value of this argument.

  """

@@ -45,7 +46,8 @@ class save(Command):
  class file_path(String):
  """
  Writeable directory to which the SyC directory is added. (Settings and
- results .h5 files will be written to the SyC directory.)
+ Results.h5 files will be written to the SyC directory.). Ansys does
+ not recommend changing the default value of this argument.
  """

  syc_name = "FilePath"
ansys/systemcoupling/core/adaptor/api_25_2/setup_root.py

@@ -2,7 +2,7 @@
  # This is an auto-generated file. DO NOT EDIT!
  #

- SHASH = "bb3a306a5f61f6aa15db0778cc94815e9d4f8176d4c372d9c4eb2c2664921947"
+ SHASH = "de74a13bc10a8800dcb6f36be054801d70928ed5d61590540cefe133364d7adc"

  from ansys.systemcoupling.core.adaptor.impl.types import *

@@ -10,12 +10,10 @@ from ._add_participant import _add_participant
  from .activate_hidden import activate_hidden
  from .add_aerodamping_data_transfers import add_aerodamping_data_transfers
  from .add_data_transfer import add_data_transfer
- from .add_data_transfer_by_display_names import add_data_transfer_by_display_names
  from .add_expression_function import add_expression_function
  from .add_flow_boundary_data_transfers import add_flow_boundary_data_transfers
  from .add_fsi_data_transfers import add_fsi_data_transfers
  from .add_interface import add_interface
- from .add_interface_by_display_names import add_interface_by_display_names
  from .add_named_expression import add_named_expression
  from .add_ordered_data_transfers import add_ordered_data_transfers
  from .add_participant import add_participant
@@ -93,12 +91,10 @@ class setup_root(Container):
  "_add_participant",
  "add_aerodamping_data_transfers",
  "add_data_transfer",
- "add_data_transfer_by_display_names",
  "add_expression_function",
  "add_flow_boundary_data_transfers",
  "add_fsi_data_transfers",
  "add_interface",
- "add_interface_by_display_names",
  "add_named_expression",
  "add_ordered_data_transfers",
  "add_participant",
@@ -135,12 +131,6 @@ class setup_root(Container):
  """
  add_data_transfer command of setup_root.
  """
- add_data_transfer_by_display_names: add_data_transfer_by_display_names = (
- add_data_transfer_by_display_names
- )
- """
- add_data_transfer_by_display_names command of setup_root.
- """
  add_expression_function: add_expression_function = add_expression_function
  """
  add_expression_function command of setup_root.
@@ -159,12 +149,6 @@ class setup_root(Container):
  """
  add_interface command of setup_root.
  """
- add_interface_by_display_names: add_interface_by_display_names = (
- add_interface_by_display_names
- )
- """
- add_interface_by_display_names command of setup_root.
- """
  add_named_expression: add_named_expression = add_named_expression
  """
  add_named_expression command of setup_root.
ansys/systemcoupling/core/adaptor/api_25_2/solution_control.py

@@ -29,6 +29,7 @@ class solution_control(Container):
  ("maximum_iterations", "MaximumIterations", "int"),
  ("use_ip_address_when_possible", "UseIPAddressWhenPossible", "str"),
  ("use_local_host_when_possible", "UseLocalHostWhenPossible", "str"),
+ ("write_participant_output_stream", "WriteParticipantOutputStream", "str"),
  ]

  @property
@@ -115,3 +116,12 @@ class solution_control(Container):
  @use_local_host_when_possible.setter
  def use_local_host_when_possible(self, value: str):
  self.set_property_state("use_local_host_when_possible", value)
+
+ @property
+ def write_participant_output_stream(self) -> str:
+ """Controls whether participant output stream is directed to an output file."""
+ return self.get_property_state("write_participant_output_stream")
+
+ @write_participant_output_stream.setter
+ def write_participant_output_stream(self, value: str):
+ self.set_property_state("write_participant_output_stream", value)
ansys/systemcoupling/core/adaptor/api_25_2/solution_root.py

@@ -2,7 +2,7 @@
  # This is an auto-generated file. DO NOT EDIT!
  #

- SHASH = "5a1ce2bf8c6b59f120413651ed283c65b2593ebac9c4b8485e0817e00ecbd1e6"
+ SHASH = "5752cfcb125c06b52c7d98fc8d1287154da25832c24c95eb0a98d3876d385b47"

  from ansys.systemcoupling.core.adaptor.impl.types import *


ansys/systemcoupling/core/adaptor/api_25_2/solve.py

@@ -7,16 +7,16 @@ from ansys.systemcoupling.core.adaptor.impl.types import *

  class solve(InjectedCommand):
  """
- Starts the participants (if necessary) and solves the coupled analysis. By
- default, the solution runs straight through without pause unless stopped by
- an scStop file.
+ Solves the coupled analysis. This command will execute until
+ end coupled analysis is reached, or it is interrupted or aborted
+ (for example, via scStop file).

  Disabled when a solution is already in progress.

  For restarts, the ``open`` command must be run before the ``solve`` command.

- Note that if the ``execution_control`` option for a participant is set to
- \"ExternallyManaged\", then System Coupling will not start the participant
+ Note that if the ``execution_control.option`` for a participant is set to
+ "ExternallyManaged", then System Coupling will not start the participant
  using either this command or any of the other commands that automatically
  start participants. The user is expected to manually start the participant.
  This function will not return until all participants have been connected.
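A minimal sketch of the restart sequence described above, continuing the same session, and assuming ``open`` is exposed on the case root and ``solve`` on the solution root:

    # For restarts, open must run before solve.
    syc.case.open()
    # Blocks until the analysis completes or is interrupted (e.g. scStop file).
    syc.solution.solve()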
ansys/systemcoupling/core/adaptor/api_25_2/update_participant.py

@@ -34,8 +34,8 @@ class update_participant(Command):
  input_file : str, optional
  Name of the input file for the participant to be added.
  Currently supported formats are SCP files, mechanical server
- (*.rst) files, cfd server (*.csv) files, and system coupling
- data server (*.scdt/axdt/csv) files.
+ (\*.rst) files, cfd server (\*.csv) files, and system coupling
+ data server (\*.scdt/axdt/csv) files.

  """

@@ -54,8 +54,8 @@ class update_participant(Command):
  """
  Name of the input file for the participant to be added.
  Currently supported formats are SCP files, mechanical server
- (*.rst) files, cfd server (*.csv) files, and system coupling
- data server (*.scdt/axdt/csv) files.
+ (\*.rst) files, cfd server (\*.csv) files, and system coupling
+ data server (\*.scdt/axdt/csv) files.
  """

  syc_name = "InputFile"

ansys/systemcoupling/core/adaptor/impl/get_syc_version.py

@@ -44,11 +44,14 @@ def get_syc_version(api) -> str:
  def clean_version_string(version_in: str) -> str:
  year, _, release = version_in.partition(" ")
  if len(year) == 4 and year.startswith("20") and release.startswith("R"):
+ # Exclude Bandit check. The try-except-pass is only used to simplify logic.
+ # An exception will be thrown in any case, but it *also* gets thrown for
+ # input that does not match the above 'if' condition.
  try:
  year = int(year[2:])
  release = int(release[1:])
  return f"{year}.{release}"
- except:
+ except: # nosec B110
  pass
  raise RuntimeError(
  f"Version string {version_in} has invalid format (expect '20yy Rn')."
ansys/systemcoupling/core/adaptor/impl/injected_commands.py

@@ -187,7 +187,9 @@ def _ensure_file_available(session: SessionProtocol, filepath: str) -> str:
  file_name = os.path.basename(filepath)
  root_name, _, ext = file_name.rpartition(".")
  ext = f".{ext}" if ext else ""
- new_name = f"{root_name}_{int(time.time())}_{random.randint(1, 10000000)}{ext}"
+ # Exclude Bandit check as random number is simply being used to create a unique
+ # file name, not for security/cryptographic purposes.
+ new_name = f"{root_name}_{int(time.time())}_{random.randint(1, 10000000)}{ext}" # nosec B311

  session._native_api.ExecPythonString(
  PythonString=f"import shutil\nshutil.copy('{filepath}', '{new_name}')"

ansys/systemcoupling/core/adaptor/impl/syc_proxy.py

@@ -28,7 +28,10 @@ from ansys.systemcoupling.core.adaptor.impl.static_info import (
  make_combined_metadata,
  )
  from ansys.systemcoupling.core.adaptor.impl.syc_proxy_interface import SycProxyInterface
- from ansys.systemcoupling.core.util.state_keys import adapt_native_named_object_keys
+ from ansys.systemcoupling.core.util.state_keys import (
+ adapt_client_named_object_keys,
+ adapt_native_named_object_keys,
+ )


  class SycProxy(SycProxyInterface):
@@ -37,13 +40,16 @@ class SycProxy(SycProxyInterface):
  self.__injected_cmds = {}
  self.__version = None
  self.__defunct = False
+ self.__named_obj_level_map: Dict = {}
+ self.__datamodel_metadata = None

  def reset_rpc(self, rpc):
  """Reset the original ``rpc`` instance with a new one if the remote connection is lost.

  When a remote connection is lost, this method is called, providing an
  ``rpc`` instance that replaces the original one from the initializer.
- A sensible error is raised if any attempt is made to use this method.
+ The intent is that a sensible error will be raised if any attempt is
+ made to access any attributes on the replacement rpc.

  The motivating use case is to catch attempted uses of stale
  objects after the current session has ended.
@@ -66,8 +72,9 @@ class SycProxy(SycProxyInterface):
  if category == "setup":
  cmd_metadata = get_extended_cmd_metadata(self.__rpc)
  root_type = "SystemCoupling"
- dm_metadata = get_dm_metadata(self.__rpc, root_type)
- metadata = make_combined_metadata(dm_metadata, cmd_metadata, category)
+ metadata = make_combined_metadata(
+ self._get_datamodel_metadata(root_type), cmd_metadata, category
+ )
  elif category in ("case", "solution"):
  cmd_metadata = get_extended_cmd_metadata(self.__rpc)
  metadata, root_type = make_cmdonly_metadata(cmd_metadata, category)
@@ -81,6 +88,9 @@ class SycProxy(SycProxyInterface):
  return self.__version

  def set_state(self, path, state):
+ state = adapt_client_named_object_keys(
+ state, self._get_named_object_level_map(), path.count("/") - 1
+ )
  self.__rpc.SetState(ObjectPath=path, State=state)

  def get_state(self, path):
@@ -116,3 +126,23 @@ class SycProxy(SycProxyInterface):
  cmd_name = args[1]
  cmd = self.__injected_cmds.get(cmd_name, None)
  return cmd(**kwargs)
+
+ def _get_datamodel_metadata(self, root_type):
+ if self.__datamodel_metadata is None:
+ self.__datamodel_metadata = get_dm_metadata(self.__rpc, root_type)
+ return self.__datamodel_metadata
+
+ def _get_named_object_level_map(self):
+ if not self.__named_obj_level_map:
+ self._make_named_object_level_map(root_type="SystemCoupling")
+ return self.__named_obj_level_map
+
+ def _make_named_object_level_map(self, root_type):
+ def visit_children(metadata, level):
+ for k, v in metadata["__children"].items():
+ if v["isNamed"]:
+ self.__named_obj_level_map.setdefault(level, set()).add(k)
+ visit_children(v, level + 1)
+
+ dm_metadata = self._get_datamodel_metadata(root_type)
+ visit_children(dm_metadata[root_type], 0)
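The new level map records, for each nesting depth of the data-model metadata, which child types are named-object containers; ``set_state`` then uses it to adapt client-side state keys before forwarding them to the server. A standalone sketch of the traversal with a made-up metadata fragment (only the ``__children``/``isNamed`` shape is taken from the code above; the type names are hypothetical):

    def make_level_map(root: dict) -> dict[int, set[str]]:
        # Depth-first walk collecting names of "named" child types per level.
        level_map: dict[int, set[str]] = {}

        def visit(node: dict, level: int) -> None:
            for name, child in node["__children"].items():
                if child["isNamed"]:
                    level_map.setdefault(level, set()).add(name)
                visit(child, level + 1)

        visit(root, 0)
        return level_map

    metadata = {
        "__children": {
            "CouplingInterface": {  # hypothetical named container
                "isNamed": True,
                "__children": {
                    "DataTransfer": {"isNamed": True, "__children": {}},
                },
            },
        },
    }
    print(make_level_map(metadata))  # {0: {'CouplingInterface'}, 1: {'DataTransfer'}}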
ansys/systemcoupling/core/charts/csv_chartdata.py

@@ -32,6 +32,7 @@ from ansys.systemcoupling.core.charts.chart_datatypes import (
  TimestepData,
  TransferSeriesInfo,
  )
+ from ansys.systemcoupling.core.util.assertion import assert_

  HeaderList = list[str]
  ChartData = list[list[float]]
@@ -231,8 +232,8 @@ def _parse_suffix(header: str, part_disp_name: str) -> str:

  def parse_csv_metadata(interface_name: str, headers: list[str]) -> InterfaceInfo:
  intf_info = InterfaceInfo(name=interface_name)
- assert headers[0] == "Iteration"
- assert headers[1] == "Step"
+ assert_(headers[0] == "Iteration", 'Header expected to be "Iteration"')
+ assert_(headers[1] == "Step", 'Header expected to be "Step"')
  intf_info.is_transient = headers[2] == "Time"

  start_index = 3 if intf_info.is_transient else 2
@@ -263,7 +264,7 @@ def parse_csv_metadata(interface_name: str, headers: list[str]) -> InterfaceInfo

  intf_disp_name = intf_or_part_disp_name
  if data_index == 0:
- assert intf_info.display_name == ""
+ assert_(intf_info.display_name == "", "display_name should be empty")
  intf_info.display_name = intf_disp_name
  series_info = TransferSeriesInfo(
  data_index,

ansys/systemcoupling/core/charts/plot_functions.py

@@ -34,8 +34,12 @@ from ansys.systemcoupling.core.charts.plotter import Plotter


  def create_and_show_plot(spec: PlotSpec, csv_list: list[str]) -> Plotter:
- assert len(spec.interfaces) == 1, "Plots currently only support one interface"
- assert len(spec.interfaces) == len(csv_list)
+ if len(spec.interfaces) != 1:
+ raise ValueError("Plots currently only support one interface")
+ if len(spec.interfaces) != len(csv_list):
+ raise ValueError(
+ "'csv_list' should have length equal to the number of interfaces"
+ )

  manager = PlotDefinitionManager(spec)
  reader = CsvChartDataReader(spec.interfaces[0].name, csv_list[0])
@@ -61,8 +65,12 @@ def solve_with_live_plot(
  csv_list: list[str],
  solve_func: Callable[[], None],
  ):
- assert len(spec.interfaces) == 1, "Plots currently only support one interface"
- assert len(spec.interfaces) == len(csv_list)
+ if len(spec.interfaces) != 1:
+ raise ValueError("Plots currently only support one interface")
+ if len(spec.interfaces) != len(csv_list):
+ raise ValueError(
+ "'csv_list' should have length equal to the number of interfaces"
+ )

  manager = PlotDefinitionManager(spec)
  dispatcher = MessageDispatcher()
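Replacing ``assert`` with explicit exceptions in these argument checks matters because assertions are removed when Python runs with ``-O``, whereas a ``ValueError`` is always enforced. A minimal illustration with hypothetical names:

    def check_with_assert(items: list) -> None:
        assert len(items) == 1, "exactly one item expected"  # skipped under -O

    def check_with_exception(items: list) -> None:
        if len(items) != 1:
            raise ValueError("exactly one item expected")  # always raised

    try:
        check_with_exception([])  # wrong length on purpose
    except ValueError as err:
        print(err)  # fires even under `python -O`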
ansys/systemcoupling/core/charts/plotter.py

@@ -37,6 +37,7 @@ from ansys.systemcoupling.core.charts.chart_datatypes import (
  from ansys.systemcoupling.core.charts.plotdefinition_manager import (
  PlotDefinitionManager,
  )
+ from ansys.systemcoupling.core.util.assertion import assert_


  def _process_timestep_data(
@@ -282,13 +283,13 @@ class Plotter:
  # this (assume the wait function is stored as an
  # attribute):
  #
- # assert self._wait_for_metadata is not None
+ # assert_(self._wait_for_metadata is not None)
  # metadata = self._wait_for_metadata()
  # if metadata is not None:
  # self.set_metadata(metadata)
  # else:
  # return
- assert self._request_update is not None
+ assert_(self._request_update is not None)

  self.ani = FuncAnimation(
  self._fig,

ansys/systemcoupling/core/client/grpc_client.py

@@ -101,6 +101,7 @@ class SycGrpc(object):
  self.__output_thread = None
  self.__pim_instance = None
  self.__skip_exit = False
+ self.__container = None

  @classmethod
  def _cleanup(cls):
@@ -161,7 +162,9 @@ class SycGrpc(object):
  """Start the System Coupling container and establish a connection."""
  LOG.debug("Starting container...")
  port = port if port is not None else _find_port()
- start_container(mounted_from, mounted_to, network, port, version)
+ self.__container = start_container(
+ mounted_from, mounted_to, network, port, version
+ )
  LOG.debug("...started")
  self._connect(_LOCALHOST_IP, port)

@@ -307,12 +310,18 @@ class SycGrpc(object):
  try:
  self.__ostream_service.end_streaming()
  except Exception as e:
- LOG.debug("Exception on OutputStreamService.end_straming(): " + str(e))
+ LOG.debug(f"Exception on OutputStreamService.end_straming(): {e}")
  self.__process_service.quit()
  self.__channel = None
  if self.__process:
  self.__process.end()
  self.__process = None
+ if self.__container:
+ try:
+ self.__container.stop()
+ except Exception as e:
+ LOG.debug(f"Exception from container.stop(): {e}")
+ self.__container = None
  if self.__pim_instance is not None:
  self.__pim_instance.delete()
  self.__pim_instance = None