pyedb 0.59.0__py3-none-any.whl → 0.61.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyedb has been flagged as potentially problematic; review the advisory details on the registry page before upgrading.
- pyedb/__init__.py +23 -1
- pyedb/common/__init__.py +21 -0
- pyedb/common/nets.py +22 -0
- pyedb/component_libraries/ansys_components.py +22 -0
- pyedb/configuration/__init__.py +21 -0
- pyedb/configuration/cfg_boundaries.py +1 -1
- pyedb/configuration/cfg_common.py +1 -1
- pyedb/configuration/cfg_components.py +36 -8
- pyedb/configuration/cfg_data.py +1 -1
- pyedb/configuration/cfg_general.py +1 -1
- pyedb/configuration/cfg_modeler.py +1 -1
- pyedb/configuration/cfg_nets.py +1 -1
- pyedb/configuration/cfg_operations.py +1 -1
- pyedb/configuration/cfg_package_definition.py +1 -1
- pyedb/configuration/cfg_padstacks.py +1 -1
- pyedb/configuration/cfg_pin_groups.py +1 -1
- pyedb/configuration/cfg_ports_sources.py +3 -2
- pyedb/configuration/cfg_s_parameter_models.py +1 -1
- pyedb/configuration/cfg_setup.py +5 -1
- pyedb/configuration/cfg_spice_models.py +1 -1
- pyedb/configuration/cfg_stackup.py +1 -1
- pyedb/configuration/cfg_terminals.py +22 -0
- pyedb/configuration/configuration.py +6 -5
- pyedb/dotnet/__init__.py +21 -0
- pyedb/dotnet/clr_module.py +22 -0
- pyedb/dotnet/database/Variables.py +1 -1
- pyedb/dotnet/database/__init__.py +22 -0
- pyedb/dotnet/database/cell/__init__.py +21 -0
- pyedb/dotnet/database/cell/connectable.py +1 -1
- pyedb/dotnet/database/cell/hierarchy/__init__.py +21 -0
- pyedb/dotnet/database/cell/hierarchy/component.py +9 -7
- pyedb/dotnet/database/cell/hierarchy/hierarchy_obj.py +1 -1
- pyedb/dotnet/database/cell/hierarchy/model.py +2 -29
- pyedb/dotnet/database/cell/hierarchy/netlist_model.py +1 -1
- pyedb/dotnet/database/cell/hierarchy/pin_pair_model.py +1 -1
- pyedb/dotnet/database/cell/hierarchy/s_parameter_model.py +11 -15
- pyedb/dotnet/database/cell/hierarchy/spice_model.py +14 -8
- pyedb/dotnet/database/cell/layout.py +5 -4
- pyedb/dotnet/database/cell/layout_obj.py +1 -1
- pyedb/dotnet/database/cell/primitive/__init__.py +22 -0
- pyedb/dotnet/database/cell/primitive/bondwire.py +1 -1
- pyedb/dotnet/database/cell/primitive/path.py +1 -1
- pyedb/dotnet/database/cell/primitive/primitive.py +1 -1
- pyedb/dotnet/database/cell/terminal/__init__.py +21 -0
- pyedb/dotnet/database/cell/terminal/bundle_terminal.py +1 -1
- pyedb/dotnet/database/cell/terminal/edge_terminal.py +1 -1
- pyedb/dotnet/database/cell/terminal/padstack_instance_terminal.py +1 -1
- pyedb/dotnet/database/cell/terminal/pingroup_terminal.py +1 -1
- pyedb/dotnet/database/cell/terminal/point_terminal.py +1 -1
- pyedb/dotnet/database/cell/terminal/terminal.py +7 -2
- pyedb/dotnet/database/cell/voltage_regulator.py +1 -1
- pyedb/dotnet/database/components.py +6 -2
- pyedb/dotnet/database/definition/__init__.py +21 -0
- pyedb/dotnet/database/definition/component_def.py +1 -1
- pyedb/dotnet/database/definition/component_model.py +1 -1
- pyedb/dotnet/database/definition/definition_obj.py +1 -1
- pyedb/dotnet/database/definition/definitions.py +1 -1
- pyedb/dotnet/database/definition/package_def.py +1 -1
- pyedb/dotnet/database/dotnet/__init__.py +21 -0
- pyedb/dotnet/database/dotnet/database.py +1 -1
- pyedb/dotnet/database/dotnet/primitive.py +1 -1
- pyedb/dotnet/database/edb_data/__init__.py +21 -0
- pyedb/dotnet/database/edb_data/control_file.py +1 -1
- pyedb/dotnet/database/edb_data/design_options.py +1 -1
- pyedb/dotnet/database/edb_data/edbvalue.py +1 -1
- pyedb/dotnet/database/edb_data/hfss_extent_info.py +1 -1
- pyedb/dotnet/database/edb_data/layer_data.py +1 -1
- pyedb/dotnet/database/edb_data/nets_data.py +1 -1
- pyedb/dotnet/database/edb_data/padstacks_data.py +6 -4
- pyedb/dotnet/database/edb_data/ports.py +1 -1
- pyedb/dotnet/database/edb_data/primitives_data.py +1 -1
- pyedb/dotnet/database/edb_data/raptor_x_simulation_setup_data.py +1 -1
- pyedb/dotnet/database/edb_data/simulation_configuration.py +1 -1
- pyedb/dotnet/database/edb_data/sources.py +1 -1
- pyedb/dotnet/database/edb_data/utilities.py +1 -1
- pyedb/dotnet/database/edb_data/variables.py +1 -1
- pyedb/dotnet/database/general.py +1 -1
- pyedb/dotnet/database/geometry/__init__.py +21 -0
- pyedb/dotnet/database/geometry/point_data.py +1 -1
- pyedb/dotnet/database/geometry/polygon_data.py +1 -1
- pyedb/dotnet/database/hfss.py +1 -1
- pyedb/dotnet/database/layout_obj_instance.py +1 -1
- pyedb/dotnet/database/layout_validation.py +1 -1
- pyedb/dotnet/database/materials.py +1 -1
- pyedb/dotnet/database/modeler.py +3 -2
- pyedb/dotnet/database/net_class.py +1 -1
- pyedb/dotnet/database/nets.py +1 -1
- pyedb/dotnet/database/padstack.py +188 -2
- pyedb/dotnet/database/sim_setup_data/__init__.py +22 -0
- pyedb/dotnet/database/sim_setup_data/data/__init__.py +22 -0
- pyedb/dotnet/database/sim_setup_data/data/adaptive_frequency_data.py +1 -1
- pyedb/dotnet/database/sim_setup_data/data/mesh_operation.py +1 -1
- pyedb/dotnet/database/sim_setup_data/data/settings.py +1 -1
- pyedb/dotnet/database/sim_setup_data/data/sim_setup_info.py +1 -1
- pyedb/dotnet/database/sim_setup_data/data/simulation_settings.py +1 -1
- pyedb/dotnet/database/sim_setup_data/data/siw_dc_ir_settings.py +1 -1
- pyedb/dotnet/database/sim_setup_data/data/sweep_data.py +1 -1
- pyedb/dotnet/database/sim_setup_data/io/__init__.py +21 -0
- pyedb/dotnet/database/sim_setup_data/io/siwave.py +1 -1
- pyedb/dotnet/database/siwave.py +1 -1
- pyedb/dotnet/database/stackup.py +1 -1
- pyedb/dotnet/database/utilities/__init__.py +22 -0
- pyedb/dotnet/database/utilities/heatsink.py +23 -0
- pyedb/dotnet/database/utilities/hfss_simulation_setup.py +1 -1
- pyedb/dotnet/database/utilities/obj_base.py +1 -1
- pyedb/dotnet/database/utilities/simulation_setup.py +1 -1
- pyedb/dotnet/database/utilities/siwave_cpa_simulation_setup.py +22 -0
- pyedb/dotnet/database/utilities/siwave_simulation_setup.py +22 -0
- pyedb/dotnet/database/utilities/value.py +1 -1
- pyedb/dotnet/edb.py +119 -123
- pyedb/edb_logger.py +1 -1
- pyedb/exceptions.py +22 -0
- pyedb/extensions/__init__.py +21 -0
- pyedb/extensions/create_cell_array.py +1 -1
- pyedb/extensions/via_design_backend.py +22 -0
- pyedb/generic/__init__.py +21 -0
- pyedb/generic/constants.py +1 -1
- pyedb/generic/data_handlers.py +22 -0
- pyedb/generic/design_types.py +1 -1
- pyedb/generic/filesystem.py +22 -0
- pyedb/generic/general_methods.py +22 -1
- pyedb/generic/grpc_warnings.py +22 -0
- pyedb/generic/plot.py +22 -0
- pyedb/generic/process.py +29 -2
- pyedb/generic/settings.py +1 -1
- pyedb/grpc/__init__.py +21 -0
- pyedb/grpc/database/__init__.py +21 -0
- pyedb/grpc/database/_typing.py +21 -0
- pyedb/grpc/database/components.py +9 -8
- pyedb/grpc/database/control_file.py +1 -1
- pyedb/grpc/database/definition/__init__.py +21 -0
- pyedb/grpc/database/definition/component_def.py +1 -1
- pyedb/grpc/database/definition/component_model.py +1 -1
- pyedb/grpc/database/definition/component_pin.py +1 -1
- pyedb/grpc/database/definition/materials.py +2 -2
- pyedb/grpc/database/definition/n_port_component_model.py +1 -1
- pyedb/grpc/database/definition/package_def.py +1 -1
- pyedb/grpc/database/definition/padstack_def.py +17 -10
- pyedb/grpc/database/definitions.py +1 -1
- pyedb/grpc/database/general.py +1 -1
- pyedb/grpc/database/geometry/__init__.py +21 -0
- pyedb/grpc/database/geometry/arc_data.py +1 -1
- pyedb/grpc/database/geometry/point_3d_data.py +1 -1
- pyedb/grpc/database/geometry/point_data.py +1 -1
- pyedb/grpc/database/geometry/polygon_data.py +1 -1
- pyedb/grpc/database/hfss.py +1 -1
- pyedb/grpc/database/hierarchy/__init__.py +21 -0
- pyedb/grpc/database/hierarchy/component.py +1 -1
- pyedb/grpc/database/hierarchy/model.py +1 -1
- pyedb/grpc/database/hierarchy/netlist_model.py +1 -1
- pyedb/grpc/database/hierarchy/pin_pair_model.py +1 -1
- pyedb/grpc/database/hierarchy/pingroup.py +1 -1
- pyedb/grpc/database/hierarchy/s_parameter_model.py +1 -1
- pyedb/grpc/database/hierarchy/spice_model.py +1 -1
- pyedb/grpc/database/layers/__init__.py +21 -0
- pyedb/grpc/database/layers/layer.py +22 -0
- pyedb/grpc/database/layers/stackup_layer.py +1 -1
- pyedb/grpc/database/layout/__init__.py +21 -0
- pyedb/grpc/database/layout/cell.py +1 -1
- pyedb/grpc/database/layout/layout.py +1 -1
- pyedb/grpc/database/layout/voltage_regulator.py +1 -1
- pyedb/grpc/database/layout_validation.py +1 -1
- pyedb/grpc/database/modeler.py +31 -9
- pyedb/grpc/database/net/__init__.py +21 -0
- pyedb/grpc/database/net/differential_pair.py +1 -1
- pyedb/grpc/database/net/extended_net.py +1 -1
- pyedb/grpc/database/net/net.py +1 -1
- pyedb/grpc/database/net/net_class.py +1 -1
- pyedb/grpc/database/nets.py +1 -1
- pyedb/grpc/database/padstacks.py +209 -9
- pyedb/grpc/database/ports/__init__.py +21 -0
- pyedb/grpc/database/ports/ports.py +1 -1
- pyedb/grpc/database/primitive/__init__.py +22 -0
- pyedb/grpc/database/primitive/bondwire.py +1 -1
- pyedb/grpc/database/primitive/circle.py +1 -1
- pyedb/grpc/database/primitive/padstack_instance.py +111 -16
- pyedb/grpc/database/primitive/path.py +1 -1
- pyedb/grpc/database/primitive/polygon.py +6 -4
- pyedb/grpc/database/primitive/primitive.py +1 -6
- pyedb/grpc/database/primitive/rectangle.py +1 -1
- pyedb/grpc/database/simulation_setup/__init__.py +21 -0
- pyedb/grpc/database/simulation_setup/adaptive_frequency.py +1 -1
- pyedb/grpc/database/simulation_setup/hfss_advanced_meshing_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/hfss_advanced_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/hfss_dcr_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/hfss_general_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/hfss_settings_options.py +1 -1
- pyedb/grpc/database/simulation_setup/hfss_simulation_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/hfss_simulation_setup.py +1 -1
- pyedb/grpc/database/simulation_setup/hfss_solver_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/mesh_operation.py +1 -1
- pyedb/grpc/database/simulation_setup/raptor_x_advanced_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/raptor_x_general_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/raptor_x_simulation_settings.py +1 -1
- pyedb/grpc/database/simulation_setup/raptor_x_simulation_setup.py +1 -1
- pyedb/grpc/database/simulation_setup/siwave_cpa_simulation_setup.py +22 -0
- pyedb/grpc/database/simulation_setup/siwave_dcir_simulation_setup.py +1 -1
- pyedb/grpc/database/simulation_setup/siwave_simulation_setup.py +1 -1
- pyedb/grpc/database/simulation_setup/sweep_data.py +1 -1
- pyedb/grpc/database/siwave.py +1 -1
- pyedb/grpc/database/source_excitations.py +1 -1
- pyedb/grpc/database/stackup.py +1 -1
- pyedb/grpc/database/terminal/__init__.py +21 -0
- pyedb/grpc/database/terminal/bundle_terminal.py +1 -1
- pyedb/grpc/database/terminal/edge_terminal.py +1 -1
- pyedb/grpc/database/terminal/padstack_instance_terminal.py +1 -1
- pyedb/grpc/database/terminal/pingroup_terminal.py +1 -1
- pyedb/grpc/database/terminal/point_terminal.py +1 -1
- pyedb/grpc/database/terminal/terminal.py +1 -1
- pyedb/grpc/database/utility/__init__.py +22 -0
- pyedb/grpc/database/utility/constants.py +1 -1
- pyedb/grpc/database/utility/heat_sink.py +1 -1
- pyedb/grpc/database/utility/hfss_extent_info.py +1 -1
- pyedb/grpc/database/utility/layout_statistics.py +1 -1
- pyedb/grpc/database/utility/rlc.py +1 -1
- pyedb/grpc/database/utility/sources.py +1 -1
- pyedb/grpc/database/utility/sweep_data_distribution.py +1 -1
- pyedb/grpc/database/utility/value.py +1 -1
- pyedb/grpc/database/utility/xml_control_file.py +1 -1
- pyedb/grpc/edb.py +230 -990
- pyedb/grpc/edb_init.py +1 -1
- pyedb/grpc/rpc_session.py +17 -4
- pyedb/ipc2581/__init__.py +21 -0
- pyedb/ipc2581/bom/__init__.py +21 -0
- pyedb/ipc2581/bom/bom.py +1 -1
- pyedb/ipc2581/bom/bom_item.py +1 -1
- pyedb/ipc2581/bom/characteristics.py +1 -1
- pyedb/ipc2581/bom/refdes.py +1 -1
- pyedb/ipc2581/content/__init__.py +21 -0
- pyedb/ipc2581/content/color.py +1 -1
- pyedb/ipc2581/content/content.py +1 -1
- pyedb/ipc2581/content/dictionary_color.py +1 -1
- pyedb/ipc2581/content/dictionary_fill.py +1 -1
- pyedb/ipc2581/content/dictionary_line.py +1 -1
- pyedb/ipc2581/content/entry_color.py +1 -1
- pyedb/ipc2581/content/entry_line.py +1 -1
- pyedb/ipc2581/content/fill.py +1 -1
- pyedb/ipc2581/content/layer_ref.py +1 -1
- pyedb/ipc2581/content/standard_geometries_dictionary.py +1 -1
- pyedb/ipc2581/ecad/__init__.py +21 -0
- pyedb/ipc2581/ecad/cad_data/__init__.py +21 -0
- pyedb/ipc2581/ecad/cad_data/assembly_drawing.py +1 -1
- pyedb/ipc2581/ecad/cad_data/cad_data.py +1 -1
- pyedb/ipc2581/ecad/cad_data/component.py +1 -1
- pyedb/ipc2581/ecad/cad_data/drill.py +1 -1
- pyedb/ipc2581/ecad/cad_data/feature.py +1 -1
- pyedb/ipc2581/ecad/cad_data/layer.py +1 -1
- pyedb/ipc2581/ecad/cad_data/layer_feature.py +1 -1
- pyedb/ipc2581/ecad/cad_data/logical_net.py +1 -1
- pyedb/ipc2581/ecad/cad_data/outline.py +1 -1
- pyedb/ipc2581/ecad/cad_data/package.py +1 -1
- pyedb/ipc2581/ecad/cad_data/padstack_def.py +1 -1
- pyedb/ipc2581/ecad/cad_data/padstack_hole_def.py +1 -1
- pyedb/ipc2581/ecad/cad_data/padstack_instance.py +1 -1
- pyedb/ipc2581/ecad/cad_data/padstack_pad_def.py +1 -1
- pyedb/ipc2581/ecad/cad_data/path.py +1 -1
- pyedb/ipc2581/ecad/cad_data/phy_net.py +1 -1
- pyedb/ipc2581/ecad/cad_data/pin.py +1 -1
- pyedb/ipc2581/ecad/cad_data/polygon.py +1 -1
- pyedb/ipc2581/ecad/cad_data/profile.py +1 -1
- pyedb/ipc2581/ecad/cad_data/stackup.py +1 -1
- pyedb/ipc2581/ecad/cad_data/stackup_group.py +1 -1
- pyedb/ipc2581/ecad/cad_data/stackup_layer.py +1 -1
- pyedb/ipc2581/ecad/cad_data/step.py +1 -1
- pyedb/ipc2581/ecad/cad_header.py +1 -1
- pyedb/ipc2581/ecad/ecad.py +1 -1
- pyedb/ipc2581/ecad/spec.py +1 -1
- pyedb/ipc2581/history_record.py +1 -1
- pyedb/ipc2581/ipc2581.py +1 -1
- pyedb/ipc2581/logistic_header.py +1 -1
- pyedb/libraries/common.py +1 -1
- pyedb/libraries/rf_libraries/base_functions.py +1 -1
- pyedb/libraries/rf_libraries/planar_antennas.py +1 -1
- pyedb/misc/__init__.py +21 -0
- pyedb/misc/aedtlib_personalib_install.py +1 -1
- pyedb/misc/decorators.py +22 -0
- pyedb/misc/downloads.py +1 -1
- pyedb/misc/misc.py +1 -1
- pyedb/misc/siw_feature_config/__init__.py +21 -0
- pyedb/misc/siw_feature_config/emc/__init__.py +21 -0
- pyedb/misc/siw_feature_config/emc/component_tags.py +22 -0
- pyedb/misc/siw_feature_config/emc/net_tags.py +22 -0
- pyedb/misc/siw_feature_config/emc/tag_library.py +22 -0
- pyedb/misc/siw_feature_config/emc/xml_generic.py +22 -0
- pyedb/misc/siw_feature_config/emc_rule_checker_settings.py +1 -1
- pyedb/misc/siw_feature_config/xtalk_scan/fd_xtalk_scan_config.py +1 -1
- pyedb/misc/siw_feature_config/xtalk_scan/impedance_scan_config.py +1 -1
- pyedb/misc/siw_feature_config/xtalk_scan/net.py +1 -1
- pyedb/misc/siw_feature_config/xtalk_scan/pins.py +1 -1
- pyedb/misc/siw_feature_config/xtalk_scan/scan_config.py +1 -1
- pyedb/misc/siw_feature_config/xtalk_scan/td_xtalk_config.py +1 -1
- pyedb/misc/utilities.py +1 -1
- pyedb/modeler/geometry_operators.py +22 -0
- pyedb/siwave.py +22 -0
- pyedb/siwave_core/__init__.py +21 -0
- pyedb/siwave_core/cpa/__init__.py +21 -0
- pyedb/siwave_core/cpa/simulation_setup_data_model.py +22 -0
- pyedb/siwave_core/icepak.py +1 -1
- pyedb/siwave_core/product_properties.py +23 -0
- pyedb/workflow.py +22 -0
- pyedb/workflows/__init__.py +21 -0
- pyedb/workflows/job_manager/__init__.py +21 -0
- pyedb/workflows/job_manager/backend/__init__.py +21 -0
- pyedb/workflows/job_manager/backend/job_manager_handler.py +910 -0
- pyedb/workflows/job_manager/backend/job_submission.py +1169 -0
- pyedb/workflows/job_manager/backend/service.py +1663 -0
- pyedb/workflows/job_manager/backend/start_service.py +86 -0
- pyedb/workflows/job_manager/backend/submit_job_on_scheduler.py +168 -0
- pyedb/workflows/job_manager/backend/submit_local_job.py +166 -0
- pyedb/workflows/sipi/hfss_auto_configuration.py +1 -1
- pyedb/workflows/utilities/__init__.py +21 -0
- pyedb/workflows/utilities/cutout.py +1428 -0
- pyedb/workflows/utilities/hfss_log_parser.py +446 -0
- {pyedb-0.59.0.dist-info → pyedb-0.61.0.dist-info}/METADATA +7 -4
- pyedb-0.61.0.dist-info/RECORD +318 -0
- {pyedb-0.59.0.dist-info → pyedb-0.61.0.dist-info}/licenses/LICENSE +7 -7
- pyedb-0.59.0.dist-info/RECORD +0 -306
- {pyedb-0.59.0.dist-info → pyedb-0.61.0.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,910 @@
|
|
|
1
|
+
# Copyright (C) 2023 - 2025 ANSYS, Inc. and/or its affiliates.
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
#
|
|
4
|
+
#
|
|
5
|
+
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
# of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
# in the Software without restriction, including without limitation the rights
|
|
8
|
+
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
# copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
# furnished to do so, subject to the following conditions:
|
|
11
|
+
#
|
|
12
|
+
# The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
# copies or substantial portions of the Software.
|
|
14
|
+
#
|
|
15
|
+
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
# SOFTWARE.
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
"""
|
|
25
|
+
Thread-safe façade for the async ANSYS Job Manager.
|
|
26
|
+
|
|
27
|
+
This module exposes a **synchronous, production-grade** entry point to the
|
|
28
|
+
**asynchronous** job manager service. A background daemon thread hosts an
|
|
29
|
+
``aiohttp`` web server that schedules and monitors HFSS/3D-Layout simulations
|
|
30
|
+
on the local machine or external clusters (SLURM, LSF, PBS, Windows-HPC).
|
|
31
|
+
|
|
32
|
+
The handler guarantees:
|
|
33
|
+
|
|
34
|
+
* **Non-blocking** start/stop semantics for the caller thread.
|
|
35
|
+
* **Graceful shutdown** via ``atexit`` or explicit ``close()``.
|
|
36
|
+
* **Thread-safe** job submission and cancellation.
|
|
37
|
+
* **Global timeout** support for batched workloads.
|
|
38
|
+
* **Zero configuration** when used with PyEDB ``Edb`` objects.
|
|
39
|
+
|
|
40
|
+
Examples
|
|
41
|
+
--------
|
|
42
|
+
>>> handler = JobManagerHandler() # doctest: +SKIP
|
|
43
|
+
>>> handler.start_service() # doctest: +SKIP
|
|
44
|
+
>>> config = handler.create_simulation_config("/path/to/project.aedt") # doctest: +SKIP
|
|
45
|
+
>>> job_id = asyncio.run(handler.submit_job(config)) # doctest: +SKIP
|
|
46
|
+
>>> handler.close() # doctest: +SKIP
|
|
47
|
+
|
|
48
|
+
For command-line usage:
|
|
49
|
+
|
|
50
|
+
.. code-block:: bash
|
|
51
|
+
|
|
52
|
+
python -m pyedb.workflows.job_manager.backend.job_manager_handler --host localhost --port 8080
|
|
53
|
+
"""
|
|
54
|
+
|
|
55
|
+
import asyncio
|
|
56
|
+
from asyncio import run_coroutine_threadsafe
|
|
57
|
+
import atexit
|
|
58
|
+
import concurrent.futures as _futs
|
|
59
|
+
import getpass
|
|
60
|
+
import os
|
|
61
|
+
from pathlib import Path
|
|
62
|
+
import platform
|
|
63
|
+
import shutil
|
|
64
|
+
import ssl
|
|
65
|
+
import sys
|
|
66
|
+
import threading
|
|
67
|
+
import time
|
|
68
|
+
from typing import Optional
|
|
69
|
+
import uuid
|
|
70
|
+
|
|
71
|
+
import aiohttp
|
|
72
|
+
from aiohttp import web
|
|
73
|
+
import requests
|
|
74
|
+
|
|
75
|
+
from pyedb.generic.general_methods import is_linux
|
|
76
|
+
from pyedb.workflows.job_manager.backend.job_submission import (
|
|
77
|
+
HFSS3DLayoutBatchOptions,
|
|
78
|
+
HFSSSimulationConfig,
|
|
79
|
+
MachineNode,
|
|
80
|
+
SchedulerType,
|
|
81
|
+
create_hfss_config,
|
|
82
|
+
)
|
|
83
|
+
from pyedb.workflows.job_manager.backend.service import JobManager, ResourceLimits, SchedulerManager
|
|
84
|
+
from pyedb.workflows.utilities.hfss_log_parser import HFSSLogParser
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def get_session(url: str) -> aiohttp.ClientSession:
    """
    Return an ``aiohttp.ClientSession`` with appropriate TLS configuration.

    Parameters
    ----------
    url : str
        Base URL; used only to decide whether TLS verification is required.

    Returns
    -------
    aiohttp.ClientSession
        Configured client session with timeout and SSL context.

    Notes
    -----
    The session is configured with:

    - 30-second total timeout
    - TLS verification for HTTPS URLs
    - Connection pooling (limit=20, limit_per_host=10)
    - Appropriate User-Agent header

    When called from a synchronous context (no running event loop), a fresh
    loop is created and installed as the current one so the session and its
    connector can bind to it.
    """
    timeout = aiohttp.ClientTimeout(total=30)

    # Only verify certificates for https:// URLs; plain http disables TLS.
    tls = url.lower().startswith("https://")
    ssl_context = ssl.create_default_context() if tls else False

    try:
        asyncio.get_running_loop()
    except RuntimeError:  # synchronous context: install a fresh loop
        asyncio.set_event_loop(asyncio.new_event_loop())

    # NOTE: the explicit ``loop=`` argument is deprecated in aiohttp 3.x and
    # removed in 4.0 — ClientSession/TCPConnector pick up the current event
    # loop themselves, which is the loop installed above when none was running.
    return aiohttp.ClientSession(
        timeout=timeout,
        headers={"User-Agent": "pyedb-job-manager/1.0"},
        connector=aiohttp.TCPConnector(
            limit=20,
            limit_per_host=10,
            ssl=ssl_context,
        ),
    )
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
@web.middleware
async def cors_middleware(request, handler):
    """
    Attach permissive CORS headers to every response.

    Parameters
    ----------
    request : aiohttp.web.Request
        Incoming HTTP request.
    handler : callable
        Next handler in the middleware chain.

    Returns
    -------
    aiohttp.web.Response
        The downstream response, augmented with CORS headers.
    """
    response = await handler(request)
    cors_headers = {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
        "Access-Control-Allow-Headers": "Content-Type",
    }
    for name, value in cors_headers.items():
        response.headers[name] = value
    return response
|
+
|
|
158
|
+
|
|
159
|
+
class JobManagerHandler:
|
|
160
|
+
"""
|
|
161
|
+
Synchronous façade that controls an **async** Job Manager service.
|
|
162
|
+
|
|
163
|
+
This class provides a thread-safe interface to manage asynchronous job
|
|
164
|
+
execution while running the aiohttp server in a background thread.
|
|
165
|
+
|
|
166
|
+
Parameters
|
|
167
|
+
----------
|
|
168
|
+
edb : Optional[Edb]
|
|
169
|
+
PyEDB instance for automatic ANSYS path detection
|
|
170
|
+
version : Optional[str]
|
|
171
|
+
Specific ANSYS version to use (e.g., "2023.1")
|
|
172
|
+
host : str
|
|
173
|
+
Hostname or IP address to bind the server
|
|
174
|
+
port : int
|
|
175
|
+
TCP port to listen on
|
|
176
|
+
|
|
177
|
+
Attributes
|
|
178
|
+
----------
|
|
179
|
+
ansys_path : str
|
|
180
|
+
Path to ANSYS EDT executable
|
|
181
|
+
scheduler_type : SchedulerType
|
|
182
|
+
Detected scheduler type (SLURM, LSF, or NONE)
|
|
183
|
+
manager : JobManager
|
|
184
|
+
Underlying async job manager instance
|
|
185
|
+
host : str
|
|
186
|
+
Server hostname
|
|
187
|
+
port : int
|
|
188
|
+
Server port
|
|
189
|
+
url : str
|
|
190
|
+
Full server URL
|
|
191
|
+
started : bool
|
|
192
|
+
Whether the service is currently running
|
|
193
|
+
|
|
194
|
+
Raises
|
|
195
|
+
------
|
|
196
|
+
ValueError
|
|
197
|
+
If specified ANSYS version is not found
|
|
198
|
+
RuntimeError
|
|
199
|
+
If service fails to start within timeout
|
|
200
|
+
|
|
201
|
+
Examples
|
|
202
|
+
--------
|
|
203
|
+
>>> handler = JobManagerHandler() # doctest: +SKIP
|
|
204
|
+
>>> handler.start_service() # doctest: +SKIP
|
|
205
|
+
>>> print(f"Server running at {handler.url}") # doctest: +SKIP
|
|
206
|
+
>>> # Submit jobs via REST API or handler methods
|
|
207
|
+
>>> handler.close() # doctest: +SKIP
|
|
208
|
+
"""
|
|
209
|
+
|
|
210
|
+
def __init__(self, edb=None, version=None, host="localhost", port=8080):
    """
    Resolve the ANSYS EDT executable, configure the job manager and web app.

    Parameters
    ----------
    edb : Optional[Edb]
        PyEDB instance used for automatic ANSYS path detection.
    version : Optional[str]
        Specific installed ANSYS EM release to use; ignored when ``edb``
        is given. When ``None``, the latest installed release is used.
    host : str
        Hostname or IP address to bind the server.
    port : int
        TCP port to listen on.

    Raises
    ------
    ValueError
        If the requested ANSYS release is not installed.
    """
    # The executable name is the only platform-specific part of the path.
    exe_name = "ansysedt" if is_linux else "ansysedt.exe"
    if edb:
        self.ansys_path = os.path.join(edb.base_path, exe_name)
    else:
        from pyedb.generic.general_methods import installed_ansys_em_versions

        installed_versions = installed_ansys_em_versions()
        if not version:
            # Default to the last listed installation (latest release).
            base_dir = list(installed_versions.values())[-1]
        else:
            if version not in installed_versions:
                raise ValueError(f"ANSYS release {version} not found")
            base_dir = installed_versions[version]
        self.ansys_path = os.path.join(base_dir, exe_name)

    # Detect the batch scheduler once; the original code detected it twice.
    self.scheduler_type = self._detect_scheduler()
    self.manager = JobManager(scheduler_type=self.scheduler_type)
    self.manager.resource_limits = ResourceLimits(max_concurrent_jobs=1)
    self.manager.jobs = {}  # In-memory job store -TODO add persistence database
    # Pass the detected ANSYS path to the manager
    self.manager.ansys_path = self.ansys_path

    self.host, self.port = host, port
    self._url = f"http://{host}:{port}"

    # Setup aiohttp and Socket.IO server (reuse the manager's app) ---
    self.sio = self.manager.sio
    self.app = self.manager.app
    self.app.middlewares.append(cors_middleware)
    self._add_routes()
    # ----------------------------------------

    self.runner: Optional[web.AppRunner] = None
    self.site = None
    self.started = False
    self._loop: Optional[asyncio.AbstractEventLoop] = None
    self._thread: Optional[threading.Thread] = None
    self._start_event = threading.Event()
    self._shutdown = False
    atexit.register(self.close)  # best-effort cleanup on interpreter exit

    # A SchedulerManager is only needed when a real scheduler is present.
    self._sch_mgr: Optional[SchedulerManager] = None
    if self.scheduler_type != SchedulerType.NONE:
        self._sch_mgr = SchedulerManager(self.scheduler_type)
+
|
|
263
|
+
def _add_routes(self):
    """Register the REST API endpoints on the aiohttp application."""
    router = self.app.router
    get_routes = (
        ("/api/jobs", self.get_jobs),
        ("/api/queue", self.get_queue_status),
        ("/api/resources", self.get_resources),
        ("/api/scheduler_type", self.get_scheduler_type),
        ("/api/cluster_partitions", self.get_cluster_partitions),
        ("/api/jobs/{job_id}/log", self.get_job_log),
        ("/api/me", self.get_me),
        ("/system/status", self.get_system_status),
    )
    post_routes = (
        ("/api/submit", self.handle_submit_job),
        ("/api/cancel/{job_id}", self.cancel_job),
        ("/jobs/submit", self.handle_submit_job),
    )
    for path, view in get_routes:
        router.add_get(path, view)
    for path, view in post_routes:
        router.add_post(path, view)
|
+
|
|
277
|
+
def _find_latest_log(self, project_path: str) -> Path | None:
    """
    Locate the newest log file produced by a batch run of *project_path*.

    Parameters
    ----------
    project_path : str
        Path to the AEDT project file.

    Returns
    -------
    Path or None
        Most recently modified ``*.log`` file, or ``None`` when no
        batchinfo folder contains one.

    Notes
    -----
    Batch runs create timestamped sibling folders named
    ``<project>.aedb.batchinfo.<timestamp>``; folders are scanned newest
    first and the freshest log inside the first non-empty one wins.
    """
    project = Path(project_path).resolve()
    stem = project.with_suffix("").name  # project name without .aedt/.aedb
    candidates = project.parent.glob(f"{stem}.aedb.batchinfo*")

    newest_first = sorted(candidates, key=lambda p: p.stat().st_mtime, reverse=True)
    for folder in newest_first:
        logs = list(folder.glob("*.log"))
        if logs:
            return max(logs, key=lambda p: p.stat().st_mtime)
    return None
|
+
|
|
312
|
+
def submit_job(self, config: HFSSSimulationConfig, priority: int = 0, timeout: float = 30.0) -> str:
    """
    Synchronously submit a simulation job.

    Thread-safe: the coroutine is marshalled onto the background event
    loop and this call blocks until the submission completes.

    Parameters
    ----------
    config : HFSSSimulationConfig
        Fully-built and validated simulation configuration.
    priority : int, optional
        Job priority (higher → de-queued earlier). Default 0.
    timeout : float, optional
        Seconds to wait for the submission to complete. Default 30 s.

    Returns
    -------
    str
        Unique job identifier (same as ``config.jobid``).

    Raises
    ------
    RuntimeError
        If the service is not started or the submission times out.
    Exception
        Any validation / scheduler error raised by the underlying coroutine.

    Examples
    --------
    >>> handler = JobManagerHandler()  # doctest: +SKIP
    >>> handler.start_service()  # doctest: +SKIP
    >>> job_id = handler.submit_job(cfg, priority=0)  # doctest: +SKIP
    >>> handler.close()  # doctest: +SKIP
    """
    if not self.started:
        raise RuntimeError("Job-manager service is not started")

    # Hand the coroutine to the background loop and wait for its result.
    pending = run_coroutine_threadsafe(
        self.manager.submit_job(config, priority=priority),
        self._loop,
    )
    try:
        return pending.result(timeout=timeout)
    except _futs.TimeoutError as exc:
        raise RuntimeError("Job submission timed out") from exc
|
|
368
|
+
def wait_until_done(self, job_id: str, poll_every: float = 2.0) -> str:
    """
    Poll the REST API until *job_id* reaches a terminal state
    (completed, failed, or cancelled).

    Parameters
    ----------
    job_id : str
        Identifier returned by ``submit_job``.
    poll_every : float, optional
        Seconds between two successive polls. Default 2.0.

    Returns
    -------
    str
        Terminal status string.

    Raises
    ------
    RuntimeError
        If the service is not started or the job vanishes from the manager.
    """
    if not self.started:
        raise RuntimeError("Service not started")

    terminal_states = {"completed", "failed", "cancelled"}
    while True:
        snapshot = requests.get(f"{self.url}/api/jobs", timeout=30).json()
        match = None
        for entry in snapshot:
            if entry["id"] == job_id:
                match = entry
                break
        if not match:
            raise RuntimeError(f"Job {job_id} disappeared from manager")
        current = match["status"]
        if current in terminal_states:
            return current
        time.sleep(poll_every)
|
|
390
|
+
|
|
391
|
+
def wait_until_all_done(self, poll_every: float = 2.0) -> None:
    """
    Block until **every** job currently known to the manager is in a
    terminal state (completed, failed, or cancelled).

    Parameters
    ----------
    poll_every : float, optional
        Seconds between two successive polls. Default 2.0.

    Raises
    ------
    RuntimeError
        If the service is not started.
    """
    if not self.started:
        raise RuntimeError("Service not started")

    terminal_states = {"completed", "failed", "cancelled"}
    while True:
        snapshot = requests.get(f"{self.url}/api/jobs", timeout=30).json()
        # Done when no job remains outside a terminal state.
        if all(entry["status"] in terminal_states for entry in snapshot):
            return
        time.sleep(poll_every)
|
|
405
|
+
|
|
406
|
+
async def get_system_status(self, request):
    """
    Get system status and scheduler information.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request object (unused).

    Returns
    -------
    aiohttp.web.Response
        JSON response carrying the scheduler mode and detection details.
    """
    payload = {
        "mode": self.scheduler_type.value,  # e.g. "slurm", "lsf", "none"
        "scheduler_detection": {
            "active_scheduler": self.scheduler_type.name,
            "detected_by": "JobManagerHandler",
        },
    }
    return web.json_response(payload)
|
|
429
|
+
|
|
430
|
+
async def get_me(self, request):
    """
    Get current user information.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request object (unused).

    Returns
    -------
    aiohttp.web.Response
        JSON response with the OS-level username of the server process.
    """
    import getpass

    username = getpass.getuser()
    return web.json_response({"username": username})
|
|
447
|
+
|
|
448
|
+
async def get_jobs(self, request):
    """
    Get the list of all jobs with their current status.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request object (unused).

    Returns
    -------
    aiohttp.web.Response
        JSON array, one flat object per tracked job.
    """

    def _as_json(jid, info):
        # Flatten one JobInfo record; datetimes become ISO-8601 strings.
        return {
            "id": jid,
            "user": info.config.user or getpass.getuser(),
            "config": info.config.to_dict(),
            "status": info.status.value,
            "start_time": info.start_time.isoformat() if info.start_time else None,
            "end_time": info.end_time.isoformat() if info.end_time else None,
            "return_code": info.return_code,
            "scheduler_job_id": info.scheduler_job_id,
            "priority": info.priority,
        }

    payload = [_as_json(jid, info) for jid, info in self.manager.jobs.items()]
    return web.json_response(payload)
|
|
478
|
+
|
|
479
|
+
async def get_scheduler_type(self, request):
    """
    Get the detected scheduler type.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request object (unused).

    Returns
    -------
    aiohttp.web.Response
        JSON response with the scheduler type value.
    """
    detected = self.scheduler_type.value
    return web.json_response({"scheduler_type": detected})
|
|
494
|
+
|
|
495
|
+
async def get_cluster_partitions(self, request):
    """
    Get available cluster partitions/queues.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request object (unused).

    Returns
    -------
    aiohttp.web.Response
        JSON array of partition information; empty array when no
        scheduler manager is available.
    """
    if not self._sch_mgr:
        # No scheduler manager configured → nothing to report.
        return web.json_response([])
    partitions = await self._sch_mgr.get_partitions()
    return web.json_response(partitions)
|
|
513
|
+
|
|
514
|
+
async def get_job_log(self, request):
    """
    Get the parsed HFSS log for a finished job.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request with ``job_id`` in the URL path.

    Returns
    -------
    aiohttp.web.Response
        - 200: JSON with parsed log data
        - 204: No log available yet
        - 404: Job not found
        - 500: Log parsing error
    """
    job_id = request.match_info["job_id"]
    record = self.manager.jobs.get(job_id)
    if not record:
        return web.json_response({"error": "Job not found"}, status=404)

    log_path = self._find_latest_log(record.config.project_path)
    if not log_path or not log_path.exists():
        # Nothing written yet — tell the client there is no content.
        return web.Response(status=204)

    try:
        report = HFSSLogParser(log_path).parse()
        body = report.to_dict()
        converged = report.adaptive[-1].converged if report.adaptive else False
        body["log_parser"] = {"is_converged": converged}
        return web.json_response(body)
    except Exception as exc:
        # Parsing failures surface as a 500 with the error text.
        return web.json_response({"error": str(exc)}, status=500)
|
|
549
|
+
|
|
550
|
+
async def handle_submit_job(self, request):
    """
    Submit a new simulation job.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request with JSON payload containing the job configuration.

    Returns
    -------
    aiohttp.web.Response
        JSON response with job ID and status.

    Notes
    -----
    Expected JSON payload:

    .. code-block:: json

        {
            "config": {
                "scheduler_type": "slurm|lsf|none",
                "project_path": "/path/to/project.aedt",
                ... other HFSS config fields
            },
            "user": "username",
            "machine_nodes": [...],
            "batch_options": {...}
        }
    """
    data = await request.json()

    # 1. decide which scheduler the UI *really* wants
    # setdefault guards against payloads that omit "config" entirely —
    # the previous data["config"] access would have raised KeyError.
    cfg_payload = data.setdefault("config", {})
    sched_type_str = cfg_payload.get("scheduler_type", "none")
    try:
        requested_scheduler = SchedulerType(sched_type_str.lower())
    except ValueError:
        # Unknown string from the client → fall back to local execution.
        requested_scheduler = SchedulerType.NONE

    # 2. inject the server-side ANSYS path (never trust the client)
    cfg_payload["ansys_edt_path"] = self.ansys_path
    config = HFSSSimulationConfig.from_dict(cfg_payload)

    # 3. overwrite scheduler type and user with authoritative values
    if config.scheduler_type != requested_scheduler:
        # Report the value that is actually applied (the server-detected
        # scheduler), not merely the client's requested one.
        print(
            "Overriding scheduler type from client:",
            config.scheduler_type,
            "→",
            self.scheduler_type,
        )
        config.scheduler_type = self.scheduler_type
    config.user = data.get("user") or getpass.getuser()

    # 4. optional machine nodes / batch options
    if data.get("machine_nodes"):
        config.machine_nodes = [MachineNode(**n) for n in data["machine_nodes"]]
    if data.get("batch_options"):
        config.layout_options = HFSS3DLayoutBatchOptions(**data["batch_options"])

    # 5. FINAL guarantee – path must be non-empty and exist
    if not config.ansys_edt_path or not os.path.isfile(config.ansys_edt_path):
        config.ansys_edt_path = self.ansys_path
        # rebuild so every cached field (command string, scripts, …) is correct
        config = HFSSSimulationConfig(**config.model_dump())

    # 6. submit to the async manager and return the job id
    job_id = await self.manager.submit_job(config)
    return web.json_response({"job_id": job_id, "status": "submitted"})
|
|
615
|
+
|
|
616
|
+
async def get_queue_status(self, request):
    """
    Get the current queue status for UI display.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request object (unused).

    Returns
    -------
    aiohttp.web.Response
        JSON with queue statistics from the job pool.
    """
    stats = self.manager.job_pool.get_queue_stats()
    return web.json_response(stats)
|
|
632
|
+
|
|
633
|
+
async def get_resources(self, request):
    """
    Get current resource usage for UI display.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request object (unused).

    Returns
    -------
    aiohttp.web.Response
        JSON with the resource monitor's latest usage sample.
    """
    usage = self.manager.resource_monitor.current_usage
    return web.json_response(usage)
|
|
649
|
+
|
|
650
|
+
async def cancel_job(self, request):
    """
    Cancel a running or queued job.

    Parameters
    ----------
    request : aiohttp.web.Request
        HTTP request with ``job_id`` in the URL path.

    Returns
    -------
    aiohttp.web.Response
        JSON response with cancellation status and a success flag.
    """
    target = request.match_info["job_id"]
    ok = await self.manager.cancel_job(target)
    outcome = "cancelled" if ok else "failed"
    return web.json_response({"status": outcome, "success": ok})
|
|
667
|
+
|
|
668
|
+
@staticmethod
def _detect_scheduler() -> SchedulerType:
    """
    Detect the job scheduler available on this host.

    Returns
    -------
    SchedulerType
        Detected scheduler type (SLURM, LSF, or NONE).

    Notes
    -----
    Windows hosts always map to NONE. On other platforms, the presence
    of the ``sinfo`` (SLURM) or ``bhosts`` (LSF) executables on PATH
    decides; SLURM is probed first.
    """
    if platform.system() == "Windows":
        return SchedulerType.NONE
    probes = {
        "sinfo": SchedulerType.SLURM,
        "bhosts": SchedulerType.LSF,
    }
    for command, detected in probes.items():
        if shutil.which(command) is not None:
            return detected
    return SchedulerType.NONE
|
|
690
|
+
|
|
691
|
+
@property
def url(self) -> str:
    """
    Full base URL of the running server.

    Returns
    -------
    str
        Server URL in ``http://host:port`` form.
    """
    return self._url
|
|
702
|
+
|
|
703
|
+
def start_service(self) -> None:
    """
    Start the job manager service in a background daemon thread.

    Non-blocking apart from a startup handshake: the call returns as
    soon as the background event loop signals readiness.

    Raises
    ------
    RuntimeError
        If the service fails to start within 10 seconds.
    """
    if self.started:
        return  # already running — idempotent
    worker = threading.Thread(target=self._run_event_loop, daemon=True)
    self._thread = worker
    worker.start()
    came_up = self._start_event.wait(timeout=10)
    if not came_up:
        raise RuntimeError("Job-Manager service failed to start within 10 s")
|
|
723
|
+
|
|
724
|
+
async def _start_site(self) -> None:
    """
    Bring up the aiohttp server inside the background event loop.

    On success, flags readiness so the thread blocked in
    ``start_service`` can return.
    """
    self.runner = web.AppRunner(self.app)
    await self.runner.setup()
    self.site = web.TCPSite(self.runner, self.host, self.port)
    await self.site.start()
    # Signal the foreground thread waiting in start_service().
    self.started = True
    self._start_event.set()
|
|
736
|
+
|
|
737
|
+
def close(self) -> None:
    """
    Gracefully shut down the job manager service.

    Notes
    -----
    Called automatically on program exit via atexit, but may also be
    invoked explicitly. Safe to call when the service never started.
    """
    if not self.started or not self._loop:
        return
    # Hand the async shutdown to the background loop and wait on it.
    pending = run_coroutine_threadsafe(self.stop_service(), self._loop)
    try:
        pending.result(timeout=10)
    except (_futs.TimeoutError, asyncio.TimeoutError):
        print("Warning: Service did not shut down gracefully.", file=sys.stderr)
    self.started = False
|
|
754
|
+
|
|
755
|
+
async def stop_service(self) -> None:
    """
    Stop the aiohttp server and clean up its resources.

    Async counterpart of ``close``; runs inside the background event loop.
    """
    if not self.started:
        return
    self._shutdown = True
    if self.site:
        await self.site.stop()
    if self.runner:
        await self.runner.cleanup()
    self.started = False
|
|
769
|
+
|
|
770
|
+
def _run_event_loop(self) -> None:
    """
    Thread target: own a private event loop for the whole service.

    Creates the loop, starts the HTTP site, takes one synchronous
    resource sample so the UI has data immediately, starts the periodic
    monitors, then runs the loop forever.

    Notes
    -----
    ``psutil.cpu_percent(interval=1)`` blocks this thread for ~1 s while
    taking the first sample.
    """
    self._loop = asyncio.new_event_loop()
    asyncio.set_event_loop(self._loop)
    self._loop.run_until_complete(self._start_site())

    # ---- make the first sample synchronous ----
    # (local imports keep these optional deps out of module import time)
    import datetime
    import os

    import psutil

    memory = psutil.virtual_memory()
    disk = psutil.disk_usage(os.sep)
    self.manager.resource_monitor.current_usage.update(
        {
            "cpu_percent": psutil.cpu_percent(interval=1),
            "memory_percent": memory.percent,
            "memory_used_gb": round(memory.used / 1024**3, 2),
            "memory_total_gb": round(memory.total / 1024**3, 2),
            "memory_free_gb": round(memory.available / 1024**3, 2),
            "disk_usage_percent": disk.percent,
            "disk_free_gb": round(disk.free / 1024**3, 2),
            "timestamp": datetime.datetime.now().isoformat(),
        }
    )
    # ------------------------------------------

    # now start the periodic coroutine
    self.manager._monitor_task = self._loop.create_task(self.manager.resource_monitor.monitor_resources())
    self.manager._ensure_scheduler_monitor_running()

    self._loop.run_forever()
|
|
803
|
+
|
|
804
|
+
def create_simulation_config(
    self,
    project_path: str,
    ansys_edt_path: str | None = None,
    jobid: str | None = None,
    scheduler_type: SchedulerType | None = None,
    cpu_cores: int = 1,
    user: str = "unknown",
) -> HFSSSimulationConfig:
    """
    Create a validated HFSSSimulationConfig.

    Parameters
    ----------
    project_path : str
        Path to the AEDT project file.
    ansys_edt_path : str, optional
        Path to the ANSYS EDT executable. Uses the detected path if None.
    jobid : str, optional
        Job identifier. Auto-generated from the project name if None.
    scheduler_type : SchedulerType, optional
        Scheduler type. Uses the detected scheduler if None.
    cpu_cores : int
        Number of CPU cores for local execution.
    user : str
        Username for job ownership.

    Returns
    -------
    HFSSSimulationConfig
        Validated simulation configuration.

    Raises
    ------
    ValueError
        If project_path is empty.

    Notes
    -----
    The cpu_cores parameter is only used when scheduler_type is NONE
    (local execution); for cluster execution, cores come from the
    scheduler configuration.
    """
    if not project_path:
        raise ValueError("Project path must be provided")

    # Fall back to handler-detected defaults for anything omitted.
    exe_path = self.ansys_path if ansys_edt_path is None else ansys_edt_path
    if jobid is None:
        jobid = f"{Path(project_path).stem}_{uuid.uuid4().hex[:6]}"
    effective_scheduler = self.scheduler_type if scheduler_type is None else scheduler_type

    # One localhost machine-node carrying the requested CPU budget.
    node = MachineNode(
        hostname="localhost",
        cores=cpu_cores,
        max_cores=cpu_cores,
        utilization=90,
    )

    cfg = create_hfss_config(
        ansys_edt_path=exe_path,
        jobid=jobid,
        project_path=project_path,
        scheduler_type=effective_scheduler,
        machine_nodes=[node],
    )
    cfg.user = user
    return cfg
|
|
875
|
+
|
|
876
|
+
|
|
877
|
+
if __name__ == "__main__":
    # Command-line entry point for the job manager backend, e.g.:
    #   python -m pyedb.workflows.job_manager.backend.job_manager_handler --host localhost --port 8080
    import argparse

    cli = argparse.ArgumentParser(description="Start the PyEDB job-manager backend.")
    cli.add_argument(
        "--host",
        type=str,
        default="localhost",
        help="IP address or hostname to bind the server (default: localhost)",
    )
    cli.add_argument(
        "--port",
        type=int,
        default=8080,
        help="TCP port to listen on (default: 8080)",
    )
    options = cli.parse_args()

    handler = JobManagerHandler(host=options.host, port=options.port)
    handler.start_service()
    print(f"✅ Job-manager backend listening on http://{handler.host}:{handler.port}")
    try:
        # Park the main thread; the service runs in a daemon thread.
        threading.Event().wait()
    except KeyboardInterrupt:
        print("\nShutting down...")
        handler.close()
        sys.exit(0)
|