mlmm-toolkit 0.2.2.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hessian_ff/__init__.py +50 -0
- hessian_ff/analytical_hessian.py +609 -0
- hessian_ff/constants.py +46 -0
- hessian_ff/forcefield.py +339 -0
- hessian_ff/loaders.py +608 -0
- hessian_ff/native/Makefile +8 -0
- hessian_ff/native/__init__.py +28 -0
- hessian_ff/native/analytical_hessian.py +88 -0
- hessian_ff/native/analytical_hessian_ext.cpp +258 -0
- hessian_ff/native/bonded.py +82 -0
- hessian_ff/native/bonded_ext.cpp +640 -0
- hessian_ff/native/loader.py +349 -0
- hessian_ff/native/nonbonded.py +118 -0
- hessian_ff/native/nonbonded_ext.cpp +1150 -0
- hessian_ff/prmtop_parmed.py +23 -0
- hessian_ff/system.py +107 -0
- hessian_ff/terms/__init__.py +14 -0
- hessian_ff/terms/angle.py +73 -0
- hessian_ff/terms/bond.py +44 -0
- hessian_ff/terms/cmap.py +406 -0
- hessian_ff/terms/dihedral.py +141 -0
- hessian_ff/terms/nonbonded.py +209 -0
- hessian_ff/tests/__init__.py +0 -0
- hessian_ff/tests/conftest.py +75 -0
- hessian_ff/tests/data/small/complex.parm7 +1346 -0
- hessian_ff/tests/data/small/complex.pdb +125 -0
- hessian_ff/tests/data/small/complex.rst7 +63 -0
- hessian_ff/tests/test_coords_input.py +44 -0
- hessian_ff/tests/test_energy_force.py +49 -0
- hessian_ff/tests/test_hessian.py +137 -0
- hessian_ff/tests/test_smoke.py +18 -0
- hessian_ff/tests/test_validation.py +40 -0
- hessian_ff/workflows.py +889 -0
- mlmm/__init__.py +36 -0
- mlmm/__main__.py +7 -0
- mlmm/_version.py +34 -0
- mlmm/add_elem_info.py +374 -0
- mlmm/advanced_help.py +91 -0
- mlmm/align_freeze_atoms.py +601 -0
- mlmm/all.py +3535 -0
- mlmm/bond_changes.py +231 -0
- mlmm/bool_compat.py +223 -0
- mlmm/cli.py +574 -0
- mlmm/cli_utils.py +166 -0
- mlmm/default_group.py +337 -0
- mlmm/defaults.py +467 -0
- mlmm/define_layer.py +526 -0
- mlmm/dft.py +1041 -0
- mlmm/energy_diagram.py +253 -0
- mlmm/extract.py +2213 -0
- mlmm/fix_altloc.py +464 -0
- mlmm/freq.py +1406 -0
- mlmm/harmonic_constraints.py +140 -0
- mlmm/hessian_cache.py +44 -0
- mlmm/hessian_calc.py +174 -0
- mlmm/irc.py +638 -0
- mlmm/mlmm_calc.py +2262 -0
- mlmm/mm_parm.py +945 -0
- mlmm/oniom_export.py +1983 -0
- mlmm/oniom_import.py +457 -0
- mlmm/opt.py +1742 -0
- mlmm/path_opt.py +1353 -0
- mlmm/path_search.py +2299 -0
- mlmm/preflight.py +88 -0
- mlmm/py.typed +1 -0
- mlmm/pysis_runner.py +45 -0
- mlmm/scan.py +1047 -0
- mlmm/scan2d.py +1226 -0
- mlmm/scan3d.py +1265 -0
- mlmm/scan_common.py +184 -0
- mlmm/summary_log.py +736 -0
- mlmm/trj2fig.py +448 -0
- mlmm/tsopt.py +2871 -0
- mlmm/utils.py +2309 -0
- mlmm/xtb_embedcharge_correction.py +475 -0
- mlmm_toolkit-0.2.2.dev0.dist-info/METADATA +1159 -0
- mlmm_toolkit-0.2.2.dev0.dist-info/RECORD +372 -0
- mlmm_toolkit-0.2.2.dev0.dist-info/WHEEL +5 -0
- mlmm_toolkit-0.2.2.dev0.dist-info/entry_points.txt +2 -0
- mlmm_toolkit-0.2.2.dev0.dist-info/licenses/LICENSE +674 -0
- mlmm_toolkit-0.2.2.dev0.dist-info/top_level.txt +4 -0
- pysisyphus/Geometry.py +1667 -0
- pysisyphus/LICENSE +674 -0
- pysisyphus/TableFormatter.py +63 -0
- pysisyphus/TablePrinter.py +74 -0
- pysisyphus/__init__.py +12 -0
- pysisyphus/calculators/AFIR.py +452 -0
- pysisyphus/calculators/AnaPot.py +20 -0
- pysisyphus/calculators/AnaPot2.py +48 -0
- pysisyphus/calculators/AnaPot3.py +12 -0
- pysisyphus/calculators/AnaPot4.py +20 -0
- pysisyphus/calculators/AnaPotBase.py +337 -0
- pysisyphus/calculators/AnaPotCBM.py +25 -0
- pysisyphus/calculators/AtomAtomTransTorque.py +154 -0
- pysisyphus/calculators/CFOUR.py +250 -0
- pysisyphus/calculators/Calculator.py +844 -0
- pysisyphus/calculators/CerjanMiller.py +24 -0
- pysisyphus/calculators/Composite.py +123 -0
- pysisyphus/calculators/ConicalIntersection.py +171 -0
- pysisyphus/calculators/DFTBp.py +430 -0
- pysisyphus/calculators/DFTD3.py +66 -0
- pysisyphus/calculators/DFTD4.py +84 -0
- pysisyphus/calculators/Dalton.py +61 -0
- pysisyphus/calculators/Dimer.py +681 -0
- pysisyphus/calculators/Dummy.py +20 -0
- pysisyphus/calculators/EGO.py +76 -0
- pysisyphus/calculators/EnergyMin.py +224 -0
- pysisyphus/calculators/ExternalPotential.py +264 -0
- pysisyphus/calculators/FakeASE.py +35 -0
- pysisyphus/calculators/FourWellAnaPot.py +28 -0
- pysisyphus/calculators/FreeEndNEBPot.py +39 -0
- pysisyphus/calculators/Gaussian09.py +18 -0
- pysisyphus/calculators/Gaussian16.py +726 -0
- pysisyphus/calculators/HardSphere.py +159 -0
- pysisyphus/calculators/IDPPCalculator.py +49 -0
- pysisyphus/calculators/IPIClient.py +133 -0
- pysisyphus/calculators/IPIServer.py +234 -0
- pysisyphus/calculators/LEPSBase.py +24 -0
- pysisyphus/calculators/LEPSExpr.py +139 -0
- pysisyphus/calculators/LennardJones.py +80 -0
- pysisyphus/calculators/MOPAC.py +219 -0
- pysisyphus/calculators/MullerBrownSympyPot.py +51 -0
- pysisyphus/calculators/MultiCalc.py +85 -0
- pysisyphus/calculators/NFK.py +45 -0
- pysisyphus/calculators/OBabel.py +87 -0
- pysisyphus/calculators/ONIOMv2.py +1129 -0
- pysisyphus/calculators/ORCA.py +893 -0
- pysisyphus/calculators/ORCA5.py +6 -0
- pysisyphus/calculators/OpenMM.py +88 -0
- pysisyphus/calculators/OpenMolcas.py +281 -0
- pysisyphus/calculators/OverlapCalculator.py +908 -0
- pysisyphus/calculators/Psi4.py +218 -0
- pysisyphus/calculators/PyPsi4.py +37 -0
- pysisyphus/calculators/PySCF.py +341 -0
- pysisyphus/calculators/PyXTB.py +73 -0
- pysisyphus/calculators/QCEngine.py +106 -0
- pysisyphus/calculators/Rastrigin.py +22 -0
- pysisyphus/calculators/Remote.py +76 -0
- pysisyphus/calculators/Rosenbrock.py +15 -0
- pysisyphus/calculators/SocketCalc.py +97 -0
- pysisyphus/calculators/TIP3P.py +111 -0
- pysisyphus/calculators/TransTorque.py +161 -0
- pysisyphus/calculators/Turbomole.py +965 -0
- pysisyphus/calculators/VRIPot.py +37 -0
- pysisyphus/calculators/WFOWrapper.py +333 -0
- pysisyphus/calculators/WFOWrapper2.py +341 -0
- pysisyphus/calculators/XTB.py +418 -0
- pysisyphus/calculators/__init__.py +81 -0
- pysisyphus/calculators/cosmo_data.py +139 -0
- pysisyphus/calculators/parser.py +150 -0
- pysisyphus/color.py +19 -0
- pysisyphus/config.py +133 -0
- pysisyphus/constants.py +65 -0
- pysisyphus/cos/AdaptiveNEB.py +230 -0
- pysisyphus/cos/ChainOfStates.py +725 -0
- pysisyphus/cos/FreeEndNEB.py +25 -0
- pysisyphus/cos/FreezingString.py +103 -0
- pysisyphus/cos/GrowingChainOfStates.py +71 -0
- pysisyphus/cos/GrowingNT.py +309 -0
- pysisyphus/cos/GrowingString.py +508 -0
- pysisyphus/cos/NEB.py +189 -0
- pysisyphus/cos/SimpleZTS.py +64 -0
- pysisyphus/cos/__init__.py +22 -0
- pysisyphus/cos/stiffness.py +199 -0
- pysisyphus/drivers/__init__.py +17 -0
- pysisyphus/drivers/afir.py +855 -0
- pysisyphus/drivers/barriers.py +271 -0
- pysisyphus/drivers/birkholz.py +138 -0
- pysisyphus/drivers/cluster.py +318 -0
- pysisyphus/drivers/diabatization.py +133 -0
- pysisyphus/drivers/merge.py +368 -0
- pysisyphus/drivers/merge_mol2.py +322 -0
- pysisyphus/drivers/opt.py +375 -0
- pysisyphus/drivers/perf.py +91 -0
- pysisyphus/drivers/pka.py +52 -0
- pysisyphus/drivers/precon_pos_rot.py +669 -0
- pysisyphus/drivers/rates.py +480 -0
- pysisyphus/drivers/replace.py +219 -0
- pysisyphus/drivers/scan.py +212 -0
- pysisyphus/drivers/spectrum.py +166 -0
- pysisyphus/drivers/thermo.py +31 -0
- pysisyphus/dynamics/Gaussian.py +103 -0
- pysisyphus/dynamics/__init__.py +20 -0
- pysisyphus/dynamics/colvars.py +136 -0
- pysisyphus/dynamics/driver.py +297 -0
- pysisyphus/dynamics/helpers.py +256 -0
- pysisyphus/dynamics/lincs.py +105 -0
- pysisyphus/dynamics/mdp.py +364 -0
- pysisyphus/dynamics/rattle.py +121 -0
- pysisyphus/dynamics/thermostats.py +128 -0
- pysisyphus/dynamics/wigner.py +266 -0
- pysisyphus/elem_data.py +3473 -0
- pysisyphus/exceptions.py +2 -0
- pysisyphus/filtertrj.py +69 -0
- pysisyphus/helpers.py +623 -0
- pysisyphus/helpers_pure.py +649 -0
- pysisyphus/init_logging.py +50 -0
- pysisyphus/intcoords/Bend.py +69 -0
- pysisyphus/intcoords/Bend2.py +25 -0
- pysisyphus/intcoords/BondedFragment.py +32 -0
- pysisyphus/intcoords/Cartesian.py +41 -0
- pysisyphus/intcoords/CartesianCoords.py +140 -0
- pysisyphus/intcoords/Coords.py +56 -0
- pysisyphus/intcoords/DLC.py +197 -0
- pysisyphus/intcoords/DistanceFunction.py +34 -0
- pysisyphus/intcoords/DummyImproper.py +70 -0
- pysisyphus/intcoords/DummyTorsion.py +72 -0
- pysisyphus/intcoords/LinearBend.py +105 -0
- pysisyphus/intcoords/LinearDisplacement.py +80 -0
- pysisyphus/intcoords/OutOfPlane.py +59 -0
- pysisyphus/intcoords/PrimTypes.py +286 -0
- pysisyphus/intcoords/Primitive.py +137 -0
- pysisyphus/intcoords/RedundantCoords.py +659 -0
- pysisyphus/intcoords/RobustTorsion.py +59 -0
- pysisyphus/intcoords/Rotation.py +147 -0
- pysisyphus/intcoords/Stretch.py +31 -0
- pysisyphus/intcoords/Torsion.py +101 -0
- pysisyphus/intcoords/Torsion2.py +25 -0
- pysisyphus/intcoords/Translation.py +45 -0
- pysisyphus/intcoords/__init__.py +61 -0
- pysisyphus/intcoords/augment_bonds.py +126 -0
- pysisyphus/intcoords/derivatives.py +10512 -0
- pysisyphus/intcoords/eval.py +80 -0
- pysisyphus/intcoords/exceptions.py +37 -0
- pysisyphus/intcoords/findiffs.py +48 -0
- pysisyphus/intcoords/generate_derivatives.py +414 -0
- pysisyphus/intcoords/helpers.py +235 -0
- pysisyphus/intcoords/logging_conf.py +10 -0
- pysisyphus/intcoords/mp_derivatives.py +10836 -0
- pysisyphus/intcoords/setup.py +962 -0
- pysisyphus/intcoords/setup_fast.py +176 -0
- pysisyphus/intcoords/update.py +272 -0
- pysisyphus/intcoords/valid.py +89 -0
- pysisyphus/interpolate/Geodesic.py +93 -0
- pysisyphus/interpolate/IDPP.py +55 -0
- pysisyphus/interpolate/Interpolator.py +116 -0
- pysisyphus/interpolate/LST.py +70 -0
- pysisyphus/interpolate/Redund.py +152 -0
- pysisyphus/interpolate/__init__.py +9 -0
- pysisyphus/interpolate/helpers.py +34 -0
- pysisyphus/io/__init__.py +22 -0
- pysisyphus/io/aomix.py +178 -0
- pysisyphus/io/cjson.py +24 -0
- pysisyphus/io/crd.py +101 -0
- pysisyphus/io/cube.py +220 -0
- pysisyphus/io/fchk.py +184 -0
- pysisyphus/io/hdf5.py +49 -0
- pysisyphus/io/hessian.py +72 -0
- pysisyphus/io/mol2.py +146 -0
- pysisyphus/io/molden.py +293 -0
- pysisyphus/io/orca.py +189 -0
- pysisyphus/io/pdb.py +269 -0
- pysisyphus/io/psf.py +79 -0
- pysisyphus/io/pubchem.py +31 -0
- pysisyphus/io/qcschema.py +34 -0
- pysisyphus/io/sdf.py +29 -0
- pysisyphus/io/xyz.py +61 -0
- pysisyphus/io/zmat.py +175 -0
- pysisyphus/irc/DWI.py +108 -0
- pysisyphus/irc/DampedVelocityVerlet.py +134 -0
- pysisyphus/irc/Euler.py +22 -0
- pysisyphus/irc/EulerPC.py +345 -0
- pysisyphus/irc/GonzalezSchlegel.py +187 -0
- pysisyphus/irc/IMKMod.py +164 -0
- pysisyphus/irc/IRC.py +878 -0
- pysisyphus/irc/IRCDummy.py +10 -0
- pysisyphus/irc/Instanton.py +307 -0
- pysisyphus/irc/LQA.py +53 -0
- pysisyphus/irc/ModeKill.py +136 -0
- pysisyphus/irc/ParamPlot.py +53 -0
- pysisyphus/irc/RK4.py +36 -0
- pysisyphus/irc/__init__.py +31 -0
- pysisyphus/irc/initial_displ.py +219 -0
- pysisyphus/linalg.py +411 -0
- pysisyphus/line_searches/Backtracking.py +88 -0
- pysisyphus/line_searches/HagerZhang.py +184 -0
- pysisyphus/line_searches/LineSearch.py +232 -0
- pysisyphus/line_searches/StrongWolfe.py +108 -0
- pysisyphus/line_searches/__init__.py +9 -0
- pysisyphus/line_searches/interpol.py +15 -0
- pysisyphus/modefollow/NormalMode.py +40 -0
- pysisyphus/modefollow/__init__.py +10 -0
- pysisyphus/modefollow/davidson.py +199 -0
- pysisyphus/modefollow/lanczos.py +95 -0
- pysisyphus/optimizers/BFGS.py +99 -0
- pysisyphus/optimizers/BacktrackingOptimizer.py +113 -0
- pysisyphus/optimizers/ConjugateGradient.py +98 -0
- pysisyphus/optimizers/CubicNewton.py +75 -0
- pysisyphus/optimizers/FIRE.py +113 -0
- pysisyphus/optimizers/HessianOptimizer.py +1176 -0
- pysisyphus/optimizers/LBFGS.py +228 -0
- pysisyphus/optimizers/LayerOpt.py +411 -0
- pysisyphus/optimizers/MicroOptimizer.py +169 -0
- pysisyphus/optimizers/NCOptimizer.py +90 -0
- pysisyphus/optimizers/Optimizer.py +1084 -0
- pysisyphus/optimizers/PreconLBFGS.py +260 -0
- pysisyphus/optimizers/PreconSteepestDescent.py +7 -0
- pysisyphus/optimizers/QuickMin.py +74 -0
- pysisyphus/optimizers/RFOptimizer.py +181 -0
- pysisyphus/optimizers/RSA.py +99 -0
- pysisyphus/optimizers/StabilizedQNMethod.py +248 -0
- pysisyphus/optimizers/SteepestDescent.py +23 -0
- pysisyphus/optimizers/StringOptimizer.py +173 -0
- pysisyphus/optimizers/__init__.py +41 -0
- pysisyphus/optimizers/closures.py +301 -0
- pysisyphus/optimizers/cls_map.py +58 -0
- pysisyphus/optimizers/exceptions.py +6 -0
- pysisyphus/optimizers/gdiis.py +280 -0
- pysisyphus/optimizers/guess_hessians.py +311 -0
- pysisyphus/optimizers/hessian_updates.py +355 -0
- pysisyphus/optimizers/poly_fit.py +285 -0
- pysisyphus/optimizers/precon.py +153 -0
- pysisyphus/optimizers/restrict_step.py +24 -0
- pysisyphus/pack.py +172 -0
- pysisyphus/peakdetect.py +948 -0
- pysisyphus/plot.py +1031 -0
- pysisyphus/run.py +2106 -0
- pysisyphus/socket_helper.py +74 -0
- pysisyphus/stocastic/FragmentKick.py +132 -0
- pysisyphus/stocastic/Kick.py +81 -0
- pysisyphus/stocastic/Pipeline.py +303 -0
- pysisyphus/stocastic/__init__.py +21 -0
- pysisyphus/stocastic/align.py +127 -0
- pysisyphus/testing.py +96 -0
- pysisyphus/thermo.py +156 -0
- pysisyphus/trj.py +824 -0
- pysisyphus/tsoptimizers/RSIRFOptimizer.py +56 -0
- pysisyphus/tsoptimizers/RSPRFOptimizer.py +182 -0
- pysisyphus/tsoptimizers/TRIM.py +59 -0
- pysisyphus/tsoptimizers/TSHessianOptimizer.py +463 -0
- pysisyphus/tsoptimizers/__init__.py +23 -0
- pysisyphus/wavefunction/Basis.py +239 -0
- pysisyphus/wavefunction/DIIS.py +76 -0
- pysisyphus/wavefunction/__init__.py +25 -0
- pysisyphus/wavefunction/build_ext.py +42 -0
- pysisyphus/wavefunction/cart2sph.py +190 -0
- pysisyphus/wavefunction/diabatization.py +304 -0
- pysisyphus/wavefunction/excited_states.py +435 -0
- pysisyphus/wavefunction/gen_ints.py +1811 -0
- pysisyphus/wavefunction/helpers.py +104 -0
- pysisyphus/wavefunction/ints/__init__.py +0 -0
- pysisyphus/wavefunction/ints/boys.py +193 -0
- pysisyphus/wavefunction/ints/boys_table_N_64_xasym_27.1_step_0.01.npy +0 -0
- pysisyphus/wavefunction/ints/cart_gto3d.py +176 -0
- pysisyphus/wavefunction/ints/coulomb3d.py +25928 -0
- pysisyphus/wavefunction/ints/diag_quadrupole3d.py +10036 -0
- pysisyphus/wavefunction/ints/dipole3d.py +8762 -0
- pysisyphus/wavefunction/ints/int2c2e3d.py +7198 -0
- pysisyphus/wavefunction/ints/int3c2e3d_sph.py +65040 -0
- pysisyphus/wavefunction/ints/kinetic3d.py +8240 -0
- pysisyphus/wavefunction/ints/ovlp3d.py +3777 -0
- pysisyphus/wavefunction/ints/quadrupole3d.py +15054 -0
- pysisyphus/wavefunction/ints/self_ovlp3d.py +198 -0
- pysisyphus/wavefunction/localization.py +458 -0
- pysisyphus/wavefunction/multipole.py +159 -0
- pysisyphus/wavefunction/normalization.py +36 -0
- pysisyphus/wavefunction/pop_analysis.py +134 -0
- pysisyphus/wavefunction/shells.py +1171 -0
- pysisyphus/wavefunction/wavefunction.py +504 -0
- pysisyphus/wrapper/__init__.py +11 -0
- pysisyphus/wrapper/exceptions.py +2 -0
- pysisyphus/wrapper/jmol.py +120 -0
- pysisyphus/wrapper/mwfn.py +169 -0
- pysisyphus/wrapper/packmol.py +71 -0
- pysisyphus/xyzloader.py +168 -0
- pysisyphus/yaml_mods.py +45 -0
- thermoanalysis/LICENSE +674 -0
- thermoanalysis/QCData.py +244 -0
- thermoanalysis/__init__.py +0 -0
- thermoanalysis/config.py +3 -0
- thermoanalysis/constants.py +20 -0
- thermoanalysis/thermo.py +1011 -0
pysisyphus/run.py
ADDED
|
@@ -0,0 +1,2106 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
from collections import namedtuple
|
|
3
|
+
import copy
|
|
4
|
+
import datetime
|
|
5
|
+
import itertools as it
|
|
6
|
+
import os
|
|
7
|
+
from math import modf
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
import platform
|
|
10
|
+
from pprint import pprint
|
|
11
|
+
import re
|
|
12
|
+
import shutil
|
|
13
|
+
import sys
|
|
14
|
+
import textwrap
|
|
15
|
+
import time
|
|
16
|
+
|
|
17
|
+
from distributed import Client
|
|
18
|
+
import numpy as np
|
|
19
|
+
import scipy as sp
|
|
20
|
+
import yaml
|
|
21
|
+
__version__ = 'Pytorch-Enabled version bundled with `pdb2reaction`'
|
|
22
|
+
from pysisyphus.calculators import *
|
|
23
|
+
from pysisyphus.config import OUT_DIR_DEFAULT, p_DEFAULT, T_DEFAULT
|
|
24
|
+
from pysisyphus.cos import *
|
|
25
|
+
from pysisyphus.cos.GrowingChainOfStates import GrowingChainOfStates
|
|
26
|
+
from pysisyphus.color import bool_color
|
|
27
|
+
from pysisyphus.exceptions import HEIIsFirstOrLastException
|
|
28
|
+
from pysisyphus.dynamics import (
|
|
29
|
+
get_mb_velocities_for_geom,
|
|
30
|
+
mdp,
|
|
31
|
+
md,
|
|
32
|
+
get_colvar,
|
|
33
|
+
Gaussian,
|
|
34
|
+
)
|
|
35
|
+
from pysisyphus.drivers import (
|
|
36
|
+
relaxed_1d_scan,
|
|
37
|
+
run_afir_paths,
|
|
38
|
+
run_opt,
|
|
39
|
+
run_precontr,
|
|
40
|
+
run_perf,
|
|
41
|
+
print_perf_results,
|
|
42
|
+
)
|
|
43
|
+
from pysisyphus.drivers.barriers import do_endopt_ts_barriers
|
|
44
|
+
|
|
45
|
+
from pysisyphus.Geometry import Geometry
|
|
46
|
+
from pysisyphus.helpers import (
|
|
47
|
+
confirm_input,
|
|
48
|
+
shake_coords,
|
|
49
|
+
print_barrier,
|
|
50
|
+
get_tangent_trj_str,
|
|
51
|
+
)
|
|
52
|
+
from pysisyphus.helpers_pure import (
|
|
53
|
+
find_closest_sequence,
|
|
54
|
+
merge_sets,
|
|
55
|
+
recursive_update,
|
|
56
|
+
highlight_text,
|
|
57
|
+
approx_float,
|
|
58
|
+
results_to_json,
|
|
59
|
+
)
|
|
60
|
+
from pysisyphus.intcoords import PrimitiveNotDefinedException
|
|
61
|
+
from pysisyphus.intcoords.setup import get_bond_mat
|
|
62
|
+
from pysisyphus.init_logging import init_logging
|
|
63
|
+
from pysisyphus.intcoords.PrimTypes import PrimTypes, normalize_prim_inputs
|
|
64
|
+
from pysisyphus.intcoords.helpers import form_coordinate_union
|
|
65
|
+
from pysisyphus.intcoords.setup import get_bond_sets
|
|
66
|
+
from pysisyphus.interpolate import interpolate_all
|
|
67
|
+
from pysisyphus.irc import *
|
|
68
|
+
from pysisyphus.io import save_hessian
|
|
69
|
+
from pysisyphus.stocastic import *
|
|
70
|
+
from pysisyphus.thermo import can_thermoanalysis
|
|
71
|
+
from pysisyphus.trj import get_geoms, dump_geoms, standardize_geoms
|
|
72
|
+
from pysisyphus.xyzloader import write_geoms_to_trj
|
|
73
|
+
from pysisyphus.yaml_mods import get_loader, UNITS
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
# Maps the 'type' key of a YAML 'calc' section to the corresponding
# Calculator class. Keys are the lowercase names users write in the input.
CALC_DICT = {
    "afir": AFIR,
    "composite": Composite,
    "conical": ConicalIntersection,
    "dftb+": DFTBp,
    "dimer": Dimer,
    "dummy": Dummy,
    "energymin": EnergyMin,
    "ext": ExternalPotential,
    "g09": Gaussian09,
    "g16": Gaussian16,
    "ipiserver": IPIServer,
    "mopac": MOPAC,
    "multi": MultiCalc,
    "obabel": OBabel,
    "oniom": ONIOM,
    "openmolcas": OpenMolcas,
    "orca": ORCA,
    "orca5": ORCA5,
    "psi4": Psi4,
    "pyxtb": PyXTB,
    "remote": Remote,
    "turbomole": Turbomole,
    "xtb": XTB,
    "cfour": CFOUR,
}

# Optional calculators below are only registered when their (heavy, optional)
# dependencies are importable, so a missing extra does not break the module.
try:
    from pysisyphus.calculators.PySCF import PySCF

    CALC_DICT["pyscf"] = PySCF
except ImportError:
    pass

try:
    from pysisyphus.calculators.QCEngine import QCEngine

    CALC_DICT["qcengine"] = QCEngine
except ImportError:
    pass

try:
    # ML/MM calculator shipped by the mlmm-toolkit package bundled with
    # this distribution.
    from mlmm.mlmm_calc import mlmm as MLMMCalc

    CALC_DICT["mlmm"] = MLMMCalc
except ImportError:
    pass
|
|
123
|
+
|
|
124
|
+
# Maps the 'type' key of a YAML 'cos' section to chain-of-states classes.
COS_DICT = {
    "neb": NEB.NEB,
    "aneb": AdaptiveNEB.AdaptiveNEB,
    "feneb": FreeEndNEB.FreeEndNEB,
    "szts": SimpleZTS.SimpleZTS,
    "gs": GrowingString.GrowingString,
    "fs": FreezingString.FreezingString,
}
|
|
132
|
+
|
|
133
|
+
# Maps the 'type' key of a YAML 'irc' section to IRC integrator classes.
IRC_DICT = {
    "dvv": DampedVelocityVerlet,
    "euler": Euler,
    "eulerpc": EulerPC,
    "gs": GonzalezSchlegel,
    "imk": IMKMod,
    "lqa": LQA,
    "modekill": ModeKill,
    "rk4": RK4,
}
|
|
143
|
+
|
|
144
|
+
# Maps the 'type' key of a YAML 'stocastic' section to stochastic-kick classes.
STOCASTIC_DICT = {
    "frag": FragmentKick,
    "kick": Kick,
}
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def parse_args(args):
    """Parse pysisyphus command line arguments.

    Parameters
    ----------
    args : sequence of str
        Argument list to parse, e.g. ``sys.argv[1:]``.

    Returns
    -------
    argparse.Namespace
        Parsed arguments. Exactly one of the mutually exclusive actions
        (yaml / --clean / --fclean / --bibtex / --version) must be given.
    """
    parser = argparse.ArgumentParser()

    # Exactly one primary action is required.
    action_group = parser.add_mutually_exclusive_group(required=True)
    action_group.add_argument(
        "yaml", nargs="?", help="Start pysisyphus with input from a YAML file."
    )
    action_group.add_argument(
        "--clean", action="store_true", help="Ask for confirmation before cleaning."
    )
    action_group.add_argument(
        "--fclean",
        action="store_true",
        help="Force cleaning without prior confirmation.",
    )
    action_group.add_argument(
        "--bibtex",
        action="store_true",
        help="Print bibtex string for pysisyphus paper.",
    )
    action_group.add_argument(
        "-v", "--version", action="store_true", help="Print pysisyphus version."
    )

    # Optional run-type modifiers; at most one may be given.
    run_type_group = parser.add_mutually_exclusive_group(required=False)
    run_type_group.add_argument(
        "--restart",
        action="store_true",
        help="Continue a previously crashed/aborted/... pysisphus run.",
    )
    run_type_group.add_argument(
        "--cp",
        "--copy",
        nargs="+",
        help="Copy .yaml file and corresponding geometries from the 'geom' section "
        "to a new directory. The first argument is interpreted as destination. Any "
        "remaining (optional) arguments are files that are also copied.",
    )

    parser.add_argument(
        "--scheduler", default=None, help="Address of the dask scheduler."
    )
    # Bug fix: previously the 'args' parameter was ignored and sys.argv was
    # parsed implicitly via parser.parse_args(); now the supplied list is used.
    return parser.parse_args(args)
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
def get_calc_closure(base_name, calc_key, calc_kwargs, iter_dict=None, index=None):
    """Return a factory that creates fresh Calculator instances.

    Parameters
    ----------
    base_name : str
        Base name passed to the created calculators; used to distinguish
        their output files.
    calc_key : str
        Key into CALC_DICT, selecting the Calculator class.
    calc_kwargs : dict
        Keyword arguments for the Calculator; deep-copied on every call so
        repeated invocations do not share mutable state.
    iter_dict : dict, optional
        Maps keyword names to iterators; on every call the next value of
        each iterator is injected into the calculator kwargs.
    index : int, optional
        Starting value for 'calc_number'; incremented on every call so
        calculators get unique numbers.

    Returns
    -------
    callable
        calc_getter(**add_kwargs) -> Calculator instance.
    """
    if iter_dict is None:
        iter_dict = dict()

    if index is None:
        index = 0
    # Maps YAML input keys to actual Calculator-class arguments.
    calc_map = {
        "calculator": "calculator",
        "calc": "calculator",  # shortcut for 'calculator'
        # calc1/calc2 are used for ConicalIntersection and EnergyMin.
        "calculator1": "calculator1",
        "calc1": "calculator1",  # shortcut for 'calculator1'
        # Bug fix: the correctly spelled 'calculator2' key was missing; only
        # the misspelled variant below was recognized. The misspelled key is
        # kept so existing inputs relying on it continue to work.
        "calculator2": "calculator2",
        "calcualtor2": "calculator2",  # backward compatibility (typo)
        "calc2": "calculator2",  # shortcut for 'calculator2'
    }

    def calc_getter(**add_kwargs):
        nonlocal index

        kwargs_copy = copy.deepcopy(calc_kwargs)

        # Some calculators are just wrappers, modifying forces from actual
        # calculators, e.g. AFIR and Dimer. If we find one of the keys in
        # 'calc_map' in 'calc_kwargs' we create the actual calculator and
        # assign it to the corresponding value in 'calc_map'.
        for key, val in calc_map.items():
            if key in kwargs_copy:
                # Use different base_name to distinguish the calculator(s)
                actual_base_name = val
                actual_kwargs = kwargs_copy.pop(key)
                actual_key = actual_kwargs.pop("type")
                # Pass 'index' to arguments, to avoid recreating calculators
                # with the same name.
                actual_calc = get_calc_closure(
                    actual_base_name, actual_key, actual_kwargs, index=index
                )()
                kwargs_copy[val] = actual_calc

        kwargs_copy["base_name"] = base_name
        kwargs_copy.update(add_kwargs)
        kwargs_copy["calc_number"] = index
        for key, iter_ in iter_dict.items():
            kwargs_copy[key] = next(iter_)
        index += 1
        return CALC_DICT[calc_key](**kwargs_copy)

    return calc_getter
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def run_tsopt_from_cos(
|
|
246
|
+
cos,
|
|
247
|
+
tsopt_key,
|
|
248
|
+
tsopt_kwargs,
|
|
249
|
+
calc_getter=None,
|
|
250
|
+
ovlp_thresh=0.4,
|
|
251
|
+
coordinate_union="bonds",
|
|
252
|
+
save_hessian=False,
|
|
253
|
+
):
|
|
254
|
+
print(highlight_text(f"Running TS-optimization from COS"))
|
|
255
|
+
|
|
256
|
+
# Later want a Cartesian HEI tangent, so if not already present we create
|
|
257
|
+
# a Cartesian COS object to obtain the tangent from.
|
|
258
|
+
atoms = cos.images[0].atoms
|
|
259
|
+
if cos.coord_type != "cart":
|
|
260
|
+
cart_images = list()
|
|
261
|
+
for image in cos.images:
|
|
262
|
+
cart_image = Geometry(atoms, image.cart_coords)
|
|
263
|
+
cart_image.energy = image.energy
|
|
264
|
+
cart_images.append(cart_image)
|
|
265
|
+
cart_cos = ChainOfStates.ChainOfStates(cart_images)
|
|
266
|
+
# Just continue using the Cartesian COS object
|
|
267
|
+
else:
|
|
268
|
+
cart_cos = cos
|
|
269
|
+
|
|
270
|
+
hei_kind = tsopt_kwargs.pop("hei_kind", "splined")
|
|
271
|
+
# Use plain, unsplined, HEI
|
|
272
|
+
if hei_kind == "plain":
|
|
273
|
+
hei_index = cos.get_hei_index()
|
|
274
|
+
hei_image = cos.images[hei_index]
|
|
275
|
+
# Select the Cartesian tangent from the COS
|
|
276
|
+
cart_hei_tangent = cart_cos.get_tangent(hei_index)
|
|
277
|
+
# Use splined HEI
|
|
278
|
+
elif hei_kind == "splined":
|
|
279
|
+
# The splined HEI tangent is usually very bady for the purpose of
|
|
280
|
+
# selecting an imaginary mode to follow uphill. So we construct a better
|
|
281
|
+
# HEI tangent by mixing the two tangents closest to the splined HEI.
|
|
282
|
+
hei_coords, hei_energy, splined_hei_tangent, hei_index = cos.get_splined_hei()
|
|
283
|
+
hei_image = Geometry(atoms, hei_coords)
|
|
284
|
+
close_to_first = hei_index < 0.5
|
|
285
|
+
close_to_last = hei_index > len(cos.images) - 1.5
|
|
286
|
+
if close_to_first or close_to_last:
|
|
287
|
+
close_to = "first" if close_to_first else "last"
|
|
288
|
+
print(
|
|
289
|
+
f"Splined HEI is too close to the {close_to} image. Aborting TS optimization!"
|
|
290
|
+
)
|
|
291
|
+
raise HEIIsFirstOrLastException
|
|
292
|
+
# The hei_index is a float. We split off the decimal part and mix the two
|
|
293
|
+
# nearest tangents accordingly.
|
|
294
|
+
frac, floor = modf(hei_index)
|
|
295
|
+
# Indices of the two nearest images with integer indices.
|
|
296
|
+
floor = int(floor)
|
|
297
|
+
ceil = floor + 1
|
|
298
|
+
floor_tangent = cart_cos.get_tangent(floor)
|
|
299
|
+
ceil_tangent = cart_cos.get_tangent(ceil)
|
|
300
|
+
print(f"Creating mixed HEI tangent, using tangents at images {(floor, ceil)}.")
|
|
301
|
+
print("Overlap of splined HEI tangent with these tangents:")
|
|
302
|
+
for ind, tang in ((floor, floor_tangent), (ceil, ceil_tangent)):
|
|
303
|
+
print(f"\t{ind:02d}: {splined_hei_tangent.dot(tang):.6f}")
|
|
304
|
+
# When frac is big, e.g. 0.9 the tangent resembles the tangent at 'ceil'
|
|
305
|
+
# so we mix in only (1-frac) == (1-0.9) == 0.1 of the 'floor' tangent.
|
|
306
|
+
cart_hei_tangent = (1 - frac) * floor_tangent + frac * ceil_tangent
|
|
307
|
+
cart_hei_tangent /= np.linalg.norm(cart_hei_tangent)
|
|
308
|
+
# print(f"\t(1-{frac:.4f})*t({floor})+{frac:.4f}*t({ceil}): "
|
|
309
|
+
# f"{splined_hei_tangent.dot(cart_hei_tangent):.6f}"
|
|
310
|
+
# )
|
|
311
|
+
else:
|
|
312
|
+
raise Exception(f"Invalid hei_kind='{hei_kind}'!")
|
|
313
|
+
|
|
314
|
+
print(f"Index of {hei_kind} highest energy image (HEI) is {hei_index:.2f}.")
|
|
315
|
+
print()
|
|
316
|
+
|
|
317
|
+
# When the COS was optimized in internal coordinates the united primitive
|
|
318
|
+
# indices are already present and we just keep on using them.
|
|
319
|
+
try:
|
|
320
|
+
typed_prims = hei_image.internal.typed_prims
|
|
321
|
+
# If the COS was optimized in Cartesians we have to generated a new
|
|
322
|
+
# set of primitive internals.
|
|
323
|
+
except AttributeError:
|
|
324
|
+
|
|
325
|
+
def get_int_geom(geom):
|
|
326
|
+
return Geometry(geom.atoms, geom.cart_coords, coord_type="redund")
|
|
327
|
+
|
|
328
|
+
internal_geom1 = get_int_geom(cos.images[0])
|
|
329
|
+
internal_geom2 = get_int_geom(cos.images[-1])
|
|
330
|
+
typed_prims = form_coordinate_union(internal_geom1, internal_geom2)
|
|
331
|
+
|
|
332
|
+
coord_kwargs = dict()
|
|
333
|
+
if coordinate_union == "all":
|
|
334
|
+
coord_kwargs["typed_prims"] = typed_prims
|
|
335
|
+
union_msg = "Using full coordinate union for TS guess. Probably a bad idea!"
|
|
336
|
+
elif coordinate_union in ("bonds", "bonds_bends_dihedrals"):
|
|
337
|
+
# Only keep actual bonds ...
|
|
338
|
+
valid_prim_types = (PrimTypes.BOND,)
|
|
339
|
+
# ... and bends and dihedrals, if requested
|
|
340
|
+
if coordinate_union == "bonds_bends_dihedrals":
|
|
341
|
+
valid_prim_types += (PrimTypes.BEND, PrimTypes.PROPER_DIHEDRAL)
|
|
342
|
+
coord_kwargs["define_prims"] = [
|
|
343
|
+
tp for tp in typed_prims if tp[0] in valid_prim_types
|
|
344
|
+
]
|
|
345
|
+
union_msg = f"Kept primitive types: {valid_prim_types}"
|
|
346
|
+
else:
|
|
347
|
+
union_msg = "No coordinate union."
|
|
348
|
+
print(union_msg)
|
|
349
|
+
|
|
350
|
+
ts_geom_kwargs = tsopt_kwargs.pop("geom")
|
|
351
|
+
ts_coord_type = ts_geom_kwargs.pop("type")
|
|
352
|
+
if ts_coord_type != "cart":
|
|
353
|
+
ts_geom_kwargs["coord_kwargs"].update(coord_kwargs)
|
|
354
|
+
|
|
355
|
+
ts_geom = Geometry(
|
|
356
|
+
hei_image.atoms,
|
|
357
|
+
hei_image.cart_coords,
|
|
358
|
+
coord_type=ts_coord_type,
|
|
359
|
+
**ts_geom_kwargs,
|
|
360
|
+
)
|
|
361
|
+
|
|
362
|
+
# Convert tangent from whatever coordinates to redundant internals.
|
|
363
|
+
# When the HEI was splined the tangent will be in Cartesians.
|
|
364
|
+
if ts_coord_type == "cart":
|
|
365
|
+
ref_tangent = cart_hei_tangent
|
|
366
|
+
elif ts_coord_type in ("redund", "dlc"):
|
|
367
|
+
ref_tangent = ts_geom.internal.B @ cart_hei_tangent
|
|
368
|
+
else:
|
|
369
|
+
raise Exception(f"Invalid coord_type='{ts_coord_type}'!")
|
|
370
|
+
ref_tangent /= np.linalg.norm(ref_tangent)
|
|
371
|
+
|
|
372
|
+
# Dump HEI data
|
|
373
|
+
#
|
|
374
|
+
# Cartesian tangent and an animated _trj.xyz file
|
|
375
|
+
cart_hei_fn = "cart_hei_tangent"
|
|
376
|
+
np.savetxt(cart_hei_fn, cart_hei_tangent)
|
|
377
|
+
trj = get_tangent_trj_str(
|
|
378
|
+
ts_geom.atoms, ts_geom.cart_coords, cart_hei_tangent, points=10
|
|
379
|
+
)
|
|
380
|
+
trj_fn = cart_hei_fn + "_trj.xyz"
|
|
381
|
+
with open(trj_fn, "w") as handle:
|
|
382
|
+
handle.write(trj)
|
|
383
|
+
print(f"Wrote animated HEI tangent to {trj_fn}\n")
|
|
384
|
+
|
|
385
|
+
# Print HEI information (coords & tangent)
|
|
386
|
+
print(f"{hei_kind.capitalize()} HEI (TS guess)")
|
|
387
|
+
print(ts_geom.as_xyz())
|
|
388
|
+
print()
|
|
389
|
+
dummy = Geometry(atoms, cart_hei_tangent)
|
|
390
|
+
print(f"{hei_kind.capitalize()} Cartesian HEI tangent")
|
|
391
|
+
print(dummy.as_xyz())
|
|
392
|
+
print()
|
|
393
|
+
|
|
394
|
+
# Write out HEI information (coords & tangent)
|
|
395
|
+
hei_xyz_fn = f"{hei_kind}_hei.xyz"
|
|
396
|
+
with open(hei_xyz_fn, "w") as handle:
|
|
397
|
+
handle.write(ts_geom.as_xyz())
|
|
398
|
+
print(f"Wrote {hei_kind} HEI coordinates to '{hei_xyz_fn}'")
|
|
399
|
+
|
|
400
|
+
ts_calc = calc_getter()
|
|
401
|
+
|
|
402
|
+
def wrapped_calc_getter():
|
|
403
|
+
return ts_calc
|
|
404
|
+
|
|
405
|
+
ts_geom.set_calculator(ts_calc)
|
|
406
|
+
tsopt_kwargs["prefix"] = "ts"
|
|
407
|
+
|
|
408
|
+
if tsopt_key == "dimer":
|
|
409
|
+
# Right now Dimer optimization is rectricted to cartesian
|
|
410
|
+
# rotations and translations, even though translation in
|
|
411
|
+
# internals would be possible.
|
|
412
|
+
ts_geom = Geometry(hei_image.atoms, hei_image.cart_coords)
|
|
413
|
+
dimer_kwargs = tsopt_kwargs.pop("dimer_kwargs", {})
|
|
414
|
+
dimer_kwargs.update(
|
|
415
|
+
{
|
|
416
|
+
"N_raw": cart_hei_tangent,
|
|
417
|
+
"base_name": "dimer",
|
|
418
|
+
}
|
|
419
|
+
)
|
|
420
|
+
|
|
421
|
+
def wrapped_calc_getter():
|
|
422
|
+
return Dimer(ts_calc, **dimer_kwargs)
|
|
423
|
+
|
|
424
|
+
tsopt_key = "plbfgs"
|
|
425
|
+
# When calling run_opt we pass the Hessian as additional argument,
|
|
426
|
+
# so it is not recalculated unecessary. As no Hessian is available
|
|
427
|
+
# for the dimer method we set it None.
|
|
428
|
+
cart_hessian = None
|
|
429
|
+
# Determine which imaginary mode has the highest overlap with the splined HEI tangent.
|
|
430
|
+
else:
|
|
431
|
+
print(f"Calculating Hessian at {hei_kind} TS guess.")
|
|
432
|
+
# Calculate Hessian
|
|
433
|
+
cart_hessian = ts_geom.cart_hessian
|
|
434
|
+
# Continue Hessian in whatever coordinate system is actually in use
|
|
435
|
+
H = ts_geom.hessian
|
|
436
|
+
eigvals, eigvecs = np.linalg.eigh(H)
|
|
437
|
+
neg_inds = eigvals < -1e-4
|
|
438
|
+
if sum(neg_inds) == 0:
|
|
439
|
+
raise Exception("No negative eigenvalues found at splined HEI. Exiting!")
|
|
440
|
+
eigval_str = np.array2string(eigvals[neg_inds], precision=6)
|
|
441
|
+
print(f"Negative eigenvalues at splined HEI:\n{eigval_str}")
|
|
442
|
+
neg_eigvals = eigvals[neg_inds]
|
|
443
|
+
neg_eigvecs = eigvecs.T[neg_inds]
|
|
444
|
+
ovlps = [np.abs(imag_mode.dot(ref_tangent)) for imag_mode in neg_eigvecs]
|
|
445
|
+
print("Overlaps between HEI tangent and imaginary modes:")
|
|
446
|
+
for i, ov in enumerate(ovlps):
|
|
447
|
+
print(f"\t{i:02d}: {ov:.6f}")
|
|
448
|
+
max_ovlp_ind = np.argmax(ovlps)
|
|
449
|
+
max_ovlp = ovlps[max_ovlp_ind]
|
|
450
|
+
print(
|
|
451
|
+
f"Imaginary mode {max_ovlp_ind} has highest overlap ({max_ovlp:.2%}) "
|
|
452
|
+
"with splined HEI tangent."
|
|
453
|
+
)
|
|
454
|
+
rel_ovlps = np.array(ovlps) / max(ovlps)
|
|
455
|
+
similar_inds = rel_ovlps > 0.80
|
|
456
|
+
# Only 1 big overlap is present
|
|
457
|
+
if (max_ovlp >= ovlp_thresh) and (similar_inds.sum() == 1):
|
|
458
|
+
ovlp_root = np.argmax(ovlps)
|
|
459
|
+
# Multiple big and similar overlaps are present.
|
|
460
|
+
elif (max_ovlp >= ovlp_thresh) and (similar_inds.sum() > 1):
|
|
461
|
+
# Will yield the first occurence of True, which corresponds to a
|
|
462
|
+
# similar overlaps with the most negative eigenvalue.
|
|
463
|
+
ovlp_root = similar_inds.argmax()
|
|
464
|
+
neg_eigval = neg_eigvals[ovlp_root]
|
|
465
|
+
verbose_inds = np.arange(neg_eigvals.size)[similar_inds]
|
|
466
|
+
print(
|
|
467
|
+
f"Overlaps {verbose_inds} are very similar! Falling back to the "
|
|
468
|
+
f"one with the most negative eigenvalue {neg_eigval:.6f} "
|
|
469
|
+
f"(mode {ovlp_root})."
|
|
470
|
+
)
|
|
471
|
+
# Fallback to the most negative eigenvalue when all overlaps are too low.
|
|
472
|
+
else:
|
|
473
|
+
ovlp_root = neg_eigvals.argmin()
|
|
474
|
+
neg_eigval = neg_eigvals[ovlp_root]
|
|
475
|
+
print(
|
|
476
|
+
f"Highest overlap {max_ovlp:.6f} is below the threshold "
|
|
477
|
+
f"of {ovlp_thresh:.6f}.\nFalling back to mode {ovlp_root} with most "
|
|
478
|
+
f"negative eigenvalue {neg_eigval:.6f}."
|
|
479
|
+
)
|
|
480
|
+
root = tsopt_kwargs.get("root", None)
|
|
481
|
+
if root is None:
|
|
482
|
+
# Use mode with highest overlap as initial root
|
|
483
|
+
tsopt_kwargs["root"] = ovlp_root
|
|
484
|
+
else:
|
|
485
|
+
print(
|
|
486
|
+
f"Initial root={root} given, neglecting root {ovlp_root} "
|
|
487
|
+
"determined from overlaps."
|
|
488
|
+
)
|
|
489
|
+
|
|
490
|
+
opt_result = run_opt(
|
|
491
|
+
ts_geom,
|
|
492
|
+
calc_getter=wrapped_calc_getter,
|
|
493
|
+
opt_key=tsopt_key,
|
|
494
|
+
opt_kwargs=tsopt_kwargs,
|
|
495
|
+
cart_hessian=cart_hessian,
|
|
496
|
+
title="TS-Optimization",
|
|
497
|
+
copy_final_geom="ts_opt.xyz",
|
|
498
|
+
save_hessian=save_hessian,
|
|
499
|
+
)
|
|
500
|
+
ts_geom = opt_result.geom
|
|
501
|
+
ts_opt = opt_result.opt
|
|
502
|
+
|
|
503
|
+
# Restore original calculator for Dimer calculations
|
|
504
|
+
if tsopt_key == "dimer":
|
|
505
|
+
ts_geom.set_calculator(ts_calc)
|
|
506
|
+
|
|
507
|
+
print(f"Optimized TS coords:")
|
|
508
|
+
print(ts_geom.as_xyz())
|
|
509
|
+
# Include ts_ prefix
|
|
510
|
+
ts_opt_fn = ts_opt.get_path_for_fn("opt.xyz")
|
|
511
|
+
with open(ts_opt_fn, "w") as handle:
|
|
512
|
+
handle.write(ts_geom.as_xyz())
|
|
513
|
+
print(f"Wrote TS geometry to '{ts_opt_fn}\n'")
|
|
514
|
+
|
|
515
|
+
ts_energy = ts_geom.energy
|
|
516
|
+
first_cos_energy = cos.images[0].energy
|
|
517
|
+
last_cos_energy = cos.images[-1].energy
|
|
518
|
+
print_barrier(ts_energy, first_cos_energy, "TS", "first COS image")
|
|
519
|
+
print_barrier(ts_energy, last_cos_energy, "TS", "last COS image")
|
|
520
|
+
|
|
521
|
+
print()
|
|
522
|
+
return opt_result
|
|
523
|
+
|
|
524
|
+
|
|
525
|
+
def run_calculations(
    geoms,
    calc_getter,
    scheduler=None,
    assert_track=False,
    run_func=None,
):
    """Run single-point calculations on all given geometries.

    Each geometry gets a fresh calculator from ``calc_getter``. When a dask
    ``scheduler`` address is given, calculations are distributed via a dask
    ``Client``; otherwise they run serially, with chk-files propagated from
    one calculator to the next (when the calculator supports it).

    Parameters
    ----------
    geoms : iterable of Geometry
        Geometries to calculate.
    calc_getter : callable
        Returns a fresh calculator instance per call.
    scheduler : str, optional
        Address of a dask scheduler; enables parallel execution.
    assert_track : bool, optional
        Require every calculator to have excited-state tracking enabled.
    run_func : str, optional
        Name of the calculator method to call; defaults to "run_calculation".

    Returns
    -------
    tuple
        (geoms, all_results); results are also set on the geometries when
        possible.
    """
    print(highlight_text("Running calculations"))

    # Allow overriding which calculator method is invoked.
    func_name = "run_calculation" if run_func is None else run_func

    def par_calc(geom):
        # Helper executed on dask workers; resolves the method by name.
        return getattr(geom.calculator, func_name)(geom.atoms, geom.coords)

    for geom in geoms:
        geom.set_calculator(calc_getter())

    if assert_track:
        assert all(
            [geom.calculator.track for geom in geoms]
        ), "'track: True' must be present in calc section."

    if scheduler:
        client = Client(scheduler, pure=False, silence_logs=False)
        results_futures = client.map(par_calc, geoms)
        all_results = client.gather(results_futures)
    else:
        all_results = list()
        i_fmt = "02d"
        for i, geom in enumerate(geoms):
            print(highlight_text(f"Calculation {i:{i_fmt}}", level=1))

            start = time.time()
            print(geom)
            results = getattr(geom.calculator, func_name)(geom.atoms, geom.cart_coords)
            # results dict of MultiCalc will contain keys that cannot be dumped yet.
            # So we skip the JSON dumping when KeyError is raised.
            try:
                as_json = results_to_json(results)
                calc = geom.calculator
                # Decrease counter, because it will be increased by 1, w.r.t to the
                # calculation.
                json_fn = calc.make_fn("results", counter=calc.calc_counter - 1)
                with open(json_fn, "w") as handle:
                    handle.write(as_json)
            except KeyError:
                print("Skipped JSON dump of calculation results!")

            # Collect sub-results that carry a Hessian; the actual dump below
            # is currently disabled (commented out).
            hess_keys = [
                key
                for key, val in results.items()
                if isinstance(val, dict) and "hessian" in val
            ]
            for hkey in hess_keys:
                hres = results[hkey]
                hfn = f"{hkey}_hessian.h5"
                # save_hessian(
                #     hfn,
                #     geom,
                #     cart_hessian=hres["hessian"],
                #     energy=hres["energy"],
                # )
                # print(f"Dumped hessian to '{hfn}'.")

            all_results.append(results)
            # Hand the chk-files of the current calculator over to the next
            # one, so later calculations can restart from converged data.
            if i < (len(geoms) - 1):
                try:
                    cur_calculator = geom.calculator
                    next_calculator = geoms[i + 1].calculator
                    next_calculator.set_chkfiles(cur_calculator.get_chkfiles())
                    msg = f"Set chkfiles of calculator {i:{i_fmt}} on calculator {i+1:{i_fmt}}"
                except AttributeError:
                    msg = "Calculator does not support set/get_chkfiles!"
                print(msg)
            end = time.time()
            diff = end - start
            print(f"Calculation took {diff:.1f} s.\n")
            sys.stdout.flush()
    print()

    # Attach results to the geometries; some result layouts (e.g. MultiCalc)
    # don't fit Geometry.set_results and raise KeyError — skip those.
    for geom, results in zip(geoms, all_results):
        try:
            geom.set_results(results)
        except KeyError:
            pass

    return geoms, all_results
|
|
612
|
+
|
|
613
|
+
|
|
614
|
+
def run_stocastic(stoc):
    """Execute the given stochastic search object and hand it back.

    Parameters
    ----------
    stoc
        Stochastic-search driver exposing a ``run()`` method.

    Returns
    -------
    The same ``stoc`` object, after its run finished.
    """
    # Fragment
    stoc.run()
    print()
    return stoc
|
|
620
|
+
|
|
621
|
+
|
|
622
|
+
def run_md(geom, calc_getter, md_kwargs):
    """Run a molecular-dynamics simulation on `geom`.

    Initial velocities are drawn from a Maxwell-Boltzmann distribution at
    ``T_init_vel``. Optional well-tempered-style Gaussians (metadynamics bias)
    can be configured via the "gaussian" sub-dictionary of ``md_kwargs``.

    Parameters
    ----------
    geom : Geometry
        Geometry to propagate.
    calc_getter : callable
        Returns a calculator instance.
    md_kwargs : dict
        MD options; consumed keys are popped ("T_init_vel", "steps", "dt",
        "seed", "gaussian"), the remainder is forwarded to ``md()``.

    Returns
    -------
    MD result object as returned by ``md()``.
    """
    print(highlight_text(f"Running Molecular Dynamics"))

    calc = calc_getter()
    geom.set_calculator(calc)

    T_init_vel = md_kwargs.pop("T_init_vel")
    steps = md_kwargs.pop("steps")
    dt = md_kwargs.pop("dt")
    seed = md_kwargs.pop("seed", None)

    # Build (name, Gaussian, stride) tuples for every configured bias.
    _gaussian = md_kwargs.pop("gaussian", {})
    gaussians = list()
    for g_name, g_kwargs in _gaussian.items():
        # Create collective variable
        cv_kwargs = g_kwargs.pop("colvar")
        cv_key = cv_kwargs.pop("type")
        colvar = get_colvar(cv_key, cv_kwargs)

        # Create & append Gaussian
        g_w = g_kwargs.pop("w")
        g_s = g_kwargs.pop("s")
        g_stride = g_kwargs.pop("stride")
        gau = Gaussian(w=g_w, s=g_s, colvar=colvar, dump_name=g_name)
        gaussians.append((g_name, gau, g_stride))

    # NOTE(review): "remove_com_v" is read with .get (not popped), so it is
    # also forwarded to md() via **md_kwargs — presumably intentional.
    remove_com_v = md_kwargs.get("remove_com_v")
    v0 = get_mb_velocities_for_geom(
        geom, T_init_vel, seed=seed, remove_com_v=remove_com_v, remove_rot_v=False
    ).flatten()
    md_result = md(geom, v0=v0, steps=steps, dt=dt, gaussians=gaussians, **md_kwargs)

    # from pysisyphus.xyzloader import coords_to_trj
    # trj_fn = "md_trj.xyz"
    # _ = coords_to_trj(
    #     trj_fn, geom.atoms, md_result.coords[::md_kwargs["dump_stride"]]
    # )
    print()

    return md_result
|
|
662
|
+
|
|
663
|
+
|
|
664
|
+
def run_scan(geom, calc_getter, scan_kwargs, callback=None):
    """Carry out a relaxed 1d scan along one internal coordinate.

    The scanned primitive is constrained while the remaining coordinates are
    relaxed at every step (via ``relaxed_1d_scan``). With "symmetric: True"
    the scan runs in both directions from the initial value.

    Writes "relaxed_scan_trj.xyz" (all scan geometries) and
    "relaxed_scan.dat" (scanned value vs. energy).

    Parameters
    ----------
    geom : Geometry
        Must use internal coordinates (asserted below).
    calc_getter : callable
        Returns a calculator instance.
    scan_kwargs : dict
        Scan definition: "type", "indices", "steps" and either "end" or
        "step_size"; optional "start", "symmetric" and an "opt" sub-dict.
    callback : callable, optional
        Forwarded to ``relaxed_1d_scan``.

    Returns
    -------
    tuple
        (scan_geoms, scan_vals, scan_energies).
    """
    print(highlight_text("Relaxed Scan") + "\n")
    assert (
        geom.coord_type != "cart"
    ), "Internal coordinates are required for coordinate scans."

    type_ = scan_kwargs["type"]
    indices = scan_kwargs["indices"]
    steps = scan_kwargs["steps"]
    start = scan_kwargs.get("start", None)
    end = scan_kwargs.get("end", None)
    step_size = scan_kwargs.get("step_size", None)
    symmetric = scan_kwargs.get("symmetric", False)
    # The final prim value is determined either as
    # start + steps*step_size
    # or
    # (end - start) / steps .
    #
    # So we always require steps and either end or step_size.
    # bool(a) != bool(b) amounts to an logical XOR.
    assert (steps > 0) and (
        bool(end) != bool(step_size)
    ), "Please specify either 'end' or 'step_size'!"
    if symmetric:
        assert step_size and (
            start is None
        ), "'symmetric: True' requires 'step_size' and 'start == None'!"

    constrain_prims = normalize_prim_inputs(((type_, *indices),))
    constr_prim = constrain_prims[0]

    start_was_none = start is None
    if start_was_none:
        try:
            constr_ind = geom.internal.get_index_of_typed_prim(constr_prim)
        # Recreate geom with appropriate primitive
        except PrimitiveNotDefinedException:
            geom = geom.copy(coord_kwargs={"define_prims": constrain_prims})
            constr_ind = geom.internal.get_index_of_typed_prim(constr_prim)
        # The given indices may not correspond exactly to a typed primitives,
        # as they may be reversed. So we fetch the actual typed primitive.
        constr_prim = geom.internal.typed_prims[constr_ind]
        start = geom.coords[constr_ind]

    if step_size is None:
        step_size = (end - start) / steps
    opt_kwargs = scan_kwargs["opt"].copy()
    opt_key = opt_kwargs.pop("type")

    def wrapper(geom, start, step_size, steps, pref=None):
        # Thin wrapper so both scan directions share the same setup.
        return relaxed_1d_scan(
            geom,
            calc_getter,
            [
                constr_prim,
            ],
            start,
            step_size,
            steps,
            opt_key,
            opt_kwargs,
            pref=pref,
            callback=callback,
        )

    if not symmetric:
        scan_geoms, scan_vals, scan_energies = wrapper(geom, start, step_size, steps)
    else:
        # Negative direction
        print(highlight_text("Negative direction", level=1) + "\n")
        minus_geoms, minus_vals, minus_energies = wrapper(
            geom, start, -step_size, steps, pref="minus"
        )
        init_geom = minus_geoms[0].copy()
        # Positive direction. Compared to the negative direction we start at a
        # displaced geometry and reduce the number of steps by 1.
        print(highlight_text("Positive direction", level=1) + "\n")
        plus_start = start + step_size
        # Do one step less, as we already start from the optimized geometry
        plus_steps = steps - 1
        plus_geoms, plus_vals, plus_energies = wrapper(
            init_geom, plus_start, step_size, plus_steps, pref="plus"
        )
        # Reverse the negative branch so values run monotonically.
        scan_geoms = minus_geoms[::-1] + plus_geoms
        scan_vals = np.concatenate((minus_vals[::-1], plus_vals))
        scan_energies = np.concatenate((minus_energies[::-1], plus_energies))

    trj = "\n".join([geom.as_xyz() for geom in scan_geoms])
    with open("relaxed_scan_trj.xyz", "w") as handle:
        handle.write(trj)
    scan_data = np.stack((scan_vals, scan_energies), axis=1)
    np.savetxt(f"relaxed_scan.dat", scan_data)
    return scan_geoms, scan_vals, scan_energies
|
|
757
|
+
|
|
758
|
+
|
|
759
|
+
def run_preopt(
    first_geom,
    last_geom,
    calc_getter,
    preopt_key,
    preopt_kwargs,
    save_hessian=False,
):
    """Pre-optimize the first and last geometry of a chain of states.

    Both geometries are recreated with the coordinate system requested in the
    "geom" sub-dict of ``preopt_kwargs`` and then optimized via ``run_opt``.
    After each optimization the RMSD to the unoptimized geometry is reported.

    Parameters
    ----------
    first_geom, last_geom : Geometry
        Endpoint geometries.
    calc_getter : callable
        Returns a calculator instance.
    preopt_key : str
        Optimizer key for ``run_opt``.
    preopt_kwargs : dict
        Optimizer options; "strict" and "geom" are popped here.
    save_hessian : bool, optional
        Forwarded to ``run_opt``.

    Returns
    -------
    list
        Optimization results for (first, last), in this order.
    """
    strict = preopt_kwargs.pop("strict", False)

    geom_kwargs = preopt_kwargs.pop("geom")
    coord_type = geom_kwargs.pop("type")

    def recreate_geom(geom):
        # Rebuild the geometry with the requested coordinate system.
        return Geometry(geom.atoms, geom.coords, coord_type=coord_type, **geom_kwargs)

    first_geom = recreate_geom(first_geom)
    last_geom = recreate_geom(last_geom)

    opt_results = list()
    for geom, key in ((first_geom, "first"), (last_geom, "last")):
        # Backup original geometry for RMSD calculation
        org_geom = geom.copy(coord_type="cart")

        prefix = f"{key}_pre"
        opt_kwargs = preopt_kwargs.copy()
        opt_kwargs.update(
            {
                "prefix": prefix,
                "h5_group_name": prefix,
            }
        )
        opt_result = run_opt(
            geom,
            calc_getter,
            preopt_key,
            opt_kwargs,
            title=f"{key} preoptimization",
            save_hessian=save_hessian,
        )
        opt = opt_result.opt
        opt_results.append(opt_result)

        # Continue with next pre-optimization when stopped manually
        if strict and not opt.stopped and not opt.is_converged:
            print(f"Problem in preoptimization of {key}. Exiting!")
            sys.exit()
        print(f"Preoptimization of {key} geometry converged!")
        opt_fn = opt.get_path_for_fn(f"opt.xyz")
        shutil.move(opt.final_fn, opt_fn)
        print(f"Saved final preoptimized structure to '{opt_fn}'.")

        rmsd = org_geom.rmsd(opt_result.geom)
        print(f"RMSD with initial geometry: {rmsd:.6f} au")
        print()

    return opt_results
|
|
816
|
+
|
|
817
|
+
|
|
818
|
+
def run_irc(geom, irc_key, irc_kwargs, calc_getter):
    """Set up and run an intrinsic-reaction-coordinate integration.

    Parameters
    ----------
    geom : Geometry
        Starting geometry (usually a TS); converted to Cartesians if needed.
    irc_key : str
        Key selecting the integrator class from ``IRC_DICT``.
    irc_kwargs : dict
        Keyword arguments for the integrator.
    calc_getter : callable
        Returns a calculator instance.

    Returns
    -------
    The finished IRC integrator object.
    """
    print(highlight_text(f"Running IRC") + "\n")

    irc_calc = calc_getter()
    irc_calc.base_name = "irc"
    geom.set_calculator(irc_calc, clear=False)

    # IRC integration requires Cartesian coordinates; rebuild the geometry
    # when it currently uses internals.
    if geom.coord_type != "cart":
        geom = geom.copy_all(coord_type="cart")

    irc_cls = IRC_DICT[irc_key]
    irc = irc_cls(geom, **irc_kwargs)
    irc.run()

    return irc
|
|
833
|
+
|
|
834
|
+
|
|
835
|
+
def do_rmsds(xyz, geoms, end_fns, end_geoms, preopt_map=None, similar_thresh=0.25):
    """Compare chain-of-states endpoints against IRC end-optimized geometries.

    For both endpoints the RMSD to every end geometry is printed, geometries
    with RMSD below ``similar_thresh`` are flagged as similar, and condensed
    bond matrices are compared to detect identical connectivity.

    Parameters
    ----------
    xyz : str or sequence
        Filename(s)/label(s) of the starting geometries.
    geoms : sequence of Geometry
        Starting geometries; only first and last entry are used.
    end_fns : sequence
        Two filenames of the end-optimized geometries.
    end_geoms : sequence of Geometry
        Two end-optimized geometries.
    preopt_map : dict, optional
        Maps preopt filenames to original filenames, for nicer labels.
    similar_thresh : float, optional
        RMSD (au) below which geometries are flagged as similar.
    """
    if len(end_fns) != 2 or len(end_geoms) != 2:
        return
    end_fns = [str(fn) for fn in end_fns]
    max_end_len = max(len(s) for s in end_fns)

    if len(geoms) == 1:
        return
    elif len(geoms) > 2:
        geoms = (geoms[0], geoms[-1])
    assert len(geoms) == 2

    if isinstance(xyz, str):
        xyz = (f"{xyz}, first entry", f"{xyz}, last entry")
    elif (not isinstance(xyz, str)) and len(xyz) >= 2:
        xyz = (xyz[0], xyz[-1])
    assert len(xyz) == 2

    # Append original filenames, if supplied
    if preopt_map is not None:
        xyz = [f"{preopt}/{preopt_map[preopt]}" for preopt in xyz]

    max_len = max(len(s) for s in xyz)

    print(highlight_text(f"RMSDs After End Optimizations"))
    print()

    start_cbms = [get_bond_mat(geom) for geom in geoms]
    end_cbms = [get_bond_mat(geom) for geom in end_geoms]
    for i, start_geom in enumerate(geoms):
        fn = xyz[i]
        found_similar = False
        # Fixed: format spec was the invalid "{fn:>72,286s}" (',' cannot be
        # combined with 's' and would raise ValueError); pad with max_len,
        # which is computed above for exactly this purpose.
        print(f"start geom {i:>2d} ({fn:>{max_len}s})")
        # Condensed bond mat
        start_cbm = start_cbms[i]
        for j, end_geom in enumerate(end_geoms):
            end_fn = end_fns[j]
            # Compare bond matrices
            cbm_match = (start_cbm == end_cbms[j]).all()
            cbm_str = "(bond matrices match)" if cbm_match else ""
            rmsd = start_geom.rmsd(end_geom)
            similar_str = ""
            if rmsd < similar_thresh:
                found_similar = True
                similar_str = " (similar)"
            print(
                f"\tend geom {j:>2d} ({end_fn:>{max_end_len}s}): "
                f"RMSD={rmsd:>8.6f} au{similar_str} " + cbm_str
            )
        if not found_similar:
            print(f"\tRMSDs of end geometries are dissimilar to '{fn}'!")
        print()
|
|
887
|
+
|
|
888
|
+
|
|
889
|
+
def run_endopt(irc, endopt_key, endopt_kwargs, calc_getter, save_hessian=False):
    """Optimize the endpoints of a finished IRC run.

    Depending on the IRC direction(s) the first and/or last point of the IRC
    is optimized. With "fragments" enabled in ``endopt_kwargs`` the endpoints
    are additionally split into bonded fragments that are optimized
    separately ("fragments: total" also keeps the total-system optimization).

    Parameters
    ----------
    irc
        Finished IRC object providing ``forward``/``backward``/``downhill``
        flags, ``all_coords`` and ``atoms``.
    endopt_key : str
        Optimizer key for ``run_opt``.
    endopt_kwargs : dict
        Optimizer options; "fragments" and "geom" are popped here.
    calc_getter : callable
        Returns a calculator instance.
    save_hessian : bool, optional
        Forwarded to ``run_opt``.

    Returns
    -------
    tuple of lists
        (forward_results, backward_results, downhill_results).
    """
    print(highlight_text(f"Optimizing reaction path ends"))

    # Gather geometries that shall be optimized and appropriate keys.
    to_opt = list()
    if irc.forward:
        coords = irc.all_coords[0]
        to_opt.append((coords, "forward"))
    if irc.backward:
        coords = irc.all_coords[-1]
        to_opt.append((coords, "backward"))
    if irc.downhill:
        coords = irc.all_coords[-1]
        to_opt.append((coords, "downhill"))

    def to_frozensets(sets):
        # Make bond sets hashable so merge_sets can combine them.
        return [frozenset(_) for _ in sets]

    separate_fragments = endopt_kwargs.pop("fragments", False)
    total = separate_fragments in ("total", False)

    # Convert to array for easy indexing with the fragment lists
    atoms = np.array(irc.atoms)
    fragments_to_opt = list()
    # Expand endpoints into fragments if requested
    for coords, key in to_opt:
        base_name = f"{key}_end"
        c3d = coords.reshape(-1, 3)
        fragments = list()
        fragment_names = list()

        # Detect separate fragments if requested.
        if separate_fragments:
            bond_sets = to_frozensets(get_bond_sets(atoms.tolist(), c3d))
            # Sort atom indices, so the atoms don't become totally scrambled.
            fragments.extend([sorted(frag) for frag in merge_sets(bond_sets)])
            # Disable higher fragment counts. I'm looking forward to the day
            # this ever occurs and someone complains :)
            assert len(fragments) < 10, "Something probably went wrong"
            fragment_names.extend(
                [f"{base_name}_frag{i:02d}" for i, _ in enumerate(fragments)]
            )
            print(f"Found {len(fragments)} fragment(s) at {base_name}")
            for frag_name, frag in zip(fragment_names, fragments):
                print(f"\t{frag_name}: {len(frag)} atoms")

        # Optimize the whole geometries, without splitting them into fragments
        # Skip this optimization if separate fragments are requested and only
        # one fragment is present, which would result in twice the same optimization.
        skip_one_frag = separate_fragments and len(fragments) == 1
        if total and not skip_one_frag:
            # Dummy fragment containing all atom indices.
            fragments.extend(
                [
                    range(len(atoms)),
                ]
            )
            fragment_names.extend(
                [
                    base_name,
                ]
            )
        elif skip_one_frag:
            print(
                f"Only one fragment present for '{key}'. Skipping optimization "
                "of total system."
            )

        fragment_keys = [key] * len(fragments)
        fragment_atoms = [tuple(atoms[list(frag)]) for frag in fragments]
        fragment_coords = [c3d[frag].flatten() for frag in fragments]
        fragments_to_opt.extend(
            list(zip(fragment_keys, fragment_names, fragment_atoms, fragment_coords))
        )
        print()

    to_opt = fragments_to_opt

    geom_kwargs = endopt_kwargs.pop("geom")
    coord_type = geom_kwargs.pop("type")

    results = {k: list() for k in ("forward", "backward", "downhill")}
    # NOTE: the loop variables below shadow the outer `atoms`/`coords` names;
    # the array versions are no longer needed at this point.
    for key, name, atoms, coords in to_opt:
        geom = Geometry(
            atoms,
            coords,
            coord_type=coord_type,
            **geom_kwargs,
        )
        initial_fn = f"{name}_initial.xyz"
        with open(initial_fn, "w") as handle:
            handle.write(geom.as_xyz())

        def wrapped_calc_getter():
            # Give each endpoint its own calculator base name for file output.
            calc = calc_getter()
            calc.base_name = name
            return calc

        opt_kwargs = endopt_kwargs.copy()
        opt_kwargs.update(
            {
                "prefix": name,
                "h5_group_name": name,
                "dump": True,
            }
        )
        try:
            opt_result = run_opt(
                geom,
                wrapped_calc_getter,
                endopt_key,
                opt_kwargs,
                title=f"{name} Optimization",
                level=1,
                save_hessian=save_hessian,
            )
        except Exception as err:
            # Keep going with the remaining endpoints/fragments.
            print(f"{err}\nOptimization crashed!")
            continue
        final_fn = opt_result.opt.final_fn
        opt_fn = f"{name}_opt.xyz"
        shutil.move(final_fn, opt_fn)
        print(f"Moved '{final_fn.name}' to '{opt_fn}'.\n")
        results[key].append(opt_result)
    print()
    return results["forward"], results["backward"], results["downhill"]
|
|
1015
|
+
|
|
1016
|
+
|
|
1017
|
+
def run_mdp(geom, calc_getter, mdp_kwargs):
    """Run a minimum-dynamic-path calculation with output in a sub-directory.

    Parameters
    ----------
    geom : Geometry
        Geometry the MDP starts from.
    calc_getter : callable
        Must accept an ``out_dir`` keyword argument.
    mdp_kwargs : dict
        Forwarded to ``mdp()``.

    Returns
    -------
    Result object of ``mdp()``.
    """
    cwd = Path(".").resolve()
    # NOTE(review): the unconditional `break` at the end means only
    # "outdir00" is ever used, even when it already exists (mkdir failure is
    # only printed). Presumably the intent was to `continue` to the next
    # directory on FileExistsError — confirm before changing.
    for i in range(3):
        out_dir = cwd / f"outdir{i:02d}"
        try:
            os.mkdir(out_dir)
        except FileExistsError:
            print(out_dir, "exists")
        add_kwargs = {
            "out_dir": out_dir,
        }
        calc = calc_getter(**add_kwargs)
        geom.set_calculator(calc)
        mdp_result = mdp(geom, **mdp_kwargs)
        break
    return mdp_result
|
|
1033
|
+
|
|
1034
|
+
|
|
1035
|
+
def copy_yaml_and_geometries(run_dict, yaml_fn, dest_and_add_cp, new_yaml_fn=None):
    """Copy the YAML input and referenced geometry files to a new directory.

    Parameters
    ----------
    run_dict : dict
        Parsed run configuration; geometry filenames are taken from
        run_dict["geom"]["fn"] or run_dict["xyz"].
    yaml_fn : str or Path
        Path of the YAML input file; its parent is the copy source.
    dest_and_add_cp : sequence
        First item is the destination directory, remaining items are
        additional files to copy.
    new_yaml_fn : optional
        Unused.  # NOTE(review): parameter is accepted but never read.
    """
    destination, *copy_also = dest_and_add_cp
    src_path = Path(yaml_fn).resolve().parent
    destination = Path(destination)
    try:
        print(f"Trying to create directory '{destination}' ... ", end="")
        os.mkdir(destination)
        print("done")
    except FileExistsError:
        print("already exists")
    if "geom" in run_dict:
        xyzs = run_dict["geom"]["fn"]
        # "fn" may be a mapping; then only its values are filenames.
        try:
            xyzs = list(xyzs.values())
        except AttributeError:
            pass
    else:
        xyzs = run_dict["xyz"]
    print("Copying:")
    # Copy geometries
    # When newlines are present we have an inline xyz formatted string
    if not "\n" in xyzs:
        if isinstance(xyzs, str):
            xyzs = [
                xyzs,
            ]
        for xyz in xyzs:
            # "lib:" entries refer to bundled geometries, not local files.
            if xyz.startswith("lib:"):
                continue
            shutil.copy(src_path / xyz, destination)
            print("\t", xyz)
    else:
        print("Found inline xyz formatted string. No files to copy!")
    # Copy additional files
    for src in copy_also:
        try:
            shutil.copy(src_path / src, destination)
            print(f"\t{src}")
        except FileNotFoundError:
            print(f"\tCould not find '{src}'. Skipping!")
    # Update yaml_fn to match destination
    yaml_dest_fn = Path(destination.stem).with_suffix(".yaml")
    shutil.copy(yaml_fn, destination / yaml_dest_fn)
    print("\t", yaml_fn)
|
|
1079
|
+
|
|
1080
|
+
|
|
1081
|
+
def get_defaults(conf_dict, T_default=T_DEFAULT, p_default=p_DEFAULT):
    """Build the default run configuration for the sections in `conf_dict`.

    Every possible top-level section starts out disabled (None); for each
    section actually present in ``conf_dict`` sensible defaults are filled
    in, to be overridden by the user's values later.

    Parameters
    ----------
    conf_dict : dict
        User-supplied run configuration; only its keys are inspected here.
    T_default, p_default
        Default temperature and pressure used by optimizer/MD/barrier
        sections.

    Returns
    -------
    dict
        Default configuration dictionary.
    """
    # Defaults
    dd = {
        "assert": None,
        "afir": None,
        "barrier": None,
        "calc": {
            "type": "dummy",
        },
        "cos": None,
        "endopt": None,
        "geom": None,
        "glob": None,
        "interpol": None,
        "irc": None,
        "md": None,
        "mdp": None,
        "opt": None,
        "perf": None,
        "precontr": None,
        "preopt": None,
        "scan": None,
        "stocastic": None,
        "shake": None,
        "tsopt": None,
    }

    # Baseline optimizer settings for single molecules ...
    mol_opt_defaults = {
        "dump": True,
        "max_cycles": 150,
        "overachieve_factor": 5,
        "type": "rfo",
        "do_hess": False,
        "T": T_default,
        "p": p_default,
    }
    # ... and for chain-of-states optimizations.
    cos_opt_defaults = {
        "type": "qm",
        "align": True,
        "dump": True,
    }
    if "interpol" in conf_dict:
        dd["interpol"] = {
            "align": True,
        }

    if "cos" in conf_dict:
        dd["cos"] = {
            "type": "neb",
            "fix_first": True,
            "fix_last": True,
        }
        dd["opt"] = cos_opt_defaults.copy()
    # Use a different, more powerful, optimizer when we are not dealing
    # with a COS-optimization.
    elif "opt" in conf_dict:
        dd["opt"] = mol_opt_defaults.copy()
    elif "stocastic" in conf_dict:
        dd["stocastic"] = {
            "type": "frag",
        }

    def get_opt_geom_defaults():
        # Fresh dict each call, so sections can't share (and mutate) one.
        return {
            "type": "redund",
            "coord_kwargs": {},
        }

    if "tsopt" in conf_dict:
        dd["tsopt"] = mol_opt_defaults.copy()
        dd["tsopt"].update(
            {
                "type": "rsprfo",
                "h5_group_name": "tsopt",
                "prefix": "ts",
            }
        )
        if "cos" in conf_dict:
            dd["tsopt"]["geom"] = get_opt_geom_defaults()

    if "precontr" in conf_dict:
        dd["precontr"] = {
            "prefix": "precontr",
        }

    if "preopt" in conf_dict:
        # We can't just copy dd["opt"] because there will probably be
        # some COS specific optimizer, but we just want to optimize the
        # (molecular) endpoints.
        dd["preopt"] = mol_opt_defaults.copy()
        dd["preopt"].update(
            {
                # Optimization specific
                # We are a bit more cautious here
                "max_cycles": 100,
                "thresh": "gau_loose",
                "trust_max": 0.3,
                "geom": get_opt_geom_defaults(),
                # Preopt specific
                "strict": False,
            }
        )
        # dd["preopt"]["geom"]["type"] = "tric"

    if "endopt" in conf_dict:
        dd["endopt"] = mol_opt_defaults.copy()
        dd["endopt"].update(
            {
                "thresh": "gau",
                "fragments": False,
                "geom": get_opt_geom_defaults(),
            }
        )

    if "barriers" in conf_dict:
        dd["barriers"] = {
            "T": T_default,
            "p": p_default,
            "solv_calc": {},
            "do_standard_state_corr": True,
        }

    if "shake" in conf_dict:
        dd["shake"] = {
            "scale": 0.1,
            "seed": None,
        }

    if "irc" in conf_dict:
        dd["irc"] = {
            "type": "eulerpc",
            "rms_grad_thresh": 1e-3,
        }

    if "assert" in conf_dict:
        dd["assert"] = {}

    if "geom" in conf_dict:
        dd["geom"] = {
            "type": "cart",
            "coord_kwargs": {},
        }

    if "mdp" in conf_dict:
        dd["mdp"] = {}

    if "scan" in conf_dict:
        dd["scan"] = {
            "opt": mol_opt_defaults.copy(),
            "symmetric": False,
        }
        # Per-step scan optimizations should not dump their own trajectories.
        dd["scan"]["opt"]["dump"] = False

    if "md" in conf_dict:
        md_T = T_default
        dd["md"] = {
            "T": md_T,
            "T_init_vel": md_T,
            "dt": 0.5,
            "thermostat": "csvr_2",
            "timecon": 50,
            "print_stride": 100,
            "dump_stride": 10,
            "remove_com_v": True,
        }

    if "perf" in conf_dict:
        dd["perf"] = {
            "mems": 2500,
            "repeat": 3,
        }

    if "afir" in conf_dict:
        dd["afir"] = {}

    return dd
|
|
1257
|
+
|
|
1258
|
+
|
|
1259
|
+
def get_last_calc_cycle():
    """Determine the counter of the last fully finished calculation cycle.

    Scans the current working directory for calculator logs named
    ``image_{image}.{cycle}.out`` and returns the highest cycle number for
    which at least as many logs exist as for the preceding cycle. A cycle
    with fewer logs than its predecessor is considered unfinished.

    Returns
    -------
    int
        Counter of the last complete cycle; 0 when no logs were found.
    """

    def keyfunc(path):
        # Extract the cycle counter as int, so sorting and grouping are
        # numeric instead of lexicographic ("10" must sort after "2").
        # Dots are escaped so only literal '.' separators match.
        return int(re.match(r"image_\d+\.(\d+)\.out", str(path))[1])

    cwd = Path(".")
    calc_logs = [str(cl) for cl in cwd.glob("image_*.*.out")]
    # groupby() requires the data sorted by the same key.
    calc_logs = sorted(calc_logs, key=keyfunc)
    grouped = it.groupby(calc_logs, key=keyfunc)
    # Find the last completely finished cycle.
    last_length = 0
    last_calc_cycle = 0
    for calc_cycle, group in grouped:
        cycle_length = len(list(group))
        if cycle_length < last_length:
            # This cycle has fewer items than the last one, i.e. it is an
            # unfinished cycle.
            break
        last_length = cycle_length
        last_calc_cycle = int(calc_cycle)
    if last_calc_cycle == 0:
        print("Can't find any old calculator logs.")
    print(f"Last calculation counter is {last_calc_cycle}.")
    return last_calc_cycle
|
|
1282
|
+
|
|
1283
|
+
|
|
1284
|
+
# Top-level sections that are recognized in a run configuration. Any other
# key in the user supplied input triggers an assertion in setup_run_dict().
VALID_KEYS = set(
    "afir assert barriers calc cos endopt geom interpol irc md mdp "
    "opt perf precontr preopt scan shake stocastic tsopt".split()
)
|
|
1305
|
+
|
|
1306
|
+
|
|
1307
|
+
def setup_run_dict(run_dict):
    """Merge the user supplied configuration onto the default configuration.

    Parameters
    ----------
    run_dict : dict
        Parsed user input (e.g. from a YAML file). Only keys from
        VALID_KEYS are allowed.

    Returns
    -------
    dict
        Configuration with defaults filled in and user values applied.
    """
    user_dict = run_dict.copy()

    # Load defaults to have a sane baseline.
    run_dict = get_defaults(run_dict)

    user_keys = set(user_dict.keys())
    assert (
        user_keys <= VALID_KEYS
    ), f"Found invalid keys in YAML input: {user_keys - VALID_KEYS}"
    for key in user_keys & VALID_KEYS:
        try:
            # Recursive update, because there may be nested dicts.
            recursive_update(run_dict[key], user_dict[key])
        except TypeError:
            # Section value was not a dict (e.g. just 'True'); keep defaults.
            print(f"Using default values for '{key}' section.")
    return run_dict
|
|
1325
|
+
|
|
1326
|
+
|
|
1327
|
+
# Container bundling everything a run may produce. Fields that do not apply
# to the executed job type are simply left at None by the caller.
RunResult = namedtuple(
    "RunResult",
    [
        "preopt_first_geom",
        "preopt_last_geom",
        "cos",
        "cos_opt",
        "ts_geom",
        "ts_opt",
        "end_geoms",
        "irc",
        "irc_geom",
        "mdp_result",
        "opt_geom",
        "opt",
        "calced_geoms",
        "calced_results",
        "stocastic",
        "calc_getter",
        "scan_geoms",
        "scan_vals",
        "scan_energies",
        "perf_results",
    ],
)
|
|
1342
|
+
|
|
1343
|
+
|
|
1344
|
+
def main(run_dict, restart=False, yaml_dir="./", scheduler=None):
    """Dispatch and execute the requested job type(s).

    Depending on which sections of ``run_dict`` are set, this runs one of:
    stochastic searches, MD, scans, performance benchmarks, AFIR paths, or
    the full opt -> tsopt -> IRC -> endopt chain. Sections are consumed
    destructively ('type' keys are popped from the nested dicts).

    NOTE(review): 'restart' is accepted but not used anywhere in this body
    — confirm whether restart handling was removed or lives elsewhere.
    NOTE(review): 'yaml_dir' is combined via the '/' operator below, so
    callers must pass a pathlib.Path; the "./" string default would fail
    — confirm all callers pass a Path.
    """

    # Dump run_dict
    run_dict_copy = run_dict.copy()
    run_dict_copy["version"] = __version__
    with open("RUN.yaml", "w") as handle:
        yaml.dump(run_dict_copy, handle)

    # Unpack the individual sections. Each pop("type") mutates run_dict.
    if run_dict["interpol"]:
        interpol_key = run_dict["interpol"].pop("type")
        interpol_kwargs = run_dict["interpol"]
    # Preoptimization prior to COS optimization
    if run_dict["preopt"]:
        preopt_key = run_dict["preopt"].pop("type")
        preopt_kwargs = run_dict["preopt"]
    # Optimization of fragments after IRC integration
    if run_dict["endopt"]:
        endopt_key = run_dict["endopt"].pop("type")
        endopt_kwargs = run_dict["endopt"]
    if run_dict["opt"]:
        opt_key = run_dict["opt"].pop("type")
        opt_kwargs = run_dict["opt"]
    if run_dict["cos"]:
        cos_key = run_dict["cos"].pop("type")
        cos_kwargs = run_dict["cos"]
        cos_kwargs["scheduler"] = scheduler
    if run_dict["stocastic"]:
        stoc_key = run_dict["stocastic"].pop("type")
        stoc_kwargs = run_dict["stocastic"]
    if run_dict["tsopt"]:
        tsopt_key = run_dict["tsopt"].pop("type")
        tsopt_kwargs = run_dict["tsopt"]
    if run_dict["irc"]:
        irc_key = run_dict["irc"].pop("type")
        irc_kwargs = run_dict["irc"]
    if run_dict["afir"]:
        afir_key = run_dict["afir"].pop("type")
        afir_kwargs = run_dict["afir"]

    # Handle geometry input. This section must always be present.
    geom_kwargs = run_dict["geom"]
    xyz = geom_kwargs.pop("fn")
    coord_type = geom_kwargs.pop("type")
    union = geom_kwargs.pop("union", None)

    ####################
    # CALCULATOR SETUP #
    ####################

    # Prepare calculator
    calc_key = run_dict["calc"].pop("type")
    save_hessian = run_dict["calc"].pop("save_hessian", False)
    calc_kwargs = run_dict["calc"]
    calc_run_func = calc_kwargs.pop("run_func", None)
    calc_kwargs["out_dir"] = calc_kwargs.get("out_dir", yaml_dir / OUT_DIR_DEFAULT)
    calc_base_name = calc_kwargs.get("base_name", "calculator")
    # Some calculator types need per-image iterables fed into the closure.
    if calc_key in ("oniom", "ext"):
        geoms = get_geoms(xyz, quiet=True)
        iter_dict = {
            "geom": iter(geoms),
        }
    elif calc_key == "multi":
        geoms = get_geoms(xyz, quiet=True)
        iter_dict = {
            "base_name": iter([geom.name for geom in geoms]),
        }
    else:
        iter_dict = None
    calc_getter = get_calc_closure(
        calc_base_name, calc_key, calc_kwargs, iter_dict=iter_dict
    )
    # Create second function that returns a wrapped calculator. This may be
    # useful if we later want to drop the wrapper and use the actual calculator.
    if "calc" in calc_kwargs:
        act_calc_kwargs = calc_kwargs["calc"].copy()
        act_calc_key = act_calc_kwargs.pop("type")
        act_calc_getter = get_calc_closure(
            "act_calculator", act_calc_key, act_calc_kwargs
        )
    # Optional separate calculator for solvated single points in the
    # 'barriers' section; absence of the keys just disables it.
    try:
        solv_calc_kwargs = run_dict["barriers"].pop("solv_calc")
        solv_calc_key = solv_calc_kwargs.pop("type")
        solv_calc_getter = get_calc_closure(
            "solv_calculator", solv_calc_key, solv_calc_kwargs
        )
    except KeyError:
        solv_calc_getter = None

    ##################
    # GEOMETRY SETUP #
    ##################

    # Initial loading of geometries from file(s)
    geoms = get_geoms(xyz, coord_type="cart")

    # ------------------------+
    # Preconditioning of      |
    # Translation & Rotation  |
    # ------------------------+

    if run_dict["precontr"]:
        # NOTE(review): second argument is geoms[1] but the result is
        # assigned to geoms[-1] — confirm intended when len(geoms) > 2.
        ptr_geom0, ptr_geom_m1 = run_precontr(
            geoms[0], geoms[1], prefix=run_dict["precontr"]["prefix"]
        )
        geoms[0] = ptr_geom0
        geoms[-1] = ptr_geom_m1

    # -----------------------+
    # Preoptimization of     |
    # first & last image(s)  |
    # -----------------------+

    # Preoptimization only makes sense with a subsequent COS run.
    if run_dict["preopt"] and (run_dict["cos"] or run_dict["afir"]):
        assert len(geoms) > 1
        # Preopt should be expanded to support > 2 fragments with AFIR
        if run_dict["afir"] and len(geoms) > 2:
            raise Exception("Currently, only the first & last geometry are optimized!")
        first_opt_result, last_opt_result = run_preopt(
            geoms[0],
            geoms[-1],
            calc_getter,
            preopt_key,
            preopt_kwargs,
            save_hessian=save_hessian,
        )
        # Update with (pre)optimized geometries. 'preopt_first_geom'/'preopt_last_geom'
        # are assigned here so they can later be assigned to 'run_result':
        geoms[0] = preopt_first_geom = first_opt_result.geom.copy(coord_type=coord_type)
        geoms[-1] = preopt_last_geom = last_opt_result.geom.copy(coord_type=coord_type)

    if run_dict["interpol"]:
        # Interpolate. Will return the original geometries for between = 0
        geoms = interpolate_all(geoms, kind=interpol_key, **interpol_kwargs)
        dump_geoms(geoms, "interpolated")

    # Recreate geometries with desired coordinate type and keyword arguments
    geoms = standardize_geoms(geoms, coord_type, geom_kwargs, union=union)

    # Create COS objects and supply a function that yields new Calculators,
    # as needed for growing COS classes, where images are added over time.
    if run_dict["cos"]:
        cos_cls = COS_DICT[cos_key]
        if issubclass(cos_cls, GrowingChainOfStates) or isinstance(
            cos_cls, type(FreezingString)
        ):
            cos_kwargs["calc_getter"] = get_calc_closure("image", calc_key, calc_kwargs)
        geom = COS_DICT[cos_key](geoms, **cos_kwargs)
    elif len(geoms) == 1:
        geom = geoms[0]

    # Job-type dispatch: exactly one of the following branches runs.
    if run_dict["stocastic"]:
        stoc_kwargs["calc_kwargs"] = calc_kwargs
        stocastic = STOCASTIC_DICT[stoc_key](geom, **stoc_kwargs)
        stocastic = run_stocastic(stocastic)
    elif run_dict["md"]:
        md_kwargs = run_dict["md"].copy()
        run_md(geom, calc_getter, md_kwargs)
    elif run_dict["scan"]:
        scan_kwargs = run_dict["scan"]
        scan_geoms, scan_vals, scan_energies = run_scan(geom, calc_getter, scan_kwargs)
    elif run_dict["perf"]:
        perf_results = run_perf(geom, calc_getter, **run_dict["perf"])
        print_perf_results(perf_results)
    elif run_dict["afir"]:
        ts_guesses, afir_paths = run_afir_paths(
            afir_key, geoms, calc_getter, **afir_kwargs,
        )
    # This case will handle most pysisyphus runs. A full run encompasses
    # the following steps:
    #
    # (0. Preoptimization, already handled)
    # 1. (COS)-Optimization
    # 2. TS-Optimization by TSHessianOptimizer or Dimer method
    # 3. IRC integration
    # 4. Optimization of IRC endpoints
    #
    # Everything can be chained. All functions operate on the 'geom' object,
    # which is propagated along through all functions calls.
    #
    # All keys are present in 'run_dict', but most of the corresponding values will
    # be set to zero.
    elif any(
        [run_dict[key] is not None for key in ("opt", "tsopt", "irc", "mdp", "endopt")]
    ):

        #######
        # OPT #
        #######

        if run_dict["opt"]:
            if run_dict["shake"]:
                # Randomly displace coordinates before optimizing.
                print(highlight_text("Shake coordinates"))
                shaked_coords = shake_coords(geom.coords, **run_dict["shake"])
                geom.coords = shaked_coords
                print(f"Shaken coordinates:\n{geom.as_xyz()}")
            opt_result = run_opt(
                geom,
                calc_getter,
                opt_key,
                opt_kwargs,
                print_thermo=True,
                save_hessian=save_hessian,
            )
            opt_geom = opt_result.geom
            opt = opt_result.opt
            # Keep a backup of the optimized geometry
            if isinstance(opt_geom, ChainOfStates.ChainOfStates):
                # Set some variables that are later collected into RunResult
                cos = opt_geom
                cos_opt = opt
                # copy() is not present for ChainOfState objects, so we just keep
                # using the COS object with a different name.
                geom = opt_geom
            else:
                geom = opt_geom.copy()

        #########
        # TSOPT #
        #########

        abort = False
        if run_dict["tsopt"]:
            # Use a separate implementation for TS-Optimizations started from
            # COS-optimizations.
            try:
                if isinstance(geom, ChainOfStates.ChainOfStates):
                    ts_calc_getter = get_calc_closure(tsopt_key, calc_key, calc_kwargs)
                    ts_opt_result = run_tsopt_from_cos(
                        geom,
                        tsopt_key,
                        tsopt_kwargs,
                        ts_calc_getter,
                        save_hessian=save_hessian,
                    )
                else:
                    ts_opt_result = run_opt(
                        geom,
                        calc_getter,
                        tsopt_key,
                        tsopt_kwargs,
                        title="TS-Optimization",
                        copy_final_geom="ts_opt.xyz",
                        save_hessian=save_hessian,
                    )
                ts_geom = ts_opt_result.geom
                ts_opt = ts_opt_result.opt
                geom = ts_geom.copy_all()
            except HEIIsFirstOrLastException:
                # Highest-energy image at the chain end; skip IRC below.
                abort = True

        #######
        # IRC #
        #######

        ran_irc = False
        if (not abort) and run_dict["irc"]:
            # After a Dimer run we continue with the actual calculator
            # and not the Dimer calculator.
            if calc_key == "dimer":
                calc_getter = act_calc_getter
            irc_geom = geom.copy()
            irc = run_irc(geom, irc_key, irc_kwargs, calc_getter)
            # IRC geom won't have a calculator, so we set the appropriate values here.
            irc_geom.energy = irc.ts_energy
            # irc_geom.cart_hessian = irc.init_hessian
            ran_irc = True

        ##########
        # ENDOPT #
        ##########

        # Run 'endopt' when a previous IRC calculation was done
        # if ran_irc and run_dict["endopt"]:
        if run_dict["endopt"]:
            if not ran_irc:
                # Without a real IRC run, build a stand-in from the input
                # geometries (expects exactly three: first, TS, last).

                _, irc_geom, _ = geoms  # IRC geom should correspond to the TS

                class DummyIRC:
                    # Minimal object mimicking the attributes of a finished
                    # IRC that run_endopt() and the code below read.
                    def __init__(self, geoms):
                        first, _, last = geoms
                        self.atoms = copy.copy(first.atoms)
                        self.forward = True
                        self.backward = True
                        self.downhill = False
                        self.all_coords = [
                            geom.cart_coords.copy() for geom in (first, last)
                        ]
                        self.hessian_init = "dummy"

                irc = DummyIRC(geoms)

            do_thermo = run_dict["endopt"].get("do_hess", False) and can_thermoanalysis
            T = run_dict["endopt"]["T"]
            p = run_dict["endopt"]["p"]
            # Order is forward, backward, downhill
            endopt_results = run_endopt(
                irc,
                endopt_key,
                endopt_kwargs,
                calc_getter,
                save_hessian=save_hessian,
            )

            # Determine "left" and "right" geoms
            # Only downhill
            if irc.downhill:
                left_results = endopt_results[2]
            # Only backward
            elif irc.backward and not irc.forward:
                left_results = endopt_results[1]
            # Forward and backward run
            else:
                left_results = endopt_results[0]

            left_geoms = [result.geom for result in left_results]
            left_fns = [result.fn for result in left_results]
            # Use 'backward' results; might be empty.
            right_geoms = [result.geom for result in endopt_results[1]]
            right_fns = [result.fn for result in endopt_results[1]]

            end_geoms = left_geoms + right_geoms
            if run_dict["cos"]:
                end_fns = left_fns + right_fns
                do_rmsds(xyz, geoms, end_fns, end_geoms)

            # Try to compute barriers.
            barrier_kwargs = {
                "do_thermo": do_thermo,
                "T": T,
                "p": p,
                "calc_getter": calc_getter,
                "solv_calc_getter": solv_calc_getter,
            }
            barrier_kwargs_ = run_dict.get("barriers", {})
            barrier_kwargs.update(barrier_kwargs_)
            do_endopt_ts_barriers(
                irc_geom,
                left_geoms,
                right_geoms,
                left_fns=left_fns,
                right_fns=right_fns,
                **barrier_kwargs,
            )

            # Dump TS and endopt geoms to _trj.xyz. But only when we did not optimize
            # separate fragments.
            if len(left_geoms) == 1 and len(right_geoms) in (0, 1):
                trj_fn = "left_ts_right_geoms_trj.xyz"
                write_geoms_to_trj(
                    list(it.chain(left_geoms, [irc_geom], right_geoms)),
                    trj_fn,
                    comments=list(it.chain(left_fns, ["IRC start"], right_fns)),
                )
                print(f"Wrote optimized end-geometries and TS to '{trj_fn}'")

        if run_dict["mdp"]:
            mdp_kwargs = run_dict["mdp"]
            mdp_result = run_mdp(geom, calc_getter, mdp_kwargs)
    # Fallback when no specific job type was specified
    else:
        calced_geoms, calced_results = run_calculations(
            geoms, calc_getter, scheduler, run_func=calc_run_func
        )

    # We can't use locals() in the dict comprehension, as it runs in its own
    # local scope.
    locals_ = locals()
    # Collect whichever result variables the executed branch defined; the
    # remaining RunResult fields stay None.
    results = {key: locals_.get(key, None) for key in RunResult._fields}
    run_result = RunResult(**results)
    return run_result
|
|
1716
|
+
|
|
1717
|
+
|
|
1718
|
+
def check_asserts(results, run_dict):
    """Compare selected run-result attributes against reference values.

    The 'assert' section of run_dict maps dotted keys like
    'opt_geom.energy' onto expected values; each is resolved on 'results'
    and compared via approx_float(). Raises AssertionError when any
    comparison fails.
    """
    print(highlight_text(f"Asserting results"))

    assertions = run_dict["assert"]
    matches = []
    for i, (dotted_key, ref_val) in enumerate(assertions.items()):
        obj, attr = dotted_key.split(".")
        cur_val = getattr(getattr(results, obj), attr)
        matched = approx_float(cur_val, ref_val)
        print(f"{i:02d}: {obj}.{attr}")
        print(f"\tReference: {ref_val}")
        print(f"\t Current: {cur_val}")
        print(f"\t Matches: {bool_color(matched)}")
        matches.append(matched)

    assert all(matches)
    print()
|
|
1737
|
+
|
|
1738
|
+
|
|
1739
|
+
def do_clean(force=False):
    """Deletes files from previous runs in the cwd.

    All files matching one of the glob patterns below are listed and then
    removed — immediately when ``force`` is True, otherwise only after the
    user confirms interactively.

    A similar function could be used to store everything ...

    Parameters
    ----------
    force : bool, optional
        Delete without asking for confirmation.
    """
    cwd = Path(".").resolve()
    rm_globs = (
        "cycle*_trj.xyz",
        "interpolated_trj.xyz",
        "interpolated.image*.xyz",
        "calculator.log",
        "optimizer.log",
        "tsoptimizer.log",
        "wfoverlap.log",
        "host_*.calculator.log",
        "host_*.wfoverlap.log",
        # FIX: a missing comma previously fused these two patterns into the
        # single dead pattern "wfo_*.outoptimization_trj.xyz".
        "wfo_*.out",
        "optimization_trj.xyz",
        "cos.log",
        "*.gradient",
        "optimizer_results.yaml",
        # ORCA specific
        "*.orca.gbw",
        "*.orca.cis",
        "*.orca.engrad",
        "*.orca.hessian",
        "*.orca.inp",
        "*.orca.hess",
        "*.orca.molden",
        # OpenMOLCAS specific
        "calculator*.out",
        "calculator*.JobIph",
        "calculator*.RasOrb",
        "*rasscf.molden",
        # Turbomole specific
        "calculator_*.control",
        "calculator_*.coord",
        "calculator_*.mos",
        "calculator_*.ciss_a",
        "calculator*.sing_a",
        "*wavefunction.molden",
        "*input.xyz",
        "*.coord",
        # PySCF specific
        "calculator*.chkfile",
        "*.pyscf.out",
        "*.chkfile",
        # WFOverlap specific
        "wfo_*.*.out",
        # XTB specific
        "image*.grad",
        "calculator*.grad",
        "calculator*.xcontrol",
        "calculator*.charges",
        "image_*",
        "splined_ts_guess.xyz",
        "splined_hei_tangent",
        "cart_hei_tangent_trj.xyz",
        "dimer_ts.xyz",
        "dimer_pickle",
        "interpolated.geom_*.xyz",
        # Wavefunction overlap
        "wfo_*",
        "image*.molden",
        "jmol.spt",
        "overlap_data.h5",
        "*_CDD.png",
        "*_CDD.cub",
        "internal_coords.log",
        "hei_tangent",
        "optimization_trj.xyz",
        "splined_hei.xyz",
        "ts_opt.xyz",
        "final_geometry.xyz",
        "calculated_init_hessian",
        "cur_out",
        # HDF5 files
        "optimization.h5",
        "afir.h5",
        # Optimization files
        "*_optimization_trj.xyz",
        # Preopt files
        "first_*",
        "last_*",
        # TSOpt
        "rsirfo*",
        # IRC files
        "irc_*",
        "irc.log",
        "finished_*",
        # IRC/Endopt files
        "backward_*",
        "forward_*",
        # Misc
        "*imaginary_mode_*_trj.xyz",
        "cart_hei_tangent",
        "ts_calculated_init_cart_hessian",
        "calculated_final_cart_hessian",
        "*final_geometry.xyz",
        "*final_geometries_trj.xyz",
        "current_geometry.xyz",
        "*current_geometries_trj.xyz",
        "hess_calc_cyc*.h5",
        "ts_hess_calc_cyc*.h5",
        "hess_init_irc.h5",
        "final_hessian.h5",
        "ts_current_geometry.xyz",
        "dimer_*",
        "plain_hei_tangent",
        "plain_hei.xyz",
        "hess_calc_irc*.h5",
        "rebuilt_primitives.xyz",
        "RUN.yaml",
        "middle_for_preopt_trj.xyz",
        "relaxed_scan_trj.xyz",
        "too_similar_trj.xyz",
        # MDP
        "mdp_ee_ascent_trj.xyz",
        "mdp_ee_fin_*_trj.xyz",
        "mdp_ee_init_*_trj.xyz",
        "aligned.geom*xyz",
        "cos_hei_trj.xyz",
        # Dimer
        "calculator_*.N",
        "calculator_*.N_trj.xyz",
        "dimer.log",
        "*.gfnff_topo",
        # DFTB+
        "*.detailed.out",
        "*.geometry.gen",
        "*.dftb_in.hsd",
        "*.EXC.DAT",
        "*.XplusY.DAT",
        "*.dftb.out",
        "rsprfo_*",
        "reparametrized_trj.xyz",
        "end_geoms_and_ts_trj.xyz",
        "left_ts_right_geoms_trj.xyz",
        "ts_final_hessian.h5",
        "third_deriv.h5",
        "*.ao_ovlp_rec",
        # MOPAC
        "*.mopac.aux",
        "*.mopac.arc",
        "*.mopac.mop",
        "*.mopac.out",
    )
    to_rm_paths = list()
    # 'pattern' instead of 'glob' avoids shadowing the stdlib module name.
    for pattern in rm_globs:
        to_rm_paths.extend(list(cwd.glob(pattern)))
    to_rm_strs = [str(p) for p in to_rm_paths]
    for s in to_rm_strs:
        print(s)

    def delete():
        # Remove matched files; races with other processes are tolerated.
        for p in to_rm_paths:
            try:
                os.remove(p)
                print(f"Deleted {p}")
            except FileNotFoundError:
                pass
        try:
            os.unlink("cur_out")
        except FileNotFoundError:
            pass

    if force:
        delete()
        return
    # If we dont force the cleaning ask for confirmation first
    elif to_rm_paths and confirm_input("Delete these files?"):
        delete()
    else:
        # NOTE: also printed when the user declined deletion above.
        print("No files found for removal.")
|
|
1910
|
+
|
|
1911
|
+
|
|
1912
|
+
def print_header():
    """Generated from https://asciiartgen.now.sh/?s=pysisyphus&style=colossal"""
    # Plain ASCII-art logo, shown for most of the year.
    normal_logo = """ d8b 888
 Y8P 888
 888
 88888b. 888 888 .d8888b 888 .d8888b 888 888 88888b. 88888b. 888 888 .d8888b
 888 "88b 888 888 88K 888 88K 888 888 888 "88b 888 "88b 888 888 88K
 888 888 888 888 "Y8888b. 888 "Y8888b. 888 888 888 888 888 888 888 888 "Y8888b.
 888 d88P Y88b 888 X88 888 X88 Y88b 888 888 d88P 888 888 Y88b 888 X88
 88888P" "Y88888 88888P' 888 88888P' "Y88888 88888P" 888 888 "Y88888 88888P'
 888 888 888 888
 888 Y8b d88P Y8b d88P 888
 888 "Y88P" "Y88P" 888 """

    # Decorated variant used in December; raw string because of the
    # backslashes in the decorations.
    xmas_logo = r"""
 \|/
 d8b --o-- 888 X
 x Y8P /|\ 888
 888
 88888b. 888 888 .d8888b 888 .d8888b 888 888 88888b. 88888b. 888 888 .d8888b
 888 "88b 888 888 88K 888 88K 888 888 888 "88b 888 "88b 888 888 88K
 888 888 888 888 "Y8888b. 888 "Y8888b. 888 888 888 888 888 888 888 888 "Y8888b.
 888 d88P Y88b 888 X88 888 X88 Y88b 888 888 d88P 888 888 Y88b 888 X88
 88888P" "Y88888 88888P' 888 88888P' "Y88888 88888P" 888 888 "Y88888 88888P'
 888 | 888 | | | | 888 888 | | |
 888 O Y8b d88P o | X / \ Y8b d88P 888 x / \ O
 888 "Y88P" O \_/ "Y88P" 888 \_/
 | x | \|/
 X | xox O --X--
 O x /|\
 """
    now = datetime.datetime.now()
    today = now.date()
    # Use the festive logo from December 1st through the 24th.
    xmas = (today.month == 12) and (today.day <= 24)
    logo = xmas_logo if xmas else normal_logo
    version = f"Version {__version__}"
    try:
        # Dev versions look like '1.2.3.dev4+g<sha>.<date>'; pull the sha.
        commit = re.compile(r"g(\w+)\.").search(version).group(1)
        commit_line = f"Git commit {commit}\n"
    # Raised when regex search failed
    except AttributeError:
        commit_line = ""
    vi = sys.version_info
    sv = f"{vi.major}.{vi.minor}.{vi.micro}"  # Python
    npv = np.__version__  # Numpy
    spv = sp.__version__  # SciPy
    cwd = Path(".").resolve()
    # Banner: logo, version/interpreter info, timestamp, host and cwd.
    print(
        f"{logo}\n\n{version} (Python {sv}, NumPy {npv}, SciPy {spv})\n"
        f"{commit_line}"
        f"Executed at {now.strftime('%c')} on '{platform.node()}'\n"
        f"Platform: {platform.platform()}\n"
        f"Interpreter: {sys.executable}\n"
        f"Current working directory: {cwd}\n"
    )
|
|
1967
|
+
|
|
1968
|
+
|
|
1969
|
+
def print_bibtex():
    """Print the BibTeX entry for citing pysisyphus."""
    print(
        textwrap.dedent(
            """@article{Steinmetzer2020,
doi = {10.1002/qua.26390},
url = {https://doi.org/10.1002/qua.26390},
year = {2020},
month = aug,
publisher = {Wiley},
author = {Johannes Steinmetzer and Stephan Kupfer and Stefanie Gräfe},
title = {pysisyphus: Exploring potential energy surfaces in ground and excited states},
journal = {International Journal of Quantum Chemistry}
}"""
        )
    )
|
|
1983
|
+
|
|
1984
|
+
|
|
1985
|
+
def run_from_dict(
    run_dict,
    cwd=None,
    set_defaults=True,
    yaml_fn=None,
    cp=None,
    scheduler=None,
    clean=False,
    fclean=False,
    version=False,
    restart=False,
):
    """Top-level driver: print banner, prepare the run_dict and execute main().

    Several flags short-circuit before any calculation: 'cp' copies input
    files, 'clean'/'fclean' remove old run artifacts, 'version' just shows
    the header. Otherwise main() is called and, when an 'assert' section is
    present, the results are checked afterwards.
    """
    if cwd is None:
        cwd = Path(".")

    print_header()

    # Citation
    citation = (
        "If pysisyphus benefitted your research please cite:\n\n"
        "\thttps://doi.org/10.1002/qua.26390\n\nGood luck!\n"
    )
    print(citation)

    init_logging(cwd, scheduler)
    # Load defaults etc.
    if set_defaults:
        run_dict = setup_run_dict(run_dict)
    sys.stdout.flush()

    # Early-exit utility modes; the banner has already been printed.
    if cp:
        copy_yaml_and_geometries(run_dict, yaml_fn, cp)
        return
    elif clean:
        do_clean()
        return
    elif fclean:
        do_clean(force=True)
        return
    # Return after header was printed
    elif version:
        return

    # Echo the effective configuration, omitting disabled (None) sections.
    run_dict_without_none = {k: v for k, v in run_dict.items() if v is not None}
    pprint(run_dict_without_none, compact=True, width=200)
    print()
    sys.stdout.flush()

    run_result = main(run_dict, restart, cwd, scheduler)

    if run_dict["assert"] is not None:
        print()
        check_asserts(run_result, run_dict)

    return run_result
|
|
2040
|
+
|
|
2041
|
+
|
|
2042
|
+
def load_run_dict(yaml_fn):
    """Load and parse a YAML run configuration.

    On a ConstructorError caused by an unknown unit tag (e.g. '!kcal'),
    the closest known unit is suggested before re-raising. On a parse
    error, or when the document is not a mapping, an error is printed and
    the process exits with status 1.

    Parameters
    ----------
    yaml_fn : str or path-like
        Path to the YAML input file.

    Returns
    -------
    dict
        The parsed run configuration.
    """
    with open(yaml_fn) as handle:
        yaml_str = handle.read()
    try:
        loader = get_loader()
        try:
            run_dict = yaml.load(yaml_str, Loader=loader)
        except yaml.constructor.ConstructorError as err:
            # Unknown '!unit' tags end up as ConstructorErrors; try to
            # suggest the closest known unit.
            mobj = re.compile(r"for the tag '\!(\w+)'").search(err.problem)
            if mobj:
                err_unit = mobj.group(1)
                best_match, _ = find_closest_sequence(err_unit, UNITS)
                print(
                    f"Unknown unit!\nKnown units are\n'{UNITS}'.\n"
                    f"Did you mean '{best_match}', instead of '{err_unit}'?\n"
                )
            # Re-raise with the original traceback intact.
            raise
        # The document must be a mapping; AssertionError is handled below.
        assert type(run_dict) is dict
    except (AssertionError, yaml.parser.ParserError) as err:
        print(err)
        # Also accept the common '.yml' suffix before hinting at a wrong
        # file type; str() guards against path-like input.
        if not str(yaml_fn).lower().endswith((".yaml", ".yml")):
            print("Are you sure that you supplied a YAML file?")
        sys.exit(1)
    return run_dict
|
|
2066
|
+
|
|
2067
|
+
|
|
2068
|
+
def run():
    """Command line entry point: parse args, execute, report wall time."""
    start_time = datetime.datetime.now()
    args = parse_args(sys.argv[1:])

    # Defaults, used when no YAML input file was supplied.
    run_dict = {}
    yaml_dir = Path(".")

    if args.yaml:
        run_dict = load_run_dict(args.yaml)
        yaml_dir = Path(os.path.abspath(args.yaml)).parent
    elif args.bibtex:
        print_bibtex()
        return

    run_result = run_from_dict(
        run_dict,
        cwd=yaml_dir,
        set_defaults=True,
        yaml_fn=args.yaml,
        cp=args.cp,
        scheduler=args.scheduler,
        clean=args.clean,
        fclean=args.fclean,
        version=args.version,
        restart=args.restart,
    )

    end_time = datetime.datetime.now()
    duration = end_time - start_time
    # Only keep hh:mm:ss, dropping fractional seconds.
    duration_hms = str(duration).split(".")[0]
    print(f"pysisyphus run took {duration_hms} h.")

    return 0
|
|
2103
|
+
|
|
2104
|
+
|
|
2105
|
+
# Allow executing this module directly as a script.
if __name__ == "__main__":
    run()
|