calphy 1.3.6__tar.gz → 1.3.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (36)
  1. {calphy-1.3.6/calphy.egg-info → calphy-1.3.8}/PKG-INFO +1 -1
  2. {calphy-1.3.6 → calphy-1.3.8}/calphy/__init__.py +1 -1
  3. {calphy-1.3.6 → calphy-1.3.8}/calphy/input.py +1 -1
  4. {calphy-1.3.6 → calphy-1.3.8}/calphy/phase_diagram.py +151 -66
  5. {calphy-1.3.6 → calphy-1.3.8}/calphy/scheduler.py +1 -1
  6. calphy-1.3.8/calphy/utils.py +108 -0
  7. {calphy-1.3.6 → calphy-1.3.8/calphy.egg-info}/PKG-INFO +1 -1
  8. {calphy-1.3.6 → calphy-1.3.8}/setup.py +1 -1
  9. calphy-1.3.6/calphy/utils.py +0 -74
  10. {calphy-1.3.6 → calphy-1.3.8}/LICENSE +0 -0
  11. {calphy-1.3.6 → calphy-1.3.8}/MANIFEST.in +0 -0
  12. {calphy-1.3.6 → calphy-1.3.8}/README.md +0 -0
  13. {calphy-1.3.6 → calphy-1.3.8}/calphy/alchemy.py +0 -0
  14. {calphy-1.3.6 → calphy-1.3.8}/calphy/clitools.py +0 -0
  15. {calphy-1.3.6 → calphy-1.3.8}/calphy/composition_transformation.py +0 -0
  16. {calphy-1.3.6 → calphy-1.3.8}/calphy/errors.py +0 -0
  17. {calphy-1.3.6 → calphy-1.3.8}/calphy/helpers.py +0 -0
  18. {calphy-1.3.6 → calphy-1.3.8}/calphy/integrators.py +0 -0
  19. {calphy-1.3.6 → calphy-1.3.8}/calphy/kernel.py +0 -0
  20. {calphy-1.3.6 → calphy-1.3.8}/calphy/liquid.py +0 -0
  21. {calphy-1.3.6 → calphy-1.3.8}/calphy/phase.py +0 -0
  22. {calphy-1.3.6 → calphy-1.3.8}/calphy/queuekernel.py +0 -0
  23. {calphy-1.3.6 → calphy-1.3.8}/calphy/routines.py +0 -0
  24. {calphy-1.3.6 → calphy-1.3.8}/calphy/solid.py +0 -0
  25. {calphy-1.3.6 → calphy-1.3.8}/calphy/splines.py +0 -0
  26. {calphy-1.3.6 → calphy-1.3.8}/calphy.egg-info/SOURCES.txt +0 -0
  27. {calphy-1.3.6 → calphy-1.3.8}/calphy.egg-info/dependency_links.txt +0 -0
  28. {calphy-1.3.6 → calphy-1.3.8}/calphy.egg-info/entry_points.txt +0 -0
  29. {calphy-1.3.6 → calphy-1.3.8}/calphy.egg-info/not-zip-safe +0 -0
  30. {calphy-1.3.6 → calphy-1.3.8}/calphy.egg-info/requires.txt +0 -0
  31. {calphy-1.3.6 → calphy-1.3.8}/calphy.egg-info/top_level.txt +0 -0
  32. {calphy-1.3.6 → calphy-1.3.8}/setup.cfg +0 -0
  33. {calphy-1.3.6 → calphy-1.3.8}/tests/test_helpers.py +0 -0
  34. {calphy-1.3.6 → calphy-1.3.8}/tests/test_integrators.py +0 -0
  35. {calphy-1.3.6 → calphy-1.3.8}/tests/test_options.py +0 -0
  36. {calphy-1.3.6 → calphy-1.3.8}/tests/test_solid_methods.py +0 -0
{calphy-1.3.6/calphy.egg-info → calphy-1.3.8}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: calphy
-Version: 1.3.6
+Version: 1.3.8
 Summary: free energy calculation for python
 Home-page: https://github.com/ICAMS/calphy
 Author: Sarath Menon, Yury Lysogorskiy, Ralf Drautz
{calphy-1.3.6 → calphy-1.3.8}/calphy/__init__.py
@@ -4,7 +4,7 @@ from calphy.solid import Solid
 from calphy.alchemy import Alchemy
 from calphy.routines import MeltingTemp
 
-__version__ = "1.3.6"
+__version__ = "1.3.8"
 
 def addtest(a,b):
     return a+b
{calphy-1.3.6 → calphy-1.3.8}/calphy/input.py
@@ -40,7 +40,7 @@ from pyscal3.core import structure_dict, element_dict, _make_crystal
 from ase.io import read, write
 import shutil
 
-__version__ = "1.3.6"
+__version__ = "1.3.8"
 
 def read_report(folder):
     """
{calphy-1.3.6 → calphy-1.3.8}/calphy/phase_diagram.py
@@ -4,47 +4,58 @@ import pandas as pd
 import matplotlib.pyplot as plt
 import warnings
 import itertools
+import math
 
 from calphy.integrators import kb
 
 from scipy.spatial import ConvexHull
 from scipy.interpolate import splrep, splev
 
-colors = ['#a6cee3','#1f78b4','#b2df8a','#33a02c','#fb9a99','#e31a1c','#fdbf6f','#ff7f00','#cab2d6','#6a3d9a','#ffff99','#b15928']
+colors = ['#a6cee3','#1f78b4','#b2df8a',
+          '#33a02c','#fb9a99','#e31a1c',
+          '#fdbf6f','#ff7f00','#cab2d6',
+          '#6a3d9a','#ffff99','#b15928']
 
-def get_free_energy_at(d, phase, comp, temp, threshold=1E-1):
-    """
-    Extract free energy at given temperature
-    """
-    tarr = np.array(d[phase]["%.2f"%comp]["temperature"])
+
+def _get_temp_arg(tarr, temp, threshold=1E-1):
+    if tarr is None:
+        return None
     arg = np.argsort(np.abs(tarr-temp))[0]
+
     th = np.abs(tarr-temp)[arg]
     if th > threshold:
-        val = None
+        arg = None
+    return arg
+
+def _is_val_ok(val):
+    if val is None:
+        return False
+    elif math.isnan(val):
+        return False
     else:
-        val = d[phase]["%.2f"%comp]["free_energy"][arg]
-    return val
-
-def calculate_configurational_entropy(x, correction=0):
-    """
-    Calculate configurational entropy
-    """
+        return True
+
+def _get_fe_at_args(arr, args):
+    fes = []
+    for count, x in enumerate(args):
+        if _is_val_ok(x):
+            fes.append(arr[count][int(x)])
+        else:
+            fes.append(None)
+    return fes
+
+def _calculate_configurational_entropy(x, correction=0):
     if correction == 0:
         s = np.array([(c*np.log(c) + (1-c)*np.log(1-c)) if 1 > c > 0 else 0 for c in x])
     else:
         arg = np.argsort(np.abs(x-correction))[0]
         left_side = x[:arg+1]
         right_side = x[arg:]
-        #print(len(left_side))
-        #print(left_side)
-        #print(len(right_side))
-        #print(right_side)
 
         if len(left_side)>0:
             left_side = left_side/left_side[-1]
             s_left = np.array([(c*np.log(c) + (1-c)*np.log(1-c)) if 1 > c > 0 else 0 for c in left_side])
-
-            #correct to zero
+
         if len(right_side)>0:
             right_side = right_side - right_side[0]
             right_side = right_side/right_side[-1]
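A quick illustration of the threshold logic in the new _get_temp_arg helper (a sketch; the function name is taken from the diff above, the numbers are made up): the index of the closest temperature is returned only when that temperature lies within `threshold` of the requested one, otherwise None.

import numpy as np
from calphy.phase_diagram import _get_temp_arg

tarr = np.array([600.0, 650.0, 700.0])   # temperature grid of one calculation
print(_get_temp_arg(tarr, 650.05))       # -> 1: closest point is within the default 0.1 threshold
print(_get_temp_arg(tarr, 675.0))        # -> None: closest point is 25 K away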
@@ -56,81 +67,122 @@ def calculate_configurational_entropy(x, correction=0):
             return s_left
         else:
             return np.concatenate((s_left, s_right[1:]))
-
-
     return -s
 
-#def get_free_energy_splines(composition, free_energy, k=3):
-#    """
-#    Create splines for free energy, and return them
-#    """
-#    return splrep(comp, fes, k=3)
-
-def get_free_energy_fit(composition, free_energy, fit_order=5):
+def _get_free_energy_fit(composition,
+    free_energy,
+    fit_order=5,
+    end_weight=3,
+    end_indices=4):
     """
     Create splines for free energy, and return them
     """
     weights = np.ones_like(free_energy)
-    weights[0:4] = 3
-    weights[-4:] = 3
+    weights[0:end_indices] = end_weight
+    weights[-end_indices:] = end_weight
     fit = np.polyfit(composition, free_energy, fit_order, w=weights)
     return fit
 
-
-def get_phase_free_energy(data, phase, temp,
+def get_phase_free_energy(df, phase, temp,
+    composition_interval=(0, 1),
     ideal_configurational_entropy=False,
     entropy_correction=0.0,
-    composition_grid=10000,
     fit_order=5,
-    plot=False,
-    composition_interval=(0, 1),
+    composition_grid=10000,
     composition_cutoff=None,
-    reset_value=1):
+    reset_value=1,
+    plot=False):
     """
-    Extract free energy for given phase
+    Get the free energy of a phase as a function of composition.
+
+    Parameters
+    ----------
+    df: Pandas dataframe
+        Dataframe consisting of values from simulation. Should contain at least columns composition, phase, `free_energy` and `temperature`.
+        `energy_free` and `temperature` should be arrays of equal length, generally an output from reversible scaling calculation.
+
+    phase: str
+        phase for which calculation is to be done. Should be present in `df`.
+
+    temp: float
+        temperature at which the free energy curves are to be calculated.
+
+    composition_interval: tuple, optional
+        If provided, this composition interval is considered. Default (0, 1)
+
+    ideal_configuration_entropy: bool, optional\
+        If True, add the ideal configurational entropy. See Notes. Default False.
+
+    entropy_correction: float, optional.
+        The composition of the ordered phase. See Notes. Default None.
+
+    fit_order: int, optional
+        Order of the polynomial fit used for fitting free energy as a function of composition. Default 5.
+
+    composition_grid: int, optional
+        Number of composition points to be used for fitting. Default 10000.
+
+    composition_cutoff: float, optional
+        term for correcting incomplete data. If two consecutive composition values are separated by more than `composition_cutoff`,
+        it is reset to `reset_value`. Default None.
+
+    reset_value: float, optional
+        see above. Default 1.
+
+    plot: bool, optional
+        If True, plot the calculated free energy curves.
+
+    Returns
+    -------
+    result_dict: dict
+        contains keys: "phase", "temperature", "composition", "free_energy", and "entropy".
+
+    Notes
+    -----
+    To be added
     """
-    comporg = list(data[phase]["composition"])
-    fes = []
-    comp = []
-
-    for c in comporg:
-        if (composition_interval[0] <= c <= composition_interval[1]):
-            f = get_free_energy_at(data, phase, float(c), temp)
-            if f is not None:
-                fes.append(f)
-                comp.append(c)
+    df_phase = df.loc[df['phase']==phase]
+    #drop Nones
+    df_phase = df_phase.sort_values(by="composition")
+    df_phase = df_phase[(df_phase['composition'] >= composition_interval[0]) & (df_phase['composition'] <= composition_interval[1])]
 
-    fes = np.array(fes)
-    comp = np.array(comp)
+    composition = df_phase['composition'].values
+    args = df_phase["temperature"].apply(_get_temp_arg, args=(temp,))
+    fes = _get_fe_at_args(df_phase["free_energy"].values, args)
 
+    #print(fes)
+    #filter out None values
+    composition = np.array([composition[count] for count, x in enumerate(fes) if x is not None])
+    fes = np.array([x for x in fes if x is not None])
+
     if (len(fes)==0) or (fes is None):
         warnings.warn("Some temperatures could not be found!")
-    else:
+    else:
         if ideal_configurational_entropy:
-            entropy_term = kb*temp*calculate_configurational_entropy(comp, correction=entropy_correction)
+            entropy_term = kb*temp*_calculate_configurational_entropy(composition,
+                correction=entropy_correction)
            fes = fes - entropy_term
         else:
            entropy_term = []
 
-        fe_fit = get_free_energy_fit(comp, fes, fit_order=fit_order)
-        compfine = np.linspace(np.min(comp), np.max(comp), composition_grid)
-
-        fe = np.polyval(fe_fit, compfine)
+        fe_fit = _get_free_energy_fit(composition, fes, fit_order=fit_order)
+        compfine = np.linspace(np.min(composition), np.max(composition), composition_grid)
 
-        #fix missing values; assign +0.01 to all values which are not within vicinity
+        #now fit on the comp grid again
+        fe = np.polyval(fe_fit, compfine)
+
         if composition_cutoff is not None:
-            #so we go along composition, see if there are points with no adjacent comp values, ignore them
-            distances = [np.min(np.abs(c-comp)) for c in compfine]
+            distances = [np.min(np.abs(c-composition)) for c in compfine]
             filters = [x for x in range(len(distances)) if distances[x] > composition_cutoff]
             fe[filters] = reset_value
-
+
         if plot:
-            plt.scatter(comp, fes, s=4, label=f'{phase}-calc.', color=colors[np.random.randint(len(colors))])
-            plt.plot(compfine, fe, label=f'{phase}-fit', color=colors[np.random.randint(len(colors))])
+            plt.scatter(composition, fes, s=4, label=f'{phase}-calc.', color="#e57373")
+            plt.plot(compfine, fe, label=f'{phase}-fit', color="#b71c1c")
             plt.xlabel("x")
             plt.ylabel("F (eV/atom)")
             plt.legend()
-            #plt.ylim(top=0.0)
+
         return {"phase":phase, "temperature": temp, "composition": compfine,
                 "free_energy": fe, "entropy": entropy_term}
     return None
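Since get_phase_free_energy now takes a pandas DataFrame rather than the nested dictionary, here is a minimal usage sketch. The column layout (composition, phase, temperature, free_energy, with array-valued temperature/free_energy cells) follows the new docstring; the phase name and numeric values are placeholders, not real calphy output.

import numpy as np
import pandas as pd
from calphy.phase_diagram import get_phase_free_energy

temps = np.linspace(600, 900, 301)                   # reversible-scaling temperature grid
rows = []
for comp in np.linspace(0, 1, 9):                    # one row per composition
    rows.append({"composition": comp,
                 "phase": "fcc",
                 "temperature": temps,               # array-valued cell
                 "free_energy": -3.5 - 1e-4*temps - 0.05*comp*(1 - comp)})  # placeholder values
df = pd.DataFrame(rows)

res = get_phase_free_energy(df, "fcc", 800, fit_order=3)
if res is not None:
    print(res["composition"].shape, res["free_energy"].shape)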
@@ -139,7 +191,14 @@ def get_phase_free_energy(data, phase, temp,
 def get_free_energy_mixing(dict_list, threshold=1E-3):
     """
     Input is a list of dictionaries
+
+    Get free energy of mixing by subtracting end member values.
+    End members are chosen automatically.
     """
+    dict_list = np.atleast_1d(dict_list)
+
+    dict_list = np.array([dct for dct in dict_list if dct is not None])
+
     #we have to get min_comp from all possible values
     min_comp = np.min([np.min(d["composition"]) for d in dict_list])
     max_comp = np.max([np.max(d["composition"]) for d in dict_list])
@@ -236,13 +295,17 @@ def get_tangent_type(dict_list, tangent, energy):
     return phase_str
 
 
-def get_common_tangents(dict_list, peak_cutoff=0.01, plot=False,
+def get_common_tangents(dict_list,
+    peak_cutoff=0.01,
+    plot=False,
     remove_self_tangents_for=[],
     color_dict=None):
     """
     Get common tangent constructions using convex hull method
     """
-    points = np.vstack([np.column_stack((d["composition"], d["free_energy_mix"])) for d in dict_list])
+    points = np.vstack([np.column_stack((d["composition"],
+        d["free_energy_mix"])) for d in dict_list])
+
     if color_dict is None:
         color_dict = create_color_list(dict_list)
 
@@ -266,6 +329,7 @@ def get_common_tangents(dict_list, peak_cutoff=0.01, plot=False,
     tangents = []
     energies = []
     tangent_colors = []
+    phases = []
 
     for d in dist:
         t = [convex_x[sargs][d], convex_x[sargs][d+1]]
@@ -276,6 +340,7 @@ def get_common_tangents(dict_list, peak_cutoff=0.01, plot=False,
         tangents.append(t)
         energies.append(e)
         tangent_colors.append(color_dict[phase_str])
+        phases.append(phase_str.split("-"))
 
     if plot:
         for d in dict_list:
@@ -283,5 +348,25 @@ def get_common_tangents(dict_list, peak_cutoff=0.01, plot=False,
         for t, e in zip(tangents, energies):
             plt.plot(t, e, color="black", ls="dashed")
         plt.ylim(top=0.0)
-    return np.array(tangents), np.array(energies), np.array(tangent_colors), color_dict
+
+    return np.array(tangents), np.array(energies), np.array(tangent_colors), color_dict, np.array(phases)
+
+
+def plot_phase_diagram(tangents, temperature,
+    colors,
+    edgecolor="#37474f",
+    linewidth=1,
+    linestyle='-'):
+
+    fig, ax = plt.subplots(edgecolor=edgecolor)
+
+    for count, x in enumerate(tangents):
+        for c, a in enumerate(x):
+            ax.plot(np.array(a),
+                [temperature[count], temperature[count]],
+                linestyle,
+                lw=linewidth,
+                c=colors[count][c],
+            )
+    return fig
 
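get_common_tangents now additionally returns the pair of phases joined by each tangent, and the new plot_phase_diagram helper stacks tangents collected over a range of temperatures into a simple phase diagram. A rough sketch of how the pieces might be wired together; the phase names and temperature grid are assumptions, df is a DataFrame in the layout of the earlier sketch (here assumed to hold both phases), and it is assumed that get_free_energy_mixing returns the input dictionaries augmented with a free_energy_mix key.

import numpy as np
from calphy.phase_diagram import (get_phase_free_energy, get_free_energy_mixing,
                                  get_common_tangents, plot_phase_diagram)

temp_grid = np.arange(600, 901, 50)
all_tangents, all_colors, color_dict = [], [], None

for T in temp_grid:
    curves = [get_phase_free_energy(df, p, T) for p in ("fcc", "liquid")]
    mixed = get_free_energy_mixing(curves)           # None entries are now dropped automatically
    tangents, energies, tangent_colors, color_dict, phases = get_common_tangents(
        mixed, color_dict=color_dict)
    all_tangents.append(tangents)
    all_colors.append(tangent_colors)

fig = plot_phase_diagram(all_tangents, temp_grid, all_colors)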
{calphy-1.3.6 → calphy-1.3.8}/calphy/scheduler.py
@@ -106,7 +106,7 @@ class SLURM:
         for (key, val) in options.items():
             if key in self.queueoptions.keys():
                 if val is not None:
-                    if val is not "":
+                    if val != "":
                         self.queueoptions[key] = val
         self.maincommand = ""
 
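The scheduler fix replaces an identity check with an equality check: `is` compares object identity rather than value, so the old test could let an empty string through whenever CPython did not reuse the string object, and Python 3.8+ also flags `is` against a literal with a SyntaxWarning. A tiny illustration (the strings are arbitrary):

val = "".join(["par", "tition"])   # built at runtime, equal in value to the literal below
print(val == "partition")          # True: values compare equal
print(val is "partition")          # usually False, and a SyntaxWarning on Python 3.8+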
calphy-1.3.8/calphy/utils.py (added)
@@ -0,0 +1,108 @@
+from calphy.input import read_inputfile
+import shutil
+from ase.io import read
+import numpy as np
+from tqdm.notebook import trange
+import os
+import re
+
+try:
+    from pyiron_atomistics import Project
+    from pyiron_atomistics.atomistics.structure.atoms import ase_to_pyiron
+except ImportError:
+    raise ImportError('This feature needs pyiron_atomistics installed')
+
+def create_job_from_inputfile(pr, inputfile, potential, kernel=None):
+    """
+    Create a pyiron job from calphy input file
+
+    Parameters
+    ----------
+    pr: pyiron Project
+        project to which the job is to be added
+
+    inputfile: string
+        calphy input file
+
+    potential: string
+        name of the potential as present in pyiron
+
+    kernel: int, optional
+        the index of the calculation to be read in. If None, all calculations are read in
+    """
+    calcs = read_inputfile(inputfile)
+
+    if kernel is None:
+        kernel = [x for x in range(len(calcs))]
+    kernel = np.atleast_1d(kernel)
+
+    for i in trange(len(kernel)):
+        calc = calcs[kernel[i]]
+        basedir = calc.create_identifier()
+        basedir_path = os.path.join(os.path.dirname(inputfile), basedir)
+
+        if os.path.exists(basedir_path):
+            #create job and copy files
+            try:
+                #make sure that the report file exists
+                reportfile = os.path.join(basedir_path, 'report.yaml')
+                if os.path.exists(reportfile):
+                    job = pr.create.job.Calphy(basedir.replace('-', '_'))
+                    job._job_id = pr.db.add_item_dict(job.db_entry())
+                    job.refresh_job_status()
+                    shutil.copytree(basedir_path, job.working_directory, dirs_exist_ok=True)
+
+                    #read in structure, assign potential
+                    Z_of_type = dict([(count+1, calc._element_dict[element]['atomic_number']) for count, element in enumerate(calc.element)])
+                    structure = read(calc.lattice, format='lammps-data', style='atomic', Z_of_type=Z_of_type)
+                    job.structure = ase_to_pyiron(structure)
+                    job.potential = potential
+                    pr.db.item_update({"ChemicalFormula": job.structure.get_chemical_formula()}, job._job_id)
+
+                    #collect output
+                    job.input.mode = calc.mode
+                    job.status.collect = True
+                    job.collect_output()
+
+                    #populate inputs
+                    calcdict = calc.model_dump()
+                    #temporary fix for comp scaling until its introduced in pyiron
+                    del calcdict['composition_scaling']
+                    job.input.update(calcdict)
+                    job._create_calc()
+                    job.to_hdf()
+                    job.status.finished = True
+                else:
+                    print(f'parsing {basedir_path} failed, skipping')
+            except:
+                #delete job
+                pr.remove_job(basedir.replace('-', '_'))
+                print(f'parsing {basedir_path} failed, skipping')
+        else:
+            print(f'could not find {basedir_path}, skipping')
+
+
+def get_free_energy(job):
+    return job["output/energy_free"]
+
+def get_temperature(job):
+    return job["output/temperature"]
+
+def get_phase(job):
+    raw = job.name.split('_')
+    if raw[-3] == 'liquid':
+        phase = 'liquid'
+    else:
+        phase = raw[1]
+    return phase
+
+def get_composition(job):
+    chem = job.project.db.get_item_by_id(job.id)['chemicalformula']
+    comp_split = re.split('(\d+)', chem)[:-1]
+    if len(comp_split) == 2:
+        if comp_split[0] == 'Al':
+            return 0.00
+        else:
+            return 1.00
+    else:
+        return int(comp_split[3])/(int(comp_split[1])+int(comp_split[3]))
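A minimal sketch of how the reworked utils module might be used to pull finished calphy runs into a pyiron project; the project name, input file path, and potential name below are placeholders.

from pyiron_atomistics import Project
from calphy.utils import create_job_from_inputfile, get_free_energy

pr = Project("calphy_import")                        # placeholder project
create_job_from_inputfile(pr,
                          inputfile="input.yaml",    # the calphy input file used for the runs
                          potential="2009--Mendelev-M-I--Al-Mg--LAMMPS--ipr1",  # any potential name known to pyiron
                          kernel=None)               # None -> import every calculation block

for job in pr.iter_jobs():
    print(job.name, get_free_energy(job))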
{calphy-1.3.6 → calphy-1.3.8/calphy.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: calphy
-Version: 1.3.6
+Version: 1.3.8
 Summary: free energy calculation for python
 Home-page: https://github.com/ICAMS/calphy
 Author: Sarath Menon, Yury Lysogorskiy, Ralf Drautz
{calphy-1.3.6 → calphy-1.3.8}/setup.py
@@ -53,7 +53,7 @@ setup(
     packages=find_packages(include=['calphy', 'calphy.*']),
     test_suite='tests',
     url='https://github.com/ICAMS/calphy',
-    version='1.3.6',
+    version='1.3.8',
     zip_safe=False,
     entry_points={
         'console_scripts': [
calphy-1.3.6/calphy/utils.py (removed)
@@ -1,74 +0,0 @@
-from calphy.input import read_inputfile
-import shutil
-from ase.io import read
-import numpy as np
-from tqdm.notebook import trange
-import os
-
-try:
-    from pyiron_atomistics import Project
-    from pyiron_atomistics.atomistics.structure.atoms import ase_to_pyiron
-except ImportError:
-    raise ImportError('This feature needs pyiron_atomistics installed')
-
-def create_job_from_inputfile(pr, inputfile, potential, kernel=None):
-    """
-    Create a pyiron job from calphy input file
-
-    Parameters
-    ----------
-    pr: pyiron Project
-        project to which the job is to be added
-
-    inputfile: string
-        calphy input file
-
-    potential: string
-        name of the potential as present in pyiron
-
-    kernel: int, optional
-        the index of the calculation to be read in. If None, all calculations are read in
-    """
-    calcs = read_inputfile(inputfile)
-
-    if kernel is None:
-        kernel = [x for x in range(len(calcs))]
-    kernel = np.atleast_1d(kernel)
-
-    for i in trange(len(kernel)):
-        calc = calcs[kernel[i]]
-        basedir = calc.create_identifier()
-        basedir_path = os.path.join(os.path.dirname(inputfile), basedir)
-
-        if os.path.exists(basedir_path):
-            #create job and copy files
-            try:
-                job = pr.create.job.Calphy(basedir.replace('-', '_'))
-                job._job_id = pr.db.add_item_dict(job.db_entry())
-                job.refresh_job_status()
-                shutil.copytree(basedir_path, job.working_directory, dirs_exist_ok=True)
-
-                #read in structure, assign potential
-                Z_of_type = dict([(count+1, calc._element_dict[element]['atomic_number']) for count, element in enumerate(calc.element)])
-                structure = read(calc.lattice, format='lammps-data', style='atomic', Z_of_type=Z_of_type)
-                job.structure = ase_to_pyiron(structure)
-                job.potential = potential
-                pr.db.item_update({"ChemicalFormula": job.structure.get_chemical_formula()}, job._job_id)
-
-                #collect output
-                job.input.mode = calc.mode
-                job.status.collect = True
-                job.collect_output()
-
-                #populate inputs
-                calcdict = calc.model_dump()
-                #temporary fix for comp scaling until its introduced in pyiron
-                del calcdict['composition_scaling']
-                job.input.update(calcdict)
-                job._create_calc()
-                job.to_hdf()
-                job.status.finished = True
-
-            except:
-                print(f'parsing {basedir_path} failed, skipping')
-        else:
-            print(f'could not find {basedir_path}, skipping')