nomad-parser-plugins-workflow 1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nomad_parser_plugins_workflow-1.0.dist-info/LICENSE +202 -0
- nomad_parser_plugins_workflow-1.0.dist-info/METADATA +319 -0
- nomad_parser_plugins_workflow-1.0.dist-info/RECORD +58 -0
- nomad_parser_plugins_workflow-1.0.dist-info/WHEEL +5 -0
- nomad_parser_plugins_workflow-1.0.dist-info/entry_points.txt +11 -0
- nomad_parser_plugins_workflow-1.0.dist-info/top_level.txt +1 -0
- workflowparsers/__init__.py +314 -0
- workflowparsers/aflow/__init__.py +19 -0
- workflowparsers/aflow/__main__.py +31 -0
- workflowparsers/aflow/metainfo/__init__.py +19 -0
- workflowparsers/aflow/metainfo/aflow.py +1240 -0
- workflowparsers/aflow/parser.py +741 -0
- workflowparsers/asr/__init__.py +19 -0
- workflowparsers/asr/__main__.py +31 -0
- workflowparsers/asr/metainfo/__init__.py +19 -0
- workflowparsers/asr/metainfo/asr.py +306 -0
- workflowparsers/asr/parser.py +266 -0
- workflowparsers/atomate/__init__.py +19 -0
- workflowparsers/atomate/__main__.py +31 -0
- workflowparsers/atomate/metainfo/__init__.py +19 -0
- workflowparsers/atomate/metainfo/atomate.py +395 -0
- workflowparsers/atomate/parser.py +357 -0
- workflowparsers/elastic/__init__.py +19 -0
- workflowparsers/elastic/__main__.py +31 -0
- workflowparsers/elastic/metainfo/__init__.py +19 -0
- workflowparsers/elastic/metainfo/elastic.py +364 -0
- workflowparsers/elastic/parser.py +798 -0
- workflowparsers/fhivibes/__init__.py +19 -0
- workflowparsers/fhivibes/__main__.py +31 -0
- workflowparsers/fhivibes/metainfo/__init__.py +19 -0
- workflowparsers/fhivibes/metainfo/fhi_vibes.py +898 -0
- workflowparsers/fhivibes/parser.py +566 -0
- workflowparsers/lobster/__init__.py +19 -0
- workflowparsers/lobster/__main__.py +31 -0
- workflowparsers/lobster/metainfo/__init__.py +19 -0
- workflowparsers/lobster/metainfo/lobster.py +446 -0
- workflowparsers/lobster/parser.py +618 -0
- workflowparsers/phonopy/__init__.py +19 -0
- workflowparsers/phonopy/__main__.py +31 -0
- workflowparsers/phonopy/calculator.py +260 -0
- workflowparsers/phonopy/metainfo/__init__.py +19 -0
- workflowparsers/phonopy/metainfo/phonopy.py +83 -0
- workflowparsers/phonopy/parser.py +583 -0
- workflowparsers/quantum_espresso_epw/__init__.py +19 -0
- workflowparsers/quantum_espresso_epw/__main__.py +31 -0
- workflowparsers/quantum_espresso_epw/metainfo/__init__.py +19 -0
- workflowparsers/quantum_espresso_epw/metainfo/quantum_espresso_epw.py +579 -0
- workflowparsers/quantum_espresso_epw/parser.py +583 -0
- workflowparsers/quantum_espresso_phonon/__init__.py +19 -0
- workflowparsers/quantum_espresso_phonon/__main__.py +31 -0
- workflowparsers/quantum_espresso_phonon/metainfo/__init__.py +19 -0
- workflowparsers/quantum_espresso_phonon/metainfo/quantum_espresso_phonon.py +389 -0
- workflowparsers/quantum_espresso_phonon/parser.py +483 -0
- workflowparsers/quantum_espresso_xspectra/__init__.py +19 -0
- workflowparsers/quantum_espresso_xspectra/__main__.py +31 -0
- workflowparsers/quantum_espresso_xspectra/metainfo/__init__.py +19 -0
- workflowparsers/quantum_espresso_xspectra/metainfo/quantum_espresso_xspectra.py +290 -0
- workflowparsers/quantum_espresso_xspectra/parser.py +586 -0
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
import os
|
|
20
|
+
import logging
|
|
21
|
+
import json
|
|
22
|
+
import numpy as np
|
|
23
|
+
|
|
24
|
+
from nomad.units import ureg
|
|
25
|
+
from simulationworkflowschema import (
|
|
26
|
+
Elastic,
|
|
27
|
+
ElasticMethod,
|
|
28
|
+
ElasticResults,
|
|
29
|
+
EquationOfState,
|
|
30
|
+
EquationOfStateMethod,
|
|
31
|
+
EquationOfStateResults,
|
|
32
|
+
Thermodynamics,
|
|
33
|
+
ThermodynamicsResults,
|
|
34
|
+
Phonon,
|
|
35
|
+
PhononMethod,
|
|
36
|
+
PhononResults,
|
|
37
|
+
)
|
|
38
|
+
from simulationworkflowschema.equation_of_state import EOSFit
|
|
39
|
+
from simulationworkflowschema.thermodynamics import Stability, Decomposition
|
|
40
|
+
from runschema.run import Run, Program
|
|
41
|
+
from runschema.system import System, Atoms
|
|
42
|
+
from runschema.method import (
|
|
43
|
+
Method,
|
|
44
|
+
DFT,
|
|
45
|
+
Electronic,
|
|
46
|
+
XCFunctional,
|
|
47
|
+
Functional,
|
|
48
|
+
BasisSet,
|
|
49
|
+
BasisSetContainer,
|
|
50
|
+
)
|
|
51
|
+
from runschema.calculation import (
|
|
52
|
+
Calculation,
|
|
53
|
+
Dos,
|
|
54
|
+
DosValues,
|
|
55
|
+
BandStructure,
|
|
56
|
+
BandEnergies,
|
|
57
|
+
)
|
|
58
|
+
from .metainfo.atomate import Composition, Symmetry
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class AtomateParser:
    """Parser for the JSON files the Materials Project publishes for its
    (atomate-driven) workflow runs.

    The entry point is ``parse``: it loads the main json file (structure,
    composition, symmetry, ...) into a ``Run`` section, then scans sibling
    ``*.json`` files in the same directory for workflow results (elasticity,
    equation of state, phonon, thermo, tasks) that belong to the same
    ``material_id`` and maps them onto ``archive.workflow2``.
    """

    def __init__(self):
        # NOTE This is not technically the atomate parser but for the json file materials
        # project produces for its workflow runs. They use atomate for workflow and the
        # emmet package for post-processing.
        # TODO extend this to read atomate output. There are probably some deviations in
        # names.
        pass

    def init_parser(self):
        """Load the main json file into ``self.data``.

        On failure an error is logged and ``self.data`` is left as an empty
        dict so the ``self.data.get(...)`` calls in ``parse`` cannot raise.
        """
        self.data = {}
        try:
            # context manager so the file handle is closed even on parse errors
            with open(self.filepath) as f:
                self.data = json.load(f)
        except Exception:
            self.logger.error('Failed to load json file.')

    def parse_elastic(self, source):
        """Map an MP elasticity document onto an ``Elastic`` workflow section.

        ``source`` may either be the ``elasticity`` sub-document itself or a
        dict containing it under the ``elasticity`` key.
        """
        workflow = Elastic(method=ElasticMethod(), results=ElasticResults())
        source = source.get('elasticity', source)
        workflow.method.energy_stress_calculator = 'VASP'
        workflow.method.calculation_method = 'stress'
        workflow.method.elastic_constants_order = source.get('order', 2)

        deformations = source.get('deformations')
        if deformations is not None:
            workflow.results.n_deformations = len(deformations)

        elastic_tensor = source.get('elastic_tensor')
        if elastic_tensor is not None:
            workflow.results.elastic_constants_matrix_second_order = (
                elastic_tensor * ureg.GPa
            )

        compliance_tensor = source.get('compliance_tensor')
        if compliance_tensor is not None:
            workflow.results.compliance_matrix_second_order = compliance_tensor * (
                1 / ureg.GPa
            )

        # scalar moduli: MP key -> results attribute, all in GPa
        moduli = {
            'g_reuss': 'shear_modulus_reuss',
            'g_voigt': 'shear_modulus_voigt',
            'g_vrh': 'shear_modulus_hill',
            'k_reuss': 'bulk_modulus_reuss',
            'k_voigt': 'bulk_modulus_voigt',
            'k_vrh': 'bulk_modulus_hill',
        }
        for key, name in moduli.items():
            if source.get(key) is not None:
                setattr(workflow.results, name, source[key] * ureg.GPa)
        if source.get('homogeneous_poisson') is not None:
            workflow.results.poisson_ratio_hill = source['homogeneous_poisson']
        self.archive.workflow2 = workflow

    def parse_eos(self, source):
        """Map an MP equation-of-state document onto an ``EquationOfState``
        workflow section, including one ``EOSFit`` per fit function."""
        workflow = EquationOfState(
            method=EquationOfStateMethod(), results=EquationOfStateResults()
        )
        if source.get('volumes') is not None:
            workflow.results.volumes = source['volumes'] * ureg.angstrom**3
        if source.get('energies') is not None:
            workflow.results.energies = source['energies'] * ureg.eV
        for fit_function, result in source.get('eos', {}).items():
            sec_eos_fit = EOSFit()
            workflow.results.eos_fit.append(sec_eos_fit)
            sec_eos_fit.function_name = fit_function
            if result.get('B') is not None:
                # B is reported in eV/A^3, not GPa
                sec_eos_fit.bulk_modulus = result['B'] * ureg.eV / ureg.angstrom**3
            if result.get('C') is not None:
                sec_eos_fit.bulk_modulus_derivative = result['C']
            if result.get('E0') is not None:
                sec_eos_fit.equilibrium_energy = result['E0'] * ureg.eV
            if result.get('V0') is not None:
                sec_eos_fit.equilibrium_volume = result['V0'] * ureg.angstrom**3
            if result.get('eos_energies') is not None:
                sec_eos_fit.fitted_energies = result['eos_energies'] * ureg.eV
        self.archive.workflow2 = workflow

    def parse_thermo(self, data):
        """Attach stability / decomposition data to the current workflow,
        creating a ``Thermodynamics`` workflow if none exists yet."""
        workflow = self.archive.workflow2
        if not workflow:
            workflow = Thermodynamics()
        if not workflow.results:
            workflow.results = ThermodynamicsResults()
        sec_stability = Stability()
        # formation energy is per atom in the MP document; scale by n_sites.
        # `or` guards against explicit null values in the json, not just
        # missing keys.
        sec_stability.formation_energy = (
            (data.get('formation_energy_per_atom') or 0)
            * (data.get('nsites') or 1)
            * ureg.eV
        )
        sec_stability.delta_formation_energy = (
            (data.get('energy_above_hull') or 0) * ureg.eV
        )
        sec_stability.is_stable = data.get('is_stable')
        for system in data.get('decomposes_to') or []:
            sec_decomposition = Decomposition()
            sec_stability.decomposition.append(sec_decomposition)
            sec_decomposition.formula = system.get('formula')
            sec_decomposition.fraction = system.get('amount')
        workflow.results.stability = sec_stability
        self.archive.workflow2 = workflow

    def parse_phonon(self, data):
        """Map MP phonon dos (``ph_dos``) and band structure (``ph_bs``)
        documents onto a ``Phonon`` workflow and the last calculation."""
        # TODO is vasp always mp calculator?
        workflow = Phonon(method=PhononMethod(), results=PhononResults())
        workflow.method.force_calculator = 'vasp'

        calculations = self.archive.run[-1].calculation
        if calculations:
            calc = calculations[-1]
        else:
            calc = Calculation()
            # fixed: the repeated sub-section is named `calculation` (as read
            # above), a previous version appended to non-existent `calculations`
            self.archive.run[-1].calculation.append(calc)

        if data.get('ph_dos') is not None:
            sec_dos = Dos()
            calc.dos_phonon.append(sec_dos)
            # frequencies are in THz; convert to energy via h
            sec_dos.energies = data['ph_dos']['frequencies'] * ureg.THz * ureg.h
            dos = data['ph_dos']['densities'] * (1 / (ureg.THz * ureg.h))
            sec_dos.total.append(DosValues(value=dos))

        if data.get('ph_bs') is not None:
            workflow.method.with_non_analytic_correction = data['ph_bs'].get('has_nac')
            sec_bs = BandStructure()
            calc.band_structure_phonon.append(sec_bs)
            # bands come as (n_bands, n_qpoints); transpose to index by qpoint
            bands = np.transpose(data['ph_bs']['bands'])
            qpoints = data['ph_bs']['qpoints']
            labels = data['ph_bs']['labels_dict']
            hisym_qpts = list(labels.values())
            labels = list(labels.keys())
            # collect pairs of high-symmetry qpoint indices and emit one band
            # segment per pair
            endpoints = []
            for i, qpoint in enumerate(qpoints):
                if qpoint in hisym_qpts:
                    endpoints.append(i)
                if len(endpoints) < 2:
                    continue
                sec_segment = BandEnergies()
                sec_bs.segment.append(sec_segment)
                energies = bands[endpoints[0] : endpoints[1] + 1]
                sec_segment.energies = (
                    np.reshape(energies, (1, *np.shape(energies))) * ureg.THz * ureg.h
                )
                sec_segment.kpoints = qpoints[endpoints[0] : endpoints[1] + 1]
                # fixed: label each endpoint from its own qpoint; previously the
                # stale loop variable `qpoint` was used, duplicating the label
                # of the segment's second endpoint for both ends.
                sec_segment.endpoints_labels = [
                    labels[hisym_qpts.index(qpoints[j])] for j in endpoints
                ]
                # NOTE(review): resetting to [] assumes MP repeats boundary
                # qpoints between consecutive segments — confirm with data.
                endpoints = []

        calc.system_ref = self.archive.run[-1].system[0]
        self.archive.workflow2 = workflow

        # TODO add eigendisplacements

    def parse_tasks(self, data):
        """Map the DFT input settings of the most recent calculation in
        ``calcs_reversed`` onto a ``Method`` section."""
        if not data['calcs_reversed']:
            return

        # map POTCAR type to libxc-style functional names
        xc_func_mapping = {'PAW_PBE': ['GGA_X_PBE', 'GGA_C_PBE']}

        sec_method = Method()
        self.archive.run[-1].method.append(sec_method)
        sec_xc_functional = XCFunctional()
        # calcs_reversed[0] is the most recent calculation
        calc_input = data['calcs_reversed'][0].get('input', {})
        for potcar_type in calc_input.get('potcar_type', []):
            for xc_functional in xc_func_mapping.get(potcar_type, []):
                # sort into exchange/correlation/hybrid by naming convention
                if '_X_' in xc_functional or xc_functional.endswith('_X'):
                    sec_xc_functional.exchange.append(Functional(name=xc_functional))
                elif '_C_' in xc_functional or xc_functional.endswith('_C'):
                    sec_xc_functional.correlation.append(Functional(name=xc_functional))
                elif 'HYB' in xc_functional:
                    sec_xc_functional.hybrid.append(Functional(name=xc_functional))
                else:
                    sec_xc_functional.contributions.append(
                        Functional(name=xc_functional)
                    )

        sec_method.dft = DFT(xc_functional=sec_xc_functional)
        sec_method.electronic = Electronic(method='DFT')

        encut = calc_input.get('incar', {}).get('ENCUT')
        prec = calc_input.get('incar', {}).get('PREC')
        if encut is not None and prec is not None:
            # VASP enlarges the basis cutoff by 30% for PREC=Accurate
            encut_enhancement = 1.3 if 'acc' in prec else 1.0
            cutoff = encut * encut_enhancement * ureg.eV
            sec_em = [
                BasisSetContainer(
                    type='plane waves',
                    scope=['wavefunction'],
                    basis_set=[
                        BasisSet(
                            type='plane waves',
                            scope=['valence'],
                            cutoff=cutoff,
                        ),
                    ],
                )
            ]
            sec_method.electrons_representation = sec_em

        if self.archive.run[-1].calculation:
            self.archive.run[-1].calculation[0].method_ref = sec_method

    def parse(self, filepath, archive, logger):
        """Parse the main MP json file at ``filepath`` into ``archive`` and
        process all matching workflow json files in the same directory.

        ``logger`` may be None, in which case a module logger is used.
        """
        self.filepath = os.path.abspath(filepath)
        self.archive = archive
        self.logger = logger if logger is not None else logging.getLogger(__name__)
        self.maindir = os.path.dirname(self.filepath)

        self.init_parser()

        sec_run = Run()
        archive.run.append(sec_run)
        sec_run.program = Program(name='MaterialsProject', version='1.0.0')

        # TODO system should be referenced
        sec_system = None
        structure = self.data.get('structure')
        if structure is not None:
            labels = [site['label'] for site in structure.get('sites')]
            positions = [site['xyz'] for site in structure.get('sites')]
            cell = structure.get('lattice', {}).get('matrix')
            sec_system = System()
            sec_run.system.append(sec_system)
            sec_atoms = Atoms()
            sec_system.atoms = sec_atoms
            if cell is not None:
                sec_atoms.lattice_vectors = cell * ureg.angstrom
                sec_atoms.periodic = [True, True, True]
            if positions:
                sec_atoms.positions = positions * ureg.angstrom
            if labels:
                sec_atoms.labels = labels

        # system-level MP metadata; only meaningful when a system exists
        if sec_system is not None:
            for key, val in self.data.get('composition', {}).items():
                sec_system.x_mp_composition.append(
                    Composition(x_mp_label=key, x_mp_value=val)
                )

            for key, val in self.data.get('composition_reduced', {}).items():
                sec_system.x_mp_composition_reduced.append(
                    Composition(x_mp_label=key, x_mp_value=val)
                )

            symmetry = self.data.get('symmetry')
            if symmetry is not None:
                sec_symmetry = Symmetry()
                sec_system.x_mp_symmetry.append(sec_symmetry)
                for key, val in symmetry.items():
                    # best-effort: skip keys without a matching x_mp_ quantity
                    try:
                        setattr(sec_symmetry, 'x_mp_%s' % key, val)
                    except Exception:
                        pass

            # misc
            sec_system.x_mp_elements = [
                e.get('element') if isinstance(e, dict) else e
                for e in self.data.get('elements', [])
            ]
            for key, val in self.data.items():
                # best-effort: only keys with a declared x_mp_ quantity stick
                try:
                    setattr(sec_system, 'x_mp_%s' % key, val)
                except Exception:
                    pass

        # temporary fix to go through workflow normalization
        sec_calc = Calculation()
        sec_run.calculation.append(sec_calc)
        if sec_system is not None:
            sec_calc.system_ref = sec_system

        # TODO should we use the MP api for workflow results?
        # TODO handle multiple workflow sections
        workflow_files = [
            f
            for f in os.listdir(self.maindir)
            if f.endswith('.json') and f != os.path.basename(self.filepath)
        ]
        workflow_files.sort()
        for filename in workflow_files:
            try:
                with open(os.path.join(self.maindir, filename)) as f:
                    data = json.load(f)
            except Exception:
                continue
            # make sure data matches that of system
            # TODO maybe better to simply compare filename prefix so no need to load data
            if data.get('material_id', data.get('task_id')) != self.data.get(
                'material_id'
            ):
                continue

            if 'elasticity' in data:
                self.parse_elastic(data)
            if 'eos' in data:
                self.parse_eos(data)
            if 'ph_bs' in data or 'ph_dos' in data:
                self.parse_phonon(data)
            if data.get('property_name') == 'thermo':
                self.parse_thermo(data)
            if 'calcs_reversed' in data:
                self.parse_tasks(data)
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
from .parser import ElasticParser
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
import sys
|
|
20
|
+
import json
|
|
21
|
+
import logging
|
|
22
|
+
|
|
23
|
+
from nomad.utils import configure_logging
|
|
24
|
+
from nomad.datamodel import EntryArchive
|
|
25
|
+
from workflowparsers.elastic import ElasticParser
|
|
26
|
+
|
|
27
|
+
# Standalone entry point: parse the file given as the first CLI argument and
# dump the resulting archive as JSON to stdout.
if __name__ == '__main__':
    configure_logging(console_log_level=logging.DEBUG)
    archive = EntryArchive()
    # NOTE(review): the `logging` module object is passed where a Logger is
    # expected — works as long as the parser only calls logger-style methods
    # (error/warning/...) on it; confirm against ElasticParser.parse.
    ElasticParser().parse(sys.argv[1], archive, logging)
    json.dump(archive.m_to_dict(), sys.stdout, indent=2)
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
from . import elastic
|