nomad-parser-plugins-workflow 1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nomad_parser_plugins_workflow-1.0.dist-info/LICENSE +202 -0
- nomad_parser_plugins_workflow-1.0.dist-info/METADATA +319 -0
- nomad_parser_plugins_workflow-1.0.dist-info/RECORD +58 -0
- nomad_parser_plugins_workflow-1.0.dist-info/WHEEL +5 -0
- nomad_parser_plugins_workflow-1.0.dist-info/entry_points.txt +11 -0
- nomad_parser_plugins_workflow-1.0.dist-info/top_level.txt +1 -0
- workflowparsers/__init__.py +314 -0
- workflowparsers/aflow/__init__.py +19 -0
- workflowparsers/aflow/__main__.py +31 -0
- workflowparsers/aflow/metainfo/__init__.py +19 -0
- workflowparsers/aflow/metainfo/aflow.py +1240 -0
- workflowparsers/aflow/parser.py +741 -0
- workflowparsers/asr/__init__.py +19 -0
- workflowparsers/asr/__main__.py +31 -0
- workflowparsers/asr/metainfo/__init__.py +19 -0
- workflowparsers/asr/metainfo/asr.py +306 -0
- workflowparsers/asr/parser.py +266 -0
- workflowparsers/atomate/__init__.py +19 -0
- workflowparsers/atomate/__main__.py +31 -0
- workflowparsers/atomate/metainfo/__init__.py +19 -0
- workflowparsers/atomate/metainfo/atomate.py +395 -0
- workflowparsers/atomate/parser.py +357 -0
- workflowparsers/elastic/__init__.py +19 -0
- workflowparsers/elastic/__main__.py +31 -0
- workflowparsers/elastic/metainfo/__init__.py +19 -0
- workflowparsers/elastic/metainfo/elastic.py +364 -0
- workflowparsers/elastic/parser.py +798 -0
- workflowparsers/fhivibes/__init__.py +19 -0
- workflowparsers/fhivibes/__main__.py +31 -0
- workflowparsers/fhivibes/metainfo/__init__.py +19 -0
- workflowparsers/fhivibes/metainfo/fhi_vibes.py +898 -0
- workflowparsers/fhivibes/parser.py +566 -0
- workflowparsers/lobster/__init__.py +19 -0
- workflowparsers/lobster/__main__.py +31 -0
- workflowparsers/lobster/metainfo/__init__.py +19 -0
- workflowparsers/lobster/metainfo/lobster.py +446 -0
- workflowparsers/lobster/parser.py +618 -0
- workflowparsers/phonopy/__init__.py +19 -0
- workflowparsers/phonopy/__main__.py +31 -0
- workflowparsers/phonopy/calculator.py +260 -0
- workflowparsers/phonopy/metainfo/__init__.py +19 -0
- workflowparsers/phonopy/metainfo/phonopy.py +83 -0
- workflowparsers/phonopy/parser.py +583 -0
- workflowparsers/quantum_espresso_epw/__init__.py +19 -0
- workflowparsers/quantum_espresso_epw/__main__.py +31 -0
- workflowparsers/quantum_espresso_epw/metainfo/__init__.py +19 -0
- workflowparsers/quantum_espresso_epw/metainfo/quantum_espresso_epw.py +579 -0
- workflowparsers/quantum_espresso_epw/parser.py +583 -0
- workflowparsers/quantum_espresso_phonon/__init__.py +19 -0
- workflowparsers/quantum_espresso_phonon/__main__.py +31 -0
- workflowparsers/quantum_espresso_phonon/metainfo/__init__.py +19 -0
- workflowparsers/quantum_espresso_phonon/metainfo/quantum_espresso_phonon.py +389 -0
- workflowparsers/quantum_espresso_phonon/parser.py +483 -0
- workflowparsers/quantum_espresso_xspectra/__init__.py +19 -0
- workflowparsers/quantum_espresso_xspectra/__main__.py +31 -0
- workflowparsers/quantum_espresso_xspectra/metainfo/__init__.py +19 -0
- workflowparsers/quantum_espresso_xspectra/metainfo/quantum_espresso_xspectra.py +290 -0
- workflowparsers/quantum_espresso_xspectra/parser.py +586 -0
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
from .parser import ASRParser, asr_to_archives
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
import sys
|
|
20
|
+
import json
|
|
21
|
+
import logging
|
|
22
|
+
|
|
23
|
+
from nomad.utils import configure_logging
|
|
24
|
+
from nomad.datamodel import EntryArchive
|
|
25
|
+
from workflowparsers.asr import ASRParser
|
|
26
|
+
|
|
27
|
+
if __name__ == '__main__':
    # Command-line entry point: parse the mainfile given as the first
    # argument with ASRParser and emit the resulting archive as JSON.
    configure_logging(console_log_level=logging.DEBUG)
    entry_archive = EntryArchive()
    parser = ASRParser()
    parser.parse(sys.argv[1], entry_archive, logging)
    # json.dumps + write produces the same bytes as json.dump(..., sys.stdout)
    sys.stdout.write(json.dumps(entry_archive.m_to_dict(), indent=2))
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
from . import asr
|
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
import numpy as np # pylint: disable=unused-import
|
|
2
|
+
import typing # pylint: disable=unused-import
|
|
3
|
+
from nomad.metainfo import ( # pylint: disable=unused-import
|
|
4
|
+
MSection,
|
|
5
|
+
MCategory,
|
|
6
|
+
Category,
|
|
7
|
+
Package,
|
|
8
|
+
Quantity,
|
|
9
|
+
Section,
|
|
10
|
+
SubSection,
|
|
11
|
+
SectionProxy,
|
|
12
|
+
Reference,
|
|
13
|
+
MEnum,
|
|
14
|
+
JSON,
|
|
15
|
+
)
|
|
16
|
+
import runschema.run # pylint: disable=unused-import
|
|
17
|
+
import runschema.calculation # pylint: disable=unused-import
|
|
18
|
+
import runschema.method # pylint: disable=unused-import
|
|
19
|
+
import runschema.system # pylint: disable=unused-import
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
m_package = Package(name='None', description='None')
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class x_asr_parameters(MSection):
    """Code-specific container for the parameters of an ASR run
    specification. Quantities mirror attributes of the ASR record's
    ``run_specification.parameters`` (set generically by the parser);
    the generated descriptions are intentionally empty.
    """

    m_def = Section(validate=False)

    x_asr_tmp_atoms = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_tmp_atoms_file = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_fmax = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )

    x_asr_calculator = Quantity(
        type=JSON,
        shape=[],
        description="""
        """,
    )

    x_asr_magstatecalculator = Quantity(
        type=JSON,
        shape=[],
        description="""
        """,
    )

    x_asr_rc = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )

    x_asr_d = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )

    x_asr_fsname = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    # the only non-scalar parameter: a fixed length-3 vector
    x_asr_sc = Quantity(
        type=np.float64,
        shape=[3],
        description="""
        """,
    )

    x_asr_dist_max = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
class x_asr_code(MSection):
    """One code entry of an ASR run specification: package name, version
    and git hash, copied verbatim from the ASR record by the parser.
    """

    m_def = Section(validate=False)

    x_asr_package = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_version = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_git_hash = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
class x_asr_codes(MSection):
    """Wrapper section holding the repeating list of code entries
    (``x_asr_code``) recorded in an ASR run specification.
    """

    m_def = Section(validate=False)

    x_asr_code = SubSection(sub_section=SectionProxy('x_asr_code'), repeats=True)
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
class x_asr_run_specification(MSection):
    """Code-specific section for an ASR record's run specification:
    recipe name, version, uid, plus the input parameters and code list.
    """

    m_def = Section(validate=False)

    x_asr_name = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_version = Quantity(
        type=np.int32,
        shape=[],
        description="""
        """,
    )

    x_asr_uid = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_parameters = SubSection(
        sub_section=SectionProxy('x_asr_parameters'), repeats=False
    )

    x_asr_codes = SubSection(sub_section=SectionProxy('x_asr_codes'), repeats=False)
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
class x_asr_resources(MSection):
    """Execution resources of an ASR record: wall-clock start/end/duration
    (stored as plain floats — presumably epoch seconds; the parser copies
    the ASR record's resource attributes generically) and core count.
    """

    m_def = Section(validate=False)

    x_asr_execution_start = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )

    x_asr_execution_end = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )

    x_asr_execution_duration = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )

    x_asr_ncores = Quantity(
        type=np.int32,
        shape=[],
        description="""
        """,
    )
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
class x_asr_dependency(MSection):
    """One dependency of an ASR record, identified by the uid and revision
    of the record it depends on.
    """

    m_def = Section(validate=False)

    x_asr_uid = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_revision = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
class x_asr_dependencies(MSection):
    """Wrapper section holding the repeating list of dependencies
    (``x_asr_dependency``) of an ASR record.
    """

    m_def = Section(validate=False)

    x_asr_dependency = SubSection(
        sub_section=SectionProxy('x_asr_dependency'), repeats=True
    )
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
class x_asr_metadata(MSection):
    """Metadata of an ASR record: working directory plus creation and
    modification times. The parser stores the times as seconds since the
    Unix epoch (it subtracts datetime(1970, 1, 1) from the record's
    datetimes), hence the float type here.
    """

    m_def = Section(validate=False)

    x_asr_directory = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_created = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )

    x_asr_modified = Quantity(
        type=np.float64,
        shape=[],
        description="""
        """,
    )
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
class Run(runschema.run.Run):
    """Extension of the common ``runschema.run.Run`` section with ASR
    code-specific quantities. The scalar quantities mirror the top-level
    attributes of an ASR record (set generically by the parser); the
    sub-sections hold the structured parts of the record.
    """

    m_def = Section(validate=False, extends_base_section=True)

    x_asr_history = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_name = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_tags = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_revision = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_uid = Quantity(
        type=str,
        shape=[],
        description="""
        """,
    )

    x_asr_version = Quantity(
        type=np.int32,
        shape=[],
        description="""
        """,
    )

    x_asr_resources = SubSection(
        sub_section=SectionProxy('x_asr_resources'), repeats=False
    )

    x_asr_dependencies = SubSection(
        sub_section=SectionProxy('x_asr_dependencies'), repeats=False
    )

    x_asr_metadata = SubSection(
        sub_section=SectionProxy('x_asr_metadata'), repeats=False
    )

    x_asr_run_specification = SubSection(
        sub_section=SectionProxy('x_asr_run_specification'), repeats=False
    )
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
# Finalize the package: resolves SectionProxy references and registers
# all section definitions declared above with the metainfo system.
m_package.__init_metainfo__()
|
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
import os
|
|
20
|
+
import json
|
|
21
|
+
import numpy as np
|
|
22
|
+
from typing import List
|
|
23
|
+
import datetime
|
|
24
|
+
|
|
25
|
+
try:
|
|
26
|
+
import asr
|
|
27
|
+
from asr.core.cache import get_cache # pylint: disable=E0611,E0401
|
|
28
|
+
from asr.core.record import Record # pylint: disable=E0611,E0401
|
|
29
|
+
except Exception:
|
|
30
|
+
pass
|
|
31
|
+
|
|
32
|
+
from nomad.units import ureg
|
|
33
|
+
from nomad.datamodel import EntryArchive
|
|
34
|
+
from runschema.run import Run, Program, TimeRun
|
|
35
|
+
from runschema.system import System, Atoms
|
|
36
|
+
from runschema.calculation import (
|
|
37
|
+
Calculation,
|
|
38
|
+
BandStructure,
|
|
39
|
+
BandEnergies,
|
|
40
|
+
Energy,
|
|
41
|
+
EnergyEntry,
|
|
42
|
+
Forces,
|
|
43
|
+
ForcesEntry,
|
|
44
|
+
Stress,
|
|
45
|
+
StressEntry,
|
|
46
|
+
)
|
|
47
|
+
from simulationworkflowschema import GeometryOptimization, Phonon
|
|
48
|
+
from .metainfo.asr import (
|
|
49
|
+
x_asr_resources,
|
|
50
|
+
x_asr_metadata,
|
|
51
|
+
x_asr_run_specification,
|
|
52
|
+
x_asr_parameters,
|
|
53
|
+
x_asr_code,
|
|
54
|
+
x_asr_codes,
|
|
55
|
+
x_asr_dependencies,
|
|
56
|
+
x_asr_dependency,
|
|
57
|
+
)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class ASRRecord:
    """Converts an ASR cache record into a NOMAD :class:`EntryArchive`.

    Usage: assign a record via the ``record`` property (or constructor) and
    read the converted result from the ``archive`` property; conversion is
    lazy and cached until a new record is assigned.
    """

    def __init__(self, record=None):
        self._record = record
        # BUGFIX: _archive was never initialized here, so the first access
        # to the `archive` property raised AttributeError.
        self._archive = None
        self._converted = False

    @property
    def archive(self):
        """The converted archive; triggers conversion on first access."""
        if self._archive is None:
            self._archive = EntryArchive()
        if not self._converted:
            self.to_archive()
        return self._archive

    @property
    def record(self):
        return self._record

    @record.setter
    def record(self, value):
        # Assigning a new record invalidates the cached archive.
        self._record = value
        self._archive = None
        self._converted = False

    def _parse_system(self, atoms):
        """Append a System section built from an ASE-like atoms object.

        Positions/cell are assumed to be in angstrom (ASE convention);
        periodicity is always reported as fully periodic.
        """
        system = System()
        self.archive.run[-1].system.append(system)
        system.atoms = Atoms(
            positions=atoms.get_positions() * ureg.angstrom,
            lattice_vectors=atoms.get_cell().array * ureg.angstrom,
            periodic=[True, True, True],
            labels=atoms.get_chemical_symbols(),
        )

    def _parse_c2db_relax(self):
        """Parse an 'asr.c2db.relax' result: trajectory images, final
        energy/forces and the Voigt-ordered stress as a 3x3 tensor.
        """
        result = self.record.result
        for image in result.images:
            self._parse_system(image)
        self.archive.workflow2 = GeometryOptimization()

        calc = Calculation()
        self._archive.run[-1].calculation.append(calc)
        calc.system_ref = self._archive.run[-1].system[-1]
        calc.energy = Energy(total=EnergyEntry(value=result.etot * ureg.eV))
        calc.forces = Forces(
            total=ForcesEntry(value=result.forces * ureg.eV / ureg.angstrom)
        )
        # result.stress is in Voigt notation [xx, yy, zz, yz? xy?] — the
        # original mapping below (3->xy, 4->xz, 5->yz) is preserved as-is.
        stress = np.zeros((3, 3))
        stress[0][0] = result.stress[0]
        stress[1][1] = result.stress[1]
        stress[2][2] = result.stress[2]
        stress[0][1] = stress[1][0] = result.stress[3]
        stress[0][2] = stress[2][0] = result.stress[4]
        stress[1][2] = stress[2][1] = result.stress[5]
        calc.stress = Stress(total=StressEntry(value=stress))

    def _parse_c2db_phonopy(self):
        """Parse an 'asr.c2db.phonopy' result into a phonon band structure,
        splitting the band path into segments at high-symmetry points.
        """
        self.archive.workflow2 = Phonon()

        bands = self.record.result.data.get('omega_kl')

        path = self.record.result.data.get('path')
        hisym_kpts = [list(p) for p in path.special_points.values()]
        labels = list(path.special_points.keys())
        endpoints = []
        calc = Calculation()
        self._archive.run[-1].calculation.append(calc)
        bandstructure = BandStructure()
        calc.band_structure_phonon.append(bandstructure)
        for i, qpoint in enumerate(path.kpts):
            if list(qpoint) in hisym_kpts:
                endpoints.append(i)
                if len(endpoints) < 2:
                    continue
                sec_segment = BandEnergies()
                bandstructure.segment.append(sec_segment)
                energies = bands[endpoints[0] : endpoints[1] + 1]
                # prepend a spin-channel axis of size 1
                sec_segment.energies = np.reshape(energies, (1, *np.shape(energies)))
                sec_segment.kpoints = path.kpts[endpoints[0] : endpoints[1] + 1]
                sec_segment.endpoints_labels = [
                    labels[hisym_kpts.index(list(path.kpts[i]))] for i in endpoints
                ]
                # current point starts the next segment
                endpoints = [i]

    def _parse_run(self):
        """Create the Run section and fill the code-specific (x_asr_*)
        metadata generically from the record's attribute dictionaries.
        """
        run = Run()
        self._archive.run.append(run)
        run.program = Program(name='ASR', version=asr.__version__)

        if self.record.resources is not None:
            run.time_run = TimeRun(
                date_start=self.record.resources.execution_start,
                date_end=self.record.resources.execution_end,
            )
            resources = x_asr_resources()
            run.x_asr_resources.append(resources)
            # copy whatever scalar attributes the record exposes; unknown
            # keys are silently skipped (best-effort mapping)
            for key, val in self.record.resources.__dict__.items():
                try:
                    setattr(resources, 'x_asr_%s' % key, val)
                except Exception:
                    pass

        if self.record.metadata is not None:
            metadata = x_asr_metadata()
            run.x_asr_metadata.append(metadata)
            # store timestamps as seconds since the Unix epoch
            metadata.x_asr_created = (
                self.record.metadata.created - datetime.datetime(1970, 1, 1)
            ).total_seconds()
            metadata.x_asr_modified = (
                self.record.metadata.modified - datetime.datetime(1970, 1, 1)
            ).total_seconds()
            metadata.x_asr_directory = self.record.metadata.directory

        # misc: copy the record's own plain (non-object) attributes
        for key, val in self.record.__dict__.items():
            if hasattr(val, '__dict__'):
                continue
            try:
                setattr(run, 'x_asr_%s' % key, val)
            except Exception:
                pass

        if self.record.run_specification is not None:
            # parse original system info
            atoms = self.record.run_specification.parameters.atoms
            self._parse_system(atoms)
            run_spec = x_asr_run_specification()
            run.x_asr_run_specification.append(run_spec)
            for key, val in self.record.run_specification.__dict__.items():
                if hasattr(val, '__dict__'):
                    continue
                try:
                    setattr(run_spec, 'x_asr_%s' % key, val)
                except Exception:
                    pass
            parameters = x_asr_parameters()
            run_spec.x_asr_parameters.append(parameters)
            for key, val in self.record.run_specification.parameters.__dict__.items():
                if hasattr(val, '__dict__'):
                    continue
                try:
                    setattr(parameters, 'x_asr_%s' % key, val)
                except Exception:
                    pass
            codes = x_asr_codes()
            run_spec.x_asr_codes.append(codes)
            for entry in self.record.run_specification.codes.codes:
                code = x_asr_code()
                codes.x_asr_code.append(code)
                code.x_asr_package = entry.package
                code.x_asr_version = entry.version
                code.x_asr_git_hash = entry.git_hash

        if self.record.dependencies is not None:
            dependencies = x_asr_dependencies()
            run.x_asr_dependencies.append(dependencies)
            for dep in self.record.dependencies.deps:
                dependency = x_asr_dependency()
                dependencies.x_asr_dependency.append(dependency)
                dependency.x_asr_uid = dep.uid
                dependency.x_asr_revision = dep.revision

    def to_archive(self):
        """Run the conversion and return the resulting archive."""
        if self._archive is None:
            # allow direct calls to to_archive() before `archive` was read
            self._archive = EntryArchive()
        # BUGFIX: mark as converted *before* parsing. _parse_system reads
        # self.archive during conversion; with _converted still False the
        # property would re-enter to_archive() and recurse forever.
        self._converted = True
        self._parse_run()
        if self.record.name == 'asr.c2db.relax':
            self._parse_c2db_relax()
        elif self.record.name == 'asr.c2db.phonopy':
            self._parse_c2db_phonopy()
        return self._archive
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
class ASRParser:
    """Parser for pre-converted ASR archive files.

    The mainfile is expected to already be a NOMAD archive serialized as
    JSON (as written by :func:`asr_to_archives`); parsing simply loads it
    into the given archive.
    """

    def __init__(self):
        pass

    def parse(self, mainfile: str, archive: EntryArchive, logger=None):
        """Read *mainfile* as JSON and merge its contents into *archive*."""
        with open(mainfile, 'rt') as mainfile_handle:
            archive.m_update_from_dict(json.load(mainfile_handle))
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
def asr_to_archives(directory: str, recipes: List[str] = None):
    """
    Converts the asr results for the specified recipes under the given directory to the
    nomad archive format.
    """
    # record can only be fetched on the directory
    previous_dir = os.getcwd()
    try:
        os.chdir(directory)
        cache = get_cache()

        records: List[Record] = []
        if recipes is None:
            records = cache.select()
        else:
            records = [
                record for recipe in recipes for record in cache.select(name=recipe)
            ]
        converter = ASRRecord()
        for record in records:
            converter.record = record
            # one archive JSON file per record, named by its uid
            with open('archive_%s.json' % record.uid, 'w') as out:
                json.dump(converter.archive.m_to_dict(), out, indent=4)
    except Exception:
        # best-effort: conversion failures are silently ignored
        pass
    finally:
        os.chdir(previous_dir)
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
from .parser import AtomateParser
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright The NOMAD Authors.
|
|
3
|
+
#
|
|
4
|
+
# This file is part of NOMAD.
|
|
5
|
+
# See https://nomad-lab.eu for further info.
|
|
6
|
+
#
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
#
|
|
19
|
+
import sys
|
|
20
|
+
import json
|
|
21
|
+
import logging
|
|
22
|
+
|
|
23
|
+
from nomad.utils import configure_logging
|
|
24
|
+
from nomad.datamodel import EntryArchive
|
|
25
|
+
from workflowparsers.atomate import AtomateParser
|
|
26
|
+
|
|
27
|
+
if __name__ == '__main__':
    # Command-line entry point: parse the mainfile given as the first
    # argument with AtomateParser and emit the resulting archive as JSON.
    configure_logging(console_log_level=logging.DEBUG)
    entry_archive = EntryArchive()
    parser = AtomateParser()
    parser.parse(sys.argv[1], entry_archive, logging)
    # json.dumps + write produces the same bytes as json.dump(..., sys.stdout)
    sys.stdout.write(json.dumps(entry_archive.m_to_dict(), indent=2))
|