digichem-core 6.0.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- digichem/__init__.py +75 -0
- digichem/basis.py +116 -0
- digichem/config/README +3 -0
- digichem/config/__init__.py +5 -0
- digichem/config/base.py +321 -0
- digichem/config/locations.py +14 -0
- digichem/config/parse.py +90 -0
- digichem/config/util.py +117 -0
- digichem/data/README +4 -0
- digichem/data/batoms/COPYING +18 -0
- digichem/data/batoms/LICENSE +674 -0
- digichem/data/batoms/README +2 -0
- digichem/data/batoms/__init__.py +0 -0
- digichem/data/batoms/batoms-renderer.py +351 -0
- digichem/data/config/digichem.yaml +714 -0
- digichem/data/functionals.csv +15 -0
- digichem/data/solvents.csv +185 -0
- digichem/data/tachyon/COPYING.md +5 -0
- digichem/data/tachyon/LICENSE +30 -0
- digichem/data/tachyon/tachyon_LINUXAMD64 +0 -0
- digichem/data/vmd/common.tcl +468 -0
- digichem/data/vmd/generate_combined_orbital_images.tcl +70 -0
- digichem/data/vmd/generate_density_images.tcl +45 -0
- digichem/data/vmd/generate_dipole_images.tcl +68 -0
- digichem/data/vmd/generate_orbital_images.tcl +57 -0
- digichem/data/vmd/generate_spin_images.tcl +66 -0
- digichem/data/vmd/generate_structure_images.tcl +40 -0
- digichem/datas.py +14 -0
- digichem/exception/__init__.py +7 -0
- digichem/exception/base.py +133 -0
- digichem/exception/uncatchable.py +63 -0
- digichem/file/__init__.py +1 -0
- digichem/file/base.py +364 -0
- digichem/file/cube.py +284 -0
- digichem/file/fchk.py +94 -0
- digichem/file/prattle.py +277 -0
- digichem/file/types.py +97 -0
- digichem/image/__init__.py +6 -0
- digichem/image/base.py +113 -0
- digichem/image/excited_states.py +335 -0
- digichem/image/graph.py +293 -0
- digichem/image/orbitals.py +239 -0
- digichem/image/render.py +617 -0
- digichem/image/spectroscopy.py +797 -0
- digichem/image/structure.py +115 -0
- digichem/image/vmd.py +826 -0
- digichem/input/__init__.py +3 -0
- digichem/input/base.py +78 -0
- digichem/input/digichem_input.py +500 -0
- digichem/input/gaussian.py +140 -0
- digichem/log.py +179 -0
- digichem/memory.py +166 -0
- digichem/misc/__init__.py +4 -0
- digichem/misc/argparse.py +44 -0
- digichem/misc/base.py +61 -0
- digichem/misc/io.py +239 -0
- digichem/misc/layered_dict.py +285 -0
- digichem/misc/text.py +139 -0
- digichem/misc/time.py +73 -0
- digichem/parse/__init__.py +13 -0
- digichem/parse/base.py +220 -0
- digichem/parse/cclib.py +138 -0
- digichem/parse/dump.py +253 -0
- digichem/parse/gaussian.py +130 -0
- digichem/parse/orca.py +96 -0
- digichem/parse/turbomole.py +201 -0
- digichem/parse/util.py +523 -0
- digichem/result/__init__.py +6 -0
- digichem/result/alignment/AA.py +114 -0
- digichem/result/alignment/AAA.py +61 -0
- digichem/result/alignment/FAP.py +148 -0
- digichem/result/alignment/__init__.py +3 -0
- digichem/result/alignment/base.py +310 -0
- digichem/result/angle.py +153 -0
- digichem/result/atom.py +742 -0
- digichem/result/base.py +258 -0
- digichem/result/dipole_moment.py +332 -0
- digichem/result/emission.py +402 -0
- digichem/result/energy.py +323 -0
- digichem/result/excited_state.py +821 -0
- digichem/result/ground_state.py +94 -0
- digichem/result/metadata.py +644 -0
- digichem/result/multi.py +98 -0
- digichem/result/nmr.py +1086 -0
- digichem/result/orbital.py +647 -0
- digichem/result/result.py +244 -0
- digichem/result/soc.py +272 -0
- digichem/result/spectroscopy.py +514 -0
- digichem/result/tdm.py +267 -0
- digichem/result/vibration.py +167 -0
- digichem/test/__init__.py +6 -0
- digichem/test/conftest.py +4 -0
- digichem/test/test_basis.py +71 -0
- digichem/test/test_calculate.py +30 -0
- digichem/test/test_config.py +78 -0
- digichem/test/test_cube.py +369 -0
- digichem/test/test_exception.py +16 -0
- digichem/test/test_file.py +104 -0
- digichem/test/test_image.py +337 -0
- digichem/test/test_input.py +64 -0
- digichem/test/test_parsing.py +79 -0
- digichem/test/test_prattle.py +36 -0
- digichem/test/test_result.py +489 -0
- digichem/test/test_translate.py +112 -0
- digichem/test/util.py +207 -0
- digichem/translate.py +591 -0
- digichem_core-6.0.0rc1.dist-info/METADATA +96 -0
- digichem_core-6.0.0rc1.dist-info/RECORD +111 -0
- digichem_core-6.0.0rc1.dist-info/WHEEL +4 -0
- digichem_core-6.0.0rc1.dist-info/licenses/COPYING.md +10 -0
- digichem_core-6.0.0rc1.dist-info/licenses/LICENSE +11 -0
digichem/misc/io.py
ADDED
@@ -0,0 +1,239 @@
import collections
import math
from pathlib import Path
import tempfile
import os
import shutil
import sys
import warnings

from digichem.datas import get_resource

def expand_path(pth):
    """
    Expand variables (both $VAR and '~') in a string.

    This function is similar to calling both os.path.expanduser() and os.path.expandvars(),
    but with some additional functionality for relative paths (which are interpreted relative
    to the digichem data directory).
    """
    pth = str(pth)

    new_pth = pth.replace("$SILICO", str(get_resource("data")))
    if new_pth != pth:
        warnings.warn("the '$SILICO' magic variable is deprecated, use '$DIGICHEM' instead", DeprecationWarning)

    pth = new_pth
    pth = pth.replace("$DIGICHEM", str(get_resource("data")))
    pth = os.path.expanduser(pth)
    pth = os.path.expandvars(pth)
    return pth

def tail(file, num_lines = 20):
    """
    Return the last n lines of a file.
    """
    last_lines = collections.deque(maxlen = num_lines)

    # We'll read through the file from the top.
    # This is probably inefficient for huge files, but it is simple to implement.
    for line in file:
        last_lines.append(line)

    return list(last_lines)


def smkdir(dir_name, max_iter = math.inf):
    """
    Safe mkdir.

    Will attempt to create the given directory. If a file with the given name already exists, a directory with the same name except with an appropriate number appended will be created instead.

    If max_iter == 1, this method acts just like a normal mkdir().

    :param dir_name: The path to a directory to try and create.
    :param max_iter: The maximum number of iterations to try and create the directory. Default is no max.
    :returns: The path to the directory actually created.
    """
    # First try and make our base directory.
    counter = 1
    dir_name = str(dir_name)
    directory = None
    while True:
        try:
            directory = Path(dir_name + " {}".format(str(counter).zfill(2)) if counter != 1 else dir_name)
            directory.mkdir(parents = True)
            break
        except FileExistsError:
            if counter < max_iter:
                counter += 1
            else:
                raise

    return directory


# TODO: Would be nice to implement this as a context manager that acts like a file (better compatibility with yaml.dump() for example).
def atomic_write(file, data):
    """
    Atomically write to a file.

    :param file: The name of the file to write to.
    :param data: Data to write to the file.
    """
    file = Path(file)
    # Open a temp file in the same directory as the real file (so we're likely to be on the same file system).
    with tempfile.NamedTemporaryFile("wt", dir = file.parent, delete = False) as temp_write:
        # Write data to the temp file.
        temp_write.write(data)
        # Force it to disk.
        temp_write.flush()
        os.fsync(temp_write)
    # Rename the temp file over the real file (overwriting it).
    os.rename(temp_write.name, file)


# Based on https://stackoverflow.com/questions/431684/how-do-i-change-the-working-directory-in-python/24176022#24176022
class cd:
    """
    Context manager for temporarily changing the working directory.
    """

    def __init__(self, directory):
        """
        Constructor for cd.

        :param directory: Path to the directory to change to.
        """
        self.new_directory = Path(directory)
        self.old_directory = None

    def __enter__(self):
        # Save our current working directory.
        self.old_directory = os.getcwd()

        # And change to the new directory.
        os.chdir(str(self.new_directory))

    def __exit__(self, exc_type, exc_value, traceback):
        # Restore our old directory.
        os.chdir(self.old_directory)


def copytree(src, dst, symlinks = False, ignore = None, copy_function = shutil.copy):
    """
    Fixed implementation of shutil.copytree that doesn't arbitrarily fail if dst already exists.

    Adapted from https://stackoverflow.com/questions/1868714/how-do-i-copy-an-entire-directory-of-files-into-an-existing-directory-using-pyth
    """
    if not os.path.exists(dst):
        os.makedirs(dst)
    for item in os.listdir(src):
        s = os.path.join(src, item)
        d = os.path.join(dst, item)
        if os.path.isdir(s):
            copytree(s, d, symlinks = symlinks, ignore = ignore, copy_function = copy_function)
        else:
            copy_function(s, d)

def copy_contents(src, dst, copy_function = shutil.copy):
    """
    Copy the top-level contents of one directory to another.
    """
    Path(dst).mkdir(parents = True, exist_ok = True)
    for child in Path(src).iterdir():
        if child.is_file():
            copy_function(child, Path(dst, child.name))


def rmtree(path, *args, **kwargs):
    """
    Fixed implementation of shutil.rmtree that doesn't arbitrarily require the given path to be a directory.
    """
    try:
        os.remove(path)

    except IsADirectoryError:
        shutil.rmtree(path, *args, **kwargs)


class Multi_file_wrapper():
    """
    A class that can be used to transparently handle both 'normal' files and stdin/stdout.
    """

    def __init__(self, file, mode = "r", *args, **kwargs):
        # If the filename is the special symbol '-' (a dash), then 'open' stdin/stdout as appropriate.
        if file == '-':
            if 'w' in mode or 'a' in mode:
                # We are writing.
                if 'b' in mode:
                    # We are a binary format, use a different stdout that accepts binary output.
                    object.__setattr__(self, 'file', sys.stdout.buffer)
                    #self.file = sys.stdout.buffer
                else:
                    object.__setattr__(self, 'file', sys.stdout)
                    #self.file = sys.stdout
            elif '+' in mode:
                # We are updating (not allowed).
                raise ValueError("Invalid mode specified '{}', updating is not valid for stdin/stdout".format(mode))
            else:
                # We are reading.
                if 'b' in mode:
                    object.__setattr__(self, 'file', sys.stdin.buffer)
                    #self.file = sys.stdin.buffer
                else:
                    object.__setattr__(self, 'file', sys.stdin)
                    #self.file = sys.stdin
            # We don't close stdin/stdout.
            object.__setattr__(self, 'should_close_file', False)
            #self.should_close_file = False
        else:
            # Normal file.
            object.__setattr__(self, 'file', open(file, mode, *args, **kwargs))
            object.__setattr__(self, 'should_close_file', True)
            #self.file = open(file, mode, *args, **kwargs)
            #self.should_close_file = True

    def __getattr__(self, name):
        """
        Magic method so we can pretend to be a real file.
        """
        return getattr(self.file, name)

    def __setattr__(self, name, value):
        """
        Magic method so we can pretend to be a real file.
        """
        return setattr(self.file, name, value)

    def __delattr__(self, name):
        """
        Magic method so we can pretend to be a real file.
        """
        if name in self.__dict__:
            object.__delattr__(self, name)
        else:
            return delattr(self.file, name)

    def close(self):
        if self.should_close_file:
            self.file.close()
        else:
            self.file.flush()

    def __enter__(self):
        """
        Magic function for the 'with' keyword.
        """
        return self

    def __exit__(self, etype, value, traceback):
        """
        Magic function for the 'with' keyword, called at the end of the block.
        """
        # Close our file.
        self.close()
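The module above bundles small filesystem helpers: variable expansion, safe directory creation, atomic writes, a working-directory context manager, forgiving copy/remove wrappers, and a file wrapper that treats '-' as stdin/stdout. The following is a minimal usage sketch; the paths and data are illustrative only and are not part of the package.

    from digichem.misc.io import atomic_write, cd, smkdir, Multi_file_wrapper

    # Create 'results', or 'results 02', 'results 03', ... if the name is already taken.
    output_dir = smkdir("results")

    # Temporarily work inside the new directory.
    with cd(output_dir):
        # The data only appears under the final name once it has been fully written.
        atomic_write("summary.txt", "calculation complete\n")

    # '-' maps to stdout, so the same code path can write to a file or to the console.
    with Multi_file_wrapper("-", "w") as handle:
        handle.write("done\n")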
digichem/misc/layered_dict.py
ADDED
@@ -0,0 +1,285 @@
from collections import OrderedDict

class Layered_dict(list):
    """
    An enhanced, OrderedDict-like object that remembers layers/groups of positions.

    A layered dict is essentially a wrapper around a list of ordered dicts. It exposes methods found both in list and in dict. Most methods are similar to those found in list (including setting an index), but iteration operates over each dict in turn.

    New data can either be merged into the last OrderedDict (the same as updating a normal OrderedDict object),
    or appended to the end of the list, creating a new layer/group.

    Layered_dicts are useful for preserving order in cases where you might not know the dict keys in some instances of a layer. Layered_dicts can later be normalised; comparing the order of all keys from all layers (and intelligently skipping layers that contain None) to produce a normal OrderedDict.

    Layered_dicts can also be nested to support even more complex cases.

    Note that Layered_dict extends from list (and not from dict, at least not yet), so be careful when type checking (isinstance(my_layered_dict, dict) == False ...).
    """

    def __init__(self, *iterables, **key_values):
        """
        Constructor for Layered_dict objects.

        Layered_dicts can be initialised from a single iterable in the same manner as normal dicts. Multiple iterables can also be given as multiple positional args; each iterable will be stored as its own layer. Alternatively, key/value pairs can be given directly as **keyword args (but note that this form is incompatible with the positional args form because the ordering of the layers would be ambiguous).

        If no args are given, an empty Layered_dict is created.

        :param *iterables: An optional number of iterables to add as new layers.
        :param **key_values: Alternatively, an optional number of keyword arguments which are taken to be key=value pairs to construct a single dict layer.
        """
        # Init our listy self.
        super().__init__()

        if len(iterables) > 0 and len(key_values) > 0:
            raise ValueError("The positional and keyword argument forms are mutually exclusive")
        elif len(iterables) > 0:
            self.extend(iterables)
        elif len(key_values) > 0:
            self.append(key_values)
        else:
            # We'll always have at least one OrderedDict object.
            self.append({})

    @property
    def active_dict(self):
        """
        Return the current active dict object.

        The active dict is merely the last dict appended, so use len() to get its index. This method exists only for convenience.
        """
        return self[-1]

    def __setitem__(self, key, value):
        """
        Set the value of one of the layers in this Layered_dict.

        Layered_dicts can only contain OrderedDicts, Layered_dicts and None as values. Any other values will first be used to construct a new OrderedDict object.
        """
        # First do some type checking.
        if not isinstance(value, (OrderedDict, Layered_dict, type(None))):
            value = OrderedDict(value)

        # Now insert.
        list.__setitem__(self, key, value)

        # If we inserted None, append a new empty OrderedDict so there is still an active layer.
        if value is None:
            self.append(OrderedDict())

    def __delitem__(self, *args, **kwargs):
        """
        Delete one of the layers of this Layered_dict.

        Note that it is impossible to delete all layers; a new empty OrderedDict layer will be created if no layers remain.
        """
        list.__delitem__(self, *args, **kwargs)
        if len(self) == 0:
            self.append(OrderedDict())

    def get(self, layer_index, key, key_index = None):
        """
        Get a value from this Layered_dict.

        :raises IndexError: If one of the given layer indices is not contained in this Layered_dict.
        :raises KeyError: If the given key is not found in the dict at the specified level.
        :param layer_index: A tuple of indices indicating the layer to get from. See items() for further explanation.
        :param key: The name of the key to get at the indicated layer.
        :param key_index: An optional constraint; if given, a KeyError exception is raised if the requested key is not at the index given by key_index.
        """
        # If layer_index contains multiple parts, we can recurse.
        if len(layer_index) > 1:
            index = layer_index[0]
            if isinstance(self[index], type(self)):
                return self[index].get(layer_index[1:], key, key_index = key_index)
            else:
                # We've been given a multi-depth layer_index, but our object at that position is not another Layered_dict.
                raise IndexError("Bad layer_index, object ({}) at index {} is not of type {}".format(type(self[index]).__name__, index, type(self).__name__))

        # We only have one index to worry about.
        index = layer_index[0]

        # Get the key from the dict.
        # But first check to make sure our object at index is not another Layered_dict.
        if isinstance(self[index], type(self)):
            # It is a Layered_dict, raise IndexError.
            raise IndexError("Bad layer_index, object ({}) at index {} is of type {}".format(type(self[index]).__name__, index, type(self).__name__))

        # Also check to make sure the dict isn't None.
        elif self[index] is None:
            raise KeyError("Cannot retrieve key '{}' at layer_index '{}'; dict is None".format(key, index))

        # Now check the given key_index is the same (if a key_index was given).
        if key_index is not None and list(self[index].keys()).index(key) != key_index:
            raise KeyError("Key/index mismatch; the key '{}' is not at requested index '{}'".format(key, key_index))

        return self[index][key]


    def items(self):
        """
        Generator to support iteration over this Layered_dict.

        Each iteration over a Layered_dict returns a 3-membered tuple of the form (layer_index, key_name, value) where:
        layer_index is a tuple of indices identifying the layer currently being iterated through. For example, items in the second layer are returned with layer_index (1,), and items in the third sub-layer of the second layer (when nesting) with (1, 2). Each element of layer_index is an index to a nested layer, so the length of layer_index is variable and depends on the degree of nesting. A Layered_dict that contains no other Layered_dict objects will have len(layer_index) == 1 for all items.
        key_name is the name of the key. Note that unlike real dicts, the same key can appear multiple times in a Layered_dict.
        value is the value of the current key in the current dict being iterated through.

        Although layered dicts can contain None as placeholder values, iteration will skip these automatically.
        """
        for layer_index, dict_layer in enumerate(self):
            # Some layers can be None placeholders; skip these.
            if dict_layer is None:
                continue

            # Other layers can be nested Layered_dict objects.
            elif isinstance(dict_layer, Layered_dict):
                for sub_path, key, value in dict_layer.items():
                    # Add our child's index hierarchy to ours.
                    index_path = [layer_index]
                    index_path.extend(sub_path)

                    yield (tuple(index_path), key, value)

            # The remaining layers are normal dicts.
            else:
                for key in dict_layer:
                    yield ((layer_index,), key, dict_layer[key])

    def update(self, *iterables, **key_values):
        """
        Update the current active dict with new values.

        The order of the keys is preserved as for normal OrderedDicts (the first insertion order is preserved).

        :param *iterables: An optional number of iterables to update with. Each iterable will be traversed in the order given and used to update the same dictionary.
        :param **key_values: Alternatively, an optional number of keyword arguments which are used to update the active dict directly.
        :return: This Layered_dict object, for convenience.
        """
        if len(iterables) > 0 and len(key_values) > 0:
            raise ValueError("The positional and keyword argument forms are mutually exclusive")
        elif len(iterables) > 0:
            # Positional arg form.
            # Add each iterable in turn.
            for iterable in iterables:
                if isinstance(iterable, type(self)):
                    # The iterable is another Layered_dict, update with flatten().
                    self.active_dict.update(iterable.flatten())
                else:
                    self.active_dict.update(iterable)
        else:
            # Keyword arg form, update from given keywords.
            self.active_dict.update(**key_values)

        return self

    def flatten(self):
        """
        Flatten this Layered_dict, returning a single OrderedDict object.

        The order of keys in the returned dict will honour the ordering of this Layered_dict object.
        If the same key is present more than once, the first instance determines the order the key appears in, but the last determines its value.

        Note that unlike iterating directly over this Layered_dict, the same key cannot appear more than once in the returned flat dict.
        """
        # The implementation here is simple, just build a new dict from each dict we contain.
        return OrderedDict({key: value for index, key, value in self.items()})

    def normalise(self, mapping):
        """
        Normalise this Layered_dict according to a mapping so that it aligns with a number of other Layered_dicts.

        A mapping describes the order in which key/values should be aligned. Although the ordering of keys is defined for each individual Layered_dict, this ordering can be ambiguous if two Layered_dicts contain different keys. Similarly, an individual Layered_dict cannot properly align None placeholder keys because it cannot know how many keys are expected. A mapping resolves this ambiguity.

        A mapping is a list of tuples of the form (layer_index, key_name), where:
        layer_index has the same meaning as in items().
        key_name is the name of the key to get (same as in items()).

        :param mapping: A list of tuples of the form (layer_index, key_name).
        :return: A list of values as dictated by mapping. Each index of the returned list is controlled by the corresponding index of mapping (and so the two lists will be of the same length). If the index in mapping is None or specifies a key not contained in this dict, None is inserted into the list.
        """
        normal_list = []

        for mapped_index in mapping:
            # If the map is None, so is the value we insert.
            mapped_value = None

            # Try and get a value from ourself.
            if mapped_index is not None:
                try:
                    mapped_value = self.get(mapped_index[0], mapped_index[1])
                except (IndexError, KeyError):
                    mapped_value = None

            # Add to the list.
            normal_list.append(mapped_value)

        # And return.
        return normal_list

    @classmethod
    def common_mapping(self, layered_dicts):
        """
        Generate a mapping from a list of Layered_dict objects, suitable for passing to normalise().

        A mapping is a list of tuples of the form (layer_index, key_name), with the same meaning as in normalise(). It is used to align the keys of multiple dicts.

        The mapping generated will contain an entry for each unique (layer_index, key_name) in all of the given layered_dicts. The order of the mapping is well defined, respecting the order of layer_index in the first instance, then the order of keys as found in the respective OrderedDict.

        The order is poorly defined, however, for non-identical keys at the same layer_index (because the ordering is ambiguous).

        :param layered_dicts: An iterable of Layered_dict objects to generate a mapping from.
        :return: A list describing the mapping.
        """
        # To get our mapping, we'll add non-duplicate (layer_index, key_name) pairs from each layered_dict to our list.
        mapping = []
        for layered_dict in layered_dicts:
            # We can discard the value part as we're only interested in the order for now.
            for index, key, value in layered_dict.items():
                # Add this (index, key) pair to mapping if not already present.
                if (index, key) not in mapping:
                    mapping.append((index, key))

        # Finally, we partially re-sort in case any layer indices got added in the wrong order.
        # We won't re-order key names though, so these will still respect their relative orders from their OrderedDicts.
        # If two layer index pairs have OrderedDicts with different keys, the order in which these two OrderedDicts will be combined is undefined (but the keys within them will remember their relative order).
        mapping.sort(key = lambda index_key: index_key[0])

        # Done.
        return mapping

    @classmethod
    def tabulate(self, layered_dicts):
        """
        Tabulate a list of layered_dicts.

        tabulate() is a convenience function, first calling common_mapping() on the list of layered_dicts followed by normalise().

        :param layered_dicts: A list of layered_dict objects to tabulate. The order of rows will be the same as the order of this list. Alternatively, a number of OrderedDicts can be included in the list, which will automatically be converted to Layered_dicts.
        :return: A tuple of (headers, table), where headers is a list of table headers (derived from the names of the keys in layered_dicts) and table is the tabulated data (each row will be of the same length, possibly padded with None).
        """
        # First, convert any OrderedDicts.
        layered_dicts = [self(current_dict) if isinstance(current_dict, OrderedDict) else current_dict for current_dict in layered_dicts]

        # Next, get our mapping.
        mapping = self.common_mapping(layered_dicts)

        # Now get our table.
        table = [layered_dict.normalise(mapping) for layered_dict in layered_dicts]

        # Return both.
        return ([header[1] for header in mapping], table)
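Layered_dict is mainly used to align records that only partially share keys, for example when tabulating results from several calculations. The sketch below illustrates tabulate(); the keys and values are made-up examples, not data shipped with the package.

    from digichem.misc.layered_dict import Layered_dict

    # Two records that share some, but not all, keys.
    first = Layered_dict(name = "benzene", energy = -230.7)
    second = Layered_dict(name = "toluene", dipole = 0.36)

    # tabulate() aligns both records against a common key ordering,
    # padding missing values with None.
    headers, table = Layered_dict.tabulate([first, second])
    # headers == ['name', 'energy', 'dipole']
    # table   == [['benzene', -230.7, None], ['toluene', None, 0.36]]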
digichem/misc/text.py
ADDED
@@ -0,0 +1,139 @@
from itertools import groupby

def andjoin(listlike):
    """
    Join a list of string-like items with commas, except for the last item, which is joined with 'and'.

    For example:
    >>> andjoin(["item1"])
    "item1"
    >>> andjoin(["item1", "item2"])
    "item1 and item2"
    >>> andjoin(["item1", "item2", "item3"])
    "item1, item2 and item3"
    """
    # Join all but the last item with commas.
    joined = ", ".join([str(item) for item in listlike[:-1]])

    if len(listlike) > 1:
        joined += " and " + str(listlike[-1])
    elif len(listlike) == 1:
        joined = str(listlike[0])

    return joined

def text_integer(number):
    """
    Convert an integer to a text form suitable for inline inclusion in the body of some text.

    If the integer is 0 <= x <= 9, the name of the number ("zero", "one", "two" etc) is returned, otherwise the value ("10", "11", "12" etc) is returned.
    """
    if number == 0:
        return "zero"
    elif number == 1:
        return "one"
    elif number == 2:
        return "two"
    elif number == 3:
        return "three"
    elif number == 4:
        return "four"
    elif number == 5:
        return "five"
    elif number == 6:
        return "six"
    elif number == 7:
        return "seven"
    elif number == 8:
        return "eight"
    elif number == 9:
        return "nine"
    else:
        return str(number)

def text_float(number, decimals = 2):
    """
    Convert a float to a text form suitable for inline inclusion in the body of some text.

    If the rounded number would appear to be 0.0, then the next smallest number that can be displayed with the given number of decimal points is returned instead, along with a less-than sign '<'.
    For example:
    >>> text_float(0.567)
    "0.57"
    >>> text_float(0.001)
    "< 0.01"
    >>> text_float(0.007)
    "0.01"
    >>> text_float(0.0)
    "0.00"
    """
    if number == 0 or round(number, decimals) != 0:
        return "{:.{}f}".format(number, decimals)

    else:
        # The number is too small to display at this precision,
        # show that the number is not actually zero but is close.
        return "< {}".format(1 / 10 ** decimals)


def isare(listlike):
    """
    Return 'is' if the given list contains exactly one item, 'are' otherwise.
    """
    if len(listlike) == 1:
        return "is"
    else:
        return "are"

def werewas(listlike):
    """
    Return 'was' if the given list contains exactly one item, 'were' otherwise.
    """
    if len(listlike) == 1:
        return "was"
    else:
        return "were"

def ordinal_suffix(number):
    """
    Returns the suffix of the ordinal (st, nd, rd or th) of a number.
    """
    last_digit = abs(int(number)) % 10
    last_two = abs(int(number)) % 100
    if last_two == 11 or last_two == 12 or last_two == 13:
        return "th"
    elif last_digit == 1:
        return "st"
    elif last_digit == 2:
        return "nd"
    elif last_digit == 3:
        return "rd"
    else:
        return "th"


def text_join(index, total, second_join = None):
    """
    Return the separator to append after the item at position 'index' when writing a list of 'total' items as prose.

    The last item is followed by a full stop, the second-to-last by ' and', and all other items by a comma. If second_join is given and the first item is not also the last, the first item is followed by a comma and second_join.
    """
    if index == total - 1:
        # The last item, full stop.
        return "."
    elif second_join is not None and index == 0:
        return ", {}".format(second_join)
    elif index == total - 2:
        # Second to last item, and.
        return " and"
    else:
        # Other item, comma.
        return ","


def listjoin(numbers):
    """
    Join a list of numbers into a compact text form, collapsing runs of consecutive numbers into ranges.

    For example, [1, 2, 3, 7] becomes "1-3 and 7".
    """
    sorted_numbers = sorted(numbers)
    joined = []
    for keys, group in groupby(enumerate(sorted_numbers), lambda i: i[0] - i[1]):
        group = [number for index, number in group]
        if len(group) == 1:
            joined.append("{}".format(group[0]))
        else:
            joined.append("{}-{}".format(group[0], group[-1]))

    return andjoin(joined)
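The text helpers above are small formatting utilities used when generating report prose. A brief illustrative sketch follows; the inputs are arbitrary examples.

    from digichem.misc.text import andjoin, listjoin, ordinal_suffix, text_float

    andjoin(["HOMO", "LUMO", "LUMO+1"])     # 'HOMO, LUMO and LUMO+1'
    listjoin([1, 2, 3, 7])                  # '1-3 and 7'
    "{}{}".format(22, ordinal_suffix(22))   # '22nd'
    text_float(0.0004)                      # '< 0.01'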