patme 0.4.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of patme might be problematic.
- patme/__init__.py +52 -0
- patme/buildtools/__init__.py +7 -0
- patme/buildtools/rce_releasecreator.py +336 -0
- patme/buildtools/release.py +26 -0
- patme/femtools/__init__.py +5 -0
- patme/femtools/abqmsgfilechecker.py +137 -0
- patme/femtools/fecall.py +1092 -0
- patme/geometry/__init__.py +0 -0
- patme/geometry/area.py +124 -0
- patme/geometry/coordinatesystem.py +635 -0
- patme/geometry/intersect.py +284 -0
- patme/geometry/line.py +183 -0
- patme/geometry/misc.py +420 -0
- patme/geometry/plane.py +464 -0
- patme/geometry/rotate.py +244 -0
- patme/geometry/scale.py +152 -0
- patme/geometry/shape2d.py +50 -0
- patme/geometry/transformations.py +1831 -0
- patme/geometry/translate.py +139 -0
- patme/mechanics/__init__.py +4 -0
- patme/mechanics/loads.py +435 -0
- patme/mechanics/material.py +1260 -0
- patme/service/__init__.py +7 -0
- patme/service/decorators.py +85 -0
- patme/service/duration.py +96 -0
- patme/service/exceptionhook.py +104 -0
- patme/service/exceptions.py +36 -0
- patme/service/io/__init__.py +3 -0
- patme/service/io/basewriter.py +122 -0
- patme/service/logger.py +375 -0
- patme/service/mathutils.py +108 -0
- patme/service/misc.py +71 -0
- patme/service/moveimports.py +217 -0
- patme/service/stringutils.py +419 -0
- patme/service/systemutils.py +290 -0
- patme/sshtools/__init__.py +3 -0
- patme/sshtools/cara.py +435 -0
- patme/sshtools/clustercaller.py +420 -0
- patme/sshtools/facluster.py +350 -0
- patme/sshtools/sshcall.py +168 -0
- patme-0.4.4.dist-info/LICENSE +21 -0
- patme-0.4.4.dist-info/LICENSES/MIT.txt +9 -0
- patme-0.4.4.dist-info/METADATA +168 -0
- patme-0.4.4.dist-info/RECORD +46 -0
- patme-0.4.4.dist-info/WHEEL +4 -0
- patme-0.4.4.dist-info/entry_points.txt +3 -0
patme/service/logger.py
ADDED
@@ -0,0 +1,375 @@
# Copyright (C) 2013 Deutsches Zentrum fuer Luft- und Raumfahrt(DLR, German Aerospace Center) <www.dlr.de>
# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR)
#
# SPDX-License-Identifier: MIT

"""
This logger extends the python logging with several features.

- it utilizes a simple method to create log file handlers - for debug and for regular log messages.
- it cuts the log width by introducing linebreaks automatically
- it can switch the log level using the with-statement easily (switchLevelTemp)
- it counts the log entries

As example, please refer to ``test.test_service.test_logger``

"""

import logging
import os
import re
import sys
import time
import traceback
from contextlib import contextmanager

from _io import StringIO

from patme.service.exceptions import ImproperParameterError, InternalError


def resetLoggerToNewRunDir(runDir, logLevel=logging.INFO):
    """resets the logger to the given run dir"""
    if not os.path.exists(runDir) or not os.path.isdir(runDir):
        raise ImproperParameterError(f"The given path does not exist: {runDir}")

    log.log(logLevel, f"Change logging to this directory: {runDir}")
    # create dummy logger having handlers with correct paths. these handlers are moved to the old logger
    log.setLogPath(runDir)

    # err log file
    from patme.service import exceptionhook

    if sys.excepthook is exceptionhook.excepthook:
        exceptionhook.errorLogFilename = os.path.join(runDir, os.path.basename(exceptionhook.errorLogFilename))


class MyLogger(logging.Logger):
    """ """

    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARN = logging.WARN
    WARNING = logging.WARNING
    ERROR = logging.ERROR
    FATAL = logging.FATAL
    CRITICAL = logging.CRITICAL
    NOTSET = logging.NOTSET
    TABSTR = " "

    def __init__(
        self,
        name,
        baseDirectory=None,
        logFileName=None,
        debugLogFileName=None,
        logLevel=logging.INFO,
        enableParallelHandling=False,
    ):
        """doc"""
        logging.Logger.__init__(self, name, level=MyLogger.DEBUG)
        """Logger is initialized with DEBUG as log level. This is required for the
        debug.log handler. The real log level used is set in each handler.
        See self._setLogLevel for details."""
        self.startTime = time.time()
        self.debugCount = 0
        self.infoCount = 0
        self.warningCount = 0
        self.errorCount = 0
        self.maxLineLength = 120
        self._intendationDepth = 0

        self.debugHandler = None
        """This is the debug log handler. It is stored as extra variable to identify it
        and prevent it from changing the loglevel of the handler. The debug log handler
        should always have DEBUG as level"""

        procID = None
        for keyToTest in ["SLURM_PROCID", "PMI_RANK"]:
            if keyToTest in os.environ.keys():
                enableParallelHandling = True
                procID = os.environ[keyToTest]
                break

        if enableParallelHandling:

            formatStr = "%(levelname)s "
            if procID is not None:
                formatStr += f"RK{procID}"
            else:
                formatStr += "P%(process)s"

            formatStr += " %(asctime)s: %(message)s"
            self.parallelMode = True

        else:

            self.parallelMode = False
            formatStr = "%(levelname)s\t%(asctime)s: %(message)s"

        self.formatter = logging.Formatter(formatStr, None)

        # stream handler
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(self.formatter)
        handler.setLevel(logLevel)
        self.addHandler(handler)

        if logFileName or debugLogFileName:
            self.addFileHandlers(baseDirectory, logFileName, debugLogFileName)

    def increaseIntendationLevel(self):
        """doc"""
        self._intendationDepth += 1

    def decreaseIntendationLevel(self):
        """doc"""
        self._intendationDepth -= 1
        self._intendationDepth = max(self._intendationDepth, 0)

    def addFileHandlers(self, baseDirectory, logFileName, debugLogFileName):
        """Adds a file handler that is identical to the stream handler and one
        file handler with debug log level"""

        if baseDirectory is None:
            # initialize handlers as stream until they are set to a directory in setLogPath()

            # run log handler
            handler = logging.StreamHandler(StringIO())
            handler.setFormatter(self.formatter)
            handler.setLevel(self.logLevel)
            self.addHandler(handler)
            # debug log handler
            self.debugHandler = logging.StreamHandler(StringIO())
            self.addHandler(self.debugHandler)

        else:

            logFileNameFull = os.path.join(baseDirectory, logFileName)
            debugLogFileNameFull = os.path.join(baseDirectory, debugLogFileName)

            for ffile in [logFileNameFull, debugLogFileNameFull]:
                try:
                    # remove log files in case they are greater than approx 10MB
                    if os.path.getsize(ffile) > 1e7:
                        os.remove(ffile)
                except:
                    pass

            # run log handler
            handler = logging.FileHandler(logFileNameFull, "a")
            handler.setFormatter(self.formatter)
            handler.setLevel(self.logLevel)
            self.addHandler(handler)

            # debug log handler
            self.debugHandler = logging.FileHandler(debugLogFileNameFull, "a")
            self.addHandler(self.debugHandler)

        self.debugHandler.setFormatter(self.formatter)
        self.debugHandler.setLevel(log.DEBUG)

    def debug(self, msg, *args, **kwargs):
        """see description of logging.Logger.<methodname>"""
        self.debugCount += 1
        returnMessageList = self.parallelMode
        longMesageDelim = kwargs.pop("longMesageDelim", None)
        retMsg = self.formatLongMessages(msg, returnMessageList, longMesageDelim=longMesageDelim)
        if not returnMessageList:
            retMsg = [retMsg]

        pref = self.TABSTR * self.intendationDepth
        for msg in retMsg:
            logging.Logger.debug(self, f"{pref}{msg}", *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """see description of logging.Logger.<methodname>"""
        self.infoCount += 1
        returnMessageList = self.parallelMode
        longMesageDelim = kwargs.pop("longMesageDelim", None)
        retMsg = self.formatLongMessages(msg, returnMessageList, longMesageDelim=longMesageDelim)
        if not returnMessageList:
            retMsg = [retMsg]

        pref = self.TABSTR * self.intendationDepth
        for msg in retMsg:
            logging.Logger.info(self, f"{pref}{msg}", *args, **kwargs)

    def infoHeadline(self, msg, *args, **kwargs):
        """see description of logging.Logger.<methodname>"""
        self.infoCount += 1
        pref = self.TABSTR * self.intendationDepth
        logging.Logger.info(
            self, f"{pref}================================================================================"
        )
        logging.Logger.info(self, f"{pref} {msg}", *args, **kwargs)
        logging.Logger.info(
            self, f"{pref}================================================================================"
        )

    def warn(self, msg, *args, **kwargs):
        """see description of logging.Logger.<methodname>"""
        self.warning(msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """see description of logging.Logger.<methodname>"""
        self.warningCount += 1
        returnMessageList = self.parallelMode
        retMsg = self.formatLongMessages(msg, returnMessageList)
        if not returnMessageList:
            retMsg = [retMsg]

        pref = self.TABSTR * self.intendationDepth
        for msg in retMsg:
            logging.Logger.warning(self, f"{pref}{msg}", *args, **kwargs)

    def formatException(self, ei):
        """
        Format and return the specified exception information as a string.

        This default implementation just uses
        traceback.print_exception()
        """
        sio = StringIO()
        tb = ei[2]
        # See issues #9427, #1553375. Commented out for now.
        # if getattr(self, 'fullstack', False):
        #     traceback.print_stack(tb.tb_frame.f_back, file=sio)
        traceback.print_exception(ei[0], ei[1], tb, None, sio)
        s = sio.getvalue()
        sio.close()
        if s[-1:] == "\n":
            s = s[:-1]

        return s

    def exception(self, msg, *args, exc_info=True, **kwargs):
        """
        Convenience method for logging an ERROR with exception information.
        """
        self.error(msg, *args, exc_info=exc_info, **kwargs)

    def error(self, msg, *args, **kwargs):
        """see description of logging.Logger.<methodname>"""
        self.errorCount += 1
        exc_info = kwargs.pop("exc_info", False)
        logging.Logger.error(self, msg, *args, **kwargs)

        if exc_info:
            exceptionLines = self.formatException(sys.exc_info()).split("\n")
            for msg in exceptionLines:
                logging.Logger.error(self, msg, *args, **kwargs)

    def end(self):
        """doc"""
        duration = time.time() - self.startTime
        log.info(f"Program run finished. Runtime of program: {duration:4.4f} sec")
        msg = [
            f"{self.errorCount} errors",
            f"{self.warningCount} warnings",
            f"{self.infoCount + 1} info messages",
            f"{self.debugCount} debug messages",
        ]

        self.info("; ".join(msg))

        logging.shutdown()

    def _getLogLevel(self):
        """doc"""
        for handler in self.handlers:
            if handler is self.debugHandler:
                continue
            return handler.level
        raise InternalError("There is no log handler besides the debug handler. Please check your log handlers")

    def _setLogLevel(self, logLevel):
        """doc"""
        logging._checkLevel(logLevel)
        for handler in self.handlers:
            if handler is self.debugHandler:
                # the loglevel of the debug handler will not be changed
                continue
            handler.level = logLevel

    def setLogPath(self, logPath=None):
        """This method reinitializes the file based log handlers according to the new path given.

        :param logPath: New Path of the handlers. If it is not given, the current directory is used.
        """
        if not logPath:
            logPath = os.getcwd()

        oldHandlers = self.handlers

        dummyLogger = MyLogger("logger", baseDirectory=logPath, logLevel=self.logLevel)
        self.handlers = dummyLogger.handlers
        self.debugHandler = dummyLogger.debugHandler

        for oldhandler, newHandler in zip(oldHandlers, self.handlers):

            if isinstance(oldhandler.stream, StringIO):
                newHandler.stream.write(oldhandler.stream.getvalue())

            oldhandler.close()

    @contextmanager
    def switchLevelTemp(self, temporaryLogLevel):
        """doc"""
        # changing loglevel for less output temporarily
        # can be used by "with log.switchLevelTemp(newLevel):"
        oldLogLevel = self.logLevel
        self.logLevel = temporaryLogLevel
        yield
        self.logLevel = oldLogLevel

    def formatLongMessages(self, msg, returnAsMessagesList=False, longMesageDelim=None):
        """doc"""
        msg_str = str(msg)
        if "\n" in msg_str:
            # message is already formatted
            return msg_str

        pattern = longMesageDelim
        if longMesageDelim is None:
            pattern = r"(,|\.?\s+)"

        subStrings = []
        while len(msg_str) > self.maxLineLength:
            indexes = [m.start() for m in re.finditer(pattern, msg_str) if m.start() <= self.maxLineLength]
            if indexes:
                nearestIx = max(indexes) + 1
            else:
                indexes = [m.start() for m in re.finditer(pattern, msg_str) if m.start() > self.maxLineLength]
                if indexes:
                    nearestIx = indexes[0] + 1
                else:
                    nearestIx = None

            subStrings.append(msg_str[:nearestIx])
            if nearestIx is None:
                msg_str = ""
            else:
                msg_str = msg_str[nearestIx:]

        if msg_str != "":
            subStrings.append(msg_str)

        if returnAsMessagesList:
            return subStrings
        else:
            return "\n\t\t\t\t ".join(subStrings)

    def _getIntendationDepth(self):
        """doc"""
        return self._intendationDepth

    def getFileHandlerFilenames(self):
        """Returns a list of the filenames of the file handlers"""
        return [handler.baseFilename for handler in self.handlers if hasattr(handler, "baseFilename")]

    intendationDepth = property(fget=_getIntendationDepth)
    logLevel = property(fget=_getLogLevel, fset=_setLogLevel)


log = MyLogger("logger")
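The module docstring points to ``test.test_service.test_logger`` for examples. As a rough orientation only, the module-level ``log`` instance could be driven as sketched below; the file names and messages are made up for illustration and are not taken from the package or its tests.

import logging
from patme.service.logger import log

log.info("a very long message " * 20)     # long lines are wrapped automatically at log.maxLineLength characters
log.increaseIntendationLevel()
log.info("an indented sub-step")          # messages are prefixed with TABSTR once per indentation level
log.decreaseIntendationLevel()

with log.switchLevelTemp(logging.WARNING):
    log.info("suppressed: only WARNING and above pass while this block is active")

log.addFileHandlers(".", "run.log", "debug.log")  # hypothetical file names: a regular log file plus a DEBUG-level file
log.end()                                         # logs the runtime and per-level message counters, then shuts logging down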
patme/service/mathutils.py
ADDED
@@ -0,0 +1,108 @@
# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR)
#
# SPDX-License-Identifier: MIT

"""
Created on 16.06.2021

@author: schu_a1
"""
import numpy as np
from scipy.spatial import KDTree, cKDTree


def getUniqueListByThreshold(llist, threshold=1e-08):
    """doc"""
    newList = sorted(llist)
    removeIndexes = []
    elem = newList[0]
    for index, posX in enumerate(newList[1:]):
        if abs(posX - elem) < threshold:
            removeIndexes.append(index + 1)
        else:
            elem = posX

    for index in removeIndexes[::-1]:
        newList.pop(index)

    return newList


def cartprod(*arrays):
    """The method computes a cartesian product a multiple 1D-input arrays
    :param arrays: iterable of 1d-arrays
    :return: 2D array"""
    N = len(arrays)
    return np.transpose(np.meshgrid(*arrays, indexing="ij"), np.roll(np.arange(N + 1), -1)).reshape(-1, N)


def get2DSubArrayWithPregivenOrder(arr, columnValueOrder, columnNumber=0, returnMask=False):
    """
    The method creates an ordered 2D array with respect to a user defined order for a
    particular column of an input array. The value order do not need to contain all values
    from that column so that a reduced array can be returned.
    :param arr: 2D array
    :param columnValueOrder: Order
    :return: ordered 2D array
    """

    if columnNumber > arr.shape[1] - 1:
        raise Exception("Column number %s is greater than the number of columns for given array" % columnNumber)
    mask = np.in1d(arr[:, columnNumber], columnValueOrder)

    subArray = arr[mask]  # still 'unsorted'

    columnValueOrder = np.asarray(columnValueOrder)
    arr_is_sorted = (np.diff(arr[:, columnNumber]) == 1).all()
    if arr_is_sorted:
        ii = (columnValueOrder - int(arr[:, columnNumber][0])).astype(int)
    else:
        ii = None
        for treeImpl in [KDTree, cKDTree]:

            kdTree = treeImpl(subArray[:, columnNumber][:, np.newaxis])
            dd, ii = kdTree.query(columnValueOrder[:, np.newaxis])
            if np.all(np.isnan(dd)):
                ii = None
                continue
            else:
                break

    if ii is None:
        raise Exception("Unable to perform KDTree-based nearest neighbor search!")

    if returnMask:
        return subArray[ii], mask
    else:
        return subArray[ii]


def stack2DArrays(listWithArraysToStack, fillValue=0, use_dtype=np.float64):
    """This method stacks multiple 2D arrays to one complete array

    :param listWithArraysToStack: list with multiple 2D numpy arrays
    :param fillValue: Value which should be used when 2D input arrays are not all of the same
        column length so that entries are not set with values of the input array
    :param use_dtype: force numpy data type from use_dtype
    :return: stacked array"""
    numRows = sum(array.shape[0] for array in listWithArraysToStack)
    numCols = [array.shape[1] for array in listWithArraysToStack if len(array.shape) > 1]
    maxColumns = max(numCols, default=0)

    if numRows == 0 and maxColumns == 0:
        return np.array([])

    fullTable = np.empty((numRows, maxColumns), dtype=use_dtype)
    fullTable.fill(fillValue)
    i_min, i_max = 0, 0

    for table in listWithArraysToStack:

        if len(table.shape) < 2:
            continue

        i_max = i_min + table.shape[0]
        fullTable[i_min:i_max, : table.shape[1]] = table
        i_min = i_max

    return fullTable
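For orientation, a short sketch of how these helpers behave with hypothetical inputs (the values below are made up, not taken from the package):

import numpy as np
from patme.service.mathutils import cartprod, getUniqueListByThreshold, stack2DArrays

# cartesian product of 1D arrays: one row per combination
print(cartprod([1, 2], [10, 20, 30]))
# rows: [1 10], [1 20], [1 30], [2 10], [2 20], [2 30]

# values closer together than the threshold collapse onto the first representative
print(getUniqueListByThreshold([0.0, 1e-10, 0.5, 0.5 + 1e-9, 1.0]))
# [0.0, 0.5, 1.0]

# arrays with fewer columns are padded with fillValue
a, b = np.ones((2, 3)), 2 * np.ones((1, 2))
print(stack2DArrays([a, b], fillValue=-1))
# rows: [1. 1. 1.], [1. 1. 1.], [2. 2. -1.]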
patme/service/misc.py
ADDED
@@ -0,0 +1,71 @@
# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR)
#
# SPDX-License-Identifier: MIT

"""
Created on 16.06.2021

@author: schu_a1
"""

from itertools import chain, tee
from operator import attrgetter

from patme.service.exceptions import InternalError


class GenericEntityList(list):
    """classdocs"""

    def __init__(self, *args, **kwargs):
        """doc"""
        if len(args) > 0 and isinstance(args[0], str):
            raise InternalError(
                "Incorrect initialization of structureElementList. "
                + 'Probably "defaultSortAttribute" needs to be added as key'
            )

        attr = kwargs.pop("defaultSortAttribute", None)
        list.__init__(self, *args, **kwargs)
        if attr is not None:
            self.defaultSortAttribute = attr

    def sort(self, key=None, reverse=False, attribute=None):
        """doc"""
        if key == None:
            if attribute == None:
                attribute = self.defaultSortAttribute

            if isinstance(attribute, str) or not hasattr(attribute, "__iter__"):
                # make attribute an iterable item if needed
                attribute = [attribute]

            key = attrgetter(*attribute)

        list.sort(self, key=key, reverse=reverse)

    def update(self, newEntries):
        """doc"""
        self += [elem for elem in newEntries if elem not in self]
        return self

    def copy(self):
        """doc"""
        return self.__class__(self[:], defaultSortAttribute=self.defaultSortAttribute)

    def applyFuncOnElements(self, func=None):
        """Method applies function on list and return flatten list"""
        return self.__class__(chain.from_iterable(map(func, self)), defaultSortAttribute=self.defaultSortAttribute)


def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = tee(iterable)
    next(b, None)
    return zip(a, b)


def invertDict(mydict):
    """This method inverts a dictionary and returns it. To apply the inversion without
    loss of data the mapping should be bijective."""
    return dict([[v, k] for k, v in mydict.items()])
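A minimal usage sketch for these helpers; the Node class and its attributes are invented for illustration and are not part of patme:

from patme.service.misc import GenericEntityList, invertDict, pairwise

class Node:
    # hypothetical element type, not part of the package
    def __init__(self, name, weight):
        self.name, self.weight = name, weight

nodes = GenericEntityList([Node("b", 2), Node("a", 1)], defaultSortAttribute="name")
nodes.sort()                            # sorts by the default attribute "name"
nodes.sort(attribute="weight")          # or by any other attribute (or an iterable of attributes)
nodes.update([nodes[0], Node("c", 3)])  # appends only entries not already contained

print(list(pairwise([1, 2, 3, 4])))     # [(1, 2), (2, 3), (3, 4)]
print(invertDict({"a": 1, "b": 2}))     # {1: 'a', 2: 'b'}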