iker-python-common 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iker/common/__init__.py +6 -0
- iker/common/core/__init__.py +0 -0
- iker/common/core/exceptions.py +64 -0
- iker/common/utils/__init__.py +0 -0
- iker/common/utils/config.py +117 -0
- iker/common/utils/dbutils.py +203 -0
- iker/common/utils/dockerutils.py +223 -0
- iker/common/utils/dtutils.py +187 -0
- iker/common/utils/funcutils.py +101 -0
- iker/common/utils/logger.py +67 -0
- iker/common/utils/numutils.py +103 -0
- iker/common/utils/randutils.py +147 -0
- iker/common/utils/retry.py +182 -0
- iker/common/utils/s3utils.py +270 -0
- iker/common/utils/sequtils.py +394 -0
- iker/common/utils/shutils.py +229 -0
- iker/common/utils/stream.py +188 -0
- iker/common/utils/strutils.py +159 -0
- iker/common/utils/testutils.py +171 -0
- iker_python_common-1.0.1.dist-info/METADATA +40 -0
- iker_python_common-1.0.1.dist-info/RECORD +23 -0
- iker_python_common-1.0.1.dist-info/WHEEL +5 -0
- iker_python_common-1.0.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,394 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import enum
|
|
4
|
+
import functools
|
|
5
|
+
import itertools
|
|
6
|
+
from typing import Callable, Generator, Iterable, Sequence, TypeVar
|
|
7
|
+
|
|
8
|
+
# Public API of this module: Haskell/Scala-style sequence accessors,
# grouping/deduping/chunking helpers, and sorted-interval set operations.
__all__ = [
    "head",
    "head_or_none",
    "last",
    "last_or_none",
    "tail",
    "init",
    "grouped",
    "deduped",
    "batch_yield",
    "chunk",
    "chunk_between",
    "chunk_with_key",
    "merge_chunks",
    "IntervalRelation",
    "interval_relation",
    "intervals_union",
    "intervals_intersect",
    "intervals_subtract",
]
|
|
28
|
+
|
|
29
|
+
# Generic type parameters shared by the helpers in this module.
T = TypeVar("T")  # element type
K = TypeVar("K")  # grouping/chunking key type
V = TypeVar("V")  # value type
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
# See Haskell's list operations head, tail, init, and last
|
|
35
|
+
# which is also provided in Scala list operations
|
|
36
|
+
|
|
37
|
+
def head(ms: Sequence[T]) -> T:
    """
    Returns the first element of the given sequence (cf. Haskell/Scala ``head``).

    :param ms: sequence of elements
    :return: the first element; raises ``IndexError`` on an empty sequence
    """
    return ms[0]
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def head_or_none(ms: Sequence[T]) -> T | None:
    """
    Returns the first element of the given sequence, or ``None`` when the sequence is empty.

    :param ms: sequence of elements
    :return: first element, or ``None`` for an empty sequence
    """
    return None if len(ms) == 0 else ms[0]
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def last(ms: Sequence[T]) -> T:
    """
    Returns the last element of the given sequence (cf. Haskell/Scala ``last``).

    :param ms: sequence of elements
    :return: the last element; raises ``IndexError`` on an empty sequence
    """
    return ms[-1]
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def last_or_none(ms: Sequence[T]) -> T | None:
    """
    Returns the last element of the given sequence, or ``None`` when the sequence is empty.

    :param ms: sequence of elements
    :return: last element, or ``None`` for an empty sequence
    """
    return None if len(ms) == 0 else ms[-1]
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def tail(ms: Sequence[T]) -> Sequence[T]:
    """
    Returns all elements of the given sequence except the first (cf. Haskell/Scala ``tail``).

    :param ms: sequence of elements
    :return: the sequence without its first element; empty when ``ms`` has fewer than two elements
    """
    return ms[1:]
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def init(ms: Sequence[T]) -> Sequence[T]:
    """
    Returns all elements of the given sequence except the last (cf. Haskell/Scala ``init``).

    :param ms: sequence of elements
    :return: the sequence without its last element; empty when ``ms`` has fewer than two elements
    """
    return ms[:-1]
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def grouped(
    ms: Sequence[T],
    key_func: Callable[[T], K],
    values_only: bool = False,
) -> list[tuple[K, list[T]]] | list[list[T]]:
    """
    Groups the given list of elements by the key produced by ``key_func``,
    preserving first-seen key order and element order within each group.

    :param ms: list of elements (``None`` is treated as empty)
    :param key_func: key generator function
    :param values_only: True if only element groups should be returned, without their keys
    :return: grouped elements, with corresponding keys unless ``values_only`` is True
    """
    if ms is None or len(ms) == 0:
        return []
    buckets: dict[K, list[T]] = {}
    for item in ms:
        buckets.setdefault(key_func(item), []).append(item)
    if values_only:
        return list(buckets.values())
    return list(buckets.items())
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def deduped(ms: Sequence[T], comp_func: Callable[[T, T], bool]) -> list[T]:
    """
    Removes consecutive duplicates from the given list of elements. An element is dropped
    when ``comp_func`` reports it equal to the most recently kept element.

    :param ms: list of elements (``None`` is treated as empty)
    :param comp_func: equality comparator between the last kept element and the candidate
    :return: deduped elements
    """
    if ms is None or len(ms) == 0:
        return []
    kept: list[T] = [ms[0]]
    for candidate in ms[1:]:
        if not comp_func(kept[-1], candidate):
            kept.append(candidate)
    return kept
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def batch_yield(ms: Iterable[T], batch_size: int) -> Generator[list[T]]:
    """
    Lazily splits the given input sequence into consecutive batches of at most
    ``batch_size`` elements; the final batch may be shorter.

    :param ms: sequence of elements
    :param batch_size: maximum batch size
    :return: generator of batches
    :raises ValueError: if ``batch_size`` is less than one (raised on first iteration,
        since this is a generator function)
    """
    if batch_size < 1:
        raise ValueError("illegal batch size")
    iterator = iter(ms)
    while True:
        batch = list(itertools.islice(iterator, batch_size))
        if not batch:
            break
        yield batch
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def chunk(ms: Sequence[T], chunk_func: Callable[[Sequence[T], T], bool], exclusive_end: bool = False) -> list[list[T]]:
    """
    Chops the list of elements into chunks.

    :param ms: list of elements (``None`` is treated as empty)
    :param chunk_func: predicate receiving the current chunk and the next element from the list;
        a True result starts a new chunk at that element
    :param exclusive_end: set to True to make each chunk (except the last one) also carry the first
        element of the next chunk as an exclusive end
    :return: list of element chunks
    """
    if ms is None or len(ms) == 0:
        return []
    chunks: list[list[T]] = [[ms[0]]]
    for element in ms[1:]:
        current = chunks[-1]
        if chunk_func(current, element):
            if exclusive_end:
                # The boundary element closes the current chunk as its exclusive end
                current.append(element)
            chunks.append([element])
        else:
            current.append(element)
    return chunks
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def chunk_between(ms: Sequence[T], chunk_func: Callable[[T, T], bool], exclusive_end: bool = False) -> list[list[T]]:
    """
    Chops the list into chunks by comparing each element against the last element
    of the current chunk with ``chunk_func``.
    """
    def boundary(current_chunk: Sequence[T], nxt: T) -> bool:
        return chunk_func(current_chunk[-1], nxt)

    return chunk(ms, boundary, exclusive_end)
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def chunk_with_key(ms: Sequence[T], key_func: Callable[[T], K], exclusive_end: bool = False) -> list[list[T]]:
    """
    Chops the list into chunks wherever the key produced by ``key_func`` changes
    between neighbouring elements.
    """
    def key_changes(prev: T, nxt: T) -> bool:
        return key_func(prev) != key_func(nxt)

    return chunk_between(ms, key_changes, exclusive_end)
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def merge_chunks(
    chunks: Sequence[Sequence[T]],
    merge_func: Callable[[Sequence[T], Sequence[T]], bool],
    drop_exclusive_end: bool = False,
) -> list[list[T]]:
    """
    Merges chunks according to the given merging criteria

    :param chunks: chunks to be merged into larger ones
    :param merge_func: merged chunk generator function
    :param drop_exclusive_end: set to true if each of the given chunk (except the last one) as an exclusive end element,
    and these exclusive end elements will be dropped while merging their chunks to the corresponding next chunks
    :return: merged chunks
    """
    if chunks is None or len(chunks) == 0:
        return []

    merged_chunks: list[list[T]] = []

    # The reducer carries the chunk currently being accumulated as its running value;
    # completed chunks are flushed into `merged_chunks` as a side effect.
    def stateful_reducer(a: Sequence[T], b: Sequence[T]) -> Sequence[T]:
        if merge_func(a, b):
            if drop_exclusive_end:
                # `a`'s final element is an exclusive end duplicated at the head of `b`; drop it
                return list(itertools.chain(init(a), b))
            return list(itertools.chain(a, b))
        else:
            # Not mergeable: flush the accumulated chunk and start accumulating from `b`
            merged_chunks.append(list(a))
            return b

    # After the reduce, the running value is the final (not yet flushed) chunk
    last_chunk = functools.reduce(stateful_reducer, chunks)
    merged_chunks.append(list(last_chunk))
    return merged_chunks
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
class IntervalRelation(enum.IntEnum):
    """
    Bit flags describing the position of one interval's endpoints relative to another
    interval (see :func:`interval_relation`). The primitive flags encode where each
    endpoint of interval ``a`` falls with respect to interval ``b``; the composite
    members below name the meaningful combinations of one left-endpoint flag and one
    right-endpoint flag.
    """
    # Primitive endpoint flags: <side of `a`><position relative to `b`>
    LeftIn = 0x1
    RightIn = 0x2
    LeftLeftOut = 0x10
    LeftLeftOn = 0x20
    LeftRightOn = 0x40
    LeftRightOut = 0x80
    RightLeftOut = 0x100
    RightLeftOn = 0x200
    RightRightOn = 0x400
    RightRightOut = 0x800

    # Composite relations, ordered from `a` entirely left of `b` to entirely right of `b`
    LeftDetach = LeftLeftOut | RightLeftOut
    LeftTouch = LeftLeftOut | RightLeftOn
    LeftOverlap = LeftLeftOut | RightIn
    LeftOn = LeftLeftOn | RightLeftOn
    LeftAlignOverlay = LeftLeftOn | RightIn
    LeftAlignCover = LeftLeftOn | RightRightOut
    Overlay = LeftIn | RightIn
    Cover = LeftLeftOut | RightRightOut
    Identical = LeftLeftOn | RightRightOn
    RightAlignOverlay = LeftIn | RightRightOn
    RightAlignCover = LeftLeftOut | RightRightOn
    RightOn = LeftRightOn | RightRightOn
    RightOverlap = LeftIn | RightRightOut
    RightTouch = LeftRightOn | RightRightOut
    RightDetach = LeftRightOut | RightRightOut
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def interval_relation(a: tuple[float, float], b: tuple[float, float]) -> int:
    """
    Computes the positional relation of interval ``a`` relative to interval ``b``.

    :param a: interval as a ``(low, high)`` pair
    :param b: interval as a ``(low, high)`` pair
    :return: bitmask combining one ``Left*`` flag (position of ``a``'s left endpoint
        relative to ``b``) and one ``Right*`` flag (position of ``a``'s right endpoint),
        matching the composite members of :class:`IntervalRelation`
    """
    (a0, a1), (b0, b1) = a, b
    rel = 0
    # Classify the left endpoint of `a` against `b`
    if a0 < b0:
        rel |= IntervalRelation.LeftLeftOut
    elif a0 == b0:
        rel |= IntervalRelation.LeftLeftOn
    elif b0 < a0 < b1:
        rel |= IntervalRelation.LeftIn
    elif a0 == b1:
        rel |= IntervalRelation.LeftRightOn
    elif a0 > b1:
        rel |= IntervalRelation.LeftRightOut
    # Classify the right endpoint of `a` against `b`
    if a1 > b1:
        rel |= IntervalRelation.RightRightOut
    elif a1 == b1:
        rel |= IntervalRelation.RightRightOn
    elif b0 < a1 < b1:
        rel |= IntervalRelation.RightIn
    elif a1 == b0:
        rel |= IntervalRelation.RightLeftOn
    elif a1 < b0:
        rel |= IntervalRelation.RightLeftOut
    return rel
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def intervals_union(a: Sequence[tuple[T, T]], *bs: Sequence[tuple[T, T]]) -> list[tuple[T, T]]:
    """
    Computes the union of the given interval lists. The intervals in each of the lists must be sorted and do not
    mutually overlap

    :param a: the first interval list
    :param bs: the remaining interval lists
    :return: union of the interval lists whose intervals are sorted
    """

    def union(xs: Sequence[tuple[T, T]], ys: Sequence[tuple[T, T]]) -> list[tuple[T, T]]:
        # With one side empty the union is just the concatenation
        if not xs or not ys:
            return list(itertools.chain(xs, ys))

        result: list[tuple[T, T]] = []

        i, j = 0, 0
        x0_lo, _ = xs[i]
        y0_lo, _ = ys[j]
        # (lo, hi) is the interval being accumulated; seed it as a degenerate interval at the
        # smallest left endpoint so the first merged interval absorbs it naturally
        lo = hi = min(x0_lo, y0_lo)
        while i < len(xs) or j < len(ys):
            # Two-way merge: pick the remaining interval with the smaller left endpoint
            if i < len(xs) and j < len(ys):
                x_lo, _ = xs[i]
                y_lo, _ = ys[j]
                if x_lo < y_lo:
                    curr = xs[i]
                    i += 1
                else:
                    curr = ys[j]
                    j += 1
            elif i == len(xs):
                curr = ys[j]
                j += 1
            else:
                curr = xs[i]
                i += 1

            curr_lo, curr_hi = curr

            if hi < curr_lo:
                # Gap before the next interval: flush the accumulated one and restart
                result.append((lo, hi))
                lo, hi = curr
            else:
                # Overlapping or touching: extend the accumulated interval
                hi = max(hi, curr_hi)

        # Flush the final accumulated interval
        result.append((lo, hi))

        return result

    # Fold the pairwise union over all remaining lists
    for b in bs:
        a = union(a, b)
    return a
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
def intervals_intersect(a: Sequence[tuple[T, T]], *bs: Sequence[tuple[T, T]]) -> list[tuple[T, T]]:
    """
    Computes the intersection of the given interval lists. The intervals in each of the lists must
    be sorted and must not mutually overlap.

    :param a: the first interval list
    :param bs: the remaining interval lists
    :return: intersection of the interval lists whose intervals are sorted
    """

    def intersect(xs: Sequence[tuple[T, T]], ys: Sequence[tuple[T, T]]) -> list[tuple[T, T]]:
        if not xs or not ys:
            return []

        overlaps: list[tuple[T, T]] = []
        i = j = 0
        while i < len(xs) and j < len(ys):
            (x_lo, x_hi), (y_lo, y_hi) = xs[i], ys[j]
            # Overlap of the two current intervals, if any
            lo, hi = max(x_lo, y_lo), min(x_hi, y_hi)
            if not hi < lo:
                overlaps.append((lo, hi))
            # Advance whichever interval ends first
            if x_hi < y_hi:
                i += 1
            else:
                j += 1
        return overlaps

    # Fold the pairwise intersection over all remaining lists
    for b in bs:
        a = intersect(a, b)
    return a
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
def intervals_subtract(a: Sequence[tuple[T, T]], *bs: Sequence[tuple[T, T]]) -> list[tuple[T, T]]:
    """
    Computes the subtraction on the first interval list by the remaining interval lists. The intervals in each of the
    lists must be sorted and do not mutually overlap

    :param a: the first interval list
    :param bs: the remaining interval lists
    :return: subtraction on the first interval list by the remaining interval lists whose intervals are sorted
    """

    def subtract(xs: Sequence[tuple[T, T]], ys: Sequence[tuple[T, T]]) -> list[tuple[T, T]]:
        # Nothing to subtract, or nothing to subtract from
        if not xs or not ys:
            return list(xs)

        result: list[tuple[T, T]] = []

        i, j = 0, 0
        # `curr` is the not-yet-emitted remainder of the current interval from `xs`;
        # it is set to None once fully consumed or emitted
        curr = xs[i]
        while j < len(ys):
            curr_lo, curr_hi = curr
            y_lo, y_hi = ys[j]
            lo = max(curr_lo, y_lo)
            hi = min(curr_hi, y_hi)

            if not lo > hi:
                # Overlap: emit the part of `curr` left of the overlap, keep the part right of it
                if curr_lo < lo:
                    result.append((curr_lo, lo))
                if hi < curr_hi:
                    curr = hi, curr_hi
                else:
                    curr = None
            elif curr_hi < y_lo:
                # `curr` lies entirely before the subtrahend: emit it untouched
                result.append(curr)
                curr = None

            if curr is None:
                # Move on to the next interval from `xs`
                i += 1
                if i < len(xs):
                    curr = xs[i]
                else:
                    break
            else:
                # `curr` extends past the subtrahend: advance to the next subtrahend
                j += 1

        # Emit whatever remains of the interval in flight
        if curr is not None:
            result.append(curr)

        # Emit the intervals of `xs` never reached by any subtrahend
        i += 1
        while i < len(xs):
            result.append(xs[i])
            i += 1

        return result

    # Fold the pairwise subtraction over all remaining lists
    for b in bs:
        a = subtract(a, b)
    return a
|
|
@@ -0,0 +1,229 @@
|
|
|
1
|
+
import fnmatch
|
|
2
|
+
import os
|
|
3
|
+
import shutil
|
|
4
|
+
from typing import Protocol
|
|
5
|
+
|
|
6
|
+
from iker.common.utils import logger
|
|
7
|
+
from iker.common.utils.sequtils import last, last_or_none, tail
|
|
8
|
+
from iker.common.utils.strutils import is_empty
|
|
9
|
+
|
|
10
|
+
# Public API of this module: filename decomposition, path utilities,
# glob filtering, recursive listing/copying, and shell command helpers.
__all__ = [
    "extension",
    "extensions",
    "stem",
    "expanded_path",
    "path_depth",
    "glob_match",
    "listfile",
    "copy",
    "run",
    "execute",
]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def extension(filename: str) -> str:
    """
    Extracts the (shortest) filename extension of the given filename or path,
    including the leading dot; empty string when there is no extension.

    :param filename: the specific filename or path
    :return: filename extension
    """
    return os.path.splitext(os.path.basename(filename))[1]
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def stem(filename: str, minimal: bool = False) -> str:
    """
    Extracts the filename stem of the given filename or path.

    :param filename: the specific filename or path
    :param minimal: True to strip the longest compound extension, yielding the shortest stem
    :return: filename stem
    """
    base = os.path.basename(filename)
    if minimal:
        longest_ext = last_or_none(extensions(base))
        if is_empty(longest_ext):
            return base
        return base[:-len(longest_ext)]
    return os.path.splitext(base)[0]
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def extensions(filename: str) -> list[str]:
    """
    Extracts all filename extensions and compound extensions of the given filename or path.

    :param filename: the specific filename or path
    :return: list of all extensions ordered from the shortest to the longest
    """
    base = os.path.basename(filename)
    # Accumulate progressively longer compound extensions, seeded with the empty suffix
    acc = [""]
    while True:
        base, ext = os.path.splitext(base)
        if is_empty(ext):
            break
        acc.append(ext + acc[-1])
    # Drop the empty seed
    return acc[1:]
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def expanded_path(path: str) -> str:
    """
    Returns the absolute expanded path, with environment variables and the home tilde expanded.

    :param path: the given path
    :return: the absolute canonical path
    """
    with_vars = os.path.expandvars(path)
    with_home = os.path.expanduser(with_vars)
    return os.path.abspath(with_home)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def path_depth(root: str, child: str) -> int:
    """
    Returns the relative path depth from the given child to the root: 0 when they are the
    same path, 1 for a direct child, and so on; -1 when ``child`` is not under ``root``.

    Uses ``os.path.relpath`` rather than a raw string-prefix test, so a sibling path that
    merely shares a textual prefix (e.g. ``/a/bc`` under root ``/a/b``) is correctly
    rejected instead of being reported as depth 0.

    :param root: the root path
    :param child: the child path
    :return: relative depth, or -1 when ``child`` is outside ``root``
    """
    root_expanded = expanded_path(root)
    child_expanded = expanded_path(child)
    try:
        rel = os.path.relpath(child_expanded, root_expanded)
    except ValueError:
        # e.g. different drives on Windows: not related at all
        return -1
    # A relative path escaping upwards means `child` is not under `root`
    if rel == os.pardir or rel.startswith(os.pardir + os.sep):
        return -1
    if rel == os.curdir:
        return 0
    # One component -> depth 1; each separator adds one more level
    return rel.count(os.sep) + 1
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def glob_match(names: list[str], include_patterns: list[str] = None, exclude_patterns: list[str] = None) -> list[str]:
    """
    Applies the given inclusive and exclusive glob patterns on the given names and returns the
    filtered result. With no inclusive patterns, every name is included before exclusions apply.

    :param names: names to apply the glob patterns
    :param include_patterns: inclusive glob patterns
    :param exclude_patterns: exclusive glob patterns
    :return: filtered names (in no particular order)
    """
    matched: set[str] = set()
    if include_patterns:
        for pattern in include_patterns:
            matched.update(fnmatch.filter(names, pattern))
    else:
        matched.update(names)
    for pattern in (exclude_patterns or []):
        matched.difference_update(fnmatch.filter(names, pattern))
    return list(matched)
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
class CopyFuncProtocol(Protocol):
    """Structural type of a file-copy callable compatible with ``shutil.copy2``."""

    def __call__(self, src: str, dst: str, **kwargs) -> None: ...
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def listfile(
    path: str,
    *,
    include_patterns: list[str] = None,
    exclude_patterns: list[str] = None,
    depth: int = 0,
) -> list[str]:
    """
    Recursively scans the given path and returns the list of files whose names satisfy the given
    name patterns and whose folders lie no deeper than the specific depth below the root path.

    :param path: the root path which is scanned
    :param include_patterns: inclusive glob patterns applied to the filenames
    :param exclude_patterns: exclusive glob patterns applied to the filenames
    :param depth: maximum depth of the subdirectories included in the scan (0 means unlimited)
    :return: list of matching file paths
    """
    # A plain file: either it matches the patterns, or nothing does
    if os.path.exists(path) and not os.path.isdir(path):
        matched = glob_match([os.path.basename(path)], include_patterns, exclude_patterns)
        return [path] if matched else []

    found: list[str] = []
    for parent, _, filenames in os.walk(path):
        # Skip directories beyond the requested depth (depth 0 disables the limit)
        if 0 < depth <= path_depth(path, parent):
            continue
        found.extend(
            os.path.join(parent, filename)
            for filename in glob_match(filenames, include_patterns, exclude_patterns)
        )
    return found
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def copy(
    src: str,
    dst: str,
    *,
    include_patterns: list[str] = None,
    exclude_patterns: list[str] = None,
    depth: int = 0,
    follow_symlinks: bool = False,
    ignore_dangling_symlinks: bool = False,
    dirs_exist_ok: bool = False,
    copy_func: CopyFuncProtocol = shutil.copy2
):
    """
    Recursively copies the given source path to the destination path. Only copies the files whose names satisfy the
    given name patterns and the relative depth of their folders to the given source path is not greater than the
    specific depth value

    :param src: the source path or file
    :param dst: the destination path or file
    :param include_patterns: inclusive glob patterns applied to the filenames
    :param exclude_patterns: exclusive glob patterns applied to the filenames
    :param depth: maximum depth of the subdirectories included in the scan
    :param follow_symlinks: True to create symbolic links for the symbolic links present in the source, otherwise, make
    a physical copy
    :param ignore_dangling_symlinks: True to ignore errors if the file pointed by the symbolic link does not exist
    :param dirs_exist_ok: True to ignore errors if the destination directory and subdirectories exist
    :param copy_func: copy function
    """
    # Single-file source: apply the name patterns, create the parent directory if needed, copy
    if not os.path.isdir(src):
        if len(glob_match([os.path.basename(src)], include_patterns, exclude_patterns)) == 0:
            return
        if not os.path.exists(dst):
            os.makedirs(os.path.dirname(dst), exist_ok=True)
        copy_func(src, dst, follow_symlinks=follow_symlinks)
        return

    # Callback for shutil.copytree: returns the set of names to SKIP in each directory
    def ignore_func(parent, names):
        # Only files are candidates for pattern filtering; directories are always traversed
        filenames = list(filter(lambda x: not os.path.isdir(os.path.join(parent, x)), names))
        ret = set(filenames)
        # Past the depth limit: skip every file in this directory (depth 0 disables the limit)
        if 0 < depth <= path_depth(src, parent):
            return ret
        # Keep (i.e. do not ignore) the files matching the patterns
        ret.difference_update(glob_match(filenames, include_patterns, exclude_patterns))
        return ret

    shutil.copytree(src,
                    dst,
                    symlinks=follow_symlinks,
                    ignore=ignore_func,
                    ignore_dangling_symlinks=ignore_dangling_symlinks,
                    dirs_exist_ok=dirs_exist_ok,
                    copy_function=copy_func)
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def run(cmd: str) -> bool:
    """
    Runs the given command and returns the success status.

    :param cmd: command to run
    :return: True if the command exited with status zero
    """
    logger.debug("Running command: %s", cmd)
    exit_code = os.system(cmd)
    return exit_code == 0
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
def execute(cmd: str, strip: bool = True) -> str:
    """
    Executes the given command and returns the contents collected from standard output.

    :param cmd: command to execute
    :param strip: True if the contents will be stripped of surrounding whitespace
    :return: the content from standard output
    """
    logger.debug("Executing command: %s", cmd)
    output = os.popen(cmd).read()
    return output.strip() if strip else output
|