ScriptCollection 3.3.23__py3-none-any.whl → 4.0.78__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ScriptCollection/AnionBuildPlatform.py +206 -0
- ScriptCollection/{UpdateCertificates.py → CertificateUpdater.py} +149 -128
- ScriptCollection/Executables.py +868 -292
- ScriptCollection/GeneralUtilities.py +609 -107
- ScriptCollection/ImageUpdater.py +648 -0
- ScriptCollection/ProcessesRunner.py +41 -0
- ScriptCollection/ProgramRunnerBase.py +47 -42
- ScriptCollection/ProgramRunnerMock.py +2 -0
- ScriptCollection/ProgramRunnerPopen.py +57 -50
- ScriptCollection/ProgramRunnerSudo.py +108 -0
- ScriptCollection/SCLog.py +115 -0
- ScriptCollection/ScriptCollectionCore.py +2541 -1383
- ScriptCollection/TFCPS/Docker/TFCPS_CodeUnitSpecific_Docker.py +95 -0
- ScriptCollection/TFCPS/Docker/__init__.py +0 -0
- ScriptCollection/TFCPS/DotNet/CertificateGeneratorInformationBase.py +8 -0
- ScriptCollection/TFCPS/DotNet/CertificateGeneratorInformationGenerate.py +6 -0
- ScriptCollection/TFCPS/DotNet/CertificateGeneratorInformationNoGenerate.py +7 -0
- ScriptCollection/TFCPS/DotNet/TFCPS_CodeUnitSpecific_DotNet.py +485 -0
- ScriptCollection/TFCPS/DotNet/__init__.py +0 -0
- ScriptCollection/TFCPS/Flutter/TFCPS_CodeUnitSpecific_Flutter.py +130 -0
- ScriptCollection/TFCPS/Flutter/__init__.py +0 -0
- ScriptCollection/TFCPS/Go/TFCPS_CodeUnitSpecific_Go.py +74 -0
- ScriptCollection/TFCPS/Go/__init__.py +0 -0
- ScriptCollection/TFCPS/NodeJS/TFCPS_CodeUnitSpecific_NodeJS.py +131 -0
- ScriptCollection/TFCPS/NodeJS/__init__.py +0 -0
- ScriptCollection/TFCPS/Python/TFCPS_CodeUnitSpecific_Python.py +227 -0
- ScriptCollection/TFCPS/Python/__init__.py +0 -0
- ScriptCollection/TFCPS/TFCPS_CodeUnitSpecific_Base.py +418 -0
- ScriptCollection/TFCPS/TFCPS_CodeUnit_BuildCodeUnit.py +128 -0
- ScriptCollection/TFCPS/TFCPS_CodeUnit_BuildCodeUnits.py +136 -0
- ScriptCollection/TFCPS/TFCPS_CreateRelease.py +95 -0
- ScriptCollection/TFCPS/TFCPS_Generic.py +43 -0
- ScriptCollection/TFCPS/TFCPS_MergeToMain.py +122 -0
- ScriptCollection/TFCPS/TFCPS_MergeToStable.py +350 -0
- ScriptCollection/TFCPS/TFCPS_PreBuildCodeunitsScript.py +47 -0
- ScriptCollection/TFCPS/TFCPS_Tools_General.py +1356 -0
- ScriptCollection/TFCPS/__init__.py +0 -0
- {ScriptCollection-3.3.23.dist-info → scriptcollection-4.0.78.dist-info}/METADATA +26 -21
- scriptcollection-4.0.78.dist-info/RECORD +43 -0
- {ScriptCollection-3.3.23.dist-info → scriptcollection-4.0.78.dist-info}/WHEEL +1 -1
- scriptcollection-4.0.78.dist-info/entry_points.txt +64 -0
- ScriptCollection/Hardening.py +0 -59
- ScriptCollection/ProgramRunnerEpew.py +0 -122
- ScriptCollection/TasksForCommonProjectStructure.py +0 -1170
- ScriptCollection-3.3.23.dist-info/RECORD +0 -15
- ScriptCollection-3.3.23.dist-info/entry_points.txt +0 -24
- {ScriptCollection-3.3.23.dist-info → scriptcollection-4.0.78.dist-info}/top_level.txt +0 -0
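The diff below covers ScriptCollection/ScriptCollectionCore.py (+2541/-1383), the largest single change in this release. For orientation, here is a minimal usage sketch based only on what is visible in that diff (the module-level version string, the ScriptCollectionCore class and its git helpers); the import path follows the package layout listed above, the repository path is a placeholder, and this is an illustration rather than official documentation:

from ScriptCollection.ScriptCollectionCore import ScriptCollectionCore

sc = ScriptCollectionCore()  # uses ProgramRunnerPopen and a warning-level SCLog by default
print(ScriptCollectionCore.get_scriptcollection_version())  # "4.0.78"
repo = "/path/to/a/git/repository"  # placeholder path
if sc.git_repository_has_uncommitted_changes(repo):
    commit_id = sc.git_commit(repo, "Saved changes.")  # stages all changes and commits by default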
@@ -1,1383 +1,2541 @@
[The 1,383 removed lines of the previous ScriptCollection/ScriptCollectionCore.py are truncated in this diff view; only fragments were captured, such as parts of the old import block (e.g. "from datetime import timedelta, datetime", "import hashlib") and a few "@GeneralUtilities.check_arguments"-decorated method stubs.]
1
|
+
from datetime import timedelta, datetime
|
|
2
|
+
import json
|
|
3
|
+
import binascii
|
|
4
|
+
import filecmp
|
|
5
|
+
import hashlib
|
|
6
|
+
import multiprocessing
|
|
7
|
+
import time
|
|
8
|
+
from io import BytesIO
|
|
9
|
+
import itertools
|
|
10
|
+
import zipfile
|
|
11
|
+
import math
|
|
12
|
+
import base64
|
|
13
|
+
import os
|
|
14
|
+
from queue import Queue, Empty
|
|
15
|
+
from concurrent.futures import ThreadPoolExecutor
|
|
16
|
+
import xml.etree.ElementTree as ET
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
from subprocess import Popen
|
|
19
|
+
import re
|
|
20
|
+
import shutil
|
|
21
|
+
from typing import IO
|
|
22
|
+
import fnmatch
|
|
23
|
+
import uuid
|
|
24
|
+
import tempfile
|
|
25
|
+
import io
|
|
26
|
+
import requests
|
|
27
|
+
import ntplib
|
|
28
|
+
import yaml
|
|
29
|
+
import qrcode
|
|
30
|
+
import pycdlib
|
|
31
|
+
import send2trash
|
|
32
|
+
from pypdf import PdfReader, PdfWriter
|
|
33
|
+
from .GeneralUtilities import GeneralUtilities
|
|
34
|
+
from .ProgramRunnerBase import ProgramRunnerBase
|
|
35
|
+
from .ProgramRunnerPopen import ProgramRunnerPopen
|
|
36
|
+
from .SCLog import SCLog, LogLevel
|
|
37
|
+
|
|
38
|
+
version = "4.0.78"
|
|
39
|
+
__version__ = version
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class ScriptCollectionCore:
|
|
43
|
+
|
|
44
|
+
# The purpose of this property is to use it when testing your code which uses scriptcollection for external program-calls.
|
|
45
|
+
# Do not change this value for productive environments.
|
|
46
|
+
mock_program_calls: bool = False#TODO remove this variable. When someone want to mock program-calls then the ProgramRunnerMock can be used instead
|
|
47
|
+
# The purpose of this property is to use it when testing your code which uses scriptcollection for external program-calls.
|
|
48
|
+
execute_program_really_if_no_mock_call_is_defined: bool = False
|
|
49
|
+
__mocked_program_calls: list = None
|
|
50
|
+
program_runner: ProgramRunnerBase = None
|
|
51
|
+
call_program_runner_directly: bool = None
|
|
52
|
+
log: SCLog = None
|
|
53
|
+
|
|
54
|
+
def __init__(self):
|
|
55
|
+
self.program_runner = ProgramRunnerPopen()
|
|
56
|
+
self.call_program_runner_directly = None
|
|
57
|
+
self.__mocked_program_calls = list[ScriptCollectionCore.__MockProgramCall]()
|
|
58
|
+
self.log = SCLog(None, LogLevel.Warning, False)
|
|
59
|
+
|
|
60
|
+
@staticmethod
|
|
61
|
+
@GeneralUtilities.check_arguments
|
|
62
|
+
def get_scriptcollection_version() -> str:
|
|
63
|
+
return __version__
|
|
64
|
+
|
|
65
|
+
@GeneralUtilities.check_arguments
|
|
66
|
+
def python_file_has_errors(self, file: str, working_directory: str, treat_warnings_as_errors: bool = True) -> tuple[bool, list[str]]:
|
|
67
|
+
errors = list()
|
|
68
|
+
filename = os.path.relpath(file, working_directory)
|
|
69
|
+
if treat_warnings_as_errors:
|
|
70
|
+
errorsonly_argument = GeneralUtilities.empty_string
|
|
71
|
+
else:
|
|
72
|
+
errorsonly_argument = " --errors-only"
|
|
73
|
+
(exit_code, stdout, stderr, _) = self.run_program("pylint", filename + errorsonly_argument, working_directory, throw_exception_if_exitcode_is_not_zero=False)
|
|
74
|
+
if (exit_code != 0):
|
|
75
|
+
errors.append(f"Linting-issues of {file}:")
|
|
76
|
+
errors.append(f"Pylint-exitcode: {exit_code}")
|
|
77
|
+
for line in GeneralUtilities.string_to_lines(stdout):
|
|
78
|
+
errors.append(line)
|
|
79
|
+
for line in GeneralUtilities.string_to_lines(stderr):
|
|
80
|
+
errors.append(line)
|
|
81
|
+
return (True, errors)
|
|
82
|
+
|
|
83
|
+
return (False, errors)
|
|
84
|
+
|
|
85
|
+
@GeneralUtilities.check_arguments
|
|
86
|
+
def replace_version_in_dockerfile_file(self, dockerfile: str, new_version_value: str) -> None:
|
|
87
|
+
GeneralUtilities.write_text_to_file(dockerfile, re.sub("ARG Version=\"\\d+\\.\\d+\\.\\d+\"", f"ARG Version=\"{new_version_value}\"", GeneralUtilities.read_text_from_file(dockerfile)))
|
|
88
|
+
|
|
89
|
+
@GeneralUtilities.check_arguments
|
|
90
|
+
def replace_version_in_python_file(self, file: str, new_version_value: str):
|
|
91
|
+
GeneralUtilities.write_text_to_file(file, re.sub("version = \"\\d+\\.\\d+\\.\\d+\"", f"version = \"{new_version_value}\"", GeneralUtilities.read_text_from_file(file)))
|
|
92
|
+
|
|
93
|
+
@GeneralUtilities.check_arguments
|
|
94
|
+
def replace_version_in_ini_file(self, file: str, new_version_value: str):
|
|
95
|
+
GeneralUtilities.write_text_to_file(file, re.sub("version = \\d+\\.\\d+\\.\\d+", f"version = {new_version_value}", GeneralUtilities.read_text_from_file(file)))
|
|
96
|
+
|
|
97
|
+
@GeneralUtilities.check_arguments
|
|
98
|
+
def replace_version_in_nuspec_file(self, nuspec_file: str, new_version: str) -> None:
|
|
99
|
+
# TODO use XSLT instead
|
|
100
|
+
versionregex = "\\d+\\.\\d+\\.\\d+"
|
|
101
|
+
versiononlyregex = f"^{versionregex}$"
|
|
102
|
+
pattern = re.compile(versiononlyregex)
|
|
103
|
+
if pattern.match(new_version):
|
|
104
|
+
GeneralUtilities.write_text_to_file(nuspec_file, re.sub(f"<version>{versionregex}<\\/version>", f"<version>{new_version}</version>", GeneralUtilities.read_text_from_file(nuspec_file)))
|
|
105
|
+
else:
|
|
106
|
+
raise ValueError(f"Version '{new_version}' does not match version-regex '{versiononlyregex}'")
|
|
107
|
+
|
|
108
|
+
@GeneralUtilities.check_arguments
|
|
109
|
+
def replace_version_in_csproj_file(self, csproj_file: str, current_version: str):
|
|
110
|
+
versionregex = "\\d+\\.\\d+\\.\\d+"
|
|
111
|
+
versiononlyregex = f"^{versionregex}$"
|
|
112
|
+
pattern = re.compile(versiononlyregex)
|
|
113
|
+
if pattern.match(current_version):
|
|
114
|
+
for tag in ["Version", "AssemblyVersion", "FileVersion"]:
|
|
115
|
+
GeneralUtilities.write_text_to_file(csproj_file, re.sub(f"<{tag}>{versionregex}(.\\d+)?<\\/{tag}>", f"<{tag}>{current_version}</{tag}>", GeneralUtilities.read_text_from_file(csproj_file)))
|
|
116
|
+
else:
|
|
117
|
+
raise ValueError(f"Version '{current_version}' does not match version-regex '{versiononlyregex}'")
|
|
118
|
+
|
|
119
|
+
@GeneralUtilities.check_arguments
|
|
120
|
+
def push_nuget_build_artifact(self, nupkg_file: str, registry_address: str, api_key: str = None):
|
|
121
|
+
nupkg_file_name = os.path.basename(nupkg_file)
|
|
122
|
+
nupkg_file_folder = os.path.dirname(nupkg_file)
|
|
123
|
+
argument = f"nuget push {nupkg_file_name} --force-english-output --source {registry_address}"
|
|
124
|
+
if api_key is not None:
|
|
125
|
+
argument = f"{argument} --api-key {api_key}"
|
|
126
|
+
self.run_program("dotnet", argument, nupkg_file_folder)
|
|
127
|
+
|
|
128
|
+
@GeneralUtilities.check_arguments
|
|
129
|
+
def dotnet_build(self, folder: str, projectname: str, configuration: str):
|
|
130
|
+
self.run_program("dotnet", f"clean -c {configuration}", folder)
|
|
131
|
+
self.run_program("dotnet", f"build {projectname}/{projectname}.csproj -c {configuration}", folder)
|
|
132
|
+
|
|
133
|
+
@GeneralUtilities.check_arguments
|
|
134
|
+
def find_file_by_extension(self, folder: str, extension_without_dot: str):
|
|
135
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
136
|
+
result = [file for file in self.list_content(folder, True, False, False) if file.endswith(f".{extension_without_dot}")]
|
|
137
|
+
result_length = len(result)
|
|
138
|
+
if result_length == 0:
|
|
139
|
+
raise FileNotFoundError(f"No file available in folder '{folder}' with extension '{extension_without_dot}'.")
|
|
140
|
+
if result_length == 1:
|
|
141
|
+
return result[0]
|
|
142
|
+
else:
|
|
143
|
+
raise ValueError(f"Multiple values available in folder '{folder}' with extension '{extension_without_dot}'.")
|
|
144
|
+
|
|
145
|
+
@GeneralUtilities.check_arguments
|
|
146
|
+
def find_last_file_by_extension(self, folder: str, extension_without_dot: str) -> str:
|
|
147
|
+
files: list[str] = GeneralUtilities.get_direct_files_of_folder(folder)
|
|
148
|
+
possible_results: list[str] = []
|
|
149
|
+
for file in files:
|
|
150
|
+
if file.endswith(f".{extension_without_dot}"):
|
|
151
|
+
possible_results.append(file)
|
|
152
|
+
result_length = len(possible_results)
|
|
153
|
+
if result_length == 0:
|
|
154
|
+
raise FileNotFoundError(f"No file available in folder '{folder}' with extension '{extension_without_dot}'.")
|
|
155
|
+
else:
|
|
156
|
+
return possible_results[-1]
|
|
157
|
+
|
|
158
|
+
@GeneralUtilities.check_arguments
|
|
159
|
+
def commit_is_signed_by_key(self, repository_folder: str, revision_identifier: str, key: str) -> bool:
|
|
160
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
161
|
+
result = self.run_program("git", f"verify-commit {revision_identifier}", repository_folder, throw_exception_if_exitcode_is_not_zero=False)
|
|
162
|
+
if (result[0] != 0):
|
|
163
|
+
return False
|
|
164
|
+
if (not GeneralUtilities.contains_line(result[1].splitlines(), f"gpg\\:\\ using\\ [A-Za-z0-9]+\\ key\\ [A-Za-z0-9]+{key}")):
|
|
165
|
+
# TODO check whether this works on machines where gpg is installed in another langauge than english
|
|
166
|
+
return False
|
|
167
|
+
if (not GeneralUtilities.contains_line(result[1].splitlines(), "gpg\\:\\ Good\\ signature\\ from")):
|
|
168
|
+
# TODO check whether this works on machines where gpg is installed in another langauge than english
|
|
169
|
+
return False
|
|
170
|
+
return True
|
|
171
|
+
|
|
172
|
+
@GeneralUtilities.check_arguments
|
|
173
|
+
def get_parent_commit_ids_of_commit(self, repository_folder: str, commit_id: str) -> str:
|
|
174
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
175
|
+
return self.run_program("git", f'log --pretty=%P -n 1 "{commit_id}"', repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1].replace("\r", GeneralUtilities.empty_string).replace("\n", GeneralUtilities.empty_string).split(" ")
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
@GeneralUtilities.check_arguments
|
|
179
|
+
def get_commit_ids_between_dates(self, repository_folder: str, since: datetime, until: datetime, ignore_commits_which_are_not_in_history_of_head: bool = True) -> None:
|
|
180
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
181
|
+
since_as_string = self.__datetime_to_string_for_git(since)
|
|
182
|
+
until_as_string = self.__datetime_to_string_for_git(until)
|
|
183
|
+
result = filter(lambda line: not GeneralUtilities.string_is_none_or_whitespace(line), self.run_program("git", f'log --since "{since_as_string}" --until "{until_as_string}" --pretty=format:"%H" --no-patch', repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1].split("\n").replace("\r", GeneralUtilities.empty_string))
|
|
184
|
+
if ignore_commits_which_are_not_in_history_of_head:
|
|
185
|
+
result = [commit_id for commit_id in result if self.git_commit_is_ancestor(repository_folder, commit_id)]
|
|
186
|
+
return result
|
|
187
|
+
|
|
188
|
+
@GeneralUtilities.check_arguments
|
|
189
|
+
def __datetime_to_string_for_git(self, datetime_object: datetime) -> str:
|
|
190
|
+
return datetime_object.strftime('%Y-%m-%d %H:%M:%S')
|
|
191
|
+
|
|
192
|
+
@GeneralUtilities.check_arguments
|
|
193
|
+
def git_commit_is_ancestor(self, repository_folder: str, ancestor: str, descendant: str = "HEAD") -> bool:
|
|
194
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
195
|
+
result = self.run_program_argsasarray("git", ["merge-base", "--is-ancestor", ancestor, descendant], repository_folder, throw_exception_if_exitcode_is_not_zero=False)
|
|
196
|
+
exit_code = result[0]
|
|
197
|
+
if exit_code == 0:
|
|
198
|
+
return True
|
|
199
|
+
elif exit_code == 1:
|
|
200
|
+
return False
|
|
201
|
+
else:
|
|
202
|
+
raise ValueError(f'Can not calculate if {ancestor} is an ancestor of {descendant} in repository {repository_folder}. Outout of "{repository_folder}> git merge-base --is-ancestor {ancestor} {descendant}": Exitcode: {exit_code}; StdOut: {result[1]}; StdErr: {result[2]}.')
|
|
203
|
+
|
|
204
|
+
@GeneralUtilities.check_arguments
|
|
205
|
+
def __git_changes_helper(self, repository_folder: str, arguments_as_array: list[str]) -> bool:
|
|
206
|
+
self.assert_is_git_repository(repository_folder)
|
|
207
|
+
lines = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", arguments_as_array, repository_folder, throw_exception_if_exitcode_is_not_zero=True)[1], False)
|
|
208
|
+
for line in lines:
|
|
209
|
+
if GeneralUtilities.string_has_content(line):
|
|
210
|
+
return True
|
|
211
|
+
return False
|
|
212
|
+
|
|
213
|
+
@GeneralUtilities.check_arguments
|
|
214
|
+
def git_repository_has_new_untracked_files(self, repository_folder: str):
|
|
215
|
+
self.assert_is_git_repository(repository_folder)
|
|
216
|
+
return self.__git_changes_helper(repository_folder, ["ls-files", "--exclude-standard", "--others"])
|
|
217
|
+
|
|
218
|
+
@GeneralUtilities.check_arguments
|
|
219
|
+
def git_repository_has_unstaged_changes_of_tracked_files(self, repository_folder: str):
|
|
220
|
+
self.assert_is_git_repository(repository_folder)
|
|
221
|
+
return self.__git_changes_helper(repository_folder, ["--no-pager", "diff"])
|
|
222
|
+
|
|
223
|
+
@GeneralUtilities.check_arguments
|
|
224
|
+
def git_repository_has_staged_changes(self, repository_folder: str):
|
|
225
|
+
self.assert_is_git_repository(repository_folder)
|
|
226
|
+
return self.__git_changes_helper(repository_folder, ["--no-pager", "diff", "--cached"])
|
|
227
|
+
|
|
228
|
+
@GeneralUtilities.check_arguments
|
|
229
|
+
def git_repository_has_uncommitted_changes(self, repository_folder: str) -> bool:
|
|
230
|
+
self.assert_is_git_repository(repository_folder)
|
|
231
|
+
if (self.git_repository_has_unstaged_changes(repository_folder)):
|
|
232
|
+
return True
|
|
233
|
+
if (self.git_repository_has_staged_changes(repository_folder)):
|
|
234
|
+
return True
|
|
235
|
+
return False
|
|
236
|
+
|
|
237
|
+
@GeneralUtilities.check_arguments
|
|
238
|
+
def git_repository_has_unstaged_changes(self, repository_folder: str) -> bool:
|
|
239
|
+
self.assert_is_git_repository(repository_folder)
|
|
240
|
+
if (self.git_repository_has_unstaged_changes_of_tracked_files(repository_folder)):
|
|
241
|
+
return True
|
|
242
|
+
if (self.git_repository_has_new_untracked_files(repository_folder)):
|
|
243
|
+
return True
|
|
244
|
+
return False
|
|
245
|
+
|
|
246
|
+
@GeneralUtilities.check_arguments
|
|
247
|
+
def git_get_commit_id(self, repository_folder: str, commit: str = "HEAD") -> str:
|
|
248
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
249
|
+
result: tuple[int, str, str, int] = self.run_program_argsasarray("git", ["rev-parse", "--verify", commit], repository_folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
250
|
+
return result[1].replace('\n', '')
|
|
251
|
+
|
|
252
|
+
@GeneralUtilities.check_arguments
|
|
253
|
+
def git_get_commit_date(self, repository_folder: str, commit: str = "HEAD") -> datetime:
|
|
254
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
255
|
+
result: tuple[int, str, str, int] = self.run_program_argsasarray("git", ["show", "-s", "--format=%ci", commit], repository_folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
256
|
+
date_as_string = result[1].replace('\n', '')
|
|
257
|
+
result = datetime.strptime(date_as_string, '%Y-%m-%d %H:%M:%S %z')
|
|
258
|
+
return result
|
|
259
|
+
|
|
260
|
+
@GeneralUtilities.check_arguments
|
|
261
|
+
def git_fetch_with_retry(self, folder: str, remotename: str = "--all", amount_of_attempts: int = 5) -> None:
|
|
262
|
+
GeneralUtilities.retry_action(lambda: self.git_fetch(folder, remotename), amount_of_attempts)
|
|
263
|
+
|
|
264
|
+
@GeneralUtilities.check_arguments
|
|
265
|
+
def git_fetch(self, folder: str, remotename: str = "--all") -> None:
|
|
266
|
+
self.is_git_or_bare_git_repository(folder)
|
|
267
|
+
self.run_program_argsasarray("git", ["fetch", remotename, "--tags", "--prune"], folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
268
|
+
|
|
269
|
+
@GeneralUtilities.check_arguments
|
|
270
|
+
def git_fetch_in_bare_repository(self, folder: str, remotename, localbranch: str, remotebranch: str) -> None:
|
|
271
|
+
self.is_git_or_bare_git_repository(folder)
|
|
272
|
+
self.run_program_argsasarray("git", ["fetch", remotename, f"{remotebranch}:{localbranch}"], folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
273
|
+
|
|
274
|
+
@GeneralUtilities.check_arguments
|
|
275
|
+
def git_remove_branch(self, folder: str, branchname: str) -> None:
|
|
276
|
+
self.is_git_or_bare_git_repository(folder)
|
|
277
|
+
self.run_program("git", f"branch -D {branchname}", folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
278
|
+
|
|
279
|
+
@GeneralUtilities.check_arguments
|
|
280
|
+
def git_push_with_retry(self, folder: str, remotename: str, localbranchname: str, remotebranchname: str, forcepush: bool = False, pushalltags: bool = True, verbosity: LogLevel = LogLevel.Quiet, amount_of_attempts: int = 5) -> None:
|
|
281
|
+
GeneralUtilities.retry_action(lambda: self.git_push(folder, remotename, localbranchname, remotebranchname, forcepush, pushalltags, verbosity), amount_of_attempts)
|
|
282
|
+
|
|
283
|
+
@GeneralUtilities.check_arguments
|
|
284
|
+
def git_push(self, folder: str, remotename: str, localbranchname: str, remotebranchname: str, forcepush: bool = False, pushalltags: bool = True, verbosity: LogLevel = LogLevel.Quiet,resurse_submodules:bool=False) -> None:
|
|
285
|
+
self.is_git_or_bare_git_repository(folder)
|
|
286
|
+
argument = ["push"]
|
|
287
|
+
if resurse_submodules:
|
|
288
|
+
argument = argument + ["--recurse-submodules=on-demand"]
|
|
289
|
+
argument = argument + [remotename, f"{localbranchname}:{remotebranchname}"]
|
|
290
|
+
if (forcepush):
|
|
291
|
+
argument.append("--force")
|
|
292
|
+
if (pushalltags):
|
|
293
|
+
argument.append("--tags")
|
|
294
|
+
result: tuple[int, str, str, int] = self.run_program_argsasarray("git", argument, folder, throw_exception_if_exitcode_is_not_zero=True, print_errors_as_information=True)
|
|
295
|
+
return result[1].replace('\r', '').replace('\n', '')
|
|
296
|
+
|
|
297
|
+
@GeneralUtilities.check_arguments
|
|
298
|
+
def git_pull_with_retry(self, folder: str, remote: str, localbranchname: str, remotebranchname: str, force: bool = False, amount_of_attempts: int = 5) -> None:
|
|
299
|
+
GeneralUtilities.retry_action(lambda: self.git_pull(folder, remote, localbranchname, remotebranchname), amount_of_attempts)
|
|
300
|
+
|
|
301
|
+
@GeneralUtilities.check_arguments
|
|
302
|
+
def git_pull(self, folder: str, remote: str, localbranchname: str, remotebranchname: str, force: bool = False) -> None:
|
|
303
|
+
self.is_git_or_bare_git_repository(folder)
|
|
304
|
+
argument = f"pull {remote} {remotebranchname}:{localbranchname}"
|
|
305
|
+
if force:
|
|
306
|
+
argument = f"{argument} --force"
|
|
307
|
+
self.run_program("git", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
308
|
+
|
|
309
|
+
@GeneralUtilities.check_arguments
|
|
310
|
+
def git_list_remote_branches(self, folder: str, remote: str, fetch: bool) -> list[str]:
|
|
311
|
+
self.is_git_or_bare_git_repository(folder)
|
|
312
|
+
if fetch:
|
|
313
|
+
self.git_fetch(folder, remote)
|
|
314
|
+
run_program_result = self.run_program("git", f"branch -rl {remote}/*", folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
315
|
+
output = GeneralUtilities.string_to_lines(run_program_result[1])
|
|
316
|
+
result = list[str]()
|
|
317
|
+
for item in output:
|
|
318
|
+
striped_item = item.strip()
|
|
319
|
+
if GeneralUtilities.string_has_content(striped_item):
|
|
320
|
+
branch: str = None
|
|
321
|
+
if " " in striped_item:
|
|
322
|
+
branch = striped_item.split(" ")[0]
|
|
323
|
+
else:
|
|
324
|
+
branch = striped_item
|
|
325
|
+
branchname = branch[len(remote)+1:]
|
|
326
|
+
if branchname != "HEAD":
|
|
327
|
+
result.append(branchname)
|
|
328
|
+
return result
|
|
329
|
+
|
|
330
|
+
@GeneralUtilities.check_arguments
|
|
331
|
+
def git_clone(self, clone_target_folder: str, remote_repository_path: str, include_submodules: bool = True, mirror: bool = False) -> None:
|
|
332
|
+
if (os.path.isdir(clone_target_folder)):
|
|
333
|
+
pass # TODO throw error
|
|
334
|
+
else:
|
|
335
|
+
args = ["clone", remote_repository_path, clone_target_folder]
|
|
336
|
+
if include_submodules:
|
|
337
|
+
args.append("--recurse-submodules")
|
|
338
|
+
args.append("--remote-submodules")
|
|
339
|
+
if mirror:
|
|
340
|
+
args.append("--mirror")
|
|
341
|
+
self.run_program_argsasarray("git", args, os.getcwd(), throw_exception_if_exitcode_is_not_zero=True)
|
|
342
|
+
|
|
343
|
+
@GeneralUtilities.check_arguments
|
|
344
|
+
def git_get_all_remote_names(self, directory: str) -> list[str]:
|
|
345
|
+
self.is_git_or_bare_git_repository(directory)
|
|
346
|
+
result = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", ["remote"], directory, throw_exception_if_exitcode_is_not_zero=True)[1], False)
|
|
347
|
+
return result
|
|
348
|
+
|
|
349
|
+
@GeneralUtilities.check_arguments
|
|
350
|
+
def git_get_remote_url(self, directory: str, remote_name: str) -> str:
|
|
351
|
+
self.is_git_or_bare_git_repository(directory)
|
|
352
|
+
result = GeneralUtilities.string_to_lines(self.run_program_argsasarray("git", ["remote", "get-url", remote_name], directory, throw_exception_if_exitcode_is_not_zero=True)[1], False)
|
|
353
|
+
return result[0].replace('\n', '')
|
|
354
|
+
|
|
355
|
+
@GeneralUtilities.check_arguments
|
|
356
|
+
def repository_has_remote_with_specific_name(self, directory: str, remote_name: str) -> bool:
|
|
357
|
+
self.is_git_or_bare_git_repository(directory)
|
|
358
|
+
return remote_name in self.git_get_all_remote_names(directory)
|
|
359
|
+
|
|
360
|
+
@GeneralUtilities.check_arguments
|
|
361
|
+
def git_add_or_set_remote_address(self, directory: str, remote_name: str, remote_address: str) -> None:
|
|
362
|
+
self.assert_is_git_repository(directory)
|
|
363
|
+
if (self.repository_has_remote_with_specific_name(directory, remote_name)):
|
|
364
|
+
self.run_program_argsasarray("git", ['remote', 'set-url', 'remote_name', remote_address], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
365
|
+
else:
|
|
366
|
+
self.run_program_argsasarray("git", ['remote', 'add', remote_name, remote_address], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
367
|
+
|
|
368
|
+
@GeneralUtilities.check_arguments
|
|
369
|
+
def git_stage_all_changes(self, directory: str) -> None:
|
|
370
|
+
self.assert_is_git_repository(directory)
|
|
371
|
+
self.run_program_argsasarray("git", ["add", "-A"], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
372
|
+
|
|
373
|
+
@GeneralUtilities.check_arguments
|
|
374
|
+
def git_unstage_all_changes(self, directory: str) -> None:
|
|
375
|
+
self.assert_is_git_repository(directory)
|
|
376
|
+
self.run_program_argsasarray("git", ["reset"], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
377
|
+
# TODO check if this will also be done for submodules
|
|
378
|
+
|
|
379
|
+
@GeneralUtilities.check_arguments
|
|
380
|
+
def git_stage_file(self, directory: str, file: str) -> None:
|
|
381
|
+
self.assert_is_git_repository(directory)
|
|
382
|
+
self.run_program_argsasarray("git", ['stage', file], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
383
|
+
|
|
384
|
+
@GeneralUtilities.check_arguments
|
|
385
|
+
def git_unstage_file(self, directory: str, file: str) -> None:
|
|
386
|
+
self.assert_is_git_repository(directory)
|
|
387
|
+
self.run_program_argsasarray("git", ['reset', file], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
388
|
+
|
|
389
|
+
@GeneralUtilities.check_arguments
|
|
390
|
+
def git_discard_unstaged_changes_of_file(self, directory: str, file: str) -> None:
|
|
391
|
+
"""Caution: This method works really only for 'changed' files yet. So this method does not work properly for new or renamed files."""
|
|
392
|
+
self.assert_is_git_repository(directory)
|
|
393
|
+
self.run_program_argsasarray("git", ['checkout', file], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
394
|
+
|
|
395
|
+
@GeneralUtilities.check_arguments
|
|
396
|
+
def git_discard_all_unstaged_changes(self, directory: str) -> None:
|
|
397
|
+
"""Caution: This function executes 'git clean -df'. This can delete files which maybe should not be deleted. Be aware of that."""
|
|
398
|
+
self.assert_is_git_repository(directory)
|
|
399
|
+
self.run_program_argsasarray("git", ['clean', '-df'], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
400
|
+
self.run_program_argsasarray("git", ['checkout', '.'], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
401
|
+
# TODO check if this will also be done for submodules
|
|
402
|
+
|
|
403
|
+
@GeneralUtilities.check_arguments
|
|
404
|
+
def git_commit(self, directory: str, message: str = "Saved changes.", author_name: str = None, author_email: str = None, stage_all_changes: bool = True, no_changes_behavior: int = 0) -> str:
|
|
405
|
+
"""no_changes_behavior=0 => No commit; no_changes_behavior=1 => Commit anyway; no_changes_behavior=2 => Exception"""
|
|
406
|
+
self.assert_is_git_repository(directory)
|
|
407
|
+
author_name = GeneralUtilities.str_none_safe(author_name).strip()
|
|
408
|
+
author_email = GeneralUtilities.str_none_safe(author_email).strip()
|
|
409
|
+
argument = ['commit', '--quiet', '--allow-empty', '--message', message]
|
|
410
|
+
if (GeneralUtilities.string_has_content(author_name)):
|
|
411
|
+
argument.append(f'--author="{author_name} <{author_email}>"')
|
|
412
|
+
git_repository_has_uncommitted_changes = self.git_repository_has_uncommitted_changes(directory)
|
|
413
|
+
|
|
414
|
+
if git_repository_has_uncommitted_changes:
|
|
415
|
+
do_commit = True
|
|
416
|
+
if stage_all_changes:
|
|
417
|
+
self.git_stage_all_changes(directory)
|
|
418
|
+
else:
|
|
419
|
+
if no_changes_behavior == 0:
|
|
420
|
+
self.log.log(f"Commit '{message}' will not be done because there are no changes to commit in repository '{directory}'", LogLevel.Debug)
|
|
421
|
+
do_commit = False
|
|
422
|
+
elif no_changes_behavior == 1:
|
|
423
|
+
self.log.log(f"There are no changes to commit in repository '{directory}'. Commit '{message}' will be done anyway.", LogLevel.Debug)
|
|
424
|
+
do_commit = True
|
|
425
|
+
elif no_changes_behavior == 2:
|
|
426
|
+
raise RuntimeError(f"There are no changes to commit in repository '{directory}'. Commit '{message}' will not be done.")
|
|
427
|
+
else:
|
|
428
|
+
raise ValueError(f"Unknown value for no_changes_behavior: {GeneralUtilities.str_none_safe(no_changes_behavior)}")
|
|
429
|
+
|
|
430
|
+
if do_commit:
|
|
431
|
+
self.log.log(f"Commit changes in '{directory}'", LogLevel.Information)
|
|
432
|
+
self.run_program_argsasarray("git", argument, directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
433
|
+
|
|
434
|
+
return self.git_get_commit_id(directory)
|
|
435
|
+
|
|
436
|
+
def search_repository_folder(self,some_file_in_repository:str)->str:
|
|
437
|
+
current_path:str=os.path.dirname(some_file_in_repository)
|
|
438
|
+
enabled:bool=True
|
|
439
|
+
while enabled:
|
|
440
|
+
try:
|
|
441
|
+
current_path=GeneralUtilities.resolve_relative_path("..",current_path)
|
|
442
|
+
if self.is_git_repository(current_path):
|
|
443
|
+
return current_path
|
|
444
|
+
except:
|
|
445
|
+
enabled=False
|
|
446
|
+
raise ValueError(f"Can not find git-repository for folder \"{some_file_in_repository}\".")
|
|
447
|
+
|
|
448
|
+
|
|
449
|
+
@GeneralUtilities.check_arguments
|
|
450
|
+
def git_create_tag(self, directory: str, target_for_tag: str, tag: str, sign: bool = False, message: str = None) -> None:
|
|
451
|
+
self.is_git_or_bare_git_repository(directory)
|
|
452
|
+
argument = ["tag", tag, target_for_tag]
|
|
453
|
+
if sign:
|
|
454
|
+
if message is None:
|
|
455
|
+
message = f"Created {target_for_tag}"
|
|
456
|
+
argument.extend(["-s", '-m', message])
|
|
457
|
+
self.run_program_argsasarray("git", argument, directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
458
|
+
|
|
459
|
+
@GeneralUtilities.check_arguments
|
|
460
|
+
def git_delete_tag(self, directory: str, tag: str) -> None:
|
|
461
|
+
self.is_git_or_bare_git_repository(directory)
|
|
462
|
+
self.run_program_argsasarray("git", ["tag", "--delete", tag], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
463
|
+
|
|
464
|
+
@GeneralUtilities.check_arguments
|
|
465
|
+
def git_checkout(self, directory: str, branch: str, undo_all_changes_after_checkout: bool = True, assert_no_uncommitted_changes: bool = True) -> None:
|
|
466
|
+
self.assert_is_git_repository(directory)
|
|
467
|
+
if assert_no_uncommitted_changes:
|
|
468
|
+
GeneralUtilities.assert_condition(not self.git_repository_has_uncommitted_changes(directory), f"Repository '{directory}' has uncommitted changes.")
|
|
469
|
+
self.run_program_argsasarray("git", ["checkout", branch], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
470
|
+
self.run_program_argsasarray("git", ["submodule", "update", "--recursive"], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
471
|
+
if undo_all_changes_after_checkout:
|
|
472
|
+
self.git_undo_all_changes(directory)
|
|
473
|
+
|
|
474
|
+
@GeneralUtilities.check_arguments
|
|
475
|
+
def merge_repository(self, repository_folder: str, remote: str, branch: str, pull_first_if_there_are_no_uncommitted_changes: bool = True):
|
|
476
|
+
if pull_first_if_there_are_no_uncommitted_changes:
|
|
477
|
+
uncommitted_changes = self.git_repository_has_uncommitted_changes(repository_folder)
|
|
478
|
+
if not uncommitted_changes:
|
|
479
|
+
is_pullable: bool = self.git_commit_is_ancestor(repository_folder, branch, f"{remote}/{branch}")
|
|
480
|
+
if is_pullable:
|
|
481
|
+
self.git_pull(repository_folder, remote, branch, branch)
|
|
482
|
+
uncommitted_changes = self.git_repository_has_uncommitted_changes(repository_folder)
|
|
483
|
+
GeneralUtilities.assert_condition(not uncommitted_changes, f"Pulling remote \"{remote}\" in \"{repository_folder}\" caused new uncommitted files.")
|
|
484
|
+
self.git_checkout(repository_folder, branch)
|
|
485
|
+
self.git_commit(repository_folder, "Automatic commit due to merge")
|
|
486
|
+
self.git_fetch(repository_folder, remote)
|
|
487
|
+
self.git_merge(repository_folder, f"{remote}/{branch}", branch)
|
|
488
|
+
self.git_push_with_retry(repository_folder, remote, branch, branch)
|
|
489
|
+
self.git_checkout(repository_folder, branch)
|
|
490
|
+
|
|
491
|
+
@GeneralUtilities.check_arguments
|
|
492
|
+
def git_merge_abort(self, directory: str) -> None:
|
|
493
|
+
self.assert_is_git_repository(directory)
|
|
494
|
+
self.run_program_argsasarray("git", ["merge", "--abort"], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
495
|
+
|
|
496
|
+
@GeneralUtilities.check_arguments
|
|
497
|
+
def git_merge(self, directory: str, sourcebranch: str, targetbranch: str, fastforward: bool = True, commit: bool = True, commit_message: str = None, undo_all_changes_after_checkout: bool = True, assert_no_uncommitted_changes: bool = True) -> str:
|
|
498
|
+
self.assert_is_git_repository(directory)
|
|
499
|
+
self.git_checkout(directory, targetbranch, undo_all_changes_after_checkout, assert_no_uncommitted_changes)
|
|
500
|
+
args = ["merge"]
|
|
501
|
+
if not commit:
|
|
502
|
+
args.append("--no-commit")
|
|
503
|
+
if not fastforward:
|
|
504
|
+
args.append("--no-ff")
|
|
505
|
+
if commit_message is not None:
|
|
506
|
+
args.append("-m")
|
|
507
|
+
args.append(commit_message)
|
|
508
|
+
args.append(sourcebranch)
|
|
509
|
+
self.run_program_argsasarray("git", args, directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
510
|
+
self.run_program_argsasarray("git", ["submodule", "update"], directory, throw_exception_if_exitcode_is_not_zero=True)
|
|
511
|
+
return self.git_get_commit_id(directory)
|
|
512
|
+
|
|
513
|
+
@GeneralUtilities.check_arguments
|
|
514
|
+
def git_undo_all_changes(self, directory: str) -> None:
|
|
515
|
+
"""Caution: This function executes 'git clean -df'. This can delete files which maybe should not be deleted. Be aware of that."""
|
|
516
|
+
self.assert_is_git_repository(directory)
|
|
517
|
+
self.git_unstage_all_changes(directory)
|
|
518
|
+
self.git_discard_all_unstaged_changes(directory)
|
|
519
|
+
|
|
520
|
+
@GeneralUtilities.check_arguments
|
|
521
|
+
def git_fetch_or_clone_all_in_directory(self, source_directory: str, target_directory: str) -> None:
|
|
522
|
+
for subfolder in GeneralUtilities.get_direct_folders_of_folder(source_directory):
|
|
523
|
+
foldername = os.path.basename(subfolder)
|
|
524
|
+
if self.is_git_repository(subfolder):
|
|
525
|
+
source_repository = subfolder
|
|
526
|
+
target_repository = os.path.join(target_directory, foldername)
|
|
527
|
+
if os.path.isdir(target_directory):
|
|
528
|
+
# fetch
|
|
529
|
+
self.git_fetch(target_directory)
|
|
530
|
+
else:
|
|
531
|
+
# clone
|
|
532
|
+
self.git_clone(target_repository, source_repository, include_submodules=True, mirror=True)
|
|
533
|
+
|
|
534
|
+
def get_git_submodules(self, directory: str) -> list[str]:
|
|
535
|
+
self.is_git_or_bare_git_repository(directory)
|
|
536
|
+
e = self.run_program("git", "submodule status", directory)
|
|
537
|
+
result = []
|
|
538
|
+
for submodule_line in GeneralUtilities.string_to_lines(e[1], False, True):
|
|
539
|
+
result.append(submodule_line.split(' ')[1])
|
|
540
|
+
return result
|
|
541
|
+
|
|
542
|
+
@GeneralUtilities.check_arguments
|
|
543
|
+
def file_is_git_ignored(self, file_in_repository: str, repositorybasefolder: str) -> None:
|
|
544
|
+
self.is_git_or_bare_git_repository(repositorybasefolder)
|
|
545
|
+
exit_code = self.run_program_argsasarray("git", ['check-ignore', file_in_repository], repositorybasefolder, throw_exception_if_exitcode_is_not_zero=False)[0]
|
|
546
|
+
if (exit_code == 0):
|
|
547
|
+
return True
|
|
548
|
+
if (exit_code == 1):
|
|
549
|
+
return False
|
|
550
|
+
raise ValueError(f"Unable to calculate whether '{file_in_repository}' in repository '{repositorybasefolder}' is ignored due to git-exitcode {exit_code}.")
|
|
551
|
+
|
|
552
|
+
@GeneralUtilities.check_arguments
|
|
553
|
+
def git_discard_all_changes(self, repository: str) -> None:
|
|
554
|
+
self.assert_is_git_repository(repository)
|
|
555
|
+
self.run_program_argsasarray("git", ["reset", "HEAD", "."], repository, throw_exception_if_exitcode_is_not_zero=True)
|
|
556
|
+
self.run_program_argsasarray("git", ["checkout", "."], repository, throw_exception_if_exitcode_is_not_zero=True)
|
|
557
|
+
|
|
558
|
+
@GeneralUtilities.check_arguments
|
|
559
|
+
def git_get_current_branch_name(self, repository: str) -> str:
|
|
560
|
+
self.assert_is_git_repository(repository)
|
|
561
|
+
result = self.run_program_argsasarray("git", ["rev-parse", "--abbrev-ref", "HEAD"], repository, throw_exception_if_exitcode_is_not_zero=True)
|
|
562
|
+
return result[1].replace("\r", GeneralUtilities.empty_string).replace("\n", GeneralUtilities.empty_string)
|
|
563
|
+
|
|
564
|
+
@GeneralUtilities.check_arguments
|
|
565
|
+
def git_get_commitid_of_tag(self, repository: str, tag: str) -> str:
|
|
566
|
+
self.is_git_or_bare_git_repository(repository)
|
|
567
|
+
stdout = self.run_program_argsasarray("git", ["rev-list", "-n", "1", tag], repository)
|
|
568
|
+
result = stdout[1].replace("\r", GeneralUtilities.empty_string).replace("\n", GeneralUtilities.empty_string)
|
|
569
|
+
return result
|
|
570
|
+
|
|
571
|
+
@GeneralUtilities.check_arguments
|
|
572
|
+
def git_get_tags(self, repository: str) -> list[str]:
|
|
573
|
+
self.is_git_or_bare_git_repository(repository)
|
|
574
|
+
tags = [line.replace("\r", GeneralUtilities.empty_string) for line in self.run_program_argsasarray(
|
|
575
|
+
"git", ["tag"], repository)[1].split("\n") if len(line) > 0]
|
|
576
|
+
return tags
|
|
577
|
+
|
|
578
|
+
@GeneralUtilities.check_arguments
|
|
579
|
+
def git_move_tags_to_another_branch(self, repository: str, tag_source_branch: str, tag_target_branch: str, sign: bool = False, message: str = None) -> None:
|
|
580
|
+
self.is_git_or_bare_git_repository(repository)
|
|
581
|
+
tags = self.git_get_tags(repository)
|
|
582
|
+
tags_count = len(tags)
|
|
583
|
+
counter = 0
|
|
584
|
+
for tag in tags:
|
|
585
|
+
counter = counter+1
|
|
586
|
+
self.log.log(f"Process tag {counter}/{tags_count}.", LogLevel.Information)
|
|
587
|
+
# tag is on source-branch
|
|
588
|
+
if self.git_commit_is_ancestor(repository, tag, tag_source_branch):
|
|
589
|
+
commit_id_old = self.git_get_commitid_of_tag(repository, tag)
|
|
590
|
+
commit_date: datetime = self.git_get_commit_date(repository, commit_id_old)
|
|
591
|
+
date_as_string = self.__datetime_to_string_for_git(commit_date)
|
|
592
|
+
search_commit_result = self.run_program_argsasarray("git", ["log", f'--after="{date_as_string}"', f'--before="{date_as_string}"', "--pretty=format:%H", tag_target_branch], repository, throw_exception_if_exitcode_is_not_zero=False)
|
|
593
|
+
if search_commit_result[0] != 0 or not GeneralUtilities.string_has_nonwhitespace_content(search_commit_result[1]):
|
|
594
|
+
raise ValueError(f"Can not calculate corresponding commit for tag '{tag}'.")
|
|
595
|
+
commit_id_new = search_commit_result[1]
|
|
596
|
+
self.git_delete_tag(repository, tag)
|
|
597
|
+
self.git_create_tag(repository, commit_id_new, tag, sign, message)
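A hedged usage sketch (repository path and branch names illustrative); each tag that points to a commit on the source branch is deleted and re-created on the commit of the target branch that has the identical commit date:

sc = ScriptCollectionCore()  # assumed instance
sc.git_move_tags_to_another_branch("/path/to/repository", "main", "stable", sign=False)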
|
|
598
|
+
|
|
599
|
+
@GeneralUtilities.check_arguments
|
|
600
|
+
def get_current_git_branch_has_tag(self, repository_folder: str) -> bool:
|
|
601
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
602
|
+
result = self.run_program_argsasarray("git", ["describe", "--tags", "--abbrev=0"], repository_folder, throw_exception_if_exitcode_is_not_zero=False)
|
|
603
|
+
return result[0] == 0
|
|
604
|
+
|
|
605
|
+
@GeneralUtilities.check_arguments
|
|
606
|
+
def get_latest_git_tag(self, repository_folder: str) -> str:
|
|
607
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
608
|
+
result = self.run_program_argsasarray("git", ["describe", "--tags", "--abbrev=0"], repository_folder)
|
|
609
|
+
result = result[1].replace("\r", GeneralUtilities.empty_string).replace("\n", GeneralUtilities.empty_string)
|
|
610
|
+
return result
|
|
611
|
+
|
|
612
|
+
@GeneralUtilities.check_arguments
|
|
613
|
+
def get_staged_or_committed_git_ignored_files(self, repository_folder: str) -> list[str]:
|
|
614
|
+
self.assert_is_git_repository(repository_folder)
|
|
615
|
+
temp_result = self.run_program_argsasarray("git", ["ls-files", "-i", "-c", "--exclude-standard"], repository_folder)
|
|
616
|
+
temp_result = temp_result[1].replace("\r", GeneralUtilities.empty_string)
|
|
617
|
+
result = [line for line in temp_result.split("\n") if len(line) > 0]
|
|
618
|
+
return result
|
|
619
|
+
|
|
620
|
+
@GeneralUtilities.check_arguments
|
|
621
|
+
def git_repository_has_commits(self, repository_folder: str) -> bool:
|
|
622
|
+
self.assert_is_git_repository(repository_folder)
|
|
623
|
+
return self.run_program_argsasarray("git", ["rev-parse", "--verify", "HEAD"], repository_folder, throw_exception_if_exitcode_is_not_zero=False)[0] == 0
|
|
624
|
+
|
|
625
|
+
@GeneralUtilities.check_arguments
|
|
626
|
+
def run_git_command_in_repository_and_submodules(self, repository_folder: str, arguments: list[str]) -> None:
|
|
627
|
+
self.is_git_or_bare_git_repository(repository_folder)
|
|
628
|
+
self.run_program_argsasarray("git", arguments, repository_folder)
|
|
629
|
+
self.run_program_argsasarray("git", ["submodule", "foreach", "--recursive", "git"]+arguments, repository_folder)
|
|
630
|
+
|
|
631
|
+
@GeneralUtilities.check_arguments
|
|
632
|
+
def export_filemetadata(self, folder: str, target_file: str, encoding: str = "utf-8", filter_function=None) -> None:
|
|
633
|
+
folder = GeneralUtilities.resolve_relative_path_from_current_working_directory(folder)
|
|
634
|
+
lines = list()
|
|
635
|
+
path_prefix = len(folder)+1
|
|
636
|
+
items = dict()
|
|
637
|
+
for item in GeneralUtilities.get_all_folders_of_folder(folder):
|
|
638
|
+
items[item] = "d"
|
|
639
|
+
for item in GeneralUtilities.get_all_files_of_folder(folder):
|
|
640
|
+
items[item] = "f"
|
|
641
|
+
for file_or_folder, item_type in items.items():
|
|
642
|
+
truncated_file = file_or_folder[path_prefix:]
|
|
643
|
+
if (filter_function is None or filter_function(folder, truncated_file)):
|
|
644
|
+
owner_and_permission = self.get_file_owner_and_file_permission(file_or_folder)
|
|
645
|
+
user = owner_and_permission[0]
|
|
646
|
+
permissions = owner_and_permission[1]
|
|
647
|
+
lines.append(f"{truncated_file};{item_type};{user};{permissions}")
|
|
648
|
+
lines = sorted(lines, key=str.casefold)
|
|
649
|
+
with open(target_file, "w", encoding=encoding) as file_object:
|
|
650
|
+
file_object.write("\n".join(lines))
|
|
651
|
+
|
|
652
|
+
@GeneralUtilities.check_arguments
|
|
653
|
+
def escape_git_repositories_in_folder(self, folder: str) -> dict[str, str]:
|
|
654
|
+
return self.__escape_git_repositories_in_folder_internal(folder, dict[str, str]())
|
|
655
|
+
|
|
656
|
+
@GeneralUtilities.check_arguments
|
|
657
|
+
def __escape_git_repositories_in_folder_internal(self, folder: str, renamed_items: dict[str, str]) -> dict[str, str]:
|
|
658
|
+
for file in GeneralUtilities.get_direct_files_of_folder(folder):
|
|
659
|
+
filename = os.path.basename(file)
|
|
660
|
+
if ".git" in filename:
|
|
661
|
+
new_name = filename.replace(".git", ".gitx")
|
|
662
|
+
target = os.path.join(folder, new_name)
|
|
663
|
+
os.rename(file, target)
|
|
664
|
+
renamed_items[target] = file
|
|
665
|
+
for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
|
|
666
|
+
foldername = os.path.basename(subfolder)
|
|
667
|
+
if ".git" in foldername:
|
|
668
|
+
new_name = foldername.replace(".git", ".gitx")
|
|
669
|
+
subfolder2 = os.path.join(str(Path(subfolder).parent), new_name)
|
|
670
|
+
os.rename(subfolder, subfolder2)
|
|
671
|
+
renamed_items[subfolder2] = subfolder
|
|
672
|
+
else:
|
|
673
|
+
subfolder2 = subfolder
|
|
674
|
+
self.__escape_git_repositories_in_folder_internal(subfolder2, renamed_items)
|
|
675
|
+
return renamed_items
|
|
676
|
+
|
|
677
|
+
@GeneralUtilities.check_arguments
|
|
678
|
+
def deescape_git_repositories_in_folder(self, renamed_items: dict[str, str]):
|
|
679
|
+
for renamed_item, original_name in renamed_items.items():
|
|
680
|
+
os.rename(renamed_item, original_name)
|
|
681
|
+
|
|
682
|
+
@GeneralUtilities.check_arguments
|
|
683
|
+
def is_git_repository(self, folder: str) -> bool:
|
|
684
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
685
|
+
folder=folder.replace("\\","/")
|
|
686
|
+
if folder.endswith("/"):
|
|
687
|
+
folder = folder[:-1]
|
|
688
|
+
if not self.is_folder(folder):
|
|
689
|
+
raise ValueError(f"Folder '{folder}' does not exist.")
|
|
690
|
+
git_folder_path = f"{folder}/.git"
|
|
691
|
+
return self.is_folder(git_folder_path) or self.is_file(git_folder_path)
|
|
692
|
+
|
|
693
|
+
@GeneralUtilities.check_arguments
|
|
694
|
+
def is_bare_git_repository(self, folder: str) -> bool:
|
|
695
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
696
|
+
if folder.endswith("/") or folder.endswith("\\"):
|
|
697
|
+
folder = folder[:-1]
|
|
698
|
+
if not self.is_folder(folder):
|
|
699
|
+
raise ValueError(f"Folder '{folder}' does not exist.")
|
|
700
|
+
return folder.endswith(".git")
|
|
701
|
+
|
|
702
|
+
@GeneralUtilities.check_arguments
|
|
703
|
+
def is_git_or_bare_git_repository(self, folder: str) -> bool:
|
|
704
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
705
|
+
return self.is_git_repository(folder) or self.is_bare_git_repository(folder)
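A small sketch of the three checks (paths illustrative); a working clone is detected via its ".git" entry, which can be either a folder or, for submodules and worktrees, a file pointing to the real git directory, while a bare repository is recognized purely by its folder name ending in ".git":

sc = ScriptCollectionCore()  # assumed instance
sc.is_git_repository("/srv/myrepo")               # True for a working clone
sc.is_bare_git_repository("/srv/myrepo.git")      # True because the folder name ends with ".git"
sc.is_git_or_bare_git_repository("/srv/myrepo")   # True if either check succeeds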
|
|
706
|
+
|
|
707
|
+
@GeneralUtilities.check_arguments
|
|
708
|
+
def assert_is_git_repository(self, folder: str) -> None:
|
|
709
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
710
|
+
GeneralUtilities.assert_condition(self.is_git_repository(folder), f"'{folder}' is not a git-repository.")
|
|
711
|
+
|
|
712
|
+
@GeneralUtilities.check_arguments
|
|
713
|
+
def convert_git_repository_to_bare_repository(self, repository_folder: str):
|
|
714
|
+
repository_folder = repository_folder.replace("\\", "/")
|
|
715
|
+
self.assert_is_git_repository(repository_folder)
|
|
716
|
+
git_folder = repository_folder + "/.git"
|
|
717
|
+
if not self.is_folder(git_folder):
|
|
718
|
+
raise ValueError(f"Converting '{repository_folder}' to a bare repository not possible. The folder '{git_folder}' does not exist. Converting is currently only supported when the git-folder is a direct folder in a repository and not a reference to another location.")
|
|
719
|
+
target_folder: str = repository_folder + ".git"
|
|
720
|
+
GeneralUtilities.ensure_directory_exists(target_folder)
|
|
721
|
+
GeneralUtilities.move_content_of_folder(git_folder, target_folder)
|
|
722
|
+
GeneralUtilities.ensure_directory_does_not_exist(repository_folder)
|
|
723
|
+
self.run_program_argsasarray("git", ["config", "--bool", "core.bare", "true"], target_folder)
|
|
724
|
+
|
|
725
|
+
@GeneralUtilities.check_arguments
|
|
726
|
+
def assert_no_uncommitted_changes(self, repository_folder: str):
|
|
727
|
+
if self.git_repository_has_uncommitted_changes(repository_folder):
|
|
728
|
+
raise ValueError(f"Repository '{repository_folder}' has uncommitted changes.")
|
|
729
|
+
|
|
730
|
+
@GeneralUtilities.check_arguments
|
|
731
|
+
def list_content(self, path: str, include_files: bool, include_folder: bool, printonlynamewithoutpath: bool) -> list[str]:
|
|
732
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
733
|
+
result: list[str] = []
|
|
734
|
+
if self.program_runner.will_be_executed_locally():
|
|
735
|
+
if include_files:
|
|
736
|
+
result = result + GeneralUtilities.get_direct_files_of_folder(path)
|
|
737
|
+
if include_folder:
|
|
738
|
+
result = result + GeneralUtilities.get_direct_folders_of_folder(path)
|
|
739
|
+
else:
|
|
740
|
+
arguments = ["--path", path]
|
|
741
|
+
if not include_files:
|
|
742
|
+
arguments = arguments+["--excludefiles"]
|
|
743
|
+
if not include_folder:
|
|
744
|
+
arguments = arguments+["--excludedirectories"]
|
|
745
|
+
if printonlynamewithoutpath:
|
|
746
|
+
arguments = arguments+["--printonlynamewithoutpath"]
|
|
747
|
+
exit_code, stdout, stderr, _ = self.run_program_argsasarray("sclistfoldercontent", arguments)
|
|
748
|
+
if exit_code == 0:
|
|
749
|
+
for line in stdout.split("\n"):
|
|
750
|
+
normalized_line = line.replace("\r", "")
|
|
751
|
+
result.append(normalized_line)
|
|
752
|
+
else:
|
|
753
|
+
raise ValueError(f"Fatal error occurrs while checking whether file '{path}' exists. StdErr: '{stderr}'")
|
|
754
|
+
result = [item for item in result if GeneralUtilities.string_has_nonwhitespace_content(item)]
|
|
755
|
+
return result
|
|
756
|
+
|
|
757
|
+
@GeneralUtilities.check_arguments
|
|
758
|
+
def is_file(self, path: str) -> bool:
|
|
759
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
760
|
+
if self.program_runner.will_be_executed_locally():
|
|
761
|
+
return os.path.isfile(path) # works only locally, but much more performant than always running an external program
|
|
762
|
+
else:
|
|
763
|
+
exit_code, _, stderr, _ = self.run_program_argsasarray("scfileexists", ["--path", path], throw_exception_if_exitcode_is_not_zero=False) # works platform-indepent
|
|
764
|
+
if exit_code == 0:
|
|
765
|
+
return True
|
|
766
|
+
elif exit_code == 1:
|
|
767
|
+
raise ValueError(f"Not calculatable whether file '{path}' exists. StdErr: '{stderr}'")
|
|
768
|
+
elif exit_code == 2:
|
|
769
|
+
return False
|
|
770
|
+
raise ValueError(f"Fatal error occurrs while checking whether file '{path}' exists. StdErr: '{stderr}'")
|
|
771
|
+
|
|
772
|
+
@GeneralUtilities.check_arguments
|
|
773
|
+
def is_folder(self, path: str) -> bool:
|
|
774
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
775
|
+
if self.program_runner.will_be_executed_locally(): # works only locally, but much more performant than always running an external program
|
|
776
|
+
return os.path.isdir(path)
|
|
777
|
+
else:
|
|
778
|
+
exit_code, _, stderr, _ = self.run_program_argsasarray("scfolderexists", ["--path", path], throw_exception_if_exitcode_is_not_zero=False) # works platform-indepent
|
|
779
|
+
if exit_code == 0:
|
|
780
|
+
return True
|
|
781
|
+
elif exit_code == 1:
|
|
782
|
+
raise ValueError(f"Not calculatable whether folder '{path}' exists. StdErr: '{stderr}'")
|
|
783
|
+
elif exit_code == 2:
|
|
784
|
+
return False
|
|
785
|
+
raise ValueError(f"Fatal error occurrs while checking whether folder '{path}' exists. StdErr: '{stderr}'")
|
|
786
|
+
|
|
787
|
+
@GeneralUtilities.check_arguments
|
|
788
|
+
def get_file_content(self, path: str, encoding: str = "utf-8") -> str:
|
|
789
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
790
|
+
if self.program_runner.will_be_executed_locally():
|
|
791
|
+
return GeneralUtilities.read_text_from_file(path, encoding)
|
|
792
|
+
else:
|
|
793
|
+
result = self.run_program_argsasarray("scprintfilecontent", ["--path", path, "--encofing", encoding]) # works platform-indepent
|
|
794
|
+
return result[1].replace("\\n", "\n")
|
|
795
|
+
|
|
796
|
+
@GeneralUtilities.check_arguments
|
|
797
|
+
def set_file_content(self, path: str, content: str, encoding: str = "utf-8") -> None:
|
|
798
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
799
|
+
if self.program_runner.will_be_executed_locally():
|
|
800
|
+
GeneralUtilities.write_text_to_file(path, content, encoding)
|
|
801
|
+
else:
|
|
802
|
+
content_bytes = content.encode('utf-8')
|
|
803
|
+
base64_bytes = base64.b64encode(content_bytes)
|
|
804
|
+
base64_string = base64_bytes.decode('utf-8')
|
|
805
|
+
self.run_program_argsasarray("scsetfilecontent", ["--path", path, "--argumentisinbase64", "--content", base64_string]) # works platform-indepent
|
|
806
|
+
|
|
807
|
+
@GeneralUtilities.check_arguments
|
|
808
|
+
def remove(self, path: str) -> None:
|
|
809
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
810
|
+
if self.program_runner.will_be_executed_locally(): # works only locally, but much more performant than always running an external program
|
|
811
|
+
if os.path.isdir(path):
|
|
812
|
+
GeneralUtilities.ensure_directory_does_not_exist(path)
|
|
813
|
+
if os.path.isfile(path):
|
|
814
|
+
GeneralUtilities.ensure_file_does_not_exist(path)
|
|
815
|
+
else:
|
|
816
|
+
if self.is_file(path):
|
|
817
|
+
exit_code, _, stderr, _ = self.run_program_argsasarray("scremovefile", ["--path", path], throw_exception_if_exitcode_is_not_zero=False) # works platform-indepent
|
|
818
|
+
if exit_code != 0:
|
|
819
|
+
raise ValueError(f"Fatal error occurrs while removing file '{path}'. StdErr: '{stderr}'")
|
|
820
|
+
if self.is_folder(path):
|
|
821
|
+
exit_code, _, stderr, _ = self.run_program_argsasarray("scremovefolder", ["--path", path], throw_exception_if_exitcode_is_not_zero=False) # works platform-indepent
|
|
822
|
+
if exit_code != 0:
|
|
823
|
+
raise ValueError(f"Fatal error occurrs while removing folder '{path}'. StdErr: '{stderr}'")
|
|
824
|
+
|
|
825
|
+
@GeneralUtilities.check_arguments
|
|
826
|
+
def rename(self, source: str, target: str) -> None:
|
|
827
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
828
|
+
if self.program_runner.will_be_executed_locally(): # works only locally, but much more performant than always running an external program
|
|
829
|
+
os.rename(source, target)
|
|
830
|
+
else:
|
|
831
|
+
exit_code, _, stderr, _ = self.run_program_argsasarray("screname", ["--source", source, "--target", target], throw_exception_if_exitcode_is_not_zero=False) # works platform-indepent
|
|
832
|
+
if exit_code != 0:
|
|
833
|
+
raise ValueError(f"Fatal error occurrs while renaming '{source}' to '{target}'. StdErr: '{stderr}'")
|
|
834
|
+
|
|
835
|
+
@GeneralUtilities.check_arguments
|
|
836
|
+
def copy(self, source: str, target: str) -> None:
|
|
837
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
838
|
+
if self.program_runner.will_be_executed_locally(): # works only locally, but much more performant than always running an external program
|
|
839
|
+
if os.path.isfile(target) or os.path.isdir(target):
|
|
840
|
+
raise ValueError(f"Can not copy to '{target}' because the target already exists.")
|
|
841
|
+
if os.path.isfile(source):
|
|
842
|
+
shutil.copyfile(source, target)
|
|
843
|
+
elif os.path.isdir(source):
|
|
844
|
+
GeneralUtilities.ensure_directory_exists(target)
|
|
845
|
+
GeneralUtilities.copy_content_of_folder(source, target)
|
|
846
|
+
else:
|
|
847
|
+
raise ValueError(f"'{source}' can not be copied because the path does not exist.")
|
|
848
|
+
else:
|
|
849
|
+
exit_code, _, stderr, _ = self.run_program_argsasarray("sccopy", ["--source", source, "--target", target], throw_exception_if_exitcode_is_not_zero=False) # works platform-indepent
|
|
850
|
+
if exit_code != 0:
|
|
851
|
+
raise ValueError(f"Fatal error occurrs while copying '{source}' to '{target}'. StdErr: '{stderr}'")
|
|
852
|
+
|
|
853
|
+
@GeneralUtilities.check_arguments
|
|
854
|
+
def create_file(self, path: str, error_if_already_exists: bool, create_necessary_folder: bool) -> None:
|
|
855
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
856
|
+
if self.program_runner.will_be_executed_locally():
|
|
857
|
+
if not os.path.isabs(path):
|
|
858
|
+
path = os.path.join(os.getcwd(), path)
|
|
859
|
+
|
|
860
|
+
if os.path.isfile(path) and error_if_already_exists:
|
|
861
|
+
raise ValueError(f"File '{path}' already exists.")
|
|
862
|
+
|
|
863
|
+
# TODO maybe it should be checked if there is a folder with the same path which already exists.
|
|
864
|
+
|
|
865
|
+
folder = os.path.dirname(path)
|
|
866
|
+
|
|
867
|
+
if not os.path.isdir(folder):
|
|
868
|
+
if create_necessary_folder:
|
|
869
|
+
GeneralUtilities.ensure_directory_exists(folder) # TODO check if this also creates nested folders if required
|
|
870
|
+
else:
|
|
871
|
+
raise ValueError(f"Folder '{folder}' does not exist.")
|
|
872
|
+
|
|
873
|
+
GeneralUtilities.ensure_file_exists(path)
|
|
874
|
+
else:
|
|
875
|
+
arguments = ["--path", path]
|
|
876
|
+
|
|
877
|
+
if error_if_already_exists:
|
|
878
|
+
arguments = arguments+["--errorwhenexists"]
|
|
879
|
+
|
|
880
|
+
if create_necessary_folder:
|
|
881
|
+
arguments = arguments+["--createnecessaryfolder"]
|
|
882
|
+
|
|
883
|
+
exit_code, _, stderr, _ = self.run_program_argsasarray("sccreatefile", arguments, throw_exception_if_exitcode_is_not_zero=False) # works platform-indepent
|
|
884
|
+
if exit_code != 0:
|
|
885
|
+
raise ValueError(f"Fatal error occurrs while create file '{path}'. StdErr: '{stderr}'")
|
|
886
|
+
|
|
887
|
+
@GeneralUtilities.check_arguments
|
|
888
|
+
def create_folder(self, path: str, error_if_already_exists: bool, create_necessary_folder: bool) -> None:
|
|
889
|
+
"""This function works platform-independent also for non-local-executions if the ScriptCollection commandline-commands are available as global command on the target-system."""
|
|
890
|
+
if self.program_runner.will_be_executed_locally():
|
|
891
|
+
if not os.path.isabs(path):
|
|
892
|
+
path = os.path.join(os.getcwd(), path)
|
|
893
|
+
|
|
894
|
+
if os.path.isdir(path) and error_if_already_exists:
|
|
895
|
+
raise ValueError(f"Folder '{path}' already exists.")
|
|
896
|
+
|
|
897
|
+
# TODO maybe it should be checked if there is a file with the same path which already exists.
|
|
898
|
+
|
|
899
|
+
folder = os.path.dirname(path)
|
|
900
|
+
|
|
901
|
+
if not os.path.isdir(folder):
|
|
902
|
+
if create_necessary_folder:
|
|
903
|
+
GeneralUtilities.ensure_directory_exists(folder) # TODO check if this also creates nested folders if required
|
|
904
|
+
else:
|
|
905
|
+
raise ValueError(f"Folder '{folder}' does not exist.")
|
|
906
|
+
|
|
907
|
+
GeneralUtilities.ensure_directory_exists(path)
|
|
908
|
+
else:
|
|
909
|
+
arguments = ["--path", path]
|
|
910
|
+
|
|
911
|
+
if error_if_already_exists:
|
|
912
|
+
arguments = arguments+["--errorwhenexists"]
|
|
913
|
+
|
|
914
|
+
if create_necessary_folder:
|
|
915
|
+
arguments = arguments+["--createnecessaryfolder"]
|
|
916
|
+
|
|
917
|
+
exit_code, _, stderr, _ = self.run_program_argsasarray("sccreatefolder", arguments, throw_exception_if_exitcode_is_not_zero=False) # works platform-indepent
|
|
918
|
+
if exit_code != 0:
|
|
919
|
+
raise ValueError(f"Fatal error occurrs while create folder '{path}'. StdErr: '{stderr}'")
|
|
920
|
+
|
|
921
|
+
@GeneralUtilities.check_arguments
|
|
922
|
+
def __sort_fmd(self, line: str):
|
|
923
|
+
splitted: list = line.split(";")
|
|
924
|
+
filetype: str = splitted[1]
|
|
925
|
+
if filetype == "d":
|
|
926
|
+
return -1
|
|
927
|
+
if filetype == "f":
|
|
928
|
+
return 1
|
|
929
|
+
return 0
|
|
930
|
+
|
|
931
|
+
@GeneralUtilities.check_arguments
|
|
932
|
+
def restore_filemetadata(self, folder: str, source_file: str, strict=False, encoding: str = "utf-8", create_folder_is_not_exist: bool = True) -> None:
|
|
933
|
+
lines = GeneralUtilities.read_lines_from_file(source_file, encoding)
|
|
934
|
+
lines.sort(key=self.__sort_fmd)
|
|
935
|
+
for line in lines:
|
|
936
|
+
splitted: list = line.split(";")
|
|
937
|
+
full_path_of_file_or_folder: str = os.path.join(folder, splitted[0])
|
|
938
|
+
filetype: str = splitted[1]
|
|
939
|
+
user: str = splitted[2]
|
|
940
|
+
permissions: str = splitted[3]
|
|
941
|
+
if filetype == "d" and create_folder_is_not_exist and not os.path.isdir(full_path_of_file_or_folder):
|
|
942
|
+
GeneralUtilities.ensure_directory_exists(full_path_of_file_or_folder)
|
|
943
|
+
if (filetype == "f" and os.path.isfile(full_path_of_file_or_folder)) or (filetype == "d" and os.path.isdir(full_path_of_file_or_folder)):
|
|
944
|
+
self.set_owner(full_path_of_file_or_folder, user, os.name != 'nt')
|
|
945
|
+
self.set_permission(full_path_of_file_or_folder, permissions)
|
|
946
|
+
else:
|
|
947
|
+
if strict:
|
|
948
|
+
if filetype == "f":
|
|
949
|
+
filetype_full = "File"
|
|
950
|
+
elif filetype == "d":
|
|
951
|
+
filetype_full = "Directory"
|
|
952
|
+
else:
|
|
953
|
+
raise ValueError(f"Unknown filetype: {GeneralUtilities.str_none_safe(filetype)}")
|
|
954
|
+
raise ValueError(f"{filetype_full} '{full_path_of_file_or_folder}' does not exist")
|
|
955
|
+
|
|
956
|
+
@GeneralUtilities.check_arguments
|
|
957
|
+
def __calculate_lengh_in_seconds(self, filename: str, folder: str) -> float:
|
|
958
|
+
argument = ['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', filename]
|
|
959
|
+
result = self.run_program_argsasarray("ffprobe", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
960
|
+
return float(result[1].replace('\n', ''))
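The duration is taken from ffprobe's machine-readable output; a standalone sketch of the same call using only the standard library (file name illustrative):

import subprocess

completed = subprocess.run(
    ["ffprobe", "-v", "error", "-show_entries", "format=duration",
     "-of", "default=noprint_wrappers=1:nokey=1", "video.mp4"],
    capture_output=True, text=True, check=True)
length_in_seconds = float(completed.stdout.strip())  # e.g. "12.345000" -> 12.345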
|
|
961
|
+
|
|
962
|
+
@GeneralUtilities.check_arguments
|
|
963
|
+
def __create_thumbnails(self, filename: str, fps: str, folder: str, tempname_for_thumbnails: str) -> list[str]:
|
|
964
|
+
argument = ['-i', filename, '-r', fps, '-vf', 'scale=-1:120', '-vcodec', 'png', f'{tempname_for_thumbnails}-%002d.png']
|
|
965
|
+
self.run_program_argsasarray("ffmpeg", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
966
|
+
files = GeneralUtilities.get_direct_files_of_folder(folder)
|
|
967
|
+
result: list[str] = []
|
|
968
|
+
regex = "^"+re.escape(tempname_for_thumbnails)+"\\-\\d+\\.png$"
|
|
969
|
+
regex_for_files = re.compile(regex)
|
|
970
|
+
for file in files:
|
|
971
|
+
filename = os.path.basename(file)
|
|
972
|
+
if regex_for_files.match(filename):
|
|
973
|
+
result.append(file)
|
|
974
|
+
GeneralUtilities.assert_condition(0 < len(result), "No thumbnail-files found.")
|
|
975
|
+
return result
|
|
976
|
+
|
|
977
|
+
@GeneralUtilities.check_arguments
|
|
978
|
+
def __create_thumbnail(self, outputfilename: str, folder: str, length_in_seconds: float, tempname_for_thumbnails: str, amount_of_images: int) -> None:
|
|
979
|
+
duration = timedelta(seconds=length_in_seconds)
|
|
980
|
+
info = GeneralUtilities.timedelta_to_simple_string(duration)
|
|
981
|
+
next_square_number = GeneralUtilities.get_next_square_number(amount_of_images)
|
|
982
|
+
root = math.sqrt(next_square_number)
|
|
983
|
+
rows: int = int(root)
|
|
984
|
+
columns: int = int(root)
|
|
985
|
+
argument = ['-title', f'"{outputfilename} ({info})"', '-tile', f'{rows}x{columns}', f'{tempname_for_thumbnails}*.png', f'{outputfilename}.png']
|
|
986
|
+
self.run_program_argsasarray("montage", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
987
|
+
|
|
988
|
+
@GeneralUtilities.check_arguments
|
|
989
|
+
def __create_thumbnail2(self, outputfilename: str, folder: str, length_in_seconds: float, rows: int, columns: int, tempname_for_thumbnails: str, amount_of_images: int) -> None:
|
|
990
|
+
duration = timedelta(seconds=length_in_seconds)
|
|
991
|
+
info = GeneralUtilities.timedelta_to_simple_string(duration)
|
|
992
|
+
argument = ['-title', f'"{outputfilename} ({info})"', '-tile', f'{rows}x{columns}', f'{tempname_for_thumbnails}*.png', f'{outputfilename}.png']
|
|
993
|
+
self.run_program_argsasarray("montage", argument, folder, throw_exception_if_exitcode_is_not_zero=True)
|
|
994
|
+
|
|
995
|
+
@GeneralUtilities.check_arguments
|
|
996
|
+
def __roundup(self, x: float, places: int) -> float:
|
|
997
|
+
d = 10 ** places
|
|
998
|
+
if x < 0:
|
|
999
|
+
return math.floor(x * d) / d
|
|
1000
|
+
else:
|
|
1001
|
+
return math.ceil(x * d) / d
|
|
1002
|
+
|
|
1003
|
+
@GeneralUtilities.check_arguments
|
|
1004
|
+
def generate_thumbnail(self, file: str, frames_per_second: str, tempname_for_thumbnails: str = None, hook=None) -> None:
|
|
1005
|
+
if tempname_for_thumbnails is None:
|
|
1006
|
+
tempname_for_thumbnails = "t_"+str(uuid.uuid4())
|
|
1007
|
+
|
|
1008
|
+
file = GeneralUtilities.resolve_relative_path_from_current_working_directory(file)
|
|
1009
|
+
filename = os.path.basename(file)
|
|
1010
|
+
folder = os.path.dirname(file)
|
|
1011
|
+
filename_without_extension = Path(file).stem
|
|
1012
|
+
preview_files: list[str] = []
|
|
1013
|
+
try:
|
|
1014
|
+
length_in_seconds = self.__calculate_lengh_in_seconds(filename, folder)
|
|
1015
|
+
# frames per second, example: frames_per_second="20fps" => 20 frames per second
|
|
1016
|
+
frames_per_second = self.__roundup(float(frames_per_second[:-3]), 2)
|
|
1017
|
+
frames_per_second_as_string = str(frames_per_second)
|
|
1018
|
+
preview_files = self.__create_thumbnails(filename, frames_per_second_as_string, folder, tempname_for_thumbnails)
|
|
1019
|
+
if hook is not None:
|
|
1020
|
+
hook(file, preview_files)
|
|
1021
|
+
actual_amount_of_previewframes = len(preview_files)
|
|
1022
|
+
self.__create_thumbnail(filename_without_extension, folder, length_in_seconds, tempname_for_thumbnails, actual_amount_of_previewframes)
|
|
1023
|
+
finally:
|
|
1024
|
+
for thumbnail_to_delete in preview_files:
|
|
1025
|
+
os.remove(thumbnail_to_delete)
|
|
1026
|
+
|
|
1027
|
+
@GeneralUtilities.check_arguments
|
|
1028
|
+
def generate_thumbnail_by_amount_of_pictures(self, file: str, amount_of_columns: int, amount_of_rows: int, tempname_for_thumbnails: str = None, hook=None) -> None:
|
|
1029
|
+
if tempname_for_thumbnails is None:
|
|
1030
|
+
tempname_for_thumbnails = "t_"+str(uuid.uuid4())
|
|
1031
|
+
|
|
1032
|
+
file = GeneralUtilities.resolve_relative_path_from_current_working_directory(file)
|
|
1033
|
+
filename = os.path.basename(file)
|
|
1034
|
+
folder = os.path.dirname(file)
|
|
1035
|
+
filename_without_extension = Path(file).stem
|
|
1036
|
+
preview_files: list[str] = []
|
|
1037
|
+
try:
|
|
1038
|
+
length_in_seconds = self.__calculate_lengh_in_seconds(filename, folder)
|
|
1039
|
+
amount_of_previewframes = int(amount_of_columns*amount_of_rows)
|
|
1040
|
+
frames_per_second_as_string = f"{amounf_of_previewframes-2}/{length_in_seconds}"
|
|
1041
|
+
preview_files = self.__create_thumbnails(filename, frames_per_second_as_string, folder, tempname_for_thumbnails)
|
|
1042
|
+
if hook is not None:
|
|
1043
|
+
hook(file, preview_files)
|
|
1044
|
+
actual_amount_of_previewframes = len(preview_files)
|
|
1045
|
+
self.__create_thumbnail2(filename_without_extension, folder, length_in_seconds, amount_of_rows, amount_of_columns, tempname_for_thumbnails, actual_amount_of_previewframes)
|
|
1046
|
+
finally:
|
|
1047
|
+
for thumbnail_to_delete in preview_files:
|
|
1048
|
+
os.remove(thumbnail_to_delete)
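A usage sketch, assuming ffprobe, ffmpeg and ImageMagick's montage are available on PATH (file name illustrative); the trailing "fps" of the rate string is stripped internally, so "2fps" requests two preview frames per second:

sc = ScriptCollectionCore()  # assumed instance
sc.generate_thumbnail("video.mp4", "2fps")                      # rate-based preview sheet
sc.generate_thumbnail_by_amount_of_pictures("video.mp4", 4, 3)  # fixed grid of 4 columns x 3 rows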
|
|
1049
|
+
|
|
1050
|
+
@GeneralUtilities.check_arguments
|
|
1051
|
+
def extract_pdf_pages(self, file: str, from_page: int, to_page: int, outputfile: str) -> None:
|
|
1052
|
+
pdf_reader: PdfReader = PdfReader(file)
|
|
1053
|
+
pdf_writer: PdfWriter = PdfWriter()
|
|
1054
|
+
start = from_page
|
|
1055
|
+
end = to_page
|
|
1056
|
+
while start <= end:
|
|
1057
|
+
pdf_writer.add_page(pdf_reader.pages[start-1])
|
|
1058
|
+
start += 1
|
|
1059
|
+
with open(outputfile, 'wb') as out:
|
|
1060
|
+
pdf_writer.write(out)
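A usage sketch with illustrative file names; from_page and to_page are 1-based and inclusive, since page index start-1 is taken from the reader:

sc = ScriptCollectionCore()  # assumed instance
sc.extract_pdf_pages("input.pdf", 2, 5, "excerpt.pdf")  # copies pages 2..5 into excerpt.pdf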
|
|
1061
|
+
|
|
1062
|
+
@GeneralUtilities.check_arguments
|
|
1063
|
+
def merge_pdf_files(self, files: list[str], outputfile: str) -> None:
|
|
1064
|
+
# TODO add wildcard-option
|
|
1065
|
+
pdfFileMerger: PdfWriter = PdfWriter()
|
|
1066
|
+
for file in files:
|
|
1067
|
+
with open(file, "rb") as f:
|
|
1068
|
+
pdfFileMerger.append(f)
|
|
1069
|
+
with open(outputfile, "wb") as output:
|
|
1070
|
+
pdfFileMerger.write(output)
|
|
1071
|
+
pdfFileMerger.close()
|
|
1072
|
+
|
|
1073
|
+
@GeneralUtilities.check_arguments
|
|
1074
|
+
def pdf_to_image(self, file: str, outputfilename_without_extension: str) -> None:
|
|
1075
|
+
raise ValueError("Function currently not available")
|
|
1076
|
+
# PyMuPDF can be used for that but sometimes it throws
|
|
1077
|
+
# "ImportError: DLL load failed while importing _fitz: Das angegebene Modul wurde nicht gefunden."
|
|
1078
|
+
|
|
1079
|
+
# doc = None # fitz.open(file)
|
|
1080
|
+
# for i, page in enumerate(doc):
|
|
1081
|
+
# pix = page.get_pixmap()
|
|
1082
|
+
# img = Image.frombytes("RGB", [pix.width, pix.height], pix.samples)
|
|
1083
|
+
# img.save(f"{outputfilename_without_extension}_{i}.png", "PNG")
|
|
1084
|
+
|
|
1085
|
+
@GeneralUtilities.check_arguments
|
|
1086
|
+
def show_missing_files(self, folderA: str, folderB: str):
|
|
1087
|
+
for file in GeneralUtilities.get_missing_files(folderA, folderB):
|
|
1088
|
+
GeneralUtilities.write_message_to_stdout(file)
|
|
1089
|
+
|
|
1090
|
+
@GeneralUtilities.check_arguments
|
|
1091
|
+
def SCCreateEmptyFileWithSpecificSize(self, name: str, size_string: str) -> int:
|
|
1092
|
+
if size_string.isdigit():
|
|
1093
|
+
size = int(size_string)
|
|
1094
|
+
else:
|
|
1095
|
+
if len(size_string) >= 3:
|
|
1096
|
+
if (size_string.endswith("kb")):
|
|
1097
|
+
size = int(size_string[:-2]) * pow(10, 3)
|
|
1098
|
+
elif (size_string.endswith("mb")):
|
|
1099
|
+
size = int(size_string[:-2]) * pow(10, 6)
|
|
1100
|
+
elif (size_string.endswith("gb")):
|
|
1101
|
+
size = int(size_string[:-2]) * pow(10, 9)
|
|
1102
|
+
elif (size_string.endswith("kib")):
|
|
1103
|
+
size = int(size_string[:-3]) * pow(2, 10)
|
|
1104
|
+
elif (size_string.endswith("mib")):
|
|
1105
|
+
size = int(size_string[:-3]) * pow(2, 20)
|
|
1106
|
+
elif (size_string.endswith("gib")):
|
|
1107
|
+
size = int(size_string[:-3]) * pow(2, 30)
|
|
1108
|
+
else:
|
|
1109
|
+
self.log.log("Wrong format", LogLevel.Error)
|
|
1110
|
+
return 1
|
|
1111
|
+
else:
|
|
1112
|
+
self.log.log("Wrong format", LogLevel.Error)
|
|
1113
|
+
return 1
|
|
1114
|
+
with open(name, "wb") as f:
|
|
1115
|
+
f.seek(size-1)
|
|
1116
|
+
f.write(b"\0")
|
|
1117
|
+
return 0
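The suffixes are interpreted decimally for kb/mb/gb and binarily for kib/mib/gib, and seeking to size-1 before writing a single byte yields a (possibly sparse) file of exactly that size. A usage sketch:

sc = ScriptCollectionCore()  # assumed instance
sc.SCCreateEmptyFileWithSpecificSize("placeholder1.bin", "10mb")   # 10 * 10**6 = 10000000 bytes
sc.SCCreateEmptyFileWithSpecificSize("placeholder2.bin", "10mib")  # 10 * 2**20 = 10485760 bytes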
|
|
1118
|
+
|
|
1119
|
+
@GeneralUtilities.check_arguments
|
|
1120
|
+
def SCCreateHashOfAllFiles(self, folder: str) -> None:
|
|
1121
|
+
for file in GeneralUtilities.absolute_file_paths(folder):
|
|
1122
|
+
with open(file+".sha256", "w+", encoding="utf-8") as f:
|
|
1123
|
+
f.write(GeneralUtilities.get_sha256_of_file(file))
|
|
1124
|
+
|
|
1125
|
+
@GeneralUtilities.check_arguments
|
|
1126
|
+
def SCCreateSimpleMergeWithoutRelease(self, repository: str, sourcebranch: str, targetbranch: str, remotename: str, remove_source_branch: bool) -> None:
|
|
1127
|
+
commitid = self.git_merge(repository, sourcebranch, targetbranch, False, True)
|
|
1128
|
+
self.git_merge(repository, targetbranch, sourcebranch, True, True)
|
|
1129
|
+
created_version = self.get_semver_version_from_gitversion(repository)
|
|
1130
|
+
self.git_create_tag(repository, commitid, f"v{created_version}", True)
|
|
1131
|
+
self.git_push(repository, remotename, targetbranch, targetbranch, False, True)
|
|
1132
|
+
if (GeneralUtilities.string_has_nonwhitespace_content(remotename)):
|
|
1133
|
+
self.git_push(repository, remotename, sourcebranch, sourcebranch, False, True)
|
|
1134
|
+
if (remove_source_branch):
|
|
1135
|
+
self.git_remove_branch(repository, sourcebranch)
|
|
1136
|
+
|
|
1137
|
+
@GeneralUtilities.check_arguments
|
|
1138
|
+
def sc_organize_lines_in_file(self, file: str, encoding: str, sort: bool = False, remove_duplicated_lines: bool = False, ignore_first_line: bool = False, remove_empty_lines: bool = True, ignored_start_character: list = list()) -> int:
|
|
1139
|
+
if os.path.isfile(file):
|
|
1140
|
+
|
|
1141
|
+
# read file
|
|
1142
|
+
lines = GeneralUtilities.read_lines_from_file(file, encoding)
|
|
1143
|
+
if (len(lines) == 0):
|
|
1144
|
+
return 0
|
|
1145
|
+
|
|
1146
|
+
# store first line if desired
|
|
1147
|
+
|
|
1148
|
+
if (ignore_first_line):
|
|
1149
|
+
first_line = lines.pop(0)
|
|
1150
|
+
|
|
1151
|
+
# remove empty lines if desired
|
|
1152
|
+
if remove_empty_lines:
|
|
1153
|
+
temp = lines
|
|
1154
|
+
lines = []
|
|
1155
|
+
for line in temp:
|
|
1156
|
+
if (not (GeneralUtilities.string_is_none_or_whitespace(line))):
|
|
1157
|
+
lines.append(line)
|
|
1158
|
+
|
|
1159
|
+
# remove duplicated lines if desired
|
|
1160
|
+
if remove_duplicated_lines:
|
|
1161
|
+
lines = GeneralUtilities.remove_duplicates(lines)
|
|
1162
|
+
|
|
1163
|
+
# sort lines if desired
|
|
1164
|
+
if sort:
|
|
1165
|
+
lines = sorted(lines, key=lambda singleline: self.__adapt_line_for_sorting(singleline, ignored_start_character))
|
|
1166
|
+
|
|
1167
|
+
# reinsert first line
|
|
1168
|
+
if ignore_first_line:
|
|
1169
|
+
lines.insert(0, first_line)
|
|
1170
|
+
|
|
1171
|
+
# write result to file
|
|
1172
|
+
GeneralUtilities.write_lines_to_file(file, lines, encoding)
|
|
1173
|
+
|
|
1174
|
+
return 0
|
|
1175
|
+
else:
|
|
1176
|
+
self.log.log(f"File '{file}' does not exist.", LogLevel.Error)
|
|
1177
|
+
return 1
|
|
1178
|
+
|
|
1179
|
+
@GeneralUtilities.check_arguments
|
|
1180
|
+
def __adapt_line_for_sorting(self, line: str, ignored_start_characters: list):
|
|
1181
|
+
result = line.lower()
|
|
1182
|
+
while len(result) > 0 and result[0] in ignored_start_characters:
|
|
1183
|
+
result = result[1:]
|
|
1184
|
+
return result
|
|
1185
|
+
|
|
1186
|
+
@GeneralUtilities.check_arguments
|
|
1187
|
+
def SCGenerateSnkFiles(self, outputfolder, keysize=4096, amountofkeys=10) -> int:
|
|
1188
|
+
GeneralUtilities.ensure_directory_exists(outputfolder)
|
|
1189
|
+
for _ in range(amountofkeys):
|
|
1190
|
+
file = os.path.join(outputfolder, str(uuid.uuid4())+".snk")
|
|
1191
|
+
argument = f"-k {keysize} {file}"
|
|
1192
|
+
self.run_program("sn", argument, outputfolder)
|
|
1193
|
+
|
|
1194
|
+
@GeneralUtilities.check_arguments
|
|
1195
|
+
def __merge_files(self, sourcefile: str, targetfile: str) -> None:
|
|
1196
|
+
with open(sourcefile, "rb") as f:
|
|
1197
|
+
source_data = f.read()
|
|
1198
|
+
with open(targetfile, "ab") as fout:
|
|
1199
|
+
merge_separator = [0x0A]
|
|
1200
|
+
fout.write(bytes(merge_separator))
|
|
1201
|
+
fout.write(source_data)
|
|
1202
|
+
|
|
1203
|
+
@GeneralUtilities.check_arguments
|
|
1204
|
+
def __process_file(self, file: str, substringInFilename: str, newSubstringInFilename: str, conflictResolveMode: str) -> None:
|
|
1205
|
+
new_filename = os.path.join(os.path.dirname(file), os.path.basename(file).replace(substringInFilename, newSubstringInFilename))
|
|
1206
|
+
if file != new_filename:
|
|
1207
|
+
if os.path.isfile(new_filename):
|
|
1208
|
+
if filecmp.cmp(file, new_filename):
|
|
1209
|
+
send2trash.send2trash(file)
|
|
1210
|
+
else:
|
|
1211
|
+
if conflictResolveMode == "ignore":
|
|
1212
|
+
pass
|
|
1213
|
+
elif conflictResolveMode == "preservenewest":
|
|
1214
|
+
if (os.path.getmtime(file) - os.path.getmtime(new_filename) > 0):
|
|
1215
|
+
send2trash.send2trash(file)
|
|
1216
|
+
else:
|
|
1217
|
+
send2trash.send2trash(new_filename)
|
|
1218
|
+
os.rename(file, new_filename)
|
|
1219
|
+
elif (conflictResolveMode == "merge"):
|
|
1220
|
+
self.__merge_files(file, new_filename)
|
|
1221
|
+
send2trash.send2trash(file)
|
|
1222
|
+
else:
|
|
1223
|
+
raise ValueError('Unknown conflict resolve mode')
|
|
1224
|
+
else:
|
|
1225
|
+
os.rename(file, new_filename)
|
|
1226
|
+
|
|
1227
|
+
@GeneralUtilities.check_arguments
|
|
1228
|
+
def SCReplaceSubstringsInFilenames(self, folder: str, substringInFilename: str, newSubstringInFilename: str, conflictResolveMode: str) -> None:
|
|
1229
|
+
for file in GeneralUtilities.absolute_file_paths(folder):
|
|
1230
|
+
self.__process_file(file, substringInFilename, newSubstringInFilename, conflictResolveMode)
|
|
1231
|
+
|
|
1232
|
+
@GeneralUtilities.check_arguments
|
|
1233
|
+
def __check_file(self, file: str, searchstring: str) -> None:
|
|
1234
|
+
bytes_ascii = bytes(searchstring, "ascii")
|
|
1235
|
+
# often called "unicode-encoding"
|
|
1236
|
+
bytes_utf16 = bytes(searchstring, "utf-16")
|
|
1237
|
+
bytes_utf8 = bytes(searchstring, "utf-8")
|
|
1238
|
+
with open(file, mode='rb') as file_object:
|
|
1239
|
+
content = file_object.read()
|
|
1240
|
+
if bytes_ascii in content:
|
|
1241
|
+
GeneralUtilities.write_message_to_stdout(file)
|
|
1242
|
+
elif bytes_utf16 in content:
|
|
1243
|
+
GeneralUtilities.write_message_to_stdout(file)
|
|
1244
|
+
elif bytes_utf8 in content:
|
|
1245
|
+
GeneralUtilities.write_message_to_stdout(file)
|
|
1246
|
+
|
|
1247
|
+
@GeneralUtilities.check_arguments
|
|
1248
|
+
def SCSearchInFiles(self, folder: str, searchstring: str) -> None:
|
|
1249
|
+
for file in GeneralUtilities.absolute_file_paths(folder):
|
|
1250
|
+
self.__check_file(file, searchstring)
|
|
1251
|
+
|
|
1252
|
+
@GeneralUtilities.check_arguments
|
|
1253
|
+
def get_string_as_qr_code(self, string: str) -> str:
|
|
1254
|
+
qr = qrcode.QRCode()
|
|
1255
|
+
qr.add_data(string)
|
|
1256
|
+
f = io.StringIO()
|
|
1257
|
+
qr.print_ascii(out=f)
|
|
1258
|
+
f.seek(0)
|
|
1259
|
+
return f.read()
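A usage sketch; the qrcode package renders the code as ASCII art into a StringIO buffer, so the returned string can be printed directly to a terminal:

sc = ScriptCollectionCore()  # assumed instance
print(sc.get_string_as_qr_code("https://example.com"))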
|
|
1260
|
+
|
|
1261
|
+
@GeneralUtilities.check_arguments
|
|
1262
|
+
def __print_qr_code_by_csv_line(self, displayname: str, website: str, emailaddress: str, key: str, period: str) -> None:
|
|
1263
|
+
qrcode_content = f"otpauth://totp/{website}:{emailaddress}?secret={key}&issuer={displayname}&period={period}"
|
|
1264
|
+
GeneralUtilities.write_message_to_stdout(f"{displayname} ({emailaddress}):")
|
|
1265
|
+
GeneralUtilities.write_message_to_stdout(qrcode_content)
|
|
1266
|
+
qr = qrcode.QRCode()
|
|
1267
|
+
qr.add_data(qrcode_content)
|
|
1268
|
+
f = io.StringIO()
|
|
1269
|
+
qr.print_ascii(out=f)
|
|
1270
|
+
f.seek(0)
|
|
1271
|
+
GeneralUtilities.write_message_to_stdout(f.read())
|
|
1272
|
+
|
|
1273
|
+
@GeneralUtilities.check_arguments
|
|
1274
|
+
def SCShow2FAAsQRCode(self, csvfile: str) -> None:
|
|
1275
|
+
lines = GeneralUtilities.read_csv_file(csvfile, True)
|
|
1276
|
+
lines.sort(key=lambda items: ''.join(items).lower())
|
|
1277
|
+
for line in lines:
|
|
1278
|
+
self.__print_qr_code_by_csv_line(line[0], line[1], line[2], line[3], line[4])
|
|
1279
|
+
GeneralUtilities.write_message_to_stdout(GeneralUtilities.get_longline())
|
|
1280
|
+
|
|
1281
|
+
@GeneralUtilities.check_arguments
|
|
1282
|
+
def SCCalculateBitcoinBlockHash(self, block_version_number: str, previousblockhash: str, transactionsmerkleroot: str, timestamp: str, target: str, nonce: str) -> str:
|
|
1283
|
+
# Example-values:
|
|
1284
|
+
# block_version_number: "00000020"
|
|
1285
|
+
# previousblockhash: "66720b99e07d284bd4fe67ff8c49a5db1dd8514fcdab61000000000000000000"
|
|
1286
|
+
# transactionsmerkleroot: "7829844f4c3a41a537b3131ca992643eaa9d093b2383e4cdc060ad7dc5481187"
|
|
1287
|
+
# timestamp: "51eb505a"
|
|
1288
|
+
# target: "c1910018"
|
|
1289
|
+
# nonce: "de19b302"
|
|
1290
|
+
header = str(block_version_number + previousblockhash + transactionsmerkleroot + timestamp + target + nonce)
|
|
1291
|
+
return binascii.hexlify(hashlib.sha256(hashlib.sha256(binascii.unhexlify(header)).digest()).digest()[::-1]).decode('utf-8')
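The block header is the concatenation of the six hex-encoded fields, hashed twice with SHA-256 and displayed byte-reversed; a standalone sketch using the example values from the comments above (the resulting digest is intentionally not spelled out here):

import binascii
import hashlib

header_hex = ("00000020"
              "66720b99e07d284bd4fe67ff8c49a5db1dd8514fcdab61000000000000000000"
              "7829844f4c3a41a537b3131ca992643eaa9d093b2383e4cdc060ad7dc5481187"
              "51eb505a" "c1910018" "de19b302")
double_sha256 = hashlib.sha256(hashlib.sha256(binascii.unhexlify(header_hex)).digest()).digest()
block_hash = binascii.hexlify(double_sha256[::-1]).decode("utf-8")  # reversed byte order, as usual for block hashes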
|
|
1292
|
+
|
|
1293
|
+
@GeneralUtilities.check_arguments
|
|
1294
|
+
def SCChangeHashOfProgram(self, inputfile: str) -> None:
|
|
1295
|
+
valuetoappend = str(uuid.uuid4())
|
|
1296
|
+
|
|
1297
|
+
outputfile = inputfile + '.modified'
|
|
1298
|
+
|
|
1299
|
+
shutil.copy2(inputfile, outputfile)
|
|
1300
|
+
with open(outputfile, 'a', encoding="utf-8") as file:
|
|
1301
|
+
# TODO use rcedit for .exe-files instead of appending valuetoappend ( https://github.com/electron/rcedit/ )
|
|
1302
|
+
# background: you can retrieve the "original-filename" from the .exe-file like discussed here:
|
|
1303
|
+
# https://security.stackexchange.com/questions/210843/is-it-possible-to-change-original-filename-of-an-exe
|
|
1304
|
+
# so removing the original filename with rcedit is probably a better way to make it more difficult to detect the programname.
|
|
1305
|
+
# this would obviously also change the hashvalue of the program so appending a whitespace is not required anymore.
|
|
1306
|
+
file.write(valuetoappend)
|
|
1307
|
+
|
|
1308
|
+
@GeneralUtilities.check_arguments
|
|
1309
|
+
def __adjust_folder_name(self, folder: str) -> str:
|
|
1310
|
+
result = os.path.dirname(folder).replace("\\", "/")
|
|
1311
|
+
if result == "/":
|
|
1312
|
+
return GeneralUtilities.empty_string
|
|
1313
|
+
else:
|
|
1314
|
+
return result
|
|
1315
|
+
|
|
1316
|
+
@GeneralUtilities.check_arguments
|
|
1317
|
+
def __create_iso(self, folder, iso_file) -> None:
|
|
1318
|
+
created_directories = []
|
|
1319
|
+
files_directory = "FILES"
|
|
1320
|
+
iso = pycdlib.PyCdlib()
|
|
1321
|
+
iso.new()
|
|
1322
|
+
files_directory = files_directory.upper()
|
|
1323
|
+
iso.add_directory("/" + files_directory)
|
|
1324
|
+
created_directories.append("/" + files_directory)
|
|
1325
|
+
for root, _, files in os.walk(folder):
|
|
1326
|
+
for file in files:
|
|
1327
|
+
full_path = os.path.join(root, file)
|
|
1328
|
+
with (open(full_path, "rb").read()) as text_io_wrapper:
|
|
1329
|
+
content = binary_file.read()
|
|
1330
|
+
path_in_iso = '/' + files_directory + \
|
|
1331
|
+
self.__adjust_folder_name(full_path[len(folder)::1]).upper()
|
|
1332
|
+
if path_in_iso not in created_directories:
|
|
1333
|
+
iso.add_directory(path_in_iso)
|
|
1334
|
+
created_directories.append(path_in_iso)
|
|
1335
|
+
iso.add_fp(BytesIO(content), len(content), path_in_iso + '/' + file.upper() + ';1')
|
|
1336
|
+
iso.write(iso_file)
|
|
1337
|
+
iso.close()
|
|
1338
|
+
|
|
1339
|
+
@GeneralUtilities.check_arguments
|
|
1340
|
+
def SCCreateISOFileWithObfuscatedFiles(self, inputfolder: str, outputfile: str, printtableheadline, createisofile, extensions) -> None:
|
|
1341
|
+
if (os.path.isdir(inputfolder)):
|
|
1342
|
+
namemappingfile = "name_map.csv"
|
|
1343
|
+
files_directory = inputfolder
|
|
1344
|
+
files_directory_obf = f"{files_directory}_Obfuscated"
|
|
1345
|
+
self.SCObfuscateFilesFolder(
|
|
1346
|
+
inputfolder, printtableheadline, namemappingfile, extensions)
|
|
1347
|
+
os.rename(namemappingfile, os.path.join(
|
|
1348
|
+
files_directory_obf, namemappingfile))
|
|
1349
|
+
if createisofile:
|
|
1350
|
+
self.__create_iso(files_directory_obf, outputfile)
|
|
1351
|
+
shutil.rmtree(files_directory_obf)
|
|
1352
|
+
else:
|
|
1353
|
+
raise ValueError(f"Directory not found: '{inputfolder}'")
|
|
1354
|
+
|
|
1355
|
+
@GeneralUtilities.check_arguments
|
|
1356
|
+
def SCFilenameObfuscator(self, inputfolder: str, printtableheadline, namemappingfile: str, extensions: str) -> None:
|
|
1357
|
+
obfuscate_all_files = extensions == "*"
|
|
1358
|
+
if (obfuscate_all_files):
|
|
1359
|
+
obfuscate_file_extensions = None
|
|
1360
|
+
else:
|
|
1361
|
+
obfuscate_file_extensions = extensions.split(",")
|
|
1362
|
+
if (os.path.isdir(inputfolder)):
|
|
1363
|
+
printtableheadline = GeneralUtilities.string_to_boolean(
|
|
1364
|
+
printtableheadline)
|
|
1365
|
+
files = []
|
|
1366
|
+
if not os.path.isfile(namemappingfile):
|
|
1367
|
+
with open(namemappingfile, "a", encoding="utf-8"):
|
|
1368
|
+
pass
|
|
1369
|
+
if printtableheadline:
|
|
1370
|
+
GeneralUtilities.append_line_to_file(
|
|
1371
|
+
namemappingfile, "Original filename;new filename;SHA2-hash of file")
|
|
1372
|
+
for file in GeneralUtilities.absolute_file_paths(inputfolder):
|
|
1373
|
+
if os.path.isfile(os.path.join(inputfolder, file)):
|
|
1374
|
+
if obfuscate_all_files or self.__extension_matchs(file, obfuscate_file_extensions):
|
|
1375
|
+
files.append(file)
|
|
1376
|
+
for file in files:
|
|
1377
|
+
hash_value = GeneralUtilities.get_sha256_of_file(file)
|
|
1378
|
+
extension = Path(file).suffix
|
|
1379
|
+
new_file_name_without_path = str(uuid.uuid4())[0:8] + extension
|
|
1380
|
+
new_file_name = os.path.join(
|
|
1381
|
+
os.path.dirname(file), new_file_name_without_path)
|
|
1382
|
+
os.rename(file, new_file_name)
|
|
1383
|
+
GeneralUtilities.append_line_to_file(namemappingfile, os.path.basename(file) + ";" + new_file_name_without_path + ";" + hash_value)
|
|
1384
|
+
else:
|
|
1385
|
+
raise ValueError(f"Directory not found: '{inputfolder}'")
|
|
1386
|
+
|
|
1387
|
+
@GeneralUtilities.check_arguments
|
|
1388
|
+
def __extension_matchs(self, file: str, obfuscate_file_extensions) -> bool:
|
|
1389
|
+
for extension in obfuscate_file_extensions:
|
|
1390
|
+
if file.lower().endswith("."+extension.lower()):
|
|
1391
|
+
return True
|
|
1392
|
+
return False
|
|
1393
|
+
|
|
1394
|
+
@GeneralUtilities.check_arguments
|
|
1395
|
+
def SCHealthcheck(self, file: str) -> int:
|
|
1396
|
+
lines = GeneralUtilities.read_lines_from_file(file)
|
|
1397
|
+
for line in reversed(lines):
|
|
1398
|
+
if not GeneralUtilities.string_is_none_or_whitespace(line):
|
|
1399
|
+
if "RunningHealthy (" in line: # TODO use regex
|
|
1400
|
+
GeneralUtilities.write_message_to_stderr(f"Healthy running due to line '{line}' in file '{file}'.")
|
|
1401
|
+
return 0
|
|
1402
|
+
else:
|
|
1403
|
+
GeneralUtilities.write_message_to_stderr(f"Not healthy running due to line '{line}' in file '{file}'.")
|
|
1404
|
+
return 1
|
|
1405
|
+
GeneralUtilities.write_message_to_stderr(f"No valid line found for healthycheck in file '{file}'.")
|
|
1406
|
+
return 2
|
|
1407
|
+
|
|
1408
|
+
@GeneralUtilities.check_arguments
|
|
1409
|
+
def SCObfuscateFilesFolder(self, inputfolder: str, printtableheadline, namemappingfile: str, extensions: str) -> None:
|
|
1410
|
+
obfuscate_all_files = extensions == "*"
|
|
1411
|
+
if (obfuscate_all_files):
|
|
1412
|
+
obfuscate_file_extensions = None
|
|
1413
|
+
else:
|
|
1414
|
+
if "," in extensions:
|
|
1415
|
+
obfuscate_file_extensions = extensions.split(",")
|
|
1416
|
+
else:
|
|
1417
|
+
obfuscate_file_extensions = [extensions]
|
|
1418
|
+
newd = inputfolder+"_Obfuscated"
|
|
1419
|
+
shutil.copytree(inputfolder, newd)
|
|
1420
|
+
inputfolder = newd
|
|
1421
|
+
if (os.path.isdir(inputfolder)):
|
|
1422
|
+
for file in GeneralUtilities.absolute_file_paths(inputfolder):
|
|
1423
|
+
if obfuscate_all_files or self.__extension_matchs(file, obfuscate_file_extensions):
|
|
1424
|
+
self.SCChangeHashOfProgram(file)
|
|
1425
|
+
os.remove(file)
|
|
1426
|
+
os.rename(file + ".modified", file)
|
|
1427
|
+
self.SCFilenameObfuscator(inputfolder, printtableheadline, namemappingfile, extensions)
|
|
1428
|
+
else:
|
|
1429
|
+
raise ValueError(f"Directory not found: '{inputfolder}'")
|
|
1430
|
+
|
|
1431
|
+
@GeneralUtilities.check_arguments
|
|
1432
|
+
def get_services_from_yaml_file(self, yaml_file: str) -> list[str]:
|
|
1433
|
+
with open(yaml_file, encoding="utf-8") as stream:
|
|
1434
|
+
loaded = yaml.safe_load(stream)
|
|
1435
|
+
services = loaded["services"]
|
|
1436
|
+
result = list(services.keys())
|
|
1437
|
+
return result
|
|
1438
|
+
|
|
1439
|
+
@GeneralUtilities.check_arguments
|
|
1440
|
+
def kill_docker_container(self, container_name: str) -> None:
|
|
1441
|
+
self.run_program("docker", f"container rm -f {container_name}")
|
|
1442
|
+
|
|
1443
|
+
@GeneralUtilities.check_arguments
|
|
1444
|
+
def get_docker_debian_version(self, image_tag: str) -> str:
|
|
1445
|
+
result = ScriptCollectionCore().run_program_argsasarray("docker", ['run', f'debian:{image_tag}', 'bash', '-c', 'apt-get -y update && apt-get -y install lsb-release && lsb_release -cs'])
|
|
1446
|
+
result_line = GeneralUtilities.string_to_lines(result[1])[-1]
|
|
1447
|
+
return result_line
|
|
1448
|
+
|
|
1449
|
+
@GeneralUtilities.check_arguments
|
|
1450
|
+
def get_latest_tor_version_of_debian_repository(self, debian_version: str) -> str:
|
|
1451
|
+
package_url: str = f"https://deb.torproject.org/torproject.org/dists/{debian_version}/main/binary-amd64/Packages"
|
|
1452
|
+
headers = {'Cache-Control': 'no-cache'}
|
|
1453
|
+
r = requests.get(package_url, timeout=5, headers=headers)
|
|
1454
|
+
if r.status_code != 200:
|
|
1455
|
+
raise ValueError(f"Checking for latest tor package resulted in HTTP-response-code {r.status_code}.")
|
|
1456
|
+
lines = GeneralUtilities.string_to_lines(GeneralUtilities.bytes_to_string(r.content))
|
|
1457
|
+
version_line_prefix = "Version: "
|
|
1458
|
+
version_content_line = [line for line in lines if line.startswith(version_line_prefix)][1]
|
|
1459
|
+
version_with_overhead = version_content_line[len(version_line_prefix):]
|
|
1460
|
+
tor_version = version_with_overhead.split("~")[0]
|
|
1461
|
+
return tor_version
|
|
1462
|
+
|
|
1463
|
+
def run_testcases_for_python_project(self, repository_folder: str):
|
|
1464
|
+
self.assert_is_git_repository(repository_folder)
|
|
1465
|
+
self.run_program("coverage", "run -m pytest", repository_folder)
|
|
1466
|
+
self.run_program("coverage", "xml", repository_folder)
|
|
1467
|
+
GeneralUtilities.ensure_directory_exists(os.path.join(repository_folder, "Other/TestCoverage"))
|
|
1468
|
+
coveragefile = os.path.join(repository_folder, "Other/TestCoverage/TestCoverage.xml")
|
|
1469
|
+
GeneralUtilities.ensure_file_does_not_exist(coveragefile)
|
|
1470
|
+
os.rename(os.path.join(repository_folder, "coverage.xml"), coveragefile)
|
|
1471
|
+
|
|
1472
|
+
@GeneralUtilities.check_arguments
|
|
1473
|
+
def get_file_permission(self, file: str) -> str:
|
|
1474
|
+
"""This function returns an usual octet-triple, for example "700"."""
|
|
1475
|
+
ls_output: str = self.run_ls_for_folder(file)
|
|
1476
|
+
return self.__get_file_permission_helper(ls_output)
|
|
1477
|
+
|
|
1478
|
+
@GeneralUtilities.check_arguments
|
|
1479
|
+
def __get_file_permission_helper(self, permissions: str) -> str:
|
|
1480
|
+
return str(self.__to_octet(permissions[0:3])) + str(self.__to_octet(permissions[3:6]))+str(self.__to_octet(permissions[6:9]))
|
|
1481
|
+
|
|
1482
|
+
@GeneralUtilities.check_arguments
|
|
1483
|
+
def __to_octet(self, string: str) -> int:
|
|
1484
|
+
return int(self.__to_octet_helper(string[0]) + self.__to_octet_helper(string[1])+self.__to_octet_helper(string[2]), 2)
|
|
1485
|
+
|
|
1486
|
+
@GeneralUtilities.check_arguments
|
|
1487
|
+
def __to_octet_helper(self, string: str) -> str:
|
|
1488
|
+
if (string == "-"):
|
|
1489
|
+
return "0"
|
|
1490
|
+
else:
|
|
1491
|
+
return "1"
|
|
1492
|
+
|
|
1493
|
+
@GeneralUtilities.check_arguments
|
|
1494
|
+
def get_file_owner(self, file: str) -> str:
|
|
1495
|
+
"""This function returns the user and the group in the format "user:group"."""
|
|
1496
|
+
ls_output: str = self.run_ls_for_folder(file)
|
|
1497
|
+
return self.__get_file_owner_helper(ls_output)
|
|
1498
|
+
|
|
1499
|
+
@GeneralUtilities.check_arguments
|
|
1500
|
+
def __get_file_owner_helper(self, ls_output: str) -> str:
|
|
1501
|
+
splitted = ls_output.split()
|
|
1502
|
+
return f"{splitted[2]}:{splitted[3]}"
|
|
1503
|
+
|
|
1504
|
+
@GeneralUtilities.check_arguments
|
|
1505
|
+
def get_file_owner_and_file_permission(self, file: str) -> list[str]:
|
|
1506
|
+
ls_output: str = self.run_ls_for_folder(file)
|
|
1507
|
+
return [self.__get_file_owner_helper(ls_output), self.__get_file_permission_helper(ls_output)]
|
|
1508
|
+
|
|
1509
|
+
@GeneralUtilities.check_arguments
|
|
1510
|
+
def run_ls_for_folder(self, file_or_folder: str) -> str:
|
|
1511
|
+
file_or_folder = file_or_folder.replace("\\", "/")
|
|
1512
|
+
GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -ld' because '{file_or_folder}' does not exist.")
|
|
1513
|
+
ls_result = self.run_program_argsasarray("ls", ["-ld", file_or_folder])
|
|
1514
|
+
GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -ld {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
|
|
1515
|
+
GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -ld' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
|
|
1516
|
+
output = ls_result[1]
|
|
1517
|
+
result = output.replace("\n", GeneralUtilities.empty_string)
|
|
1518
|
+
result = ' '.join(result.split()) # reduce multiple whitespaces to one
|
|
1519
|
+
return result
|
|
1520
|
+
|
|
1521
|
+
@GeneralUtilities.check_arguments
|
|
1522
|
+
def run_ls_for_folder_content(self, file_or_folder: str) -> list[str]:
|
|
1523
|
+
file_or_folder = file_or_folder.replace("\\", "/")
|
|
1524
|
+
GeneralUtilities.assert_condition(os.path.isfile(file_or_folder) or os.path.isdir(file_or_folder), f"Can not execute 'ls -la' because '{file_or_folder}' does not exist.")
|
|
1525
|
+
ls_result = self.run_program_argsasarray("ls", ["-la", file_or_folder])
|
|
1526
|
+
GeneralUtilities.assert_condition(ls_result[0] == 0, f"'ls -la {file_or_folder}' resulted in exitcode {str(ls_result[0])}. StdErr: {ls_result[2]}")
|
|
1527
|
+
GeneralUtilities.assert_condition(not GeneralUtilities.string_is_none_or_whitespace(ls_result[1]), f"'ls -la' of '{file_or_folder}' had an empty output. StdErr: '{ls_result[2]}'")
|
|
1528
|
+
output = ls_result[1]
|
|
1529
|
+
result = output.split("\n")[3:] # skip the lines with "Total", "." and ".."
|
|
1530
|
+
result = [' '.join(line.split()) for line in result] # reduce multiple whitespaces to one
|
|
1531
|
+
return result
|
|
1532
|
+
|
|
1533
|
+
@GeneralUtilities.check_arguments
|
|
1534
|
+
def set_permission(self, file_or_folder: str, permissions: str, recursive: bool = False) -> None:
|
|
1535
|
+
"""This function expects an usual octet-triple, for example "700"."""
|
|
1536
|
+
args = []
|
|
1537
|
+
if recursive:
|
|
1538
|
+
args.append("--recursive")
|
|
1539
|
+
args.append(permissions)
|
|
1540
|
+
args.append(file_or_folder)
|
|
1541
|
+
self.run_program_argsasarray("chmod", args)
|
|
1542
|
+
|
|
1543
|
+
@GeneralUtilities.check_arguments
|
|
1544
|
+
def set_owner(self, file_or_folder: str, owner: str, recursive: bool = False, follow_symlinks: bool = False) -> None:
|
|
1545
|
+
"""This function expects the user and the group in the format "user:group"."""
|
|
1546
|
+
args = []
|
|
1547
|
+
if recursive:
|
|
1548
|
+
args.append("--recursive")
|
|
1549
|
+
if not follow_symlinks:  # chown dereferences symlinks by default; "--no-dereference" makes it operate on the link itself
|
|
1550
|
+
args.append("--no-dereference")
|
|
1551
|
+
args.append(owner)
|
|
1552
|
+
args.append(file_or_folder)
|
|
1553
|
+
self.run_program_argsasarray("chown", args)
|
|
1554
|
+
|
|
1555
|
+
# <run programs>
|
|
1556
|
+
|
|
1557
|
+
@GeneralUtilities.check_arguments
|
|
1558
|
+
def __run_program_argsasarray_async_helper(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> Popen:
|
|
1559
|
+
popen: Popen = self.program_runner.run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, custom_argument, interactive)
|
|
1560
|
+
return popen
|
|
1561
|
+
|
|
1562
|
+
@staticmethod
|
|
1563
|
+
def __enqueue_output(file: IO, queue: Queue):
|
|
1564
|
+
for line in iter(file.readline, ''):
|
|
1565
|
+
queue.put(line)
|
|
1566
|
+
file.close()
|
|
1567
|
+
|
|
1568
|
+
@staticmethod
|
|
1569
|
+
def __continue_process_reading(pid: int, p: Popen, q_stdout: Queue, q_stderr: Queue, reading_stdout_last_time_resulted_in_exception: bool, reading_stderr_last_time_resulted_in_exception: bool):
|
|
1570
|
+
if p.poll() is None:
|
|
1571
|
+
return True
|
|
1572
|
+
|
|
1573
|
+
# if reading_stdout_last_time_resulted_in_exception and reading_stderr_last_time_resulted_in_exception:
|
|
1574
|
+
# return False
|
|
1575
|
+
|
|
1576
|
+
if not q_stdout.empty():
|
|
1577
|
+
return True
|
|
1578
|
+
|
|
1579
|
+
if not q_stderr.empty():
|
|
1580
|
+
return True
|
|
1581
|
+
|
|
1582
|
+
return False
|
|
1583
|
+
|
|
1584
|
+
@staticmethod
|
|
1585
|
+
def __read_popen_pipes(p: Popen, print_live_output: bool, print_errors_as_information: bool, log: SCLog) -> tuple[list[str], list[str]]:
|
|
1586
|
+
p_id = p.pid
|
|
1587
|
+
with ThreadPoolExecutor(2) as pool:
|
|
1588
|
+
q_stdout = Queue()
|
|
1589
|
+
q_stderr = Queue()
|
|
1590
|
+
|
|
1591
|
+
pool.submit(ScriptCollectionCore.__enqueue_output, p.stdout, q_stdout)
|
|
1592
|
+
pool.submit(ScriptCollectionCore.__enqueue_output, p.stderr, q_stderr)
|
|
1593
|
+
reading_stdout_last_time_resulted_in_exception: bool = False
|
|
1594
|
+
reading_stderr_last_time_resulted_in_exception: bool = False
|
|
1595
|
+
|
|
1596
|
+
stdout_result: list[str] = []
|
|
1597
|
+
stderr_result: list[str] = []
|
|
1598
|
+
|
|
1599
|
+
while (ScriptCollectionCore.__continue_process_reading(p_id, p, q_stdout, q_stderr, reading_stdout_last_time_resulted_in_exception, reading_stderr_last_time_resulted_in_exception)):
|
|
1600
|
+
try:
|
|
1601
|
+
while not q_stdout.empty():
|
|
1602
|
+
out_line: str = q_stdout.get_nowait()
|
|
1603
|
+
out_line = out_line.replace("\r", GeneralUtilities.empty_string).replace("\n", GeneralUtilities.empty_string)
|
|
1604
|
+
if GeneralUtilities.string_has_content(out_line):
|
|
1605
|
+
stdout_result.append(out_line)
|
|
1606
|
+
reading_stdout_last_time_resulted_in_exception = False
|
|
1607
|
+
if print_live_output:
|
|
1608
|
+
loglevel = LogLevel.Information
|
|
1609
|
+
if out_line.startswith("Debug: "):
|
|
1610
|
+
loglevel = LogLevel.Debug
|
|
1611
|
+
out_line = out_line[len("Debug: "):]
|
|
1612
|
+
if out_line.startswith("Diagnostic: "):
|
|
1613
|
+
loglevel = LogLevel.Diagnostic
|
|
1614
|
+
out_line = out_line[len("Diagnostic: "):]
|
|
1615
|
+
log.log(out_line, loglevel)
|
|
1616
|
+
except Empty:
|
|
1617
|
+
reading_stdout_last_time_resulted_in_exception = True
|
|
1618
|
+
|
|
1619
|
+
try:
|
|
1620
|
+
while not q_stderr.empty():
|
|
1621
|
+
err_line: str = q_stderr.get_nowait()
|
|
1622
|
+
err_line = err_line.replace("\r", GeneralUtilities.empty_string).replace("\n", GeneralUtilities.empty_string)
|
|
1623
|
+
if GeneralUtilities.string_has_content(err_line):
|
|
1624
|
+
stderr_result.append(err_line)
|
|
1625
|
+
reading_stderr_last_time_resulted_in_exception = False
|
|
1626
|
+
if print_live_output:
|
|
1627
|
+
loglevel = LogLevel.Error
|
|
1628
|
+
if err_line.startswith("Warning: "):
|
|
1629
|
+
loglevel = LogLevel.Warning
|
|
1630
|
+
err_line = err_line[len("Warning: "):]
|
|
1631
|
+
if print_errors_as_information: # "errors" in "print_errors_as_information" means: all what is written to std-err
|
|
1632
|
+
loglevel = LogLevel.Information
|
|
1633
|
+
log.log(err_line, loglevel)
|
|
1634
|
+
except Empty:
|
|
1635
|
+
reading_stderr_last_time_resulted_in_exception = True
|
|
1636
|
+
|
|
1637
|
+
time.sleep(0.01) # this is required to not finish too early
|
|
1638
|
+
|
|
1639
|
+
return (stdout_result, stderr_result)
|
|
1640
|
+
|
|
1641
|
+
@GeneralUtilities.check_arguments
|
|
1642
|
+
def run_program_argsasarray(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False, print_live_output: bool = False) -> tuple[int, str, str, int]:
|
|
1643
|
+
if self.call_program_runner_directly:
|
|
1644
|
+
return self.program_runner.run_program_argsasarray(program, arguments_as_array, working_directory, custom_argument, interactive)
|
|
1645
|
+
try:
|
|
1646
|
+
arguments_as_str = ' '.join(arguments_as_array)
|
|
1647
|
+
mock_loader_result = self.__try_load_mock(program, arguments_as_str, working_directory)
|
|
1648
|
+
if mock_loader_result[0]:
|
|
1649
|
+
return mock_loader_result[1]
|
|
1650
|
+
|
|
1651
|
+
working_directory = self.__adapt_workingdirectory(working_directory)
|
|
1652
|
+
|
|
1653
|
+
if arguments_for_log is None:
|
|
1654
|
+
arguments_for_log = arguments_as_array
|
|
1655
|
+
|
|
1656
|
+
cmd = f'{working_directory}>{program}'
|
|
1657
|
+
if 0 < len(arguments_for_log):
|
|
1658
|
+
arguments_for_log_as_string: str = ' '.join([f'"{argument_for_log}"' for argument_for_log in arguments_for_log])
|
|
1659
|
+
cmd = f'{cmd} {arguments_for_log_as_string}'
|
|
1660
|
+
|
|
1661
|
+
if GeneralUtilities.string_is_none_or_whitespace(title):
|
|
1662
|
+
info_for_log = cmd
|
|
1663
|
+
else:
|
|
1664
|
+
info_for_log = title
|
|
1665
|
+
|
|
1666
|
+
self.log.log(f"Run '{info_for_log}'.", LogLevel.Debug)
|
|
1667
|
+
|
|
1668
|
+
exit_code: int = None
|
|
1669
|
+
stdout: str = GeneralUtilities.empty_string
|
|
1670
|
+
stderr: str = GeneralUtilities.empty_string
|
|
1671
|
+
pid: int = None
|
|
1672
|
+
|
|
1673
|
+
with self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive) as process:
|
|
1674
|
+
|
|
1675
|
+
if log_file is not None:
|
|
1676
|
+
GeneralUtilities.ensure_file_exists(log_file)
|
|
1677
|
+
pid = process.pid
|
|
1678
|
+
|
|
1679
|
+
outputs: tuple[list[str], list[str]] = ScriptCollectionCore.__read_popen_pipes(process, print_live_output, print_errors_as_information, self.log)
|
|
1680
|
+
|
|
1681
|
+
for out_line_plain in outputs[0]:
|
|
1682
|
+
if out_line_plain is not None:
|
|
1683
|
+
out_line: str = None
|
|
1684
|
+
if isinstance(out_line_plain, str):
|
|
1685
|
+
out_line = out_line_plain
|
|
1686
|
+
elif isinstance(out_line_plain, bytes):
|
|
1687
|
+
out_line = GeneralUtilities.bytes_to_string(out_line_plain)
|
|
1688
|
+
else:
|
|
1689
|
+
raise ValueError(f"Unknown type of output: {str(type(out_line_plain))}")
|
|
1690
|
+
|
|
1691
|
+
if out_line is not None and GeneralUtilities.string_has_content(out_line):
|
|
1692
|
+
if out_line.endswith("\n"):
|
|
1693
|
+
out_line = out_line[:-1]
|
|
1694
|
+
if 0 < len(stdout):
|
|
1695
|
+
stdout = stdout+"\n"
|
|
1696
|
+
stdout = stdout+out_line
|
|
1697
|
+
if log_file is not None:
|
|
1698
|
+
GeneralUtilities.append_line_to_file(log_file, out_line)
|
|
1699
|
+
|
|
1700
|
+
for err_line_plain in outputs[1]:
|
|
1701
|
+
if err_line_plain is not None:
|
|
1702
|
+
err_line: str = None
|
|
1703
|
+
if isinstance(err_line_plain, str):
|
|
1704
|
+
err_line = err_line_plain
|
|
1705
|
+
elif isinstance(err_line_plain, bytes):
|
|
1706
|
+
err_line = GeneralUtilities.bytes_to_string(err_line_plain)
|
|
1707
|
+
else:
|
|
1708
|
+
raise ValueError(f"Unknown type of output: {str(type(err_line_plain))}")
|
|
1709
|
+
if err_line is not None and GeneralUtilities.string_has_content(err_line):
|
|
1710
|
+
if err_line.endswith("\n"):
|
|
1711
|
+
err_line = err_line[:-1]
|
|
1712
|
+
if 0 < len(stderr):
|
|
1713
|
+
stderr = stderr+"\n"
|
|
1714
|
+
stderr = stderr+err_line
|
|
1715
|
+
if log_file is not None:
|
|
1716
|
+
GeneralUtilities.append_line_to_file(log_file, err_line)
|
|
1717
|
+
|
|
1718
|
+
exit_code = process.returncode
|
|
1719
|
+
GeneralUtilities.assert_condition(exit_code is not None, f"Exitcode of program-run of '{info_for_log}' is None.")
|
|
1720
|
+
|
|
1721
|
+
result_message = f"Program '{info_for_log}' resulted in exitcode {exit_code}."
|
|
1722
|
+
|
|
1723
|
+
self.log.log(result_message, LogLevel.Debug)
|
|
1724
|
+
|
|
1725
|
+
if throw_exception_if_exitcode_is_not_zero and exit_code != 0:
|
|
1726
|
+
raise ValueError(f"{result_message} (StdOut: '{stdout}', StdErr: '{stderr}')")
|
|
1727
|
+
|
|
1728
|
+
result = (exit_code, stdout, stderr, pid)
|
|
1729
|
+
return result
|
|
1730
|
+
except Exception as e:  # pylint: disable=unused-variable, try-except-raise
|
|
1731
|
+
raise
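# Illustrative usage sketch (not part of the package): run_program_argsasarray returns the tuple
# (exit_code, stdout, stderr, pid); the program, arguments and repository-path below are hypothetical.
# exit_code, stdout, stderr, pid = sc.run_program_argsasarray("git", ["status", "--porcelain"], "/path/to/repository")
# if exit_code == 0 and not stdout:
#     print("working tree is clean")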
|
|
1732
|
+
|
|
1733
|
+
# Return-values program_runner: Exitcode, StdOut, StdErr, Pid
|
|
1734
|
+
@GeneralUtilities.check_arguments
|
|
1735
|
+
def run_program_with_retry(self, program: str, arguments: str = "", working_directory: str = None, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False, print_live_output: bool = False, amount_of_attempts: int = 5) -> tuple[int, str, str, int]:
|
|
1736
|
+
return GeneralUtilities.retry_action(lambda: self.run_program(program, arguments, working_directory, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, throw_exception_if_exitcode_is_not_zero, custom_argument, interactive, print_live_output), amount_of_attempts)
|
|
1737
|
+
|
|
1738
|
+
# Return-values program_runner: Exitcode, StdOut, StdErr, Pid
|
|
1739
|
+
@GeneralUtilities.check_arguments
|
|
1740
|
+
def run_program(self, program: str, arguments: str = "", working_directory: str = None, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False, print_live_output: bool = False) -> tuple[int, str, str, int]:
|
|
1741
|
+
if self.call_program_runner_directly:
|
|
1742
|
+
return self.program_runner.run_program(program, arguments, working_directory, custom_argument, interactive)
|
|
1743
|
+
return self.run_program_argsasarray(program, GeneralUtilities.arguments_to_array(arguments), working_directory, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, throw_exception_if_exitcode_is_not_zero, custom_argument, interactive, print_live_output)
|
|
1744
|
+
|
|
1745
|
+
# Return-values program_runner: Pid
|
|
1746
|
+
@GeneralUtilities.check_arguments
|
|
1747
|
+
def run_program_argsasarray_async(self, program: str, arguments_as_array: list[str] = [], working_directory: str = None, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> int:
|
|
1748
|
+
if self.call_program_runner_directly:
|
|
1749
|
+
return self.program_runner.run_program_argsasarray_async(program, arguments_as_array, working_directory, custom_argument, interactive)
|
|
1750
|
+
mock_loader_result = self.__try_load_mock(program, ' '.join(arguments_as_array), working_directory)
|
|
1751
|
+
if mock_loader_result[0]:
|
|
1752
|
+
return mock_loader_result[1]
|
|
1753
|
+
process: Popen = self.__run_program_argsasarray_async_helper(program, arguments_as_array, working_directory, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive)
|
|
1754
|
+
return process.pid
|
|
1755
|
+
|
|
1756
|
+
# Return-values program_runner: Pid
|
|
1757
|
+
@GeneralUtilities.check_arguments
|
|
1758
|
+
def run_program_async(self, program: str, arguments: str = "", working_directory: str = None, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, custom_argument: object = None, interactive: bool = False) -> int:
|
|
1759
|
+
if self.call_program_runner_directly:
|
|
1760
|
+
return self.program_runner.run_program_argsasarray_async(program, GeneralUtilities.arguments_to_array(arguments), working_directory, custom_argument, interactive)
|
|
1761
|
+
return self.run_program_argsasarray_async(program, GeneralUtilities.arguments_to_array(arguments), working_directory, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, custom_argument, interactive)
|
|
1762
|
+
|
|
1763
|
+
@GeneralUtilities.check_arguments
|
|
1764
|
+
def __try_load_mock(self, program: str, arguments: str, working_directory: str) -> tuple[bool, tuple[int, str, str, int]]:
|
|
1765
|
+
if self.mock_program_calls:
|
|
1766
|
+
try:
|
|
1767
|
+
return [True, self.__get_mock_program_call(program, arguments, working_directory)]
|
|
1768
|
+
except LookupError:
|
|
1769
|
+
if not self.execute_program_really_if_no_mock_call_is_defined:
|
|
1770
|
+
raise
|
|
1771
|
+
return [False, None]
|
|
1772
|
+
|
|
1773
|
+
@GeneralUtilities.check_arguments
|
|
1774
|
+
def __adapt_workingdirectory(self, workingdirectory: str) -> str:
|
|
1775
|
+
result: str = None
|
|
1776
|
+
if workingdirectory is None:
|
|
1777
|
+
result = os.getcwd()
|
|
1778
|
+
else:
|
|
1779
|
+
if os.path.isabs(workingdirectory):
|
|
1780
|
+
result = workingdirectory
|
|
1781
|
+
else:
|
|
1782
|
+
result = GeneralUtilities.resolve_relative_path_from_current_working_directory(workingdirectory)
|
|
1783
|
+
if not os.path.isdir(result):
|
|
1784
|
+
raise ValueError(f"Working-directory '{workingdirectory}' does not exist.")
|
|
1785
|
+
return result
|
|
1786
|
+
|
|
1787
|
+
@GeneralUtilities.check_arguments
|
|
1788
|
+
def verify_no_pending_mock_program_calls(self):
|
|
1789
|
+
if (len(self.__mocked_program_calls) > 0):
|
|
1790
|
+
raise AssertionError("The following mock-calls were not called:\n"+",\n ".join([self.__format_mock_program_call(r) for r in self.__mocked_program_calls]))
|
|
1791
|
+
|
|
1792
|
+
@GeneralUtilities.check_arguments
|
|
1793
|
+
def __format_mock_program_call(self, r) -> str:
|
|
1794
|
+
r: ScriptCollectionCore.__MockProgramCall = r
|
|
1795
|
+
return f"'{r.workingdirectory}>{r.program} {r.argument}' (" \
|
|
1796
|
+
f"exitcode: {GeneralUtilities.str_none_safe(str(r.exit_code))}, " \
|
|
1797
|
+
f"pid: {GeneralUtilities.str_none_safe(str(r.pid))}, "\
|
|
1798
|
+
f"stdout: {GeneralUtilities.str_none_safe(str(r.stdout))}, " \
|
|
1799
|
+
f"stderr: {GeneralUtilities.str_none_safe(str(r.stderr))})"
|
|
1800
|
+
|
|
1801
|
+
@GeneralUtilities.check_arguments
|
|
1802
|
+
def register_mock_program_call(self, program: str, argument: str, workingdirectory: str, result_exit_code: int, result_stdout: str, result_stderr: str, result_pid: int, amount_of_expected_calls=1):
|
|
1803
|
+
"This function is for test-purposes only"
|
|
1804
|
+
for _ in itertools.repeat(None, amount_of_expected_calls):
|
|
1805
|
+
mock_call = ScriptCollectionCore.__MockProgramCall()
|
|
1806
|
+
mock_call.program = program
|
|
1807
|
+
mock_call.argument = argument
|
|
1808
|
+
mock_call.workingdirectory = workingdirectory
|
|
1809
|
+
mock_call.exit_code = result_exit_code
|
|
1810
|
+
mock_call.stdout = result_stdout
|
|
1811
|
+
mock_call.stderr = result_stderr
|
|
1812
|
+
mock_call.pid = result_pid
|
|
1813
|
+
self.__mocked_program_calls.append(mock_call)
|
|
1814
|
+
|
|
1815
|
+
@GeneralUtilities.check_arguments
|
|
1816
|
+
def __get_mock_program_call(self, program: str, argument: str, workingdirectory: str):
|
|
1817
|
+
result: ScriptCollectionCore.__MockProgramCall = None
|
|
1818
|
+
for mock_call in self.__mocked_program_calls:
|
|
1819
|
+
if ((re.match(mock_call.program, program) is not None)
|
|
1820
|
+
and (re.match(mock_call.argument, argument) is not None)
|
|
1821
|
+
and (re.match(mock_call.workingdirectory, workingdirectory) is not None)):
|
|
1822
|
+
result = mock_call
|
|
1823
|
+
break
|
|
1824
|
+
if result is None:
|
|
1825
|
+
raise LookupError(f"Tried to execute mock-call '{workingdirectory}>{program} {argument}' but no mock-call was defined for that execution")
|
|
1826
|
+
else:
|
|
1827
|
+
self.__mocked_program_calls.remove(result)
|
|
1828
|
+
return (result.exit_code, result.stdout, result.stderr, result.pid)
|
|
1829
|
+
|
|
1830
|
+
@GeneralUtilities.check_arguments
|
|
1831
|
+
class __MockProgramCall:
|
|
1832
|
+
program: str
|
|
1833
|
+
argument: str
|
|
1834
|
+
workingdirectory: str
|
|
1835
|
+
exit_code: int
|
|
1836
|
+
stdout: str
|
|
1837
|
+
stderr: str
|
|
1838
|
+
pid: int
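# Illustrative test sketch (not part of the package): the mock facility above matches program, argument
# and working directory as regular expressions; the concrete values below are hypothetical.
# sc.mock_program_calls = True
# sc.register_mock_program_call("git", "status.*", ".*", 0, "clean", "", 42)
# exit_code, stdout, stderr, pid = sc.run_program("git", "status", "/some/repository")
# sc.verify_no_pending_mock_program_calls()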
|
|
1839
|
+
|
|
1840
|
+
@GeneralUtilities.check_arguments
|
|
1841
|
+
def run_with_epew(self, program: str, argument: str = "", working_directory: str = None, print_errors_as_information: bool = False, log_file: str = None, timeoutInSeconds: int = 600, addLogOverhead: bool = False, title: str = None, log_namespace: str = "", arguments_for_log: list[str] = None, throw_exception_if_exitcode_is_not_zero: bool = True, custom_argument: object = None, interactive: bool = False, print_live_output: bool = False, encode_argument_in_base64: bool = False) -> tuple[int, str, str, int]:
|
|
1842
|
+
epew_argument=["-p",program ,"-w", working_directory]
|
|
1843
|
+
if encode_argument_in_base64:
|
|
1844
|
+
if arguments_for_log is None:
|
|
1845
|
+
arguments_for_log=epew_argument+["-a",f"\"{argument}\""]
|
|
1846
|
+
base64_string = base64.b64encode(argument.encode("utf-8")).decode("utf-8")
|
|
1847
|
+
epew_argument=epew_argument+["-a",base64_string,"-b"]
|
|
1848
|
+
else:
|
|
1849
|
+
epew_argument=epew_argument+["-a",argument]
|
|
1850
|
+
if arguments_for_log is None:
|
|
1851
|
+
arguments_for_log = epew_argument
|
|
1852
|
+
return self.run_program_argsasarray("epew", epew_argument, working_directory, print_errors_as_information, log_file, timeoutInSeconds, addLogOverhead, title, log_namespace, arguments_for_log, throw_exception_if_exitcode_is_not_zero, custom_argument, interactive,print_live_output=print_live_output)
|
|
1853
|
+
|
|
1854
|
+
|
|
1855
|
+
# </run programs>
|
|
1856
|
+
|
|
1857
|
+
@GeneralUtilities.check_arguments
|
|
1858
|
+
def extract_archive_with_7z(self, unzip_program_file: str, zip_file: str, password: str, output_directory: str) -> None:
|
|
1859
|
+
password_set = not password is None
|
|
1860
|
+
file_name = Path(zip_file).name
|
|
1861
|
+
file_folder = os.path.dirname(zip_file)
|
|
1862
|
+
argument = "x"
|
|
1863
|
+
if password_set:
|
|
1864
|
+
argument = f"{argument} -p\"{password}\""
|
|
1865
|
+
argument = f"{argument} -o {output_directory}"
|
|
1866
|
+
argument = f"{argument} {file_name}"
|
|
1867
|
+
return self.run_program(unzip_program_file, argument, file_folder)
|
|
1868
|
+
|
|
1869
|
+
@GeneralUtilities.check_arguments
|
|
1870
|
+
def get_internet_time(self) -> datetime:
|
|
1871
|
+
response = ntplib.NTPClient().request('pool.ntp.org')
|
|
1872
|
+
return datetime.fromtimestamp(response.tx_time)
|
|
1873
|
+
|
|
1874
|
+
@GeneralUtilities.check_arguments
|
|
1875
|
+
def system_time_equals_internet_time(self, maximal_tolerance_difference: timedelta) -> bool:
|
|
1876
|
+
return abs(GeneralUtilities.get_now() - self.get_internet_time()) < maximal_tolerance_difference
|
|
1877
|
+
|
|
1878
|
+
@GeneralUtilities.check_arguments
|
|
1879
|
+
def system_time_equals_internet_time_with_default_tolerance(self) -> bool:
|
|
1880
|
+
return self.system_time_equals_internet_time(self.__get_default_tolerance_for_system_time_equals_internet_time())
|
|
1881
|
+
|
|
1882
|
+
@GeneralUtilities.check_arguments
|
|
1883
|
+
def check_system_time(self, maximal_tolerance_difference: timedelta):
|
|
1884
|
+
if not self.system_time_equals_internet_time(maximal_tolerance_difference):
|
|
1885
|
+
raise ValueError("System time may be wrong")
|
|
1886
|
+
|
|
1887
|
+
@GeneralUtilities.check_arguments
|
|
1888
|
+
def check_system_time_with_default_tolerance(self) -> None:
|
|
1889
|
+
self.check_system_time(self.__get_default_tolerance_for_system_time_equals_internet_time())
|
|
1890
|
+
|
|
1891
|
+
@GeneralUtilities.check_arguments
|
|
1892
|
+
def __get_default_tolerance_for_system_time_equals_internet_time(self) -> timedelta:
|
|
1893
|
+
return timedelta(hours=0, minutes=0, seconds=3)
|
|
1894
|
+
|
|
1895
|
+
@GeneralUtilities.check_arguments
|
|
1896
|
+
def increment_version(self, input_version: str, increment_major: bool, increment_minor: bool, increment_patch: bool) -> str:
|
|
1897
|
+
splitted = input_version.split(".")
|
|
1898
|
+
GeneralUtilities.assert_condition(len(splitted) == 3, f"Version '{input_version}' does not have the 'major.minor.patch'-pattern.")
|
|
1899
|
+
major = int(splitted[0])
|
|
1900
|
+
minor = int(splitted[1])
|
|
1901
|
+
patch = int(splitted[2])
|
|
1902
|
+
if increment_major:
|
|
1903
|
+
major = major+1
|
|
1904
|
+
if increment_minor:
|
|
1905
|
+
minor = minor+1
|
|
1906
|
+
if increment_patch:
|
|
1907
|
+
patch = patch+1
|
|
1908
|
+
return f"{major}.{minor}.{patch}"
|
|
1909
|
+
|
|
1910
|
+
@GeneralUtilities.check_arguments
|
|
1911
|
+
def get_semver_version_from_gitversion(self, repository_folder: str) -> str:
|
|
1912
|
+
self.assert_is_git_repository(repository_folder)
|
|
1913
|
+
if (self.git_repository_has_commits(repository_folder)):
|
|
1914
|
+
result = self.get_version_from_gitversion(repository_folder, "MajorMinorPatch")
|
|
1915
|
+
if self.git_repository_has_uncommitted_changes(repository_folder):
|
|
1916
|
+
if self.get_current_git_branch_has_tag(repository_folder):
|
|
1917
|
+
id_of_latest_tag = self.git_get_commitid_of_tag(repository_folder, self.get_latest_git_tag(repository_folder))
|
|
1918
|
+
current_commit = self.git_get_commit_id(repository_folder)
|
|
1919
|
+
current_commit_is_on_latest_tag = id_of_latest_tag == current_commit
|
|
1920
|
+
if current_commit_is_on_latest_tag:
|
|
1921
|
+
result = self.increment_version(result, False, False, True)
|
|
1922
|
+
else:
|
|
1923
|
+
result = "0.1.0"
|
|
1924
|
+
return result
|
|
1925
|
+
|
|
1926
|
+
@staticmethod
|
|
1927
|
+
@GeneralUtilities.check_arguments
|
|
1928
|
+
def is_patch_version(version_string: str) -> bool:
|
|
1929
|
+
return not version_string.endswith(".0")
|
|
1930
|
+
|
|
1931
|
+
@GeneralUtilities.check_arguments
|
|
1932
|
+
def get_version_from_gitversion(self, folder: str, variable: str) -> str:
|
|
1933
|
+
# called twice as workaround for issue 1877 in gitversion ( https://github.com/GitTools/GitVersion/issues/1877 )
|
|
1934
|
+
result = self.run_program_argsasarray("gitversion", ["/showVariable", variable], folder)
|
|
1935
|
+
result = self.run_program_argsasarray("gitversion", ["/showVariable", variable], folder)
|
|
1936
|
+
result = GeneralUtilities.strip_new_line_character(result[1])
|
|
1937
|
+
|
|
1938
|
+
return result
|
|
1939
|
+
|
|
1940
|
+
@GeneralUtilities.check_arguments
|
|
1941
|
+
def generate_certificate_authority(self, folder: str, name: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str, days_until_expire: int = None, password: str = None) -> None:
|
|
1942
|
+
if days_until_expire is None:
|
|
1943
|
+
days_until_expire = 1825
|
|
1944
|
+
if password is None:
|
|
1945
|
+
password = GeneralUtilities.generate_password()
|
|
1946
|
+
GeneralUtilities.ensure_directory_exists(folder)
|
|
1947
|
+
self.run_program_argsasarray("openssl", ['req', '-new', '-newkey', 'ec', '-pkeyopt', 'ec_paramgen_curve:prime256v1', '-days', str(days_until_expire), '-nodes', '-x509', '-subj', f'/C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={name}/OU={subj_ou}', '-passout', f'pass:{password}', '-keyout', f'{name}.key', '-out', f'{name}.crt'], folder)
|
|
1948
|
+
|
|
1949
|
+
@GeneralUtilities.check_arguments
|
|
1950
|
+
def generate_certificate(self, folder: str, domain: str, filename: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str, days_until_expire: int = None, password: str = None) -> None:
|
|
1951
|
+
if days_until_expire is None:
|
|
1952
|
+
days_until_expire = 397
|
|
1953
|
+
if password is None:
|
|
1954
|
+
password = GeneralUtilities.generate_password()
|
|
1955
|
+
rsa_key_length = 4096
|
|
1956
|
+
self.run_program_argsasarray("openssl", ['genrsa', '-out', f'{filename}.key', f'{rsa_key_length}'], folder)
|
|
1957
|
+
self.run_program_argsasarray("openssl", ['req', '-new', '-subj', f'/C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou}', '-x509', '-key', f'{filename}.key', '-out', f'{filename}.unsigned.crt', '-days', f'{days_until_expire}'], folder)
|
|
1958
|
+
self.run_program_argsasarray("openssl", ['pkcs12', '-export', '-out', f'{filename}.selfsigned.pfx', '-password', f'pass:{password}', '-inkey', f'{filename}.key', '-in', f'{filename}.unsigned.crt'], folder)
|
|
1959
|
+
GeneralUtilities.write_text_to_file(os.path.join(folder, f"{filename}.password"), password)
|
|
1960
|
+
GeneralUtilities.write_text_to_file(os.path.join(folder, f"{filename}.san.conf"), f"""[ req ]
|
|
1961
|
+
default_bits = {rsa_key_length}
|
|
1962
|
+
distinguished_name = req_distinguished_name
|
|
1963
|
+
req_extensions = v3_req
|
|
1964
|
+
default_md = sha256
|
|
1965
|
+
dirstring_type = nombstr
|
|
1966
|
+
prompt = no
|
|
1967
|
+
|
|
1968
|
+
[ req_distinguished_name ]
|
|
1969
|
+
countryName = {subj_c}
|
|
1970
|
+
stateOrProvinceName = {subj_st}
|
|
1971
|
+
localityName = {subj_l}
|
|
1972
|
+
organizationName = {subj_o}
|
|
1973
|
+
organizationUnit = {subj_ou}
|
|
1974
|
+
commonName = {domain}
|
|
1975
|
+
|
|
1976
|
+
[v3_req]
|
|
1977
|
+
subjectAltName = @subject_alt_name
|
|
1978
|
+
|
|
1979
|
+
[ subject_alt_name ]
|
|
1980
|
+
DNS = {domain}
|
|
1981
|
+
""")
|
|
1982
|
+
|
|
1983
|
+
@GeneralUtilities.check_arguments
|
|
1984
|
+
def generate_certificate_sign_request(self, folder: str, domain: str, filename: str, subj_c: str, subj_st: str, subj_l: str, subj_o: str, subj_ou: str) -> None:
|
|
1985
|
+
self.run_program_argsasarray("openssl", ['req', '-new', '-subj', f'/C={subj_c}/ST={subj_st}/L={subj_l}/O={subj_o}/CN={domain}/OU={subj_ou}', '-key', f'{filename}.key', f'-out', f'{filename}.csr', f'-config', f'{filename}.san.conf'], folder)
|
|
1986
|
+
|
|
1987
|
+
@GeneralUtilities.check_arguments
|
|
1988
|
+
def sign_certificate(self, folder: str, ca_folder: str, ca_name: str, domain: str, filename: str, days_until_expire: int = None) -> None:
|
|
1989
|
+
if days_until_expire is None:
|
|
1990
|
+
days_until_expire = 397
|
|
1991
|
+
ca = os.path.join(ca_folder, ca_name)
|
|
1992
|
+
password_file = os.path.join(folder, f"{filename}.password")
|
|
1993
|
+
password = GeneralUtilities.read_text_from_file(password_file)
|
|
1994
|
+
self.run_program_argsasarray("openssl", ['x509', '-req', '-in', f'{filename}.csr', '-CA', f'{ca}.crt', '-CAkey', f'{ca}.key', '-CAcreateserial', '-CAserial', f'{ca}.srl', '-out', f'{filename}.crt', '-days', str(days_until_expire), '-sha256', '-extensions', 'v3_req', '-extfile', f'{filename}.san.conf'], folder)
|
|
1995
|
+
self.run_program_argsasarray("openssl", ['pkcs12', '-export', '-out', f'{filename}.pfx', f'-inkey', f'{filename}.key', '-in', f'{filename}.crt', '-password', f'pass:{password}'], folder)
|
|
1996
|
+
|
|
1997
|
+
@GeneralUtilities.check_arguments
|
|
1998
|
+
def update_dependencies_of_python_in_requirementstxt_file(self, file: str, ignored_dependencies: list[str]):
|
|
1999
|
+
# TODO consider ignored_dependencies
|
|
2000
|
+
lines = GeneralUtilities.read_lines_from_file(file)
|
|
2001
|
+
new_lines = []
|
|
2002
|
+
for line in lines:
|
|
2003
|
+
if GeneralUtilities.string_has_content(line):
|
|
2004
|
+
new_lines.append(self.__get_updated_line_for_python_requirements(line.strip()))
|
|
2005
|
+
GeneralUtilities.write_lines_to_file(file, new_lines)
|
|
2006
|
+
|
|
2007
|
+
@GeneralUtilities.check_arguments
|
|
2008
|
+
def __get_updated_line_for_python_requirements(self, line: str) -> str:
|
|
2009
|
+
if "==" in line or "<" in line:
|
|
2010
|
+
return line
|
|
2011
|
+
elif ">" in line:
|
|
2012
|
+
try:
|
|
2013
|
+
# line is something like "cyclonedx-bom>=2.0.2" and the function must return with the updated version
|
|
2014
|
+
# (something like "cyclonedx-bom>=2.11.0" for example)
|
|
2015
|
+
package = line.split(">")[0]
|
|
2016
|
+
operator = ">=" if ">=" in line else ">"
|
|
2017
|
+
headers = {'Cache-Control': 'no-cache'}
|
|
2018
|
+
response = requests.get(f'https://pypi.org/pypi/{package}/json', timeout=5, headers=headers)
|
|
2019
|
+
latest_version = response.json()['info']['version']
|
|
2020
|
+
# TODO update only minor- and patch-version
|
|
2021
|
+
# TODO print info if there is a new major-version
|
|
2022
|
+
return package+operator+latest_version
|
|
2023
|
+
except Exception:
|
|
2024
|
+
return line
|
|
2025
|
+
else:
|
|
2026
|
+
raise ValueError(f'Unexpected line in requirements-file: "{line}"')
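# Illustrative example (not part of the package): only ">"/">="-pinned requirement-lines are bumped to the
# latest version reported by pypi.org; "=="- and "<"-pinned lines are returned unchanged. The package name
# and versions below are hypothetical.
# "requests>=2.28.0"  ->  "requests>=<latest version from pypi.org>"
# "requests==2.28.0"  ->  "requests==2.28.0"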
|
|
2027
|
+
|
|
2028
|
+
@GeneralUtilities.check_arguments
|
|
2029
|
+
def update_dependencies_of_python_in_setupcfg_file(self, setup_cfg_file: str, ignored_dependencies: list[str]):
|
|
2030
|
+
# TODO consider ignored_dependencies
|
|
2031
|
+
lines = GeneralUtilities.read_lines_from_file(setup_cfg_file)
|
|
2032
|
+
new_lines = []
|
|
2033
|
+
requirement_parsing_mode = False
|
|
2034
|
+
for line in lines:
|
|
2035
|
+
new_line = line
|
|
2036
|
+
if (requirement_parsing_mode):
|
|
2037
|
+
if ("<" in line or "=" in line or ">" in line):
|
|
2038
|
+
updated_line = f" {self.__get_updated_line_for_python_requirements(line.strip())}"
|
|
2039
|
+
new_line = updated_line
|
|
2040
|
+
else:
|
|
2041
|
+
requirement_parsing_mode = False
|
|
2042
|
+
else:
|
|
2043
|
+
if line.startswith("install_requires ="):
|
|
2044
|
+
requirement_parsing_mode = True
|
|
2045
|
+
new_lines.append(new_line)
|
|
2046
|
+
GeneralUtilities.write_lines_to_file(setup_cfg_file, new_lines)
|
|
2047
|
+
|
|
2048
|
+
@GeneralUtilities.check_arguments
|
|
2049
|
+
def update_dependencies_of_dotnet_project(self, csproj_file: str, ignored_dependencies: list[str]):
|
|
2050
|
+
folder = os.path.dirname(csproj_file)
|
|
2051
|
+
csproj_filename = os.path.basename(csproj_file)
|
|
2052
|
+
self.log.log(f"Check for updates in {csproj_filename}", LogLevel.Information)
|
|
2053
|
+
result = self.run_program_with_retry("dotnet", f"list {csproj_filename} package --outdated", folder, print_errors_as_information=True)
|
|
2054
|
+
for line in result[1].replace("\r", GeneralUtilities.empty_string).split("\n"):
|
|
2055
|
+
# Relevant output-lines are something like " > NJsonSchema 10.7.0 10.7.0 10.9.0"
|
|
2056
|
+
if ">" in line:
|
|
2057
|
+
package_name = line.replace(">", GeneralUtilities.empty_string).strip().split(" ")[0]
|
|
2058
|
+
if not (package_name in ignored_dependencies):
|
|
2059
|
+
self.log.log(f"Update package {package_name}...", LogLevel.Debug)
|
|
2060
|
+
time.sleep(1.1) # attempt to prevent rate-limit
|
|
2061
|
+
self.run_program_with_retry("dotnet", f"add {csproj_filename} package {package_name}", folder, print_errors_as_information=True)
|
|
2062
|
+
|
|
2063
|
+
@GeneralUtilities.check_arguments
|
|
2064
|
+
def create_deb_package(self, toolname: str, binary_folder: str, control_file_content: str, deb_output_folder: str, permission_of_executable_file_as_octet_triple: int) -> None:
|
|
2065
|
+
|
|
2066
|
+
# prepare
|
|
2067
|
+
GeneralUtilities.ensure_directory_exists(deb_output_folder)
|
|
2068
|
+
temp_folder = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
|
|
2069
|
+
GeneralUtilities.ensure_directory_exists(temp_folder)
|
|
2070
|
+
bin_folder = binary_folder
|
|
2071
|
+
tool_content_folder_name = toolname+"Content"
|
|
2072
|
+
|
|
2073
|
+
# create folder
|
|
2074
|
+
GeneralUtilities.ensure_directory_exists(temp_folder)
|
|
2075
|
+
control_content_folder_name = "controlcontent"
|
|
2076
|
+
packagecontent_control_folder = os.path.join(temp_folder, control_content_folder_name)
|
|
2077
|
+
GeneralUtilities.ensure_directory_exists(packagecontent_control_folder)
|
|
2078
|
+
data_content_folder_name = "datacontent"
|
|
2079
|
+
packagecontent_data_folder = os.path.join(temp_folder, data_content_folder_name)
|
|
2080
|
+
GeneralUtilities.ensure_directory_exists(packagecontent_data_folder)
|
|
2081
|
+
entireresult_content_folder_name = "entireresultcontent"
|
|
2082
|
+
packagecontent_entireresult_folder = os.path.join(temp_folder, entireresult_content_folder_name)
|
|
2083
|
+
GeneralUtilities.ensure_directory_exists(packagecontent_entireresult_folder)
|
|
2084
|
+
|
|
2085
|
+
# create "debian-binary"-file
|
|
2086
|
+
debianbinary_file = os.path.join(packagecontent_entireresult_folder, "debian-binary")
|
|
2087
|
+
GeneralUtilities.ensure_file_exists(debianbinary_file)
|
|
2088
|
+
GeneralUtilities.write_text_to_file(debianbinary_file, "2.0\n")
|
|
2089
|
+
|
|
2090
|
+
# create control-content
|
|
2091
|
+
|
|
2092
|
+
# conffiles
|
|
2093
|
+
conffiles_file = os.path.join(packagecontent_control_folder, "conffiles")
|
|
2094
|
+
GeneralUtilities.ensure_file_exists(conffiles_file)
|
|
2095
|
+
|
|
2096
|
+
# postinst-script
|
|
2097
|
+
postinst_file = os.path.join(packagecontent_control_folder, "postinst")
|
|
2098
|
+
GeneralUtilities.ensure_file_exists(postinst_file)
|
|
2099
|
+
exe_file = f"/usr/bin/{tool_content_folder_name}/{toolname}"
|
|
2100
|
+
link_file = f"/usr/bin/{toolname.lower()}"
|
|
2101
|
+
permission = str(permission_of_executable_file_as_octet_triple)
|
|
2102
|
+
GeneralUtilities.write_text_to_file(postinst_file, f"""#!/bin/sh
|
|
2103
|
+
ln -s {exe_file} {link_file}
|
|
2104
|
+
chmod {permission} {exe_file}
|
|
2105
|
+
chmod {permission} {link_file}
|
|
2106
|
+
""")
|
|
2107
|
+
|
|
2108
|
+
# control
|
|
2109
|
+
control_file = os.path.join(packagecontent_control_folder, "control")
|
|
2110
|
+
GeneralUtilities.ensure_file_exists(control_file)
|
|
2111
|
+
GeneralUtilities.write_text_to_file(control_file, control_file_content)
|
|
2112
|
+
|
|
2113
|
+
# md5sums
|
|
2114
|
+
md5sums_file = os.path.join(packagecontent_control_folder, "md5sums")
|
|
2115
|
+
GeneralUtilities.ensure_file_exists(md5sums_file)
|
|
2116
|
+
|
|
2117
|
+
# create data-content
|
|
2118
|
+
|
|
2119
|
+
# copy binaries
|
|
2120
|
+
usr_bin_folder = os.path.join(packagecontent_data_folder, "usr/bin")
|
|
2121
|
+
GeneralUtilities.ensure_directory_exists(usr_bin_folder)
|
|
2122
|
+
usr_bin_content_folder = os.path.join(usr_bin_folder, tool_content_folder_name)
|
|
2123
|
+
GeneralUtilities.copy_content_of_folder(bin_folder, usr_bin_content_folder)
|
|
2124
|
+
|
|
2125
|
+
# create debfile
|
|
2126
|
+
deb_filename = f"{toolname}.deb"
|
|
2127
|
+
self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/control.tar.gz", "*"], packagecontent_control_folder)
|
|
2128
|
+
self.run_program_argsasarray("tar", ["czf", f"../{entireresult_content_folder_name}/data.tar.gz", "*"], packagecontent_data_folder)
|
|
2129
|
+
self.run_program_argsasarray("ar", ["r", deb_filename, "debian-binary", "control.tar.gz", "data.tar.gz"], packagecontent_entireresult_folder)
|
|
2130
|
+
result_file = os.path.join(packagecontent_entireresult_folder, deb_filename)
|
|
2131
|
+
shutil.copy(result_file, os.path.join(deb_output_folder, deb_filename))
|
|
2132
|
+
|
|
2133
|
+
# cleanup
|
|
2134
|
+
GeneralUtilities.ensure_directory_does_not_exist(temp_folder)
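# Illustrative note (not taken from the package's documentation): the resulting file follows the classic
# ar-based deb-layout which create_deb_package assembles above; "MyTool" is a hypothetical toolname.
#   MyTool.deb
#   |- debian-binary    ("2.0")
#   |- control.tar.gz   (control, conffiles, postinst, md5sums)
#   |- data.tar.gz      (usr/bin/MyToolContent/...; postinst additionally creates /usr/bin/mytool as symlink)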
|
|
2135
|
+
|
|
2136
|
+
@GeneralUtilities.check_arguments
|
|
2137
|
+
def update_year_in_copyright_tags(self, file: str) -> None:
|
|
2138
|
+
current_year = str(GeneralUtilities.get_now().year)
|
|
2139
|
+
lines = GeneralUtilities.read_lines_from_file(file)
|
|
2140
|
+
lines_result = []
|
|
2141
|
+
for line in lines:
|
|
2142
|
+
if match := re.search("(.*<[Cc]opyright>.*)\\d\\d\\d\\d(.*<\\/[Cc]opyright>.*)", line):
|
|
2143
|
+
part1 = match.group(1)
|
|
2144
|
+
part2 = match.group(2)
|
|
2145
|
+
adapted = part1+current_year+part2
|
|
2146
|
+
else:
|
|
2147
|
+
adapted = line
|
|
2148
|
+
lines_result.append(adapted)
|
|
2149
|
+
GeneralUtilities.write_lines_to_file(file, lines_result)
|
|
2150
|
+
|
|
2151
|
+
@GeneralUtilities.check_arguments
|
|
2152
|
+
def update_year_in_first_line_of_file(self, file: str) -> None:
|
|
2153
|
+
current_year = str(GeneralUtilities.get_now().year)
|
|
2154
|
+
lines = GeneralUtilities.read_lines_from_file(file)
|
|
2155
|
+
lines[0] = re.sub("\\d\\d\\d\\d", current_year, lines[0])
|
|
2156
|
+
GeneralUtilities.write_lines_to_file(file, lines)
|
|
2157
|
+
|
|
2158
|
+
@GeneralUtilities.check_arguments
|
|
2159
|
+
def get_external_ip_address(self) -> str:
|
|
2160
|
+
information = self.get_externalnetworkinformation_as_json_string()
|
|
2161
|
+
parsed = json.loads(information)
|
|
2162
|
+
return parsed["IPAddress"]
|
|
2163
|
+
|
|
2164
|
+
@GeneralUtilities.check_arguments
|
|
2165
|
+
def get_country_of_external_ip_address(self) -> str:
|
|
2166
|
+
information = self.get_externalnetworkinformation_as_json_string()
|
|
2167
|
+
parsed = json.loads(information)
|
|
2168
|
+
return parsed["Country"]
|
|
2169
|
+
|
|
2170
|
+
@GeneralUtilities.check_arguments
|
|
2171
|
+
def get_externalnetworkinformation_as_json_string(self, clientinformation_link: str = 'https://clientinformation.anion327.de') -> str:
|
|
2172
|
+
headers = {'Cache-Control': 'no-cache'}
|
|
2173
|
+
response = requests.get(clientinformation_link, timeout=5, headers=headers)
|
|
2174
|
+
network_information_as_json_string = GeneralUtilities.bytes_to_string(response.content)
|
|
2175
|
+
return network_information_as_json_string
|
|
2176
|
+
|
|
2177
|
+
@GeneralUtilities.check_arguments
|
|
2178
|
+
def change_file_extensions(self, folder: str, from_extension: str, to_extension: str, recursive: bool, ignore_case: bool) -> None:
|
|
2179
|
+
extension_to_compare: str = None
|
|
2180
|
+
if ignore_case:
|
|
2181
|
+
extension_to_compare = from_extension.lower()
|
|
2182
|
+
else:
|
|
2183
|
+
extension_to_compare = from_extension
|
|
2184
|
+
for file in GeneralUtilities.get_direct_files_of_folder(folder):
|
|
2185
|
+
if (ignore_case and file.lower().endswith(f".{extension_to_compare}") or not ignore_case and file.endswith(f".{extension_to_compare}")):
|
|
2186
|
+
p = Path(file)
|
|
2187
|
+
p.rename(p.with_suffix('.'+to_extension))
|
|
2188
|
+
if recursive:
|
|
2189
|
+
for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
|
|
2190
|
+
self.change_file_extensions(subfolder, from_extension, to_extension, recursive, ignore_case)
|
|
2191
|
+
|
|
2192
|
+
@GeneralUtilities.check_arguments
|
|
2193
|
+
def __add_chapter(self, main_reference_file, reference_content_folder, number: int, chaptertitle: str, content: str = None):
|
|
2194
|
+
if content is None:
|
|
2195
|
+
content = "TXDX add content here"
|
|
2196
|
+
filename = str(number).zfill(2)+"_"+chaptertitle.replace(' ', '-')
|
|
2197
|
+
file = f"{reference_content_folder}/{filename}.md"
|
|
2198
|
+
full_title = f"{number}. {chaptertitle}"
|
|
2199
|
+
|
|
2200
|
+
GeneralUtilities.append_line_to_file(main_reference_file, f"- [{full_title}](./{filename}.md)")
|
|
2201
|
+
|
|
2202
|
+
GeneralUtilities.ensure_file_exists(file)
|
|
2203
|
+
GeneralUtilities.write_text_to_file(file, f"""# {full_title}
|
|
2204
|
+
|
|
2205
|
+
{content}
|
|
2206
|
+
""".replace("XDX", "ODO"))
|
|
2207
|
+
|
|
2208
|
+
@GeneralUtilities.check_arguments
|
|
2209
|
+
def generate_arc42_reference_template(self, repository: str, productname: str = None, subfolder: str = None):
|
|
2210
|
+
productname: str
|
|
2211
|
+
if productname is None:
|
|
2212
|
+
productname = os.path.basename(repository)
|
|
2213
|
+
if subfolder is None:
|
|
2214
|
+
subfolder = "Other/Reference"
|
|
2215
|
+
reference_root_folder = f"{repository}/{subfolder}"
|
|
2216
|
+
reference_content_folder = reference_root_folder + "/Technical"
|
|
2217
|
+
if os.path.isdir(reference_root_folder):
|
|
2218
|
+
raise ValueError(f"The folder '{reference_root_folder}' does already exist.")
|
|
2219
|
+
GeneralUtilities.ensure_directory_exists(reference_root_folder)
|
|
2220
|
+
GeneralUtilities.ensure_directory_exists(reference_content_folder)
|
|
2221
|
+
main_reference_file = f"{reference_root_folder}/Reference.md"
|
|
2222
|
+
GeneralUtilities.ensure_file_exists(main_reference_file)
|
|
2223
|
+
GeneralUtilities.write_text_to_file(main_reference_file, f"""# {productname}
|
|
2224
|
+
|
|
2225
|
+
TXDX add minimal service-description here.
|
|
2226
|
+
|
|
2227
|
+
## Technical documentation
|
|
2228
|
+
|
|
2229
|
+
""".replace("XDX", "ODO"))
|
|
2230
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 1, 'Introduction and Goals', """## Overview
|
|
2231
|
+
|
|
2232
|
+
TXDX
|
|
2233
|
+
|
|
2234
|
+
## Quality goals
|
|
2235
|
+
|
|
2236
|
+
TXDX
|
|
2237
|
+
|
|
2238
|
+
## Stakeholder
|
|
2239
|
+
|
|
2240
|
+
| Name | How to contact | Reason |
|
|
2241
|
+
| ---- | -------------- | ------ |""")
|
|
2242
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 2, 'Constraints', """## Technical constraints
|
|
2243
|
+
|
|
2244
|
+
| Constraint-identifier | Constraint | Reason |
|
|
2245
|
+
| --------------------- | ---------- | ------ |
|
|
2246
|
+
|
|
2247
|
+
## Organizational constraints
|
|
2248
|
+
|
|
2249
|
+
| Constraint-identifier | Constraint | Reason |
|
|
2250
|
+
| --------------------- | ---------- | ------ |""")
|
|
2251
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 3, 'Context and Scope', """## Context
|
|
2252
|
+
|
|
2253
|
+
TXDX
|
|
2254
|
+
|
|
2255
|
+
## Scope
|
|
2256
|
+
|
|
2257
|
+
TXDX""")
|
|
2258
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 4, 'Solution Strategy', """TXDX""")
|
|
2259
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 5, 'Building Block View', """TXDX""")
|
|
2260
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 6, 'Runtime View', """TXDX""")
|
|
2261
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 7, 'Deployment View', """## Infrastructure-overview
|
|
2262
|
+
|
|
2263
|
+
TXDX
|
|
2264
|
+
|
|
2265
|
+
## Infrastructure-requirements
|
|
2266
|
+
|
|
2267
|
+
TXDX
|
|
2268
|
+
|
|
2269
|
+
## Deployment-proecsses
|
|
2270
|
+
|
|
2271
|
+
TXDX""")
|
|
2272
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 8, 'Crosscutting Concepts', """TXDX""")
|
|
2273
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 9, 'Architectural Decisions', """## Decision-board
|
|
2274
|
+
|
|
2275
|
+
| Decision-identifier | Date | Decision | Reason and notes |
|
|
2276
|
+
| ------------------- | ---- | -------- | ---------------- |""") # empty because there are no decisions yet
|
|
2277
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 10, 'Quality Requirements', """TXDX""")
|
|
2278
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 11, 'Risks and Technical Debt', """## Risks
|
|
2279
|
+
|
|
2280
|
+
Currently there are no known risks.
|
|
2281
|
+
|
|
2282
|
+
## Technical debts
|
|
2283
|
+
|
|
2284
|
+
Currently there are no technical debts.""")
|
|
2285
|
+
self.__add_chapter(main_reference_file, reference_content_folder, 12, 'Glossary', """## Terms
|
|
2286
|
+
|
|
2287
|
+
| Term | Meaning |
|
|
2288
|
+
| ---- | ------- |
|
|
2289
|
+
|
|
2290
|
+
## Abbreviations
|
|
2291
|
+
|
|
2292
|
+
| Abbreviation | Meaning |
|
|
2293
|
+
| ------------ | ------- |""")
|
|
2294
|
+
|
|
2295
|
+
GeneralUtilities.append_to_file(main_reference_file, """
|
|
2296
|
+
|
|
2297
|
+
## Responsibilities
|
|
2298
|
+
|
|
2299
|
+
| Responsibility | Name and contact-information |
|
|
2300
|
+
| --------------- | ---------------------------- |
|
|
2301
|
+
| Product-owner | TXDX |
|
|
2302
|
+
| Product-manager | TXDX |
|
|
2303
|
+
| Support | TXDX |
|
|
2304
|
+
|
|
2305
|
+
## License & Pricing
|
|
2306
|
+
|
|
2307
|
+
TXDX
|
|
2308
|
+
|
|
2309
|
+
## External resources
|
|
2310
|
+
|
|
2311
|
+
- [Repository](TXDX)
|
|
2312
|
+
- [Productive-System](TXDX)
|
|
2313
|
+
- [QualityCheck-system](TXDX)
|
|
2314
|
+
""".replace("XDX", "ODO"))
|
|
2315
|
+
|
|
2316
|
+
@GeneralUtilities.check_arguments
|
|
2317
|
+
def run_with_timeout(self, method, timeout_in_seconds: float) -> bool:
|
|
2318
|
+
# Returns true if the method was terminated due to a timeout
|
|
2319
|
+
# Returns false if the method terminates in the given time
|
|
2320
|
+
p = multiprocessing.Process(target=method)
|
|
2321
|
+
p.start()
|
|
2322
|
+
p.join(timeout_in_seconds)
|
|
2323
|
+
if p.is_alive():
|
|
2324
|
+
p.kill()
|
|
2325
|
+
p.join()
|
|
2326
|
+
return True
|
|
2327
|
+
else:
|
|
2328
|
+
return False
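# Illustrative usage sketch (not part of the package): the target callable below is hypothetical and must be
# picklable (e.g. a module-level function) because run_with_timeout uses multiprocessing.
# import time
# def long_running_task():
#     time.sleep(10)
# timed_out = sc.run_with_timeout(long_running_task, 2.5)  # True, because the task is killed after 2.5 seconds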
|
|
2329
|
+
|
|
2330
|
+
@GeneralUtilities.check_arguments
|
|
2331
|
+
def ensure_local_docker_network_exists(self, network_name: str) -> None:
|
|
2332
|
+
if not self.local_docker_network_exists(network_name):
|
|
2333
|
+
self.create_local_docker_network(network_name)
|
|
2334
|
+
|
|
2335
|
+
@GeneralUtilities.check_arguments
|
|
2336
|
+
def ensure_local_docker_network_does_not_exist(self, network_name: str) -> None:
|
|
2337
|
+
if self.local_docker_network_exists(network_name):
|
|
2338
|
+
self.remove_local_docker_network(network_name)
|
|
2339
|
+
|
|
2340
|
+
@GeneralUtilities.check_arguments
|
|
2341
|
+
def local_docker_network_exists(self, network_name: str) -> bool:
|
|
2342
|
+
return network_name in self.get_all_local_existing_docker_networks()
|
|
2343
|
+
|
|
2344
|
+
@GeneralUtilities.check_arguments
|
|
2345
|
+
def get_all_local_existing_docker_networks(self) -> list[str]:
|
|
2346
|
+
program_call_result = self.run_program("docker", "network list")
|
|
2347
|
+
std_out = program_call_result[1]
|
|
2348
|
+
std_out_lines = std_out.split("\n")[1:]
|
|
2349
|
+
result: list[str] = []
|
|
2350
|
+
for std_out_line in std_out_lines:
|
|
2351
|
+
normalized_line = ';'.join(std_out_line.split())
|
|
2352
|
+
splitted = normalized_line.split(";")
|
|
2353
|
+
result.append(splitted[1])
|
|
2354
|
+
return result
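# Illustrative example (not part of the package): "docker network list" prints a table like
#   NETWORK ID     NAME      DRIVER    SCOPE
#   f2b9a2a1c3d4   bridge    bridge    local
# get_all_local_existing_docker_networks skips the header-line and returns the NAME-column,
# e.g. ["bridge", "host", "none"].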
|
|
2355
|
+
|
|
2356
|
+
@GeneralUtilities.check_arguments
|
|
2357
|
+
def remove_local_docker_network(self, network_name: str) -> None:
|
|
2358
|
+
self.run_program("docker", f"network remove {network_name}")
|
|
2359
|
+
|
|
2360
|
+
@GeneralUtilities.check_arguments
|
|
2361
|
+
def create_local_docker_network(self, network_name: str) -> None:
|
|
2362
|
+
self.run_program("docker", f"network create {network_name}")
|
|
2363
|
+
|
|
2364
|
+
@GeneralUtilities.check_arguments
|
|
2365
|
+
def format_xml_file(self, file: str) -> None:
|
|
2366
|
+
encoding = "utf-8"
|
|
2367
|
+
element = ET.XML(GeneralUtilities.read_text_from_file(file, encoding))
|
|
2368
|
+
ET.indent(element)
|
|
2369
|
+
GeneralUtilities.write_text_to_file(file, ET.tostring(element, encoding="unicode"), encoding)
|
|
2370
|
+
|
|
2371
|
+
@GeneralUtilities.check_arguments
|
|
2372
|
+
def install_requirementstxt_file(self, requirements_txt_file: str):
|
|
2373
|
+
folder: str = os.path.dirname(requirements_txt_file)
|
|
2374
|
+
filename: str = os.path.basename(requirements_txt_file)
|
|
2375
|
+
self.run_program_argsasarray("pip", ["install", "-r", filename], folder)
|
|
2376
|
+
|
|
2377
|
+
@GeneralUtilities.check_arguments
|
|
2378
|
+
def ocr_analysis_of_folder(self, folder: str, serviceaddress: str, extensions: list[str], languages: list[str]) -> list[str]: # Returns a list of changed files due to ocr-analysis.
|
|
2379
|
+
GeneralUtilities.write_message_to_stdout("Starting OCR analysis of folder " + folder)
|
|
2380
|
+
supported_extensions = ['png', 'jpg', 'jpeg', 'tiff', 'bmp', 'gif', 'pdf', 'docx', 'doc', 'xlsx', 'xls', 'pptx', 'ppt']
|
|
2381
|
+
changes_files: list[str] = []
|
|
2382
|
+
if extensions is None:
|
|
2383
|
+
extensions = supported_extensions
|
|
2384
|
+
for file in GeneralUtilities.get_direct_files_of_folder(folder):
|
|
2385
|
+
file_lower = file.lower()
|
|
2386
|
+
for extension in extensions:
|
|
2387
|
+
if file_lower.endswith("."+extension):
|
|
2388
|
+
if self.ocr_analysis_of_file(file, serviceaddress, languages):
|
|
2389
|
+
changes_files.append(file)
|
|
2390
|
+
break
|
|
2391
|
+
for subfolder in GeneralUtilities.get_direct_folders_of_folder(folder):
|
|
2392
|
+
for file in self.ocr_analysis_of_folder(subfolder, serviceaddress, extensions, languages):
|
|
2393
|
+
changes_files.append(file)
|
|
2394
|
+
return changes_files
|
|
2395
|
+
|
|
2396
|
+
@GeneralUtilities.check_arguments
|
|
2397
|
+
def ocr_analysis_of_file(self, file: str, serviceaddress: str, languages: list[str]) -> bool: # Returns true if the ocr-file was generated or updated. Returns false if the existing ocr-file was not changed.
|
|
2398
|
+
GeneralUtilities.write_message_to_stdout("Do OCR analysis of file " + file)
|
|
2399
|
+
supported_extensions = ['png', 'jpg', 'jpeg', 'tiff', 'bmp', 'webp', 'gif', 'pdf', 'rtf', 'docx', 'doc', 'odt', 'xlsx', 'xls', 'ods', 'pptx', 'ppt', 'odp']
|
|
2400
|
+
if not any(file.lower().endswith("."+extension) for extension in supported_extensions):
raise ValueError(f"The extension of file '{file}' is not supported. Supported extensions are: {', '.join(supported_extensions)}")
|
|
2403
|
+
target_file = file+".ocr.txt"
|
|
2404
|
+
hash_of_current_file: str = GeneralUtilities. get_sha256_of_file(file)
|
|
2405
|
+
if os.path.isfile(target_file):
|
|
2406
|
+
lines = GeneralUtilities.read_lines_from_file(target_file)
|
|
2407
|
+
previous_hash_of_current_file: str = lines[1].split(":")[1].strip()
|
|
2408
|
+
if hash_of_current_file == previous_hash_of_current_file:
|
|
2409
|
+
return False
|
|
2410
|
+
ocr_content = self.get_ocr_content_of_file(file, serviceaddress, languages)
|
|
2411
|
+
GeneralUtilities.ensure_file_exists(target_file)
|
|
2412
|
+
GeneralUtilities.write_text_to_file(file, f"""Name of file: \"{os.path.basename(file)}\""
|
|
2413
|
+
Hash of file: {hash_of_current_file}
|
|
2414
|
+
OCR-content:
|
|
2415
|
+
\"{ocr_content}\"""")
|
|
2416
|
+
return True
|
|
2417
|
+
|
|
2418
|
+
@GeneralUtilities.check_arguments
|
|
2419
|
+
def get_ocr_content_of_file(self, file: str, serviceaddress: str, languages: list[str]) -> str: # serviceaddress = None means local executable
|
|
2420
|
+
result: str = None
|
|
2421
|
+
extension = Path(file).suffix
|
|
2422
|
+
if serviceaddress is None:
|
|
2423
|
+
program_result = self.run_program_argsasarray("simpleocr", ["--File", file, "--Languages", "+".join(languages)] + languages)
|
|
2424
|
+
result = program_result[1]
|
|
2425
|
+
else:
|
|
2426
|
+
languages_for_url = '%2B'.join(languages)
|
|
2427
|
+
package_url: str = f"https://{serviceaddress}/GetOCRContent?languages={languages_for_url}&fileType={extension}"
|
|
2428
|
+
headers = {'Cache-Control': 'no-cache'}
|
|
2429
|
+
r = requests.put(package_url, timeout=5, headers=headers, data=GeneralUtilities.read_binary_from_file(file))
|
|
2430
|
+
if r.status_code != 200:
|
|
2431
|
+
raise ValueError(f"Checking for latest tor package resulted in HTTP-response-code {r.status_code}.")
|
|
2432
|
+
result = GeneralUtilities.bytes_to_string(r.content)
|
|
2433
|
+
return result
|
|
2434
|
+
|
|
2435
|
+
@GeneralUtilities.check_arguments
|
|
2436
|
+
def ocr_analysis_of_repository(self, folder: str, serviceaddress: str, extensions: list[str], languages: list[str]) -> None:
|
|
2437
|
+
self.assert_is_git_repository(folder)
|
|
2438
|
+
changed_files = self.ocr_analysis_of_folder(folder, serviceaddress, extensions, languages)
|
|
2439
|
+
for changed_ocr_file in changed_files:
|
|
2440
|
+
GeneralUtilities.assert_condition(changed_ocr_file.endswith(".ocr.txt"), f"File '{changed_ocr_file}' is not an OCR-file. It should end with '.ocr.txt'.")
|
|
2441
|
+
base_file = changed_ocr_file[:-len(".ocr.txt")]
|
|
2442
|
+
GeneralUtilities.assert_condition(os.path.isfile(base_file), f"Base file '{base_file}' does not exist. The OCR-file '{changed_ocr_file}' is not valid.")
|
|
2443
|
+
base_file_relative_path = os.path.relpath(base_file, folder)
|
|
2444
|
+
base_file_diff_program_result = self.run_program("git", f"diff --quiet -- \"{base_file_relative_path}\"", folder, throw_exception_if_exitcode_is_not_zero=False)
|
|
2445
|
+
has_staged_changes: bool = None
|
|
2446
|
+
if base_file_diff_program_result[0] == 0:
|
|
2447
|
+
has_staged_changes = False
|
|
2448
|
+
elif base_file_diff_program_result[0] == 1:
|
|
2449
|
+
has_staged_changes = True
|
|
2450
|
+
else:
|
|
2451
|
+
raise RuntimeError(f"Unexpected exit code {base_file_diff_program_result[0]} when checking for staged changes of file '{base_file_relative_path}'.")
|
|
2452
|
+
if has_staged_changes:
|
|
2453
|
+
changed_ocr_file_relative_path = os.path.relpath(changed_ocr_file, folder)
|
|
2454
|
+
self.run_program_argsasarray("git", ["add", changed_ocr_file_relative_path], folder)
|
|
2455
|
+
|
|
2456
|
+
@GeneralUtilities.check_arguments
|
|
2457
|
+
def update_timestamp_in_file(self, target_file: str) -> None:
|
|
2458
|
+
lines = GeneralUtilities.read_lines_from_file(target_file)
|
|
2459
|
+
new_lines = []
|
|
2460
|
+
prefix: str = "# last update: "
|
|
2461
|
+
for line in lines:
|
|
2462
|
+
if line.startswith(prefix):
|
|
2463
|
+
new_lines.append(prefix+GeneralUtilities.datetime_to_string_with_timezone(GeneralUtilities.get_now()))
|
|
2464
|
+
else:
|
|
2465
|
+
new_lines.append(line)
|
|
2466
|
+
GeneralUtilities.write_lines_to_file(target_file, new_lines)
|
|
2467
|
+
|
|
2468
|
+
def do_and_log_task(self, name_of_task: str, task):
|
|
2469
|
+
try:
|
|
2470
|
+
self.log.log(f"Start action \"{name_of_task}\".", LogLevel.Information)
|
|
2471
|
+
result = task()
|
|
2472
|
+
if result is None:
|
|
2473
|
+
result = 0
|
|
2474
|
+
return result
|
|
2475
|
+
except Exception as e:
|
|
2476
|
+
self.log.log_exception(f"Error while running action \"{name_of_task}\".", e, LogLevel.Error)
|
|
2477
|
+
return 1
|
|
2478
|
+
finally:
|
|
2479
|
+
self.log.log(f"Finished action \"{name_of_task}\".", LogLevel.Information)
|
|
2480
|
+
|
|
2481
|
+
def get_lines_of_code_with_default_excluded_patterns(self, repository: str) -> int:
|
|
2482
|
+
return self.get_lines_of_code(repository, self.default_excluded_patterns_for_loc)
|
|
2483
|
+
|
|
2484
|
+
default_excluded_patterns_for_loc: list[str] = [".txt", ".md", ".vscode", "Resources", "Reference", ".gitignore", ".gitattributes", "Other/Metrics"]
|
|
2485
|
+
|
|
2486
|
+
def get_lines_of_code(self, repository: str, excluded_pattern: list[str]) -> int:
|
|
2487
|
+
self.assert_is_git_repository(repository)
|
|
2488
|
+
result: int = 0
|
|
2489
|
+
self.log.log(f"Calculate lines of code in repository '{repository}' with excluded patterns: {', '.join(excluded_pattern)}",LogLevel.Debug)
|
|
2490
|
+
git_response = self.run_program("git", "ls-files", repository)
|
|
2491
|
+
files: list[str] = GeneralUtilities.string_to_lines(git_response[1])
|
|
2492
|
+
for file in files:
|
|
2493
|
+
if os.path.isfile(os.path.join(repository, file)):
|
|
2494
|
+
if self.__is_excluded_by_glob_pattern(file, excluded_pattern):
|
|
2495
|
+
self.log.log(f"File '{file}' is ignored because it matches an excluded pattern.",LogLevel.Diagnostic)
|
|
2496
|
+
else:
|
|
2497
|
+
full_file: str = os.path.join(repository, file)
|
|
2498
|
+
if GeneralUtilities.is_binary_file(full_file):
|
|
2499
|
+
self.log.log(f"File '{file}' is ignored because it is a binary-file.",LogLevel.Diagnostic)
|
|
2500
|
+
else:
|
|
2501
|
+
self.log.log(f"Count lines of file '{file}'.",LogLevel.Diagnostic)
|
|
2502
|
+
length = len(GeneralUtilities.read_nonempty_lines_from_file(full_file))
|
|
2503
|
+
result = result+length
|
|
2504
|
+
else:
|
|
2505
|
+
self.log.log(f"File '{file}' is ignored because it does not exist.",LogLevel.Diagnostic)
|
|
2506
|
+
return result
|
|
2507
|
+
|
|
2508
|
+
def __is_excluded_by_glob_pattern(self, file: str, excluded_patterns: list[str]) -> bool:
|
|
2509
|
+
for pattern in excluded_patterns:
|
|
2510
|
+
if fnmatch.fnmatch(file, f"*{pattern}*"):
|
|
2511
|
+
return True
|
|
2512
|
+
return False
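# Illustrative example (not part of the package): the exclusion-check wraps each pattern in wildcards,
# so a default pattern like "Other/Metrics" excludes every file whose repository-relative path contains
# that substring:
# fnmatch.fnmatch("Other/Metrics/LinesOfCode.csv", "*Other/Metrics*")  # True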
|
|
2513
|
+
|
|
2514
|
+
@GeneralUtilities.check_arguments
|
|
2515
|
+
def create_zip_archive(self, folder: str, zip_file: str) -> None:
|
|
2516
|
+
GeneralUtilities.assert_folder_exists(folder)
|
|
2517
|
+
GeneralUtilities.assert_file_does_not_exist(zip_file)
|
|
2518
|
+
folder = os.path.abspath(folder)
|
|
2519
|
+
with zipfile.ZipFile(zip_file, "w", zipfile.ZIP_DEFLATED) as zipf:
|
|
2520
|
+
for root, _, files in os.walk(folder):
|
|
2521
|
+
for file in files:
|
|
2522
|
+
file_path = os.path.join(root, file)
|
|
2523
|
+
arcname = os.path.relpath(file_path, start=folder)
|
|
2524
|
+
zipf.write(file_path, arcname)
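# Illustrative usage sketch (not part of the package): the paths below are hypothetical.
# sc.create_zip_archive("/data/reports", "/data/reports.zip")  # zips the folder-content with folder-relative arcnames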
|
|
2525
|
+
|
|
2526
|
+
@GeneralUtilities.check_arguments
|
|
2527
|
+
def start_local_test_service(self, file: str):
|
|
2528
|
+
example_folder = os.path.dirname(file)
|
|
2529
|
+
docker_compose_file = os.path.join(example_folder, "docker-compose.yml")
|
|
2530
|
+
for service in self.get_services_from_yaml_file(docker_compose_file):
|
|
2531
|
+
self.kill_docker_container(service)
|
|
2532
|
+
example_name = os.path.basename(example_folder)
|
|
2533
|
+
title = f"Test{example_name}"
|
|
2534
|
+
self.run_program("docker", f"compose -p {title.lower()} up --detach", example_folder, title=title)
|
|
2535
|
+
|
|
2536
|
+
@GeneralUtilities.check_arguments
|
|
2537
|
+
def stop_local_test_service(self, file: str):
|
|
2538
|
+
example_folder = os.path.dirname(file)
|
|
2539
|
+
example_name = os.path.basename(example_folder)
|
|
2540
|
+
title = f"Test{example_name}"
|
|
2541
|
+
self.run_program("docker", f"compose -p {title.lower()} down", example_folder, title=title)
|