skilleter_thingy-0.0.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (67)
  1. skilleter_thingy/__init__.py +0 -0
  2. skilleter_thingy/addpath.py +107 -0
  3. skilleter_thingy/aws.py +548 -0
  4. skilleter_thingy/borger.py +269 -0
  5. skilleter_thingy/colour.py +213 -0
  6. skilleter_thingy/console_colours.py +63 -0
  7. skilleter_thingy/dc_curses.py +278 -0
  8. skilleter_thingy/dc_defaults.py +221 -0
  9. skilleter_thingy/dc_util.py +50 -0
  10. skilleter_thingy/dircolors.py +308 -0
  11. skilleter_thingy/diskspacecheck.py +67 -0
  12. skilleter_thingy/docker.py +95 -0
  13. skilleter_thingy/docker_purge.py +113 -0
  14. skilleter_thingy/ffind.py +536 -0
  15. skilleter_thingy/files.py +142 -0
  16. skilleter_thingy/ggit.py +90 -0
  17. skilleter_thingy/ggrep.py +154 -0
  18. skilleter_thingy/git.py +1368 -0
  19. skilleter_thingy/git2.py +1307 -0
  20. skilleter_thingy/git_br.py +180 -0
  21. skilleter_thingy/git_ca.py +142 -0
  22. skilleter_thingy/git_cleanup.py +287 -0
  23. skilleter_thingy/git_co.py +220 -0
  24. skilleter_thingy/git_common.py +61 -0
  25. skilleter_thingy/git_hold.py +154 -0
  26. skilleter_thingy/git_mr.py +92 -0
  27. skilleter_thingy/git_parent.py +77 -0
  28. skilleter_thingy/git_review.py +1416 -0
  29. skilleter_thingy/git_update.py +385 -0
  30. skilleter_thingy/git_wt.py +96 -0
  31. skilleter_thingy/gitcmp_helper.py +322 -0
  32. skilleter_thingy/gitlab.py +193 -0
  33. skilleter_thingy/gitprompt.py +274 -0
  34. skilleter_thingy/gl.py +174 -0
  35. skilleter_thingy/gphotosync.py +610 -0
  36. skilleter_thingy/linecount.py +155 -0
  37. skilleter_thingy/logger.py +112 -0
  38. skilleter_thingy/moviemover.py +133 -0
  39. skilleter_thingy/path.py +156 -0
  40. skilleter_thingy/photodupe.py +110 -0
  41. skilleter_thingy/phototidier.py +248 -0
  42. skilleter_thingy/popup.py +87 -0
  43. skilleter_thingy/process.py +112 -0
  44. skilleter_thingy/py_audit.py +131 -0
  45. skilleter_thingy/readable.py +270 -0
  46. skilleter_thingy/remdir.py +126 -0
  47. skilleter_thingy/rmdupe.py +550 -0
  48. skilleter_thingy/rpylint.py +91 -0
  49. skilleter_thingy/run.py +334 -0
  50. skilleter_thingy/s3_sync.py +383 -0
  51. skilleter_thingy/splitpics.py +99 -0
  52. skilleter_thingy/strreplace.py +82 -0
  53. skilleter_thingy/sysmon.py +435 -0
  54. skilleter_thingy/tfm.py +920 -0
  55. skilleter_thingy/tfm_pane.py +595 -0
  56. skilleter_thingy/tfparse.py +101 -0
  57. skilleter_thingy/tidy.py +160 -0
  58. skilleter_thingy/trimpath.py +84 -0
  59. skilleter_thingy/window_rename.py +92 -0
  60. skilleter_thingy/xchmod.py +125 -0
  61. skilleter_thingy/yamlcheck.py +89 -0
  62. skilleter_thingy-0.0.22.dist-info/LICENSE +619 -0
  63. skilleter_thingy-0.0.22.dist-info/METADATA +22 -0
  64. skilleter_thingy-0.0.22.dist-info/RECORD +67 -0
  65. skilleter_thingy-0.0.22.dist-info/WHEEL +5 -0
  66. skilleter_thingy-0.0.22.dist-info/entry_points.txt +43 -0
  67. skilleter_thingy-0.0.22.dist-info/top_level.txt +1 -0
skilleter_thingy/s3_sync.py
@@ -0,0 +1,383 @@
+ #!/usr/bin/env python3
+
+ """Selectively synchronise an S3 bucket to a local destination.
+ Similar to the 'aws s3 sync' CLI command, but faster, with better
+ options for filtering files; it only downloads from S3 to local and
+ doesn't support the huge range of command-line options."""
+
+ import os
+ import argparse
+ import sys
+ import fnmatch
+ import datetime
+ import threading
+ import queue
+
+ from pathlib import Path
+
+ import boto3
+
+ from botocore.exceptions import ClientError
+
+ ################################################################################
+
+ # Number of download threads to run - doing the downloads in threads is about
+ # six times faster than doing so sequentially.
+
+ NUM_THREADS = 12
+
+ # Translate our environment names to AWS ARNs
+
+ AWS_ACCOUNT_ARNS = {
+     'prod': 'arn:aws:iam::459580378985:role/ERSReadOnlyRole',
+     'test': 'arn:aws:iam::094438481629:role/ERSReadOnlyRole',
+     'dev': 'arn:aws:iam::402653103803:role/ERSReadOnlyRole',
+     'mgmt': 'arn:aws:iam::125943076446:role/ERSReadOnlyRole',
+     'audit': 'arn:aws:iam::229627323276:role/ERSReadOnlyRole',
+ }
+
+ ################################################################################
+
+ def error(msg, status=1):
+     """Report an error message and exit"""
+
+     print(f'ERROR: {msg}')
+     sys.exit(status)
+
+ ################################################################################
+
+ def verbose(args, msg):
+     """Report a message in verbose mode"""
+
+     if not args or args.verbose:
+         print(msg)
+
+ ################################################################################
+
+ def splitlist(lists, delimiter):
+     """Flatten a list of delimiter-separated strings into a single list"""
+
+     result = []
+
+     for item in lists or []:
+         result += item.split(delimiter)
+
+     return result
+
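A quick illustration of splitlist's behaviour (editor's sketch, not part of the package):

    splitlist(['*.jpg,*.png', '*.gif'], ',')   # -> ['*.jpg', '*.png', '*.gif']
    splitlist(None, ',')                       # -> []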
+ ################################################################################
+
+ def configure():
+     """Parse the command line"""
+
+     parser = argparse.ArgumentParser(description='Selectively sync an S3 bucket to a local directory')
+
+     parser.add_argument('--verbose', '-v', action='store_true', help='Report verbose results')
+
+     parser.add_argument('--profile', '-p', action='store', help='Specify the AWS profile')
+
+     parser.add_argument('--include', '-i', action='append', help='Comma-separated list of wildcards to sync - if specified, only files matching one or more of these are synced')
+     parser.add_argument('--exclude', '-x', action='append', help='Comma-separated list of wildcards NOT to sync - if specified, only files NOT matching any of these are synced')
+
+     parser.add_argument('--include-type', '-I', action='append',
+                         help='Comma-separated list of file types to sync - if specified, only files matching one or more of these are synced')
+     parser.add_argument('--exclude-type', '-X', action='append',
+                         help='Comma-separated list of file types NOT to sync - if specified, only files NOT matching any of these are synced')
+
+     # TODO: parser.add_argument('--delete', '-d', action='store_true', help='Delete local files that don\'t exist in the bucket')
+     parser.add_argument('--force', '-f', action='store_true', help='Always overwrite local files (by default files are only overwritten if they are older or a different size)')
+
+     parser.add_argument('--max-objects', '-m', action='store', type=int, help='Limit the number of objects to download')
+     parser.add_argument('--threads', '-t', action='store', type=int, default=NUM_THREADS, help='Number of parallel threads to run')
+     parser.add_argument('source', action='store', nargs=1, type=str, help='Name of the S3 bucket, optionally including a path within the bucket')
+     parser.add_argument('destination', action='store', nargs=1, type=str, help='Name of the local directory to sync into')
+
+     args = parser.parse_args()
+
+     # Convert the 1-entry list positional arguments to single items
+
+     args.source = args.source[0]
+     args.destination = args.destination[0]
+
+     # Convert the include/exclude parameters to lists
+
+     args.include = splitlist(args.include, ',')
+     args.exclude = splitlist(args.exclude, ',')
+
+     args.include_type = splitlist(args.include_type, ',')
+     args.exclude_type = splitlist(args.exclude_type, ',')
+
+     return args
+
+ ################################################################################
+
+ def get_client(args):
+     """Create an S3 client for the specified profile"""
+
+     if args.profile:
+         profile = args.profile.split('-')[0]
+     else:
+         try:
+             profile = os.environ['AWS_PROFILE']
+         except KeyError:
+             error('The AWS profile must be specified via the AWS_PROFILE environment variable or the --profile command line option')
+
+     try:
+         arn = AWS_ACCOUNT_ARNS[profile]
+     except KeyError:
+         error(f'Invalid AWS profile "{profile}"')
+
+     sts_connection = boto3.client('sts')
+
+     try:
+         acct_b = sts_connection.assume_role(RoleArn=arn, RoleSessionName='s3-selective-sync')
+     except ClientError as exc:
+         error(exc.response['Error']['Message'])
+
+     access_key = acct_b['Credentials']['AccessKeyId']
+     secret_key = acct_b['Credentials']['SecretAccessKey']
+     session_token = acct_b['Credentials']['SessionToken']
+
+     session = boto3.Session(
+         aws_access_key_id=access_key,
+         aws_secret_access_key=secret_key,
+         aws_session_token=session_token)
+
+     return session.client('s3')
+
+ ################################################################################
+
+ def download_filter(args, s3_client, s3_bucket, s3_object):
+     """Decide whether to download an object from S3.
+     Returns True if the object should be downloaded, or False if it should be skipped."""
+
+     # Ignore directories
+
+     if s3_object['Key'][-1] == '/':
+         verbose(args, f'{s3_object["Key"]} is a prefix, so will be skipped')
+         return False
+
+     # Handle the object as a Path for simplicity
+
+     object_path = Path(s3_object['Key'])
+
+     # Filter according to wildcard - patterns containing '/' match the full key,
+     # all others match just the final path component
+
+     if args.include:
+         for wildcard in args.include:
+             if '/' in wildcard:
+                 if fnmatch.fnmatch(s3_object['Key'], wildcard):
+                     break
+             elif fnmatch.fnmatch(object_path.name, wildcard):
+                 break
+         else:
+             verbose(args, f'"{s3_object["Key"]}" does not match any include wildcards, so will be skipped')
+             return False
+
+     if args.exclude:
+         for wildcard in args.exclude:
+             if '/' in wildcard:
+                 if fnmatch.fnmatch(s3_object['Key'], wildcard):
+                     verbose(args, f'"{s3_object["Key"]}" matches one or more exclude wildcards, so will be skipped')
+                     return False
+             elif fnmatch.fnmatch(object_path.name, wildcard):
+                 verbose(args, f'"{s3_object["Key"]}" matches one or more exclude wildcards, so will be skipped')
+                 return False
+
+     # Filter according to content type
+
+     if args.include_type or args.exclude_type:
+         object_type = s3_client.head_object(Bucket=s3_bucket, Key=s3_object['Key'])['ContentType']
+
+         if args.include_type:
+             for include_type in args.include_type:
+                 if object_type == include_type:
+                     break
+             else:
+                 verbose(args, f'"{s3_object["Key"]}" is of type "{object_type}" which does not match any entries in the type include list, so will be skipped')
+                 return False
+
+         if args.exclude_type:
+             for exclude_type in args.exclude_type:
+                 if object_type == exclude_type:
+                     verbose(args, f'"{s3_object["Key"]}" is of type "{object_type}" which matches one of the entries in the type exclude list, so will be skipped')
+                     return False
+
+     # Unless we are in force-download mode, check whether the destination file
+     # already exists and needs to be overwritten
+
+     if not args.force:
+         dest_file = Path(args.destination) / object_path
+
+         if dest_file.exists():
+             # Overwrite only if the destination is older than the object
+
+             dest_stat = dest_file.stat()
+             dest_timestamp = datetime.datetime.fromtimestamp(dest_stat.st_mtime, tz=datetime.timezone.utc)
+
+             if dest_timestamp >= s3_object['LastModified']:
+                 verbose(args, f'Destination file already exists and is the same age or newer, so "{s3_object["Key"]}" will be skipped')
+                 return False
+
+     return True
+
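A note on the wildcard matching above: fnmatch's '*' also matches '/' characters, so a pattern containing a slash is matched against the whole key, while any other pattern is applied to the basename only. An illustrative sketch:

    import fnmatch
    from pathlib import Path

    key = 'photos/2021/holiday.jpg'
    fnmatch.fnmatch(key, 'photos/*')             # True - '*' crosses the '/' in '2021/holiday.jpg'
    fnmatch.fnmatch(Path(key).name, '*.jpg')     # True - basename match
    fnmatch.fnmatch(Path(key).name, 'photos/*')  # False - the basename contains no slash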
+ ################################################################################
+
+ def download(args, s3_client, mkdir_lock, bucket, s3_object):
+     """Attempt to download an object from S3 to an equivalent local location"""
+
+     local_path = Path(args.destination) / s3_object['Key']
+
+     with mkdir_lock:
+         if local_path.parent.exists():
+             if not local_path.parent.is_dir():
+                 error(f'Unable to download "{s3_object["Key"]}" as the destination path is not a directory')
+         else:
+             local_path.parent.mkdir(parents=True)
+
+     # Download the object, then set the file timestamp to match the object
+
+     object_timestamp = s3_object['LastModified'].timestamp()
+     s3_client.download_file(bucket, s3_object['Key'], str(local_path))
+     os.utime(local_path, (object_timestamp, object_timestamp))
+
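The freshness check in download_filter compares timezone-aware datetimes: boto3 returns 'LastModified' as an aware datetime, and Python refuses to compare aware and naive values, hence the explicit tz argument. A minimal sketch (the filename is hypothetical):

    import datetime
    import os

    mtime = os.stat('downloaded.file').st_mtime
    local = datetime.datetime.fromtimestamp(mtime, tz=datetime.timezone.utc)
    # 'local >= s3_object["LastModified"]' is now an aware-vs-aware comparison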
+ ################################################################################
+
+ def downloader(args, s3_client, mkdir_lock, bucket, object_queue, error_queue, sem_counter, real_thread=True):
+     """Download thread"""
+
+     finished = False
+     while not finished:
+         # Get the next object to download, waiting for one to be added to the queue.
+         # In single-threaded mode an empty queue means everything has been processed.
+
+         try:
+             s3_object = object_queue.get(block=real_thread)
+         except queue.Empty:
+             break
+
+         # If it is a candidate for downloading (meets the criteria specified on the command
+         # line and, unless force-downloading, hasn't already been downloaded) then attempt to
+         # download it.
+
+         # If the semaphore is being used to limit the number of downloads, attempt to acquire
+         # it. If we couldn't, then we've reached the download limit so we'll finish. The permit
+         # is deliberately never released - each one corresponds to one downloaded object.
+
+         if download_filter(args, s3_client, bucket, s3_object):
+             if not sem_counter or sem_counter.acquire(blocking=False):
+                 print(f'Downloading "{s3_object["Key"]}"')
+                 try:
+                     download(args, s3_client, mkdir_lock, bucket, s3_object)
+                 except ClientError as exc:
+                     error_queue.put(f'Failed to download "{s3_object["Key"]}" - {exc.response["Error"]["Message"]}')
+                 else:
+                     print(f'    Done "{s3_object["Key"]}"')
+             else:
+                 finished = True
+
+         # Indicate the queued item has been consumed
+
+         object_queue.task_done()
+
+     # If we were using a download semaphore then drain the queue (this will happen in all
+     # threads and will never terminate, but we're running as a daemon so it doesn't matter too much).
+
+     if sem_counter and real_thread:
+         while True:
+             object_queue.get()
+             object_queue.task_done()
+
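The downloader threads follow the standard daemon-worker queue idiom: workers block on Queue.get(), mark each item with task_done(), and the producer's Queue.join() returns once every queued item has been processed. A standalone sketch of the pattern (illustrative payload):

    import queue
    import threading

    work = queue.Queue()

    def worker():
        while True:
            item = work.get()
            print(f'processing {item}')
            work.task_done()

    for _ in range(4):
        threading.Thread(target=worker, daemon=True).start()

    for item in range(10):
        work.put(item)

    work.join()  # returns once all ten items have been marked done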
+ ################################################################################
+
+ def thread_exception_handler(args):
+     """Brute-force thread exception handler"""
+
+     _ = args
+     sys.exit(1)
+
+ ################################################################################
+
+ def main():
+     """Entry point"""
+
+     args = configure()
+
+     s3_client = get_client(args)
+
+     bucket = args.source
+
+     # Remove the 's3://' prefix, if present, so that we can split the bucket
+     # name from the folder, if one is specified
+
+     if bucket.startswith('s3://'):
+         bucket = bucket[5:]
+
+     if '/' in bucket:
+         bucket, prefix = bucket.split('/', 1)
+     else:
+         prefix = ''
+
+     # Semaphore used as the download counter when --max-objects is in force
+
+     sem_counter = threading.Semaphore(value=args.max_objects) if args.max_objects else None
+
+     # Create the download queue for the worker threads
+
+     object_queue = queue.Queue()
+
+     # Create the queue for reporting errors back from the threads
+
+     error_queue = queue.Queue()
+
+     # Lock to prevent race conditions around directory creation
+
+     mkdir_lock = threading.Lock()
+
+     if args.threads > 1:
+         # Create the worker threads
+
+         threading.excepthook = thread_exception_handler
+
+         for _ in range(args.threads):
+             thread = threading.Thread(target=downloader, daemon=True, args=(args, s3_client, mkdir_lock, bucket, object_queue, error_queue, sem_counter))
+             thread.start()
+
+     # Read all the objects in the bucket and queue them for consideration by the download workers
+
+     for page in s3_client.get_paginator('list_objects_v2').paginate(Bucket=bucket, Prefix=prefix):
+         for s3_object in page.get('Contents', []):
+             object_queue.put(s3_object)
+
+     print('Finished queuing objects')
+
+     if args.threads > 1:
+         # Wait for the queue to drain
+
+         object_queue.join()
+     else:
+         downloader(args, s3_client, mkdir_lock, bucket, object_queue, error_queue, sem_counter, real_thread=False)
+
+     # Report any errors
+
+     if not error_queue.empty():
+         sys.stderr.write('\nErrors were encountered downloading some of the objects:\n\n')
+
+         while not error_queue.empty():
+             error_msg = error_queue.get()
+             sys.stderr.write(f'{error_msg}\n')
+             error_queue.task_done()
+
+ ################################################################################
+
+ def s3_sync():
+     """Entry point"""
+
+     try:
+         main()
+     except KeyboardInterrupt:
+         sys.exit(1)
+     except BrokenPipeError:
+         sys.exit(2)
+
+ ################################################################################
+
+ if __name__ == '__main__':
+     s3_sync()
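list_objects_v2 returns at most 1000 objects per call, which is why main() drives it through a paginator rather than calling it directly. The same listing pattern in isolation (bucket name and prefix are hypothetical):

    import boto3

    s3 = boto3.client('s3')

    for page in s3.get_paginator('list_objects_v2').paginate(Bucket='example-bucket', Prefix='photos/'):
        for s3_object in page.get('Contents', []):
            print(s3_object['Key'], s3_object['Size'], s3_object['LastModified'])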
skilleter_thingy/splitpics.py
@@ -0,0 +1,99 @@
+ #! /usr/bin/env python3
+
+ ################################################################################
+ """ Copy a directory full of pictures to a destination, creating subdirectories
+     with N pictures in each in the destination directory
+ """
+ ################################################################################
+
+ import os
+ import glob
+ import argparse
+
+ from PIL import Image
+
+ ################################################################################
+ # Constants
+
+ DEFAULT_SOURCE_DIR = '/storage/Starred Photos/'
+ DEFAULT_DEST_DIR = '/media/jms/48A7-BE16'
+ DEFAULT_MAX_SIZE = 3840
+
+ ################################################################################
+
+ def parse_command_line():
+     """ Parse the command line """
+
+     parser = argparse.ArgumentParser(description='Copy a collection of pictures to a set of numbered directories')
+
+     parser.add_argument('--pics', type=int, help='Number of pictures per directory (default is not to use numbered subdirectories)', default=None)
+     parser.add_argument('--max-size', type=int, help='Maximum size for each image in pixels (default=%d, images will be resized if larger)' %
+                         DEFAULT_MAX_SIZE, default=DEFAULT_MAX_SIZE)
+     parser.add_argument('source', nargs=1, help='Source directory', default=DEFAULT_SOURCE_DIR)
+     parser.add_argument('destination', nargs=1, help='Destination directory', default=DEFAULT_DEST_DIR)
+
+     args = parser.parse_args()
+
+     return args
+
+ ################################################################################
+
+ def copy_images(args):
+     """ Copy the images """
+
+     dir_num = -1
+
+     pictures = glob.glob(os.path.join(args.source[0], '*'))
+     dest_dir = args.destination[0]
+
+     if not os.path.isdir(dest_dir):
+         os.makedirs(dest_dir)
+
+     for index, picture in enumerate(pictures):
+         picture_name = os.path.basename(picture)
+
+         # Create a new directory in the destination every N pictures
+
+         if args.pics and index % args.pics == 0:
+             dir_num += 1
+             dest_dir = os.path.join(args.destination[0], '%05d' % dir_num)
+             if not os.path.isdir(dest_dir):
+                 os.makedirs(dest_dir)
+
+         print('%d/%d: Copying %s to %s' % (index + 1, len(pictures), picture, dest_dir))
+
+         # Resize the image if necessary
+
+         image = Image.open(picture)
+
+         if args.max_size and (image.width > args.max_size or image.height > args.max_size):
+             if image.width > image.height:
+                 scale = image.width / args.max_size
+             else:
+                 scale = image.height / args.max_size
+
+             new_size = (round(image.width / scale), round(image.height / scale))
+
+             print('    Resizing from %d x %d to %d x %d' % (image.width, image.height, new_size[0], new_size[1]))
+
+             # Image.resize() returns the resized copy rather than modifying in place
+
+             image = image.resize(new_size)
+
+         # Write the image
+
+         destination = os.path.join(dest_dir, picture_name)
+
+         image.save(destination)
+
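Pillow also provides Image.thumbnail, which performs the same longest-edge clamping as the code above but modifies the image in place and never enlarges it; an equivalent call might look like this (filenames are hypothetical):

    from PIL import Image

    image = Image.open('example.jpg')
    image.thumbnail((3840, 3840))   # shrink in place, preserving aspect ratio
    image.save('example-resized.jpg')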
+ ################################################################################
+
+ def splitpics():
+     """Entry point"""
+
+     args = parse_command_line()
+
+     copy_images(args)
+
+ ################################################################################
+
+ if __name__ == '__main__':
+     splitpics()
skilleter_thingy/strreplace.py
@@ -0,0 +1,82 @@
+ #! /usr/bin/env python3
+
+ ################################################################################
+ """ Textual search and replace
+
+     For those occasions when you want to search and replace strings containing
+     regexp-like characters that upset sed.
+
+     Copyright (C) 2018 John Skilleter """
+ ################################################################################
+
+ import os
+ import sys
+ import argparse
+ import tempfile
+
+ ################################################################################
+
+ def main():
+     """ Main function """
+
+     parser = argparse.ArgumentParser(description='Textual search and replace')
+     parser.add_argument('-i', '--inplace', action='store_true', help='Do an in-place search and replace on the input file')
+     parser.add_argument('search', nargs=1, action='store', help='Search text')
+     parser.add_argument('replace', nargs=1, action='store', help='Replacement text')
+     parser.add_argument('infile', nargs='?', action='store', help='Input file')
+     parser.add_argument('outfile', nargs='?', action='store', help='Output file')
+
+     args = parser.parse_args()
+
+     # Sanity check
+
+     if args.inplace and (not args.infile or args.outfile):
+         print('For in-place operations you must specify an input file and no output file')
+         sys.exit(1)
+
+     # Open the input file
+
+     if args.infile:
+         infile = open(args.infile, 'r')
+     else:
+         infile = sys.stdin
+
+     # Open the output file, using a temporary file in the same directory as the input file
+     # if we are doing in-place operations
+
+     if args.outfile:
+         outfile = open(args.outfile, 'w')
+     elif args.inplace:
+         outfile = tempfile.NamedTemporaryFile(mode='w', delete=False, dir=os.path.dirname(args.infile) or '.')
+     else:
+         outfile = sys.stdout
+
+     # Perform the searchy-replacey-ness
+
+     for data in infile:
+         outfile.write(data.replace(args.search[0], args.replace[0]))
+
+     # If we are doing an in-place replace then juggle the temporary and input files
+
+     if args.inplace:
+         mode = os.stat(args.infile).st_mode
+         outfile.close()
+         infile.close()
+         os.rename(outfile.name, args.infile)
+         os.chmod(args.infile, mode)
+
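The in-place path above is the usual write-to-temp-then-rename idiom: because the temporary file is created in the same directory as the input, the final os.rename stays on one filesystem and is atomic. A condensed sketch of the same pattern (the helper name is illustrative):

    import os
    import tempfile

    def replace_in_place(path, search, replace):
        """Rewrite path with every occurrence of search replaced, keeping its permissions"""

        mode = os.stat(path).st_mode

        with open(path) as src, tempfile.NamedTemporaryFile(
                mode='w', delete=False, dir=os.path.dirname(path) or '.') as tmp:
            for line in src:
                tmp.write(line.replace(search, replace))

        os.rename(tmp.name, path)
        os.chmod(path, mode)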
+ ################################################################################
+
+ def strreplace():
+     """Entry point"""
+
+     try:
+         main()
+     except KeyboardInterrupt:
+         sys.exit(1)
+     except BrokenPipeError:
+         sys.exit(2)
+
+ ################################################################################
+
+ if __name__ == '__main__':
+     strreplace()
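Unlike sed, str.replace treats both arguments as literal text, which is the whole point of the tool; for example:

    print('match a.*b literally'.replace('a.*b', 'X'))   # -> 'match X literally'
    # sed 's/a.*b/X/' would instead consume everything from the first 'a' to the last 'b'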