dar-backup 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dar_backup/.darrc CHANGED
@@ -14,13 +14,13 @@ verbose:
  # -vs
  
  # shows diretory currently being processed
- # -vd
+ -vd
  
  # shows detailed messages, not related to files and directories
  # -vm
  
  # shows summary of each treated directory, including average compression
- # -vf
+ -vf
  
  # equivalent to "-vm -vs -vt"
  # -va
dar_backup/__about__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.6.2"
+ __version__ = "0.6.4"
dar_backup/manager.py CHANGED
@@ -21,8 +21,9 @@
  """
  
  
- import os
  import argparse
+ import os
+ import re
  import sys
  
  
@@ -30,7 +31,9 @@ from . import __about__ as about
  from dar_backup.config_settings import ConfigSettings
  from dar_backup.util import run_command
  from dar_backup.util import setup_logging
+ from datetime import datetime
  from time import time
+ from typing import Dict, List, NamedTuple
  
  # Constants
  SCRIPTNAME = os.path.basename(__file__)
@@ -70,9 +73,20 @@ def create_db(backup_def: str, config_settings: ConfigSettings):
          logger.error(f"stderr: {stderr}")
          logger.error(f"stdout: {stdout}")
  
+     return process.returncode
  
  
- def list_db(backup_def: str, config_settings: ConfigSettings):
+ def list_catalogs(backup_def: str, config_settings: ConfigSettings) -> NamedTuple:
+     """
+     Returns:
+       a typing.NamedTuple of class dar-backup.util.CommandResult with the following properties:
+       - process: of type subprocess.CompletedProcess: The result of the command execution.
+       - stdout: of type str: The standard output of the command.
+       - stderr: of type str: The standard error of the command.
+       - returncode: of type int: The return code of the command.
+       - timeout: of type int: The timeout value in seconds used to run the command.
+       - command: of type list[str): The command executed.
+     """
      database = f"{backup_def}{DB_SUFFIX}"
      database_path = os.path.join(config_settings.backup_dir, database)
      if not os.path.exists(database_path):
@@ -87,27 +101,101 @@ def list_db(backup_def: str, config_settings: ConfigSettings):
          logger.error(f"stdout: {stdout}")
      else:
          print(stdout)
-     sys.exit(process.returncode)
+     return process
+
+
+ def cat_no_for_name(archive: str, config_settings: ConfigSettings) -> int:
+     """
+     Find the catalog number for the given archive name
+
+     Returns:
+       - the found number, if the archive catalog is present in the database
+       - "-1" if the archive is not found
+     """
+     backup_def = backup_def_from_archive(archive)
+     process = list_catalogs(backup_def, config_settings)
+     if process.returncode != 0:
+         logger.error(f"Error listing catalogs for backup def: '{backup_def}'")
+         return -1
+     line_no = 1
+     for line in process.stdout.splitlines():
+         #print(f"{line_no}: '{line}'")
+         line_no += 1
+         search = re.search(f".*?(\d+)\s+.*?({archive}).*", line)
+         if search:
+             #print(f"FOUND: archive: {search.group(2)}, catalog #: '{search.group(1)}'")
+             logger.info(f"Found archive: '{archive}', catalog #: '{search.group(1)}'")
+             return int(search.group(1))
+     return -1
+
+
+
+
+
+ def list_catalog_contents(catalog_number: int, backup_def: str, config_settings: ConfigSettings):
+     """
+     List the contents of catalog # in catalog database for given backup definition
+     """
+     database = f"{backup_def}{DB_SUFFIX}"
+     database_path = os.path.join(config_settings.backup_dir, database)
+     if not os.path.exists(database_path):
+         logger.error(f'Database not found: "{database_path}"')
+         return 1
+     command = ['dar_manager', '--base', database_path, '-u', f"{catalog_number}"]
+     process = run_command(command)
+     stdout, stderr = process.stdout, process.stderr
+     if process.returncode != 0:
+         logger.error(f'Error listing catalogs for: "{database_path}"')
+         logger.error(f"stderr: {stderr}")
+         logger.error(f"stdout: {stdout}")
+     else:
+         print(stdout)
+     return process.returncode
+
+
+ def find_file(file, backup_def, config_settings):
+     """
+     Find a specific file
+     """
+     database = f"{backup_def}{DB_SUFFIX}"
+     database_path = os.path.join(config_settings.backup_dir, database)
+     if not os.path.exists(database_path):
+         logger.error(f'Database not found: "{database_path}"')
+         return 1
+     command = ['dar_manager', '--base', database_path, '-f', f"{file}"]
+     process = run_command(command)
+     stdout, stderr = process.stdout, process.stderr
+     if process.returncode != 0:
+         logger.error(f'Error finding file: {file} in: "{database_path}"')
+         logger.error(f"stderr: {stderr}")
+         logger.error(f"stdout: {stdout}")
+     else:
+         print(stdout)
+     return process.returncode
  
  
- def add_specific_archive(archive: str, config_settings: ConfigSettings):
+ def add_specific_archive(archive: str, config_settings: ConfigSettings, directory: str =None) -> int:
      # sanity check - does dar backup exist?
+     if not directory:
+         directory = config_settings.backup_dir
      archive = os.path.basename(archive) # remove path if it was given
-     archive_path = os.path.join(config_settings.backup_dir, f'{archive}.1.dar')
-     if not os.path.exists(archive_path):
-         logger.error(f'dar backup: "{archive_path}" not found, exiting')
-         sys.exit(1)
+     archive_path = os.path.join(directory, f'{archive}')
+
+     archive_test_path = os.path.join(directory, f'{archive}.1.dar')
+     if not os.path.exists(archive_test_path):
+         logger.error(f'dar backup: "{archive_test_path}" not found, exiting')
+         return 1
  
      # sanity check - does backup definition exist?
      backup_definition = archive.split('_')[0]
      backup_def_path = os.path.join(config_settings.backup_d_dir, backup_definition)
      if not os.path.exists(backup_def_path):
          logger.error(f'backup definition "{backup_definition}" not found (--add-specific-archive option probably not correct), exiting')
-         sys.exit(1)
+         return 1
  
      database = f"{backup_definition}{DB_SUFFIX}"
      database_path = os.path.realpath(os.path.join(config_settings.backup_dir, database))
-     logger.info(f'Add "{archive_path}" to catalog "{database}"')
+     logger.info(f'Add "{archive_path}" to catalog: "{database}"')
  
      command = ['dar_manager', '--base', database_path, "--add", archive_path, "-ai", "-Q"]
      process = run_command(command)
@@ -122,7 +210,99 @@ def add_specific_archive(archive: str, config_settings: ConfigSettings):
          logger.error(f"stderr: {stderr}")
          logger.error(f"stdout: {stdout}")
  
-     sys.exit(process.returncode)
+     return process.returncode
+
+
+
+ def add_directory(args: argparse.ArgumentParser, config_settings: ConfigSettings) -> None:
+     """
+     Loop over the DAR archives in the given directory args.add_dir in increasing order by date and add them to their catalog database.
+
+     Args:
+         args (argparse.ArgumentParser): The command-line arguments object containing the add_dir attribute.
+         config_settings (ConfigSettings): The configuration settings object.
+
+     This function performs the following steps:
+     1. Checks if the specified directory exists. If not, raises a RuntimeError.
+     2. Uses a regular expression to match DAR archive files with base names in the format <string>_{FULL, DIFF, INCR}_YYYY-MM-DD.
+     3. Lists the DAR archives in the specified directory and extracts their base names and dates.
+     4. Sorts the DAR archives by date.
+     5. Loops over the sorted DAR archives and adds each archive to its catalog database using the add_specific_archive function.
+
+     Example:
+         args = argparse.ArgumentParser()
+         args.add_dir = '/path/to/dar/archives'
+         config_settings = ConfigSettings()
+         add_directory(args, config_settings)
+     """
+     if not os.path.exists(args.add_dir):
+         raise RuntimeError(f"Directory {args.add_dir} does not exist")
+
+     # Regular expression to match DAR archive files with base name and date in the format <string>_{FULL, DIFF, INCR}_YYYY-MM-DD
+     #dar_pattern = re.compile(r'^(.*?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2}))\.\d+\.dar$')
+     dar_pattern = re.compile(r'^(.*?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2}))\.1.dar$') # just read slice #1 of an archive
+     # List of DAR archives with their dates and base names
+     dar_archives = []
+
+     for filename in os.listdir(args.add_dir):
+         logger.debug(f"check if '{filename}' is a dar archive slice #1?")
+         match = dar_pattern.match(filename)
+         if match:
+             base_name = match.group(1)
+             date_str = match.group(3)
+             date_obj = datetime.strptime(date_str, '%Y-%m-%d')
+             dar_archives.append((date_obj, base_name))
+             logger.debug(f" -> yes: base name: {base_name}, date: {date_str}")
+
+     if not dar_archives or len(dar_archives) == 0:
+         logger.info(f"No 'dar' archives found in directory {args.add_dir}")
+         return
+
+     # Sort the DAR archives by date
+     dar_archives.sort()
+
+     # Loop over the sorted DAR archives and process them
+     result: List[Dict] = []
+     for date_obj, base_name in dar_archives:
+         logger.info(f"Adding dar archive: '{base_name}' to it's catalog database")
+         result_archive = add_specific_archive(base_name, config_settings, args.add_dir)
+         result.append({ f"{base_name}" : result_archive})
+         if result_archive != 0:
+             logger.error(f"Something went wrong added {base_name} to it's catalog")
+
+     logger.debug(f"Results adding archives found in: '{args.add_dir}': result")
+
+
+ def backup_def_from_archive(archive: str) -> str:
+     """
+     return the backup definition from archive name
+     """
+     search = re.search("(.*?)_", archive)
+     backup_def = search.group(1)
+     logger.debug(f"backup definition: '{backup_def}' from given archive '{archive}'")
+     return backup_def
+
+
+
+ def remove_specific_archive(archive: str, config_settings: ConfigSettings) -> int:
+     backup_def = backup_def_from_archive(archive)
+     database_path = os.path.join(config_settings.backup_dir, f"{backup_def}{DB_SUFFIX}")
+     cat_no = cat_no_for_name(archive, config_settings)
+     if cat_no >= 0:
+         command = ['dar_manager', '--base', database_path, "--delete", str(cat_no)]
+         process = run_command(command)
+     else:
+         logger.error(f"archive: '{archive}' not found in in't catalog database: {database_path}")
+         return cat_no
+
+     if process.returncode == 0:
+         logger.info(f"'{archive}' removed from it's catalog")
+     else:
+         logger.error(process.stdout)
+         logger.error(process.sterr)
+
+     return process.returncode
+
  
  
  
@@ -142,7 +322,9 @@ def main():
      parser.add_argument('-d', '--backup-def', type=str, help='Restrict to work only on this backup definition')
      parser.add_argument('--add-specific-archive', type=str, help='Add this archive to catalog database')
      parser.add_argument('--remove-specific-archive', type=str, help='Remove this archive from catalog database')
-     parser.add_argument('--list-db', action='store_true', help='List catalogs in databases')
+     parser.add_argument('--list-catalog', action='store_true', help='List catalogs in databases for all backup definitions')
+     parser.add_argument('--list-catalog-contents', type=int, help="List contents of a catalog. Argument is the 'archive #', '-d <definition>' argument is also required")
+     parser.add_argument('--find-file', type=str, help="List catalogs containing <path>/file. '-d <definition>' argument is also required")
      parser.add_argument('--verbose', action='store_true', help='Be more verbose')
      parser.add_argument('--log-level', type=str, help="`debug` or `trace`, default is `info`", default="info")
      parser.add_argument('--log-stdout', action='store_true', help='also print log messages to stdout')
@@ -196,6 +378,13 @@ See section 15 and section 16 in the supplied "LICENSE" file.''')
          logger.error("you can't add and remove archives in the same operation, exiting")
          sys.exit(1)
  
+     if args.add_dir and args.add_specific_archive:
+         logger.error("you cannot add both a directory and an archive")
+         sys.exit(1)
+
+     if args.backup_def and not args.backup_def.strip():
+         logger.error(f"No backup definition given to --backup-def")
+
      if args.backup_def:
          backup_def_path = os.path.join(config_settings.backup_d_dir, args.backup_def)
          if not os.path.exists(backup_def_path):
@@ -203,6 +392,16 @@ See section 15 and section 16 in the supplied "LICENSE" file.''')
              sys.exit(1)
  
  
+     if args.list_catalog_contents and not args.backup_def:
+         logger.error(f"--list-catalog-contents requires the --backup-def, exiting")
+         sys.exit(1)
+
+     if args.find_file and not args.backup_def:
+         logger.error(f"--find-file requires the --backup-def, exiting")
+         sys.exit(1)
+
+
+
      # Modify config settings based on the arguments
      if args.alternate_archive_dir:
          if not os.path.exists(args.alternate_archive_dir):
@@ -213,35 +412,53 @@ See section 15 and section 16 in the supplied "LICENSE" file.''')
  
      if args.create_db:
          if args.backup_def:
-             create_db(args.backup_def, config_settings)
+             sys.exit(create_db(args.backup_def, config_settings))
          else:
              for root, dirs, files in os.walk(config_settings.backup_d_dir):
                  for file in files:
                      current_backupdef = os.path.basename(file)
-                     create_db(current_backupdef, config_settings)
-         sys.exit(0)
+                     logger.debug(f"Create catalog db for backup definition: '{current_backupdef}'")
+                     result = create_db(current_backupdef, config_settings)
+                     if result != 0:
+                         sys.exit(result)
  
      if args.add_specific_archive:
-         add_specific_archive(args.add_specific_archive, config_settings)
+         sys.exit(add_specific_archive(args.add_specific_archive, config_settings))
  
      if args.add_dir:
-         # Implement add directory logic
-         pass
+         sys.exit(add_directory(args, config_settings))
+
  
      if args.remove_specific_archive:
-         # Implement remove specific archive logic
-         pass
+
+         if remove_specific_archive(args.remove_specific_archive, config_settings) == 0:
+             sys.exit(0)
+         else:
+             sys.exit(1)
+
  
  
-     if args.list_db:
+     if args.list_catalog:
          if args.backup_def:
-             list_db(args.backup_def, config_settings)
+             process = list_catalogs(args.backup_def, config_settings)
+             result = process.returncode
          else:
+             result = 0
              for root, dirs, files in os.walk(config_settings.backup_d_dir):
                  for file in files:
                      current_backupdef = os.path.basename(file)
-                     list_db(current_backupdef, config_settings)
-         sys.exit(0)
+                     if list_catalogs(current_backupdef, config_settings).returncode != 0:
+                         result = 1
+         sys.exit(result)
+
+     if args.list_catalog_contents:
+         result = list_catalog_contents(args.list_catalog_contents, args.backup_def, config_settings)
+         sys.exit(result)
+
+     if args.find_file:
+         result = find_file(args.find_file, args.backup_def, config_settings)
+         sys.exit(result)
+
  
  if __name__ == "__main__":
      main()
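
The new add_directory and backup_def_from_archive functions above both key off the archive naming convention <definition>_{FULL|DIFF|INCR}_YYYY-MM-DD and only look at slice #1 of each archive. A minimal sketch of that pattern in isolation, assuming nothing beyond the regex shown in the diff (the filename is a made-up example that merely follows the convention, and the second dot is escaped here for strictness):

import re
from datetime import datetime

# Slice-name pattern mirroring the one in add_directory above
dar_pattern = re.compile(r'^(.*?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2}))\.1\.dar$')

# Hypothetical filename following the <definition>_<type>_<date>.1.dar convention
match = dar_pattern.match("media_FULL_2024-11-30.1.dar")
if match:
    base_name = match.group(1)                                # "media_FULL_2024-11-30"
    date_obj = datetime.strptime(match.group(3), "%Y-%m-%d")  # date used to sort archives
    backup_def = base_name.split("_")[0]                      # "media", as backup_def_from_archive derives it
    print(base_name, backup_def, date_obj.date())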
dar_backup/util.py CHANGED
@@ -15,10 +15,11 @@ import re
  import subprocess
  import shlex
  import sys
+ import threading
  import traceback
  from datetime import datetime
  
- from typing import NamedTuple
+ from typing import NamedTuple, List
  
  logger=None
  
@@ -101,45 +102,110 @@ class CommandResult(NamedTuple):
      command: list[str]
  
      def __str__(self):
-         return f"CommandResult: [Return Code: '{self.returncode}', Stdout: '{self.stdout}', Stderr: '{self.stderr}', Timeout: '{self.timeout}', Command: '{self.command}']"
+         #return f"CommandResult: [Return Code: '{self.returncode}', \nCommand: '{' '.join(map(shlex.quote, self.command))}', \nStdout:\n'{self.stdout}', \nStderr:\n'{self.stderr}', \nTimeout: '{self.timeout}']"
+         return f"CommandResult: [Return Code: '{self.returncode}', \nCommand: '{' '.join(map(shlex.quote, self.command))}']"
  
  
- def run_command(command: list[str], timeout: int=30) -> typing.NamedTuple:
+
+ def _stream_reader(pipe, log_func, output_accumulator: List[str]):
+     """
+     Reads lines from the subprocess pipe, logs them, and accumulates them.
+
+     Args:
+         pipe: The pipe to read from (stdout or stderr).
+         log_func: The logging function to use (e.g., logger.info, logger.error).
+         output_accumulator: A list to store the lines read from the pipe.
+     """
+     with pipe:
+         for line in iter(pipe.readline, ''):
+             stripped_line = line.strip()
+             output_accumulator.append(stripped_line)  # Accumulate the output
+             log_func(stripped_line)  # Log the output in real time
+
+
+ def run_command(command: List[str], timeout: int = 30) -> CommandResult:
      """
-     Executes a given command via subprocess and captures its output.
+     Executes a given command via subprocess, logs its output in real time, and returns the result.
  
      Args:
          command (list): The command to be executed, represented as a list of strings.
-         timeout (int): The maximum time in seconds to wait for the command to complete.Defaults to 30 seconds.
+         timeout (int): The maximum time in seconds to wait for the command to complete. Defaults to 30 seconds.
  
      Returns:
-         a typing.NamedTuple of class dar-backup.util.CommandResult with the following properties:
-         - process: of type subprocess.CompletedProcess: The result of the command execution.
-         - stdout: of type str: The standard output of the command.
-         - stderr: of type str: The standard error of the command.
-         - returncode: of type int: The return code of the command.
-         - timeout: of type int: The timeout value in seconds used to run the command.
-         - command: of type list[str): The command executed.
-
+         A CommandResult NamedTuple with the following properties:
+         - process: subprocess.CompletedProcess
+         - stdout: str: The full standard output of the command.
+         - stderr: str: The full standard error of the command.
+         - returncode: int: The return code of the command.
+         - timeout: int: The timeout value in seconds used to run the command.
+         - command: list[str]: The command executed.
+
+     Logs:
+         - Logs standard output (`stdout`) in real-time at the INFO log level.
+         - Logs standard error (`stderr`) in real-time at the ERROR log level.
+
      Raises:
-         subprocess.TimeoutExpired: if the command execution times out (see `timeout` parameter).
-         Exception: raise exceptions during command runs.
+         subprocess.TimeoutExpired: If the command execution times out (see `timeout` parameter).
+         Exception: If other exceptions occur during command execution.
+
+     Notes:
+         - While the command runs, its `stdout` and `stderr` streams are logged in real-time.
+         - The returned `stdout` and `stderr` capture the complete output, even though the output is also logged.
+         - The command is forcibly terminated if it exceeds the specified timeout.
      """
-     stdout = None
-     stderr = None
+     stdout_lines = []  # To accumulate stdout
+     stderr_lines = []  # To accumulate stderr
+     process = None  # Track the process for cleanup
+
      try:
          logger.debug(f"Running command: {command}")
          process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
-         stdout, stderr = process.communicate(timeout) # Wait with timeout
-         result = CommandResult(process=process, stdout=stdout, stderr=stderr, returncode=process.returncode, timeout=timeout, command=command)
-         logger.debug(f"Command result: {str(result)}")
+
+         # Start threads to read and log stdout and stderr
+         stdout_thread = threading.Thread(target=_stream_reader, args=(process.stdout, logger.info, stdout_lines))
+         stderr_thread = threading.Thread(target=_stream_reader, args=(process.stderr, logger.error, stderr_lines))
+
+         stdout_thread.start()
+         stderr_thread.start()
+
+         # Wait for process to complete or timeout
+         process.wait(timeout=timeout)
+
      except subprocess.TimeoutExpired:
-         process.terminate()
+         if process:
+             process.terminate()
          logger.error(f"Command: '{command}' timed out and was terminated.")
          raise
      except Exception as e:
         logger.error(f"Error running command: {command}", exc_info=True)
         raise
+     finally:
+         # Ensure threads are joined to clean up
+         if stdout_thread.is_alive():
+             stdout_thread.join()
+         if stderr_thread.is_alive():
+             stderr_thread.join()
+
+         # Ensure process streams are closed
+         if process and process.stdout:
+             process.stdout.close()
+         if process and process.stderr:
+             process.stderr.close()
+
+     # Combine captured stdout and stderr lines into single strings
+     stdout = "\n".join(stdout_lines)
+     stderr = "\n".join(stderr_lines)
+
+     # Build the result object
+     result = CommandResult(
+         process=process,
+         stdout=stdout,
+         stderr=stderr,
+         returncode=process.returncode,
+         timeout=timeout,
+         command=command
+     )
+     logger.debug(f"Command result: {result}")
      return result
  
  
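
The reworked run_command above streams subprocess output to the module-level logger via two reader threads while still returning the accumulated output in a CommandResult. A minimal usage sketch, assuming only what this diff shows: the module-level logger in dar_backup.util must be set before calling, and since setup_logging's signature is not part of this diff, a plain logging.Logger is assigned directly, which is an assumption.

import logging
from dar_backup import util

# Assumption: run_command reads the module-level `logger`; the package presumably sets it
# via dar_backup.util.setup_logging, whose signature is not shown in this diff.
logging.basicConfig(level=logging.INFO)
util.logger = logging.getLogger("dar-backup-example")

result = util.run_command(["echo", "hello"], timeout=10)
print(result.returncode)   # 0 on success
print(result.stdout)       # accumulated stdout ("hello"); each line was also logged at INFO
print(result.stderr)       # accumulated stderr; any lines would have been logged at ERROR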
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dar-backup
- Version: 0.6.2
+ Version: 0.6.4
  Summary: A script to do full, differential and incremental backups using dar. Some files are restored from the backups during verification, after which par2 redundancy files are created. The script also has a cleanup feature to remove old backups and par2 files.
  Project-URL: Homepage, https://github.com/per2jensen/dar-backup
  Project-URL: Issues, https://github.com/per2jensen/dar-backup/issues
@@ -0,0 +1,13 @@
+ dar_backup/.darrc,sha256=3d9opAnnZGU9XLyQpTDsLtgo6hqsvZ3JU-yMLz-7_f0,2110
+ dar_backup/__about__.py,sha256=ZbrGjsYU2ekVSe1-DcOOl7YsjrwB1bbJTUQlEi8pMjU,21
+ dar_backup/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dar_backup/cleanup.py,sha256=DgmxSUwKrLLIQuYSIY_yTRhIuOMgI6ivjlQuH4u3wX4,9057
+ dar_backup/config_settings.py,sha256=CBMUhLOOZ-x7CRdS3vBDk4TYaGqC4N1Ot8IMH-qPaI0,3617
+ dar_backup/dar_backup.py,sha256=YZoVu9NJX3_WIkQIG8EMLSK3-VWdslI0c2XKrM2Un38,32214
+ dar_backup/manager.py,sha256=2qTcn1HiDrejc6S7dLpkylURGzMQ0QqTaSl2KQQ58Uw,19198
+ dar_backup/util.py,sha256=SSSJYM9lQZfubhTUBlX1xDGWmCpYEF3ePARmlY544xM,11283
+ dar_backup-0.6.4.dist-info/METADATA,sha256=9AKTUu-uyxNQqKoxtYdAWbAyMgHsmLCblvchEvT_B94,22496
+ dar_backup-0.6.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ dar_backup-0.6.4.dist-info/entry_points.txt,sha256=x9vnW-JEl8mpDJC69f_XBcn0mBSkV1U0cyvFV-NAP1g,126
+ dar_backup-0.6.4.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ dar_backup-0.6.4.dist-info/RECORD,,
@@ -1,13 +0,0 @@
- dar_backup/.darrc,sha256=ggex9N6eETOS6u003_QRRJMeWbveQfkT1lDBt0XpU-I,2112
- dar_backup/__about__.py,sha256=d7NGuoje3vHyudKIFR_PmfKozIOKDFvAhGx0QXiyuMw,21
- dar_backup/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- dar_backup/cleanup.py,sha256=DgmxSUwKrLLIQuYSIY_yTRhIuOMgI6ivjlQuH4u3wX4,9057
- dar_backup/config_settings.py,sha256=CBMUhLOOZ-x7CRdS3vBDk4TYaGqC4N1Ot8IMH-qPaI0,3617
- dar_backup/dar_backup.py,sha256=YZoVu9NJX3_WIkQIG8EMLSK3-VWdslI0c2XKrM2Un38,32214
- dar_backup/manager.py,sha256=lkw1ZAIdxY7WedLPKZMnHpuq_QbjkUdcG61ooiwUYpo,10197
- dar_backup/util.py,sha256=6lPCFHr3MDdaLWAW9EDMZ4jdL7pt8rki-5dOXcesmP8,8955
- dar_backup-0.6.2.dist-info/METADATA,sha256=PMfP8NhPwzhHRbjJt6_7Unqxbqqj13PW19QdYac6pcc,22496
- dar_backup-0.6.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- dar_backup-0.6.2.dist-info/entry_points.txt,sha256=x9vnW-JEl8mpDJC69f_XBcn0mBSkV1U0cyvFV-NAP1g,126
- dar_backup-0.6.2.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- dar_backup-0.6.2.dist-info/RECORD,,