cnda_dl-1.0.0-py3-none-any.whl → cnda_dl-1.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cnda_dl/cli.py CHANGED
@@ -6,11 +6,12 @@ Authors:
 Joey Scanga (scanga@wustl.edu)
 Ramone Agard (rhagard@wustl.edu)
 '''
-
+from .formatters import ParensOnRightFormatter1, Colors
 from glob import glob
 from matplotlib.ticker import EngFormatter
 from pathlib import Path
 from pyxnat import Interface
+import concurrent.futures
 import pyxnat
 import argparse
 import logging
@@ -25,8 +26,11 @@ import xml.etree.ElementTree as et
 import zipfile
 import datetime
 
+CONNECTION_POOL_SIZE = 10
+
 default_log_format = "%(levelname)s:%(funcName)s: %(message)s"
 sout_handler = logging.StreamHandler(stream=sys.stdout)
+sout_handler.setFormatter(ParensOnRightFormatter1())
 logging.basicConfig(level=logging.INFO,
                     handlers=[sout_handler],
                     format=default_log_format)
@@ -49,16 +53,16 @@ def handle_dir_creation(dir_path: Path):
         ans = ans.lower()
 
         if len(ans) != 1 or ans not in 'yn':
-            logger.info("Invalid response.")
+            logger.info("Invalid response")
         elif ans == 'y':
             dir_path.mkdir(parents=True)
             prompt_chosen = True
-            logger.info(f"new directory created at {dir_path}.")
+            logger.info(f"new directory created at {dir_path}")
         elif ans == 'n':
-            logger.info("Chose to not create a new directory. Aborting...")
+            logger.info("Chose to not create a new directory. Aborting")
             sys.exit(0)
         else:
-            logger.info("Invalid response.")
+            logger.info("Invalid response")
 
 
 def download_xml(central: Interface,
@@ -66,7 +70,7 @@ def download_xml(central: Interface,
                  project_id: str,
                  file_path: Path):
 
-    logger.info("Downloading session xml...")
+    logger.info("Downloading session xml")
     sub = central.select(f"/projects/{project_id}/subjects/{subject_id}")
     with open(file_path, "w") as f:
         f.write(sub.get().decode())
@@ -109,7 +113,6 @@ def download_experiment_dicoms(session_experiment: pyxnat.jsonutil.JsonTable,
                                xml_file_path: Path,
                                scan_number_start: str = None,
                                skip_unusable: bool = False):
-
     project_id = session_experiment["project"]
     exp_id = session_experiment['ID']
 
@@ -135,53 +138,77 @@ def download_experiment_dicoms(session_experiment: pyxnat.jsonutil.JsonTable,
         logger.info(f"The following scans were marked 'unusable' and will not be downloaded: \n\t {[s for s,q in quality_pairs.items() if q=='unusable']}")
 
     # Get total number of files
-    total_file_count, cur_file_count = 0, 0
-    for s in scans:
-        files = central.select(f"/projects/{project_id}/experiments/{exp_id}/scans/{s}/resources/files").get("")
-        total_file_count += len(files)
+    def _get_file_objects_in_scan(scan: str) -> dict:
+        file_objects = central.select(f"/projects/{project_id}/experiments/{exp_id}/scans/{scan}/resources/files").get("")
+        return {file_obj: scan for file_obj in file_objects}
+
+    all_scan_file_objects = {}
+    with concurrent.futures.ThreadPoolExecutor(max_workers=CONNECTION_POOL_SIZE) as executor:
+        future_dicts = executor.map(_get_file_objects_in_scan, scans)
+        for future_dict in future_dicts:
+            all_scan_file_objects.update(future_dict)
+
+    total_file_count = len(all_scan_file_objects.keys())
     logger.info(f"Total number of files: {total_file_count}")
 
+    # Make DICOM directories for each series number
+    for scan in scans:
+        series_path = session_dicom_dir / scan / "DICOM"
+        series_path.mkdir(parents=True, exist_ok=True)
+
     # So log message does not interfere with format of the progress bar
     logger.removeHandler(sout_handler)
-    downloaded_files = set()
     zero_size_files = set()
     fmt = EngFormatter('B')
 
+    # Function assigned to threads
+    def _download_session_file(f, scan):
+        file_attrs = {}
+        series_path = session_dicom_dir / scan / "DICOM"
+        assert series_path.is_dir()
+        file_attrs = {
+            "name": series_path / f._uri.split("/")[-1],
+            "size": fmt(int(f.size())) if f.size() else fmt(0),
+            "isempty": True if not f.size() else False,
+            "isdownloaded": False
+        }
+        if file_attrs["isempty"] and file_attrs["name"].exists():
+            return file_attrs
+        f.get(file_attrs["name"])
+        file_attrs["isdownloaded"] = True
+        return file_attrs
+
     # Download the session files
-    with progressbar.ProgressBar(max_value=total_file_count, redirect_stdout=True) as bar:
-        for s in scans:
-            logger.info(f" Downloading scan {s}...")
-            print(f"Downloading scan {s}...")
-            series_path = session_dicom_dir / s / "DICOM"
-            series_path.mkdir(parents=True, exist_ok=True)
-            files = central.select(f"/projects/{project_id}/experiments/{exp_id}/scans/{s}/resources/files").get("")
-            for f in files:
+    with (
+        progressbar.ProgressBar(max_value=total_file_count, redirect_stdout=True) as bar,
+        concurrent.futures.ThreadPoolExecutor(max_workers=CONNECTION_POOL_SIZE) as executor
+    ):
+        cur_file_count = 0
+        zero_size_files = []
+        futures = [executor.submit(_download_session_file, f, scan) for (f, scan) in all_scan_file_objects.items()]
+        for future in concurrent.futures.as_completed(futures):
+            try:
+                file_attrs = future.result()
                 cur_file_count += 1
-                add_file = True
-                file_name = series_path / f._uri.split("/")[-1]
-                file_size = fmt(int(f.size())) if f.size() else fmt(0)
-                file_info = f"File {f.attributes()['Name']}, {file_size} ({cur_file_count} out of {total_file_count})"
-                print("\t" + file_info)
-                logger.info("\t" + file_info)
-                if not f.size():
-                    msg = "\t-- File is empty"
-                    if file_name in downloaded_files:
-                        msg += " -- another copy was already downloaded, skipping download of this file"
-                        add_file = False
-                    else:
-                        zero_size_files.add(file_name)
-                    print(msg)
-                    logger.info(msg)
-                elif file_name in zero_size_files:
-                    zero_size_files.remove(file_name)
-                if add_file:
-                    f.get(file_name)
-                    downloaded_files.add(file_name)
                 bar.update(cur_file_count)
+                if file_attrs['isempty']:
+                    zero_size_files.append(file_attrs)
+                if file_attrs['isdownloaded']:
+                    msg = f"Downloaded file {file_attrs['name']}, {file_attrs['size']} ({cur_file_count} out of {total_file_count})"
+                    if len(msg) > (tsize := os.get_terminal_size()[0]) - tsize // 5:
+                        msg = msg[:tsize // 2 - 4] + f"{Colors.DARK_GREY}.......{Colors.RESET}" + msg[tsize // 2 + tsize // 5:]
+                    # if len(msg) > (tsize := os.get_terminal_size()[0]) and tsize % 2 == 1:
+                    #     msg = msg[:tsize // 2 - 4] + f"{Colors.DARK_GREY}.......{Colors.RESET}" + msg[tsize // 2 + tsize // 5:]
+                    logger.info(msg)
+                    # logger.info(f"\tDownloaded file {file_attrs['name']}, {file_attrs['size']} ({cur_file_count} out of {total_file_count})")
+                    print(msg)
+            except Exception as exc:
+                print(f"Task ended with an exception {exc}")
+
     logger.addHandler(sout_handler)
-    logger.info("Dicom download complete \n")
+    logger.info("DICOM download complete!")
     if len(zero_size_files) > 0:
-        logger.warning(f"The following downloaded files contained no data:\n{[f.label() for f in zero_size_files]} \nCheck these files for unintended missing data!")
+        logger.warning(msg := f"The following downloaded files contained no data:\n{[file_attrs['name'] for file_attrs in zero_size_files]} \nCheck these files for unintended missing data!")
 
 
 def download_nordic_zips(session: str,
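The enumeration step in this hunk fans out one pyxnat listing call per scan via `executor.map` and merges the per-scan results into a single dict keyed by file object. A stripped-down sketch of that map-and-merge shape, with a hypothetical `list_files(scan)` standing in for the `central.select(...).get("")` query:

```python
import concurrent.futures

CONNECTION_POOL_SIZE = 10  # mirrors the constant added at the top of cli.py

def collect_files(scans, list_files):
    """Fan out one listing call per scan, then flatten to a {file: scan} dict."""
    all_files = {}
    with concurrent.futures.ThreadPoolExecutor(max_workers=CONNECTION_POOL_SIZE) as executor:
        # executor.map yields results in scan order, so zip pairs them back up
        for scan, files in zip(scans, executor.map(list_files, scans)):
            all_files.update({f: scan for f in files})
    return all_files
```

Keying the merged dict by file object preserves which scan each file belongs to, which the download pass needs when it builds each file's destination path.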
@@ -192,8 +219,8 @@ def download_nordic_zips(session: str,
    project_id = session_experiment["project"]
    exp_id = session_experiment['ID']
 
-    def __digests_identical(zip_path: Path,
-                            cnda_file: pyxnat.core.resources.File):
+    def _digests_identical(zip_path: Path,
+                           cnda_file: pyxnat.core.resources.File):
        if zip_path.is_file():  # Compare digests of zip on CNDA to see if we need to redownload
            with zip_path.open("rb") as f:
                if hashlib.md5(f.read()).hexdigest() == cnda_file.attributes()['digest']:  # digests match
@@ -205,12 +232,12 @@ def download_nordic_zips(session: str,
     logger.info(f"Found {len(nordic_volumes)} 'NORDIC_VOLUMES' for this session")
     for nv in nordic_volumes:
         zip_path = session_dicom_dir / nv._uri.split("/")[-1]
-        if not __digests_identical(zip_path, nv):
-            logger.info(f"Downloading {zip_path.name}...")
+        if not _digests_identical(zip_path, nv):
+            logger.info(f"Downloading {zip_path.name}")
             nv.get(zip_path)
         unzip_path = zip_path.parent / zip_path.stem
         with zipfile.ZipFile(zip_path, "r") as zip_ref:
-            logger.info(f"Unzipping to {unzip_path}...")
+            logger.info(f"Unzipping to {unzip_path}")
             zip_ref.extractall(unzip_path)
         dat_dir_list.append(unzip_path)
 
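The `_digests_identical` helper compares an MD5 of any local copy against the digest attribute CNDA reports, so an unchanged zip is never re-downloaded. A minimal standalone sketch of the same check, where `remote_digest` stands in for `cnda_file.attributes()['digest']`:

```python
import hashlib
from pathlib import Path

def needs_download(zip_path: Path, remote_digest: str) -> bool:
    """True unless a local copy already exists with a matching MD5 digest."""
    if not zip_path.is_file():
        return True
    with zip_path.open("rb") as f:
        return hashlib.md5(f.read()).hexdigest() != remote_digest
```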
@@ -286,8 +313,8 @@ def dat_dcm_to_nifti(session: str,
            logger.warning("Could not find the mismatched dicom")
 
    # run the dcmdat2niix subprocess
-    logger.info(f"Running dcmdat2niix on series {series_id}...")
-    dcmdat2niix_cmd = shlex.split(f"dcmdat2niix -ba n -z o -w 1 -o {nifti_path} {series_path}")
+    logger.info(f"Running dcmdat2niix on series {series_id}")
+    dcmdat2niix_cmd = shlex.split(f"dcmdat2niix -ba y -z o -w 1 -o {nifti_path} {series_path}")
     with subprocess.Popen(dcmdat2niix_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
         while p.poll() is None:
             for line in p.stdout:
@@ -318,7 +345,7 @@
 def main():
     parser = argparse.ArgumentParser(
         prog="cnda-dl",
-        description="download cnda data directly to wallace",
+        description="A command-line utility for downloading fMRI data from CNDA",
     )
     parser.add_argument('session_list',
                         nargs="+",
@@ -376,7 +403,7 @@ def main():
     logger.addHandler(file_handler)
     logger.addHandler(sout_handler)
 
-    logger.info("Starting cnda-dl...")
+    logger.info("Starting cnda-dl")
     logger.info(f"Log will be stored at {log_path}")
 
     # set up data paths
@@ -399,6 +426,7 @@ def main():
 
     # main loop
     for session in session_list:
+        download_success = False
         xml_file_path = xml_path / f"{session}.xml"
         session_dicom_dir = dicom_path / session
 
@@ -415,10 +443,11 @@ def main():
                              skip_short_runs=args.skip_short_runs)
         except Exception:
             logger.exception(f"Error moving the .dat files to the appropriate scan directories and converting to NIFTI for session: {session}")
+            download_success = False
             continue
 
         # download the experiment data
-        logger.info(f"Starting download of session {session}...")
+        logger.info(f"Starting download of session {session}")
 
         # try to retrieve the experiment corresponding to this session
         exp = None
@@ -434,6 +463,26 @@ def main():
 
         except Exception:
             logger.exception("Error retrieving the experiment from the given parameters. Double check your inputs or enter more specific parameters.")
+            download_success = False
+            continue
+
+        # download the experiment data
+        logger.info(f"Starting download of session {session}")
+
+        # try to retrieve the experiment corresponding to this session
+        exp = None
+        try:
+            exp = retrieve_experiment(central=central,
+                                      session=session,
+                                      experiment_id=args.experiment_id,
+                                      project_id=args.project_id)
+            if len(exp) == 0:
+                raise RuntimeError("ERROR: CNDA query returned JsonTable object of length 0, meaning there were no results found with the given search parameters.")
+            elif len(exp) > 1:
+                raise RuntimeError("ERROR: CNDA query returned JsonTable object of length >1, meaning there were multiple results returned with the given search parameters.")
+
+        except Exception:
+
             continue
 
         # download the xml for this session
@@ -452,29 +501,33 @@ def main():
                                        skip_unusable=args.skip_unusable)
         except Exception:
             logger.exception(f"Error downloading the experiment data from CNDA for session: {session}")
+            download_success = False
             continue
 
-        # if we are not skipping the NORDIC files
-        if not args.ignore_nordic_volumes:
-            # try to download NORDIC related files and convert raw data to NIFTI
-            try:
-                nordic_dat_dirs = download_nordic_zips(session=session,
-                                                       central=central,
-                                                       session_experiment=exp,
-                                                       session_dicom_dir=session_dicom_dir)
-                nifti_path = dicom_path / f"{session}_nii"
-                for nordic_dat_path in nordic_dat_dirs:
-                    dat_dcm_to_nifti(session=session,
-                                     dat_directory=nordic_dat_path,
-                                     xml_file_path=xml_file_path,
-                                     session_dicom_dir=session_dicom_dir,
-                                     nifti_path=nifti_path,
-                                     skip_short_runs=args.skip_short_runs)
-            except Exception:
-                logger.exception(f"Error downloading 'NORDIC_VOLUMES' and converting to NIFTI for session: {session}")
-                continue
+        # exit if skipping the NORDIC files
+        if args.ignore_nordic_volumes:
+            continue
+        # try to download NORDIC related files and convert raw data to NIFTI
+        try:
+            nordic_dat_dirs = download_nordic_zips(session=session,
+                                                   central=central,
+                                                   session_experiment=exp,
+                                                   session_dicom_dir=session_dicom_dir)
+            nifti_path = dicom_path / f"{session}_nii"
+            for nordic_dat_path in nordic_dat_dirs:
+                dat_dcm_to_nifti(session=session,
+                                 dat_directory=nordic_dat_path,
+                                 xml_file_path=xml_file_path,
+                                 session_dicom_dir=session_dicom_dir,
+                                 nifti_path=nifti_path,
+                                 skip_short_runs=args.skip_short_runs)
+        except Exception:
+            logger.exception(f"Error downloading 'NORDIC_VOLUMES' and converting to NIFTI for session: {session}")
+            download_success = False
+            continue
 
-    logger.info("\n...Downloads Complete")
+        if download_success:
+            logger.info("\nDownloads Complete")
 
 
 if __name__ == "__main__":
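Taken together, the cli.py changes move the download loop onto a bounded thread pool: each file becomes a submitted task, and the progress output advances as `as_completed` yields futures. A simplified sketch of that pattern, with a hypothetical `fetch` callable standing in for `_download_session_file`:

```python
import concurrent.futures

CONNECTION_POOL_SIZE = 10  # same bound the new cli.py code uses

def download_all(items, fetch):
    """Run fetch(item) for every item on a bounded pool, reporting as tasks finish."""
    results = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=CONNECTION_POOL_SIZE) as executor:
        futures = [executor.submit(fetch, item) for item in items]
        for done, future in enumerate(concurrent.futures.as_completed(futures), start=1):
            try:
                results.append(future.result())  # re-raises any exception from the worker
                print(f"{done} out of {len(futures)} complete")
            except Exception as exc:
                print(f"Task ended with an exception {exc}")
    return results
```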
cnda_dl/formatters.py ADDED
@@ -0,0 +1,65 @@
+import logging
+import os
+
+
+class Colors:
+    RESET = "\033[0m"
+    BOLD = "\033[1m"
+    UNDERLINE = "\033[4m"
+
+    BLACK = "\033[30m"
+    RED = "\033[31m"
+    GREEN = "\033[32m"
+    YELLOW = "\033[33m"
+    BLUE = "\033[34m"
+    MAGENTA = "\033[35m"
+    CYAN = "\033[36m"
+    WHITE = "\033[37m"
+    DARK_GREY = "\033[90m"
+    LIGHT_RED = "\033[91m"
+    LIGHT_GREEN = "\033[92m"
+    LIGHT_YELLOW = "\033[93m"
+    LIGHT_BLUE = "\033[94m"
+    LIGHT_MAGENTA = "\033[95m"
+    LIGHT_CYAN = "\033[96m"
+    LIGHT_WHITE = "\033[97m"
+
+    # Background colors
+    BACK_BLACK = "\033[40m"
+    BACK_RED = "\033[41m"
+    BACK_GREEN = "\033[42m"
+    BACK_YELLOW = "\033[43m"
+    BACK_BLUE = "\033[44m"
+    BACK_MAGENTA = "\033[45m"
+    BACK_CYAN = "\033[46m"
+    BACK_WHITE = "\033[47m"
+    BACK_DARK_GREY = "\033[100m"
+    BACK_LIGHT_RED = "\033[101m"
+    BACK_LIGHT_GREEN = "\033[102m"
+    BACK_LIGHT_YELLOW = "\033[103m"
+    BACK_LIGHT_BLUE = "\033[104m"
+    BACK_LIGHT_MAGENTA = "\033[105m"
+    BACK_LIGHT_CYAN = "\033[106m"
+    BACK_LIGHT_WHITE = "\033[107m"
+
+
+class ParensOnRightFormatter1(logging.Formatter):
+    def format(self, record):
+        log_message = f"{record.msg}"
+        log_level = f"{record.levelname}"
+        func_name = f"{record.funcName}"
+        if func_name.endswith('.'):
+            func_name = func_name[:-1] + f"{Colors.DARK_GREY}.{Colors.RESET}"
+        # Determine total width of terminal window
+        terminal_width = os.get_terminal_size()[0]
+        # Calculate the right margin position for the log level and function name
+        if log_level == "INFO":
+            right_margin_text = f"{Colors.LIGHT_GREEN}({log_level}, {func_name}){Colors.RESET}"
+        elif log_level == "WARNING":
+            right_margin_text = f"{Colors.YELLOW}({log_level}, {func_name}){Colors.RESET}"
+        else:  # ERROR and any other level fall back to red
+            right_margin_text = f"{Colors.RED}({log_level}, {func_name}){Colors.RESET}"
+        necessary_padding = terminal_width - len(log_message) - len(right_margin_text)
+        # Ensure padding is non-negative
+        padding = f'{Colors.DARK_GREY}.{Colors.RESET}' * max(0, necessary_padding)
+        return f"{log_message}{padding}{right_margin_text}"
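As wired up at the top of cli.py, the formatter is attached to the stdout handler. A minimal usage sketch, assuming it runs in a real terminal (the formatter calls `os.get_terminal_size()`, which fails when stdout is not attached to one):

```python
import logging
import sys

from cnda_dl.formatters import ParensOnRightFormatter1

handler = logging.StreamHandler(stream=sys.stdout)
handler.setFormatter(ParensOnRightFormatter1())
logging.basicConfig(level=logging.INFO, handlers=[handler])

# The message is padded with grey dots so "(LEVEL, funcName)" sits on the right margin
logging.getLogger(__name__).info("Downloading session xml")
```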
{cnda_dl-1.0.0.dist-info → cnda_dl-1.1.0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: cnda_dl
-Version: 1.0.0
+Version: 1.1.0
 Summary: A command line utility for downloading fMRI data from CNDA
 Author-Email: Ramone Agard <agardr@wustl.edu>, Joey Scanga <joeyscanga92@gmail.com>
 Requires-Python: <4.0,>=3.9
@@ -10,20 +10,12 @@ Requires-Dist: progressbar2>=4.4.2
 Requires-Dist: pyxnat>=1.6.2
 Description-Content-Type: text/markdown
 
-# Dev installation
+# Installation
 
-Ensure `uv` is installed:
+We recommend installing cnda-dl with [pipx](https://github.com/pypa/pipx), which ensures that the cnda-dl binary is installed in an isolated environment. Installation instructions can be found [here](https://github.com/pypa/pipx?tab=readme-ov-file#install-pipx).
 
-```
-which uv
-```
-
-If not, this can be installed following instructions [here](https://docs.astral.sh/uv/getting-started/installation/).
-
-Then run:
+With `pipx` installed:
 
 ```
-git clone https://github.com/washu-ocean/cnda-dl.git
-cd cnda-dl
-uv sync
+pipx install cnda_dl
 ```
cnda_dl-1.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
+cnda_dl-1.1.0.dist-info/METADATA,sha256=SjIkJKqGNTeVH9Jw3KiSqonQ7a4ehU33Lb2E28_2jEE,742
+cnda_dl-1.1.0.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+cnda_dl-1.1.0.dist-info/entry_points.txt,sha256=U5B378NGa-YaoKi3s456cW9HT1THi1B_vLKzwAHxHi8,61
+cnda_dl-1.1.0.dist-info/licenses/LICENSE,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
+cnda_dl/.ruff_cache/.gitignore,sha256=njpg8ebsSuYCFcEdVLFxOSdF7CXp3e1DPVvZITY68xY,35
+cnda_dl/.ruff_cache/0.9.9/15962950311086395899,sha256=2FFgjB2VS-A2SnbYZgGkU-flEhS3KDRCXoPWi-wjLtQ,22959
+cnda_dl/.ruff_cache/CACHEDIR.TAG,sha256=WVMVbX4MVkpCclExbq8m-IcOZIOuIZf5FrYw5Pk-Ma4,43
+cnda_dl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cnda_dl/cli.py,sha256=0nIM5mK7A8upr48WuUOq6S6RUZSPvSuH0MQCUOc2aQk,24002
+cnda_dl/formatters.py,sha256=A9cTQJhW7lYggreRt7zNRKIYQnH70uguc0UrHRIxRsk,2185
+cnda_dl-1.1.0.dist-info/RECORD,,
cnda_dl-1.0.0.dist-info/RECORD REMOVED
@@ -1,10 +0,0 @@
-cnda_dl-1.0.0.dist-info/METADATA,sha256=n_3NBGocWRKc3eGmMg3EhhaucneIkmlySV0jyMcn0lI,684
-cnda_dl-1.0.0.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-cnda_dl-1.0.0.dist-info/entry_points.txt,sha256=U5B378NGa-YaoKi3s456cW9HT1THi1B_vLKzwAHxHi8,61
-cnda_dl-1.0.0.dist-info/licenses/LICENSE,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
-cnda_dl/.ruff_cache/.gitignore,sha256=njpg8ebsSuYCFcEdVLFxOSdF7CXp3e1DPVvZITY68xY,35
-cnda_dl/.ruff_cache/0.9.9/15962950311086395899,sha256=2FFgjB2VS-A2SnbYZgGkU-flEhS3KDRCXoPWi-wjLtQ,22959
-cnda_dl/.ruff_cache/CACHEDIR.TAG,sha256=WVMVbX4MVkpCclExbq8m-IcOZIOuIZf5FrYw5Pk-Ma4,43
-cnda_dl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cnda_dl/cli.py,sha256=8v452YDuE-9a987Jb0oC3hJSmnR1mKrN1XiDfQr0mBs,21577
-cnda_dl-1.0.0.dist-info/RECORD,,