qmenta-client 1.1.dev1289__py3-none-any.whl → 1.1.dev1295__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
qmenta/client/Project.py CHANGED
@@ -22,7 +22,8 @@ logger_name = "qmenta.client"
  def show_progress(done, total, finish=False):
  bytes_in_mb = 1024 * 1024
  progress_message = "\r[{:.2f} %] Uploaded {:.2f} of {:.2f} Mb".format(
- done / float(total) * 100, done / bytes_in_mb, total / bytes_in_mb)
+ done / float(total) * 100, done / bytes_in_mb, total / bytes_in_mb
+ )
  sys.stdout.write(progress_message)
  sys.stdout.flush()
  if not finish:
@@ -71,6 +72,7 @@ class QCStatus(Enum):
  Enum with the following options:
  FAIL, PASS
  """
+
  PASS = "pass"
  FAIL = "fail"
 
@@ -94,15 +96,11 @@ class Project:
  # project id (int)
  if isinstance(project_id, str):
  project_name = project_id
- project_id = next(iter(filter(
- lambda proj: proj["name"] == project_id, account.projects)
- ))["id"]
+ project_id = next(iter(filter(lambda proj: proj["name"] == project_id, account.projects)))["id"]
  else:
  if isinstance(project_id, float):
  project_id = int(project_id)
- project_name = next(iter(filter(
- lambda proj: proj["id"] == project_id, account.projects)
- ))["name"]
+ project_name = next(iter(filter(lambda proj: proj["id"] == project_id, account.projects)))["name"]
 
  self._account = account
  self._project_id = project_id
@@ -133,11 +131,11 @@ class Project:
  """
  logger = logging.getLogger(logger_name)
  try:
- platform.parse_response(platform.post(
- self._account.auth,
- "projectset_manager/activate_project",
- data={"project_id": int(project_id)}
- ))
+ platform.parse_response(
+ platform.post(
+ self._account.auth, "projectset_manager/activate_project", data={"project_id": int(project_id)}
+ )
+ )
  except errors.PlatformError:
  logger.error("Unable to activate the project.")
  return False
@@ -263,27 +261,29 @@ class Project:
 
  """
 
- assert len(items) == 2, f"The number of elements in items " \
- f"'{len(items)}' should be equal to two."
- assert all([isinstance(item, int) for item in items]), \
- f"All items elements '{items}' should be integers."
+ assert len(items) == 2, f"The number of elements in items " f"'{len(items)}' should be equal to two."
+ assert all([isinstance(item, int) for item in items]), f"All items elements '{items}' should be integers."
 
- assert all([key[:5] == "pars_" for key in search_criteria.keys()]), \
- f"All keys of the search_criteria dictionary " \
- f"'{search_criteria.keys()}' must start with 'pars_'."
+ assert all([key[:5] == "pars_" for key in search_criteria.keys()]), (
+ f"All keys of the search_criteria dictionary " f"'{search_criteria.keys()}' must start with 'pars_'."
+ )
 
  operator_list = ["eq", "ne", "gt", "gte", "lt", "lte"]
  for key, value in search_criteria.items():
  if value.split(";")[0] in ["integer", "decimal"]:
- assert value.split(";")[1].split("|")[0] in operator_list, \
- f"Search criteria of type '{value.split(';')[0]}' must " \
+ assert value.split(";")[1].split("|")[0] in operator_list, (
+ f"Search criteria of type '{value.split(';')[0]}' must "
  f"include an operator ({', '.join(operator_list)})."
+ )
 
- content = platform.parse_response(platform.post(
- self._account.auth, "patient_manager/get_patient_list",
- data=search_criteria,
- headers={"X-Range": f"items={items[0]}-{items[1]}"}
- ))
+ content = platform.parse_response(
+ platform.post(
+ self._account.auth,
+ "patient_manager/get_patient_list",
+ data=search_criteria,
+ headers={"X-Range": f"items={items[0]}-{items[1]}"},
+ )
+ )
  return content
 
  @property
@@ -339,16 +339,13 @@ class Project:
  """
  logger = logging.getLogger(logger_name)
  try:
- data = platform.parse_response(platform.post(
- self._account.auth, "patient_manager/module_config"
- ))
+ data = platform.parse_response(platform.post(self._account.auth, "patient_manager/module_config"))
  except errors.PlatformError:
  logger.error("Could not retrieve metadata parameters.")
  return None
  return data["fields"]
 
- def add_metadata_parameter(self, title, param_id=None,
- param_type="string", visible=False):
+ def add_metadata_parameter(self, title, param_id=None, param_type="string", visible=False):
  """
  Add a metadata parameter to the project.
 
@@ -374,18 +371,13 @@ class Project:
 
  param_properties = [title, param_id, param_type, str(int(visible))]
 
- post_data = {"add": "|".join(param_properties),
- "edit": "",
- "delete": ""
- }
+ post_data = {"add": "|".join(param_properties), "edit": "", "delete": ""}
 
  logger = logging.getLogger(logger_name)
  try:
- answer = platform.parse_response(platform.post(
- self._account.auth,
- "patient_manager/save_metadata_changes",
- data=post_data
- ))
+ answer = platform.parse_response(
+ platform.post(self._account.auth, "patient_manager/save_metadata_changes", data=post_data)
+ )
  except errors.PlatformError:
  answer = {}
 
@@ -402,20 +394,17 @@ class Project:
  elif isinstance(analysis_name_or_id, str):
  search_tag = "p_n"
  else:
- raise Exception("The analysis identifier must be its name or an "
- "integer")
+ raise Exception("The analysis identifier must be its name or an " "integer")
 
  search_condition = {
  search_tag: analysis_name_or_id,
  }
- response = platform.parse_response(platform.post(
- self._account.auth, "analysis_manager/get_analysis_list",
- data=search_condition
- ))
+ response = platform.parse_response(
+ platform.post(self._account.auth, "analysis_manager/get_analysis_list", data=search_condition)
+ )
 
  if len(response) > 1:
- raise Exception(f"multiple analyses with name "
- f"{analysis_name_or_id} found")
+ raise Exception(f"multiple analyses with name " f"{analysis_name_or_id} found")
  elif len(response) == 1:
  return response[0]
  else:
@@ -455,10 +444,8 @@ class Project:
  dict
  List of analysis, each a dictionary
  """
- assert len(items) == 2, f"The number of elements in items " \
- f"'{len(items)}' should be equal to two."
- assert all([isinstance(item, int) for item in items]), \
- f"All items elements '{items}' should be integers."
+ assert len(items) == 2, f"The number of elements in items " f"'{len(items)}' should be equal to two."
+ assert all([isinstance(item, int) for item in items]), f"All items elements '{items}' should be integers."
  search_keys = {
  "p_n": str,
  "type": str,
@@ -474,26 +461,18 @@ class Project:
  }
  for key in search_condition.keys():
  if key not in search_keys.keys():
- raise Exception(
- (
- f"This key '{key}' is not accepted by this"
- "search condition"
- )
- )
+ raise Exception((f"This key '{key}' is not accepted by this" "search condition"))
  if not isinstance(search_condition[key], search_keys[key]):
- raise Exception(
- (
- f"The key {key} in the search condition"
- f"is not type {search_keys[key]}"
- )
- )
+ raise Exception((f"The key {key} in the search condition" f"is not type {search_keys[key]}"))
  req_headers = {"X-Range": f"items={items[0]}-{items[1] - 1}"}
- return platform.parse_response(platform.post(
- auth=self._account.auth,
- endpoint="analysis_manager/get_analysis_list",
- headers=req_headers,
- data=search_condition
- ))
+ return platform.parse_response(
+ platform.post(
+ auth=self._account.auth,
+ endpoint="analysis_manager/get_analysis_list",
+ headers=req_headers,
+ data=search_condition,
+ )
+ )
 
  def get_subject_container_id(self, subject_name, ssid):
  """
@@ -513,17 +492,11 @@ class Project:
  the subject is not found.
  """
 
- search_criteria = {
- "s_n": subject_name,
- "ssid": ssid
- }
- response = self.list_input_containers(
- search_criteria=search_criteria
- )
+ search_criteria = {"s_n": subject_name, "ssid": ssid}
+ response = self.list_input_containers(search_criteria=search_criteria)
 
  for subject in response:
- if subject["patient_secret_name"] == subject_name and \
- subject["ssid"] == ssid:
+ if subject["patient_secret_name"] == subject_name and subject["ssid"] == ssid:
  return subject["container_id"]
  return False
 
@@ -561,16 +534,17 @@ class Project:
  {"container_name", "container_id", "patient_secret_name", "ssid"}
  """
 
- assert len(items) == 2, f"The number of elements in items " \
- f"'{len(items)}' should be equal to two."
- assert all([isinstance(item, int) for item in items]), \
- f"All items elements '{items}' should be integers."
+ assert len(items) == 2, f"The number of elements in items " f"'{len(items)}' should be equal to two."
+ assert all([isinstance(item, int) for item in items]), f"All items elements '{items}' should be integers."
 
- response = platform.parse_response(platform.post(
- self._account.auth, "file_manager/get_container_list",
- data=search_criteria,
- headers={"X-Range": f"items={items[0]}-{items[1]}"}
- ))
+ response = platform.parse_response(
+ platform.post(
+ self._account.auth,
+ "file_manager/get_container_list",
+ data=search_criteria,
+ headers={"X-Range": f"items={items[0]}-{items[1]}"},
+ )
+ )
  containers = [
  {
  "patient_secret_name": container_item["patient_secret_name"],
@@ -598,8 +572,7 @@ class Project:
  {"name": "container-name", "id": "container_id"}
  """
  analysis = self.list_analysis(limit)
- return [{"name": a["name"],
- "id": a["out_container_id"]} for a in analysis]
+ return [{"name": a["name"], "id": a["out_container_id"]} for a in analysis]
 
  def list_container_files(self, container_id):
  """
@@ -616,10 +589,11 @@ class Project:
  List of file names (strings)
  """
  try:
- content = platform.parse_response(platform.post(
- self._account.auth, "file_manager/get_container_files",
- data={"container_id": container_id}
- ))
+ content = platform.parse_response(
+ platform.post(
+ self._account.auth, "file_manager/get_container_files", data={"container_id": container_id}
+ )
+ )
  except errors.PlatformError as e:
  logging.getLogger(logger_name).error(e)
  return False
@@ -646,10 +620,11 @@ class Project:
  """
 
  try:
- data = platform.parse_response(platform.post(
- self._account.auth, "file_manager/get_container_files",
- data={"container_id": container_id}
- ))
+ data = platform.parse_response(
+ platform.post(
+ self._account.auth, "file_manager/get_container_files", data={"container_id": container_id}
+ )
+ )
  except errors.PlatformError as e:
  logging.getLogger(logger_name).error(e)
  return False
@@ -698,18 +673,15 @@ class Project:
  """
 
  tags_str = "" if tags is None else ";".join(tags)
- platform.parse_response(platform.post(
- self._account.auth, "file_manager/edit_file",
- data={
- "container_id": container_id,
- "filename": filename,
- "tags": tags_str,
- "modality": modality
- }
- ))
+ platform.parse_response(
+ platform.post(
+ self._account.auth,
+ "file_manager/edit_file",
+ data={"container_id": container_id, "filename": filename, "tags": tags_str, "modality": modality},
+ )
+ )
 
- def download_file(self, container_id, file_name, local_filename=False,
- overwrite=False):
+ def download_file(self, container_id, file_name, local_filename=False, overwrite=False):
  """
  Download a single file from a specific container.
 
@@ -726,8 +698,7 @@ class Project:
  """
  logger = logging.getLogger(logger_name)
  if file_name not in self.list_container_files(container_id):
- msg = (f"File \"{file_name}\" does not exist in container "
- f"{container_id}")
+ msg = f'File "{file_name}" does not exist in container ' f"{container_id}"
  logger.error(msg)
  return False
 
@@ -740,22 +711,18 @@ class Project:
 
  params = {"container_id": container_id, "files": file_name}
 
- with platform.post(self._account.auth, "file_manager/download_file",
- data=params, stream=True) as response, \
- open(local_filename, "wb") as f:
+ with platform.post(
+ self._account.auth, "file_manager/download_file", data=params, stream=True
+ ) as response, open(local_filename, "wb") as f:
 
- for chunk in response.iter_content(chunk_size=2 ** 9 * 1024):
+ for chunk in response.iter_content(chunk_size=2**9 * 1024):
  f.write(chunk)
  f.flush()
 
- logger.info(
- f"File {file_name} from container {container_id} saved to"
- f" {local_filename}"
- )
+ logger.info(f"File {file_name} from container {container_id} saved to" f" {local_filename}")
  return True
 
- def download_files(self, container_id, filenames, zip_name="files.zip",
- overwrite=False):
+ def download_files(self, container_id, filenames, zip_name="files.zip", overwrite=False):
  """
  Download a set of files from a given container.
 
@@ -771,34 +738,30 @@ class Project:
  Name of the zip where the downloaded files are stored.
  """
  logger = logging.getLogger(logger_name)
- files_not_in_container = list(
- filter(lambda f: f not in self.list_container_files(container_id),
- filenames)
- )
+ files_not_in_container = list(filter(lambda f: f not in self.list_container_files(container_id), filenames))
 
  if files_not_in_container:
- msg = (f"The following files are missing in container "
- f"{container_id}: {', '.join(files_not_in_container)}")
+ msg = (
+ f"The following files are missing in container " f"{container_id}: {', '.join(files_not_in_container)}"
+ )
  logger.error(msg)
  return False
 
  if os.path.exists(zip_name) and not overwrite:
- msg = f"File \"{zip_name}\" already exists"
+ msg = f'File "{zip_name}" already exists'
  logger.error(msg)
  return False
 
  params = {"container_id": container_id, "files": ";".join(filenames)}
- with platform.post(self._account.auth,
- "file_manager/download_file",
- data=params, stream=True) as response, \
- open(zip_name, "wb") as f:
+ with platform.post(
+ self._account.auth, "file_manager/download_file", data=params, stream=True
+ ) as response, open(zip_name, "wb") as f:
 
- for chunk in response.iter_content(chunk_size=2 ** 9 * 1024):
+ for chunk in response.iter_content(chunk_size=2**9 * 1024):
  f.write(chunk)
  f.flush()
 
- logger.info("Files from container {} saved to {}".format(
- container_id, zip_name))
+ logger.info("Files from container {} saved to {}".format(container_id, zip_name))
  return True
 
  def get_subject_id(self, subject_name, ssid):
@@ -821,8 +784,7 @@ class Project:
  """
 
  for user in self.get_subjects_metadata():
- if user["patient_secret_name"] == str(subject_name) and \
- user["ssid"] == str(ssid):
+ if user["patient_secret_name"] == str(subject_name) and user["ssid"] == str(ssid):
  return int(user["_id"])
  return False
 
@@ -842,27 +804,23 @@ class Project:
  """
  logger = logging.getLogger(logger_name)
  if self.check_subject_name(subject.name):
- logger.error(f"Subject with name {subject.name} already exists in "
- f"project!")
+ logger.error(f"Subject with name {subject.name} already exists in " f"project!")
  return False
 
  try:
- platform.parse_response(platform.post(
- self._account.auth, "patient_manager/upsert_patient",
- data={"secret_name": subject.name}
- ))
+ platform.parse_response(
+ platform.post(self._account.auth, "patient_manager/upsert_patient", data={"secret_name": subject.name})
+ )
  except errors.PlatformError:
  logger.error(f"Subject {subject.name} could not be created.")
  return False
 
  subject.subject_id = self.get_subject_id(subject.name)
  subject.project = self
- logger.info(
- "Subject {0} was successfully created".format(subject.name))
+ logger.info("Subject {0} was successfully created".format(subject.name))
  return True
 
- def change_subject_metadata(self, patient_id, subject_name, ssid, tags,
- age_at_scan, metadata):
+ def change_subject_metadata(self, patient_id, subject_name, ssid, tags, age_at_scan, metadata):
  """
  Change the Subject ID, Session ID, Tags, Age at Scan and Metadata of
  the session with Patient ID
@@ -897,42 +855,36 @@ class Project:
  try:
  patient_id = str(int(patient_id))
  except ValueError:
- raise ValueError(f"'patient_id': '{patient_id}' not valid. "
- f"Must be convertible to int.")
+ raise ValueError(f"'patient_id': '{patient_id}' not valid. " f"Must be convertible to int.")
 
- assert isinstance(tags, list) and \
- all(isinstance(item, str) for item in tags), \
- f"tags: '{tags}' should be a list of strings."
+ assert isinstance(tags, list) and all(
+ isinstance(item, str) for item in tags
+ ), f"tags: '{tags}' should be a list of strings."
  tags = [tag.lower() for tag in tags]
 
- assert subject_name is not None and subject_name != "", \
- "subject_name must be a non empty string."
- assert ssid is not None and ssid != "", \
- "ssid must be a non empty string."
+ assert subject_name is not None and subject_name != "", "subject_name must be a non empty string."
+ assert ssid is not None and ssid != "", "ssid must be a non empty string."
 
  try:
  age_at_scan = str(int(age_at_scan)) if age_at_scan else None
  except ValueError:
- raise ValueError(f"age_at_scan: '{age_at_scan}' not valid. "
- f"Must be an integer.")
+ raise ValueError(f"age_at_scan: '{age_at_scan}' not valid. " f"Must be an integer.")
 
- assert isinstance(metadata, dict), \
- f"metadata: '{metadata}' should be a dictionary."
+ assert isinstance(metadata, dict), f"metadata: '{metadata}' should be a dictionary."
 
- assert all("md_" == key[:3] for key in metadata.keys()) or \
- all("md_" != key[:3] for key in metadata.keys()), \
- f"metadata: '{metadata}' must be a dictionary whose keys " \
- f"are either all starting with 'md_' or none."
+ assert all("md_" == key[:3] for key in metadata.keys()) or all("md_" != key[:3] for key in metadata.keys()), (
+ f"metadata: '{metadata}' must be a dictionary whose keys " f"are either all starting with 'md_' or none."
+ )
 
  metadata_keys = self.metadata_parameters.keys()
- assert \
- all([key[3:] in metadata_keys
- if "md_" == key[:3] else key in metadata_keys
- for key in metadata.keys()]), \
- f"Some metadata keys provided ({', '.join(metadata.keys())}) " \
- f"are not available in the project. They can be added via the " \
- f"Metadata Manager via the QMENTA Platform graphical user " \
+ assert all(
+ [key[3:] in metadata_keys if "md_" == key[:3] else key in metadata_keys for key in metadata.keys()]
+ ), (
+ f"Some metadata keys provided ({', '.join(metadata.keys())}) "
+ f"are not available in the project. They can be added via the "
+ f"Metadata Manager via the QMENTA Platform graphical user "
  f"interface (GUI)."
+ )
 
  post_data = {
  "patient_id": patient_id,
@@ -946,11 +898,7 @@ class Project:
  post_data[f"last_vals.{id}"] = value
 
  try:
- platform.parse_response(platform.post(
- self._account.auth,
- "patient_manager/upsert_patient",
- data=post_data
- ))
+ platform.parse_response(platform.post(self._account.auth, "patient_manager/upsert_patient", data=post_data))
  except errors.PlatformError:
  logger.error(f"Patient ID '{patient_id}' could not be modified.")
  return False
@@ -979,45 +927,32 @@ class Project:
  all_sessions = self.get_subjects_metadata()
 
  session_to_del = [
- s for s in all_sessions if
- s["patient_secret_name"] == subject_name and
- s["ssid"] == session_id
+ s for s in all_sessions if s["patient_secret_name"] == subject_name and s["ssid"] == session_id
  ]
 
  if not session_to_del:
- logger.error(
- f"Session {subject_name}/{session_id} could not be found "
- f"in this project."
- )
+ logger.error(f"Session {subject_name}/{session_id} could not be found " f"in this project.")
  return False
  elif len(session_to_del) > 1:
- raise RuntimeError(
- "Multiple sessions with same Subject ID and Session ID."
- " Contact support."
- )
+ raise RuntimeError("Multiple sessions with same Subject ID and Session ID." " Contact support.")
  else:
- logger.info("{}/{} found (id {})".format(
- subject_name, session_id, session_to_del[0]["_id"]
- ))
+ logger.info("{}/{} found (id {})".format(subject_name, session_id, session_to_del[0]["_id"]))
 
  session = session_to_del[0]
 
  try:
- platform.parse_response(platform.post(
- self._account.auth, "patient_manager/delete_patient",
- data={
- "patient_id": str(int(session["_id"])), "delete_files": 1
- }
- ))
+ platform.parse_response(
+ platform.post(
+ self._account.auth,
+ "patient_manager/delete_patient",
+ data={"patient_id": str(int(session["_id"])), "delete_files": 1},
+ )
+ )
  except errors.PlatformError:
- logger.error(f"Session \"{subject_name}/{session['ssid']}\" could"
- f" not be deleted.")
+ logger.error(f"Session \"{subject_name}/{session['ssid']}\" could" f" not be deleted.")
  return False
 
- logger.info(
- f"Session \"{subject_name}/{session['ssid']}\" successfully "
- f"deleted."
- )
+ logger.info(f"Session \"{subject_name}/{session['ssid']}\" successfully " f"deleted.")
  return True
 
  def delete_session_by_patientid(self, patient_id):
@@ -1038,12 +973,13 @@ class Project:
  logger = logging.getLogger(logger_name)
 
  try:
- platform.parse_response(platform.post(
- self._account.auth, "patient_manager/delete_patient",
- data={
- "patient_id": str(int(patient_id)), "delete_files": 1
- }
- ))
+ platform.parse_response(
+ platform.post(
+ self._account.auth,
+ "patient_manager/delete_patient",
+ data={"patient_id": str(int(patient_id)), "delete_files": 1},
+ )
+ )
  except errors.PlatformError:
  logger.error(f"Patient ID {patient_id} could not be deleted.")
  return False
@@ -1071,16 +1007,10 @@ class Project:
  # Always fetch the session IDs from the platform before deleting them
  all_sessions = self.get_subjects_metadata()
 
- sessions_to_del = [
- s for s in all_sessions if s["patient_secret_name"] == subject_name
- ]
+ sessions_to_del = [s for s in all_sessions if s["patient_secret_name"] == subject_name]
 
  if not sessions_to_del:
- logger.error(
- "Subject {} cannot be found in this project.".format(
- subject_name
- )
- )
+ logger.error("Subject {} cannot be found in this project.".format(subject_name))
  return False
 
  for ssid in [s["ssid"] for s in sessions_to_del]:
@@ -1088,15 +1018,25 @@ class Project:
  return False
  return True
 
- def _upload_chunk(self, data, range_str, length, session_id,
- disposition,
- last_chunk,
- name="", date_of_scan="", description="",
- subject_name="", ssid="", filename="DATA.zip",
- input_data_type="mri_brain_data:1.0",
- result=False, add_to_container_id=0,
- split_data=False
- ):
+ def _upload_chunk(
+ self,
+ data,
+ range_str,
+ length,
+ session_id,
+ disposition,
+ last_chunk,
+ name="",
+ date_of_scan="",
+ description="",
+ subject_name="",
+ ssid="",
+ filename="DATA.zip",
+ input_data_type="mri_brain_data:1.0",
+ result=False,
+ add_to_container_id=0,
+ split_data=False,
+ ):
  """
  Upload a chunk of a file to the platform.
 
@@ -1123,10 +1063,11 @@ class Project:
  """
 
  request_headers = {
- "Content-Type": "application/zip", "Content-Range":
- range_str, "Session-ID": str(session_id),
+ "Content-Type": "application/zip",
+ "Content-Range": range_str,
+ "Session-ID": str(session_id),
  "Content-Length": str(length),
- "Content-Disposition": disposition
+ "Content-Disposition": disposition,
  }
 
  if last_chunk:
@@ -1154,20 +1095,25 @@ class Project:
 
  response_time = 900.0 if last_chunk else 120.0
  response = platform.post(
- auth=self._account.auth,
- endpoint="upload",
- data=data,
- headers=request_headers,
- timeout=response_time
+ auth=self._account.auth, endpoint="upload", data=data, headers=request_headers, timeout=response_time
  )
 
  return response
 
- def upload_file(self, file_path, subject_name, ssid="", date_of_scan="",
- description="", result=False, name="",
- input_data_type="qmenta_mri_brain_data:1.0",
- add_to_container_id=0, chunk_size=2 ** 9,
- split_data=False):
+ def upload_file(
+ self,
+ file_path,
+ subject_name,
+ ssid="",
+ date_of_scan="",
+ description="",
+ result=False,
+ name="",
+ input_data_type="qmenta_mri_brain_data:1.0",
+ add_to_container_id=0,
+ chunk_size=2**9,
+ split_data=False,
+ ):
  """
  Upload a ZIP file to the platform.
 
@@ -1231,8 +1177,7 @@ class Project:
  last_chunk = False
 
  if ssid and split_data:
- logger.warning("split-data argument will be ignored because" +
- " ssid has been specified")
+ logger.warning("split-data argument will be ignored because" + " ssid has been specified")
  split_data = False
 
  while True:
@@ -1249,16 +1194,27 @@ class Project:
  end_position = total_bytes - 1
  bytes_to_send = total_bytes - uploaded_bytes
 
- bytes_range = "bytes " + str(start_position) + "-" + \
- str(end_position) + "/" + str(total_bytes)
+ bytes_range = "bytes " + str(start_position) + "-" + str(end_position) + "/" + str(total_bytes)
 
  dispstr = f"attachment; filename={filename}"
  response = self._upload_chunk(
- data, bytes_range, bytes_to_send, session_id, dispstr,
+ data,
+ bytes_range,
+ bytes_to_send,
+ session_id,
+ dispstr,
  last_chunk,
- name, date_of_scan, description, subject_name, ssid,
- filename, input_data_type, result, add_to_container_id,
- split_data)
+ name,
+ date_of_scan,
+ description,
+ subject_name,
+ ssid,
+ filename,
+ input_data_type,
+ result,
+ add_to_container_id,
+ split_data,
+ )
 
  if response is None:
  retries_count += 1
@@ -1278,17 +1234,14 @@ class Project:
  retries_count += 1
  time.sleep(retries_count * 5)
  if retries_count > self.max_retries:
- error_message = (
- "Error Code: 416; "
- "Requested Range Not Satisfiable (NGINX)")
+ error_message = "Error Code: 416; " "Requested Range Not Satisfiable (NGINX)"
  logger.error(error_message)
  break
  else:
  retries_count += 1
  time.sleep(retries_count * 5)
  if retries_count > max_retries:
- error_message = ("Number of retries has been reached. "
- "Upload process stops here !")
+ error_message = "Number of retries has been reached. " "Upload process stops here !"
  logger.error(error_message)
  break
 
@@ -1342,9 +1295,7 @@ class Project:
  """
 
  if check_upload_file(file_path):
- return self.upload_file(
- file_path, subject_name,
- input_data_type="parkinson_gametection")
+ return self.upload_file(file_path, subject_name, input_data_type="parkinson_gametection")
  return False
 
  def upload_result(self, file_path, subject_name):
@@ -1388,13 +1339,9 @@ class Project:
  p_id = int(project_id)
  elif type(project_id) == str:
  projects = self._account.projects
- projects_match = [proj for proj in projects
- if proj["name"] == project_id]
+ projects_match = [proj for proj in projects if proj["name"] == project_id]
  if not projects_match:
- raise Exception(
- f"Project {project_id}" +
- " does not exist or is not available for this user."
- )
+ raise Exception(f"Project {project_id}" + " does not exist or is not available for this user.")
  p_id = int(projects_match[0]["id"])
  else:
  raise TypeError("project_id")
@@ -1404,30 +1351,26 @@ class Project:
  }
 
  try:
- platform.parse_response(platform.post(
- self._account.auth,
- "file_manager/copy_container_to_another_project",
- data=data
- ))
- except errors.PlatformError as e:
- logging.getLogger(logger_name).error(
- "Couldn not copy container: {}".format(e)
+ platform.parse_response(
+ platform.post(self._account.auth, "file_manager/copy_container_to_another_project", data=data)
  )
+ except errors.PlatformError as e:
+ logging.getLogger(logger_name).error("Couldn not copy container: {}".format(e))
  return False
 
  return True
 
  def start_analysis(
- self,
- script_name,
- version,
- in_container_id=None,
- analysis_name=None,
- analysis_description=None,
- ignore_warnings=False,
- settings=None,
- tags=None,
- preferred_destination=None
+ self,
+ script_name,
+ version,
+ in_container_id=None,
+ analysis_name=None,
+ analysis_description=None,
+ ignore_warnings=False,
+ settings=None,
+ tags=None,
+ preferred_destination=None,
  ):
  """
  Starts an analysis on a subject.
@@ -1466,13 +1409,9 @@ class Project:
  logger = logging.getLogger(logger_name)
 
  if in_container_id is None and settings is None:
- raise ValueError(
- "Pass a value for either in_container_id or settings.")
+ raise ValueError("Pass a value for either in_container_id or settings.")
 
- post_data = {
- "script_name": script_name,
- "version": version
- }
+ post_data = {"script_name": script_name, "version": version}
 
  settings = settings or {}
 
@@ -1502,9 +1441,7 @@ class Project:
  post_data["preferred_destination"] = preferred_destination
 
  logger.debug(f"post_data = {post_data}")
- return self.__handle_start_analysis(
- post_data, ignore_warnings=ignore_warnings
- )
+ return self.__handle_start_analysis(post_data, ignore_warnings=ignore_warnings)
 
  def delete_analysis(self, analysis_id):
  """
@@ -1516,19 +1453,20 @@ class Project:
  logger = logging.getLogger(logger_name)
 
  try:
- platform.parse_response(platform.post(
- auth=self._account.auth,
- endpoint="analysis_manager/delete_analysis",
- data={"project_id": analysis_id}
- ))
+ platform.parse_response(
+ platform.post(
+ auth=self._account.auth,
+ endpoint="analysis_manager/delete_analysis",
+ data={"project_id": analysis_id},
+ )
+ )
  except errors.PlatformError as error:
  logger.error("Could not delete analysis: {}".format(error))
  return False
 
  return True
 
- def __handle_start_analysis(self, post_data, ignore_warnings=False,
- n_calls=0):
+ def __handle_start_analysis(self, post_data, ignore_warnings=False, n_calls=0):
  """
  Handle the possible responses from the server after start_analysis.
  Sometimes we have to send a request again, and then check again the
@@ -1543,16 +1481,16 @@ class Project:
 
  logger = logging.getLogger(logger_name)
  if n_calls > call_limit:
- logger.error(f"__handle_start_analysis_response called itself more\
- than {n_calls} times: aborting.")
+ logger.error(
+ f"__handle_start_analysis_response called itself more\
+ than {n_calls} times: aborting."
+ )
  return None
 
  try:
- response = platform.parse_response(platform.post(
- self._account.auth,
- "analysis_manager/analysis_registration",
- data=post_data
- ))
+ response = platform.parse_response(
+ platform.post(self._account.auth, "analysis_manager/analysis_registration", data=post_data)
+ )
  logger.info(response["message"])
  return int(response["analysis_id"])
  except platform.ChooseDataError as choose_data:
@@ -1574,8 +1512,7 @@ class Project:
  chosen_files = {}
  for settings_key in choose_data.data_to_choose:
  chosen_files[settings_key] = {}
- filters = choose_data.data_to_choose[
- settings_key]["filters"]
+ filters = choose_data.data_to_choose[settings_key]["filters"]
  for filter_key in filters:
  filter_data = filters[filter_key]
 
@@ -1587,35 +1524,24 @@ class Project:
  if filter_data["range"][0] != 0:
  number_of_files_to_select = filter_data["range"][0]
  elif filter_data["range"][1] != 0:
- number_of_files_to_select = min(
- filter_data["range"][1],
- len(filter_data["files"])
- )
+ number_of_files_to_select = min(filter_data["range"][1], len(filter_data["files"]))
  else:
- number_of_files_to_select = len(
- filter_data["files"]
- )
+ number_of_files_to_select = len(filter_data["files"])
 
- files_selection = [ff["_id"] for ff in
- filter_data["files"]
- [:number_of_files_to_select]]
- chosen_files[settings_key][filter_key] = \
- files_selection
+ files_selection = [ff["_id"] for ff in filter_data["files"][:number_of_files_to_select]]
+ chosen_files[settings_key][filter_key] = files_selection
 
  new_post["user_preference"] = json.dumps(chosen_files)
  else:
  if has_warning and not ignore_warnings:
- logger.info("cancelling analysis due to warnings, " +
- "set \"ignore_warnings\" to True to override")
+ logger.info("cancelling analysis due to warnings, " + 'set "ignore_warnings" to True to override')
  new_post["cancel"] = "1"
  else:
  logger.info("suppressing warnings")
  new_post["user_preference"] = "{}"
  new_post["_mint_only_warning"] = "1"
 
- return self.__handle_start_analysis(
- new_post, ignore_warnings=ignore_warnings, n_calls=n_calls
- )
+ return self.__handle_start_analysis(new_post, ignore_warnings=ignore_warnings, n_calls=n_calls)
  except platform.ActionFailedError as e:
  logger.error(f"Unable to start the analysis: {e}")
  return None
@@ -1646,19 +1572,15 @@ class Project:
  logger = logging.getLogger(__name__)
  logger.info(f"Setting QC status to {status}: {comments}")
 
- platform.parse_response(platform.post(
- auth=self._account.auth,
- endpoint="projectset_manager/set_qa_status",
- data={
- "item_ids": analysis_id,
- "status": status.value,
- "comments": comments,
- "entity": "analysis"
- }
- ))
+ platform.parse_response(
+ platform.post(
+ auth=self._account.auth,
+ endpoint="projectset_manager/set_qa_status",
+ data={"item_ids": analysis_id, "status": status.value, "comments": comments, "entity": "analysis"},
+ )
+ )
 
- def get_qc_status(
- self, patient_secret_name=None, ssid=None, analysis_id=None):
+ def get_qc_status(self, patient_secret_name=None, ssid=None, analysis_id=None):
  """
  Gets the session QC status of a session. If the analysis_id is
  specified, it returns the QC of the
@@ -1668,17 +1590,15 @@ class Project:
  if patient_secret_name and ssid:
  session = self.get_subjects_metadata(
  search_criteria={
- "pars_patient_secret_name": f"string;"
- f"{patient_secret_name}",
- "pars_ssid": f"integer;eq|{ssid}"
+ "pars_patient_secret_name": f"string;" f"{patient_secret_name}",
+ "pars_ssid": f"integer;eq|{ssid}",
  }
  )
  to_return = session["qa_status"], session["qa_comments"]
  elif analysis_id:
  try:
  to_return = [
- analysis["qa_data"] for analysis in self.list_analysis()
- if analysis["_id"] == analysis_id
+ analysis["qa_data"] for analysis in self.list_analysis() if analysis["_id"] == analysis_id
  ][0]
  to_return = to_return["qa_status"], to_return["qa_comments"]
  except IndexError:
@@ -1689,22 +1609,21 @@ class Project:
  print(f"An error occurred: {e}")
  to_return = None
  else:
- raise Exception(f"Must specify {patient_secret_name} and "
- f"{ssid} or {analysis_id}.")
+ raise Exception(f"Must specify {patient_secret_name} and {ssid} or {analysis_id}.")
  return to_return
 
  def start_multiple_analyses(
- self,
- script_name,
- version,
- n_times,
- in_container_id=None,
- analysis_name=None,
- analysis_description=None,
- ignore_warnings=False,
- settings=None,
- tags=None,
- preferred_destination=None
+ self,
+ script_name,
+ version,
+ n_times,
+ in_container_id=None,
+ analysis_name=None,
+ analysis_description=None,
+ ignore_warnings=False,
+ settings=None,
+ tags=None,
+ preferred_destination=None,
  ):
  """
  Starts multiple times the same analysis on a subject with the same
@@ -1745,9 +1664,7 @@ class Project:
  """
  logger = logging.getLogger(logger_name)
  for n in range(n_times):
- logger.info(
- f"Running tool {script_name}:{version} {n + 1}/{n_times}"
- )
+ logger.info(f"Running tool {script_name}:{version} {n + 1}/{n_times}")
  yield self.start_analysis(
  script_name,
  version,
@@ -1757,5 +1674,40 @@ class Project:
  ignore_warnings=ignore_warnings,
  settings=settings,
  tags=tags,
- preferred_destination=preferred_destination
+ preferred_destination=preferred_destination,
  )
+
+ def set_project_qa_rules(self, rules_file_path, guidance_text=""):
+ """
+ Logs in to the Qmenta platform, retrieves the project ID based on the project name,
+ and updates the project's QA rules using the provided rules file.
+
+ Args:
+ rules_file_path (str): The file path to the JSON file containing the QA rules.
+ guidance_text (str): Description of the rules. Only visible for Platform admins.
+
+ Returns:
+ bool: True if the rules were set successfully, False otherwise.
+ """
+ # Read the rules from the JSON file
+ try:
+ with open(rules_file_path, "r") as fr:
+ rules = json.load(fr)
+ except FileNotFoundError:
+ print(f"ERROR: Rules file '{rules_file_path}' not found.")
+ return False
+
+ # Update the project's QA rules
+ res = platform.post(
+ auth=self._account.auth,
+ endpoint="projectset_manager/set_session_qa_requirements",
+ data={"project_id": self._project_id, "rules": json.dumps(rules), "guidance_text": guidance_text},
+ )
+
+ if res.json().get("success") == 1:
+ print("Rules set up successfully!")
+ return True
+ else:
+ print("ERROR setting the rules")
+ print(res.json())
+ return False
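
The main functional addition in this release appears to be the new Project.set_project_qa_rules() method shown above, which reads a JSON rules file and posts it to the projectset_manager/set_session_qa_requirements endpoint for the current project. A minimal usage sketch follows; `project` is assumed to be an already-constructed, authenticated qmenta.client Project instance, and rules.json is a placeholder file name:

    # Hypothetical example: `project` is an existing qmenta.client.Project
    # instance and rules.json holds the QA rules in the platform's JSON format.
    ok = project.set_project_qa_rules("rules.json", guidance_text="Baseline session QC rules")
    print("QA rules updated" if ok else "QA rules update failed")

Note that, unlike the other methods in Project.py, this one reports success and errors with print() rather than the module logger, and it returns True or False instead of raising on failure.
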
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: qmenta-client
- Version: 1.1.dev1289
+ Version: 1.1.dev1295
  Summary: Python client lib to interact with the QMENTA platform.
  Author: QMENTA
  Author-email: dev@qmenta.com
@@ -1,10 +1,10 @@
  qmenta/__init__.py,sha256=jv2YF__bseklT3OWEzlqJ5qE24c4aWd5F4r0TTjOrWQ,65
  qmenta/client/Account.py,sha256=MEljEy9cmg2uP2FG1d3TZAgfj66EE2_3PQAZ9rvpCXY,9647
  qmenta/client/File.py,sha256=ZgvSqejIosUt4uoX7opUnPnp5XGEaJNMRwFC0mQVB8k,5344
- qmenta/client/Project.py,sha256=Y0G5vSVODThAL8kGwmtkcZrE49vB2LVlNf2eRrqr8dg,59767
+ qmenta/client/Project.py,sha256=XXwsd2QKk_p61Mg2ahXxS9MHl1KENDPeNTsooQoXNkE,58955
  qmenta/client/Subject.py,sha256=lhxxVdQ6d-GNoQC8mrJwa4L1f44nJc4PcJtDspmKN7I,8756
  qmenta/client/__init__.py,sha256=AjTojBhZeW5nl0i605KS8S1Gl5tPNc1hdzD47BGNfoI,147
  qmenta/client/utils.py,sha256=5DK2T_HQprrCwLS0Ycm2CjseaYmAUKaJkJvYoW-Rqzc,2479
- qmenta_client-1.1.dev1289.dist-info/METADATA,sha256=khcRoMKuV0hhGvxc-9tjMy9RRIXugkzyE5sKZ_PuF4M,672
- qmenta_client-1.1.dev1289.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
- qmenta_client-1.1.dev1289.dist-info/RECORD,,
+ qmenta_client-1.1.dev1295.dist-info/METADATA,sha256=G87kykIBmTLL0ebfHvfT0J-0as4u-HCWPzpdDnTiuds,672
+ qmenta_client-1.1.dev1295.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+ qmenta_client-1.1.dev1295.dist-info/RECORD,,