nci-cidc-api-modules 1.1.11-py3-none-any.whl → 1.1.13-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cidc_api/shared/auth.py CHANGED
@@ -172,9 +172,7 @@ def _extract_token() -> str:
 
 
 ### Authorization logic ###
-def authorize(
-    user: Users, allowed_roles: List[str], resource: str, method: str
-) -> bool:
+def authorize(user: Users, allowed_roles: List[str], resource: str, method: str) -> bool:
     """Check if the current user is authorized to act on the current request's resource.
     Raises Unauthorized
     - if user is not registered
@@ -220,9 +218,7 @@ def authorize(
 
     # User is approved and registered, so just check their role.
     if allowed_roles and db_user.role not in allowed_roles:
-        raise Unauthorized(
-            f"{db_user.email} is not authorized to access this endpoint."
-        )
+        raise Unauthorized(f"{db_user.email} is not authorized to access this endpoint.")
 
     return True
 
@@ -268,9 +264,7 @@ def _enforce_cli_version():
 
     # The CLI sets the User-Agent header to `cidc-cli/{version}`,
     # so we can assess whether the requester needs to update their CLI.
-    is_old_cli = client == "cidc-cli" and version.parse(client_version) < version.parse(
-        app.config["MIN_CLI_VERSION"]
-    )
+    is_old_cli = client == "cidc-cli" and version.parse(client_version) < version.parse(app.config["MIN_CLI_VERSION"])
 
     if is_old_cli:
         logger.info("cancelling request: detected outdated CLI")
cidc_api/shared/emails.py CHANGED
@@ -87,18 +87,14 @@ def new_user_registration(email: str) -> dict:
 @sendable
 def new_upload_alert(upload, full_metadata) -> dict:
     """Alert the CIDC administrators that an upload succeeded."""
-    pipeline_configs: Dict[str, Union[bytes, str]] = (
-        generate_analysis_configs_from_upload_patch(
-            full_metadata,
-            upload.metadata_patch,
-            upload.upload_type,
-            GOOGLE_ACL_DATA_BUCKET,
-        )
+    pipeline_configs: Dict[str, Union[bytes, str]] = generate_analysis_configs_from_upload_patch(
+        full_metadata,
+        upload.metadata_patch,
+        upload.upload_type,
+        GOOGLE_ACL_DATA_BUCKET,
     )
 
-    subject = (
-        f"[UPLOAD SUCCESS]({ENV}) {upload.upload_type} uploaded to {upload.trial_id}"
-    )
+    subject = f"[UPLOAD SUCCESS]({ENV}) {upload.upload_type} uploaded to {upload.trial_id}"
 
     html_content = f"""
     <ul>
@@ -119,16 +115,12 @@ def new_upload_alert(upload, full_metadata) -> dict:
 
 
 @sendable
-def intake_metadata(
-    user, trial_id: str, assay_type: str, description: str, xlsx_gcp_url: str
-) -> dict:
+def intake_metadata(user, trial_id: str, assay_type: str, description: str, xlsx_gcp_url: str) -> dict:
     """
     Send an email containing a metadata xlsx file and description of that file to the
     CIDC Admin mailing list.
     """
-    subject = (
-        f"[METADATA SUBMISSION]({ENV}) {user.email} submitted {trial_id}/{assay_type}"
-    )
+    subject = f"[METADATA SUBMISSION]({ENV}) {user.email} submitted {trial_id}/{assay_type}"
     html_content = f"""
     <p><strong>user:</strong> {user.first_n} {user.last_n} ({user.email})</p>
     <p><strong>contact email:</strong> {user.contact_email}</p>
cidc_api/shared/gcloud_client.py CHANGED
@@ -80,15 +80,11 @@ def _get_storage_client() -> storage.Client:
     if STORAGE_CLIENT is None:
         logger.debug("Getting local client")
         if os.environ.get("DEV_GOOGLE_STORAGE", None):
-            client_options = ClientOptions(
-                api_endpoint=os.environ.get("DEV_GOOGLE_STORAGE")
-            )
+            client_options = ClientOptions(api_endpoint=os.environ.get("DEV_GOOGLE_STORAGE"))
             credentials = Credentials.from_service_account_info(
                 json.loads(SECRET_MANAGER.get("APP_ENGINE_CREDENTIALS"))
             )
-            STORAGE_CLIENT = storage.Client(
-                client_options=client_options, credentials=credentials
-            )
+            STORAGE_CLIENT = storage.Client(client_options=client_options, credentials=credentials)
             logger.debug(f"Local client set to {STORAGE_CLIENT}")
     return STORAGE_CLIENT
 
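The hunk above points the Google Cloud Storage client at a local endpoint when `DEV_GOOGLE_STORAGE` is set. A minimal sketch of that pattern, assuming a fake-GCS emulator URL and credentials JSON read from an environment variable rather than the package's secret manager:

```python
# Sketch only: the emulator URL and the APP_ENGINE_CREDENTIALS env var are
# assumptions, not the package's actual SECRET_MANAGER-backed configuration.
import json
import os

from google.api_core.client_options import ClientOptions
from google.cloud import storage
from google.oauth2.service_account import Credentials


def make_storage_client() -> storage.Client:
    dev_endpoint = os.environ.get("DEV_GOOGLE_STORAGE")  # e.g. "http://localhost:4443"
    if dev_endpoint:
        # Route API calls to the local endpoint instead of storage.googleapis.com.
        client_options = ClientOptions(api_endpoint=dev_endpoint)
        credentials = Credentials.from_service_account_info(
            json.loads(os.environ["APP_ENGINE_CREDENTIALS"])
        )
        return storage.Client(client_options=client_options, credentials=credentials)
    # Fall back to application-default credentials outside of local dev.
    return storage.Client()
```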
@@ -124,9 +120,7 @@ def _get_crm_service() -> googleapiclient.discovery.Resource:
         credentials = Credentials.from_service_account_info(
             json.loads(SECRET_MANAGER.get(environ.get("APP_ENGINE_CREDENTIALS_ID")))
         )
-        CRM_SERVICE = googleapiclient.discovery.build(
-            "cloudresourcemanager", "v1", credentials=credentials
-        )
+        CRM_SERVICE = googleapiclient.discovery.build("cloudresourcemanager", "v1", credentials=credentials)
     return CRM_SERVICE
 
 
@@ -176,14 +170,10 @@ def _get_bigquery_dataset(dataset_id: str) -> bigquery.Dataset:
     return dataset
 
 
-XLSX_GCS_URI_FORMAT = (
-    "{trial_id}/xlsx/{template_category}/{template_type}/{upload_moment}.xlsx"
-)
+XLSX_GCS_URI_FORMAT = "{trial_id}/xlsx/{template_category}/{template_type}/{upload_moment}.xlsx"
 
 
-PseudoBblob = namedtuple(
-    "_pseudo_blob", ["name", "size", "md5_hash", "crc32c", "time_created"]
-)
+PseudoBblob = namedtuple("_pseudo_blob", ["name", "size", "md5_hash", "crc32c", "time_created"])
 
 
 def upload_xlsx_to_gcs(
@@ -210,12 +200,8 @@ def upload_xlsx_to_gcs(
     )
 
     if ENV == "dev" and not DEV_USE_GCS:
-        logger.info(
-            f"Would've saved {blob_name} to {GOOGLE_UPLOAD_BUCKET} and {GOOGLE_ACL_DATA_BUCKET}"
-        )
-        return PseudoBblob(
-            blob_name, 0, "_pseudo_md5_hash", "_pseudo_crc32c", upload_moment
-        )
+        logger.info(f"Would've saved {blob_name} to {GOOGLE_UPLOAD_BUCKET} and {GOOGLE_ACL_DATA_BUCKET}")
+        return PseudoBblob(blob_name, 0, "_pseudo_md5_hash", "_pseudo_crc32c", upload_moment)
 
     upload_bucket: storage.Bucket = _get_bucket(GOOGLE_UPLOAD_BUCKET)
     blob = upload_bucket.blob(blob_name)
@@ -348,9 +334,7 @@ def revoke_intake_access(user_email: str) -> None:
     revoke_storage_iam_access(bucket, GOOGLE_INTAKE_ROLE, user_email)
 
 
-def upload_xlsx_to_intake_bucket(
-    user_email: str, trial_id: str, upload_type: str, xlsx: FileStorage
-) -> str:
+def upload_xlsx_to_intake_bucket(user_email: str, trial_id: str, upload_type: str, xlsx: FileStorage) -> str:
     """
     Upload a metadata spreadsheet file to the GCS intake bucket,
     returning the URL to the bucket in the GCP console.
@@ -401,17 +385,13 @@ def get_blob_names(
     session: Optional[Session] = None,
 ) -> Set[str]:
     """session only needed if trial_id is None"""
-    prefixes: Set[str] = _build_trial_upload_prefixes(
-        trial_id, upload_type, session=session
-    )
+    prefixes: Set[str] = _build_trial_upload_prefixes(trial_id, upload_type, session=session)
 
     # https://googleapis.dev/python/storage/latest/client.html#google.cloud.storage.client.Client.list_blobs
     blob_list = []
     storage_client = _get_storage_client()
     for prefix in prefixes:
-        blob_list.extend(
-            storage_client.list_blobs(GOOGLE_ACL_DATA_BUCKET, prefix=prefix)
-        )
+        blob_list.extend(storage_client.list_blobs(GOOGLE_ACL_DATA_BUCKET, prefix=prefix))
     return {blob.name for blob in blob_list}
 
 
@@ -450,13 +430,9 @@ def grant_download_access(
     If the user already has download access for this trial and upload type, idempotent.
     Download access is controlled by IAM on production and ACL elsewhere.
     """
-    user_email_list = (
-        [user_email_list] if isinstance(user_email_list, str) else user_email_list
-    )
+    user_email_list = [user_email_list] if isinstance(user_email_list, str) else user_email_list
 
-    logger.info(
-        f"Granting download access on trial {trial_id} upload {upload_type} to {user_email_list}"
-    )
+    logger.info(f"Granting download access on trial {trial_id} upload {upload_type} to {user_email_list}")
 
     # ---- Handle through main grant permissions topic ----
     # would time out in CFn
@@ -510,12 +486,8 @@ def revoke_download_access(
     Download access is controlled by ACL.
     """
 
-    user_email_list = (
-        [user_email_list] if isinstance(user_email_list, str) else user_email_list
-    )
-    logger.info(
-        f"Revoking download access on trial {trial_id} upload {upload_type} from {user_email_list}"
-    )
+    user_email_list = [user_email_list] if isinstance(user_email_list, str) else user_email_list
+    logger.info(f"Revoking download access on trial {trial_id} upload {upload_type} from {user_email_list}")
 
     # ---- Handle through main grant permissions topic ----
     # would timeout in cloud function
@@ -554,20 +526,13 @@ def _build_trial_upload_prefixes(
     if not trial_id:
         from ..models.models import TrialMetadata
 
-        trial_set = {
-            str(t.trial_id)
-            for t in session.query(TrialMetadata).add_columns(TrialMetadata.trial_id)
-        }
+        trial_set = {str(t.trial_id) for t in session.query(TrialMetadata).add_columns(TrialMetadata.trial_id)}
 
     else:
         trial_set = set([trial_id])
 
     if not upload_type or None in upload_type:
-        upload_set = {
-            upload_name
-            for upload_name in ASSAY_TO_FILEPATH.keys()
-            if upload_name != "clinical_data"
-        }
+        upload_set = {upload_name for upload_name in ASSAY_TO_FILEPATH.keys() if upload_name != "clinical_data"}
     else:
         upload_set = set(upload_type)
 
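The reflowed comprehensions above collect the trial IDs and upload types whose GCS prefixes should be listed, defaulting to every non-clinical upload type when none is given. A simplified sketch of the upload-type selection, with a toy mapping standing in for the real `ASSAY_TO_FILEPATH` constant:

```python
# Sketch: ASSAY_TO_FILEPATH here is a toy mapping used only to illustrate the
# filtering logic; the real constant maps upload types to GCS path fragments.
from typing import Iterable, Optional, Set

ASSAY_TO_FILEPATH = {"wes": "wes/", "rna": "rna/", "clinical_data": "clinical/"}


def build_upload_set(upload_type: Optional[Iterable[str]]) -> Set[str]:
    """Default to every non-clinical upload type when none is specified."""
    if not upload_type or None in upload_type:
        return {name for name in ASSAY_TO_FILEPATH if name != "clinical_data"}
    return set(upload_type)


print(build_upload_set(None))     # {'wes', 'rna'} (order may vary)
print(build_upload_set(["wes"]))  # {'wes'}
```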
@@ -613,9 +578,7 @@ def grant_storage_iam_access(
         # special value -1 for non-expiring
         binding = _build_storage_iam_binding(bucket.name, role, user_email, ttl_days=-1)
     else:
-        binding = _build_storage_iam_binding(
-            bucket.name, role, user_email
-        )  # use default
+        binding = _build_storage_iam_binding(bucket.name, role, user_email)  # use default
     # insert the binding into the policy
     policy.bindings.append(binding)
 
@@ -634,20 +597,14 @@ def grant_bigquery_iam_access(policy: Policy, user_emails: List[str]) -> None:
     """
     roles = [b["role"] for b in policy["bindings"]]
 
-    if (
-        GOOGLE_BIGQUERY_USER_ROLE in roles
-    ):  # if the role is already in the policy, add the users
-        binding = next(
-            b for b in policy["bindings"] if b["role"] == GOOGLE_BIGQUERY_USER_ROLE
-        )
+    if GOOGLE_BIGQUERY_USER_ROLE in roles:  # if the role is already in the policy, add the users
+        binding = next(b for b in policy["bindings"] if b["role"] == GOOGLE_BIGQUERY_USER_ROLE)
         for user_email in user_emails:
             binding["members"].append(user_member(user_email))
     else:  # otherwise create the role and add to policy
         binding = {
             "role": GOOGLE_BIGQUERY_USER_ROLE,
-            "members": [
-                user_member(user_email) for user_email in user_emails
-            ],  # convert format
+            "members": [user_member(user_email) for user_email in user_emails],  # convert format
         }
         policy["bindings"].append(binding)
 
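The logic above either extends an existing BigQuery jobUser binding with new members or creates the binding if it is missing. A standalone sketch of that merge using plain dicts; the role constant, helper names, and emails are placeholders, with `user_member` following GCP's `user:<email>` member convention:

```python
# Sketch of merging users into an IAM policy binding; BIGQUERY_USER_ROLE,
# user_member, and add_users_to_policy are illustrative names, not the package's.
from typing import List

BIGQUERY_USER_ROLE = "roles/bigquery.jobUser"  # hypothetical constant for the sketch


def user_member(email: str) -> str:
    return f"user:{email}"


def add_users_to_policy(policy: dict, user_emails: List[str]) -> None:
    roles = [b["role"] for b in policy["bindings"]]
    if BIGQUERY_USER_ROLE in roles:
        # Role already bound: extend its member list.
        binding = next(b for b in policy["bindings"] if b["role"] == BIGQUERY_USER_ROLE)
        for email in user_emails:
            binding["members"].append(user_member(email))
    else:
        # No binding yet: create one with all the users.
        policy["bindings"].append(
            {"role": BIGQUERY_USER_ROLE, "members": [user_member(e) for e in user_emails]}
        )


policy = {"bindings": []}
add_users_to_policy(policy, ["a@example.com"])
add_users_to_policy(policy, ["b@example.com"])
print(policy["bindings"][0]["members"])  # ['user:a@example.com', 'user:b@example.com']
```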
@@ -683,9 +640,7 @@ def grant_bigquery_iam_access(policy: Policy, user_emails: List[str]) -> None:
 MAX_REVOKE_ALL_ITERATIONS = 250
 
 
-def revoke_storage_iam_access(
-    bucket: storage.Bucket, role: str, user_email: str
-) -> None:
+def revoke_storage_iam_access(bucket: storage.Bucket, role: str, user_email: str) -> None:
     """Revoke a bucket IAM policy made by calling `grant_storage_iam_access`."""
     # see https://cloud.google.com/storage/docs/access-control/using-iam-permissions#code-samples_3
     policy = bucket.get_iam_policy(requested_policy_version=3)
@@ -696,9 +651,7 @@ def revoke_storage_iam_access(
         removed_binding = _find_and_pop_storage_iam_binding(policy, role, user_email)
         if removed_binding is None:
             if i == 0:
-                warnings.warn(
-                    f"Tried to revoke a non-existent download IAM permission for {user_email}"
-                )
+                warnings.warn(f"Tried to revoke a non-existent download IAM permission for {user_email}")
             break
 
         try:
@@ -715,13 +668,9 @@ def revoke_bigquery_iam_access(policy: Policy, user_email: str) -> None:
     revoked from the public bigquery dataset in prod.
     """
     # find and remove user on binding
-    binding = next(
-        (b for b in policy["bindings"] if b["role"] == GOOGLE_BIGQUERY_USER_ROLE), None
-    )
+    binding = next((b for b in policy["bindings"] if b["role"] == GOOGLE_BIGQUERY_USER_ROLE), None)
     if not binding:
-        logger.warning(
-            "Expected at least 1 user to have a bigquery jobUser role, but 0 found."
-        )
+        logger.warning("Expected at least 1 user to have a bigquery jobUser role, but 0 found.")
         return
 
     if "members" in binding and user_member(user_email) in binding["members"]:
@@ -748,9 +697,7 @@ def revoke_bigquery_iam_access(policy: Policy, user_email: str) -> None:
     dataset = _get_bigquery_dataset(dataset_id)
     entries = list(dataset.access_entries)
 
-    dataset.access_entries = [
-        entry for entry in entries if entry.entity_id != user_email
-    ]
+    dataset.access_entries = [entry for entry in entries if entry.entity_id != user_email]
 
     dataset = BIGQUERY_CLIENT.update_dataset(
         dataset,
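Above, dataset-level revocation filters the access entries by `entity_id` and pushes only that field back via `update_dataset`. A minimal sketch of the same pattern against the google-cloud-bigquery API; the function name, dataset ID argument, and email are placeholders:

```python
# Sketch only: revoke_dataset_access is an illustrative helper, not the package's API.
from google.cloud import bigquery


def revoke_dataset_access(client: bigquery.Client, dataset_id: str, user_email: str) -> None:
    dataset = client.get_dataset(dataset_id)
    # Drop any access entry granted to this user, keep everything else.
    dataset.access_entries = [
        entry for entry in dataset.access_entries if entry.entity_id != user_email
    ]
    # Only the access_entries field is sent in the update request.
    client.update_dataset(dataset, ["access_entries"])
```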
@@ -837,11 +784,7 @@ def _find_and_pop_storage_iam_binding(
         )
         user_binding_index = i
 
-    binding = (
-        policy.bindings.pop(user_binding_index)
-        if user_binding_index is not None
-        else None
-    )
+    binding = policy.bindings.pop(user_binding_index) if user_binding_index is not None else None
 
     return binding
 
@@ -870,9 +813,9 @@ def get_signed_url(
     full_filename = object_name.replace("/", "_").replace('"', "_").replace(" ", "_")
     other_kwargs = {}
     if os.environ.get("DEV_GOOGLE_STORAGE", None):
-        other_kwargs["api_access_endpoint"] = (
-            os.environ.get("DEV_GOOGLE_STORAGE") or ""
-        ) + (os.environ.get("DEV_GOOGLE_STORAGE_PATH") or "")
+        other_kwargs["api_access_endpoint"] = (os.environ.get("DEV_GOOGLE_STORAGE") or "") + (
+            os.environ.get("DEV_GOOGLE_STORAGE_PATH") or ""
+        )
     url = blob.generate_signed_url(
         version="v2",
         expiration=expiration,
@@ -894,9 +837,7 @@ def _encode_and_publish(content: str, topic: str) -> Future:
     # Don't actually publish to Pub/Sub if running locally
     if ENV == "dev":
         if DEV_CFUNCTIONS_SERVER:
-            logger.info(
-                f"Publishing message {content!r} to topic {DEV_CFUNCTIONS_SERVER}/{topic}"
-            )
+            logger.info(f"Publishing message {content!r} to topic {DEV_CFUNCTIONS_SERVER}/{topic}")
 
             bdata = base64.b64encode(content.encode("utf-8"))
             try:
@@ -906,15 +847,11 @@ def _encode_and_publish(content: str, topic: str) -> Future:
                 timeout=TIMEOUT_IN_SECONDS,
             )
         except Exception as e:
-            raise Exception(
-                f"Couldn't publish message {content!r} to topic {DEV_CFUNCTIONS_SERVER}/{topic}"
-            ) from e
+            raise Exception(f"Couldn't publish message {content!r} to topic {DEV_CFUNCTIONS_SERVER}/{topic}") from e
 
         logger.info(f"Got {res}")
         if res.status_code != 200:
-            raise Exception(
-                f"Couldn't publish message {content!r} to {DEV_CFUNCTIONS_SERVER}/{topic}: {res!r}"
-            )
+            raise Exception(f"Couldn't publish message {content!r} to {DEV_CFUNCTIONS_SERVER}/{topic}: {res!r}")
 
         else:
             logger.info(f"Would've published message {content} to topic {topic}")
@@ -966,9 +903,7 @@ def send_email(to_emails: List[str], subject: str, html_content: str, **kw) -> N
         return
 
     logger.info(f"({ENV}) Sending email to {to_emails} with subject {subject}")
-    email_json = json.dumps(
-        {"to_emails": to_emails, "subject": subject, "html_content": html_content, **kw}
-    )
+    email_json = json.dumps({"to_emails": to_emails, "subject": subject, "html_content": html_content, **kw})
 
     report = _encode_and_publish(email_json, GOOGLE_EMAILS_TOPIC)
 
cidc_api/shared/jose.py CHANGED
@@ -17,9 +17,7 @@ PUBLIC_KEYS_CACHE = TTLCache(maxsize=3600, ttl=1024) # 1 hour, 1 MB
 @cached(cache=PUBLIC_KEYS_CACHE)
 def get_jwks() -> list:
     # get jwks from our Auth0 domain
-    return requests.get(
-        f"https://{AUTH0_DOMAIN}/.well-known/jwks.json", timeout=TIMEOUT_IN_SECONDS
-    ).json()
+    return requests.get(f"https://{AUTH0_DOMAIN}/.well-known/jwks.json", timeout=TIMEOUT_IN_SECONDS).json()
 
 
 def decode_id_token(token: str) -> dict:
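The `get_jwks` change above keeps the same behavior: fetch the Auth0 JWKS over HTTPS and memoize it in a TTL cache so repeated token verifications don't re-download the key set. A small sketch of that pattern with a placeholder Auth0 domain and cache sizing (not the package's configured `AUTH0_DOMAIN` or `PUBLIC_KEYS_CACHE` settings):

```python
# Sketch: "example.auth0.com", the cache sizing, and the timeout are placeholders.
import requests
from cachetools import TTLCache, cached

AUTH0_DOMAIN = "example.auth0.com"
JWKS_CACHE = TTLCache(maxsize=1, ttl=3600)  # refetch at most once an hour


@cached(cache=JWKS_CACHE)
def get_jwks() -> dict:
    """Fetch the JSON Web Key Set used to verify Auth0-issued tokens."""
    return requests.get(f"https://{AUTH0_DOMAIN}/.well-known/jwks.json", timeout=20).json()
```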
cidc_api/shared/rest_utils.py CHANGED
@@ -149,9 +149,7 @@ def lookup(
 
     if check_etag:
         if etag != record._etag:
-            raise PreconditionFailed(
-                "provided ETag does not match the stored ETag for this record"
-            )
+            raise PreconditionFailed("provided ETag does not match the stored ETag for this record")
 
     return record
 
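The lookup helper above enforces optimistic concurrency: if the client-supplied ETag doesn't match the record's stored `_etag`, the request fails with 412 Precondition Failed. A minimal sketch of that check using a hypothetical record class in place of the package's SQLAlchemy models:

```python
# Sketch: Record and check_etag are illustrative stand-ins for the package's
# model classes, which store an _etag value per row.
from dataclasses import dataclass

from werkzeug.exceptions import PreconditionFailed


@dataclass
class Record:
    _etag: str


def check_etag(record: Record, etag: str) -> None:
    """Raise 412 Precondition Failed if the caller's ETag is stale."""
    if etag != record._etag:
        raise PreconditionFailed("provided ETag does not match the stored ETag for this record")


check_etag(Record(_etag="abc123"), "abc123")  # passes silently
```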
@@ -177,9 +175,7 @@ def use_args_with_pagination(argmap: dict, model_schema: BaseSchema):
 
     # Ensure there are no collisions between argmaps
     for arg in argmap.keys():
-        assert (
-            arg not in pagination_argmap
-        ), f"Provided arg `{arg}` collides with pagination args"
+        assert arg not in pagination_argmap, f"Provided arg `{arg}` collides with pagination args"
     full_argmap = {**pagination_argmap, **argmap}
 
 
nci_cidc_api_modules-1.1.13.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nci_cidc_api_modules
-Version: 1.1.11
+Version: 1.1.13
 Summary: SQLAlchemy data models and configuration tools used in the NCI CIDC API
 Home-page: https://github.com/NCI-CIDC/cidc-api-gae
 License: MIT license
nci_cidc_api_modules-1.1.13.dist-info/RECORD ADDED
@@ -0,0 +1,26 @@
+cidc_api/config/__init__.py,sha256=5mX8GAPxUKV84iS-aGOoE-4m68LsOCGCDptXNdlgvj0,148
+cidc_api/config/db.py,sha256=cyWhWtmXha4OsrwUf6ez8aKSfm7tPSmPDE9JVSBx3Fk,1935
+cidc_api/config/logging.py,sha256=E-SdjOYTpfsml-YaBL7Mj2mUT60OTAHH2X6TdeJWasU,1038
+cidc_api/config/secrets.py,sha256=jRFj7W43pWuPf9DZQLCKF7WPXf5cUv-BAaS3ASqhV_Q,1481
+cidc_api/config/settings.py,sha256=p3YuGCH64S8pkdi_b5sDleVqXO6m7tZcDTd5EJnHlbE,4090
+cidc_api/csms/__init__.py,sha256=eJkY6rWNOAUBmSd4G1_U6h7i472druKEtBdVmgFZVPg,20
+cidc_api/csms/auth.py,sha256=VTfHlCym_hqVrHXv41Ku9RMAGN9BiNe7ui0o9KZCKtY,3185
+cidc_api/models/__init__.py,sha256=bl445G8Zic9YbhZ8ZBni07wtBMhLJRMBA-JqjLxx2bw,66
+cidc_api/models/csms_api.py,sha256=ovi_jZXZBg6XYEvIupbf5c0WyMbPi4V07OywbleKGqs,30737
+cidc_api/models/migrations.py,sha256=gp9vtkYbA9FFy2s-7woelAmsvQbJ41LO2_DY-YkFIrQ,11464
+cidc_api/models/models.py,sha256=tKLyBrVH_7fL2smGzApypBTyL-nuuXQHxI-TJsUAIMg,129200
+cidc_api/models/schemas.py,sha256=7tDYtmULuzTt2kg7RorWhte06ffalgpQKrFiDRGcPEQ,2711
+cidc_api/models/files/__init__.py,sha256=8BMTnUSHzUbz0lBeEQY6NvApxDD3GMWMduoVMos2g4Y,213
+cidc_api/models/files/details.py,sha256=v4v_BetAOBiEq-FVcxho8lamZCRGb48oAvKk9yq4ZOI,62204
+cidc_api/models/files/facets.py,sha256=803siMWziH5UVaF2tj49a_kJYb6CmSWHsGQJweXwyJY,28705
+cidc_api/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cidc_api/shared/auth.py,sha256=PHqmVGkqDjbmUofytVFwD_9ssgUomESl3fFtFHPwZYQ,9062
+cidc_api/shared/emails.py,sha256=MG2csNhMaw3WtbUEqjMxhzV3X1pMzC8RGYBfNy_X0nc,4334
+cidc_api/shared/gcloud_client.py,sha256=spgH0fapO713X82eV23uLXtq4aVWJHwEDK-XDxaehJc,33088
+cidc_api/shared/jose.py,sha256=-qzGzEDAlokEp9E7WtBtQkXyyfPWTYXlwYpCqVJWmqM,1830
+cidc_api/shared/rest_utils.py,sha256=RwR30WOUAYCxL7V-i2totEyeriG30GbBDvBcpLXhM9w,6594
+nci_cidc_api_modules-1.1.13.dist-info/licenses/LICENSE,sha256=pNYWVTHaYonnmJyplmeAp7tQAjosmDpAWjb34jjv7Xs,1102
+nci_cidc_api_modules-1.1.13.dist-info/METADATA,sha256=yk1QTeCl_Oal9mBirw9EvWHlLle0hmVOFzKxGnLw4Jk,41284
+nci_cidc_api_modules-1.1.13.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+nci_cidc_api_modules-1.1.13.dist-info/top_level.txt,sha256=rNiRzL0lJGi5Q9tY9uSoMdTbJ-7u5c_D2E86KA94yRA,9
+nci_cidc_api_modules-1.1.13.dist-info/RECORD,,
nci_cidc_api_modules-1.1.11.dist-info/RECORD DELETED
@@ -1,26 +0,0 @@
-cidc_api/config/__init__.py,sha256=5mX8GAPxUKV84iS-aGOoE-4m68LsOCGCDptXNdlgvj0,148
-cidc_api/config/db.py,sha256=ayeeNV-sV20hGoFyMMTMncI2V-FI9lVN3JV-Lmpr3xI,1981
-cidc_api/config/logging.py,sha256=gJ2TGgQVREng4Hv0phlCCkQai7HhumKYjJxubpxS6Q0,1090
-cidc_api/config/secrets.py,sha256=2DXeew1Pm0lnf2SLuo8wW5c5kOJp2WrhjflxZGsY_Ng,1505
-cidc_api/config/settings.py,sha256=2VHOVWdN4yUCNYMob2gaWgH2-1_4sbuo46JEIVT_PCY,4021
-cidc_api/csms/__init__.py,sha256=eJkY6rWNOAUBmSd4G1_U6h7i472druKEtBdVmgFZVPg,20
-cidc_api/csms/auth.py,sha256=25Yma2Kz3KLENAPSeBYacFuSZXng-EDgmgInKBsRyP0,3191
-cidc_api/models/__init__.py,sha256=bl445G8Zic9YbhZ8ZBni07wtBMhLJRMBA-JqjLxx2bw,66
-cidc_api/models/csms_api.py,sha256=_uB9ZoxCFxKO8ZDTxCjS0CpeQg14EdlkEqnwyAFyYFQ,31377
-cidc_api/models/migrations.py,sha256=gp9vtkYbA9FFy2s-7woelAmsvQbJ41LO2_DY-YkFIrQ,11464
-cidc_api/models/models.py,sha256=C0s28yCozvZ6K5xpSiVgURTci8fjQ2_wJlxU4OAQz-I,129135
-cidc_api/models/schemas.py,sha256=7tDYtmULuzTt2kg7RorWhte06ffalgpQKrFiDRGcPEQ,2711
-cidc_api/models/files/__init__.py,sha256=8BMTnUSHzUbz0lBeEQY6NvApxDD3GMWMduoVMos2g4Y,213
-cidc_api/models/files/details.py,sha256=h6R0p_hi-ukHsO7HV-3Wukccp0zRLJ1Oie_JNA_7Pl0,62274
-cidc_api/models/files/facets.py,sha256=0owlp-is2QJ7DemcsJ4VdlnN255NkkV1Cimg0VaXHpY,28967
-cidc_api/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cidc_api/shared/auth.py,sha256=EzMpYAR_gN5x985hgFAXTd24xygyctqZ80Yp05Ph_HQ,9104
-cidc_api/shared/emails.py,sha256=FXW9UfI2bCus350SQuL7ZQYq1Vg-vGXaGWmRfA6z2nM,4408
-cidc_api/shared/gcloud_client.py,sha256=7dDs0crLMJKdIp4IDSfrZBMB3h-zvWNieB81azoeLO4,33746
-cidc_api/shared/jose.py,sha256=QO30uIhbYDwzPEWWJXz0PfyV7E1AZHReEZJUVT70UJY,1844
-cidc_api/shared/rest_utils.py,sha256=LMfBpvJRjkfQjCzVXuhTTe4Foz4wlvaKg6QntyR-Hkc,6648
-nci_cidc_api_modules-1.1.11.dist-info/licenses/LICENSE,sha256=pNYWVTHaYonnmJyplmeAp7tQAjosmDpAWjb34jjv7Xs,1102
-nci_cidc_api_modules-1.1.11.dist-info/METADATA,sha256=b72D5JEyi8PEsRLq9c5T-AnKQvVB-LKVD_n8gbbbXhM,41284
-nci_cidc_api_modules-1.1.11.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-nci_cidc_api_modules-1.1.11.dist-info/top_level.txt,sha256=rNiRzL0lJGi5Q9tY9uSoMdTbJ-7u5c_D2E86KA94yRA,9
-nci_cidc_api_modules-1.1.11.dist-info/RECORD,,