aind-data-transfer-service 1.15.0__py3-none-any.whl → 1.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of aind-data-transfer-service might be problematic.

@@ -1,7 +1,7 @@
 """Init package"""
 import os
 
-__version__ = "1.15.0"
+__version__ = "1.16.0"
 
 # Global constants
 OPEN_DATA_BUCKET_NAME = os.getenv("OPEN_DATA_BUCKET_NAME", "open")
@@ -1,7 +1,10 @@
 """Module to handle processing legacy csv files"""
 
 import re
+from collections.abc import Mapping
+from copy import deepcopy
 from datetime import datetime
+from typing import Any, Dict
 
 from aind_data_schema_models.modalities import Modality
 from aind_data_schema_models.platforms import Platform
@@ -13,6 +16,45 @@ DATETIME_PATTERN2 = re.compile(
 )
 
 
+def nested_update(dict_to_update: Dict[str, Any], updates: Mapping):
+    """
+    Update a nested dictionary in-place.
+    Parameters
+    ----------
+    dict_to_update : Dict[str, Any]
+    updates : Mapping
+
+    """
+    for k, v in updates.items():
+        if isinstance(v, Mapping):
+            dict_to_update[k] = nested_update(dict_to_update.get(k, {}), v)
+        else:
+            dict_to_update[k] = v
+    return dict_to_update
+
+
+def create_nested_dict(
+    dict_to_update: Dict[str, Any], key_string: str, value: Any
+):
+    """
+    Updates in-place a nested dictionary with a period delimited key and value.
+    Parameters
+    ----------
+    dict_to_update : Dict[str, Any]
+    key_string : str
+    value : Any
+
+    """
+    keys = key_string.split(".", 1)
+    current_key = keys[0]
+    if len(keys) == 1:
+        dict_to_update[current_key] = value
+    else:
+        if current_key not in dict_to_update:
+            dict_to_update[current_key] = dict()
+        create_nested_dict(dict_to_update[current_key], keys[1], value)
+
+
 def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     """
     Maps csv row into a UploadJobConfigsV2 model. This attempts to be somewhat
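
The two helper functions added above drive the new dotted-key handling in
map_csv_row_to_job below. A minimal sketch of how they compose, using a
made-up key and values purely to illustrate the mechanics:

    nested_val = dict()
    create_nested_dict(nested_val, "extra_configs.chunk", "500MB")
    # nested_val == {"extra_configs": {"chunk": "500MB"}}

    existing = {"extra_configs": {"compress": True}}
    nested_update(existing, nested_val)
    # existing == {"extra_configs": {"compress": True, "chunk": "500MB"}}
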
@@ -29,7 +71,6 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     modality_configs = dict()
     job_configs = dict()
     check_s3_folder_exists_task = None
-    final_check_s3_folder_exist = None
     codeocean_tasks = dict()
     for key, value in row.items():
         # Strip white spaces and replace dashes with underscores
@@ -42,7 +83,9 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
             modality_parts = clean_key.split(".")
             modality_key = modality_parts[0]
             sub_key = (
-                "modality" if len(modality_parts) == 1 else modality_parts[1]
+                "modality"
+                if len(modality_parts) == 1
+                else ".".join(modality_parts[1:])
             )
             modality_configs.setdefault(modality_key, dict())
             # Temp backwards compatibility check
@@ -66,13 +109,22 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
                     job_settings=codeocean_pipeline_monitor_settings,
                 )
             else:
-                modality_configs[modality_key].update({sub_key: clean_val})
+                nested_val = dict()
+                create_nested_dict(
+                    dict_to_update=nested_val,
+                    key_string=sub_key,
+                    value=clean_val,
+                )
+                current_dict = deepcopy(
+                    modality_configs.get(modality_key, dict())
+                )
+                nested_update(current_dict, nested_val)
+                modality_configs[modality_key] = current_dict
         elif clean_key == "force_cloud_sync" and clean_val.upper() in [
             "TRUE",
             "T",
         ]:
             check_s3_folder_exists_task = {"skip_task": True}
-            final_check_s3_folder_exist = {"skip_task": True}
         else:
             job_configs[clean_key] = clean_val
     # Rename codeocean config keys with correct modality
@@ -93,8 +145,7 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     )
     tasks = {
         "gather_preliminary_metadata": metadata_task,
-        "check_s3_folder_exists_task": check_s3_folder_exists_task,
-        "final_check_s3_folder_exist": final_check_s3_folder_exist,
+        "check_s3_folder_exists": check_s3_folder_exists_task,
         "modality_transformation_settings": modality_tasks,
         "codeocean_pipeline_settings": None
         if codeocean_tasks == dict()
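
Net effect on the task payload built at the end of map_csv_row_to_job: the
separate final_check_s3_folder_exist entry is dropped and the skip flag now
lives under a single check_s3_folder_exists key. A sketch of the resulting
shape (the metadata, modality, and Code Ocean values are assembled earlier in
the function):

    tasks = {
        "gather_preliminary_metadata": metadata_task,
        # {"skip_task": True} only when the row sets force_cloud_sync
        "check_s3_folder_exists": check_s3_folder_exists_task,
        "modality_transformation_settings": modality_tasks,
        # None when no Code Ocean configs were parsed from the row
        "codeocean_pipeline_settings": ...,
    }
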
@@ -29,7 +29,9 @@ from starlette.middleware.sessions import SessionMiddleware
 from starlette.responses import RedirectResponse
 from starlette.routing import Route
 
-from aind_data_transfer_service import OPEN_DATA_BUCKET_NAME
+from aind_data_transfer_service import (
+    OPEN_DATA_BUCKET_NAME,
+)
 from aind_data_transfer_service import (
     __version__ as aind_data_transfer_service_version,
 )
@@ -37,14 +39,18 @@ from aind_data_transfer_service.configs.csv_handler import map_csv_row_to_job
 from aind_data_transfer_service.configs.job_configs import (
     BasicUploadJobConfigs as LegacyBasicUploadJobConfigs,
 )
-from aind_data_transfer_service.configs.job_configs import HpcJobConfigs
+from aind_data_transfer_service.configs.job_configs import (
+    HpcJobConfigs,
+)
 from aind_data_transfer_service.configs.job_upload_template import (
     JobUploadTemplate,
 )
 from aind_data_transfer_service.hpc.client import HpcClient, HpcClientConfigs
 from aind_data_transfer_service.hpc.models import HpcJobSubmitSettings
 from aind_data_transfer_service.log_handler import LoggingConfigs, get_logger
-from aind_data_transfer_service.models.core import SubmitJobRequestV2
+from aind_data_transfer_service.models.core import (
+    SubmitJobRequestV2,
+)
 from aind_data_transfer_service.models.core import (
     validation_context as validation_context_v2,
 )
@@ -929,10 +935,10 @@ async def get_task_logs(request: Request):
 async def index(request: Request):
     """GET|POST /: form handler"""
     return templates.TemplateResponse(
+        request=request,
         name="index.html",
         context=(
             {
-                "request": request,
                 "project_names_url": project_names_url,
             }
         ),
@@ -945,10 +951,10 @@ async def job_tasks_table(request: Request):
     response_tasks_json = json.loads(response_tasks.body)
     data = response_tasks_json.get("data")
     return templates.TemplateResponse(
+        request=request,
         name="job_tasks_table.html",
         context=(
             {
-                "request": request,
                 "status_code": response_tasks.status_code,
                 "message": response_tasks_json.get("message"),
                 "errors": data.get("errors", []),
@@ -965,10 +971,10 @@ async def task_logs(request: Request):
     response_tasks_json = json.loads(response_tasks.body)
     data = response_tasks_json.get("data")
     return templates.TemplateResponse(
+        request=request,
         name="task_logs.html",
         context=(
             {
-                "request": request,
                 "status_code": response_tasks.status_code,
                 "message": response_tasks_json.get("message"),
                 "errors": data.get("errors", []),
@@ -982,10 +988,10 @@ async def jobs(request: Request):
     """Get Job Status page with pagination"""
     dag_ids = AirflowDagRunsRequestParameters.model_fields["dag_ids"].default
     return templates.TemplateResponse(
+        request=request,
         name="job_status.html",
         context=(
             {
-                "request": request,
                 "project_names_url": project_names_url,
                 "dag_ids": dag_ids,
             }
@@ -996,10 +1002,10 @@ async def jobs(request: Request):
 async def job_params(request: Request):
     """Get Job Parameters page"""
     return templates.TemplateResponse(
+        request=request,
         name="job_params.html",
         context=(
             {
-                "request": request,
                 "project_names_url": os.getenv(
                     "AIND_METADATA_SERVICE_PROJECT_NAMES_URL"
                 ),
@@ -1122,10 +1128,10 @@ async def admin(request: Request):
         user = {"name": "local user"}
     if user:
         return templates.TemplateResponse(
+            request=request,
            name="admin.html",
            context=(
                {
-                    "request": request,
                    "project_names_url": project_names_url,
                    "user_name": user.get("name", "unknown"),
                    "user_email": user.get("email", "unknown"),
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aind-data-transfer-service
-Version: 1.15.0
+Version: 1.16.0
 Summary: Service that handles requests to upload data to the cloud
 Author: Allen Institute for Neural Dynamics
 License: MIT
@@ -28,10 +28,10 @@ Requires-Dist: aind-data-transfer-models==0.17.0; extra == "server"
 Requires-Dist: aind-metadata-mapper==0.23.0; extra == "server"
 Requires-Dist: boto3; extra == "server"
 Requires-Dist: boto3-stubs[ssm]; extra == "server"
-Requires-Dist: fastapi; extra == "server"
+Requires-Dist: fastapi>=0.115.13; extra == "server"
 Requires-Dist: httpx; extra == "server"
 Requires-Dist: jinja2; extra == "server"
-Requires-Dist: starlette; extra == "server"
+Requires-Dist: starlette<0.47.0,>=0.40.0; extra == "server"
 Requires-Dist: starlette_wtf; extra == "server"
 Requires-Dist: uvicorn[standard]; extra == "server"
 Requires-Dist: wtforms; extra == "server"
@@ -1,8 +1,8 @@
-aind_data_transfer_service/__init__.py,sha256=lho7ClOyDOm8TOnoEsiTmbuidZwwCwry8o_Vu_mJ5qI,272
+aind_data_transfer_service/__init__.py,sha256=BHEL9NBC6zix81jxrsxrEp-bqoWgrWqKd2qrtmDv1fg,272
 aind_data_transfer_service/log_handler.py,sha256=c7a-gLmZeRpeCUBwCz6XsTszWXQeQdR7eKZtas4llXM,1700
-aind_data_transfer_service/server.py,sha256=8TRxybpk8hkqPng_6mEZc9wv8ofGErgYf0BSPk-8VAU,44545
+aind_data_transfer_service/server.py,sha256=cJf57MXuzKQb9oBK6n0z7gksHMSbssQ0f409-rOqoXI,44506
 aind_data_transfer_service/configs/__init__.py,sha256=9W5GTuso9Is1B9X16RXcdb_GxasZvj6qDzOBDv0AbTc,36
-aind_data_transfer_service/configs/csv_handler.py,sha256=9jM0fUlWCzmqTC7ubAeFCl0eEIX5BQvHcPPPTPngcog,4374
+aind_data_transfer_service/configs/csv_handler.py,sha256=hCdfAYZW_49-l1rbua5On2Tw2ks674Z-MgB_NJlIkU4,5746
 aind_data_transfer_service/configs/job_configs.py,sha256=T-h5N6lyY9xTZ_xg_5FxkyYuMdagApbE6xalxFQ-bqA,18848
 aind_data_transfer_service/configs/job_upload_template.py,sha256=aC5m1uD_YcpbggFQ-yZ7ZJSUUGX1yQqQLF3SwmljrLk,5127
 aind_data_transfer_service/hpc/__init__.py,sha256=YNc68YNlmXwKIPFMIViz_K4XzVVHkLPEBOFyO5DKMKI,53
@@ -17,8 +17,8 @@ aind_data_transfer_service/templates/job_params.html,sha256=ivofS1CjSnC87T5J2BTs
 aind_data_transfer_service/templates/job_status.html,sha256=5lUUGZL-5urppG610qDOgpfIE-OcQH57gFWvRA5pBNM,16938
 aind_data_transfer_service/templates/job_tasks_table.html,sha256=rWFukhjZ4dhPyabe372tmi4lbQS2fyELZ7Awbn5Un4g,6181
 aind_data_transfer_service/templates/task_logs.html,sha256=y1GnQft0S50ghPb2xJDjAlefymB9a4zYdMikUFV7Tl4,918
-aind_data_transfer_service-1.15.0.dist-info/licenses/LICENSE,sha256=U0Y7B3gZJHXpjJVLgTQjM8e_c8w4JJpLgGhIdsoFR1Y,1092
-aind_data_transfer_service-1.15.0.dist-info/METADATA,sha256=XNDcKsS_NFZIZMBOip1nBheDFo8lirOA7a9P14SVtNo,2452
-aind_data_transfer_service-1.15.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-aind_data_transfer_service-1.15.0.dist-info/top_level.txt,sha256=XmxH0q27Jholj2-VYh-6WMrh9Lw6kkuCX_fdsj3SaFE,27
-aind_data_transfer_service-1.15.0.dist-info/RECORD,,
+aind_data_transfer_service-1.16.0.dist-info/licenses/LICENSE,sha256=U0Y7B3gZJHXpjJVLgTQjM8e_c8w4JJpLgGhIdsoFR1Y,1092
+aind_data_transfer_service-1.16.0.dist-info/METADATA,sha256=YkT9jGjZKxpa2O5qzekouiwzsuviOT_2w-7kg3511Ck,2478
+aind_data_transfer_service-1.16.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aind_data_transfer_service-1.16.0.dist-info/top_level.txt,sha256=XmxH0q27Jholj2-VYh-6WMrh9Lw6kkuCX_fdsj3SaFE,27
+aind_data_transfer_service-1.16.0.dist-info/RECORD,,