aind-data-transfer-service 1.14.0__py3-none-any.whl → 1.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of aind-data-transfer-service might be problematic.

@@ -1,7 +1,7 @@
 """Init package"""
 import os
 
-__version__ = "1.14.0"
+__version__ = "1.16.0"
 
 # Global constants
 OPEN_DATA_BUCKET_NAME = os.getenv("OPEN_DATA_BUCKET_NAME", "open")
@@ -1,7 +1,10 @@
 """Module to handle processing legacy csv files"""
 
 import re
+from collections.abc import Mapping
+from copy import deepcopy
 from datetime import datetime
+from typing import Any, Dict
 
 from aind_data_schema_models.modalities import Modality
 from aind_data_schema_models.platforms import Platform
@@ -13,6 +16,45 @@ DATETIME_PATTERN2 = re.compile(
 )
 
 
+def nested_update(dict_to_update: Dict[str, Any], updates: Mapping):
+    """
+    Update a nested dictionary in-place.
+    Parameters
+    ----------
+    dict_to_update : Dict[str, Any]
+    updates : Mapping
+
+    """
+    for k, v in updates.items():
+        if isinstance(v, Mapping):
+            dict_to_update[k] = nested_update(dict_to_update.get(k, {}), v)
+        else:
+            dict_to_update[k] = v
+    return dict_to_update
+
+
+def create_nested_dict(
+    dict_to_update: Dict[str, Any], key_string: str, value: Any
+):
+    """
+    Updates in-place a nested dictionary with a period delimited key and value.
+    Parameters
+    ----------
+    dict_to_update : Dict[str, Any]
+    key_string : str
+    value : Any
+
+    """
+    keys = key_string.split(".", 1)
+    current_key = keys[0]
+    if len(keys) == 1:
+        dict_to_update[current_key] = value
+    else:
+        if current_key not in dict_to_update:
+            dict_to_update[current_key] = dict()
+        create_nested_dict(dict_to_update[current_key], keys[1], value)
+
+
 def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     """
     Maps csv row into a UploadJobConfigsV2 model. This attempts to be somewhat
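The new `nested_update` and `create_nested_dict` helpers let period-delimited CSV columns populate nested config dictionaries instead of flat key/value pairs. A minimal sketch of how they compose (the column key and values below are made up for illustration):

```python
from aind_data_transfer_service.configs.csv_handler import (
    create_nested_dict,
    nested_update,
)

# Expand a period-delimited key into a nested dict.
nested_val = {}
create_nested_dict(nested_val, "extra_configs.compression.level", "3")
assert nested_val == {"extra_configs": {"compression": {"level": "3"}}}

# Merge it into an existing config without clobbering sibling keys.
current = {"extra_configs": {"compression": {"codec": "zstd"}}}
nested_update(current, nested_val)
assert current == {
    "extra_configs": {"compression": {"codec": "zstd", "level": "3"}}
}
```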
@@ -29,7 +71,6 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     modality_configs = dict()
     job_configs = dict()
     check_s3_folder_exists_task = None
-    final_check_s3_folder_exist = None
     codeocean_tasks = dict()
     for key, value in row.items():
         # Strip white spaces and replace dashes with underscores
@@ -42,7 +83,9 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
             modality_parts = clean_key.split(".")
             modality_key = modality_parts[0]
             sub_key = (
-                "modality" if len(modality_parts) == 1 else modality_parts[1]
+                "modality"
+                if len(modality_parts) == 1
+                else ".".join(modality_parts[1:])
             )
             modality_configs.setdefault(modality_key, dict())
             # Temp backwards compatibility check
@@ -66,13 +109,22 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
                     job_settings=codeocean_pipeline_monitor_settings,
                 )
             else:
-                modality_configs[modality_key].update({sub_key: clean_val})
+                nested_val = dict()
+                create_nested_dict(
+                    dict_to_update=nested_val,
+                    key_string=sub_key,
+                    value=clean_val,
+                )
+                current_dict = deepcopy(
+                    modality_configs.get(modality_key, dict())
+                )
+                nested_update(current_dict, nested_val)
+                modality_configs[modality_key] = current_dict
         elif clean_key == "force_cloud_sync" and clean_val.upper() in [
             "TRUE",
             "T",
         ]:
             check_s3_folder_exists_task = {"skip_task": True}
-            final_check_s3_folder_exist = {"skip_task": True}
         else:
             job_configs[clean_key] = clean_val
     # Rename codeocean config keys with correct modality
@@ -93,8 +145,7 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     )
     tasks = {
         "gather_preliminary_metadata": metadata_task,
-        "check_s3_folder_exists_task": check_s3_folder_exists_task,
-        "final_check_s3_folder_exist": final_check_s3_folder_exist,
+        "check_s3_folder_exists": check_s3_folder_exists_task,
         "modality_transformation_settings": modality_tasks,
         "codeocean_pipeline_settings": None
         if codeocean_tasks == dict()
@@ -2,7 +2,7 @@
 
 import datetime
 from io import BytesIO
-from typing import Any, Dict, List
+from typing import Any, ClassVar, Dict, List
 
 from aind_data_schema_models.modalities import Modality
 from aind_data_schema_models.platforms import Platform
@@ -10,16 +10,16 @@ from openpyxl import Workbook
 from openpyxl.styles import Font
 from openpyxl.utils import get_column_letter
 from openpyxl.worksheet.datavalidation import DataValidation
+from pydantic import BaseModel
 
 
-# TODO: convert to pydantic model
-class JobUploadTemplate:
+class JobUploadTemplate(BaseModel):
     """Class to configure and create xlsx job upload template"""
 
-    FILE_NAME = "job_upload_template.xlsx"
-    NUM_TEMPLATE_ROWS = 20
-    XLSX_DATETIME_FORMAT = "YYYY-MM-DDTHH:mm:ss"
-    HEADERS = [
+    FILE_NAME: ClassVar[str] = "job_upload_template.xlsx"
+    _NUM_TEMPLATE_ROWS: ClassVar[int] = 20
+    _XLSX_DATETIME_FORMAT: ClassVar[str] = "YYYY-MM-DDTHH:mm:ss"
+    _HEADERS: ClassVar[List[str]] = [
         "job_type",
         "project_name",
         "platform",
@@ -31,7 +31,7 @@ class JobUploadTemplate:
         "modality1",
         "modality1.input_source",
     ]
-    SAMPLE_JOBS = [
+    _SAMPLE_JOBS: ClassVar[List[List[Any]]] = [
         [
             "default",
             "Behavior Platform",
@@ -68,8 +68,8 @@ class JobUploadTemplate:
         ],
     ]
 
-    @property
-    def validators(self) -> List[Dict[str, Any]]:
+    @classmethod
+    def _get_validators(cls) -> List[Dict[str, Any]]:
         """
         Returns
         -------
@@ -82,36 +82,36 @@ class JobUploadTemplate:
             "name": "platform",
             "type": "list",
             "options": list(Platform.abbreviation_map.keys()),
-            "column_indexes": [self.HEADERS.index("platform")],
+            "column_indexes": [cls._HEADERS.index("platform")],
         },
         {
             "name": "modality",
             "type": "list",
             "options": list(Modality.abbreviation_map.keys()),
             "column_indexes": [
-                self.HEADERS.index("modality0"),
-                self.HEADERS.index("modality1"),
+                cls._HEADERS.index("modality0"),
+                cls._HEADERS.index("modality1"),
             ],
         },
         {
             "name": "datetime",
             "type": "date",
-            "column_indexes": [self.HEADERS.index("acq_datetime")],
+            "column_indexes": [cls._HEADERS.index("acq_datetime")],
         },
     ]
 
-    @property
-    def excel_sheet_filestream(self) -> BytesIO:
+    @classmethod
+    def create_excel_sheet_filestream(cls) -> BytesIO:
         """Create job template as xlsx filestream"""
         xl_io = BytesIO()
         workbook = Workbook()
         workbook.iso_dates = True
         worksheet = workbook.active
-        worksheet.append(self.HEADERS)
-        for job in self.SAMPLE_JOBS:
+        worksheet.append(cls._HEADERS)
+        for job in cls._SAMPLE_JOBS:
             worksheet.append(job)
         # data validators
-        for validator in self.validators:
+        for validator in cls._get_validators():
             dv_type = validator["type"]
             dv_name = validator["name"]
             dv_params = {
@@ -127,17 +127,17 @@ class JobUploadTemplate:
             dv_params["prompt"] = f"Select a {dv_name} from the dropdown"
         elif dv_type == "date":
             dv_params["prompt"] = "Provide a {} using {}".format(
-                dv_name, self.XLSX_DATETIME_FORMAT
+                dv_name, cls._XLSX_DATETIME_FORMAT
             )
         dv = DataValidation(**dv_params)
         for i in validator["column_indexes"]:
             col = get_column_letter(i + 1)
-            col_range = f"{col}2:{col}{self.NUM_TEMPLATE_ROWS}"
+            col_range = f"{col}2:{col}{cls._NUM_TEMPLATE_ROWS}"
             dv.add(col_range)
             if dv_type != "date":
                 continue
             for (cell,) in worksheet[col_range]:
-                cell.number_format = self.XLSX_DATETIME_FORMAT
+                cell.number_format = cls._XLSX_DATETIME_FORMAT
         worksheet.add_data_validation(dv)
         # formatting
         bold = Font(bold=True)
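Because the constants are now `ClassVar`s and the builders are classmethods, the template can be generated without instantiating `JobUploadTemplate`; the `download_job_template` hunk in `server.py` below switches to this call pattern. A small usage sketch, writing the workbook to a local file for inspection:

```python
from aind_data_transfer_service.configs.job_upload_template import (
    JobUploadTemplate,
)

# Build the xlsx template in memory; no instance is required anymore.
xl_io = JobUploadTemplate.create_excel_sheet_filestream()

# Persist it locally, e.g. to open in Excel and check the validators.
with open(JobUploadTemplate.FILE_NAME, "wb") as f:
    f.write(xl_io.getvalue())
```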
@@ -290,11 +290,15 @@ class SubmitJobRequestV2(BaseSettings):
         # check against any jobs in the context
         current_jobs = (info.context or dict()).get("current_jobs", list())
         for job in current_jobs:
-            prefix = job.get("s3_prefix")
-            if (
-                prefix is not None
-                and prefix in jobs_map
-                and json.dumps(job, sort_keys=True) in jobs_map[prefix]
-            ):
-                raise ValueError(f"Job is already running/queued for {prefix}")
+            jobs_to_check = job.get("upload_jobs", [job])
+            for j in jobs_to_check:
+                prefix = j.get("s3_prefix")
+                if (
+                    prefix is not None
+                    and prefix in jobs_map
+                    and json.dumps(j, sort_keys=True) in jobs_map[prefix]
+                ):
+                    raise ValueError(
+                        f"Job is already running/queued for {prefix}"
+                    )
         return self
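The duplicate-job check now also sees dag runs from `run_list_of_jobs` (added to the polled `dag_ids` in `server.py` below), whose conf wraps individual jobs in an `upload_jobs` list. The `job.get("upload_jobs", [job])` idiom normalizes both conf shapes; a small sketch of that normalization, with hypothetical prefixes:

```python
def iter_upload_jobs(job: dict):
    """Yield the individual upload jobs inside a dag run conf.

    A run_list_of_jobs conf nests jobs under "upload_jobs"; a
    transform_and_upload_v2 conf is itself a single job.
    """
    yield from job.get("upload_jobs", [job])


flat = {"s3_prefix": "ecephys_123456_2024-01-01_01-01-01"}
batch = {
    "upload_jobs": [
        flat,
        {"s3_prefix": "behavior_654321_2024-01-02_02-02-02"},
    ]
}
assert list(iter_upload_jobs(flat)) == [flat]
assert len(list(iter_upload_jobs(batch))) == 2
```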
@@ -15,6 +15,7 @@ from aind_data_transfer_models import (
     __version__ as aind_data_transfer_models_version,
 )
 from aind_data_transfer_models.core import SubmitJobRequest, validation_context
+from authlib.integrations.starlette_client import OAuth
 from botocore.exceptions import ClientError
 from fastapi import Request
 from fastapi.responses import JSONResponse, StreamingResponse
@@ -23,9 +24,14 @@ from httpx import AsyncClient
 from openpyxl import load_workbook
 from pydantic import SecretStr, ValidationError
 from starlette.applications import Starlette
+from starlette.config import Config
+from starlette.middleware.sessions import SessionMiddleware
+from starlette.responses import RedirectResponse
 from starlette.routing import Route
 
-from aind_data_transfer_service import OPEN_DATA_BUCKET_NAME
+from aind_data_transfer_service import (
+    OPEN_DATA_BUCKET_NAME,
+)
 from aind_data_transfer_service import (
     __version__ as aind_data_transfer_service_version,
 )
@@ -33,14 +39,18 @@ from aind_data_transfer_service.configs.csv_handler import map_csv_row_to_job
 from aind_data_transfer_service.configs.job_configs import (
     BasicUploadJobConfigs as LegacyBasicUploadJobConfigs,
 )
-from aind_data_transfer_service.configs.job_configs import HpcJobConfigs
+from aind_data_transfer_service.configs.job_configs import (
+    HpcJobConfigs,
+)
 from aind_data_transfer_service.configs.job_upload_template import (
     JobUploadTemplate,
 )
 from aind_data_transfer_service.hpc.client import HpcClient, HpcClientConfigs
 from aind_data_transfer_service.hpc.models import HpcJobSubmitSettings
 from aind_data_transfer_service.log_handler import LoggingConfigs, get_logger
-from aind_data_transfer_service.models.core import SubmitJobRequestV2
+from aind_data_transfer_service.models.core import (
+    SubmitJobRequestV2,
+)
 from aind_data_transfer_service.models.core import (
     validation_context as validation_context_v2,
 )
@@ -95,6 +105,27 @@ def get_project_names() -> List[str]:
     return project_names
 
 
+def set_oauth() -> OAuth:
+    """Set up OAuth for the service"""
+    secrets_client = boto3.client("secretsmanager")
+    secret_response = secrets_client.get_secret_value(
+        SecretId=os.getenv("AIND_SSO_SECRET_NAME")
+    )
+    secret_value = json.loads(secret_response["SecretString"])
+    for secrets in secret_value:
+        os.environ[secrets] = secret_value[secrets]
+    config = Config()
+    oauth = OAuth(config)
+    oauth.register(
+        name="azure",
+        client_id=config("CLIENT_ID"),
+        client_secret=config("CLIENT_SECRET"),
+        server_metadata_url=config("AUTHORITY"),
+        client_kwargs={"scope": "openid email profile"},
+    )
+    return oauth
+
+
 def get_job_types(version: Optional[str] = None) -> List[str]:
     """Get a list of job_types"""
     params = get_parameter_infos(version)
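`set_oauth` copies the secret's key/value pairs into `os.environ` before building `Config()`, because starlette's `Config` falls back to environment variables when resolving `config("CLIENT_ID")` and friends. A sketch of the secret payload shape the function expects (key names come from the code; the values are placeholders, not real credentials):

```python
import json

# Hypothetical SecretString stored under AIND_SSO_SECRET_NAME.
secret_string = json.dumps(
    {
        "CLIENT_ID": "00000000-0000-0000-0000-000000000000",
        "CLIENT_SECRET": "placeholder-not-a-real-secret",
        # server_metadata_url expects an OpenID Connect discovery document.
        "AUTHORITY": (
            "https://login.microsoftonline.com/<tenant-id>/v2.0/"
            ".well-known/openid-configuration"
        ),
    }
)
```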
@@ -228,7 +259,7 @@ async def validate_csv(request: Request):
     data = csv_io.getvalue()
     csv_reader = csv.DictReader(io.StringIO(data))
     params = AirflowDagRunsRequestParameters(
-        dag_ids=["transform_and_upload_v2"],
+        dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
         states=["running", "queued"],
     )
     _, current_jobs = await get_airflow_jobs(
@@ -324,7 +355,8 @@ async def validate_json_v2(request: Request):
     content = await request.json()
     try:
         params = AirflowDagRunsRequestParameters(
-            dag_ids=["transform_and_upload_v2"], states=["running", "queued"]
+            dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
+            states=["running", "queued"],
         )
         _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
         context = {
@@ -439,7 +471,8 @@ async def submit_jobs_v2(request: Request):
     content = await request.json()
     try:
         params = AirflowDagRunsRequestParameters(
-            dag_ids=["transform_and_upload_v2"], states=["running", "queued"]
+            dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
+            states=["running", "queued"],
         )
         _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
         context = {
@@ -902,10 +935,10 @@ async def get_task_logs(request: Request):
 async def index(request: Request):
     """GET|POST /: form handler"""
     return templates.TemplateResponse(
+        request=request,
         name="index.html",
         context=(
             {
-                "request": request,
                 "project_names_url": project_names_url,
             }
         ),
@@ -918,10 +951,10 @@ async def job_tasks_table(request: Request):
     response_tasks_json = json.loads(response_tasks.body)
     data = response_tasks_json.get("data")
     return templates.TemplateResponse(
+        request=request,
         name="job_tasks_table.html",
         context=(
             {
-                "request": request,
                 "status_code": response_tasks.status_code,
                 "message": response_tasks_json.get("message"),
                 "errors": data.get("errors", []),
@@ -938,10 +971,10 @@ async def task_logs(request: Request):
     response_tasks_json = json.loads(response_tasks.body)
     data = response_tasks_json.get("data")
     return templates.TemplateResponse(
+        request=request,
         name="task_logs.html",
         context=(
             {
-                "request": request,
                 "status_code": response_tasks.status_code,
                 "message": response_tasks_json.get("message"),
                 "errors": data.get("errors", []),
@@ -955,10 +988,10 @@ async def jobs(request: Request):
     """Get Job Status page with pagination"""
     dag_ids = AirflowDagRunsRequestParameters.model_fields["dag_ids"].default
     return templates.TemplateResponse(
+        request=request,
         name="job_status.html",
         context=(
             {
-                "request": request,
                 "project_names_url": project_names_url,
                 "dag_ids": dag_ids,
             }
@@ -969,10 +1002,10 @@ async def jobs(request: Request):
 async def job_params(request: Request):
     """Get Job Parameters page"""
     return templates.TemplateResponse(
+        request=request,
         name="job_params.html",
         context=(
             {
-                "request": request,
                 "project_names_url": os.getenv(
                     "AIND_METADATA_SERVICE_PROJECT_NAMES_URL"
                 ),
@@ -987,8 +1020,7 @@ async def download_job_template(_: Request):
     """Get job template as xlsx filestream for download"""
 
     try:
-        job_template = JobUploadTemplate()
-        xl_io = job_template.excel_sheet_filestream
+        xl_io = JobUploadTemplate.create_excel_sheet_filestream()
         return StreamingResponse(
             io.BytesIO(xl_io.getvalue()),
             media_type=(
@@ -997,7 +1029,7 @@ async def download_job_template(_: Request):
             ),
             headers={
                 "Content-Disposition": (
-                    f"attachment; filename={job_template.FILE_NAME}"
+                    f"attachment; filename={JobUploadTemplate.FILE_NAME}"
                 )
             },
             status_code=200,
@@ -1089,6 +1121,60 @@ def get_parameter(request: Request):
     )
 
 
+async def admin(request: Request):
+    """Get admin page if authenticated, else redirect to login."""
+    user = request.session.get("user")
+    if os.getenv("ENV_NAME") == "local":
+        user = {"name": "local user"}
+    if user:
+        return templates.TemplateResponse(
+            request=request,
+            name="admin.html",
+            context=(
+                {
+                    "project_names_url": project_names_url,
+                    "user_name": user.get("name", "unknown"),
+                    "user_email": user.get("email", "unknown"),
+                }
+            ),
+        )
+    return RedirectResponse(url="/login")
+
+
+async def login(request: Request):
+    """Redirect to Azure login page"""
+    oauth = set_oauth()
+    redirect_uri = request.url_for("auth")
+    response = await oauth.azure.authorize_redirect(request, redirect_uri)
+    return response
+
+
+async def logout(request: Request):
+    """Logout user and clear session"""
+    request.session.pop("user", None)
+    return RedirectResponse(url="/")
+
+
+async def auth(request: Request):
+    """Authenticate user and store user info in session"""
+    oauth = set_oauth()
+    try:
+        token = await oauth.azure.authorize_access_token(request)
+        user = token.get("userinfo")
+        if not user:
+            raise ValueError("User info not found in access token.")
+        request.session["user"] = dict(user)
+    except Exception as error:
+        return JSONResponse(
+            content={
+                "message": "Error Logging In",
+                "data": {"error": f"{error.__class__.__name__}{error.args}"},
+            },
+            status_code=500,
+        )
+    return RedirectResponse(url="/admin")
+
+
 routes = [
     Route("/", endpoint=index, methods=["GET", "POST"]),
     Route("/api/validate_csv", endpoint=validate_csv_legacy, methods=["POST"]),
@@ -1131,6 +1217,11 @@ routes = [
         endpoint=download_job_template,
         methods=["GET"],
     ),
+    Route("/login", login, methods=["GET"]),
+    Route("/logout", logout, methods=["GET"]),
+    Route("/auth", auth, methods=["GET"]),
+    Route("/admin", admin, methods=["GET"]),
 ]
 
 app = Starlette(routes=routes)
+app.add_middleware(SessionMiddleware, secret_key=None)
@@ -0,0 +1,36 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="UTF-8">
+    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet">
+    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js"></script>
+    <title>{% block title %} {% endblock %} AIND Data Transfer Service Admin</title>
+    <style>
+        body {
+            margin: 20px;
+            font-family: arial, sans-serif;
+        }
+        nav {
+            height: 40px;
+        }
+    </style>
+</head>
+<body>
+    <nav>
+        <a href="/">Submit Jobs</a> |
+        <a href="/jobs">Job Status</a> |
+        <a href="/job_params">Job Parameters</a> |
+        <a title="Download job template as .xslx" href="/api/job_upload_template" download>Job Submit Template</a> |
+        <a title="List of project names" href="{{ project_names_url }}" target="_blank">Project Names</a> |
+        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io"
+            target="_blank">Help</a> |
+        <a href="/admin">Admin</a> |
+        <a href="/logout">Log out</a>
+    </nav>
+    <div>
+        <h3>Admin</h3>
+        <div>Hello {{user_name}}, welcome to the admin page</div>
+        <div>Email: {{user_email}}</div>
+    </div>
+</body>
+</html>
@@ -49,7 +49,8 @@
         <a href="/job_params">Job Parameters</a> |
         <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
         <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
-        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+        <a href="/admin">Admin</a>
     </nav>
     <br>
     <div>
@@ -34,7 +34,8 @@
         <a href="/job_params">Job Parameters</a> |
         <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
         <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
-        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+        <a href="/admin">Admin</a>
     </nav>
     <div class="content">
         <h4 class="mb-2">
@@ -32,7 +32,8 @@
         <a href="/job_params">Job Parameters</a> |
         <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
         <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
-        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+        <a href="/admin">Admin</a>
     </nav>
     <div class="content">
         <!-- display total entries -->
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aind-data-transfer-service
-Version: 1.14.0
+Version: 1.16.0
 Summary: Service that handles requests to upload data to the cloud
 Author: Allen Institute for Neural Dynamics
 License: MIT
@@ -28,16 +28,17 @@ Requires-Dist: aind-data-transfer-models==0.17.0; extra == "server"
 Requires-Dist: aind-metadata-mapper==0.23.0; extra == "server"
 Requires-Dist: boto3; extra == "server"
 Requires-Dist: boto3-stubs[ssm]; extra == "server"
-Requires-Dist: fastapi; extra == "server"
+Requires-Dist: fastapi>=0.115.13; extra == "server"
 Requires-Dist: httpx; extra == "server"
 Requires-Dist: jinja2; extra == "server"
-Requires-Dist: starlette; extra == "server"
+Requires-Dist: starlette<0.47.0,>=0.40.0; extra == "server"
 Requires-Dist: starlette_wtf; extra == "server"
 Requires-Dist: uvicorn[standard]; extra == "server"
 Requires-Dist: wtforms; extra == "server"
 Requires-Dist: requests==2.25.0; extra == "server"
 Requires-Dist: openpyxl; extra == "server"
 Requires-Dist: python-logging-loki; extra == "server"
+Requires-Dist: authlib; extra == "server"
 Dynamic: license-file
 
 # aind-data-transfer-service
@@ -1,23 +1,24 @@
-aind_data_transfer_service/__init__.py,sha256=02hESRKUPpXuMD22H3psRTP20D33fQMG08afGio0d9k,272
+aind_data_transfer_service/__init__.py,sha256=BHEL9NBC6zix81jxrsxrEp-bqoWgrWqKd2qrtmDv1fg,272
 aind_data_transfer_service/log_handler.py,sha256=c7a-gLmZeRpeCUBwCz6XsTszWXQeQdR7eKZtas4llXM,1700
-aind_data_transfer_service/server.py,sha256=ozavVyxMpZEwDAQ0OwRAwy_9CHXoLcUZ2rriSX1qH04,41643
+aind_data_transfer_service/server.py,sha256=cJf57MXuzKQb9oBK6n0z7gksHMSbssQ0f409-rOqoXI,44506
 aind_data_transfer_service/configs/__init__.py,sha256=9W5GTuso9Is1B9X16RXcdb_GxasZvj6qDzOBDv0AbTc,36
-aind_data_transfer_service/configs/csv_handler.py,sha256=9jM0fUlWCzmqTC7ubAeFCl0eEIX5BQvHcPPPTPngcog,4374
+aind_data_transfer_service/configs/csv_handler.py,sha256=hCdfAYZW_49-l1rbua5On2Tw2ks674Z-MgB_NJlIkU4,5746
 aind_data_transfer_service/configs/job_configs.py,sha256=T-h5N6lyY9xTZ_xg_5FxkyYuMdagApbE6xalxFQ-bqA,18848
-aind_data_transfer_service/configs/job_upload_template.py,sha256=dJo_nuGIjHNzkGtZmJd7-qqRz8s8R8RcWnYV3Hi_8QE,4990
+aind_data_transfer_service/configs/job_upload_template.py,sha256=aC5m1uD_YcpbggFQ-yZ7ZJSUUGX1yQqQLF3SwmljrLk,5127
 aind_data_transfer_service/hpc/__init__.py,sha256=YNc68YNlmXwKIPFMIViz_K4XzVVHkLPEBOFyO5DKMKI,53
 aind_data_transfer_service/hpc/client.py,sha256=-JSxAWn96_XOIDwhsXAHK3TZAdckddUhtcCzRHnaTqA,4700
 aind_data_transfer_service/hpc/models.py,sha256=-7HhV16s_MUyKPy0x0FGIbnq8DPL2qJAzJO5G7003AE,16184
 aind_data_transfer_service/models/__init__.py,sha256=Meym73bEZ9nQr4QoeyhQmV3nRTYtd_4kWKPNygsBfJg,25
-aind_data_transfer_service/models/core.py,sha256=_DHYFQL8kgXWgjvE21mWkRIVDCrmReskgwFAaTb5KQI,9971
+aind_data_transfer_service/models/core.py,sha256=uXtPUqjxKalg-sE8MxaJr11w_T_KKBRBSJuUgwoMZlQ,10135
 aind_data_transfer_service/models/internal.py,sha256=MGQrPuHrR21nn4toqdTCIEDW6MG7pWRajoPqD3j-ST0,9706
-aind_data_transfer_service/templates/index.html,sha256=KoqedswLWOiqgtkk2Z3HrDfEJycS_SJ7ueiuYGhL2Yo,11289
-aind_data_transfer_service/templates/job_params.html,sha256=vqIdNQsZTM0kq3Wa9u-VjmmMa0UzBTpK02WpOSatXBQ,8817
-aind_data_transfer_service/templates/job_status.html,sha256=vIOaJGJM78hOWTLTAzMfHjG9sNqPvS-muAyXYQtpnYI,16901
+aind_data_transfer_service/templates/admin.html,sha256=KvQB54-mD8LL8wnDd3G9a8lDYAT8BWK13_MhpIJAiSY,1200
+aind_data_transfer_service/templates/index.html,sha256=TDmmHlhWFPnQrk6nk1OhNjC3SapZtX0TXeuUonoCO7g,11326
+aind_data_transfer_service/templates/job_params.html,sha256=ivofS1CjSnC87T5J2BTsNtVzq6xnUqlPZePdGt_fc4U,8854
+aind_data_transfer_service/templates/job_status.html,sha256=5lUUGZL-5urppG610qDOgpfIE-OcQH57gFWvRA5pBNM,16938
 aind_data_transfer_service/templates/job_tasks_table.html,sha256=rWFukhjZ4dhPyabe372tmi4lbQS2fyELZ7Awbn5Un4g,6181
 aind_data_transfer_service/templates/task_logs.html,sha256=y1GnQft0S50ghPb2xJDjAlefymB9a4zYdMikUFV7Tl4,918
-aind_data_transfer_service-1.14.0.dist-info/licenses/LICENSE,sha256=U0Y7B3gZJHXpjJVLgTQjM8e_c8w4JJpLgGhIdsoFR1Y,1092
-aind_data_transfer_service-1.14.0.dist-info/METADATA,sha256=N7A2du3anL11r5AHh8NXI-yPtB8Ilsm1saCJ-_o-200,2410
-aind_data_transfer_service-1.14.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-aind_data_transfer_service-1.14.0.dist-info/top_level.txt,sha256=XmxH0q27Jholj2-VYh-6WMrh9Lw6kkuCX_fdsj3SaFE,27
-aind_data_transfer_service-1.14.0.dist-info/RECORD,,
+aind_data_transfer_service-1.16.0.dist-info/licenses/LICENSE,sha256=U0Y7B3gZJHXpjJVLgTQjM8e_c8w4JJpLgGhIdsoFR1Y,1092
+aind_data_transfer_service-1.16.0.dist-info/METADATA,sha256=YkT9jGjZKxpa2O5qzekouiwzsuviOT_2w-7kg3511Ck,2478
+aind_data_transfer_service-1.16.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aind_data_transfer_service-1.16.0.dist-info/top_level.txt,sha256=XmxH0q27Jholj2-VYh-6WMrh9Lw6kkuCX_fdsj3SaFE,27
+aind_data_transfer_service-1.16.0.dist-info/RECORD,,