aind-data-transfer-service 1.14.0__py3-none-any.whl → 1.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aind-data-transfer-service might be problematic.

@@ -1,7 +1,7 @@
  """Init package"""
  import os

- __version__ = "1.14.0"
+ __version__ = "1.15.0"

  # Global constants
  OPEN_DATA_BUCKET_NAME = os.getenv("OPEN_DATA_BUCKET_NAME", "open")
@@ -2,7 +2,7 @@

  import datetime
  from io import BytesIO
- from typing import Any, Dict, List
+ from typing import Any, ClassVar, Dict, List

  from aind_data_schema_models.modalities import Modality
  from aind_data_schema_models.platforms import Platform
@@ -10,16 +10,16 @@ from openpyxl import Workbook
  from openpyxl.styles import Font
  from openpyxl.utils import get_column_letter
  from openpyxl.worksheet.datavalidation import DataValidation
+ from pydantic import BaseModel


- # TODO: convert to pydantic model
- class JobUploadTemplate:
+ class JobUploadTemplate(BaseModel):
  """Class to configure and create xlsx job upload template"""

- FILE_NAME = "job_upload_template.xlsx"
- NUM_TEMPLATE_ROWS = 20
- XLSX_DATETIME_FORMAT = "YYYY-MM-DDTHH:mm:ss"
- HEADERS = [
+ FILE_NAME: ClassVar[str] = "job_upload_template.xlsx"
+ _NUM_TEMPLATE_ROWS: ClassVar[int] = 20
+ _XLSX_DATETIME_FORMAT: ClassVar[str] = "YYYY-MM-DDTHH:mm:ss"
+ _HEADERS: ClassVar[List[str]] = [
  "job_type",
  "project_name",
  "platform",
@@ -31,7 +31,7 @@ class JobUploadTemplate:
  "modality1",
  "modality1.input_source",
  ]
- SAMPLE_JOBS = [
+ _SAMPLE_JOBS: ClassVar[List[List[Any]]] = [
  [
  "default",
  "Behavior Platform",
@@ -68,8 +68,8 @@ class JobUploadTemplate:
  ],
  ]

- @property
- def validators(self) -> List[Dict[str, Any]]:
+ @classmethod
+ def _get_validators(cls) -> List[Dict[str, Any]]:
  """
  Returns
  -------
@@ -82,36 +82,36 @@ class JobUploadTemplate:
  "name": "platform",
  "type": "list",
  "options": list(Platform.abbreviation_map.keys()),
- "column_indexes": [self.HEADERS.index("platform")],
+ "column_indexes": [cls._HEADERS.index("platform")],
  },
  {
  "name": "modality",
  "type": "list",
  "options": list(Modality.abbreviation_map.keys()),
  "column_indexes": [
- self.HEADERS.index("modality0"),
- self.HEADERS.index("modality1"),
+ cls._HEADERS.index("modality0"),
+ cls._HEADERS.index("modality1"),
  ],
  },
  {
  "name": "datetime",
  "type": "date",
- "column_indexes": [self.HEADERS.index("acq_datetime")],
+ "column_indexes": [cls._HEADERS.index("acq_datetime")],
  },
  ]

- @property
- def excel_sheet_filestream(self) -> BytesIO:
+ @classmethod
+ def create_excel_sheet_filestream(cls) -> BytesIO:
  """Create job template as xlsx filestream"""
  xl_io = BytesIO()
  workbook = Workbook()
  workbook.iso_dates = True
  worksheet = workbook.active
- worksheet.append(self.HEADERS)
- for job in self.SAMPLE_JOBS:
+ worksheet.append(cls._HEADERS)
+ for job in cls._SAMPLE_JOBS:
  worksheet.append(job)
  # data validators
- for validator in self.validators:
+ for validator in cls._get_validators():
  dv_type = validator["type"]
  dv_name = validator["name"]
  dv_params = {
@@ -127,17 +127,17 @@ class JobUploadTemplate:
  dv_params["prompt"] = f"Select a {dv_name} from the dropdown"
  elif dv_type == "date":
  dv_params["prompt"] = "Provide a {} using {}".format(
- dv_name, self.XLSX_DATETIME_FORMAT
+ dv_name, cls._XLSX_DATETIME_FORMAT
  )
  dv = DataValidation(**dv_params)
  for i in validator["column_indexes"]:
  col = get_column_letter(i + 1)
- col_range = f"{col}2:{col}{self.NUM_TEMPLATE_ROWS}"
+ col_range = f"{col}2:{col}{cls._NUM_TEMPLATE_ROWS}"
  dv.add(col_range)
  if dv_type != "date":
  continue
  for (cell,) in worksheet[col_range]:
- cell.number_format = self.XLSX_DATETIME_FORMAT
+ cell.number_format = cls._XLSX_DATETIME_FORMAT
  worksheet.add_data_validation(dv)
  # formatting
  bold = Font(bold=True)
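With this refactor, JobUploadTemplate no longer needs to be instantiated: the template constants are ClassVars and the filestream is built by a classmethod. A minimal usage sketch (illustrative only, assuming the package is installed; the output path simply reuses FILE_NAME):

    # Hypothetical usage of the refactored class; not part of the diff.
    from aind_data_transfer_service.configs.job_upload_template import (
        JobUploadTemplate,
    )

    xl_io = JobUploadTemplate.create_excel_sheet_filestream()
    with open(JobUploadTemplate.FILE_NAME, "wb") as f:
        f.write(xl_io.getvalue())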
@@ -290,11 +290,15 @@ class SubmitJobRequestV2(BaseSettings):
  # check against any jobs in the context
  current_jobs = (info.context or dict()).get("current_jobs", list())
  for job in current_jobs:
- prefix = job.get("s3_prefix")
- if (
- prefix is not None
- and prefix in jobs_map
- and json.dumps(job, sort_keys=True) in jobs_map[prefix]
- ):
- raise ValueError(f"Job is already running/queued for {prefix}")
+ jobs_to_check = job.get("upload_jobs", [job])
+ for j in jobs_to_check:
+ prefix = j.get("s3_prefix")
+ if (
+ prefix is not None
+ and prefix in jobs_map
+ and json.dumps(j, sort_keys=True) in jobs_map[prefix]
+ ):
+ raise ValueError(
+ f"Job is already running/queued for {prefix}"
+ )
  return self
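The duplicate-job check in SubmitJobRequestV2 now unwraps entries that carry an "upload_jobs" list (such as runs of the run_list_of_jobs DAG added to the queries further below) and falls back to treating the entry as a single job. A minimal sketch of that logic, with the helper name and dict shapes taken as assumptions from the diff:

    # Illustrative helper; mirrors the validator logic above under assumed shapes.
    import json


    def find_duplicate_prefixes(current_jobs, jobs_map):
        """Return s3 prefixes that already have an identical running/queued job."""
        duplicates = []
        for job in current_jobs:
            # Wrapped runs carry a list under "upload_jobs"; plain runs are single jobs.
            for j in job.get("upload_jobs", [job]):
                prefix = j.get("s3_prefix")
                if (
                    prefix is not None
                    and prefix in jobs_map
                    and json.dumps(j, sort_keys=True) in jobs_map[prefix]
                ):
                    duplicates.append(prefix)
        return duplicates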
@@ -15,6 +15,7 @@ from aind_data_transfer_models (
  __version__ as aind_data_transfer_models_version,
  )
  from aind_data_transfer_models.core import SubmitJobRequest, validation_context
+ from authlib.integrations.starlette_client import OAuth
  from botocore.exceptions import ClientError
  from fastapi import Request
  from fastapi.responses import JSONResponse, StreamingResponse
@@ -23,6 +24,9 @@ from httpx import AsyncClient
  from openpyxl import load_workbook
  from pydantic import SecretStr, ValidationError
  from starlette.applications import Starlette
+ from starlette.config import Config
+ from starlette.middleware.sessions import SessionMiddleware
+ from starlette.responses import RedirectResponse
  from starlette.routing import Route

  from aind_data_transfer_service import OPEN_DATA_BUCKET_NAME
@@ -95,6 +99,27 @@ def get_project_names() -> List[str]:
  return project_names


+ def set_oauth() -> OAuth:
+ """Set up OAuth for the service"""
+ secrets_client = boto3.client("secretsmanager")
+ secret_response = secrets_client.get_secret_value(
+ SecretId=os.getenv("AIND_SSO_SECRET_NAME")
+ )
+ secret_value = json.loads(secret_response["SecretString"])
+ for secrets in secret_value:
+ os.environ[secrets] = secret_value[secrets]
+ config = Config()
+ oauth = OAuth(config)
+ oauth.register(
+ name="azure",
+ client_id=config("CLIENT_ID"),
+ client_secret=config("CLIENT_SECRET"),
+ server_metadata_url=config("AUTHORITY"),
+ client_kwargs={"scope": "openid email profile"},
+ )
+ return oauth
+
+
  def get_job_types(version: Optional[str] = None) -> List[str]:
  """Get a list of job_types"""
  params = get_parameter_infos(version)
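set_oauth pulls the SSO settings from AWS Secrets Manager (the secret name comes from AIND_SSO_SECRET_NAME), exports each key/value pair as an environment variable, and registers an Azure provider with Authlib through starlette's Config, which resolves those variables. The payload shape below is inferred from the CLIENT_ID/CLIENT_SECRET/AUTHORITY lookups and is an assumption, not package documentation:

    # Assumed Secrets Manager payload consumed by set_oauth (illustrative values).
    import json

    example_secret_string = json.dumps(
        {
            "CLIENT_ID": "<azure-app-client-id>",
            "CLIENT_SECRET": "<azure-app-client-secret>",
            # Passed as server_metadata_url, so presumably an OpenID Connect
            # discovery URL for the tenant.
            "AUTHORITY": "https://login.microsoftonline.com/<tenant-id>/v2.0/.well-known/openid-configuration",
        }
    )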
@@ -228,7 +253,7 @@ async def validate_csv(request: Request):
  data = csv_io.getvalue()
  csv_reader = csv.DictReader(io.StringIO(data))
  params = AirflowDagRunsRequestParameters(
- dag_ids=["transform_and_upload_v2"],
+ dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
  states=["running", "queued"],
  )
  _, current_jobs = await get_airflow_jobs(
@@ -324,7 +349,8 @@ async def validate_json_v2(request: Request):
  content = await request.json()
  try:
  params = AirflowDagRunsRequestParameters(
- dag_ids=["transform_and_upload_v2"], states=["running", "queued"]
+ dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
+ states=["running", "queued"],
  )
  _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
  context = {
@@ -439,7 +465,8 @@ async def submit_jobs_v2(request: Request):
  content = await request.json()
  try:
  params = AirflowDagRunsRequestParameters(
- dag_ids=["transform_and_upload_v2"], states=["running", "queued"]
+ dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
+ states=["running", "queued"],
  )
  _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
  context = {
@@ -987,8 +1014,7 @@ async def download_job_template(_: Request):
  """Get job template as xlsx filestream for download"""

  try:
- job_template = JobUploadTemplate()
- xl_io = job_template.excel_sheet_filestream
+ xl_io = JobUploadTemplate.create_excel_sheet_filestream()
  return StreamingResponse(
  io.BytesIO(xl_io.getvalue()),
  media_type=(
@@ -997,7 +1023,7 @@ async def download_job_template(_: Request):
  ),
  headers={
  "Content-Disposition": (
- f"attachment; filename={job_template.FILE_NAME}"
+ f"attachment; filename={JobUploadTemplate.FILE_NAME}"
  )
  },
  status_code=200,
@@ -1089,6 +1115,60 @@ def get_parameter(request: Request):
  )


+ async def admin(request: Request):
+ """Get admin page if authenticated, else redirect to login."""
+ user = request.session.get("user")
+ if os.getenv("ENV_NAME") == "local":
+ user = {"name": "local user"}
+ if user:
+ return templates.TemplateResponse(
+ name="admin.html",
+ context=(
+ {
+ "request": request,
+ "project_names_url": project_names_url,
+ "user_name": user.get("name", "unknown"),
+ "user_email": user.get("email", "unknown"),
+ }
+ ),
+ )
+ return RedirectResponse(url="/login")
+
+
+ async def login(request: Request):
+ """Redirect to Azure login page"""
+ oauth = set_oauth()
+ redirect_uri = request.url_for("auth")
+ response = await oauth.azure.authorize_redirect(request, redirect_uri)
+ return response
+
+
+ async def logout(request: Request):
+ """Logout user and clear session"""
+ request.session.pop("user", None)
+ return RedirectResponse(url="/")
+
+
+ async def auth(request: Request):
+ """Authenticate user and store user info in session"""
+ oauth = set_oauth()
+ try:
+ token = await oauth.azure.authorize_access_token(request)
+ user = token.get("userinfo")
+ if not user:
+ raise ValueError("User info not found in access token.")
+ request.session["user"] = dict(user)
+ except Exception as error:
+ return JSONResponse(
+ content={
+ "message": "Error Logging In",
+ "data": {"error": f"{error.__class__.__name__}{error.args}"},
+ },
+ status_code=500,
+ )
+ return RedirectResponse(url="/admin")
+
+
  routes = [
  Route("/", endpoint=index, methods=["GET", "POST"]),
  Route("/api/validate_csv", endpoint=validate_csv_legacy, methods=["POST"]),
@@ -1131,6 +1211,11 @@ routes = [
  endpoint=download_job_template,
  methods=["GET"],
  ),
+ Route("/login", login, methods=["GET"]),
+ Route("/logout", logout, methods=["GET"]),
+ Route("/auth", auth, methods=["GET"]),
+ Route("/admin", admin, methods=["GET"]),
  ]

  app = Starlette(routes=routes)
+ app.add_middleware(SessionMiddleware, secret_key=None)
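The new /admin page is gated on request.session: /login starts the Azure redirect, /auth stores the returned userinfo in the session, and SessionMiddleware (added above) backs request.session with a signed cookie. A standalone sketch of the same pattern follows; the SESSION_SECRET_KEY variable and the /protected and fake_login names are illustrative and not part of the package:

    # Self-contained sketch of the session-gated pattern; names are illustrative.
    import os

    from starlette.applications import Starlette
    from starlette.middleware.sessions import SessionMiddleware
    from starlette.responses import PlainTextResponse, RedirectResponse
    from starlette.routing import Route


    async def protected(request):
        # SessionMiddleware exposes the signed session cookie as request.session
        user = request.session.get("user")
        if user:
            return PlainTextResponse(f"Hello {user.get('name', 'unknown')}")
        return RedirectResponse(url="/login")


    async def fake_login(request):
        # Stand-in for the OAuth round trip: store a user dict in the session
        request.session["user"] = {"name": "local user"}
        return RedirectResponse(url="/protected")


    app = Starlette(
        routes=[
            Route("/protected", protected, methods=["GET"]),
            Route("/login", fake_login, methods=["GET"]),
        ]
    )
    app.add_middleware(
        SessionMiddleware, secret_key=os.getenv("SESSION_SECRET_KEY", "dev-only")
    )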
@@ -0,0 +1,36 @@
+ <!DOCTYPE html>
+ <html>
+ <head>
+ <meta charset="UTF-8">
+ <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet">
+ <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js"></script>
+ <title>{% block title %} {% endblock %} AIND Data Transfer Service Admin</title>
+ <style>
+ body {
+ margin: 20px;
+ font-family: arial, sans-serif;
+ }
+ nav {
+ height: 40px;
+ }
+ </style>
+ </head>
+ <body>
+ <nav>
+ <a href="/">Submit Jobs</a> |
+ <a href="/jobs">Job Status</a> |
+ <a href="/job_params">Job Parameters</a> |
+ <a title="Download job template as .xslx" href="/api/job_upload_template" download>Job Submit Template</a> |
+ <a title="List of project names" href="{{ project_names_url }}" target="_blank">Project Names</a> |
+ <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io"
+ target="_blank">Help</a> |
+ <a href="/admin">Admin</a> |
+ <a href="/logout">Log out</a>
+ </nav>
+ <div>
+ <h3>Admin</h3>
+ <div>Hello {{user_name}}, welcome to the admin page</div>
+ <div>Email: {{user_email}}</div>
+ </div>
+ </body>
+ </html>
@@ -49,7 +49,8 @@
  <a href="/job_params">Job Parameters</a> |
  <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
  <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
- <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+ <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+ <a href="/admin">Admin</a>
  </nav>
  <br>
  <div>
@@ -34,7 +34,8 @@
  <a href="/job_params">Job Parameters</a> |
  <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
  <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
- <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+ <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+ <a href="/admin">Admin</a>
  </nav>
  <div class="content">
  <h4 class="mb-2">
@@ -32,7 +32,8 @@
  <a href="/job_params">Job Parameters</a> |
  <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
  <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
- <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+ <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+ <a href="/admin">Admin</a>
  </nav>
  <div class="content">
  <!-- display total entries -->
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: aind-data-transfer-service
- Version: 1.14.0
+ Version: 1.15.0
  Summary: Service that handles requests to upload data to the cloud
  Author: Allen Institute for Neural Dynamics
  License: MIT
@@ -38,6 +38,7 @@ Requires-Dist: wtforms; extra == "server"
  Requires-Dist: requests==2.25.0; extra == "server"
  Requires-Dist: openpyxl; extra == "server"
  Requires-Dist: python-logging-loki; extra == "server"
+ Requires-Dist: authlib; extra == "server"
  Dynamic: license-file

  # aind-data-transfer-service
@@ -1,23 +1,24 @@
- aind_data_transfer_service/__init__.py,sha256=02hESRKUPpXuMD22H3psRTP20D33fQMG08afGio0d9k,272
+ aind_data_transfer_service/__init__.py,sha256=lho7ClOyDOm8TOnoEsiTmbuidZwwCwry8o_Vu_mJ5qI,272
  aind_data_transfer_service/log_handler.py,sha256=c7a-gLmZeRpeCUBwCz6XsTszWXQeQdR7eKZtas4llXM,1700
- aind_data_transfer_service/server.py,sha256=ozavVyxMpZEwDAQ0OwRAwy_9CHXoLcUZ2rriSX1qH04,41643
+ aind_data_transfer_service/server.py,sha256=8TRxybpk8hkqPng_6mEZc9wv8ofGErgYf0BSPk-8VAU,44545
  aind_data_transfer_service/configs/__init__.py,sha256=9W5GTuso9Is1B9X16RXcdb_GxasZvj6qDzOBDv0AbTc,36
  aind_data_transfer_service/configs/csv_handler.py,sha256=9jM0fUlWCzmqTC7ubAeFCl0eEIX5BQvHcPPPTPngcog,4374
  aind_data_transfer_service/configs/job_configs.py,sha256=T-h5N6lyY9xTZ_xg_5FxkyYuMdagApbE6xalxFQ-bqA,18848
- aind_data_transfer_service/configs/job_upload_template.py,sha256=dJo_nuGIjHNzkGtZmJd7-qqRz8s8R8RcWnYV3Hi_8QE,4990
+ aind_data_transfer_service/configs/job_upload_template.py,sha256=aC5m1uD_YcpbggFQ-yZ7ZJSUUGX1yQqQLF3SwmljrLk,5127
  aind_data_transfer_service/hpc/__init__.py,sha256=YNc68YNlmXwKIPFMIViz_K4XzVVHkLPEBOFyO5DKMKI,53
  aind_data_transfer_service/hpc/client.py,sha256=-JSxAWn96_XOIDwhsXAHK3TZAdckddUhtcCzRHnaTqA,4700
  aind_data_transfer_service/hpc/models.py,sha256=-7HhV16s_MUyKPy0x0FGIbnq8DPL2qJAzJO5G7003AE,16184
  aind_data_transfer_service/models/__init__.py,sha256=Meym73bEZ9nQr4QoeyhQmV3nRTYtd_4kWKPNygsBfJg,25
- aind_data_transfer_service/models/core.py,sha256=_DHYFQL8kgXWgjvE21mWkRIVDCrmReskgwFAaTb5KQI,9971
+ aind_data_transfer_service/models/core.py,sha256=uXtPUqjxKalg-sE8MxaJr11w_T_KKBRBSJuUgwoMZlQ,10135
  aind_data_transfer_service/models/internal.py,sha256=MGQrPuHrR21nn4toqdTCIEDW6MG7pWRajoPqD3j-ST0,9706
- aind_data_transfer_service/templates/index.html,sha256=KoqedswLWOiqgtkk2Z3HrDfEJycS_SJ7ueiuYGhL2Yo,11289
- aind_data_transfer_service/templates/job_params.html,sha256=vqIdNQsZTM0kq3Wa9u-VjmmMa0UzBTpK02WpOSatXBQ,8817
- aind_data_transfer_service/templates/job_status.html,sha256=vIOaJGJM78hOWTLTAzMfHjG9sNqPvS-muAyXYQtpnYI,16901
+ aind_data_transfer_service/templates/admin.html,sha256=KvQB54-mD8LL8wnDd3G9a8lDYAT8BWK13_MhpIJAiSY,1200
+ aind_data_transfer_service/templates/index.html,sha256=TDmmHlhWFPnQrk6nk1OhNjC3SapZtX0TXeuUonoCO7g,11326
+ aind_data_transfer_service/templates/job_params.html,sha256=ivofS1CjSnC87T5J2BTsNtVzq6xnUqlPZePdGt_fc4U,8854
+ aind_data_transfer_service/templates/job_status.html,sha256=5lUUGZL-5urppG610qDOgpfIE-OcQH57gFWvRA5pBNM,16938
  aind_data_transfer_service/templates/job_tasks_table.html,sha256=rWFukhjZ4dhPyabe372tmi4lbQS2fyELZ7Awbn5Un4g,6181
  aind_data_transfer_service/templates/task_logs.html,sha256=y1GnQft0S50ghPb2xJDjAlefymB9a4zYdMikUFV7Tl4,918
- aind_data_transfer_service-1.14.0.dist-info/licenses/LICENSE,sha256=U0Y7B3gZJHXpjJVLgTQjM8e_c8w4JJpLgGhIdsoFR1Y,1092
- aind_data_transfer_service-1.14.0.dist-info/METADATA,sha256=N7A2du3anL11r5AHh8NXI-yPtB8Ilsm1saCJ-_o-200,2410
- aind_data_transfer_service-1.14.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- aind_data_transfer_service-1.14.0.dist-info/top_level.txt,sha256=XmxH0q27Jholj2-VYh-6WMrh9Lw6kkuCX_fdsj3SaFE,27
- aind_data_transfer_service-1.14.0.dist-info/RECORD,,
+ aind_data_transfer_service-1.15.0.dist-info/licenses/LICENSE,sha256=U0Y7B3gZJHXpjJVLgTQjM8e_c8w4JJpLgGhIdsoFR1Y,1092
+ aind_data_transfer_service-1.15.0.dist-info/METADATA,sha256=XNDcKsS_NFZIZMBOip1nBheDFo8lirOA7a9P14SVtNo,2452
+ aind_data_transfer_service-1.15.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ aind_data_transfer_service-1.15.0.dist-info/top_level.txt,sha256=XmxH0q27Jholj2-VYh-6WMrh9Lw6kkuCX_fdsj3SaFE,27
+ aind_data_transfer_service-1.15.0.dist-info/RECORD,,