aind-data-transfer-service 1.16.0__py3-none-any.whl → 1.17.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aind_data_transfer_service/__init__.py +1 -1
- aind_data_transfer_service/hpc/client.py +28 -25
- aind_data_transfer_service/models/internal.py +35 -10
- aind_data_transfer_service/server.py +188 -64
- {aind_data_transfer_service-1.16.0.dist-info → aind_data_transfer_service-1.17.2.dist-info}/METADATA +2 -2
- aind_data_transfer_service-1.17.2.dist-info/RECORD +18 -0
- aind_data_transfer_service/templates/admin.html +0 -36
- aind_data_transfer_service/templates/index.html +0 -258
- aind_data_transfer_service/templates/job_params.html +0 -195
- aind_data_transfer_service/templates/job_status.html +0 -324
- aind_data_transfer_service/templates/job_tasks_table.html +0 -146
- aind_data_transfer_service/templates/task_logs.html +0 -31
- aind_data_transfer_service-1.16.0.dist-info/RECORD +0 -24
- {aind_data_transfer_service-1.16.0.dist-info → aind_data_transfer_service-1.17.2.dist-info}/WHEEL +0 -0
- {aind_data_transfer_service-1.16.0.dist-info → aind_data_transfer_service-1.17.2.dist-info}/licenses/LICENSE +0 -0
- {aind_data_transfer_service-1.16.0.dist-info → aind_data_transfer_service-1.17.2.dist-info}/top_level.txt +0 -0
aind_data_transfer_service/hpc/client.py

@@ -3,10 +3,9 @@
 import json
 from typing import List, Optional, Union
 
-import requests
+from httpx import AsyncClient, Response
 from pydantic import Field, SecretStr, field_validator
 from pydantic_settings import BaseSettings
-from requests.models import Response
 
 from aind_data_transfer_service.hpc.models import HpcJobSubmitSettings
 
@@ -75,37 +74,41 @@ class HpcClient:
             "X-SLURM-USER-TOKEN": self.configs.hpc_token.get_secret_value(),
         }
 
-    def get_node_status(self) -> Response:
+    async def get_node_status(self) -> Response:
         """Get status of nodes"""
-
-
-
+        async with AsyncClient() as async_client:
+            response = await async_client.get(
+                url=self._node_status_url, headers=self.__headers
+            )
         return response
 
-    def get_job_status(self, job_id: Union[str, int]) -> Response:
+    async def get_job_status(self, job_id: Union[str, int]) -> Response:
         """Get status of job"""
-
-
-
-
+        async with AsyncClient() as async_client:
+            response = await async_client.get(
+                url=self._job_status_url + "/" + str(job_id),
+                headers=self.__headers,
+            )
         return response
 
-    def get_jobs(self) -> Response:
+    async def get_jobs(self) -> Response:
         """Get status of job"""
-
-
-
-
+        async with AsyncClient() as async_client:
+            response = await async_client.get(
+                url=self._jobs_url,
+                headers=self.__headers,
+            )
         return response
 
-    def submit_job(self, job_def: dict) -> Response:
+    async def submit_job(self, job_def: dict) -> Response:
         """Submit a job defined by job def"""
-
-
-
+        async with AsyncClient() as async_client:
+            response = await async_client.post(
+                url=self._job_submit_url, json=job_def, headers=self.__headers
+            )
        return response
 
-    def submit_hpc_job(
+    async def submit_hpc_job(
         self,
         script: str,
         job: Optional[HpcJobSubmitSettings] = None,
@@ -144,8 +147,8 @@ class HpcClient:
             ],
             "script": script,
         }
-
-
-
-
+        async with AsyncClient() as async_client:
+            response = await async_client.post(
+                url=self._job_submit_url, json=job_def, headers=self.__headers
+            )
         return response
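Every HTTP helper on `HpcClient` is now a coroutine backed by `httpx.AsyncClient`, so callers must `await` them (the server endpoints further down are updated accordingly). A minimal usage sketch, assuming an already-configured `HpcClient` instance whose construction is not part of this diff:

```python
import asyncio

from aind_data_transfer_service.hpc.client import HpcClient


async def check_cluster(client: HpcClient) -> None:
    # Each helper opens its own httpx.AsyncClient and must be awaited.
    nodes = await client.get_node_status()
    jobs = await client.get_jobs()
    print(nodes.status_code, jobs.status_code)


# asyncio.run(check_cluster(client))  # with a configured `client` instance
```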
aind_data_transfer_service/models/internal.py

@@ -3,8 +3,9 @@
 import ast
 import os
 from datetime import datetime, timedelta, timezone
-from typing import List, Optional, Union
+from typing import ClassVar, List, Optional, Union
 
+from aind_data_schema_models.modalities import Modality
 from mypy_boto3_ssm.type_defs import ParameterMetadataTypeDef
 from pydantic import AwareDatetime, BaseModel, Field, field_validator
 from starlette.datastructures import QueryParams
@@ -223,11 +224,30 @@ class JobTasks(BaseModel):
 class JobParamInfo(BaseModel):
     """Model for job parameter info from AWS Parameter Store"""
 
+    _MODALITIES_LIST: ClassVar[list[str]] = list(
+        Modality.abbreviation_map.keys()
+    )
+    _MODALITY_TASKS: ClassVar[list[str]] = [
+        "modality_transformation_settings",
+        "codeocean_pipeline_settings",
+    ]
+
     name: Optional[str]
     last_modified: Optional[datetime]
-    job_type: str
-    task_id: str
+    job_type: str = Field(..., pattern=r"^[^\s/]+$")
+    task_id: str = Field(..., pattern=r"^[^\s/]+$")
     modality: Optional[str]
+    version: Optional[str] = Field(..., pattern=r"^(v1|v2)?$")
+
+    @field_validator("modality", mode="after")
+    def validate_modality(cls, v):
+        """Check that modality is one of aind-data-schema modalities"""
+        if v is not None and v not in JobParamInfo._MODALITIES_LIST:
+            raise ValueError(
+                "Invalid modality: modality must be one of "
+                f"{JobParamInfo._MODALITIES_LIST}"
+            )
+        return v
 
     @classmethod
     def from_aws_describe_parameter(
@@ -236,6 +256,7 @@ class JobParamInfo(BaseModel):
         job_type: str,
         task_id: str,
         modality: Optional[str],
+        version: Optional[str],
     ):
         """Map the parameter to the model"""
         return cls(
@@ -244,13 +265,14 @@ class JobParamInfo(BaseModel):
             job_type=job_type,
             task_id=task_id,
             modality=modality,
+            version=version,
         )
 
     @staticmethod
     def get_parameter_prefix(version: Optional[str] = None) -> str:
         """Get the prefix for job_type parameters"""
         prefix = os.getenv("AIND_AIRFLOW_PARAM_PREFIX")
-        if version is None:
+        if version is None or version == "v1":
             return prefix
         return f"{prefix}/{version}"
 
@@ -262,16 +284,19 @@ class JobParamInfo(BaseModel):
             "(?P<job_type>[^/]+)/tasks/(?P<task_id>[^/]+)"
             "(?:/(?P<modality>[^/]+))?"
         )
-        if version is None:
+        if version is None or version == "v1":
             return f"{prefix}/{regex}"
         return f"{prefix}/{version}/{regex}"
 
     @staticmethod
     def get_parameter_name(
-        job_type: str,
+        job_type: str,
+        task_id: str,
+        modality: Optional[str],
+        version: Optional[str] = None,
     ) -> str:
         """Create the parameter name from job_type and task_id"""
-        prefix =
-        if
-        return f"{prefix}/{job_type}/tasks/{task_id}"
-        return f"{prefix}/{
+        prefix = JobParamInfo.get_parameter_prefix(version)
+        if modality:
+            return f"{prefix}/{job_type}/tasks/{task_id}/{modality}"
+        return f"{prefix}/{job_type}/tasks/{task_id}"
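The new `version` field threads through `JobParamInfo` so that v2 parameters live under a `/{version}` segment of the Parameter Store prefix, while `v1` (or no version) keeps the original layout, and an optional modality appends a trailing segment. A small sketch of the resulting names; the prefix value and the job/task identifiers below are made-up examples:

```python
import os

from aind_data_transfer_service.models.internal import JobParamInfo

# Made-up prefix, purely for illustration
os.environ["AIND_AIRFLOW_PARAM_PREFIX"] = "/aind/param/prefix"

# v1 (or no version) keeps the bare prefix
print(JobParamInfo.get_parameter_name(
    job_type="my_job", task_id="check_source_folders", modality=None
))
# /aind/param/prefix/my_job/tasks/check_source_folders

# "v2" inserts a version segment; a modality appends a final segment
print(JobParamInfo.get_parameter_name(
    job_type="my_job",
    task_id="modality_transformation_settings",
    modality="ecephys",
    version="v2",
))
# /aind/param/prefix/v2/my_job/tasks/modality_transformation_settings/ecephys

# Constructing the model itself now rejects unknown modalities:
# the field_validator above raises, surfacing as a pydantic ValidationError.
```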
aind_data_transfer_service/server.py

@@ -7,10 +7,9 @@ import os
 import re
 from asyncio import gather, sleep
 from pathlib import PurePosixPath
-from typing import List, Optional, Union
+from typing import Any, List, Optional, Union
 
 import boto3
-import requests
 from aind_data_transfer_models import (
     __version__ as aind_data_transfer_models_version,
 )
@@ -96,12 +95,13 @@ logger = get_logger(log_configs=LoggingConfigs())
 project_names_url = os.getenv("AIND_METADATA_SERVICE_PROJECT_NAMES_URL")
 
 
-def get_project_names() -> List[str]:
+async def get_project_names() -> List[str]:
     """Get a list of project_names"""
     # TODO: Cache response for 5 minutes
-
-
-
+    async with AsyncClient() as async_client:
+        response = await async_client.get(project_names_url)
+        response.raise_for_status()
+        project_names = response.json()["data"]
     return project_names
 
 
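`get_project_names` is now a coroutine and, because of `raise_for_status()`, a non-2xx reply from the metadata service surfaces as an `httpx.HTTPStatusError` rather than slipping through. A hedged sketch of how a caller might guard against that (the empty-list fallback is illustrative, not what the service itself does):

```python
from httpx import HTTPStatusError

from aind_data_transfer_service.server import get_project_names


async def project_names_or_empty() -> list:
    try:
        return await get_project_names()
    except HTTPStatusError:
        # raise_for_status() turns non-2xx replies into exceptions
        return []
```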
@@ -156,6 +156,7 @@ def get_parameter_infos(version: Optional[str] = None) -> List[JobParamInfo]:
                 job_type=match.group("job_type"),
                 task_id=match.group("task_id"),
                 modality=match.group("modality"),
+                version=version,
             )
             params.append(param_info)
         else:
@@ -173,6 +174,19 @@ def get_parameter_value(param_name: str) -> dict:
     return param_value
 
 
+def put_parameter_value(param_name: str, param_value: dict) -> Any:
+    """Set a parameter value in AWS param store based on parameter name"""
+    param_value_str = json.dumps(param_value)
+    ssm_client = boto3.client("ssm")
+    result = ssm_client.put_parameter(
+        Name=param_name,
+        Value=param_value_str,
+        Type="String",
+        Overwrite=True,
+    )
+    return result
+
+
 async def get_airflow_jobs(
     params: AirflowDagRunsRequestParameters, get_confs: bool = False
 ) -> tuple[int, Union[List[JobStatus], List[dict]]]:
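`put_parameter_value` is the write-side counterpart of `get_parameter_value`: it JSON-encodes the dict and overwrites the named String parameter in SSM. A hypothetical round trip (the parameter path and payload are made up, and AWS credentials with `ssm:PutParameter` permission are assumed):

```python
result = put_parameter_value(
    param_name="/example/prefix/v2/my_job/tasks/codeocean_pipeline_settings",
    param_value={"capsule_id": "example-capsule-id"},
)
# boto3's put_parameter response reports the new parameter version number
print(result.get("Version"))
```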
@@ -267,7 +281,7 @@ async def validate_csv(request: Request):
     )
     context = {
         "job_types": get_job_types("v2"),
-        "project_names": get_project_names(),
+        "project_names": await get_project_names(),
         "current_jobs": current_jobs,
     }
     for row in csv_reader:
@@ -361,7 +375,7 @@ async def validate_json_v2(request: Request):
     _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
     context = {
         "job_types": get_job_types("v2"),
-        "project_names": get_project_names(),
+        "project_names": await get_project_names(),
         "current_jobs": current_jobs,
     }
     with validation_context_v2(context):
@@ -417,7 +431,7 @@ async def validate_json(request: Request):
     logger.info("Received request to validate json")
     content = await request.json()
     try:
-        project_names = get_project_names()
+        project_names = await get_project_names()
         with validation_context({"project_names": project_names}):
             validated_model = SubmitJobRequest.model_validate_json(
                 json.dumps(content)
@@ -477,7 +491,7 @@ async def submit_jobs_v2(request: Request):
     _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
     context = {
         "job_types": get_job_types("v2"),
-        "project_names": get_project_names(),
+        "project_names": await get_project_names(),
         "current_jobs": current_jobs,
     }
     with validation_context_v2(context):
@@ -485,7 +499,6 @@ async def submit_jobs_v2(request: Request):
         full_content = json.loads(
             model.model_dump_json(warnings=False, exclude_none=True)
         )
-        # TODO: Replace with httpx async client
         logger.info(
             f"Valid request detected. Sending list of jobs. "
             f"dag_id: {model.dag_id}"
@@ -497,19 +510,23 @@ async def submit_jobs_v2(request: Request):
             f"{job_index} of {total_jobs}."
         )
 
-
-            url=os.getenv("AIND_AIRFLOW_SERVICE_URL"),
+        async with AsyncClient(
             auth=(
                 os.getenv("AIND_AIRFLOW_SERVICE_USER"),
                 os.getenv("AIND_AIRFLOW_SERVICE_PASSWORD"),
-            )
-
-
+            )
+        ) as async_client:
+            response = await async_client.post(
+                url=os.getenv("AIND_AIRFLOW_SERVICE_URL"),
+                json={"conf": full_content},
+            )
+            status_code = response.status_code
+            response_json = response.json()
         return JSONResponse(
-            status_code=
+            status_code=status_code,
             content={
                 "message": "Submitted request to airflow",
-                "data": {"responses": [
+                "data": {"responses": [response_json], "errors": []},
             },
         )
     except ValidationError as e:
@@ -537,13 +554,12 @@ async def submit_jobs(request: Request):
     logger.info("Received request to submit jobs")
     content = await request.json()
     try:
-        project_names = get_project_names()
+        project_names = await get_project_names()
         with validation_context({"project_names": project_names}):
             model = SubmitJobRequest.model_validate_json(json.dumps(content))
         full_content = json.loads(
             model.model_dump_json(warnings=False, exclude_none=True)
         )
-        # TODO: Replace with httpx async client
         logger.info(
             f"Valid request detected. Sending list of jobs. "
             f"Job Type: {model.job_type}"
@@ -555,19 +571,23 @@ async def submit_jobs(request: Request):
             f"{job_index} of {total_jobs}."
         )
 
-
-            url=os.getenv("AIND_AIRFLOW_SERVICE_URL"),
+        async with AsyncClient(
             auth=(
                 os.getenv("AIND_AIRFLOW_SERVICE_USER"),
                 os.getenv("AIND_AIRFLOW_SERVICE_PASSWORD"),
-            )
-
-
+            )
+        ) as async_client:
+            response = await async_client.post(
+                url=os.getenv("AIND_AIRFLOW_SERVICE_URL"),
+                json={"conf": full_content},
+            )
+            status_code = response.status_code
+            response_json = response.json()
         return JSONResponse(
-            status_code=
+            status_code=status_code,
             content={
                 "message": "Submitted request to airflow",
-                "data": {"responses": [
+                "data": {"responses": [response_json], "errors": []},
             },
         )
 
@@ -631,7 +651,7 @@ async def submit_basic_jobs(request: Request):
         for hpc_job in hpc_jobs:
             try:
                 job_def = hpc_job.job_definition
-                response = hpc_client.submit_job(job_def)
+                response = await hpc_client.submit_job(job_def)
                 response_json = response.json()
                 responses.append(response_json)
                 # Add pause to stagger job requests to the hpc
@@ -755,7 +775,7 @@ async def submit_hpc_jobs(request: Request):  # noqa: C901
             hpc_job_def = hpc_job[0]
             try:
                 script = hpc_job[1]
-                response = hpc_client.submit_hpc_job(
+                response = await hpc_client.submit_hpc_job(
                     job=hpc_job_def, script=script
                 )
                 response_json = response.json()
@@ -828,20 +848,23 @@ async def get_tasks_list(request: Request):
             request.query_params
         )
         params_dict = json.loads(params.model_dump_json())
-
-            url=(
-                f"{url}/{params.dag_id}/dagRuns/{params.dag_run_id}/"
-                "taskInstances"
-            ),
+        async with AsyncClient(
             auth=(
                 os.getenv("AIND_AIRFLOW_SERVICE_USER"),
                 os.getenv("AIND_AIRFLOW_SERVICE_PASSWORD"),
-            )
-        )
-
-
+            )
+        ) as async_client:
+            response_tasks = await async_client.get(
+                url=(
+                    f"{url}/{params.dag_id}/dagRuns/{params.dag_run_id}/"
+                    "taskInstances"
+                ),
+            )
+            status_code = response_tasks.status_code
+            response_json = response_tasks.json()
+        if status_code == 200:
             task_instances = AirflowTaskInstancesResponse.model_validate_json(
-                json.dumps(
+                json.dumps(response_json)
             )
             job_tasks_list = sorted(
                 [
@@ -862,7 +885,7 @@ async def get_tasks_list(request: Request):
             message = "Error retrieving job tasks list from airflow"
             data = {
                 "params": params_dict,
-                "errors": [
+                "errors": [response_json],
             }
     except ValidationError as e:
         logger.warning(f"There was a validation error process task_list: {e}")
@@ -892,27 +915,29 @@ async def get_task_logs(request: Request):
         )
         params_dict = json.loads(params.model_dump_json())
         params_full = dict(params)
-
-            url=(
-                f"{url}/{params.dag_id}/dagRuns/{params.dag_run_id}"
-                f"/taskInstances/{params.task_id}/logs/{params.try_number}"
-            ),
+        async with AsyncClient(
             auth=(
                 os.getenv("AIND_AIRFLOW_SERVICE_USER"),
                 os.getenv("AIND_AIRFLOW_SERVICE_PASSWORD"),
-            )
-
-
-
-
-
-
-
-
-
-
-            "
-
+            )
+        ) as async_client:
+            response_logs = await async_client.get(
+                url=(
+                    f"{url}/{params.dag_id}/dagRuns/{params.dag_run_id}"
+                    f"/taskInstances/{params.task_id}/logs/{params.try_number}"
+                ),
+                params=params_dict,
+            )
+            status_code = response_logs.status_code
+        if status_code == 200:
+            message = "Retrieved task logs from airflow"
+            data = {"params": params_full, "logs": response_logs.text}
+        else:
+            message = "Error retrieving task logs from airflow"
+            data = {
+                "params": params_full,
+                "errors": [response_logs.json()],
+            }
     except ValidationError as e:
         logger.warning(f"Error validating request parameters: {e}")
         status_code = 406
@@ -1001,16 +1026,20 @@ async def jobs(request: Request):
 
 async def job_params(request: Request):
     """Get Job Parameters page"""
+    user = request.session.get("user")
     return templates.TemplateResponse(
         request=request,
         name="job_params.html",
         context=(
             {
+                "user_signed_in": user is not None,
                 "project_names_url": os.getenv(
                     "AIND_METADATA_SERVICE_PROJECT_NAMES_URL"
                 ),
                 "versions": ["v1", "v2"],
                 "default_version": "v1",
+                "modalities": JobParamInfo._MODALITIES_LIST,
+                "modality_tasks": JobParamInfo._MODALITY_TASKS,
             }
         ),
     )
@@ -1074,7 +1103,10 @@ def get_parameter_v2(request: Request):
     # path params are auto validated
     job_type = request.path_params.get("job_type")
     task_id = request.path_params.get("task_id")
-
+    modality = request.path_params.get("modality")
+    param_name = JobParamInfo.get_parameter_name(
+        job_type=job_type, task_id=task_id, modality=modality, version="v2"
+    )
     try:
         param_value = get_parameter_value(param_name)
         return JSONResponse(
@@ -1095,12 +1127,79 @@ def get_parameter_v2(request: Request):
     )
 
 
+async def put_parameter(request: Request):
+    """Set v1/v2 parameter in AWS param store based on job_type and task_id"""
+    # User must be signed in
+    user = request.session.get("user")
+    if not user:
+        return JSONResponse(
+            content={
+                "message": "User not authenticated",
+                "data": {"error": "User not authenticated"},
+            },
+            status_code=401,
+        )
+    try:
+        # path params
+        param_info = JobParamInfo(
+            name=None,
+            last_modified=None,
+            job_type=request.path_params.get("job_type"),
+            task_id=request.path_params.get("task_id"),
+            modality=request.path_params.get("modality"),
+            version=request.path_params.get("version"),
+        )
+        param_name = JobParamInfo.get_parameter_name(
+            job_type=param_info.job_type,
+            task_id=param_info.task_id,
+            modality=param_info.modality,
+            version=param_info.version,
+        )
+        # update param store
+        logger.info(
+            f"Received request from {user} to set parameter {param_name}"
+        )
+        param_value = await request.json()
+        logger.info(f"Setting parameter {param_name} to {param_value}")
+        result = put_parameter_value(
+            param_name=param_name, param_value=param_value
+        )
+        logger.info(result)
+        return JSONResponse(
+            content={
+                "message": f"Set parameter for {param_name}",
+                "data": param_value,
+            },
+            status_code=200,
+        )
+    except ValidationError as error:
+        return JSONResponse(
+            content={
+                "message": "Invalid parameter",
+                "data": {"errors": json.loads(error.json())},
+            },
+            status_code=400,
+        )
+    except Exception as e:
+        logger.exception(f"Error setting parameter {param_name}: {e}")
+        return JSONResponse(
+            content={
+                "message": f"Error setting parameter {param_name}",
+                "data": {"error": f"{e.__class__.__name__}{e.args}"},
+            },
+            status_code=500,
+        )
+
+
 def get_parameter(request: Request):
     """Get parameter from AWS parameter store based on job_type and task_id"""
     # path params are auto validated
     job_type = request.path_params.get("job_type")
     task_id = request.path_params.get("task_id")
-
+    modality = request.path_params.get("modality")
+    param_name = JobParamInfo.get_parameter_name(
+        job_type=job_type, task_id=task_id, modality=modality
+    )
     try:
         param_value = get_parameter_value(param_name)
         return JSONResponse(
@@ -1124,8 +1223,6 @@ def get_parameter(request: Request):
 async def admin(request: Request):
     """Get admin page if authenticated, else redirect to login."""
     user = request.session.get("user")
-    if os.getenv("ENV_NAME") == "local":
-        user = {"name": "local user"}
     if user:
         return templates.TemplateResponse(
             request=request,
@@ -1143,6 +1240,9 @@ async def admin(request: Request):
 
 async def login(request: Request):
     """Redirect to Azure login page"""
+    if os.getenv("ENV_NAME") == "local":
+        request.session["user"] = {"name": "local user"}
+        return RedirectResponse(url="/admin")
     oauth = set_oauth()
     redirect_uri = request.url_for("auth")
     response = await oauth.azure.authorize_redirect(request, redirect_uri)
@@ -1193,7 +1293,13 @@ routes = [
     Route("/api/v1/get_task_logs", endpoint=get_task_logs, methods=["GET"]),
     Route("/api/v1/parameters", endpoint=list_parameters, methods=["GET"]),
     Route(
-        "/api/v1/parameters/job_types/{job_type:str}/tasks/{task_id:
+        "/api/v1/parameters/job_types/{job_type:str}/tasks/{task_id:str}",
+        endpoint=get_parameter,
+        methods=["GET"],
+    ),
+    Route(
+        "/api/v1/parameters/job_types/{job_type:str}/tasks/{task_id:str}"
+        "/{modality:str}",
         endpoint=get_parameter,
         methods=["GET"],
     ),
@@ -1204,10 +1310,28 @@ routes = [
     Route("/api/v2/submit_jobs", endpoint=submit_jobs_v2, methods=["POST"]),
     Route("/api/v2/parameters", endpoint=list_parameters_v2, methods=["GET"]),
     Route(
-        "/api/v2/parameters/job_types/{job_type:str}/tasks/{task_id:
+        "/api/v2/parameters/job_types/{job_type:str}/tasks/{task_id:str}",
+        endpoint=get_parameter_v2,
+        methods=["GET"],
+    ),
+    Route(
+        "/api/v2/parameters/job_types/{job_type:str}/tasks/{task_id:str}"
+        "/{modality:str}",
        endpoint=get_parameter_v2,
        methods=["GET"],
    ),
+    Route(
+        "/api/{version:str}/parameters/job_types/{job_type:str}"
+        "/tasks/{task_id:str}",
+        endpoint=put_parameter,
+        methods=["PUT"],
+    ),
+    Route(
+        "/api/{version:str}/parameters/job_types/{job_type:str}"
+        "/tasks/{task_id:str}/{modality:str}",
+        endpoint=put_parameter,
+        methods=["PUT"],
+    ),
     Route("/jobs", endpoint=jobs, methods=["GET"]),
     Route("/job_tasks_table", endpoint=job_tasks_table, methods=["GET"]),
     Route("/task_logs", endpoint=task_logs, methods=["GET"]),
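The new routes expose `put_parameter` under both `/api/v1` and `/api/v2` (via the `{version:str}` segment), with an optional trailing modality segment, and the handler returns 401 unless the session carries a signed-in user. A hedged client sketch; the host, port, `job_type`, and payload below are placeholders:

```python
import asyncio

from httpx import AsyncClient


async def set_task_params() -> None:
    async with AsyncClient(base_url="http://localhost:5000") as client:
        resp = await client.put(
            "/api/v2/parameters/job_types/my_job/tasks"
            "/modality_transformation_settings/ecephys",
            json={"chunk_size": 64},
        )
        # Without an authenticated session cookie this returns 401.
        print(resp.status_code, resp.json())


# asyncio.run(set_task_params())
```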
{aind_data_transfer_service-1.16.0.dist-info → aind_data_transfer_service-1.17.2.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aind-data-transfer-service
-Version: 1.16.0
+Version: 1.17.2
 Summary: Service that handles requests to upload data to the cloud
 Author: Allen Institute for Neural Dynamics
 License: MIT
@@ -25,7 +25,7 @@ Requires-Dist: furo; extra == "docs"
 Provides-Extra: server
 Requires-Dist: aind-data-schema<2.0,>=1.0.0; extra == "server"
 Requires-Dist: aind-data-transfer-models==0.17.0; extra == "server"
-Requires-Dist: aind-metadata-mapper
+Requires-Dist: aind-metadata-mapper>=0.23.0; extra == "server"
 Requires-Dist: boto3; extra == "server"
 Requires-Dist: boto3-stubs[ssm]; extra == "server"
 Requires-Dist: fastapi>=0.115.13; extra == "server"
|