aind-data-transfer-service 1.14.0__tar.gz → 1.16.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.flake8 +2 -0
- {aind_data_transfer_service-1.14.0/src/aind_data_transfer_service.egg-info → aind_data_transfer_service-1.16.0}/PKG-INFO +4 -3
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/example1.csv +2 -2
- aind_data_transfer_service-1.16.0/docs/examples/remove_source_folders.py +80 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/Contributing.rst +1 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/pyproject.toml +4 -3
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/__init__.py +1 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/configs/csv_handler.py +57 -6
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/configs/job_upload_template.py +22 -22
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/models/core.py +11 -7
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/server.py +105 -14
- aind_data_transfer_service-1.16.0/src/aind_data_transfer_service/templates/admin.html +36 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/templates/index.html +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/templates/job_params.html +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/templates/job_status.html +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0/src/aind_data_transfer_service.egg-info}/PKG-INFO +4 -3
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service.egg-info/SOURCES.txt +4 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service.egg-info/requires.txt +3 -2
- aind_data_transfer_service-1.16.0/tests/resources/get_secrets_response.json +19 -0
- aind_data_transfer_service-1.16.0/tests/resources/nested_sample.csv +2 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_core.py +18 -12
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_csv_handler.py +63 -5
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_job_upload_template.py +3 -5
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_server.py +197 -8
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.github/ISSUE_TEMPLATE/user-story.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.github/workflows/add_issue_to_project_board.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.github/workflows/publish_dev.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.github/workflows/publish_main.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.github/workflows/run_dev_tests.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.github/workflows/run_main_tests.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.gitignore +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/.readthedocs.yaml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/CODE_OF_CONDUCT.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/Dockerfile +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/LICENSE +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/README.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/Makefile +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/diagrams/system_container.png +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/diagrams/system_container.puml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/diagrams/system_context.png +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/diagrams/system_context.puml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/basic_upload.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/behavior_videos_compression.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/custom_codeocean_pipeline_settings.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/custom_metadata_mapper_settings.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/hcr_example.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/skip_s3_check.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/upload_with_custom_slurm_settings.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/upload_with_notification.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/make.bat +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/UserGuideV1.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/UserGuideV2.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/_static/dark-logo.svg +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/_static/favicon.ico +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/_static/light-logo.svg +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/aind_data_transfer_service.configs.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/aind_data_transfer_service.hpc.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/aind_data_transfer_service.models.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/aind_data_transfer_service.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/conf.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/index.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/modules.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/setup.cfg +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/setup.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/configs/__init__.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/configs/job_configs.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/hpc/__init__.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/hpc/client.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/hpc/models.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/log_handler.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/models/__init__.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/models/internal.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/templates/job_tasks_table.html +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/templates/task_logs.html +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service.egg-info/dependency_links.txt +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service.egg-info/top_level.txt +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/__init__.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/airflow_dag_run_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/airflow_dag_runs_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/airflow_task_instances_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/describe_parameters_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/get_parameter_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/job_upload_template.xlsx +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/legacy_sample.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/legacy_sample2.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/new_sample.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample.xlsx +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample_alt_modality_case.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample_empty_rows.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample_empty_rows.xlsx +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample_empty_rows_2.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample_invalid_ext.txt +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample_malformed.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample_malformed.xlsx +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/resources/sample_malformed_2.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_configs.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_hpc_client.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_hpc_models.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_internal.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_log_handler.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_server/Dockerfile +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/tests/test_server/db.json +0 -0
{aind_data_transfer_service-1.14.0/src/aind_data_transfer_service.egg-info → aind_data_transfer_service-1.16.0}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aind-data-transfer-service
-Version: 1.14.0
+Version: 1.16.0
 Summary: Service that handles requests to upload data to the cloud
 Author: Allen Institute for Neural Dynamics
 License: MIT
@@ -28,16 +28,17 @@ Requires-Dist: aind-data-transfer-models==0.17.0; extra == "server"
 Requires-Dist: aind-metadata-mapper==0.23.0; extra == "server"
 Requires-Dist: boto3; extra == "server"
 Requires-Dist: boto3-stubs[ssm]; extra == "server"
-Requires-Dist: fastapi; extra == "server"
+Requires-Dist: fastapi>=0.115.13; extra == "server"
 Requires-Dist: httpx; extra == "server"
 Requires-Dist: jinja2; extra == "server"
-Requires-Dist: starlette; extra == "server"
+Requires-Dist: starlette<0.47.0,>=0.40.0; extra == "server"
 Requires-Dist: starlette_wtf; extra == "server"
 Requires-Dist: uvicorn[standard]; extra == "server"
 Requires-Dist: wtforms; extra == "server"
 Requires-Dist: requests==2.25.0; extra == "server"
 Requires-Dist: openpyxl; extra == "server"
 Requires-Dist: python-logging-loki; extra == "server"
+Requires-Dist: authlib; extra == "server"
 Dynamic: license-file
 
 # aind-data-transfer-service

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/examples/example1.csv
RENAMED
@@ -1,4 +1,4 @@
 project_name, modality0.capsule_id, modality0, modality0.input_source, modality1, modality1.input_source, s3_bucket, subject_id, platform, acq_datetime, job_type
 Ephys Platform, , ecephys, dir/data_set_1, ,, default, 123454, ecephys, 2020-10-10 14:10:10, ecephys
-Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, behavior-videos, dir/data_set_2, MRI, dir/data_set_3, open, 123456, behavior, 10/13/2020 1:10:10 PM,
-Behavior Platform, , behavior-videos, dir/data_set_2, behavior, dir/data_set_3, private, 123456, behavior, 10/13/2020 1:10:10 PM,
+Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, behavior-videos, dir/data_set_2, MRI, dir/data_set_3, open, 123456, behavior, 10/13/2020 1:10:10 PM, default
+Behavior Platform, , behavior-videos, dir/data_set_2, behavior, dir/data_set_3, private, 123456, behavior, 10/13/2020 1:10:10 PM, default

aind_data_transfer_service-1.16.0/docs/examples/remove_source_folders.py
ADDED
@@ -0,0 +1,80 @@
+"""
+WARNING: This deletes raw data. Please use caution.
+This example demonstrates how to remove the source folders after the data
+has been uploaded to S3.
+"""
+
+from datetime import datetime
+
+import requests
+from aind_data_schema_models.modalities import Modality
+from aind_data_schema_models.platforms import Platform
+
+from aind_data_transfer_service.models.core import (
+    SubmitJobRequestV2,
+    Task,
+    UploadJobConfigsV2,
+)
+
+# The job_type contains the default settings for compression and Code Ocean
+# pipelines.
+job_type = "ecephys"
+
+acq_datetime = datetime(2023, 4, 3, 18, 17, 7)
+
+remove_source_folders = Task(skip_task=False)
+
+ecephys_task = Task(
+    job_settings={
+        "input_source": (
+            "/allen/aind/scratch/svc_aind_upload/test_data_sets/"
+            "ecephys/655019_2023-04-03_18-17-07"
+        )
+    }
+)
+
+modality_transformation_settings = {"ecephys": ecephys_task}
+
+gather_preliminary_metadata = Task(
+    job_settings={
+        "metadata_dir": (
+            "/allen/aind/scratch/svc_aind_upload/test_data_sets/"
+            "ecephys/655019_2023-04-03_18-17-07"
+        )
+    }
+)
+
+
+upload_job_configs_v2 = UploadJobConfigsV2(
+    job_type=job_type,
+    project_name="Ephys Platform",
+    platform=Platform.ECEPHYS,
+    modalities=[Modality.ECEPHYS],
+    subject_id="655019",
+    acq_datetime=acq_datetime,
+    tasks={
+        "modality_transformation_settings": modality_transformation_settings,
+        "gather_preliminary_metadata": gather_preliminary_metadata,
+        "remove_source_folders": remove_source_folders,
+    },
+)
+
+submit_request_v2 = SubmitJobRequestV2(
+    upload_jobs=[upload_job_configs_v2],
+)
+
+post_request_content = submit_request_v2.model_dump(
+    mode="json", exclude_none=True
+)
+
+# Please use the production endpoint for submitting jobs and the dev endpoint
+# for running tests.
+# endpoint = "http://aind-data-transfer-service"
+endpoint = "http://aind-data-transfer-service-dev"  # For testing
+
+submit_job_response = requests.post(
+    url=f"{endpoint}/api/v2/submit_jobs",
+    json=post_request_content,
+)
+print(submit_job_response.status_code)
+print(submit_job_response.json())

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/docs/source/Contributing.rst
RENAMED
@@ -71,6 +71,7 @@ To run uvicorn locally:
   export ENV_NAME='local'
   export AWS_DEFAULT_REGION='us-west-2'
   export AIND_AIRFLOW_PARAM_PREFIX='/aind/dev/airflow/variables/job_types'
+  export AIND_SSO_SECRET_NAME='/aind/dev/data_transfer_service/sso/secrets'
   uvicorn aind_data_transfer_service.server:app --host 0.0.0.0 --port 5000 --reload
 
 You can now access aind-data-transfer-service at

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/pyproject.toml
RENAMED
@@ -44,16 +44,17 @@ server = [
     'aind-metadata-mapper==0.23.0',
     'boto3',
     'boto3-stubs[ssm]',
-    'fastapi',
+    'fastapi>=0.115.13',
     'httpx',
     'jinja2',
-    'starlette',
+    'starlette>=0.40.0,<0.47.0',
     'starlette_wtf',
     'uvicorn[standard]',
     'wtforms',
     'requests==2.25.0',
     'openpyxl',
-    'python-logging-loki'
+    'python-logging-loki',
+    'authlib'
 ]
 
 [tool.setuptools.packages.find]

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/configs/csv_handler.py
RENAMED
@@ -1,7 +1,10 @@
 """Module to handle processing legacy csv files"""
 
 import re
+from collections.abc import Mapping
+from copy import deepcopy
 from datetime import datetime
+from typing import Any, Dict
 
 from aind_data_schema_models.modalities import Modality
 from aind_data_schema_models.platforms import Platform
@@ -13,6 +16,45 @@ DATETIME_PATTERN2 = re.compile(
 )
 
 
+def nested_update(dict_to_update: Dict[str, Any], updates: Mapping):
+    """
+    Update a nested dictionary in-place.
+    Parameters
+    ----------
+    dict_to_update : Dict[str, Any]
+    updates : Mapping
+
+    """
+    for k, v in updates.items():
+        if isinstance(v, Mapping):
+            dict_to_update[k] = nested_update(dict_to_update.get(k, {}), v)
+        else:
+            dict_to_update[k] = v
+    return dict_to_update
+
+
+def create_nested_dict(
+    dict_to_update: Dict[str, Any], key_string: str, value: Any
+):
+    """
+    Updates in-place a nested dictionary with a period delimited key and value.
+    Parameters
+    ----------
+    dict_to_update : Dict[str, Any]
+    key_string : str
+    value : Any
+
+    """
+    keys = key_string.split(".", 1)
+    current_key = keys[0]
+    if len(keys) == 1:
+        dict_to_update[current_key] = value
+    else:
+        if current_key not in dict_to_update:
+            dict_to_update[current_key] = dict()
+        create_nested_dict(dict_to_update[current_key], keys[1], value)
+
+
 def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     """
     Maps csv row into a UploadJobConfigsV2 model. This attempts to be somewhat
@@ -29,7 +71,6 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     modality_configs = dict()
     job_configs = dict()
     check_s3_folder_exists_task = None
-    final_check_s3_folder_exist = None
     codeocean_tasks = dict()
     for key, value in row.items():
         # Strip white spaces and replace dashes with underscores
@@ -42,7 +83,9 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
             modality_parts = clean_key.split(".")
             modality_key = modality_parts[0]
             sub_key = (
-                "modality"
+                "modality"
+                if len(modality_parts) == 1
+                else ".".join(modality_parts[1:])
             )
             modality_configs.setdefault(modality_key, dict())
             # Temp backwards compatibility check
@@ -66,13 +109,22 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
                     job_settings=codeocean_pipeline_monitor_settings,
                 )
             else:
-                modality_configs[modality_key][sub_key] = clean_val
+                nested_val = dict()
+                create_nested_dict(
+                    dict_to_update=nested_val,
+                    key_string=sub_key,
+                    value=clean_val,
+                )
+                current_dict = deepcopy(
+                    modality_configs.get(modality_key, dict())
+                )
+                nested_update(current_dict, nested_val)
+                modality_configs[modality_key] = current_dict
         elif clean_key == "force_cloud_sync" and clean_val.upper() in [
             "TRUE",
             "T",
         ]:
             check_s3_folder_exists_task = {"skip_task": True}
-            final_check_s3_folder_exist = {"skip_task": True}
         else:
             job_configs[clean_key] = clean_val
     # Rename codeocean config keys with correct modality
@@ -93,8 +145,7 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
     )
     tasks = {
        "gather_preliminary_metadata": metadata_task,
-       "check_s3_folder_exists": check_s3_folder_exists_task,
-       "final_check_s3_folder_exist": final_check_s3_folder_exist,
+       "check_s3_folder_exists": check_s3_folder_exists_task,
        "modality_transformation_settings": modality_tasks,
        "codeocean_pipeline_settings": None
        if codeocean_tasks == dict()
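
Note: the two new helpers above compose as follows. This is a minimal, self-contained sketch using the functions exactly as they appear in this diff; the example sub-keys (job_settings.chunk_size, job_settings.compressor) are hypothetical CSV columns, not ones the service defines:

    from collections.abc import Mapping
    from copy import deepcopy
    from typing import Any, Dict


    def nested_update(dict_to_update: Dict[str, Any], updates: Mapping):
        """Recursively merge ``updates`` into ``dict_to_update`` in-place."""
        for k, v in updates.items():
            if isinstance(v, Mapping):
                dict_to_update[k] = nested_update(dict_to_update.get(k, {}), v)
            else:
                dict_to_update[k] = v
        return dict_to_update


    def create_nested_dict(
        dict_to_update: Dict[str, Any], key_string: str, value: Any
    ):
        """Expand a period-delimited key into nested dictionaries."""
        keys = key_string.split(".", 1)
        current_key = keys[0]
        if len(keys) == 1:
            dict_to_update[current_key] = value
        else:
            if current_key not in dict_to_update:
                dict_to_update[current_key] = dict()
            create_nested_dict(dict_to_update[current_key], keys[1], value)


    # Two period-delimited sub-keys targeting the same modality merge into
    # one nested config instead of overwriting each other.
    configs: Dict[str, Any] = {}
    for sub_key, value in [
        ("job_settings.chunk_size", "500"),
        ("job_settings.compressor", "wavpack"),
    ]:
        nested_val: Dict[str, Any] = {}
        create_nested_dict(nested_val, sub_key, value)
        merged = deepcopy(configs)
        nested_update(merged, nested_val)
        configs = merged

    print(configs)
    # {'job_settings': {'chunk_size': '500', 'compressor': 'wavpack'}}

The deepcopy-then-merge pattern mirrors the mapping loop above: repeated dotted keys for a modality accumulate into one nested dictionary rather than clobbering earlier values.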

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/configs/job_upload_template.py
RENAMED
@@ -2,7 +2,7 @@
 
 import datetime
 from io import BytesIO
-from typing import Any, Dict, List
+from typing import Any, ClassVar, Dict, List
 
 from aind_data_schema_models.modalities import Modality
 from aind_data_schema_models.platforms import Platform
@@ -10,16 +10,16 @@ from openpyxl import Workbook
 from openpyxl.styles import Font
 from openpyxl.utils import get_column_letter
 from openpyxl.worksheet.datavalidation import DataValidation
+from pydantic import BaseModel
 
 
-
-class JobUploadTemplate:
+class JobUploadTemplate(BaseModel):
     """Class to configure and create xlsx job upload template"""
 
-    FILE_NAME = "job_upload_template.xlsx"
-    NUM_TEMPLATE_ROWS = 20
-    XLSX_DATETIME_FORMAT = "YYYY-MM-DDTHH:mm:ss"
-    HEADERS = [
+    FILE_NAME: ClassVar[str] = "job_upload_template.xlsx"
+    _NUM_TEMPLATE_ROWS: ClassVar[int] = 20
+    _XLSX_DATETIME_FORMAT: ClassVar[str] = "YYYY-MM-DDTHH:mm:ss"
+    _HEADERS: ClassVar[List[str]] = [
         "job_type",
         "project_name",
        "platform",
@@ -31,7 +31,7 @@ class JobUploadTemplate:
         "modality1",
         "modality1.input_source",
     ]
-    SAMPLE_JOBS = [
+    _SAMPLE_JOBS: ClassVar[List[List[Any]]] = [
         [
             "default",
             "Behavior Platform",
@@ -68,8 +68,8 @@ class JobUploadTemplate:
         ],
     ]
 
-    @property
-    def validators(self) -> List[Dict[str, Any]]:
+    @classmethod
+    def _get_validators(cls) -> List[Dict[str, Any]]:
         """
         Returns
         -------
@@ -82,36 +82,36 @@ class JobUploadTemplate:
                 "name": "platform",
                 "type": "list",
                 "options": list(Platform.abbreviation_map.keys()),
-                "column_indexes": [self.HEADERS.index("platform")],
+                "column_indexes": [cls._HEADERS.index("platform")],
             },
             {
                 "name": "modality",
                 "type": "list",
                 "options": list(Modality.abbreviation_map.keys()),
                 "column_indexes": [
-                    self.HEADERS.index("modality0"),
-                    self.HEADERS.index("modality1"),
+                    cls._HEADERS.index("modality0"),
+                    cls._HEADERS.index("modality1"),
                 ],
             },
             {
                 "name": "datetime",
                 "type": "date",
-                "column_indexes": [self.HEADERS.index("acq_datetime")],
+                "column_indexes": [cls._HEADERS.index("acq_datetime")],
             },
         ]
 
-    @property
-    def excel_sheet_filestream(self) -> BytesIO:
+    @classmethod
+    def create_excel_sheet_filestream(cls) -> BytesIO:
         """Create job template as xlsx filestream"""
         xl_io = BytesIO()
         workbook = Workbook()
         workbook.iso_dates = True
         worksheet = workbook.active
-        worksheet.append(self.HEADERS)
-        for job in self.SAMPLE_JOBS:
+        worksheet.append(cls._HEADERS)
+        for job in cls._SAMPLE_JOBS:
             worksheet.append(job)
         # data validators
-        for validator in self.validators:
+        for validator in cls._get_validators():
             dv_type = validator["type"]
             dv_name = validator["name"]
             dv_params = {
@@ -127,17 +127,17 @@ class JobUploadTemplate:
                 dv_params["prompt"] = f"Select a {dv_name} from the dropdown"
             elif dv_type == "date":
                 dv_params["prompt"] = "Provide a {} using {}".format(
-                    dv_name, self.XLSX_DATETIME_FORMAT
+                    dv_name, cls._XLSX_DATETIME_FORMAT
                 )
             dv = DataValidation(**dv_params)
             for i in validator["column_indexes"]:
                 col = get_column_letter(i + 1)
-                col_range = f"{col}2:{col}{self.NUM_TEMPLATE_ROWS}"
+                col_range = f"{col}2:{col}{cls._NUM_TEMPLATE_ROWS}"
                 dv.add(col_range)
                 if dv_type != "date":
                     continue
                 for (cell,) in worksheet[col_range]:
-                    cell.number_format = self.XLSX_DATETIME_FORMAT
+                    cell.number_format = cls._XLSX_DATETIME_FORMAT
             worksheet.add_data_validation(dv)
         # formatting
         bold = Font(bold=True)
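
Note: the refactor above swaps instance properties for classmethods but leaves the openpyxl mechanics unchanged. For orientation, here is a minimal standalone sketch of the same dropdown-validator pattern; the header list and options are an illustrative subset, not the template's full _HEADERS:

    from io import BytesIO

    from openpyxl import Workbook
    from openpyxl.utils import get_column_letter
    from openpyxl.worksheet.datavalidation import DataValidation

    headers = ["platform", "acq_datetime"]  # illustrative subset
    wb = Workbook()
    ws = wb.active
    ws.append(headers)

    # List validator: a dropdown restricted to fixed options, applied to
    # rows 2-20 of the "platform" column, like the template's list validators.
    dv = DataValidation(
        type="list",
        formula1='"ecephys,behavior,smartspim"',
        allow_blank=True,
        prompt="Select a platform from the dropdown",
    )
    col = get_column_letter(headers.index("platform") + 1)
    dv.add(f"{col}2:{col}20")
    ws.add_data_validation(dv)

    # Write to an in-memory stream, as create_excel_sheet_filestream does.
    xl_io = BytesIO()
    wb.save(xl_io)
    print(len(xl_io.getvalue()), "bytes")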

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/models/core.py
RENAMED
@@ -290,11 +290,15 @@ class SubmitJobRequestV2(BaseSettings):
         # check against any jobs in the context
         current_jobs = (info.context or dict()).get("current_jobs", list())
         for job in current_jobs:
-            prefix = job.get("s3_prefix")
-            if (
-                prefix is not None
-                and prefix in jobs_map
-                and json.dumps(job, sort_keys=True) in jobs_map[prefix]
-            ):
-                raise ValueError(f"Job is already running/queued for {prefix}")
+            jobs_to_check = job.get("upload_jobs", [job])
+            for j in jobs_to_check:
+                prefix = j.get("s3_prefix")
+                if (
+                    prefix is not None
+                    and prefix in jobs_map
+                    and json.dumps(j, sort_keys=True) in jobs_map[prefix]
+                ):
+                    raise ValueError(
+                        f"Job is already running/queued for {prefix}"
+                    )
         return self
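
Note: the widened validator now unwraps batch submissions (the upload_jobs key that run_list_of_jobs dag runs carry) before the duplicate check. A self-contained sketch of that comparison, with toy stand-ins for the request-derived jobs_map and the context-derived current_jobs:

    import json

    # jobs_map: s3_prefix -> canonical JSON dumps of jobs in the incoming
    # request (stands in for what the validator builds from upload_jobs).
    new_job = {"s3_prefix": "ecephys_655019_2023-04-03_18-17-07"}
    jobs_map = {new_job["s3_prefix"]: {json.dumps(new_job, sort_keys=True)}}

    # current_jobs: running/queued configs from the validation context; a
    # batch run wraps its jobs in "upload_jobs", a single run does not.
    current_jobs = [{"upload_jobs": [dict(new_job)]}]

    try:
        for job in current_jobs:
            for j in job.get("upload_jobs", [job]):  # unwrap batches
                prefix = j.get("s3_prefix")
                if (
                    prefix is not None
                    and prefix in jobs_map
                    and json.dumps(j, sort_keys=True) in jobs_map[prefix]
                ):
                    raise ValueError(
                        f"Job is already running/queued for {prefix}"
                    )
    except ValueError as err:
        print(err)  # Job is already running/queued for ecephys_655019_...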

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.16.0}/src/aind_data_transfer_service/server.py
RENAMED
@@ -15,6 +15,7 @@ from aind_data_transfer_models import (
     __version__ as aind_data_transfer_models_version,
 )
 from aind_data_transfer_models.core import SubmitJobRequest, validation_context
+from authlib.integrations.starlette_client import OAuth
 from botocore.exceptions import ClientError
 from fastapi import Request
 from fastapi.responses import JSONResponse, StreamingResponse
@@ -23,9 +24,14 @@ from httpx import AsyncClient
 from openpyxl import load_workbook
 from pydantic import SecretStr, ValidationError
 from starlette.applications import Starlette
+from starlette.config import Config
+from starlette.middleware.sessions import SessionMiddleware
+from starlette.responses import RedirectResponse
 from starlette.routing import Route
 
-from aind_data_transfer_service import OPEN_DATA_BUCKET_NAME
+from aind_data_transfer_service import (
+    OPEN_DATA_BUCKET_NAME,
+)
 from aind_data_transfer_service import (
     __version__ as aind_data_transfer_service_version,
 )
@@ -33,14 +39,18 @@ from aind_data_transfer_service.configs.csv_handler import map_csv_row_to_job
 from aind_data_transfer_service.configs.job_configs import (
     BasicUploadJobConfigs as LegacyBasicUploadJobConfigs,
 )
-from aind_data_transfer_service.configs.job_configs import HpcJobConfigs
+from aind_data_transfer_service.configs.job_configs import (
+    HpcJobConfigs,
+)
 from aind_data_transfer_service.configs.job_upload_template import (
     JobUploadTemplate,
 )
 from aind_data_transfer_service.hpc.client import HpcClient, HpcClientConfigs
 from aind_data_transfer_service.hpc.models import HpcJobSubmitSettings
 from aind_data_transfer_service.log_handler import LoggingConfigs, get_logger
-from aind_data_transfer_service.models.core import SubmitJobRequestV2
+from aind_data_transfer_service.models.core import (
+    SubmitJobRequestV2,
+)
 from aind_data_transfer_service.models.core import (
     validation_context as validation_context_v2,
 )
@@ -95,6 +105,27 @@ def get_project_names() -> List[str]:
     return project_names
 
 
+def set_oauth() -> OAuth:
+    """Set up OAuth for the service"""
+    secrets_client = boto3.client("secretsmanager")
+    secret_response = secrets_client.get_secret_value(
+        SecretId=os.getenv("AIND_SSO_SECRET_NAME")
+    )
+    secret_value = json.loads(secret_response["SecretString"])
+    for secrets in secret_value:
+        os.environ[secrets] = secret_value[secrets]
+    config = Config()
+    oauth = OAuth(config)
+    oauth.register(
+        name="azure",
+        client_id=config("CLIENT_ID"),
+        client_secret=config("CLIENT_SECRET"),
+        server_metadata_url=config("AUTHORITY"),
+        client_kwargs={"scope": "openid email profile"},
+    )
+    return oauth
+
+
 def get_job_types(version: Optional[str] = None) -> List[str]:
     """Get a list of job_types"""
     params = get_parameter_infos(version)
@@ -228,7 +259,7 @@ async def validate_csv(request: Request):
     data = csv_io.getvalue()
     csv_reader = csv.DictReader(io.StringIO(data))
     params = AirflowDagRunsRequestParameters(
-        dag_ids=["transform_and_upload_v2"],
+        dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
         states=["running", "queued"],
     )
     _, current_jobs = await get_airflow_jobs(
@@ -324,7 +355,8 @@ async def validate_json_v2(request: Request):
     content = await request.json()
     try:
         params = AirflowDagRunsRequestParameters(
-            dag_ids=["transform_and_upload_v2"],
+            dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
+            states=["running", "queued"],
         )
         _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
         context = {
@@ -439,7 +471,8 @@ async def submit_jobs_v2(request: Request):
     content = await request.json()
     try:
         params = AirflowDagRunsRequestParameters(
-            dag_ids=["transform_and_upload_v2"],
+            dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
+            states=["running", "queued"],
         )
         _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
         context = {
@@ -902,10 +935,10 @@ async def get_task_logs(request: Request):
 async def index(request: Request):
     """GET|POST /: form handler"""
     return templates.TemplateResponse(
+        request=request,
         name="index.html",
         context=(
             {
-                "request": request,
                 "project_names_url": project_names_url,
             }
         ),
@@ -918,10 +951,10 @@ async def job_tasks_table(request: Request):
     response_tasks_json = json.loads(response_tasks.body)
     data = response_tasks_json.get("data")
     return templates.TemplateResponse(
+        request=request,
         name="job_tasks_table.html",
         context=(
             {
-                "request": request,
                 "status_code": response_tasks.status_code,
                 "message": response_tasks_json.get("message"),
                 "errors": data.get("errors", []),
@@ -938,10 +971,10 @@ async def task_logs(request: Request):
     response_tasks_json = json.loads(response_tasks.body)
     data = response_tasks_json.get("data")
     return templates.TemplateResponse(
+        request=request,
         name="task_logs.html",
         context=(
             {
-                "request": request,
                 "status_code": response_tasks.status_code,
                 "message": response_tasks_json.get("message"),
                 "errors": data.get("errors", []),
@@ -955,10 +988,10 @@ async def jobs(request: Request):
     """Get Job Status page with pagination"""
     dag_ids = AirflowDagRunsRequestParameters.model_fields["dag_ids"].default
     return templates.TemplateResponse(
+        request=request,
         name="job_status.html",
         context=(
             {
-                "request": request,
                 "project_names_url": project_names_url,
                 "dag_ids": dag_ids,
             }
@@ -969,10 +1002,10 @@ async def jobs(request: Request):
 async def job_params(request: Request):
     """Get Job Parameters page"""
     return templates.TemplateResponse(
+        request=request,
         name="job_params.html",
         context=(
             {
-                "request": request,
                 "project_names_url": os.getenv(
                     "AIND_METADATA_SERVICE_PROJECT_NAMES_URL"
                 ),
@@ -987,8 +1020,7 @@ async def download_job_template(_: Request):
     """Get job template as xlsx filestream for download"""
 
     try:
-        job_template = JobUploadTemplate()
-        xl_io = job_template.excel_sheet_filestream
+        xl_io = JobUploadTemplate.create_excel_sheet_filestream()
         return StreamingResponse(
             io.BytesIO(xl_io.getvalue()),
             media_type=(
@@ -997,7 +1029,7 @@ async def download_job_template(_: Request):
             ),
             headers={
                 "Content-Disposition": (
-                    f"attachment; filename={job_template.FILE_NAME}"
+                    f"attachment; filename={JobUploadTemplate.FILE_NAME}"
                )
            },
            status_code=200,
@@ -1089,6 +1121,60 @@ def get_parameter(request: Request):
     )
 
 
+async def admin(request: Request):
+    """Get admin page if authenticated, else redirect to login."""
+    user = request.session.get("user")
+    if os.getenv("ENV_NAME") == "local":
+        user = {"name": "local user"}
+    if user:
+        return templates.TemplateResponse(
+            request=request,
+            name="admin.html",
+            context=(
+                {
+                    "project_names_url": project_names_url,
+                    "user_name": user.get("name", "unknown"),
+                    "user_email": user.get("email", "unknown"),
+                }
+            ),
+        )
+    return RedirectResponse(url="/login")
+
+
+async def login(request: Request):
+    """Redirect to Azure login page"""
+    oauth = set_oauth()
+    redirect_uri = request.url_for("auth")
+    response = await oauth.azure.authorize_redirect(request, redirect_uri)
+    return response
+
+
+async def logout(request: Request):
+    """Logout user and clear session"""
+    request.session.pop("user", None)
+    return RedirectResponse(url="/")
+
+
+async def auth(request: Request):
+    """Authenticate user and store user info in session"""
+    oauth = set_oauth()
+    try:
+        token = await oauth.azure.authorize_access_token(request)
+        user = token.get("userinfo")
+        if not user:
+            raise ValueError("User info not found in access token.")
+        request.session["user"] = dict(user)
+    except Exception as error:
+        return JSONResponse(
+            content={
+                "message": "Error Logging In",
+                "data": {"error": f"{error.__class__.__name__}{error.args}"},
+            },
+            status_code=500,
+        )
+    return RedirectResponse(url="/admin")
+
+
 routes = [
     Route("/", endpoint=index, methods=["GET", "POST"]),
     Route("/api/validate_csv", endpoint=validate_csv_legacy, methods=["POST"]),
@@ -1131,6 +1217,11 @@ routes = [
         endpoint=download_job_template,
         methods=["GET"],
     ),
+    Route("/login", login, methods=["GET"]),
+    Route("/logout", logout, methods=["GET"]),
+    Route("/auth", auth, methods=["GET"]),
+    Route("/admin", admin, methods=["GET"]),
 ]
 
 app = Starlette(routes=routes)
+app.add_middleware(SessionMiddleware, secret_key=None)