aind-data-transfer-service 1.14.0__tar.gz → 1.15.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aind-data-transfer-service might be problematic.
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.flake8 +2 -0
- {aind_data_transfer_service-1.14.0/src/aind_data_transfer_service.egg-info → aind_data_transfer_service-1.15.0}/PKG-INFO +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/example1.csv +2 -2
- aind_data_transfer_service-1.15.0/docs/examples/remove_source_folders.py +80 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/Contributing.rst +1 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/pyproject.toml +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/__init__.py +1 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/configs/job_upload_template.py +22 -22
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/models/core.py +11 -7
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/server.py +91 -6
- aind_data_transfer_service-1.15.0/src/aind_data_transfer_service/templates/admin.html +36 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/templates/index.html +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/templates/job_params.html +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/templates/job_status.html +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0/src/aind_data_transfer_service.egg-info}/PKG-INFO +2 -1
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service.egg-info/SOURCES.txt +3 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service.egg-info/requires.txt +1 -0
- aind_data_transfer_service-1.15.0/tests/resources/get_secrets_response.json +19 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_core.py +18 -12
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_job_upload_template.py +3 -5
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_server.py +197 -8
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.github/ISSUE_TEMPLATE/user-story.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.github/workflows/add_issue_to_project_board.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.github/workflows/publish_dev.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.github/workflows/publish_main.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.github/workflows/run_dev_tests.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.github/workflows/run_main_tests.yml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.gitignore +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/.readthedocs.yaml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/CODE_OF_CONDUCT.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/Dockerfile +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/LICENSE +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/README.md +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/Makefile +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/diagrams/system_container.png +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/diagrams/system_container.puml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/diagrams/system_context.png +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/diagrams/system_context.puml +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/basic_upload.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/behavior_videos_compression.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/custom_codeocean_pipeline_settings.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/custom_metadata_mapper_settings.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/hcr_example.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/skip_s3_check.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/upload_with_custom_slurm_settings.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/upload_with_notification.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/make.bat +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/UserGuideV1.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/UserGuideV2.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/_static/dark-logo.svg +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/_static/favicon.ico +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/_static/light-logo.svg +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/aind_data_transfer_service.configs.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/aind_data_transfer_service.hpc.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/aind_data_transfer_service.models.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/aind_data_transfer_service.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/conf.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/index.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/modules.rst +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/setup.cfg +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/setup.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/configs/__init__.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/configs/csv_handler.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/configs/job_configs.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/hpc/__init__.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/hpc/client.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/hpc/models.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/log_handler.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/models/__init__.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/models/internal.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/templates/job_tasks_table.html +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/templates/task_logs.html +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service.egg-info/dependency_links.txt +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service.egg-info/top_level.txt +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/__init__.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/airflow_dag_run_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/airflow_dag_runs_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/airflow_task_instances_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/describe_parameters_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/get_parameter_response.json +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/job_upload_template.xlsx +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/legacy_sample.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/legacy_sample2.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/new_sample.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample.xlsx +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample_alt_modality_case.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample_empty_rows.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample_empty_rows.xlsx +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample_empty_rows_2.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample_invalid_ext.txt +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample_malformed.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample_malformed.xlsx +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/resources/sample_malformed_2.csv +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_configs.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_csv_handler.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_hpc_client.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_hpc_models.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_internal.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_log_handler.py +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_server/Dockerfile +0 -0
- {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_server/db.json +0 -0
{aind_data_transfer_service-1.14.0/src/aind_data_transfer_service.egg-info → aind_data_transfer_service-1.15.0}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aind-data-transfer-service
-Version: 1.14.0
+Version: 1.15.0
 Summary: Service that handles requests to upload data to the cloud
 Author: Allen Institute for Neural Dynamics
 License: MIT
@@ -38,6 +38,7 @@ Requires-Dist: wtforms; extra == "server"
 Requires-Dist: requests==2.25.0; extra == "server"
 Requires-Dist: openpyxl; extra == "server"
 Requires-Dist: python-logging-loki; extra == "server"
+Requires-Dist: authlib; extra == "server"
 Dynamic: license-file

 # aind-data-transfer-service

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/examples/example1.csv
RENAMED
@@ -1,4 +1,4 @@
 project_name, modality0.capsule_id, modality0, modality0.input_source, modality1, modality1.input_source, s3_bucket, subject_id, platform, acq_datetime, job_type
 Ephys Platform, , ecephys, dir/data_set_1, ,, default, 123454, ecephys, 2020-10-10 14:10:10, ecephys
-Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, behavior-videos, dir/data_set_2, MRI, dir/data_set_3, open, 123456, behavior, 10/13/2020 1:10:10 PM,
-Behavior Platform, , behavior-videos, dir/data_set_2, behavior, dir/data_set_3, private, 123456, behavior, 10/13/2020 1:10:10 PM,
+Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, behavior-videos, dir/data_set_2, MRI, dir/data_set_3, open, 123456, behavior, 10/13/2020 1:10:10 PM, default
+Behavior Platform, , behavior-videos, dir/data_set_2, behavior, dir/data_set_3, private, 123456, behavior, 10/13/2020 1:10:10 PM, default

aind_data_transfer_service-1.15.0/docs/examples/remove_source_folders.py
@@ -0,0 +1,80 @@
+"""
+WARNING: This deletes raw data. Please use caution.
+This example demonstrates how to remove the source folders after the data
+has been uploaded to S3.
+"""
+
+from datetime import datetime
+
+import requests
+from aind_data_schema_models.modalities import Modality
+from aind_data_schema_models.platforms import Platform
+
+from aind_data_transfer_service.models.core import (
+    SubmitJobRequestV2,
+    Task,
+    UploadJobConfigsV2,
+)
+
+# The job_type contains the default settings for compression and Code Ocean
+# pipelines.
+job_type = "ecephys"
+
+acq_datetime = datetime(2023, 4, 3, 18, 17, 7)
+
+remove_source_folders = Task(skip_task=False)
+
+ecephys_task = Task(
+    job_settings={
+        "input_source": (
+            "/allen/aind/scratch/svc_aind_upload/test_data_sets/"
+            "ecephys/655019_2023-04-03_18-17-07"
+        )
+    }
+)
+
+modality_transformation_settings = {"ecephys": ecephys_task}
+
+gather_preliminary_metadata = Task(
+    job_settings={
+        "metadata_dir": (
+            "/allen/aind/scratch/svc_aind_upload/test_data_sets/"
+            "ecephys/655019_2023-04-03_18-17-07"
+        )
+    }
+)
+
+
+upload_job_configs_v2 = UploadJobConfigsV2(
+    job_type=job_type,
+    project_name="Ephys Platform",
+    platform=Platform.ECEPHYS,
+    modalities=[Modality.ECEPHYS],
+    subject_id="655019",
+    acq_datetime=acq_datetime,
+    tasks={
+        "modality_transformation_settings": modality_transformation_settings,
+        "gather_preliminary_metadata": gather_preliminary_metadata,
+        "remove_source_folders": remove_source_folders
+    },
+)
+
+submit_request_v2 = SubmitJobRequestV2(
+    upload_jobs=[upload_job_configs_v2],
+)
+
+post_request_content = submit_request_v2.model_dump(
+    mode="json", exclude_none=True
+)
+
+# Please use the production endpoint for submitting jobs and the dev endpoint
+# for running tests.
+# endpoint = "http://aind-data-transfer-service"
+endpoint = "http://aind-data-transfer-service-dev" # For testing
+
+submit_job_response = requests.post(
+    url=f"{endpoint}/api/v2/submit_jobs",
+    json=post_request_content,
+)
+print(submit_job_response.status_code)
+print(submit_job_response.json())

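Because the new example runs a destructive removal step, a first submission can keep that step disabled. The sketch below is not part of the release diff; it only reuses the `Task` model from the example above and assumes that `skip_task=True` causes the pipeline to skip the removal step.

```python
# Hypothetical dry-run variant of the example above (not part of the diff).
# Assumption: a Task created with skip_task=True is skipped by the pipeline,
# so the upload runs but the source folders are left in place.
from aind_data_transfer_service.models.core import Task

remove_source_folders = Task(skip_task=True)  # verify the upload first

# After confirming the data landed in S3, resubmit with skip_task=False:
# remove_source_folders = Task(skip_task=False)
```
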
{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/docs/source/Contributing.rst
RENAMED
@@ -71,6 +71,7 @@ To run uvicorn locally:
 export ENV_NAME='local'
 export AWS_DEFAULT_REGION='us-west-2'
 export AIND_AIRFLOW_PARAM_PREFIX='/aind/dev/airflow/variables/job_types'
+export AIND_SSO_SECRET_NAME='/aind/dev/data_transfer_service/sso/secrets'
 uvicorn aind_data_transfer_service.server:app --host 0.0.0.0 --port 5000 --reload

 You can now access aind-data-transfer-service at

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/configs/job_upload_template.py
RENAMED
@@ -2,7 +2,7 @@

 import datetime
 from io import BytesIO
-from typing import Any, Dict, List
+from typing import Any, ClassVar, Dict, List

 from aind_data_schema_models.modalities import Modality
 from aind_data_schema_models.platforms import Platform
@@ -10,16 +10,16 @@ from openpyxl import Workbook
 from openpyxl.styles import Font
 from openpyxl.utils import get_column_letter
 from openpyxl.worksheet.datavalidation import DataValidation
+from pydantic import BaseModel


-
-class JobUploadTemplate:
+class JobUploadTemplate(BaseModel):
     """Class to configure and create xlsx job upload template"""

-    FILE_NAME = "job_upload_template.xlsx"
-
-
-
+    FILE_NAME: ClassVar[str] = "job_upload_template.xlsx"
+    _NUM_TEMPLATE_ROWS: ClassVar[int] = 20
+    _XLSX_DATETIME_FORMAT: ClassVar[str] = "YYYY-MM-DDTHH:mm:ss"
+    _HEADERS: ClassVar[List[str]] = [
         "job_type",
         "project_name",
         "platform",
@@ -31,7 +31,7 @@ class JobUploadTemplate:
         "modality1",
         "modality1.input_source",
     ]
-
+    _SAMPLE_JOBS: ClassVar[List[List[Any]]] = [
         [
             "default",
             "Behavior Platform",
@@ -68,8 +68,8 @@ class JobUploadTemplate:
         ],
     ]

-    @
-    def
+    @classmethod
+    def _get_validators(cls) -> List[Dict[str, Any]]:
         """
         Returns
         -------
@@ -82,36 +82,36 @@ class JobUploadTemplate:
                 "name": "platform",
                 "type": "list",
                 "options": list(Platform.abbreviation_map.keys()),
-                "column_indexes": [
+                "column_indexes": [cls._HEADERS.index("platform")],
             },
             {
                 "name": "modality",
                 "type": "list",
                 "options": list(Modality.abbreviation_map.keys()),
                 "column_indexes": [
-
-
+                    cls._HEADERS.index("modality0"),
+                    cls._HEADERS.index("modality1"),
                 ],
             },
             {
                 "name": "datetime",
                 "type": "date",
-                "column_indexes": [
+                "column_indexes": [cls._HEADERS.index("acq_datetime")],
             },
         ]

-    @
-    def
+    @classmethod
+    def create_excel_sheet_filestream(cls) -> BytesIO:
         """Create job template as xlsx filestream"""
         xl_io = BytesIO()
         workbook = Workbook()
         workbook.iso_dates = True
         worksheet = workbook.active
-        worksheet.append(
-        for job in
+        worksheet.append(cls._HEADERS)
+        for job in cls._SAMPLE_JOBS:
             worksheet.append(job)
         # data validators
-        for validator in
+        for validator in cls._get_validators():
             dv_type = validator["type"]
             dv_name = validator["name"]
             dv_params = {
@@ -127,17 +127,17 @@ class JobUploadTemplate:
                 dv_params["prompt"] = f"Select a {dv_name} from the dropdown"
             elif dv_type == "date":
                 dv_params["prompt"] = "Provide a {} using {}".format(
-                    dv_name,
+                    dv_name, cls._XLSX_DATETIME_FORMAT
                 )
             dv = DataValidation(**dv_params)
             for i in validator["column_indexes"]:
                 col = get_column_letter(i + 1)
-                col_range = f"{col}2:{col}{
+                col_range = f"{col}2:{col}{cls._NUM_TEMPLATE_ROWS}"
                 dv.add(col_range)
                 if dv_type != "date":
                     continue
                 for (cell,) in worksheet[col_range]:
-                    cell.number_format =
+                    cell.number_format = cls._XLSX_DATETIME_FORMAT
             worksheet.add_data_validation(dv)
         # formatting
         bold = Font(bold=True)

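The job_upload_template.py changes above turn the template constants into `ClassVar`s and the builder methods into classmethods, so callers no longer instantiate `JobUploadTemplate`. A minimal usage sketch of the new API (not part of the diff), writing the template to a local file:

```python
# Minimal sketch of the refactored classmethod API (not part of the diff).
from aind_data_transfer_service.configs.job_upload_template import (
    JobUploadTemplate,
)

# Build the xlsx template in memory and write it next to the script.
xl_io = JobUploadTemplate.create_excel_sheet_filestream()
with open(JobUploadTemplate.FILE_NAME, "wb") as f:
    f.write(xl_io.getvalue())
```
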
{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/models/core.py
RENAMED
@@ -290,11 +290,15 @@ class SubmitJobRequestV2(BaseSettings):
         # check against any jobs in the context
         current_jobs = (info.context or dict()).get("current_jobs", list())
         for job in current_jobs:
-
-
-            prefix
-
-
-
-
+            jobs_to_check = job.get("upload_jobs", [job])
+            for j in jobs_to_check:
+                prefix = j.get("s3_prefix")
+                if (
+                    prefix is not None
+                    and prefix in jobs_map
+                    and json.dumps(j, sort_keys=True) in jobs_map[prefix]
+                ):
+                    raise ValueError(
+                        f"Job is already running/queued for {prefix}"
+                    )
         return self

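The rewritten validator in models/core.py now also unpacks batched payloads through their `upload_jobs` key before checking for duplicates against the jobs already running or queued in Airflow. A standalone sketch of that comparison (not part of the diff; `jobs_map` is assumed to map each incoming job's `s3_prefix` to a set of canonical JSON dumps, mirroring how the validator compares jobs):

```python
import json


def is_duplicate(current_job: dict, jobs_map: dict) -> bool:
    """Sketch of the duplicate check performed by the validator above.

    ``jobs_map`` is assumed to map an s3_prefix to the set of
    ``json.dumps(job, sort_keys=True)`` strings for the jobs being submitted.
    """
    # A conf that batches jobs (as run_list_of_jobs appears to) nests them
    # under "upload_jobs"; otherwise the conf is treated as a single job.
    for j in current_job.get("upload_jobs", [current_job]):
        prefix = j.get("s3_prefix")
        if (
            prefix is not None
            and prefix in jobs_map
            and json.dumps(j, sort_keys=True) in jobs_map[prefix]
        ):
            return True
    return False
```
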
{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/server.py
RENAMED
@@ -15,6 +15,7 @@ from aind_data_transfer_models import (
     __version__ as aind_data_transfer_models_version,
 )
 from aind_data_transfer_models.core import SubmitJobRequest, validation_context
+from authlib.integrations.starlette_client import OAuth
 from botocore.exceptions import ClientError
 from fastapi import Request
 from fastapi.responses import JSONResponse, StreamingResponse
@@ -23,6 +24,9 @@ from httpx import AsyncClient
 from openpyxl import load_workbook
 from pydantic import SecretStr, ValidationError
 from starlette.applications import Starlette
+from starlette.config import Config
+from starlette.middleware.sessions import SessionMiddleware
+from starlette.responses import RedirectResponse
 from starlette.routing import Route

 from aind_data_transfer_service import OPEN_DATA_BUCKET_NAME
@@ -95,6 +99,27 @@ def get_project_names() -> List[str]:
     return project_names


+def set_oauth() -> OAuth:
+    """Set up OAuth for the service"""
+    secrets_client = boto3.client("secretsmanager")
+    secret_response = secrets_client.get_secret_value(
+        SecretId=os.getenv("AIND_SSO_SECRET_NAME")
+    )
+    secret_value = json.loads(secret_response["SecretString"])
+    for secrets in secret_value:
+        os.environ[secrets] = secret_value[secrets]
+    config = Config()
+    oauth = OAuth(config)
+    oauth.register(
+        name="azure",
+        client_id=config("CLIENT_ID"),
+        client_secret=config("CLIENT_SECRET"),
+        server_metadata_url=config("AUTHORITY"),
+        client_kwargs={"scope": "openid email profile"},
+    )
+    return oauth
+
+
 def get_job_types(version: Optional[str] = None) -> List[str]:
     """Get a list of job_types"""
     params = get_parameter_infos(version)
@@ -228,7 +253,7 @@ async def validate_csv(request: Request):
     data = csv_io.getvalue()
     csv_reader = csv.DictReader(io.StringIO(data))
     params = AirflowDagRunsRequestParameters(
-        dag_ids=["transform_and_upload_v2"],
+        dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
         states=["running", "queued"],
     )
     _, current_jobs = await get_airflow_jobs(
@@ -324,7 +349,8 @@ async def validate_json_v2(request: Request):
     content = await request.json()
     try:
         params = AirflowDagRunsRequestParameters(
-            dag_ids=["transform_and_upload_v2"
+            dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
+            states=["running", "queued"],
         )
         _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
         context = {
@@ -439,7 +465,8 @@ async def submit_jobs_v2(request: Request):
     content = await request.json()
     try:
         params = AirflowDagRunsRequestParameters(
-            dag_ids=["transform_and_upload_v2"
+            dag_ids=["transform_and_upload_v2", "run_list_of_jobs"],
+            states=["running", "queued"],
         )
         _, current_jobs = await get_airflow_jobs(params=params, get_confs=True)
         context = {
@@ -987,8 +1014,7 @@ async def download_job_template(_: Request):
     """Get job template as xlsx filestream for download"""

     try:
-
-        xl_io = job_template.excel_sheet_filestream
+        xl_io = JobUploadTemplate.create_excel_sheet_filestream()
         return StreamingResponse(
             io.BytesIO(xl_io.getvalue()),
             media_type=(
@@ -997,7 +1023,7 @@ async def download_job_template(_: Request):
             ),
             headers={
                 "Content-Disposition": (
-                    f"attachment; filename={
+                    f"attachment; filename={JobUploadTemplate.FILE_NAME}"
                 )
             },
             status_code=200,
@@ -1089,6 +1115,60 @@ def get_parameter(request: Request):
     )


+async def admin(request: Request):
+    """Get admin page if authenticated, else redirect to login."""
+    user = request.session.get("user")
+    if os.getenv("ENV_NAME") == "local":
+        user = {"name": "local user"}
+    if user:
+        return templates.TemplateResponse(
+            name="admin.html",
+            context=(
+                {
+                    "request": request,
+                    "project_names_url": project_names_url,
+                    "user_name": user.get("name", "unknown"),
+                    "user_email": user.get("email", "unknown"),
+                }
+            ),
+        )
+    return RedirectResponse(url="/login")
+
+
+async def login(request: Request):
+    """Redirect to Azure login page"""
+    oauth = set_oauth()
+    redirect_uri = request.url_for("auth")
+    response = await oauth.azure.authorize_redirect(request, redirect_uri)
+    return response
+
+
+async def logout(request: Request):
+    """Logout user and clear session"""
+    request.session.pop("user", None)
+    return RedirectResponse(url="/")
+
+
+async def auth(request: Request):
+    """Authenticate user and store user info in session"""
+    oauth = set_oauth()
+    try:
+        token = await oauth.azure.authorize_access_token(request)
+        user = token.get("userinfo")
+        if not user:
+            raise ValueError("User info not found in access token.")
+        request.session["user"] = dict(user)
+    except Exception as error:
+        return JSONResponse(
+            content={
+                "message": "Error Logging In",
+                "data": {"error": f"{error.__class__.__name__}{error.args}"},
+            },
+            status_code=500,
+        )
+    return RedirectResponse(url="/admin")
+
+
 routes = [
     Route("/", endpoint=index, methods=["GET", "POST"]),
     Route("/api/validate_csv", endpoint=validate_csv_legacy, methods=["POST"]),
@@ -1131,6 +1211,11 @@ routes = [
         endpoint=download_job_template,
         methods=["GET"],
     ),
+    Route("/login", login, methods=["GET"]),
+    Route("/logout", logout, methods=["GET"]),
+    Route("/auth", auth, methods=["GET"]),
+    Route("/admin", admin, methods=["GET"]),
 ]

 app = Starlette(routes=routes)
+app.add_middleware(SessionMiddleware, secret_key=None)

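The new `set_oauth` helper above reads an SSO secret from AWS Secrets Manager, exports its keys as environment variables, and registers an Azure client with authlib. A minimal sketch (not part of the diff) of the secret shape it appears to expect, matching the `get_secrets_response.json` fixture added further below:

```python
# Sketch of the SSO secret consumed by set_oauth (not part of the diff).
# The secret named by AIND_SSO_SECRET_NAME holds a JSON string; each key is
# exported to the environment and then read back via starlette.config.Config.
import json
import os

secret_string = (
    '{"CLIENT_ID": "client_id", '
    '"CLIENT_SECRET": "client_secret", '
    '"AUTHORITY": "https://authority"}'
)
for key, value in json.loads(secret_string).items():
    os.environ[key] = value  # e.g. CLIENT_ID, CLIENT_SECRET, AUTHORITY
```
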
aind_data_transfer_service-1.15.0/src/aind_data_transfer_service/templates/admin.html
@@ -0,0 +1,36 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <meta charset="UTF-8">
+  <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet">
+  <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js"></script>
+  <title>{% block title %} {% endblock %} AIND Data Transfer Service Admin</title>
+  <style>
+    body {
+      margin: 20px;
+      font-family: arial, sans-serif;
+    }
+    nav {
+      height: 40px;
+    }
+  </style>
+</head>
+<body>
+  <nav>
+    <a href="/">Submit Jobs</a> |
+    <a href="/jobs">Job Status</a> |
+    <a href="/job_params">Job Parameters</a> |
+    <a title="Download job template as .xslx" href="/api/job_upload_template" download>Job Submit Template</a> |
+    <a title="List of project names" href="{{ project_names_url }}" target="_blank">Project Names</a> |
+    <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io"
+      target="_blank">Help</a> |
+    <a href="/admin">Admin</a> |
+    <a href="/logout">Log out</a>
+  </nav>
+  <div>
+    <h3>Admin</h3>
+    <div>Hello {{user_name}}, welcome to the admin page</div>
+    <div>Email: {{user_email}}</div>
+  </div>
+</body>
+</html>

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/templates/index.html
RENAMED
@@ -49,7 +49,8 @@
         <a href="/job_params">Job Parameters</a> |
         <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
         <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
-        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+        <a href="/admin">Admin</a>
     </nav>
     <br>
     <div>

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/templates/job_params.html
RENAMED
@@ -34,7 +34,8 @@
         <a href="/job_params">Job Parameters</a> |
         <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
         <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
-        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+        <a href="/admin">Admin</a>
     </nav>
     <div class="content">
     <h4 class="mb-2">

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service/templates/job_status.html
RENAMED
@@ -32,7 +32,8 @@
         <a href="/job_params">Job Parameters</a> |
         <a title="Download job template as .xslx" href= "/api/job_upload_template" download>Job Submit Template</a> |
         <a title="List of project names" href= "{{ project_names_url }}" target="_blank" >Project Names</a> |
-        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a>
+        <a title="For more information click here" href="https://aind-data-transfer-service.readthedocs.io" target="_blank" >Help</a> |
+        <a href="/admin">Admin</a>
     </nav>
     <div class="content">
     <!-- display total entries -->

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0/src/aind_data_transfer_service.egg-info}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aind-data-transfer-service
-Version: 1.14.0
+Version: 1.15.0
 Summary: Service that handles requests to upload data to the cloud
 Author: Allen Institute for Neural Dynamics
 License: MIT
@@ -38,6 +38,7 @@ Requires-Dist: wtforms; extra == "server"
 Requires-Dist: requests==2.25.0; extra == "server"
 Requires-Dist: openpyxl; extra == "server"
 Requires-Dist: python-logging-loki; extra == "server"
+Requires-Dist: authlib; extra == "server"
 Dynamic: license-file

 # aind-data-transfer-service

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/src/aind_data_transfer_service.egg-info/SOURCES.txt
RENAMED
@@ -27,6 +27,7 @@ docs/examples/custom_codeocean_pipeline_settings.py
 docs/examples/custom_metadata_mapper_settings.py
 docs/examples/example1.csv
 docs/examples/hcr_example.py
+docs/examples/remove_source_folders.py
 docs/examples/skip_s3_check.py
 docs/examples/upload_with_custom_slurm_settings.py
 docs/examples/upload_with_notification.py
@@ -61,6 +62,7 @@ src/aind_data_transfer_service/hpc/models.py
 src/aind_data_transfer_service/models/__init__.py
 src/aind_data_transfer_service/models/core.py
 src/aind_data_transfer_service/models/internal.py
+src/aind_data_transfer_service/templates/admin.html
 src/aind_data_transfer_service/templates/index.html
 src/aind_data_transfer_service/templates/job_params.html
 src/aind_data_transfer_service/templates/job_status.html
@@ -81,6 +83,7 @@ tests/resources/airflow_dag_runs_response.json
 tests/resources/airflow_task_instances_response.json
 tests/resources/describe_parameters_response.json
 tests/resources/get_parameter_response.json
+tests/resources/get_secrets_response.json
 tests/resources/job_upload_template.xlsx
 tests/resources/legacy_sample.csv
 tests/resources/legacy_sample2.csv

aind_data_transfer_service-1.15.0/tests/resources/get_secrets_response.json
@@ -0,0 +1,19 @@
+{
+  "ARN": "arn_value",
+  "Name": "secret_name",
+  "VersionId": "version_id",
+  "SecretString": "{\"CLIENT_ID\":\"client_id\",\"CLIENT_SECRET\":\"client_secret\",\"AUTHORITY\":\"https://authority\"}",
+  "VersionStages": ["AWSCURRENT"],
+  "CreatedDate": "2025-04-15T16:44:07.279000Z",
+  "ResponseMetadata": {
+    "RequestId": "request_id",
+    "HTTPStatusCode": 200,
+    "HTTPHeaders": {
+      "x-amzn-requestid": "2b090d64-c92d-48c5-a43a-abf5696c815e",
+      "content-type": "application/x-amz-json-1.1",
+      "content-length": "748",
+      "date": "Wed, 23 Apr 2025 21:19:04 GMT"
+    },
+    "RetryAttempts": 0
+  }
+}

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_core.py
RENAMED
@@ -434,21 +434,27 @@ class TestSubmitJobRequestV2(unittest.TestCase):
         submitted_job_request = SubmitJobRequestV2(
             upload_jobs=[self.example_upload_config]
         )
-
+        current_jobs_1 = [
             j.model_dump(mode="json", exclude_none=True)
             for j in submitted_job_request.upload_jobs
         ]
-
-
-
-
-
-
-
-
-
-        err_msg
-
+        current_jobs_2 = [
+            submitted_job_request.model_dump(mode="json", exclude_none=True)
+        ]
+        for current_jobs in [current_jobs_1, current_jobs_2]:
+            with self.assertRaises(ValidationError) as err:
+                with validation_context({"current_jobs": current_jobs}):
+                    SubmitJobRequestV2(
+                        upload_jobs=[self.example_upload_config]
+                    )
+            err_msg = json.loads(err.exception.json())[0]["msg"]
+            self.assertEqual(
+                (
+                    "Value error, Job is already running/queued for "
+                    "behavior_123456_2020-10-13_13-10-10"
+                ),
+                err_msg,
+            )


 if __name__ == "__main__":

{aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.15.0}/tests/test_job_upload_template.py
RENAMED
@@ -47,11 +47,9 @@ class TestJobUploadTemplate(unittest.TestCase):
     def setUpClass(cls):
         """Set up test class"""
         expected_lines = cls._read_xl_helper(SAMPLE_JOB_TEMPLATE)
-        job_template = JobUploadTemplate()
         (template_lines, template_validators) = cls._read_xl_helper(
-
+            JobUploadTemplate.create_excel_sheet_filestream(), True
         )
-        cls.job_template = job_template
         cls.expected_lines = expected_lines
         cls.template_lines = template_lines
         cls.template_validators = template_validators
@@ -67,12 +65,12 @@ class TestJobUploadTemplate(unittest.TestCase):
            for r in validator["ranges"]:
                rb = (col, *_) = range_boundaries(r)
                self.assertTupleEqual(
-                    (col, 2, col, JobUploadTemplate.
+                    (col, 2, col, JobUploadTemplate._NUM_TEMPLATE_ROWS), rb
                )
                validator["column_indexes"].append(col - 1)
            del validator["ranges"]
        self.assertCountEqual(
-
+            JobUploadTemplate._get_validators(), self.template_validators
        )
