aind_data_transfer_service-1.12.0.tar.gz
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aind-data-transfer-service might be problematic.
- aind_data_transfer_service-1.12.0/.flake8 +7 -0
- aind_data_transfer_service-1.12.0/.github/ISSUE_TEMPLATE/bug_report.md +38 -0
- aind_data_transfer_service-1.12.0/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- aind_data_transfer_service-1.12.0/.github/ISSUE_TEMPLATE/user-story.md +27 -0
- aind_data_transfer_service-1.12.0/.github/workflows/add_issue_to_project_board.yml +15 -0
- aind_data_transfer_service-1.12.0/.github/workflows/publish_dev.yml +34 -0
- aind_data_transfer_service-1.12.0/.github/workflows/publish_main.yml +56 -0
- aind_data_transfer_service-1.12.0/.github/workflows/run_dev_tests.yml +26 -0
- aind_data_transfer_service-1.12.0/.github/workflows/run_main_tests.yml +43 -0
- aind_data_transfer_service-1.12.0/.gitignore +142 -0
- aind_data_transfer_service-1.12.0/.readthedocs.yaml +17 -0
- aind_data_transfer_service-1.12.0/Dockerfile +14 -0
- aind_data_transfer_service-1.12.0/LICENSE +21 -0
- aind_data_transfer_service-1.12.0/PKG-INFO +49 -0
- aind_data_transfer_service-1.12.0/README.md +9 -0
- aind_data_transfer_service-1.12.0/docs/Makefile +20 -0
- aind_data_transfer_service-1.12.0/docs/diagrams/system_container.png +0 -0
- aind_data_transfer_service-1.12.0/docs/diagrams/system_container.puml +26 -0
- aind_data_transfer_service-1.12.0/docs/diagrams/system_context.png +0 -0
- aind_data_transfer_service-1.12.0/docs/diagrams/system_context.puml +19 -0
- aind_data_transfer_service-1.12.0/docs/examples/example1.csv +4 -0
- aind_data_transfer_service-1.12.0/docs/make.bat +35 -0
- aind_data_transfer_service-1.12.0/docs/source/Contributing.rst +253 -0
- aind_data_transfer_service-1.12.0/docs/source/UserGuide.rst +530 -0
- aind_data_transfer_service-1.12.0/docs/source/_static/dark-logo.svg +129 -0
- aind_data_transfer_service-1.12.0/docs/source/_static/favicon.ico +0 -0
- aind_data_transfer_service-1.12.0/docs/source/_static/light-logo.svg +128 -0
- aind_data_transfer_service-1.12.0/docs/source/aind_data_transfer_service.configs.rst +37 -0
- aind_data_transfer_service-1.12.0/docs/source/aind_data_transfer_service.hpc.rst +29 -0
- aind_data_transfer_service-1.12.0/docs/source/aind_data_transfer_service.models.rst +29 -0
- aind_data_transfer_service-1.12.0/docs/source/aind_data_transfer_service.rst +39 -0
- aind_data_transfer_service-1.12.0/docs/source/conf.py +53 -0
- aind_data_transfer_service-1.12.0/docs/source/index.rst +24 -0
- aind_data_transfer_service-1.12.0/docs/source/modules.rst +7 -0
- aind_data_transfer_service-1.12.0/pyproject.toml +103 -0
- aind_data_transfer_service-1.12.0/setup.cfg +4 -0
- aind_data_transfer_service-1.12.0/setup.py +4 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/__init__.py +9 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/configs/__init__.py +1 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/configs/csv_handler.py +59 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/configs/job_configs.py +545 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/configs/job_upload_template.py +153 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/hpc/__init__.py +1 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/hpc/client.py +151 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/hpc/models.py +492 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/log_handler.py +58 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/models/__init__.py +1 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/models/core.py +300 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/models/internal.py +277 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/server.py +1125 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/templates/index.html +245 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/templates/job_params.html +194 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/templates/job_status.html +323 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/templates/job_tasks_table.html +146 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service/templates/task_logs.html +31 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service.egg-info/PKG-INFO +49 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service.egg-info/SOURCES.txt +86 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service.egg-info/dependency_links.txt +1 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service.egg-info/requires.txt +31 -0
- aind_data_transfer_service-1.12.0/src/aind_data_transfer_service.egg-info/top_level.txt +1 -0
- aind_data_transfer_service-1.12.0/tests/__init__.py +1 -0
- aind_data_transfer_service-1.12.0/tests/resources/airflow_dag_run_response.json +51 -0
- aind_data_transfer_service-1.12.0/tests/resources/airflow_dag_runs_response.json +258 -0
- aind_data_transfer_service-1.12.0/tests/resources/airflow_task_instances_response.json +501 -0
- aind_data_transfer_service-1.12.0/tests/resources/describe_parameters_response.json +74 -0
- aind_data_transfer_service-1.12.0/tests/resources/get_parameter_response.json +24 -0
- aind_data_transfer_service-1.12.0/tests/resources/job_upload_template.xlsx +0 -0
- aind_data_transfer_service-1.12.0/tests/resources/new_sample.csv +4 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample.csv +4 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample.xlsx +0 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample_alt_modality_case.csv +4 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample_empty_rows.csv +7 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample_empty_rows.xlsx +0 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample_invalid_ext.txt +4 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample_malformed.csv +4 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample_malformed.xlsx +0 -0
- aind_data_transfer_service-1.12.0/tests/resources/sample_malformed_2.csv +2 -0
- aind_data_transfer_service-1.12.0/tests/test_configs.py +361 -0
- aind_data_transfer_service-1.12.0/tests/test_core.py +447 -0
- aind_data_transfer_service-1.12.0/tests/test_csv_handler.py +110 -0
- aind_data_transfer_service-1.12.0/tests/test_hpc_client.py +166 -0
- aind_data_transfer_service-1.12.0/tests/test_hpc_models.py +139 -0
- aind_data_transfer_service-1.12.0/tests/test_internal.py +69 -0
- aind_data_transfer_service-1.12.0/tests/test_job_upload_template.py +80 -0
- aind_data_transfer_service-1.12.0/tests/test_log_handler.py +49 -0
- aind_data_transfer_service-1.12.0/tests/test_server/Dockerfile +7 -0
- aind_data_transfer_service-1.12.0/tests/test_server/db.json +1084 -0
- aind_data_transfer_service-1.12.0/tests/test_server.py +2333 -0

aind_data_transfer_service-1.12.0/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,38 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Desktop (please complete the following information):**
+- OS: [e.g. iOS]
+- Browser [e.g. chrome, safari]
+- Version [e.g. 22]
+
+**Smartphone (please complete the following information):**
+- Device: [e.g. iPhone6]
+- OS: [e.g. iOS8.1]
+- Browser [e.g. stock browser, safari]
+- Version [e.g. 22]
+
+**Additional context**
+Add any other context about the problem here.

aind_data_transfer_service-1.12.0/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.

aind_data_transfer_service-1.12.0/.github/ISSUE_TEMPLATE/user-story.md
@@ -0,0 +1,27 @@
+---
+name: User story
+about: This template provides a basic structure for user story issues.
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+# User story
+As a ..., I want to ..., so I can ...
+
+*Ideally, this is in the issue title, but if not, you can put it here. If so, delete this section.*
+
+# Acceptance criteria
+- [ ] This is something that can be verified to show that this user story is satisfied.
+
+# Sprint Ready Checklist
+- [ ] 1. Acceptance criteria defined
+- [ ] 2. Team understands acceptance criteria
+- [ ] 3. Team has defined solution / steps to satisfy acceptance criteria
+- [ ] 4. Acceptance criteria is verifiable / testable
+- [ ] 5. External / 3rd Party dependencies identified
+- [ ] 6. Ticket is prioritized and sized
+
+# Notes
+*Add any helpful notes here.*

aind_data_transfer_service-1.12.0/.github/workflows/add_issue_to_project_board.yml
@@ -0,0 +1,15 @@
+name: Auto add issues to project board
+on:
+  issues:
+    types:
+      - opened
+
+jobs:
+  add-to-project:
+    name: Add issue to project
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/add-to-project@v1.0.2
+        with:
+          project-url: https://github.com/orgs/AllenNeuralDynamics/projects/9
+          github-token: ${{ secrets.SERVICE_TOKEN }}

aind_data_transfer_service-1.12.0/.github/workflows/publish_dev.yml
@@ -0,0 +1,34 @@
+name: Publish dev
+on:
+  push:
+    branches:
+      - dev
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Compute new docker image tag
+        run: |
+          echo "sha_short=$(git rev-parse --short "$GITHUB_SHA")" >> "$GITHUB_ENV"
+          echo "branch=$(echo ${GITHUB_REF_NAME})" >> "$GITHUB_ENV"
+          echo "docker_tag=$(echo ${GITHUB_REF_NAME})-$(git rev-parse --short "$GITHUB_SHA")" >> "$GITHUB_ENV"
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to Github Packages
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build image and push to GitHub Container Registry
+        uses: docker/build-push-action@v3
+        with:
+          # relative path to the place where source code with Dockerfile is located
+          context: .
+          push: true
+          tags: |
+            ghcr.io/allenneuraldynamics/aind-data-transfer-service:${{ env.docker_tag }}
+            ghcr.io/allenneuraldynamics/aind-data-transfer-service:dev
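
Note: the dev workflow tags each image as `<branch>-<short sha>` alongside a floating `dev` tag. A minimal sketch of computing the same tag locally, assuming it runs inside a git checkout (an illustration, not code from this package):

```python
# Sketch: reproduce the dev image tag (<branch>-<short sha>) that the
# workflow writes to $GITHUB_ENV. Assumes a git checkout is present.
import subprocess


def git_out(*args: str) -> str:
    """Run a git command and return its trimmed stdout."""
    return subprocess.run(
        ["git", *args], capture_output=True, text=True, check=True
    ).stdout.strip()


branch = git_out("rev-parse", "--abbrev-ref", "HEAD")  # e.g. "dev"
sha_short = git_out("rev-parse", "--short", "HEAD")
print(f"ghcr.io/allenneuraldynamics/aind-data-transfer-service:{branch}-{sha_short}")
```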

aind_data_transfer_service-1.12.0/.github/workflows/publish_main.yml
@@ -0,0 +1,56 @@
+name: Tag and publish main
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  tag_and_publish:
+    name: Parse version
+    runs-on: ubuntu-latest
+    outputs:
+      pkg_version: ${{ steps.output_version.outputs.pkg_version }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Get version from file
+        run: |
+          pkg_name=$(grep -P 'version = \{attr = .*\}' pyproject.toml | grep -oP '\w+.__version__')
+          init_file="./src/${pkg_name//.__version__}/__init__.py"
+          pkg_version=$(grep -Po '[0-9]+\.[0-9]+\.[0-9]+' "$init_file")
+          echo "docker_tag=$pkg_version" >> "$GITHUB_ENV"
+      - name: Create git tag
+        run: |
+          git tag "v${{ env.docker_tag }}"
+      - name: Push git tag
+        run: git push origin "v${{ env.docker_tag }}"
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to Github Packages
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build image and push to GitHub Container Registry
+        uses: docker/build-push-action@v3
+        with:
+          # relative path to the place where source code with Dockerfile is located
+          context: .
+          push: true
+          tags: |
+            ghcr.io/allenneuraldynamics/aind-data-transfer-service:${{ env.docker_tag }}
+            ghcr.io/allenneuraldynamics/aind-data-transfer-service:latest
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+      - name: Install dependencies
+        run: |
+          pip install --upgrade setuptools wheel twine build
+          python -m build
+          twine check dist/*
+      - name: Publish on PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1.12
+        with:
+          password: ${{ secrets.AIND_PYPI_TOKEN }}
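
Note: the `Get version from file` step chains two greps to find which module's `__version__` pyproject.toml points at, then reads the semver string out of that module's `__init__.py`, and that string becomes both the git tag (`v<version>`) and the docker tag. A hedged Python equivalent of that shell logic (an illustration, not code from this package):

```python
# Sketch: the same version lookup the workflow does with grep.
# Assumes pyproject.toml contains: version = {attr = "<pkg>.__version__"}
import re
from pathlib import Path

pyproject = Path("pyproject.toml").read_text()
pkg = re.search(r"version = \{attr = .*?(\w+)\.__version__", pyproject).group(1)
init_file = Path("src") / pkg / "__init__.py"
pkg_version = re.search(r"[0-9]+\.[0-9]+\.[0-9]+", init_file.read_text()).group(0)
print(pkg_version)  # e.g. "1.12.0"
```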

aind_data_transfer_service-1.12.0/.github/workflows/run_dev_tests.yml
@@ -0,0 +1,26 @@
+name: Run checks in dev
+
+on:
+  pull_request:
+    branches:
+      - dev
+
+jobs:
+  ci:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [ '3.9', '3.10', '3.11' ]
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install -e .[dev]
+      - name: Run linter checks
+        run: flake8 . && interrogate --verbose .
+      - name: Run tests and coverage
+        run: coverage run -m unittest discover && coverage report

aind_data_transfer_service-1.12.0/.github/workflows/run_main_tests.yml
@@ -0,0 +1,43 @@
+name: Run checks in main and release
+
+on:
+  pull_request:
+    branches:
+      - '*release*'
+      - main
+
+jobs:
+  ci:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [ '3.9', '3.10', '3.11' ]
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install -e .[dev]
+      - name: Run linter checks
+        run: flake8 . && interrogate --verbose .
+      - name: Run tests and coverage
+        run: coverage run -m unittest discover && coverage report
+  verify_version:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Check version incremented
+        run: |
+          pkg_name=$(grep -P 'version = \{attr = .*\}' pyproject.toml | grep -oP '\w+.__version__')
+          init_file="./src/${pkg_name//.__version__}/__init__.py"
+          pkg_version=$(grep -Po '[0-9]+\.[0-9]+\.[0-9]+' "$init_file")
+          latest_tag=$(git ls-remote --tags --refs --sort="v:refname" | tail -n1 | sed 's/.*\///')
+          echo "Checking pkg_version v$pkg_version and latest_tag $latest_tag"
+          if [ "$latest_tag" == "v$pkg_version" ]
+          then
+            exit 1
+          fi
+          echo "Versions are different"
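
Note: the `verify_version` job reuses the same version parsing and fails the check when the newest `v*` tag on the remote already equals the package version, forcing a version bump before merging to main. Roughly, the comparison amounts to this sketch (same assumptions as the previous one; the hard-coded version stands in for the parsed value):

```python
# Sketch: fail if the latest remote tag already matches the package version.
import subprocess

pkg_version = "1.12.0"  # would be parsed from __init__.py as sketched above
tags = subprocess.run(
    ["git", "ls-remote", "--tags", "--refs", "--sort=v:refname"],
    capture_output=True, text=True, check=True,
).stdout.splitlines()
latest_tag = tags[-1].split("/")[-1] if tags else ""  # e.g. "v1.11.0"
if latest_tag == f"v{pkg_version}":
    raise SystemExit("Version was not incremented")
print("Versions are different")
```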

aind_data_transfer_service-1.12.0/.gitignore
@@ -0,0 +1,142 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+.idea/
+
+# MacOs
+**/.DS_Store
+
+# VSCode
+.vscode/

aind_data_transfer_service-1.12.0/Dockerfile
@@ -0,0 +1,14 @@
+FROM python:3.11-slim
+WORKDIR /app
+ADD src ./src
+ADD pyproject.toml .
+ADD setup.py .
+
+# Add git in case we need to install from branches
+RUN apt-get update && apt-get install -y git
+
+# Pip command. Without '-e' flag, index.html isn't found. There's probably a
+# better way to add the static html files to the site-packages.
+RUN pip install -e .[server] --no-cache-dir
+
+CMD ["uvicorn", "aind_data_transfer_service.server:app", "--host", "0.0.0.0", "--port", "5000"]
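
Note: the comment in this Dockerfile flags that a regular `pip install .` drops the Jinja HTML templates from site-packages, which is why the editable install is used. The cleaner route the comment hints at is declaring the HTML files as package data so a non-editable install ships them; a hedged sketch of how packaged files could then be resolved (hypothetical, not this project's code):

```python
# Sketch: resolve packaged static files without an editable install.
# Assumes the templates are declared as package data, e.g. in pyproject.toml:
#   [tool.setuptools.package-data]
#   aind_data_transfer_service = ["templates/*.html"]
from importlib.resources import files

template_dir = files("aind_data_transfer_service") / "templates"
print(template_dir / "index.html")  # Traversable pointing into site-packages
```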

aind_data_transfer_service-1.12.0/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 Allen Institute for Neural Dynamics
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

aind_data_transfer_service-1.12.0/PKG-INFO
@@ -0,0 +1,49 @@
+Metadata-Version: 2.4
+Name: aind-data-transfer-service
+Version: 1.12.0
+Summary: Service that handles requests to upload data to the cloud
+Author: Allen Institute for Neural Dynamics
+License: MIT
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: boto3
+Requires-Dist: boto3-stubs[ssm]
+Requires-Dist: pydantic<2.9,>=2.7
+Requires-Dist: pydantic-settings>=2.0
+Requires-Dist: aind-data-schema<2.0,>=1.0.0
+Requires-Dist: aind-data-transfer-models==0.17.0
+Requires-Dist: aind-metadata-mapper==0.23.0
+Provides-Extra: dev
+Requires-Dist: aind-data-transfer-service[server]; extra == "dev"
+Requires-Dist: black; extra == "dev"
+Requires-Dist: coverage; extra == "dev"
+Requires-Dist: flake8; extra == "dev"
+Requires-Dist: interrogate; extra == "dev"
+Requires-Dist: isort; extra == "dev"
+Provides-Extra: docs
+Requires-Dist: Sphinx; extra == "docs"
+Requires-Dist: furo; extra == "docs"
+Provides-Extra: server
+Requires-Dist: fastapi; extra == "server"
+Requires-Dist: httpx; extra == "server"
+Requires-Dist: jinja2; extra == "server"
+Requires-Dist: starlette; extra == "server"
+Requires-Dist: starlette_wtf; extra == "server"
+Requires-Dist: uvicorn[standard]; extra == "server"
+Requires-Dist: wtforms; extra == "server"
+Requires-Dist: requests==2.25.0; extra == "server"
+Requires-Dist: openpyxl; extra == "server"
+Requires-Dist: python-logging-loki; extra == "server"
+Dynamic: license-file
+
+# aind-data-transfer-service
+
+[![MIT License](https://img.shields.io/badge/license-MIT-brightgreen)](LICENSE)
+![Code Style](https://img.shields.io/badge/code%20style-black-black)
+[![semantic-release: angular](https://img.shields.io/badge/semantic--release-angular-e10079?logo=semantic-release)](https://github.com/semantic-release/semantic-release)
+
+This service can be used to upload data stored in a VAST drive. It uses FastAPI to upload a job submission csv file that will be used to trigger a data transfer job in an on-prem HPC. Based on the information provided in the file, the data upload process fetches the appropriate metadata and starts the upload process.
+
+More information can be found at [readthedocs](https://aind-data-transfer-service.readthedocs.io).

aind_data_transfer_service-1.12.0/README.md
@@ -0,0 +1,9 @@
+# aind-data-transfer-service
+
+[![MIT License](https://img.shields.io/badge/license-MIT-brightgreen)](LICENSE)
+![Code Style](https://img.shields.io/badge/code%20style-black-black)
+[![semantic-release: angular](https://img.shields.io/badge/semantic--release-angular-e10079?logo=semantic-release)](https://github.com/semantic-release/semantic-release)
+
+This service can be used to upload data stored in a VAST drive. It uses FastAPI to upload a job submission csv file that will be used to trigger a data transfer job in an on-prem HPC. Based on the information provided in the file, the data upload process fetches the appropriate metadata and starts the upload process.
+
+More information can be found at [readthedocs](https://aind-data-transfer-service.readthedocs.io).
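
Note: as the README describes, jobs are submitted by uploading a CSV such as `docs/examples/example1.csv` to the FastAPI service. A minimal client-side sketch of that flow; the host matches the port in the Dockerfile CMD, but the route name is a hypothetical placeholder, not a confirmed API path:

```python
# Sketch: upload a job submission csv to a running service instance.
# "http://localhost:5000" comes from the Dockerfile CMD; the
# "/api/validate_csv" route is an assumed placeholder.
import requests

with open("docs/examples/example1.csv", "rb") as f:
    response = requests.post(
        "http://localhost:5000/api/validate_csv",
        files={"file": ("example1.csv", f, "text/csv")},
    )
print(response.status_code, response.json())
```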

aind_data_transfer_service-1.12.0/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = source
+BUILDDIR      = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

aind_data_transfer_service-1.12.0/docs/diagrams/system_container.png
Binary file

aind_data_transfer_service-1.12.0/docs/diagrams/system_container.puml
@@ -0,0 +1,26 @@
+@startuml
+!include https://raw.githubusercontent.com/plantuml-stdlib/C4-PlantUML/master/C4_Container.puml
+' uncomment the following line and comment the first to use locally
+' !include C4_Container.puml
+
+' LAYOUT_TOP_DOWN()
+' LAYOUT_AS_SKETCH()
+LAYOUT_WITH_LEGEND()
+
+title Container diagram for AIND Data Transfer Service
+
+Person(user, "User", "A scientist or engineer that wants to upload data to the cloud.")
+
+System_Boundary(c1, "AIND Data Transfer Service") {
+  Container(app, "API Application", "FastAPI, Docker Container", "Validates and submits request to aind-airflow-service. Runs in K8s cluster managed by Central IT.")
+}
+
+System_Ext(aind_airflow_service, "AIND Airflow Service", "Receives job requests, does additional validation checks, submits and monitors jobs.")
+System_Ext(slurm, "Slurm", "High performance computing cluster that runs data transformation and data upload jobs.")
+
+Rel(user, app, "Uses", "HTTP, REST")
+
+Rel_Back(user, aind_airflow_service, "Sends e-mails to", "SMTP")
+Rel(app, aind_airflow_service, "Uses", "REST API")
+Rel(aind_airflow_service, slurm, "Uses", "REST API")
+@enduml
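
Note: the container diagram shows one FastAPI app that validates requests and forwards them to the AIND Airflow Service over REST, which in turn drives Slurm. A minimal sketch of that validate-and-forward shape, assuming hypothetical route, model fields, and Airflow URL (illustrative only, not the actual `server.py`):

```python
# Sketch of the validate-and-forward pattern from the diagram; the route,
# model fields, and airflow URL are all assumptions.
import httpx
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()
AIRFLOW_URL = "http://aind-airflow-service.example/api/v1/dags/upload/dagRuns"


class JobRequest(BaseModel):
    s3_bucket: str
    subject_id: str


@app.post("/jobs")
async def submit_job(job: JobRequest):
    # Validation happens via the pydantic model; forward to Airflow's REST API.
    async with httpx.AsyncClient() as client:
        resp = await client.post(AIRFLOW_URL, json={"conf": job.model_dump()})
    return {"airflow_status": resp.status_code}
```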

aind_data_transfer_service-1.12.0/docs/diagrams/system_context.png
Binary file

aind_data_transfer_service-1.12.0/docs/diagrams/system_context.puml
@@ -0,0 +1,19 @@
+@startuml
+!include https://raw.githubusercontent.com/plantuml-stdlib/C4-PlantUML/master/C4_Context.puml
+' uncomment the following line and comment the first to use locally
+' !include C4_Context.puml
+
+LAYOUT_WITH_LEGEND()
+
+title System Context diagram for AIND Data Transfer Service
+
+Person(user, "User", "A scientist or engineer that wants to upload data to the cloud.")
+System(transfer_service, "AIND Data Transfer Service", "Allows people to send job requests to compress (or transform) and upload raw data assets.")
+System_Ext(aind_airflow_service, "AIND Airflow Service", "Receives job requests, does additional validation checks, submits and monitors jobs.")
+System_Ext(slurm, "Slurm", "High performance computing cluster that runs data transformation and data upload jobs.")
+
+Rel(user, transfer_service, "Uses", "web portal or REST API")
+Rel_Back(user, aind_airflow_service, "Sends e-mails to", "SMTP")
+Rel(transfer_service, aind_airflow_service, "Uses", "REST API")
+Rel(aind_airflow_service, slurm, "Uses", "REST API")
+@enduml

aind_data_transfer_service-1.12.0/docs/examples/example1.csv
@@ -0,0 +1,4 @@
+project_name, process_capsule_id, modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime, job_type
+Ephys Platform, , ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10, ecephys_opto
+Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM,
+Behavior Platform, , BEHAVIOR_VIDEOS, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM,
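
Note: each row pairs a `modalityN` column with a `modalityN.source` directory, alongside bucket, subject, platform, and acquisition-time fields, and the cells carry padding spaces after the delimiters. A small sketch for reading the rows into dicts (illustrative; the service's real parser lives in `configs/csv_handler.py`):

```python
# Sketch: read the example job-submission csv into per-row dicts,
# stripping the padded header/cell whitespace shown above.
import csv

with open("docs/examples/example1.csv", newline="") as f:
    rows = [
        {k.strip(): (v or "").strip() for k, v in row.items()}
        for row in csv.DictReader(f, skipinitialspace=True)
    ]

for row in rows:
    # Collect only the modality slots that are actually filled in.
    modalities = [
        (row[k], row[f"{k}.source"])
        for k in ("modality0", "modality1")
        if row.get(k)
    ]
    print(row["subject-id"], row["platform"], modalities)
```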

aind_data_transfer_service-1.12.0/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.https://www.sphinx-doc.org/
+	exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd