aind-data-transfer-service 1.14.0__tar.gz → 1.20.0__tar.gz

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in the public registry.
Files changed (104)
  1. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/PKG-INFO +7 -8
  2. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/pyproject.toml +8 -9
  3. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/__init__.py +2 -1
  4. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/configs/csv_handler.py +67 -11
  5. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/configs/job_upload_template.py +24 -23
  6. aind_data_transfer_service-1.20.0/src/aind_data_transfer_service/configs/platforms_v1.py +177 -0
  7. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/log_handler.py +3 -3
  8. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/models/core.py +36 -11
  9. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/models/internal.py +35 -10
  10. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/server.py +445 -499
  11. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service.egg-info/PKG-INFO +7 -8
  12. aind_data_transfer_service-1.20.0/src/aind_data_transfer_service.egg-info/SOURCES.txt +26 -0
  13. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service.egg-info/requires.txt +5 -6
  14. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/tests/test_core.py +51 -19
  15. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/tests/test_csv_handler.py +64 -6
  16. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/tests/test_job_upload_template.py +3 -5
  17. aind_data_transfer_service-1.20.0/tests/test_proxy.py +54 -0
  18. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/tests/test_server.py +528 -828
  19. aind_data_transfer_service-1.14.0/.flake8 +0 -7
  20. aind_data_transfer_service-1.14.0/.github/ISSUE_TEMPLATE/bug_report.md +0 -38
  21. aind_data_transfer_service-1.14.0/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
  22. aind_data_transfer_service-1.14.0/.github/ISSUE_TEMPLATE/user-story.md +0 -27
  23. aind_data_transfer_service-1.14.0/.github/workflows/add_issue_to_project_board.yml +0 -15
  24. aind_data_transfer_service-1.14.0/.github/workflows/publish_dev.yml +0 -34
  25. aind_data_transfer_service-1.14.0/.github/workflows/publish_main.yml +0 -56
  26. aind_data_transfer_service-1.14.0/.github/workflows/run_dev_tests.yml +0 -26
  27. aind_data_transfer_service-1.14.0/.github/workflows/run_main_tests.yml +0 -43
  28. aind_data_transfer_service-1.14.0/.gitignore +0 -142
  29. aind_data_transfer_service-1.14.0/.readthedocs.yaml +0 -17
  30. aind_data_transfer_service-1.14.0/CODE_OF_CONDUCT.md +0 -133
  31. aind_data_transfer_service-1.14.0/Dockerfile +0 -14
  32. aind_data_transfer_service-1.14.0/docs/Makefile +0 -20
  33. aind_data_transfer_service-1.14.0/docs/diagrams/system_container.png +0 -0
  34. aind_data_transfer_service-1.14.0/docs/diagrams/system_container.puml +0 -26
  35. aind_data_transfer_service-1.14.0/docs/diagrams/system_context.png +0 -0
  36. aind_data_transfer_service-1.14.0/docs/diagrams/system_context.puml +0 -19
  37. aind_data_transfer_service-1.14.0/docs/examples/basic_upload.py +0 -76
  38. aind_data_transfer_service-1.14.0/docs/examples/behavior_videos_compression.py +0 -123
  39. aind_data_transfer_service-1.14.0/docs/examples/custom_codeocean_pipeline_settings.py +0 -107
  40. aind_data_transfer_service-1.14.0/docs/examples/custom_metadata_mapper_settings.py +0 -113
  41. aind_data_transfer_service-1.14.0/docs/examples/example1.csv +0 -4
  42. aind_data_transfer_service-1.14.0/docs/examples/hcr_example.py +0 -81
  43. aind_data_transfer_service-1.14.0/docs/examples/skip_s3_check.py +0 -78
  44. aind_data_transfer_service-1.14.0/docs/examples/upload_with_custom_slurm_settings.py +0 -86
  45. aind_data_transfer_service-1.14.0/docs/examples/upload_with_notification.py +0 -79
  46. aind_data_transfer_service-1.14.0/docs/make.bat +0 -35
  47. aind_data_transfer_service-1.14.0/docs/source/Contributing.rst +0 -254
  48. aind_data_transfer_service-1.14.0/docs/source/UserGuideV1.rst +0 -528
  49. aind_data_transfer_service-1.14.0/docs/source/UserGuideV2.rst +0 -194
  50. aind_data_transfer_service-1.14.0/docs/source/_static/dark-logo.svg +0 -129
  51. aind_data_transfer_service-1.14.0/docs/source/_static/favicon.ico +0 -0
  52. aind_data_transfer_service-1.14.0/docs/source/_static/light-logo.svg +0 -128
  53. aind_data_transfer_service-1.14.0/docs/source/aind_data_transfer_service.configs.rst +0 -37
  54. aind_data_transfer_service-1.14.0/docs/source/aind_data_transfer_service.hpc.rst +0 -29
  55. aind_data_transfer_service-1.14.0/docs/source/aind_data_transfer_service.models.rst +0 -29
  56. aind_data_transfer_service-1.14.0/docs/source/aind_data_transfer_service.rst +0 -39
  57. aind_data_transfer_service-1.14.0/docs/source/conf.py +0 -53
  58. aind_data_transfer_service-1.14.0/docs/source/index.rst +0 -25
  59. aind_data_transfer_service-1.14.0/docs/source/modules.rst +0 -7
  60. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/configs/job_configs.py +0 -545
  61. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/hpc/__init__.py +0 -1
  62. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/hpc/client.py +0 -151
  63. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/hpc/models.py +0 -492
  64. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/templates/index.html +0 -257
  65. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/templates/job_params.html +0 -194
  66. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/templates/job_status.html +0 -323
  67. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/templates/job_tasks_table.html +0 -146
  68. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service/templates/task_logs.html +0 -31
  69. aind_data_transfer_service-1.14.0/src/aind_data_transfer_service.egg-info/SOURCES.txt +0 -99
  70. aind_data_transfer_service-1.14.0/tests/__init__.py +0 -1
  71. aind_data_transfer_service-1.14.0/tests/resources/airflow_dag_run_response.json +0 -51
  72. aind_data_transfer_service-1.14.0/tests/resources/airflow_dag_runs_response.json +0 -258
  73. aind_data_transfer_service-1.14.0/tests/resources/airflow_task_instances_response.json +0 -501
  74. aind_data_transfer_service-1.14.0/tests/resources/describe_parameters_response.json +0 -74
  75. aind_data_transfer_service-1.14.0/tests/resources/get_parameter_response.json +0 -24
  76. aind_data_transfer_service-1.14.0/tests/resources/job_upload_template.xlsx +0 -0
  77. aind_data_transfer_service-1.14.0/tests/resources/legacy_sample.csv +0 -4
  78. aind_data_transfer_service-1.14.0/tests/resources/legacy_sample2.csv +0 -2
  79. aind_data_transfer_service-1.14.0/tests/resources/new_sample.csv +0 -4
  80. aind_data_transfer_service-1.14.0/tests/resources/sample.csv +0 -4
  81. aind_data_transfer_service-1.14.0/tests/resources/sample.xlsx +0 -0
  82. aind_data_transfer_service-1.14.0/tests/resources/sample_alt_modality_case.csv +0 -4
  83. aind_data_transfer_service-1.14.0/tests/resources/sample_empty_rows.csv +0 -7
  84. aind_data_transfer_service-1.14.0/tests/resources/sample_empty_rows.xlsx +0 -0
  85. aind_data_transfer_service-1.14.0/tests/resources/sample_empty_rows_2.csv +0 -7
  86. aind_data_transfer_service-1.14.0/tests/resources/sample_invalid_ext.txt +0 -4
  87. aind_data_transfer_service-1.14.0/tests/resources/sample_malformed.csv +0 -4
  88. aind_data_transfer_service-1.14.0/tests/resources/sample_malformed.xlsx +0 -0
  89. aind_data_transfer_service-1.14.0/tests/resources/sample_malformed_2.csv +0 -2
  90. aind_data_transfer_service-1.14.0/tests/test_configs.py +0 -361
  91. aind_data_transfer_service-1.14.0/tests/test_hpc_client.py +0 -166
  92. aind_data_transfer_service-1.14.0/tests/test_hpc_models.py +0 -139
  93. aind_data_transfer_service-1.14.0/tests/test_server/Dockerfile +0 -7
  94. aind_data_transfer_service-1.14.0/tests/test_server/db.json +0 -1084
  95. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/LICENSE +0 -0
  96. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/README.md +0 -0
  97. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/setup.cfg +0 -0
  98. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/setup.py +0 -0
  99. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/configs/__init__.py +0 -0
  100. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service/models/__init__.py +0 -0
  101. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service.egg-info/dependency_links.txt +0 -0
  102. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/src/aind_data_transfer_service.egg-info/top_level.txt +0 -0
  103. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/tests/test_internal.py +0 -0
  104. {aind_data_transfer_service-1.14.0 → aind_data_transfer_service-1.20.0}/tests/test_log_handler.py +0 -0
@@ -1,14 +1,14 @@
  Metadata-Version: 2.4
  Name: aind-data-transfer-service
- Version: 1.14.0
+ Version: 1.20.0
  Summary: Service that handles requests to upload data to the cloud
  Author: Allen Institute for Neural Dynamics
  License: MIT
  Classifier: Programming Language :: Python :: 3
- Requires-Python: >=3.9
+ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: aind-data-schema-models<1.0.0,>=0.3.2
+ Requires-Dist: aind-data-schema-models>=0.3.2
  Requires-Dist: email-validator
  Requires-Dist: pydantic>=2.0
  Requires-Dist: pydantic-settings>=2.0
@@ -23,21 +23,20 @@ Provides-Extra: docs
  Requires-Dist: Sphinx; extra == "docs"
  Requires-Dist: furo; extra == "docs"
  Provides-Extra: server
- Requires-Dist: aind-data-schema<2.0,>=1.0.0; extra == "server"
- Requires-Dist: aind-data-transfer-models==0.17.0; extra == "server"
- Requires-Dist: aind-metadata-mapper==0.23.0; extra == "server"
+ Requires-Dist: aind-data-schema==2.0.0; extra == "server"
  Requires-Dist: boto3; extra == "server"
  Requires-Dist: boto3-stubs[ssm]; extra == "server"
- Requires-Dist: fastapi; extra == "server"
+ Requires-Dist: fastapi>=0.115.13; extra == "server"
  Requires-Dist: httpx; extra == "server"
  Requires-Dist: jinja2; extra == "server"
- Requires-Dist: starlette; extra == "server"
+ Requires-Dist: starlette<0.47.0,>=0.40.0; extra == "server"
  Requires-Dist: starlette_wtf; extra == "server"
  Requires-Dist: uvicorn[standard]; extra == "server"
  Requires-Dist: wtforms; extra == "server"
  Requires-Dist: requests==2.25.0; extra == "server"
  Requires-Dist: openpyxl; extra == "server"
  Requires-Dist: python-logging-loki; extra == "server"
+ Requires-Dist: authlib; extra == "server"
  Dynamic: license-file

  # aind-data-transfer-service
@@ -1,12 +1,12 @@
  [build-system]
- requires = ["setuptools", "setuptools-scm"]
+ requires = ["setuptools"]
  build-backend = "setuptools.build_meta"

  [project]
  name = "aind-data-transfer-service"
  description = "Service that handles requests to upload data to the cloud"
  license = {text = "MIT"}
- requires-python = ">=3.9"
+ requires-python = ">=3.10"
  authors = [
  {name = "Allen Institute for Neural Dynamics"}
  ]
@@ -17,7 +17,7 @@ readme = "README.md"
  dynamic = ["version"]

  dependencies = [
- 'aind-data-schema-models>=0.3.2,<1.0.0',
+ 'aind-data-schema-models>=0.3.2',
  'email-validator',
  'pydantic>=2.0',
  'pydantic-settings>=2.0',
@@ -39,21 +39,20 @@ docs = [
  ]

  server = [
- 'aind-data-schema>=1.0.0,<2.0',
- 'aind-data-transfer-models==0.17.0',
- 'aind-metadata-mapper==0.23.0',
+ 'aind-data-schema==2.0.0',
  'boto3',
  'boto3-stubs[ssm]',
- 'fastapi',
+ 'fastapi>=0.115.13',
  'httpx',
  'jinja2',
- 'starlette',
+ 'starlette>=0.40.0,<0.47.0',
  'starlette_wtf',
  'uvicorn[standard]',
  'wtforms',
  'requests==2.25.0',
  'openpyxl',
- 'python-logging-loki'
+ 'python-logging-loki',
+ 'authlib'
  ]

  [tool.setuptools.packages.find]
@@ -1,7 +1,8 @@
  """Init package"""
+
  import os

- __version__ = "1.14.0"
+ __version__ = "1.20.0"

  # Global constants
  OPEN_DATA_BUCKET_NAME = os.getenv("OPEN_DATA_BUCKET_NAME", "open")
@@ -1,11 +1,14 @@
  """Module to handle processing legacy csv files"""

  import re
+ from collections.abc import Mapping
+ from copy import deepcopy
  from datetime import datetime
+ from typing import Any, Dict

  from aind_data_schema_models.modalities import Modality
- from aind_data_schema_models.platforms import Platform

+ from aind_data_transfer_service.configs.platforms_v1 import Platform
  from aind_data_transfer_service.models.core import Task, UploadJobConfigsV2

  DATETIME_PATTERN2 = re.compile(
@@ -13,6 +16,45 @@ DATETIME_PATTERN2 = re.compile(
  )


+ def nested_update(dict_to_update: Dict[str, Any], updates: Mapping):
+ """
+ Update a nested dictionary in-place.
+ Parameters
+ ----------
+ dict_to_update : Dict[str, Any]
+ updates : Mapping
+
+ """
+ for k, v in updates.items():
+ if isinstance(v, Mapping):
+ dict_to_update[k] = nested_update(dict_to_update.get(k, {}), v)
+ else:
+ dict_to_update[k] = v
+ return dict_to_update
+
+
+ def create_nested_dict(
+ dict_to_update: Dict[str, Any], key_string: str, value: Any
+ ):
+ """
+ Updates in-place a nested dictionary with a period delimited key and value.
+ Parameters
+ ----------
+ dict_to_update : Dict[str, Any]
+ key_string : str
+ value : Any
+
+ """
+ keys = key_string.split(".", 1)
+ current_key = keys[0]
+ if len(keys) == 1:
+ dict_to_update[current_key] = value
+ else:
+ if current_key not in dict_to_update:
+ dict_to_update[current_key] = dict()
+ create_nested_dict(dict_to_update[current_key], keys[1], value)
+
+
  def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
  """
  Maps csv row into a UploadJobConfigsV2 model. This attempts to be somewhat
@@ -29,7 +71,6 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
  modality_configs = dict()
  job_configs = dict()
  check_s3_folder_exists_task = None
- final_check_s3_folder_exist = None
  codeocean_tasks = dict()
  for key, value in row.items():
  # Strip white spaces and replace dashes with underscores
@@ -42,7 +83,9 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
  modality_parts = clean_key.split(".")
  modality_key = modality_parts[0]
  sub_key = (
- "modality" if len(modality_parts) == 1 else modality_parts[1]
+ "modality"
+ if len(modality_parts) == 1
+ else ".".join(modality_parts[1:])
  )
  modality_configs.setdefault(modality_key, dict())
  # Temp backwards compatibility check
@@ -66,13 +109,22 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
  job_settings=codeocean_pipeline_monitor_settings,
  )
  else:
- modality_configs[modality_key].update({sub_key: clean_val})
+ nested_val = dict()
+ create_nested_dict(
+ dict_to_update=nested_val,
+ key_string=sub_key,
+ value=clean_val,
+ )
+ current_dict = deepcopy(
+ modality_configs.get(modality_key, dict())
+ )
+ nested_update(current_dict, nested_val)
+ modality_configs[modality_key] = current_dict
  elif clean_key == "force_cloud_sync" and clean_val.upper() in [
  "TRUE",
  "T",
  ]:
  check_s3_folder_exists_task = {"skip_task": True}
- final_check_s3_folder_exist = {"skip_task": True}
  else:
  job_configs[clean_key] = clean_val
  # Rename codeocean config keys with correct modality
@@ -93,16 +145,20 @@ def map_csv_row_to_job(row: dict) -> UploadJobConfigsV2:
  )
  tasks = {
  "gather_preliminary_metadata": metadata_task,
- "check_s3_folder_exists_task": check_s3_folder_exists_task,
- "final_check_s3_folder_exist": final_check_s3_folder_exist,
+ "check_s3_folder_exists": check_s3_folder_exists_task,
  "modality_transformation_settings": modality_tasks,
- "codeocean_pipeline_settings": None
- if codeocean_tasks == dict()
- else codeocean_tasks,
+ "codeocean_pipeline_settings": (
+ None if codeocean_tasks == dict() else codeocean_tasks
+ ),
  }
+ platform = (
+ None
+ if job_configs.get("platform") is None
+ else Platform.from_abbreviation(job_configs["platform"])
+ )
  job_configs.update(
  {
- "platform": Platform.from_abbreviation(job_configs["platform"]),
+ "platform": platform,
  "modalities": [
  Modality.from_abbreviation(m) for m in modality_tasks.keys()
  ],
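A minimal usage sketch (illustrative, not part of the packaged diff) of how the two new csv_handler helpers cooperate: create_nested_dict expands a period-delimited CSV column key into a nested dict, and nested_update merges it into the existing modality settings without dropping sibling keys. The column key "slurm_settings.memory" below is hypothetical.

    from aind_data_transfer_service.configs.csv_handler import (
        create_nested_dict,
        nested_update,
    )

    # Settings already parsed for a "modality0" column group
    modality_config = {"modality": "ecephys"}

    # Expand a hypothetical period-delimited sub-key into a nested dict
    nested_val = dict()
    create_nested_dict(
        dict_to_update=nested_val,
        key_string="slurm_settings.memory",
        value="8G",
    )

    # Merge in place without clobbering the existing "modality" key
    nested_update(modality_config, nested_val)
    # -> {"modality": "ecephys", "slurm_settings": {"memory": "8G"}}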
@@ -2,24 +2,25 @@

  import datetime
  from io import BytesIO
- from typing import Any, Dict, List
+ from typing import Any, ClassVar, Dict, List

  from aind_data_schema_models.modalities import Modality
- from aind_data_schema_models.platforms import Platform
  from openpyxl import Workbook
  from openpyxl.styles import Font
  from openpyxl.utils import get_column_letter
  from openpyxl.worksheet.datavalidation import DataValidation
+ from pydantic import BaseModel

+ from aind_data_transfer_service.configs.platforms_v1 import Platform

- # TODO: convert to pydantic model
- class JobUploadTemplate:
+
+ class JobUploadTemplate(BaseModel):
  """Class to configure and create xlsx job upload template"""

- FILE_NAME = "job_upload_template.xlsx"
- NUM_TEMPLATE_ROWS = 20
- XLSX_DATETIME_FORMAT = "YYYY-MM-DDTHH:mm:ss"
- HEADERS = [
+ FILE_NAME: ClassVar[str] = "job_upload_template.xlsx"
+ _NUM_TEMPLATE_ROWS: ClassVar[int] = 20
+ _XLSX_DATETIME_FORMAT: ClassVar[str] = "YYYY-MM-DDTHH:mm:ss"
+ _HEADERS: ClassVar[List[str]] = [
  "job_type",
  "project_name",
  "platform",
@@ -31,7 +32,7 @@ class JobUploadTemplate:
  "modality1",
  "modality1.input_source",
  ]
- SAMPLE_JOBS = [
+ _SAMPLE_JOBS: ClassVar[List[List[Any]]] = [
  [
  "default",
  "Behavior Platform",
@@ -68,8 +69,8 @@ class JobUploadTemplate:
  ],
  ]

- @property
- def validators(self) -> List[Dict[str, Any]]:
+ @classmethod
+ def _get_validators(cls) -> List[Dict[str, Any]]:
  """
  Returns
  -------
@@ -82,36 +83,36 @@ class JobUploadTemplate:
  "name": "platform",
  "type": "list",
  "options": list(Platform.abbreviation_map.keys()),
- "column_indexes": [self.HEADERS.index("platform")],
+ "column_indexes": [cls._HEADERS.index("platform")],
  },
  {
  "name": "modality",
  "type": "list",
  "options": list(Modality.abbreviation_map.keys()),
  "column_indexes": [
- self.HEADERS.index("modality0"),
- self.HEADERS.index("modality1"),
+ cls._HEADERS.index("modality0"),
+ cls._HEADERS.index("modality1"),
  ],
  },
  {
  "name": "datetime",
  "type": "date",
- "column_indexes": [self.HEADERS.index("acq_datetime")],
+ "column_indexes": [cls._HEADERS.index("acq_datetime")],
  },
  ]

- @property
- def excel_sheet_filestream(self) -> BytesIO:
+ @classmethod
+ def create_excel_sheet_filestream(cls) -> BytesIO:
  """Create job template as xlsx filestream"""
  xl_io = BytesIO()
  workbook = Workbook()
  workbook.iso_dates = True
  worksheet = workbook.active
- worksheet.append(self.HEADERS)
- for job in self.SAMPLE_JOBS:
+ worksheet.append(cls._HEADERS)
+ for job in cls._SAMPLE_JOBS:
  worksheet.append(job)
  # data validators
- for validator in self.validators:
+ for validator in cls._get_validators():
  dv_type = validator["type"]
  dv_name = validator["name"]
  dv_params = {
@@ -127,17 +128,17 @@ class JobUploadTemplate:
  dv_params["prompt"] = f"Select a {dv_name} from the dropdown"
  elif dv_type == "date":
  dv_params["prompt"] = "Provide a {} using {}".format(
- dv_name, self.XLSX_DATETIME_FORMAT
+ dv_name, cls._XLSX_DATETIME_FORMAT
  )
  dv = DataValidation(**dv_params)
  for i in validator["column_indexes"]:
  col = get_column_letter(i + 1)
- col_range = f"{col}2:{col}{self.NUM_TEMPLATE_ROWS}"
+ col_range = f"{col}2:{col}{cls._NUM_TEMPLATE_ROWS}"
  dv.add(col_range)
  if dv_type != "date":
  continue
  for (cell,) in worksheet[col_range]:
- cell.number_format = self.XLSX_DATETIME_FORMAT
+ cell.number_format = cls._XLSX_DATETIME_FORMAT
  worksheet.add_data_validation(dv)
  # formatting
  bold = Font(bold=True)
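A minimal usage sketch (illustrative, not part of the packaged diff) for the refactored template builder: the old excel_sheet_filestream property required an instance, while create_excel_sheet_filestream is now a classmethod, so the xlsx stream can be generated without constructing a JobUploadTemplate.

    from aind_data_transfer_service.configs.job_upload_template import (
        JobUploadTemplate,
    )

    # Build the workbook in memory and write it out under the class's file name
    stream = JobUploadTemplate.create_excel_sheet_filestream()
    with open(JobUploadTemplate.FILE_NAME, "wb") as f:
        f.write(stream.getvalue())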
@@ -0,0 +1,177 @@
+ """Platforms"""
+
+ from typing import Literal, Union
+
+ from pydantic import BaseModel, ConfigDict, Field
+ from typing_extensions import Annotated
+
+
+ class _PlatformModel(BaseModel):
+ """Base model for platform"""
+
+ model_config = ConfigDict(frozen=True)
+ name: str
+ abbreviation: str
+
+
+ class _Behavior(_PlatformModel):
+ """Model behavior"""
+
+ name: Literal["Behavior platform"] = "Behavior platform"
+ abbreviation: Literal["behavior"] = "behavior"
+
+
+ class _Confocal(_PlatformModel):
+ """Model confocal"""
+
+ name: Literal["Confocal microscopy platform"] = (
+ "Confocal microscopy platform"
+ )
+ abbreviation: Literal["confocal"] = "confocal"
+
+
+ class _Ecephys(_PlatformModel):
+ """Model ecephys"""
+
+ name: Literal["Electrophysiology platform"] = "Electrophysiology platform"
+ abbreviation: Literal["ecephys"] = "ecephys"
+
+
+ class _Exaspim(_PlatformModel):
+ """Model exaSPIM"""
+
+ name: Literal["ExaSPIM platform"] = "ExaSPIM platform"
+ abbreviation: Literal["exaSPIM"] = "exaSPIM"
+
+
+ class _Fip(_PlatformModel):
+ """Model FIP"""
+
+ name: Literal["Frame-projected independent-fiber photometry platform"] = (
+ "Frame-projected independent-fiber photometry platform"
+ )
+ abbreviation: Literal["FIP"] = "FIP"
+
+
+ class _Hcr(_PlatformModel):
+ """Model HCR"""
+
+ name: Literal["Hybridization chain reaction platform"] = (
+ "Hybridization chain reaction platform"
+ )
+ abbreviation: Literal["HCR"] = "HCR"
+
+
+ class _Hsfp(_PlatformModel):
+ """Model HSFP"""
+
+ name: Literal["Hyperspectral fiber photometry platform"] = (
+ "Hyperspectral fiber photometry platform"
+ )
+ abbreviation: Literal["HSFP"] = "HSFP"
+
+
+ class _Isi(_PlatformModel):
+ """Model ISI"""
+
+ name: Literal["Intrinsic signal imaging platform"] = (
+ "Intrinsic signal imaging platform"
+ )
+ abbreviation: Literal["ISI"] = "ISI"
+
+
+ class _Merfish(_PlatformModel):
+ """Model MERFISH"""
+
+ name: Literal["MERFISH platform"] = "MERFISH platform"
+ abbreviation: Literal["MERFISH"] = "MERFISH"
+
+
+ class _Mri(_PlatformModel):
+ """Model MRI"""
+
+ name: Literal["Magnetic resonance imaging platform"] = (
+ "Magnetic resonance imaging platform"
+ )
+ abbreviation: Literal["MRI"] = "MRI"
+
+
+ class _Mesospim(_PlatformModel):
+ """Model mesoSPIM"""
+
+ name: Literal["MesoSPIM platform"] = "MesoSPIM platform"
+ abbreviation: Literal["mesoSPIM"] = "mesoSPIM"
+
+
+ class _Motor_Observatory(_PlatformModel):
+ """Model motor-observatory"""
+
+ name: Literal["Motor observatory platform"] = "Motor observatory platform"
+ abbreviation: Literal["motor-observatory"] = "motor-observatory"
+
+
+ class _Multiplane_Ophys(_PlatformModel):
+ """Model multiplane-ophys"""
+
+ name: Literal["Multiplane optical physiology platform"] = (
+ "Multiplane optical physiology platform"
+ )
+ abbreviation: Literal["multiplane-ophys"] = "multiplane-ophys"
+
+
+ class _Slap2(_PlatformModel):
+ """Model SLAP2"""
+
+ name: Literal["SLAP2 platform"] = "SLAP2 platform"
+ abbreviation: Literal["SLAP2"] = "SLAP2"
+
+
+ class _Single_Plane_Ophys(_PlatformModel):
+ """Model single-plane-ophys"""
+
+ name: Literal["Single-plane optical physiology platform"] = (
+ "Single-plane optical physiology platform"
+ )
+ abbreviation: Literal["single-plane-ophys"] = "single-plane-ophys"
+
+
+ class _Smartspim(_PlatformModel):
+ """Model SmartSPIM"""
+
+ name: Literal["SmartSPIM platform"] = "SmartSPIM platform"
+ abbreviation: Literal["SmartSPIM"] = "SmartSPIM"
+
+
+ class Platform:
+ """Platforms"""
+
+ BEHAVIOR = _Behavior()
+ CONFOCAL = _Confocal()
+ ECEPHYS = _Ecephys()
+ EXASPIM = _Exaspim()
+ FIP = _Fip()
+ HCR = _Hcr()
+ HSFP = _Hsfp()
+ ISI = _Isi()
+ MERFISH = _Merfish()
+ MRI = _Mri()
+ MESOSPIM = _Mesospim()
+ MOTOR_OBSERVATORY = _Motor_Observatory()
+ MULTIPLANE_OPHYS = _Multiplane_Ophys()
+ SLAP2 = _Slap2()
+ SINGLE_PLANE_OPHYS = _Single_Plane_Ophys()
+ SMARTSPIM = _Smartspim()
+
+ ALL = tuple(_PlatformModel.__subclasses__())
+
+ ONE_OF = Annotated[
+ Union[tuple(_PlatformModel.__subclasses__())],
+ Field(discriminator="name"),
+ ]
+
+ abbreviation_map = {m().abbreviation: m() for m in ALL}
+
+ @classmethod
+ def from_abbreviation(cls, abbreviation: str):
+ """Get platform from abbreviation"""
+ return cls.abbreviation_map.get(abbreviation, None)
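A minimal usage sketch (illustrative, not part of the packaged diff) for the new local Platform definitions in platforms_v1, which replace the aind_data_schema_models.platforms import elsewhere in this diff: lookups go through abbreviation_map, and unknown abbreviations return None.

    from aind_data_transfer_service.configs.platforms_v1 import Platform

    # Resolve a known abbreviation to its frozen pydantic model
    ecephys = Platform.from_abbreviation("ecephys")
    print(ecephys.name)  # Electrophysiology platform

    # Unknown abbreviations fall back to None rather than raising
    print(Platform.from_abbreviation("not-a-platform"))  # None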
@@ -17,9 +17,9 @@ class LoggingConfigs(BaseSettings):
  loki_uri: Optional[str] = Field(
  default=None, description="URI of Loki logging server."
  )
- log_level: Literal[
- "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"
- ] = Field(default="INFO", description="Log level")
+ log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = (
+ Field(default="DEBUG", description="Log level")
+ )

  @property
  def app_name(self):
@@ -8,7 +8,6 @@ from typing import Any, Dict, List, Literal, Optional, Set, Union

  from aind_data_schema_models.data_name_patterns import build_data_name
  from aind_data_schema_models.modalities import Modality
- from aind_data_schema_models.platforms import Platform
  from pydantic import (
  BaseModel,
  ConfigDict,
@@ -21,6 +20,8 @@ from pydantic import (
  )
  from pydantic_settings import BaseSettings

+ from aind_data_transfer_service.configs.platforms_v1 import Platform
+
  _validation_context: ContextVar[Union[Dict[str, Any], None]] = ContextVar(
  "_validation_context", default=None
  )
@@ -163,8 +164,13 @@ class UploadJobConfigsV2(BaseSettings):
  project_name: str = Field(
  ..., description="Name of project", title="Project Name"
  )
- platform: Platform.ONE_OF = Field(
- ..., description="Platform", title="Platform"
+ platform: Optional[Platform.ONE_OF] = Field(
+ default=None,
+ title="Platform",
+ description=(
+ "Legacy field required for aind-data-schema v1. Will be removed"
+ " in future versions."
+ ),
  )
  modalities: List[Modality.ONE_OF] = Field(
  ...,
@@ -190,11 +196,26 @@ class UploadJobConfigsV2(BaseSettings):
  @computed_field
  def s3_prefix(self) -> str:
  """Construct s3_prefix from configs."""
+ if self.platform is not None:
+ label = f"{self.platform.abbreviation}_{self.subject_id}"
+ else:
+ label = self.subject_id
  return build_data_name(
- label=f"{self.platform.abbreviation}_{self.subject_id}",
+ label=label,
  creation_datetime=self.acq_datetime,
  )

+ @field_validator("platform", mode="before")
+ def validate_platform(cls, v):
+ """
+ For backwards compatibility, allow a user to input an
+ aind-data-schema-model platform and then convert it.
+ """
+ if type(v).__module__ == "aind_data_schema_models.platforms":
+ return v.model_dump()
+ else:
+ return v
+
  @field_validator("job_type", "project_name", mode="before")
  def validate_with_context(cls, v: str, info: ValidationInfo) -> str:
  """
@@ -290,11 +311,15 @@ class SubmitJobRequestV2(BaseSettings):
  # check against any jobs in the context
  current_jobs = (info.context or dict()).get("current_jobs", list())
  for job in current_jobs:
- prefix = job.get("s3_prefix")
- if (
- prefix is not None
- and prefix in jobs_map
- and json.dumps(job, sort_keys=True) in jobs_map[prefix]
- ):
- raise ValueError(f"Job is already running/queued for {prefix}")
+ jobs_to_check = job.get("upload_jobs", [job])
+ for j in jobs_to_check:
+ prefix = j.get("s3_prefix")
+ if (
+ prefix is not None
+ and prefix in jobs_map
+ and json.dumps(j, sort_keys=True) in jobs_map[prefix]
+ ):
+ raise ValueError(
+ f"Job is already running/queued for {prefix}"
+ )
  return self