deep-code 0.0.1.dev0__tar.gz → 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/PKG-INFO +75 -45
  2. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/README.md +72 -43
  3. deep_code-0.1.2/deep_code/cli/generate_config.py +22 -0
  4. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/cli/main.py +4 -3
  5. deep_code-0.1.2/deep_code/cli/publish.py +31 -0
  6. deep_code-0.1.2/deep_code/constants.py +31 -0
  7. deep_code-0.1.2/deep_code/tests/tools/test_publish.py +109 -0
  8. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tests/utils/test_dataset_stac_generator.py +58 -0
  9. deep_code-0.1.2/deep_code/tests/utils/test_github_automation.py +171 -0
  10. deep_code-0.1.2/deep_code/tests/utils/test_ogc_api_record.py +243 -0
  11. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tests/utils/test_ogc_record_generator.py +2 -5
  12. deep_code-0.1.2/deep_code/tools/new.py +78 -0
  13. deep_code-0.1.2/deep_code/tools/publish.py +433 -0
  14. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/utils/dataset_stac_generator.py +134 -13
  15. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/utils/github_automation.py +42 -19
  16. deep_code-0.1.2/deep_code/utils/helper.py +14 -0
  17. deep_code-0.1.2/deep_code/utils/ogc_api_record.py +268 -0
  18. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/utils/ogc_record_generator.py +19 -4
  19. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/utils/osc_extension.py +2 -14
  20. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/version.py +1 -1
  21. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code.egg-info/PKG-INFO +75 -45
  22. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code.egg-info/SOURCES.txt +2 -0
  23. deep_code-0.0.1.dev0/deep_code/cli/publish.py +0 -26
  24. deep_code-0.0.1.dev0/deep_code/constants.py +0 -16
  25. deep_code-0.0.1.dev0/deep_code/tests/tools/test_publish.py +0 -120
  26. deep_code-0.0.1.dev0/deep_code/tests/utils/test_github_automation.py +0 -120
  27. deep_code-0.0.1.dev0/deep_code/tests/utils/test_ogc_api_record.py +0 -113
  28. deep_code-0.0.1.dev0/deep_code/tools/new.py +0 -5
  29. deep_code-0.0.1.dev0/deep_code/tools/publish.py +0 -233
  30. deep_code-0.0.1.dev0/deep_code/utils/ogc_api_record.py +0 -94
  31. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/LICENSE +0 -0
  32. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/__init__.py +0 -0
  33. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/cli/__init__.py +0 -0
  34. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tests/tools/__init__.py +0 -0
  35. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tests/utils/__init__.py +0 -0
  36. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tests/utils/test_osc_extension.py +0 -0
  37. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tools/__init__.py +0 -0
  38. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tools/check.py +0 -0
  39. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tools/register.py +0 -0
  40. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tools/setup_ci.py +0 -0
  41. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/tools/test.py +0 -0
  42. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code/utils/__init__.py +0 -0
  43. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code.egg-info/dependency_links.txt +0 -0
  44. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code.egg-info/entry_points.txt +0 -0
  45. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code.egg-info/requires.txt +0 -0
  46. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/deep_code.egg-info/top_level.txt +0 -0
  47. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/pyproject.toml +0 -0
  48. {deep_code-0.0.1.dev0 → deep_code-0.1.2}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: deep_code
3
- Version: 0.0.1.dev0
3
+ Version: 0.1.2
4
4
  Summary: deepesdl earthcode integration utility tool
5
5
  Author-email: Tejas Morbagal Harish <tejas.morbagalharish@brockmann-consult.de>
6
6
  License: MIT
@@ -28,6 +28,7 @@ Requires-Dist: ruff; extra == "dev"
28
28
  Requires-Dist: pytest; extra == "dev"
29
29
  Requires-Dist: pytest-cov; extra == "dev"
30
30
  Requires-Dist: pytest-recording; extra == "dev"
31
+ Dynamic: license-file
31
32
 
32
33
  # deep-code
33
34
 
@@ -46,7 +47,8 @@ experiments/workflow as OGC API record and Datasets as an OSC stac collection.
46
47
  ## Setup
47
48
 
48
49
  ## Install
49
- `deep-code` will be available in PyPI and conda-forge. Till the stable release,
50
+ `deep-code` is available on PyPI and will be available on conda-forge
51
+ in the near future. Until the stable release,
50
52
  developers/contributors can follow the below steps to install deep-code.
51
53
 
52
54
  ## Installing from the repository for Developers/Contributors
@@ -87,14 +89,9 @@ pytest --cov-report html --cov=deep-code
87
89
  providing different utility functions.
88
90
  Use the --help option with these subcommands to get more details on usage.
89
91
 
90
- ### deep-code publish-product
91
-
92
- Publish a dataset which is a result of an experiment to the EarthCODE
93
- open-science catalog.
94
-
95
- ```commandline
96
- deep-code publish-dataset /path/to/dataset-config.yaml
97
- ```
92
+ The CLI retrieves the Git username and personal access token from a hidden file named
93
+ .gitaccess. Ensure this file is located in the same directory where you execute the CLI
94
+ command.
98
95
 
99
96
  #### .gitaccess example
100
97
 
@@ -102,60 +99,93 @@ open-science catalog.
102
99
  github-username: your-git-user
103
100
  github-token: personal access token
104
101
  ```
102
+ ### deep-code generate-config
103
+
104
+ Generates starter configuration templates for publishing to the EarthCODE
105
+ open-science catalog.
106
+
107
+ #### Usage
108
+ ```
109
+ deep-code generate-config [OPTIONS]
110
+ ```
111
+
112
+ #### Options
113
+ --output-dir, -o : Output directory (default: current)
114
+
115
+ #### Examples:
116
+ ```
117
+ deep-code generate-config
118
+ deep-code generate-config -o ./configs
119
+ ```
120
+
121
+ ### deep-code publish
122
+
123
+ Publishes metadata of experiment, workflow and dataset to the EarthCODE open-science
124
+ catalog.
125
+
126
+ #### Usage
127
+ ```
128
+ deep-code publish DATASET_CONFIG WORKFLOW_CONFIG [--environment ENVIRONMENT]
129
+ ```
130
+
131
+ #### Arguments
132
+ DATASET_CONFIG - Path to the dataset configuration YAML file
133
+ (e.g., dataset-config.yaml)
134
+
135
+ WORKFLOW_CONFIG - Path to the workflow configuration YAML file
136
+ (e.g., workflow-config.yaml)
137
+
138
+ #### Options
139
+ --environment, -e - Target catalog environment:
140
+ production (default) | staging | testing
105
141
 
142
+ #### Examples:
143
+ 1. Publish to staging catalog
144
+ ```
145
+ deep-code publish dataset-config.yaml workflow-config.yaml --environment=staging
146
+ ```
147
+ 2. Publish to testing catalog
148
+ ```
149
+ deep-code publish dataset-config.yaml workflow-config.yaml -e testing
150
+ ```
151
+ 3. Publish to production catalog
152
+ ```
153
+ deep-code publish dataset-config.yaml workflow-config.yaml
154
+ ```
106
155
  #### dataset-config.yaml example
107
156
 
108
157
  ```
109
- dataset_id: hydrology-1D-0.009deg-100x60x60-3.0.2.zarr
110
- collection_id: hydrology
158
+ dataset_id: esa-cci-permafrost-1x1151x1641-1.0.0.zarr
159
+ collection_id: esa-cci-permafrost
111
160
  osc_themes:
112
- - Land
113
- - Oceans
161
+ - cryosphere
162
+ osc_region: global
114
163
  # non-mandatory
115
- documentation_link: https://deepesdl.readthedocs.io/en/latest/datasets/hydrology-1D-0.009deg-100x60x60-3.0.2.zarr/
116
- access_link: s3://test
164
+ documentation_link: https://deepesdl.readthedocs.io/en/latest/datasets/esa-cci-permafrost-1x1151x1641-0-0-2-zarr
165
+ access_link: s3://deep-esdl-public/esa-cci-permafrost-1x1151x1641-1.0.0.zarr
117
166
  dataset_status: completed
118
- osc_region: global
119
- cf_parameter:
120
- - name: hydrology
121
167
  ```
122
168
 
123
- dataset-id has to be a valid dataset-id from `deep-esdl-public` s3 or your team bucket.
169
+ dataset-id has to be a valid dataset-id from `deep-esdl-public` s3 bucket or your team
170
+ bucket.
124
171
 
125
- ### deep-code publish-workflow
126
-
127
- Publish a workflow/experiment to the EarthCODE open-science catalog.
128
-
129
- ```commandline
130
- deep-code publish-workflow /path/to/workflow-config.yaml
131
- ```
132
172
  #### workflow-config.yaml example
133
173
 
134
174
  ```
135
- workflow_id: "4D Med hydrology cube generation"
175
+ workflow_id: "esa-cci-permafrost"
136
176
  properties:
137
- title: "Hydrology cube generation recipe"
138
- description: "4D Med cube generation"
177
+ title: "ESA CCI permafrost"
178
+ description: "cube generation workflow for esa-cci-permafrost"
139
179
  keywords:
140
180
  - Earth Science
141
181
  themes:
142
- - Atmosphere
143
- - Ocean
144
- - Evaporation
182
+ - cryosphere
145
183
  license: proprietary
146
184
  jupyter_kernel_info:
147
- name: deepesdl-xcube-1.7.1
185
+ name: deepesdl-xcube-1.8.3
148
186
  python_version: 3.11
149
- env_file: https://git/env.yml
150
- links:
151
- - rel: "documentation"
152
- type: "application/json"
153
- title: "4DMed Hydrology Cube Generation Recipe"
154
- href: "https://github.com/deepesdl/cube-gen/tree/main/hydrology/README.md"
155
- - rel: "jupyter-notebook"
156
- type: "application/json"
157
- title: "Workflow Jupyter Notebook"
158
- href: "https://github.com/deepesdl/cube-gen/blob/main/hydrology/notebooks/reading_hydrology.ipynb"
187
+ env_file: "https://github.com/deepesdl/cube-gen/blob/main/Permafrost/environment.yml"
188
+ jupyter_notebook_url: "https://github.com/deepesdl/cube-gen/blob/main/Permafrost/Create-CCI-Permafrost-cube-EarthCODE.ipynb"
159
189
  contact:
160
190
  - name: Tejas Morbagal Harish
161
191
  organization: Brockmann Consult GmbH
@@ -15,7 +15,8 @@ experiments/workflow as OGC API record and Datasets as an OSC stac collection.
15
15
  ## Setup
16
16
 
17
17
  ## Install
18
- `deep-code` will be available in PyPI and conda-forge. Till the stable release,
18
+ `deep-code` is available on PyPI and will be available on conda-forge
19
+ in the near future. Until the stable release,
19
20
  developers/contributors can follow the below steps to install deep-code.
20
21
 
21
22
  ## Installing from the repository for Developers/Contributors
@@ -56,14 +57,9 @@ pytest --cov-report html --cov=deep-code
56
57
  providing different utility functions.
57
58
  Use the --help option with these subcommands to get more details on usage.
58
59
 
59
- ### deep-code publish-product
60
-
61
- Publish a dataset which is a result of an experiment to the EarthCODE
62
- open-science catalog.
63
-
64
- ```commandline
65
- deep-code publish-dataset /path/to/dataset-config.yaml
66
- ```
60
+ The CLI retrieves the Git username and personal access token from a hidden file named
61
+ .gitaccess. Ensure this file is located in the same directory where you execute the CLI
62
+ command.
67
63
 
68
64
  #### .gitaccess example
69
65
 
@@ -71,60 +67,93 @@ open-science catalog.
71
67
  github-username: your-git-user
72
68
  github-token: personal access token
73
69
  ```
70
+ ### deep-code generate-config
71
+
72
+ Generates starter configuration templates for publishing to the EarthCODE
73
+ open-science catalog.
74
+
75
+ #### Usage
76
+ ```
77
+ deep-code generate-config [OPTIONS]
78
+ ```
79
+
80
+ #### Options
81
+ --output-dir, -o : Output directory (default: current)
82
+
83
+ #### Examples:
84
+ ```
85
+ deep-code generate-config
86
+ deep-code generate-config -o ./configs
87
+ ```
88
+
89
+ ### deep-code publish
90
+
91
+ Publishes metadata of experiment, workflow and dataset to the EarthCODE open-science
92
+ catalog.
93
+
94
+ #### Usage
95
+ ```
96
+ deep-code publish DATASET_CONFIG WORKFLOW_CONFIG [--environment ENVIRONMENT]
97
+ ```
98
+
99
+ #### Arguments
100
+ DATASET_CONFIG - Path to the dataset configuration YAML file
101
+ (e.g., dataset-config.yaml)
102
+
103
+ WORKFLOW_CONFIG - Path to the workflow configuration YAML file
104
+ (e.g., workflow-config.yaml)
105
+
106
+ #### Options
107
+ --environment, -e - Target catalog environment:
108
+ production (default) | staging | testing
74
109
 
110
+ #### Examples:
111
+ 1. Publish to staging catalog
112
+ ```
113
+ deep-code publish dataset-config.yaml workflow-config.yaml --environment=staging
114
+ ```
115
+ 2. Publish to testing catalog
116
+ ```
117
+ deep-code publish dataset-config.yaml workflow-config.yaml -e testing
118
+ ```
119
+ 3. Publish to production catalog
120
+ ```
121
+ deep-code publish dataset-config.yaml workflow-config.yaml
122
+ ```
75
123
  #### dataset-config.yaml example
76
124
 
77
125
  ```
78
- dataset_id: hydrology-1D-0.009deg-100x60x60-3.0.2.zarr
79
- collection_id: hydrology
126
+ dataset_id: esa-cci-permafrost-1x1151x1641-1.0.0.zarr
127
+ collection_id: esa-cci-permafrost
80
128
  osc_themes:
81
- - Land
82
- - Oceans
129
+ - cryosphere
130
+ osc_region: global
83
131
  # non-mandatory
84
- documentation_link: https://deepesdl.readthedocs.io/en/latest/datasets/hydrology-1D-0.009deg-100x60x60-3.0.2.zarr/
85
- access_link: s3://test
132
+ documentation_link: https://deepesdl.readthedocs.io/en/latest/datasets/esa-cci-permafrost-1x1151x1641-0-0-2-zarr
133
+ access_link: s3://deep-esdl-public/esa-cci-permafrost-1x1151x1641-1.0.0.zarr
86
134
  dataset_status: completed
87
- osc_region: global
88
- cf_parameter:
89
- - name: hydrology
90
135
  ```
91
136
 
92
- dataset-id has to be a valid dataset-id from `deep-esdl-public` s3 or your team bucket.
137
+ dataset-id has to be a valid dataset-id from `deep-esdl-public` s3 bucket or your team
138
+ bucket.
93
139
 
94
- ### deep-code publish-workflow
95
-
96
- Publish a workflow/experiment to the EarthCODE open-science catalog.
97
-
98
- ```commandline
99
- deep-code publish-workflow /path/to/workflow-config.yaml
100
- ```
101
140
  #### workflow-config.yaml example
102
141
 
103
142
  ```
104
- workflow_id: "4D Med hydrology cube generation"
143
+ workflow_id: "esa-cci-permafrost"
105
144
  properties:
106
- title: "Hydrology cube generation recipe"
107
- description: "4D Med cube generation"
145
+ title: "ESA CCI permafrost"
146
+ description: "cube generation workflow for esa-cci-permafrost"
108
147
  keywords:
109
148
  - Earth Science
110
149
  themes:
111
- - Atmosphere
112
- - Ocean
113
- - Evaporation
150
+ - cryosphere
114
151
  license: proprietary
115
152
  jupyter_kernel_info:
116
- name: deepesdl-xcube-1.7.1
153
+ name: deepesdl-xcube-1.8.3
117
154
  python_version: 3.11
118
- env_file: https://git/env.yml
119
- links:
120
- - rel: "documentation"
121
- type: "application/json"
122
- title: "4DMed Hydrology Cube Generation Recipe"
123
- href: "https://github.com/deepesdl/cube-gen/tree/main/hydrology/README.md"
124
- - rel: "jupyter-notebook"
125
- type: "application/json"
126
- title: "Workflow Jupyter Notebook"
127
- href: "https://github.com/deepesdl/cube-gen/blob/main/hydrology/notebooks/reading_hydrology.ipynb"
155
+ env_file: "https://github.com/deepesdl/cube-gen/blob/main/Permafrost/environment.yml"
156
+ jupyter_notebook_url: "https://github.com/deepesdl/cube-gen/blob/main/Permafrost/Create-CCI-Permafrost-cube-EarthCODE.ipynb"
128
157
  contact:
129
158
  - name: Tejas Morbagal Harish
130
159
  organization: Brockmann Consult GmbH
@@ -0,0 +1,22 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # Copyright (c) 2025 by Brockmann Consult GmbH
4
+ # Permissions are hereby granted under the terms of the MIT License:
5
+ # https://opensource.org/licenses/MIT.
6
+
7
+ import click
8
+
9
+ from deep_code.tools.new import TemplateGenerator
10
+
11
+
12
+ @click.command(name="generate-config")
13
+ @click.option(
14
+ "--output-dir",
15
+ "-o",
16
+ type=click.Path(exists=True, file_okay=False, writable=True),
17
+ default=".",
18
+ help="Output directory for templates",
19
+ )
20
+ def generate_config(output_dir):
21
+ TemplateGenerator.generate_workflow_template(f"{output_dir}/workflow_config.yaml")
22
+ TemplateGenerator.generate_dataset_template(f"{output_dir}/dataset_config.yaml")
@@ -6,7 +6,8 @@
6
6
 
7
7
  import click
8
8
 
9
- from deep_code.cli.publish import publish_dataset, publish_workflow
9
+ from deep_code.cli.generate_config import generate_config
10
+ from deep_code.cli.publish import publish
10
11
 
11
12
 
12
13
  @click.group()
@@ -15,8 +16,8 @@ def main():
15
16
  pass
16
17
 
17
18
 
18
- main.add_command(publish_dataset)
19
- main.add_command(publish_workflow)
19
+ main.add_command(publish)
20
+ main.add_command(generate_config)
20
21
 
21
22
  if __name__ == "__main__":
22
23
  main()
@@ -0,0 +1,31 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # Copyright (c) 2025 by Brockmann Consult GmbH
4
+ # Permissions are hereby granted under the terms of the MIT License:
5
+ # https://opensource.org/licenses/MIT.
6
+
7
+ import click
8
+
9
+ from deep_code.tools.publish import Publisher
10
+
11
+
12
+ @click.command(name="publish")
13
+ @click.argument("dataset_config", type=click.Path(exists=True))
14
+ @click.argument("workflow_config", type=click.Path(exists=True))
15
+ @click.option(
16
+ "--environment",
17
+ "-e",
18
+ type=click.Choice(["production", "staging", "testing"], case_sensitive=False),
19
+ default="production",
20
+ help="Target environment for publishing (production, staging, testing)",
21
+ )
22
+ def publish(dataset_config, workflow_config, environment):
23
+ """Request publishing a dataset along with experiment and workflow metadata to the
24
+ open science catalogue.
25
+ """
26
+ publisher = Publisher(
27
+ dataset_config_path=dataset_config,
28
+ workflow_config_path=workflow_config,
29
+ environment=environment.lower(),
30
+ )
31
+ publisher.publish_all()
@@ -0,0 +1,31 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # Copyright (c) 2024 by Brockmann Consult GmbH
4
+ # Permissions are hereby granted under the terms of the MIT License:
5
+ # https://opensource.org/licenses/MIT.
6
+
7
+ OSC_SCHEMA_URI = "https://stac-extensions.github.io/osc/v1.0.0/schema.json"
8
+ CF_SCHEMA_URI = "https://stac-extensions.github.io/cf/v0.2.0/schema.json"
9
+ THEMES_SCHEMA_URI = "https://stac-extensions.github.io/themes/v1.0.0/schema.json"
10
+ OSC_THEME_SCHEME = "https://github.com/stac-extensions/osc#theme"
11
+ OSC_REPO_OWNER = "ESA-EarthCODE"
12
+ OSC_REPO_NAME = "open-science-catalog-metadata"
13
+ OSC_BRANCH_NAME = "add-new-collection"
14
+ DEFAULT_THEME_SCHEME = (
15
+ "https://gcmd.earthdata.nasa.gov/kms/concepts/concept_scheme/sciencekeywords"
16
+ )
17
+ OGC_API_RECORD_SPEC = "http://www.opengis.net/spec/ogcapi-records-1/1.0/req/record-core"
18
+ WF_BRANCH_NAME = "add-new-workflow-from-deepesdl"
19
+ VARIABLE_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/variables/catalog.json"
20
+ PRODUCT_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/products/catalog.json"
21
+ DEEPESDL_COLLECTION_SELF_HREF = (
22
+ "https://esa-earthcode.github.io/open-science-catalog-metadata/projects/deepesdl"
23
+ "/collection.json"
24
+ )
25
+ BASE_URL_OSC = "https://esa-earthcode.github.io/open-science-catalog-metadata"
26
+ EXPERIMENT_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/experiments/catalog.json"
27
+ WORKFLOW_BASE_CATALOG_SELF_HREF = (
28
+ "https://esa-earthcode.github.io/open-science-catalog-metadata/workflows/catalog"
29
+ ".json"
30
+ )
31
+ PROJECT_COLLECTION_NAME = "deep-earth-system-data-lab"
@@ -0,0 +1,109 @@
1
+ import json
2
+ import tempfile
3
+ import unittest
4
+ from pathlib import Path
5
+ from unittest.mock import MagicMock, mock_open, patch
6
+
7
+ import yaml
8
+ from pystac import Catalog
9
+
10
+ from deep_code.tools.publish import Publisher
11
+
12
+
13
+ class TestPublisher(unittest.TestCase):
14
+ @patch("fsspec.open")
15
+ @patch("deep_code.tools.publish.GitHubPublisher")
16
+ def setUp(self, mock_github_publisher, mock_fsspec_open):
17
+ # Mock GitHubPublisher to avoid reading .gitaccess
18
+ self.mock_github_publisher_instance = MagicMock()
19
+ mock_github_publisher.return_value = self.mock_github_publisher_instance
20
+
21
+ # Mock dataset and workflow config files
22
+ self.dataset_config = {
23
+ "collection_id": "test-collection",
24
+ "dataset_id": "test-dataset",
25
+ }
26
+ self.workflow_config = {
27
+ "properties": {"title": "Test Workflow"},
28
+ "workflow_id": "test-workflow",
29
+ }
30
+
31
+ # Mock fsspec.open for config files
32
+ self.mock_fsspec_open = mock_fsspec_open
33
+ self.mock_fsspec_open.side_effect = [
34
+ mock_open(read_data=yaml.dump(self.dataset_config)).return_value,
35
+ mock_open(read_data=yaml.dump(self.workflow_config)).return_value,
36
+ ]
37
+
38
+ # Initialize Publisher
39
+ self.publisher = Publisher(
40
+ dataset_config_path="test-dataset-config.yaml",
41
+ workflow_config_path="test-workflow-config.yaml",
42
+ )
43
+
44
+ def test_normalize_name(self):
45
+ self.assertEqual(Publisher._normalize_name("Test Name"), "test-name")
46
+ self.assertEqual(Publisher._normalize_name("Test   Name"), "test---name")
47
+ self.assertIsNone(Publisher._normalize_name(""))
48
+ self.assertIsNone(Publisher._normalize_name(None))
49
+
50
+ def test_write_to_file(self):
51
+ # Create a temporary file
52
+ with tempfile.NamedTemporaryFile(delete=False) as temp_file:
53
+ file_path = temp_file.name
54
+
55
+ # Test data
56
+ data = {"key": "value"}
57
+
58
+ # Call the method
59
+ Publisher._write_to_file(file_path, data)
60
+
61
+ # Read the file and verify its content
62
+ with open(file_path, "r") as f:
63
+ content = json.load(f)
64
+ self.assertEqual(content, data)
65
+
66
+ # Clean up
67
+ Path(file_path).unlink()
68
+
69
+ def test_update_base_catalog(self):
70
+ # Create a mock Catalog
71
+ catalog = Catalog(id="test-catalog", description="Test Catalog")
72
+
73
+ # Mock file path and item ID
74
+ catalog_path = "test-catalog.json"
75
+ item_id = "test-item"
76
+ self_href = "https://example.com/catalog.json"
77
+
78
+ self.publisher.workflow_title = "Test Workflow"
79
+
80
+ # Mock the Catalog.from_file method
81
+ with patch("pystac.Catalog.from_file", return_value=catalog):
82
+ updated_catalog = self.publisher._update_base_catalog(
83
+ catalog_path, item_id, self_href
84
+ )
85
+
86
+ # Assertions
87
+ self.assertEqual(updated_catalog.get_self_href(), self_href)
88
+ self.assertIsInstance(updated_catalog, Catalog)
89
+
90
+ def test_read_config_files(self):
91
+ # Mock dataset and workflow config files
92
+ dataset_config = {
93
+ "collection_id": "test-collection",
94
+ "dataset_id": "test-dataset",
95
+ }
96
+ workflow_config = {
97
+ "properties": {"title": "Test Workflow"},
98
+ "workflow_id": "test-workflow",
99
+ }
100
+
101
+ # Mock fsspec.open for config files
102
+ self.mock_fsspec_open.side_effect = [
103
+ mock_open(read_data=yaml.dump(dataset_config)).return_value,
104
+ mock_open(read_data=yaml.dump(workflow_config)).return_value,
105
+ ]
106
+
107
+ # Assertions
108
+ self.assertEqual(self.publisher.dataset_config, dataset_config)
109
+ self.assertEqual(self.publisher.workflow_config, workflow_config)
@@ -217,3 +217,61 @@ class TestOSCProductSTACGenerator(unittest.TestCase):
217
217
  )
218
218
  self.assertIn("Public store, Authenticated store", str(context.exception))
219
219
  self.assertEqual(mock_new_data_store.call_count, 2)
220
+
221
+
222
+ class TestFormatString(unittest.TestCase):
223
+ def test_single_word(self):
224
+ self.assertEqual(
225
+ OscDatasetStacGenerator.format_string("temperature"), "Temperature"
226
+ )
227
+ self.assertEqual(OscDatasetStacGenerator.format_string("temp"), "Temp")
228
+ self.assertEqual(OscDatasetStacGenerator.format_string("hello"), "Hello")
229
+
230
+ def test_multiple_words_with_spaces(self):
231
+ self.assertEqual(
232
+ OscDatasetStacGenerator.format_string("surface temp"), "Surface Temp"
233
+ )
234
+ self.assertEqual(
235
+ OscDatasetStacGenerator.format_string("this is a test"), "This Is A Test"
236
+ )
237
+
238
+ def test_multiple_words_with_underscores(self):
239
+ self.assertEqual(
240
+ OscDatasetStacGenerator.format_string("surface_temp"), "Surface Temp"
241
+ )
242
+ self.assertEqual(
243
+ OscDatasetStacGenerator.format_string("this_is_a_test"), "This Is A Test"
244
+ )
245
+
246
+ def test_mixed_spaces_and_underscores(self):
247
+ self.assertEqual(
248
+ OscDatasetStacGenerator.format_string("surface_temp and_more"),
249
+ "Surface Temp And More",
250
+ )
251
+ self.assertEqual(
252
+ OscDatasetStacGenerator.format_string(
253
+ "mixed_case_with_underscores_and spaces"
254
+ ),
255
+ "Mixed Case With Underscores And Spaces",
256
+ )
257
+
258
+ def test_edge_cases(self):
259
+ # Empty string
260
+ self.assertEqual(OscDatasetStacGenerator.format_string(""), "")
261
+ # Single word with trailing underscore
262
+ self.assertEqual(
263
+ OscDatasetStacGenerator.format_string("temperature_"), "Temperature"
264
+ )
265
+ # Single word with leading underscore
266
+ self.assertEqual(OscDatasetStacGenerator.format_string("_temp"), "Temp")
267
+ # Single word with leading/trailing spaces
268
+ self.assertEqual(OscDatasetStacGenerator.format_string(" hello "), "Hello")
269
+ # Multiple spaces or underscores
270
+ self.assertEqual(
271
+ OscDatasetStacGenerator.format_string("too___many___underscores"),
272
+ "Too Many Underscores",
273
+ )
274
+ self.assertEqual(
275
+ OscDatasetStacGenerator.format_string("too   many   spaces"),
276
+ "Too Many Spaces",
277
+ )