deep-code 0.0.1.dev0__tar.gz → 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/PKG-INFO +30 -44
  2. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/README.md +27 -42
  3. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/cli/main.py +2 -3
  4. deep_code-0.1.1/deep_code/cli/publish.py +21 -0
  5. deep_code-0.1.1/deep_code/constants.py +31 -0
  6. deep_code-0.1.1/deep_code/tests/tools/test_publish.py +108 -0
  7. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tests/utils/test_dataset_stac_generator.py +58 -0
  8. deep_code-0.1.1/deep_code/tests/utils/test_github_automation.py +171 -0
  9. deep_code-0.1.1/deep_code/tests/utils/test_ogc_api_record.py +243 -0
  10. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tests/utils/test_ogc_record_generator.py +2 -5
  11. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tools/new.py +9 -1
  12. deep_code-0.1.1/deep_code/tools/publish.py +419 -0
  13. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/utils/dataset_stac_generator.py +134 -13
  14. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/utils/github_automation.py +42 -19
  15. deep_code-0.1.1/deep_code/utils/helper.py +14 -0
  16. deep_code-0.1.1/deep_code/utils/ogc_api_record.py +268 -0
  17. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/utils/ogc_record_generator.py +19 -4
  18. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/utils/osc_extension.py +2 -14
  19. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/version.py +1 -1
  20. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code.egg-info/PKG-INFO +30 -44
  21. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code.egg-info/SOURCES.txt +1 -0
  22. deep_code-0.0.1.dev0/deep_code/cli/publish.py +0 -26
  23. deep_code-0.0.1.dev0/deep_code/constants.py +0 -16
  24. deep_code-0.0.1.dev0/deep_code/tests/tools/test_publish.py +0 -120
  25. deep_code-0.0.1.dev0/deep_code/tests/utils/test_github_automation.py +0 -120
  26. deep_code-0.0.1.dev0/deep_code/tests/utils/test_ogc_api_record.py +0 -113
  27. deep_code-0.0.1.dev0/deep_code/tools/publish.py +0 -233
  28. deep_code-0.0.1.dev0/deep_code/utils/ogc_api_record.py +0 -94
  29. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/LICENSE +0 -0
  30. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/__init__.py +0 -0
  31. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/cli/__init__.py +0 -0
  32. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tests/tools/__init__.py +0 -0
  33. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tests/utils/__init__.py +0 -0
  34. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tests/utils/test_osc_extension.py +0 -0
  35. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tools/__init__.py +0 -0
  36. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tools/check.py +0 -0
  37. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tools/register.py +0 -0
  38. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tools/setup_ci.py +0 -0
  39. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/tools/test.py +0 -0
  40. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code/utils/__init__.py +0 -0
  41. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code.egg-info/dependency_links.txt +0 -0
  42. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code.egg-info/entry_points.txt +0 -0
  43. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code.egg-info/requires.txt +0 -0
  44. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/deep_code.egg-info/top_level.txt +0 -0
  45. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/pyproject.toml +0 -0
  46. {deep_code-0.0.1.dev0 → deep_code-0.1.1}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: deep_code
3
- Version: 0.0.1.dev0
3
+ Version: 0.1.1
4
4
  Summary: deepesdl earthcode integration utility tool
5
5
  Author-email: Tejas Morbagal Harish <tejas.morbagalharish@brockmann-consult.de>
6
6
  License: MIT
@@ -28,6 +28,7 @@ Requires-Dist: ruff; extra == "dev"
28
28
  Requires-Dist: pytest; extra == "dev"
29
29
  Requires-Dist: pytest-cov; extra == "dev"
30
30
  Requires-Dist: pytest-recording; extra == "dev"
31
+ Dynamic: license-file
31
32
 
32
33
  # deep-code
33
34
 
@@ -87,14 +88,9 @@ pytest --cov-report html --cov=deep-code
87
88
  providing different utility functions.
88
89
  Use the --help option with these subcommands to get more details on usage.
89
90
 
90
- ### deep-code publish-product
91
-
92
- Publish a dataset which is a result of an experiment to the EarthCODE
93
- open-science catalog.
94
-
95
- ```commandline
96
- deep-code publish-dataset /path/to/dataset-config.yaml
97
- ```
91
+ The CLI retrieves the Git username and personal access token from a hidden file named
92
+ .gitaccess. Ensure this file is located in the same directory where you execute the CLI
93
+ command.
98
94
 
99
95
  #### .gitaccess example
100
96
 
@@ -103,59 +99,49 @@ github-username: your-git-user
103
99
  github-token: personal access token
104
100
  ```
105
101
 
102
+ ### deep-code publish
103
+
104
+ Publish the experiment, workflow and dataset which is a result of an experiment to
105
+ the EarthCODE open-science catalog.
106
+
107
+ ```commandline
108
+ deep-code publish /path/to/dataset-config.yaml /path/to/workflow-config.yaml
109
+ ```
110
+
106
111
  #### dataset-config.yaml example
107
112
 
108
113
  ```
109
- dataset_id: hydrology-1D-0.009deg-100x60x60-3.0.2.zarr
110
- collection_id: hydrology
114
+ dataset_id: esa-cci-permafrost-1x1151x1641-1.0.0.zarr
115
+ collection_id: esa-cci-permafrost
111
116
  osc_themes:
112
- - Land
113
- - Oceans
117
+ - cryosphere
118
+ osc_region: global
114
119
  # non-mandatory
115
- documentation_link: https://deepesdl.readthedocs.io/en/latest/datasets/hydrology-1D-0.009deg-100x60x60-3.0.2.zarr/
116
- access_link: s3://test
120
+ documentation_link: https://deepesdl.readthedocs.io/en/latest/datasets/esa-cci-permafrost-1x1151x1641-0-0-2-zarr
121
+ access_link: s3://deep-esdl-public/esa-cci-permafrost-1x1151x1641-1.0.0.zarr
117
122
  dataset_status: completed
118
- osc_region: global
119
- cf_parameter:
120
- - name: hydrology
121
123
  ```
122
124
 
123
- dataset-id has to be a valid dataset-id from `deep-esdl-public` s3 or your team bucket.
124
-
125
- ### deep-code publish-workflow
125
+ dataset-id has to be a valid dataset-id from `deep-esdl-public` s3 bucket or your team
126
+ bucket.
126
127
 
127
- Publish a workflow/experiment to the EarthCODE open-science catalog.
128
-
129
- ```commandline
130
- deep-code publish-workflow /path/to/workflow-config.yaml
131
- ```
132
128
  #### workflow-config.yaml example
133
129
 
134
130
  ```
135
- workflow_id: "4D Med hydrology cube generation"
131
+ workflow_id: "esa-cci-permafrost"
136
132
  properties:
137
- title: "Hydrology cube generation recipe"
138
- description: "4D Med cube generation"
133
+ title: "ESA CCI permafrost"
134
+ description: "cube generation workflow for esa-cci-permafrost"
139
135
  keywords:
140
136
  - Earth Science
141
137
  themes:
142
- - Atmosphere
143
- - Ocean
144
- - Evaporation
138
+ - cryosphere
145
139
  license: proprietary
146
140
  jupyter_kernel_info:
147
- name: deepesdl-xcube-1.7.1
141
+ name: deepesdl-xcube-1.8.3
148
142
  python_version: 3.11
149
- env_file: https://git/env.yml
150
- links:
151
- - rel: "documentation"
152
- type: "application/json"
153
- title: "4DMed Hydrology Cube Generation Recipe"
154
- href: "https://github.com/deepesdl/cube-gen/tree/main/hydrology/README.md"
155
- - rel: "jupyter-notebook"
156
- type: "application/json"
157
- title: "Workflow Jupyter Notebook"
158
- href: "https://github.com/deepesdl/cube-gen/blob/main/hydrology/notebooks/reading_hydrology.ipynb"
143
+ env_file: "https://github.com/deepesdl/cube-gen/blob/main/Permafrost/environment.yml"
144
+ jupyter_notebook_url: "https://github.com/deepesdl/cube-gen/blob/main/Permafrost/Create-CCI-Permafrost-cube-EarthCODE.ipynb"
159
145
  contact:
160
146
  - name: Tejas Morbagal Harish
161
147
  organization: Brockmann Consult GmbH
@@ -56,14 +56,9 @@ pytest --cov-report html --cov=deep-code
56
56
  providing different utility functions.
57
57
  Use the --help option with these subcommands to get more details on usage.
58
58
 
59
- ### deep-code publish-product
60
-
61
- Publish a dataset which is a result of an experiment to the EarthCODE
62
- open-science catalog.
63
-
64
- ```commandline
65
- deep-code publish-dataset /path/to/dataset-config.yaml
66
- ```
59
+ The CLI retrieves the Git username and personal access token from a hidden file named
60
+ .gitaccess. Ensure this file is located in the same directory where you execute the CLI
61
+ command.
67
62
 
68
63
  #### .gitaccess example
69
64
 
@@ -72,59 +67,49 @@ github-username: your-git-user
72
67
  github-token: personal access token
73
68
  ```
74
69
 
70
+ ### deep-code publish
71
+
72
+ Publish the experiment, workflow and dataset which is a result of an experiment to
73
+ the EarthCODE open-science catalog.
74
+
75
+ ```commandline
76
+ deep-code publish /path/to/dataset-config.yaml /path/to/workflow-config.yaml
77
+ ```
78
+
75
79
  #### dataset-config.yaml example
76
80
 
77
81
  ```
78
- dataset_id: hydrology-1D-0.009deg-100x60x60-3.0.2.zarr
79
- collection_id: hydrology
82
+ dataset_id: esa-cci-permafrost-1x1151x1641-1.0.0.zarr
83
+ collection_id: esa-cci-permafrost
80
84
  osc_themes:
81
- - Land
82
- - Oceans
85
+ - cryosphere
86
+ osc_region: global
83
87
  # non-mandatory
84
- documentation_link: https://deepesdl.readthedocs.io/en/latest/datasets/hydrology-1D-0.009deg-100x60x60-3.0.2.zarr/
85
- access_link: s3://test
88
+ documentation_link: https://deepesdl.readthedocs.io/en/latest/datasets/esa-cci-permafrost-1x1151x1641-0-0-2-zarr
89
+ access_link: s3://deep-esdl-public/esa-cci-permafrost-1x1151x1641-1.0.0.zarr
86
90
  dataset_status: completed
87
- osc_region: global
88
- cf_parameter:
89
- - name: hydrology
90
91
  ```
91
92
 
92
- dataset-id has to be a valid dataset-id from `deep-esdl-public` s3 or your team bucket.
93
-
94
- ### deep-code publish-workflow
93
+ dataset-id has to be a valid dataset-id from `deep-esdl-public` s3 bucket or your team
94
+ bucket.
95
95
 
96
- Publish a workflow/experiment to the EarthCODE open-science catalog.
97
-
98
- ```commandline
99
- deep-code publish-workflow /path/to/workflow-config.yaml
100
- ```
101
96
  #### workflow-config.yaml example
102
97
 
103
98
  ```
104
- workflow_id: "4D Med hydrology cube generation"
99
+ workflow_id: "esa-cci-permafrost"
105
100
  properties:
106
- title: "Hydrology cube generation recipe"
107
- description: "4D Med cube generation"
101
+ title: "ESA CCI permafrost"
102
+ description: "cube generation workflow for esa-cci-permafrost"
108
103
  keywords:
109
104
  - Earth Science
110
105
  themes:
111
- - Atmosphere
112
- - Ocean
113
- - Evaporation
106
+ - cryosphere
114
107
  license: proprietary
115
108
  jupyter_kernel_info:
116
- name: deepesdl-xcube-1.7.1
109
+ name: deepesdl-xcube-1.8.3
117
110
  python_version: 3.11
118
- env_file: https://git/env.yml
119
- links:
120
- - rel: "documentation"
121
- type: "application/json"
122
- title: "4DMed Hydrology Cube Generation Recipe"
123
- href: "https://github.com/deepesdl/cube-gen/tree/main/hydrology/README.md"
124
- - rel: "jupyter-notebook"
125
- type: "application/json"
126
- title: "Workflow Jupyter Notebook"
127
- href: "https://github.com/deepesdl/cube-gen/blob/main/hydrology/notebooks/reading_hydrology.ipynb"
111
+ env_file: "https://github.com/deepesdl/cube-gen/blob/main/Permafrost/environment.yml"
112
+ jupyter_notebook_url: "https://github.com/deepesdl/cube-gen/blob/main/Permafrost/Create-CCI-Permafrost-cube-EarthCODE.ipynb"
128
113
  contact:
129
114
  - name: Tejas Morbagal Harish
130
115
  organization: Brockmann Consult GmbH
@@ -6,7 +6,7 @@
6
6
 
7
7
  import click
8
8
 
9
- from deep_code.cli.publish import publish_dataset, publish_workflow
9
+ from deep_code.cli.publish import publish
10
10
 
11
11
 
12
12
  @click.group()
@@ -15,8 +15,7 @@ def main():
15
15
  pass
16
16
 
17
17
 
18
- main.add_command(publish_dataset)
19
- main.add_command(publish_workflow)
18
+ main.add_command(publish)
20
19
 
21
20
  if __name__ == "__main__":
22
21
  main()
@@ -0,0 +1,21 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # Copyright (c) 2025 by Brockmann Consult GmbH
4
+ # Permissions are hereby granted under the terms of the MIT License:
5
+ # https://opensource.org/licenses/MIT.
6
+
7
+ import click
8
+
9
+ from deep_code.tools.publish import Publisher
10
+
11
+
12
+ @click.command(name="publish")
13
+ @click.argument("dataset_config", type=click.Path(exists=True))
14
+ @click.argument("workflow_config", type=click.Path(exists=True))
15
+ def publish(dataset_config, workflow_config):
16
+ """Request publishing a dataset to the open science catalogue.
17
+ """
18
+ publisher = Publisher(
19
+ dataset_config_path=dataset_config, workflow_config_path=workflow_config
20
+ )
21
+ publisher.publish_all()
@@ -0,0 +1,31 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # Copyright (c) 2024 by Brockmann Consult GmbH
4
+ # Permissions are hereby granted under the terms of the MIT License:
5
+ # https://opensource.org/licenses/MIT.
6
+
7
+ OSC_SCHEMA_URI = "https://stac-extensions.github.io/osc/v1.0.0/schema.json"
8
+ CF_SCHEMA_URI = "https://stac-extensions.github.io/cf/v0.2.0/schema.json"
9
+ THEMES_SCHEMA_URI = "https://stac-extensions.github.io/themes/v1.0.0/schema.json"
10
+ OSC_THEME_SCHEME = "https://github.com/stac-extensions/osc#theme"
11
+ OSC_REPO_OWNER = "ESA-EarthCODE"
12
+ OSC_REPO_NAME = "open-science-catalog-metadata"
13
+ OSC_BRANCH_NAME = "add-new-collection"
14
+ DEFAULT_THEME_SCHEME = (
15
+ "https://gcmd.earthdata.nasa.gov/kms/concepts/concept_scheme/sciencekeywords"
16
+ )
17
+ OGC_API_RECORD_SPEC = "http://www.opengis.net/spec/ogcapi-records-1/1.0/req/record-core"
18
+ WF_BRANCH_NAME = "add-new-workflow-from-deepesdl"
19
+ VARIABLE_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/variables/catalog.json"
20
+ PRODUCT_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/products/catalog.json"
21
+ DEEPESDL_COLLECTION_SELF_HREF = (
22
+ "https://esa-earthcode.github.io/open-science-catalog-metadata/projects/deepesdl"
23
+ "/collection.json"
24
+ )
25
+ BASE_URL_OSC = "https://esa-earthcode.github.io/open-science-catalog-metadata"
26
+ EXPERIMENT_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/experiments/catalog.json"
27
+ WORKFLOW_BASE_CATALOG_SELF_HREF = (
28
+ "https://esa-earthcode.github.io/open-science-catalog-metadata/workflows/catalog"
29
+ ".json"
30
+ )
31
+ PROJECT_COLLECTION_NAME = "deep-earth-system-data-lab"
@@ -0,0 +1,108 @@
1
+ import unittest
2
+ from unittest.mock import patch, mock_open, MagicMock
3
+ import json
4
+ import yaml
5
+ from pathlib import Path
6
+ import tempfile
7
+ from pystac import Catalog
8
+
9
+ from deep_code.tools.publish import Publisher
10
+
11
+
12
+ class TestPublisher(unittest.TestCase):
13
+ @patch("fsspec.open")
14
+ @patch("deep_code.tools.publish.GitHubPublisher")
15
+ def setUp(self, mock_github_publisher, mock_fsspec_open):
16
+ # Mock GitHubPublisher to avoid reading .gitaccess
17
+ self.mock_github_publisher_instance = MagicMock()
18
+ mock_github_publisher.return_value = self.mock_github_publisher_instance
19
+
20
+ # Mock dataset and workflow config files
21
+ self.dataset_config = {
22
+ "collection_id": "test-collection",
23
+ "dataset_id": "test-dataset",
24
+ }
25
+ self.workflow_config = {
26
+ "properties": {"title": "Test Workflow"},
27
+ "workflow_id": "test-workflow",
28
+ }
29
+
30
+ # Mock fsspec.open for config files
31
+ self.mock_fsspec_open = mock_fsspec_open
32
+ self.mock_fsspec_open.side_effect = [
33
+ mock_open(read_data=yaml.dump(self.dataset_config)).return_value,
34
+ mock_open(read_data=yaml.dump(self.workflow_config)).return_value,
35
+ ]
36
+
37
+ # Initialize Publisher
38
+ self.publisher = Publisher(
39
+ dataset_config_path="test-dataset-config.yaml",
40
+ workflow_config_path="test-workflow-config.yaml",
41
+ )
42
+
43
+ def test_normalize_name(self):
44
+ self.assertEqual(Publisher._normalize_name("Test Name"), "test-name")
45
+ self.assertEqual(Publisher._normalize_name("Test   Name"), "test---name")
46
+ self.assertIsNone(Publisher._normalize_name(""))
47
+ self.assertIsNone(Publisher._normalize_name(None))
48
+
49
+ def test_write_to_file(self):
50
+ # Create a temporary file
51
+ with tempfile.NamedTemporaryFile(delete=False) as temp_file:
52
+ file_path = temp_file.name
53
+
54
+ # Test data
55
+ data = {"key": "value"}
56
+
57
+ # Call the method
58
+ Publisher._write_to_file(file_path, data)
59
+
60
+ # Read the file and verify its content
61
+ with open(file_path, "r") as f:
62
+ content = json.load(f)
63
+ self.assertEqual(content, data)
64
+
65
+ # Clean up
66
+ Path(file_path).unlink()
67
+
68
+ def test_update_base_catalog(self):
69
+ # Create a mock Catalog
70
+ catalog = Catalog(id="test-catalog", description="Test Catalog")
71
+
72
+ # Mock file path and item ID
73
+ catalog_path = "test-catalog.json"
74
+ item_id = "test-item"
75
+ self_href = "https://example.com/catalog.json"
76
+
77
+ self.publisher.workflow_title = "Test Workflow"
78
+
79
+ # Mock the Catalog.from_file method
80
+ with patch("pystac.Catalog.from_file", return_value=catalog):
81
+ updated_catalog = self.publisher._update_base_catalog(
82
+ catalog_path, item_id, self_href
83
+ )
84
+
85
+ # Assertions
86
+ self.assertEqual(updated_catalog.get_self_href(), self_href)
87
+ self.assertIsInstance(updated_catalog, Catalog)
88
+
89
+ def test_read_config_files(self):
90
+ # Mock dataset and workflow config files
91
+ dataset_config = {
92
+ "collection_id": "test-collection",
93
+ "dataset_id": "test-dataset",
94
+ }
95
+ workflow_config = {
96
+ "properties": {"title": "Test Workflow"},
97
+ "workflow_id": "test-workflow",
98
+ }
99
+
100
+ # Mock fsspec.open for config files
101
+ self.mock_fsspec_open.side_effect = [
102
+ mock_open(read_data=yaml.dump(dataset_config)).return_value,
103
+ mock_open(read_data=yaml.dump(workflow_config)).return_value,
104
+ ]
105
+
106
+ # Assertions
107
+ self.assertEqual(self.publisher.dataset_config, dataset_config)
108
+ self.assertEqual(self.publisher.workflow_config, workflow_config)
@@ -217,3 +217,61 @@ class TestOSCProductSTACGenerator(unittest.TestCase):
217
217
  )
218
218
  self.assertIn("Public store, Authenticated store", str(context.exception))
219
219
  self.assertEqual(mock_new_data_store.call_count, 2)
220
+
221
+
222
+ class TestFormatString(unittest.TestCase):
223
+ def test_single_word(self):
224
+ self.assertEqual(
225
+ OscDatasetStacGenerator.format_string("temperature"), "Temperature"
226
+ )
227
+ self.assertEqual(OscDatasetStacGenerator.format_string("temp"), "Temp")
228
+ self.assertEqual(OscDatasetStacGenerator.format_string("hello"), "Hello")
229
+
230
+ def test_multiple_words_with_spaces(self):
231
+ self.assertEqual(
232
+ OscDatasetStacGenerator.format_string("surface temp"), "Surface Temp"
233
+ )
234
+ self.assertEqual(
235
+ OscDatasetStacGenerator.format_string("this is a test"), "This Is A Test"
236
+ )
237
+
238
+ def test_multiple_words_with_underscores(self):
239
+ self.assertEqual(
240
+ OscDatasetStacGenerator.format_string("surface_temp"), "Surface Temp"
241
+ )
242
+ self.assertEqual(
243
+ OscDatasetStacGenerator.format_string("this_is_a_test"), "This Is A Test"
244
+ )
245
+
246
+ def test_mixed_spaces_and_underscores(self):
247
+ self.assertEqual(
248
+ OscDatasetStacGenerator.format_string("surface_temp and_more"),
249
+ "Surface Temp And More",
250
+ )
251
+ self.assertEqual(
252
+ OscDatasetStacGenerator.format_string(
253
+ "mixed_case_with_underscores_and spaces"
254
+ ),
255
+ "Mixed Case With Underscores And Spaces",
256
+ )
257
+
258
+ def test_edge_cases(self):
259
+ # Empty string
260
+ self.assertEqual(OscDatasetStacGenerator.format_string(""), "")
261
+ # Single word with trailing underscore
262
+ self.assertEqual(
263
+ OscDatasetStacGenerator.format_string("temperature_"), "Temperature"
264
+ )
265
+ # Single word with leading underscore
266
+ self.assertEqual(OscDatasetStacGenerator.format_string("_temp"), "Temp")
267
+ # Single word with leading/trailing spaces
268
+ self.assertEqual(OscDatasetStacGenerator.format_string(" hello "), "Hello")
269
+ # Multiple spaces or underscores
270
+ self.assertEqual(
271
+ OscDatasetStacGenerator.format_string("too___many___underscores"),
272
+ "Too Many Underscores",
273
+ )
274
+ self.assertEqual(
275
+ OscDatasetStacGenerator.format_string("too many spaces"),
276
+ "Too Many Spaces",
277
+ )
@@ -0,0 +1,171 @@
1
+ import logging
2
+ import unittest
3
+ from pathlib import Path
4
+ from unittest.mock import MagicMock, patch
5
+
6
+ from deep_code.utils.github_automation import GitHubAutomation
7
+
8
+
9
+ class TestGitHubAutomation(unittest.TestCase):
10
+ def setUp(self):
11
+ # Set up test data
12
+ self.username = "testuser"
13
+ self.token = "testtoken"
14
+ self.repo_owner = "testowner"
15
+ self.repo_name = "testrepo"
16
+ self.github_automation = GitHubAutomation(
17
+ self.username, self.token, self.repo_owner, self.repo_name
18
+ )
19
+ logging.disable(logging.CRITICAL) # Disable logging during tests
20
+
21
+ def tearDown(self):
22
+ logging.disable(logging.NOTSET) # Re-enable logging after tests
23
+
24
+ @patch("requests.post")
25
+ def test_fork_repository(self, mock_post):
26
+ # Mock the response from GitHub API
27
+ mock_response = MagicMock()
28
+ mock_response.raise_for_status.return_value = None
29
+ mock_post.return_value = mock_response
30
+
31
+ # Call the method
32
+ self.github_automation.fork_repository()
33
+
34
+ # Assertions
35
+ mock_post.assert_called_once_with(
36
+ f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}/forks",
37
+ headers={"Authorization": f"token {self.token}"},
38
+ )
39
+
40
+ @patch("subprocess.run")
41
+ def test_clone_repository_new(self, mock_run):
42
+ # Mock the subprocess.run method
43
+ mock_run.return_value = MagicMock()
44
+
45
+ # Mock os.path.exists to return False (directory does not exist)
46
+ with patch("os.path.exists", return_value=False):
47
+ self.github_automation.clone_sync_repository()
48
+
49
+ # Assertions
50
+ mock_run.assert_called_once_with(
51
+ [
52
+ "git",
53
+ "clone",
54
+ f"https://{self.username}:{self.token}@github.com/{self.username}/{self.repo_name}.git",
55
+ self.github_automation.local_clone_dir,
56
+ ],
57
+ check=True,
58
+ )
59
+
60
+ @patch("subprocess.run")
61
+ def test_clone_repository_existing(self, mock_run):
62
+ # Mock the subprocess.run method
63
+ mock_run.return_value = MagicMock()
64
+
65
+ # Mock os.path.exists to return True (directory exists)
66
+ with patch("os.path.exists", return_value=True):
67
+ with patch("os.chdir"):
68
+ self.github_automation.clone_sync_repository()
69
+
70
+ # Assertions
71
+ mock_run.assert_called_once_with(["git", "pull"], check=True)
72
+
73
+ @patch("subprocess.run")
74
+ def test_create_branch(self, mock_run):
75
+ # Mock the subprocess.run method
76
+ mock_run.return_value = MagicMock()
77
+
78
+ # Mock os.chdir
79
+ with patch("os.chdir"):
80
+ self.github_automation.create_branch("test-branch")
81
+
82
+ # Assertions
83
+ mock_run.assert_called_once_with(
84
+ ["git", "checkout", "-b", "test-branch"], check=True
85
+ )
86
+
87
+ @patch("subprocess.run")
88
+ def test_add_file(self, mock_run):
89
+ # Mock the subprocess.run method
90
+ mock_run.return_value = MagicMock()
91
+
92
+ # Mock os.chdir and Path
93
+ with patch("os.chdir"), patch("pathlib.Path.mkdir"), patch(
94
+ "builtins.open", unittest.mock.mock_open()
95
+ ):
96
+ self.github_automation.add_file("test/file.json", {"key": "value"})
97
+
98
+ # Assertions
99
+ mock_run.assert_called_once_with(
100
+ [
101
+ "git",
102
+ "add",
103
+ str(Path(self.github_automation.local_clone_dir) / "test/file.json"),
104
+ ],
105
+ check=True,
106
+ )
107
+
108
+ @patch("subprocess.run")
109
+ def test_commit_and_push(self, mock_run):
110
+ # Mock the subprocess.run method
111
+ mock_run.return_value = MagicMock()
112
+
113
+ # Mock os.chdir
114
+ with patch("os.chdir"):
115
+ self.github_automation.commit_and_push("test-branch", "Test commit message")
116
+
117
+ # Assertions
118
+ mock_run.assert_any_call(
119
+ ["git", "commit", "-m", "Test commit message"], check=True
120
+ )
121
+ mock_run.assert_any_call(
122
+ ["git", "push", "-u", "origin", "test-branch"], check=True
123
+ )
124
+
125
+ @patch("requests.post")
126
+ def test_create_pull_request(self, mock_post):
127
+ # Mock the response from GitHub API
128
+ mock_response = MagicMock()
129
+ mock_response.raise_for_status.return_value = None
130
+ mock_response.json.return_value = {"html_url": "https://github.com/test/pull/1"}
131
+ mock_post.return_value = mock_response
132
+
133
+ # Mock os.chdir
134
+ with patch("os.chdir"):
135
+ self.github_automation.create_pull_request(
136
+ "test-branch", "Test PR", "Test body"
137
+ )
138
+
139
+ # Assertions
140
+ mock_post.assert_called_once_with(
141
+ f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}/pulls",
142
+ headers={"Authorization": f"token {self.token}"},
143
+ json={
144
+ "title": "Test PR",
145
+ "head": f"{self.username}:test-branch",
146
+ "base": "main",
147
+ "body": "Test body",
148
+ },
149
+ )
150
+
151
+ @patch("subprocess.run")
152
+ def test_clean_up(self, mock_run):
153
+ # Mock the subprocess.run method
154
+ mock_run.return_value = MagicMock()
155
+
156
+ # Mock os.chdir
157
+ with patch("os.chdir"):
158
+ self.github_automation.clean_up()
159
+
160
+ # Assertions
161
+ mock_run.assert_called_once_with(
162
+ ["rm", "-rf", self.github_automation.local_clone_dir]
163
+ )
164
+
165
+ def test_file_exists(self):
166
+ # Mock os.path.isfile
167
+ with patch("os.path.isfile", return_value=True):
168
+ result = self.github_automation.file_exists("test/file.json")
169
+
170
+ # Assertions
171
+ self.assertTrue(result)