deep-code 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
deep_code/__init__.py ADDED
@@ -0,0 +1,24 @@
1
+ # The MIT License (MIT)
2
+ # Copyright (c) 2024 by DeepESDL and Brockmann Consult GmbH
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining a
5
+ # copy of this software and associated documentation files (the "Software"),
6
+ # to deal in the Software without restriction, including without limitation
7
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
+ # and/or sell copies of the Software, and to permit persons to whom the
9
+ # Software is furnished to do so, subject to the following conditions:
10
+ #
11
+ # The above copyright notice and this permission notice shall be included in
12
+ # all copies or substantial portions of the Software.
13
+ #
14
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ # FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
17
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
20
+ # DEALINGS IN THE SOFTWARE.
21
+
22
# Re-export the package version string from the generated version module.
from .version import version

# Standard dunder so tools (pip, importlib.metadata consumers, sphinx)
# can query deep_code.__version__.
__version__ = version
@@ -0,0 +1,3 @@
1
+ # Copyright (c) 2025 by Brockmann Consult GmbH
2
+ # Permissions are hereby granted under the terms of the MIT License:
3
+ # https://opensource.org/licenses/MIT.
deep_code/cli/main.py ADDED
@@ -0,0 +1,21 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # Copyright (c) 2025 by Brockmann Consult GmbH
4
+ # Permissions are hereby granted under the terms of the MIT License:
5
+ # https://opensource.org/licenses/MIT.
6
+
7
+ import click
8
+
9
+ from deep_code.cli.publish import publish
10
+
11
+
12
@click.group()
def main():
    """Deep Code CLI."""
    # Group body intentionally empty: subcommands are attached below.


# Register the `publish` subcommand on the CLI group.
main.add_command(publish)

if __name__ == "__main__":
    main()
@@ -0,0 +1,21 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # Copyright (c) 2025 by Brockmann Consult GmbH
4
+ # Permissions are hereby granted under the terms of the MIT License:
5
+ # https://opensource.org/licenses/MIT.
6
+
7
+ import click
8
+
9
+ from deep_code.tools.publish import Publisher
10
+
11
+
12
@click.command(name="publish")
@click.argument("dataset_config", type=click.Path(exists=True))
@click.argument("workflow_config", type=click.Path(exists=True))
def publish(dataset_config, workflow_config):
    """Request publishing a dataset to the open science catalogue.
    """
    # Both arguments are validated by click (paths must exist) before we
    # hand them to the Publisher, which drives the whole publication flow.
    Publisher(
        dataset_config_path=dataset_config,
        workflow_config_path=workflow_config,
    ).publish_all()
deep_code/constants.py ADDED
@@ -0,0 +1,31 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # Copyright (c) 2024 by Brockmann Consult GmbH
4
+ # Permissions are hereby granted under the terms of the MIT License:
5
+ # https://opensource.org/licenses/MIT.
6
+
7
# --- STAC extension schema URIs used when building catalogue entries ---
OSC_SCHEMA_URI = "https://stac-extensions.github.io/osc/v1.0.0/schema.json"
CF_SCHEMA_URI = "https://stac-extensions.github.io/cf/v0.2.0/schema.json"
THEMES_SCHEMA_URI = "https://stac-extensions.github.io/themes/v1.0.0/schema.json"
OSC_THEME_SCHEME = "https://github.com/stac-extensions/osc#theme"

# --- Target GitHub repository for open-science-catalog metadata PRs ---
OSC_REPO_OWNER = "ESA-EarthCODE"
OSC_REPO_NAME = "open-science-catalog-metadata"
OSC_BRANCH_NAME = "add-new-collection"

# Default concept scheme used for theme keywords (NASA GCMD science keywords).
DEFAULT_THEME_SCHEME = (
    "https://gcmd.earthdata.nasa.gov/kms/concepts/concept_scheme/sciencekeywords"
)
OGC_API_RECORD_SPEC = "http://www.opengis.net/spec/ogcapi-records-1/1.0/req/record-core"
WF_BRANCH_NAME = "add-new-workflow-from-deepesdl"

# --- Self-href targets of the published catalogue files on GitHub Pages ---
VARIABLE_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/variables/catalog.json"
PRODUCT_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/products/catalog.json"
DEEPESDL_COLLECTION_SELF_HREF = (
    "https://esa-earthcode.github.io/open-science-catalog-metadata/projects/deepesdl"
    "/collection.json"
)
BASE_URL_OSC = "https://esa-earthcode.github.io/open-science-catalog-metadata"
EXPERIMENT_BASE_CATALOG_SELF_HREF = "https://esa-earthcode.github.io/open-science-catalog-metadata/experiments/catalog.json"
WORKFLOW_BASE_CATALOG_SELF_HREF = (
    "https://esa-earthcode.github.io/open-science-catalog-metadata/workflows/catalog"
    ".json"
)

# Collection id of the DeepESDL project inside the catalogue.
PROJECT_COLLECTION_NAME = "deep-earth-system-data-lab"
@@ -0,0 +1,3 @@
1
+ # Copyright (c) 2025 by Brockmann Consult GmbH
2
+ # Permissions are hereby granted under the terms of the MIT License:
3
+ # https://opensource.org/licenses/MIT.
@@ -0,0 +1,108 @@
1
+ import unittest
2
+ from unittest.mock import patch, mock_open, MagicMock
3
+ import json
4
+ import yaml
5
+ from pathlib import Path
6
+ import tempfile
7
+ from pystac import Catalog
8
+
9
+ from deep_code.tools.publish import Publisher
10
+
11
+
12
class TestPublisher(unittest.TestCase):
    """Tests for Publisher with GitHub access and config-file I/O mocked out."""

    @patch("fsspec.open")
    @patch("deep_code.tools.publish.GitHubPublisher")
    def setUp(self, mock_github_publisher, mock_fsspec_open):
        # Mock GitHubPublisher to avoid reading .gitaccess
        self.mock_github_publisher_instance = MagicMock()
        mock_github_publisher.return_value = self.mock_github_publisher_instance

        # Mock dataset and workflow config files
        self.dataset_config = {
            "collection_id": "test-collection",
            "dataset_id": "test-dataset",
        }
        self.workflow_config = {
            "properties": {"title": "Test Workflow"},
            "workflow_id": "test-workflow",
        }

        # Mock fsspec.open for config files; Publisher opens the dataset
        # config first, then the workflow config.
        self.mock_fsspec_open = mock_fsspec_open
        self.mock_fsspec_open.side_effect = [
            mock_open(read_data=yaml.dump(self.dataset_config)).return_value,
            mock_open(read_data=yaml.dump(self.workflow_config)).return_value,
        ]

        # Initialize Publisher
        self.publisher = Publisher(
            dataset_config_path="test-dataset-config.yaml",
            workflow_config_path="test-workflow-config.yaml",
        )

    def test_normalize_name(self):
        self.assertEqual(Publisher._normalize_name("Test Name"), "test-name")
        # BUG FIX: the original asserted that the SAME input "Test Name"
        # equals both "test-name" and "test---name", which is contradictory.
        # The triple-hyphen case was evidently meant to exercise multiple
        # consecutive spaces (each space maps to one hyphen).
        self.assertEqual(Publisher._normalize_name("Test   Name"), "test---name")
        self.assertIsNone(Publisher._normalize_name(""))
        self.assertIsNone(Publisher._normalize_name(None))

    def test_write_to_file(self):
        # Create a temporary file to write into
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            file_path = temp_file.name

        # Test data
        data = {"key": "value"}

        # Call the method under test
        Publisher._write_to_file(file_path, data)

        # Read the file back and verify its JSON content round-trips
        with open(file_path, "r") as f:
            content = json.load(f)
            self.assertEqual(content, data)

        # Clean up the temporary file
        Path(file_path).unlink()

    def test_update_base_catalog(self):
        # Create a mock Catalog
        catalog = Catalog(id="test-catalog", description="Test Catalog")

        # Mock file path and item ID
        catalog_path = "test-catalog.json"
        item_id = "test-item"
        self_href = "https://example.com/catalog.json"

        self.publisher.workflow_title = "Test Workflow"

        # Mock the Catalog.from_file method so no disk access happens
        with patch("pystac.Catalog.from_file", return_value=catalog):
            updated_catalog = self.publisher._update_base_catalog(
                catalog_path, item_id, self_href
            )

        # Assertions: the self-href must be set and a Catalog returned
        self.assertEqual(updated_catalog.get_self_href(), self_href)
        self.assertIsInstance(updated_catalog, Catalog)

    def test_read_config_files(self):
        # Expected dataset and workflow configs (mirror those used in setUp)
        dataset_config = {
            "collection_id": "test-collection",
            "dataset_id": "test-dataset",
        }
        workflow_config = {
            "properties": {"title": "Test Workflow"},
            "workflow_id": "test-workflow",
        }

        # NOTE(review): this side_effect is installed AFTER setUp already
        # constructed the Publisher, so the assertions below actually verify
        # the configs read during setUp — which happen to be equal values.
        self.mock_fsspec_open.side_effect = [
            mock_open(read_data=yaml.dump(dataset_config)).return_value,
            mock_open(read_data=yaml.dump(workflow_config)).return_value,
        ]

        # Assertions
        self.assertEqual(self.publisher.dataset_config, dataset_config)
        self.assertEqual(self.publisher.workflow_config, workflow_config)
@@ -0,0 +1,3 @@
1
+ # Copyright (c) 2025 by Brockmann Consult GmbH
2
+ # Permissions are hereby granted under the terms of the MIT License:
3
+ # https://opensource.org/licenses/MIT.
@@ -0,0 +1,277 @@
1
+ import os
2
+ import unittest
3
+ from datetime import datetime
4
+ from unittest.mock import MagicMock, patch
5
+
6
+ import numpy as np
7
+ from pystac import Collection
8
+ from xarray import Dataset
9
+
10
+ from deep_code.utils.dataset_stac_generator import OscDatasetStacGenerator
11
+
12
+
13
class TestOSCProductSTACGenerator(unittest.TestCase):
    """Tests for OscDatasetStacGenerator against a small in-memory dataset."""

    @patch("deep_code.utils.dataset_stac_generator.new_data_store")
    def setUp(self, mock_data_store):
        """Set up a mock dataset and generator."""
        self.mock_dataset = Dataset(
            coords={
                "lon": ("lon", np.linspace(-180, 180, 10)),
                "lat": ("lat", np.linspace(-90, 90, 5)),
                "time": (
                    "time",
                    [
                        np.datetime64(datetime(2023, 1, 1), "ns"),
                        np.datetime64(datetime(2023, 1, 2), "ns"),
                    ],
                ),
            },
            attrs={"description": "Mock dataset for testing.", "title": "Mock Dataset"},
            data_vars={
                "var1": (
                    ("time", "lat", "lon"),
                    np.random.rand(2, 5, 10),
                    {
                        "description": "dummy",
                        "standard_name": "var1",
                        "gcmd_keyword_url": "https://dummy",
                    },
                ),
                "var2": (
                    ("time", "lat", "lon"),
                    np.random.rand(2, 5, 10),
                    {
                        "description": "dummy",
                        "standard_name": "var2",
                        "gcmd_keyword_url": "https://dummy",
                    },
                ),
            },
        )
        # The data store's open_data() yields the in-memory dataset above.
        mock_store = MagicMock()
        mock_store.open_data.return_value = self.mock_dataset
        mock_data_store.return_value = mock_store

        self.generator = OscDatasetStacGenerator(
            dataset_id="mock-dataset-id",
            collection_id="mock-collection-id",
            access_link="s3://mock-bucket/mock-dataset",
            documentation_link="https://example.com/docs",
            osc_status="ongoing",
            osc_region="Global",
            osc_themes=["climate", "environment"],
        )

    def test_open_dataset(self):
        """Test if the dataset is opened correctly."""
        self.assertIsInstance(self.generator.dataset, Dataset)
        self.assertIn("lon", self.generator.dataset.coords)
        self.assertIn("lat", self.generator.dataset.coords)
        self.assertIn("time", self.generator.dataset.coords)

    def test_get_spatial_extent(self):
        """Test spatial extent extraction."""
        extent = self.generator._get_spatial_extent()
        self.assertEqual(extent.bboxes[0], [-180.0, -90.0, 180.0, 90.0])

    def test_get_temporal_extent(self):
        """Test temporal extent extraction."""
        extent = self.generator._get_temporal_extent()
        expected_intervals = [datetime(2023, 1, 1, 0, 0), datetime(2023, 1, 2, 0, 0)]
        self.assertEqual(extent.intervals[0], expected_intervals)

    def test_get_variables(self):
        """Test variable extraction."""
        variables = self.generator.get_variable_ids()
        self.assertEqual(variables, ["var1", "var2"])

    def test_get_general_metadata(self):
        """Test general metadata extraction."""
        metadata = self.generator._get_general_metadata()
        self.assertEqual(metadata["description"], "Mock dataset for testing.")

    @patch("pystac.Collection.add_link")
    @patch("pystac.Collection.set_self_href")
    def test_build_stac_collection(self, mock_set_self_href, mock_add_link):
        """Test STAC collection creation."""
        collection = self.generator.build_dataset_stac_collection()
        self.assertIsInstance(collection, Collection)
        self.assertEqual(collection.id, "mock-collection-id")
        self.assertEqual(collection.description, "Mock dataset for testing.")
        self.assertEqual(
            collection.extent.spatial.bboxes[0], [-180.0, -90.0, 180.0, 90.0]
        )
        self.assertEqual(
            collection.extent.temporal.intervals[0],
            [datetime(2023, 1, 1, 0, 0), datetime(2023, 1, 2, 0, 0)],
        )
        mock_set_self_href.assert_called_once()
        mock_add_link.assert_called()

    def test_invalid_spatial_extent(self):
        """Test spatial extent extraction with missing coordinates."""
        self.generator.dataset = Dataset(coords={"x": [], "y": []})
        with self.assertRaises(ValueError):
            self.generator._get_spatial_extent()

    def test_invalid_temporal_extent(self):
        """Test temporal extent extraction with missing time."""
        self.generator.dataset = Dataset(coords={})
        with self.assertRaises(ValueError):
            self.generator._get_temporal_extent()

    @patch("deep_code.utils.dataset_stac_generator.new_data_store")
    @patch("deep_code.utils.dataset_stac_generator.logging.getLogger")
    def test_open_dataset_success_public_store(self, mock_logger, mock_new_data_store):
        """Test dataset opening with the public store configuration."""
        # Create a mock store and mock its `open_data` method
        mock_store = MagicMock()
        mock_new_data_store.return_value = mock_store
        mock_store.open_data.return_value = self.mock_dataset

        # Instantiate the generator (this will implicitly call _open_dataset)
        generator = OscDatasetStacGenerator("mock-dataset-id", "mock-collection-id")

        # BUG FIX: the original compared against the literal string
        # "mock_dataset"; the mocked store returns self.mock_dataset, so the
        # generator must hold that exact object. assertIs is used because
        # `==` on xarray Datasets does not return a plain bool.
        self.assertIs(generator.dataset, self.mock_dataset)

        # Validate that `new_data_store` was called once with the correct parameters
        mock_new_data_store.assert_called_once_with(
            "s3", root="deep-esdl-public", storage_options={"anon": True}
        )

        # Ensure `open_data` was called once on the returned store
        mock_store.open_data.assert_called_once_with("mock-dataset-id")

        # Validate logging behavior
        mock_logger().info.assert_any_call(
            "Attempting to open dataset with configuration: Public store"
        )
        mock_logger().info.assert_any_call(
            "Successfully opened dataset with configuration: Public store"
        )

    @patch("deep_code.utils.dataset_stac_generator.new_data_store")
    @patch("deep_code.utils.dataset_stac_generator.logging.getLogger")
    def test_open_dataset_success_authenticated_store(
        self, mock_logger, mock_new_data_store
    ):
        """Test dataset opening with the authenticated store configuration."""
        # Simulate public store failure
        mock_store = MagicMock()
        mock_new_data_store.side_effect = [
            Exception("Public store failure"),
            # First call (public store) raises an exception
            mock_store,
            # Second call (authenticated store) returns a mock store
        ]
        mock_store.open_data.return_value = self.mock_dataset

        # NOTE(review): these env vars are not cleaned up and leak into
        # subsequent tests; consider patch.dict(os.environ, ...) instead.
        os.environ["S3_USER_STORAGE_BUCKET"] = "mock-bucket"
        os.environ["S3_USER_STORAGE_KEY"] = "mock-key"
        os.environ["S3_USER_STORAGE_SECRET"] = "mock-secret"

        generator = OscDatasetStacGenerator("mock-dataset-id", "mock-collection-id")

        # BUG FIX: compare against the mock dataset object, not the literal
        # string "mock_dataset" (same defect as the public-store test).
        self.assertIs(generator.dataset, self.mock_dataset)
        self.assertEqual(mock_new_data_store.call_count, 2)

        # Validate calls to `new_data_store`
        mock_new_data_store.assert_any_call(
            "s3", root="deep-esdl-public", storage_options={"anon": True}
        )
        mock_new_data_store.assert_any_call(
            "s3",
            root="mock-bucket",
            storage_options={"anon": False, "key": "mock-key", "secret": "mock-secret"},
        )

        # Validate logging calls
        mock_logger().info.assert_any_call(
            "Attempting to open dataset with configuration: Public store"
        )
        mock_logger().info.assert_any_call(
            "Attempting to open dataset with configuration: Authenticated store"
        )
        mock_logger().info.assert_any_call(
            "Successfully opened dataset with configuration: Authenticated store"
        )

    @patch("deep_code.utils.dataset_stac_generator.new_data_store")
    @patch("deep_code.utils.dataset_stac_generator.logging.getLogger")
    def test_open_dataset_failure(self, mock_logger, mock_new_data_store):
        """Test dataset opening failure with all configurations."""
        # Simulate all store failures
        mock_new_data_store.side_effect = Exception("Store failure")
        os.environ["S3_USER_STORAGE_BUCKET"] = "mock-bucket"
        os.environ["S3_USER_STORAGE_KEY"] = "mock-key"
        os.environ["S3_USER_STORAGE_SECRET"] = "mock-secret"

        with self.assertRaises(ValueError) as context:
            OscDatasetStacGenerator("mock-dataset-id", "mock-collection-id")

        self.assertIn(
            "Failed to open Zarr dataset with ID mock-dataset-id",
            str(context.exception),
        )
        self.assertIn("Public store, Authenticated store", str(context.exception))
        self.assertEqual(mock_new_data_store.call_count, 2)
221
+
222
class TestFormatString(unittest.TestCase):
    """Behavioural tests for OscDatasetStacGenerator.format_string."""

    def _check_pairs(self, cases):
        # Assert each (raw, expected) pair in the given order.
        for raw, expected in cases:
            self.assertEqual(OscDatasetStacGenerator.format_string(raw), expected)

    def test_single_word(self):
        self._check_pairs(
            [
                ("temperature", "Temperature"),
                ("temp", "Temp"),
                ("hello", "Hello"),
            ]
        )

    def test_multiple_words_with_spaces(self):
        self._check_pairs(
            [
                ("surface temp", "Surface Temp"),
                ("this is a test", "This Is A Test"),
            ]
        )

    def test_multiple_words_with_underscores(self):
        self._check_pairs(
            [
                ("surface_temp", "Surface Temp"),
                ("this_is_a_test", "This Is A Test"),
            ]
        )

    def test_mixed_spaces_and_underscores(self):
        self._check_pairs(
            [
                ("surface_temp and_more", "Surface Temp And More"),
                (
                    "mixed_case_with_underscores_and spaces",
                    "Mixed Case With Underscores And Spaces",
                ),
            ]
        )

    def test_edge_cases(self):
        self._check_pairs(
            [
                # Empty string
                ("", ""),
                # Trailing / leading underscores and surrounding spaces
                ("temperature_", "Temperature"),
                ("_temp", "Temp"),
                (" hello ", "Hello"),
                # Runs of separators collapse to single spaces
                ("too___many___underscores", "Too Many Underscores"),
                ("too many spaces", "Too Many Spaces"),
            ]
        )
@@ -0,0 +1,171 @@
1
+ import logging
2
+ import unittest
3
+ from pathlib import Path
4
+ from unittest.mock import MagicMock, patch
5
+
6
+ from deep_code.utils.github_automation import GitHubAutomation
7
+
8
+
9
class TestGitHubAutomation(unittest.TestCase):
    """Tests for GitHubAutomation.

    Every GitHub API call (requests.post) and every git invocation
    (subprocess.run) is mocked, so no network or filesystem side effects
    occur beyond what the individual tests patch.
    """

    def setUp(self):
        # Set up test data
        self.username = "testuser"
        self.token = "testtoken"
        self.repo_owner = "testowner"
        self.repo_name = "testrepo"
        self.github_automation = GitHubAutomation(
            self.username, self.token, self.repo_owner, self.repo_name
        )
        logging.disable(logging.CRITICAL)  # Disable logging during tests

    def tearDown(self):
        logging.disable(logging.NOTSET)  # Re-enable logging after tests

    @patch("requests.post")
    def test_fork_repository(self, mock_post):
        # Mock the response from GitHub API
        mock_response = MagicMock()
        mock_response.raise_for_status.return_value = None
        mock_post.return_value = mock_response

        # Call the method
        self.github_automation.fork_repository()

        # Assertions: the fork endpoint of the upstream repo must be hit
        # exactly once with token auth.
        mock_post.assert_called_once_with(
            f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}/forks",
            headers={"Authorization": f"token {self.token}"},
        )

    @patch("subprocess.run")
    def test_clone_repository_new(self, mock_run):
        # Mock the subprocess.run method
        mock_run.return_value = MagicMock()

        # Mock os.path.exists to return False (directory does not exist),
        # which forces the clone path rather than the pull path.
        with patch("os.path.exists", return_value=False):
            self.github_automation.clone_sync_repository()

        # Assertions: the clone URL embeds username:token credentials and
        # targets the user's fork, not the upstream repo.
        mock_run.assert_called_once_with(
            [
                "git",
                "clone",
                f"https://{self.username}:{self.token}@github.com/{self.username}/{self.repo_name}.git",
                self.github_automation.local_clone_dir,
            ],
            check=True,
        )

    @patch("subprocess.run")
    def test_clone_repository_existing(self, mock_run):
        # Mock the subprocess.run method
        mock_run.return_value = MagicMock()

        # Mock os.path.exists to return True (directory exists), so the
        # implementation chdirs into the clone and pulls instead of cloning.
        with patch("os.path.exists", return_value=True):
            with patch("os.chdir"):
                self.github_automation.clone_sync_repository()

        # Assertions
        mock_run.assert_called_once_with(["git", "pull"], check=True)

    @patch("subprocess.run")
    def test_create_branch(self, mock_run):
        # Mock the subprocess.run method
        mock_run.return_value = MagicMock()

        # Mock os.chdir (the implementation changes into the clone dir)
        with patch("os.chdir"):
            self.github_automation.create_branch("test-branch")

        # Assertions
        mock_run.assert_called_once_with(
            ["git", "checkout", "-b", "test-branch"], check=True
        )

    @patch("subprocess.run")
    def test_add_file(self, mock_run):
        # Mock the subprocess.run method
        mock_run.return_value = MagicMock()

        # Mock os.chdir, directory creation, and file writing so add_file
        # touches nothing on disk; only the `git add` call is observed.
        with patch("os.chdir"), patch("pathlib.Path.mkdir"), patch(
            "builtins.open", unittest.mock.mock_open()
        ):
            self.github_automation.add_file("test/file.json", {"key": "value"})

        # Assertions: the staged path is relative to the local clone dir.
        mock_run.assert_called_once_with(
            [
                "git",
                "add",
                str(Path(self.github_automation.local_clone_dir) / "test/file.json"),
            ],
            check=True,
        )

    @patch("subprocess.run")
    def test_commit_and_push(self, mock_run):
        # Mock the subprocess.run method
        mock_run.return_value = MagicMock()

        # Mock os.chdir
        with patch("os.chdir"):
            self.github_automation.commit_and_push("test-branch", "Test commit message")

        # Assertions: both the commit and the push must have happened
        # (assert_any_call because run() is invoked more than once).
        mock_run.assert_any_call(
            ["git", "commit", "-m", "Test commit message"], check=True
        )
        mock_run.assert_any_call(
            ["git", "push", "-u", "origin", "test-branch"], check=True
        )

    @patch("requests.post")
    def test_create_pull_request(self, mock_post):
        # Mock the response from GitHub API
        mock_response = MagicMock()
        mock_response.raise_for_status.return_value = None
        mock_response.json.return_value = {"html_url": "https://github.com/test/pull/1"}
        mock_post.return_value = mock_response

        # Mock os.chdir
        with patch("os.chdir"):
            self.github_automation.create_pull_request(
                "test-branch", "Test PR", "Test body"
            )

        # Assertions: PR is opened on the upstream repo, with head pointing
        # at the user's fork branch and base at `main`.
        mock_post.assert_called_once_with(
            f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}/pulls",
            headers={"Authorization": f"token {self.token}"},
            json={
                "title": "Test PR",
                "head": f"{self.username}:test-branch",
                "base": "main",
                "body": "Test body",
            },
        )

    @patch("subprocess.run")
    def test_clean_up(self, mock_run):
        # Mock the subprocess.run method
        mock_run.return_value = MagicMock()

        # Mock os.chdir
        with patch("os.chdir"):
            self.github_automation.clean_up()

        # Assertions: cleanup removes the local clone directory.
        # NOTE(review): implementation shells out to `rm -rf` (not portable
        # to Windows) — shutil.rmtree would be the safer choice; verify.
        mock_run.assert_called_once_with(
            ["rm", "-rf", self.github_automation.local_clone_dir]
        )

    def test_file_exists(self):
        # Mock os.path.isfile so no real filesystem lookup happens
        with patch("os.path.isfile", return_value=True):
            result = self.github_automation.file_exists("test/file.json")

        # Assertions
        self.assertTrue(result)