earthcatalog 0.2.0__py3-none-any.whl

This diff shows the content of publicly available package versions as published to a supported public registry. It is provided for informational purposes only.
Files changed (50)
  1. earthcatalog/__init__.py +164 -0
  2. earthcatalog/async_http_client.py +1006 -0
  3. earthcatalog/config.py +97 -0
  4. earthcatalog/engines/__init__.py +308 -0
  5. earthcatalog/engines/rustac_engine.py +142 -0
  6. earthcatalog/engines/stac_geoparquet_engine.py +126 -0
  7. earthcatalog/exceptions.py +471 -0
  8. earthcatalog/grid_systems.py +1114 -0
  9. earthcatalog/ingestion_pipeline.py +2281 -0
  10. earthcatalog/input_readers.py +603 -0
  11. earthcatalog/job_tracking.py +485 -0
  12. earthcatalog/pipeline.py +606 -0
  13. earthcatalog/schema_generator.py +911 -0
  14. earthcatalog/spatial_resolver.py +1207 -0
  15. earthcatalog/stac_hooks.py +754 -0
  16. earthcatalog/statistics.py +677 -0
  17. earthcatalog/storage_backends.py +548 -0
  18. earthcatalog/tests/__init__.py +1 -0
  19. earthcatalog/tests/conftest.py +76 -0
  20. earthcatalog/tests/test_all_grids.py +793 -0
  21. earthcatalog/tests/test_async_http.py +700 -0
  22. earthcatalog/tests/test_cli_and_storage.py +230 -0
  23. earthcatalog/tests/test_config.py +245 -0
  24. earthcatalog/tests/test_dask_integration.py +580 -0
  25. earthcatalog/tests/test_e2e_synthetic.py +1624 -0
  26. earthcatalog/tests/test_engines.py +272 -0
  27. earthcatalog/tests/test_exceptions.py +346 -0
  28. earthcatalog/tests/test_file_structure.py +245 -0
  29. earthcatalog/tests/test_input_readers.py +666 -0
  30. earthcatalog/tests/test_integration.py +200 -0
  31. earthcatalog/tests/test_integration_async.py +283 -0
  32. earthcatalog/tests/test_job_tracking.py +603 -0
  33. earthcatalog/tests/test_multi_file_input.py +336 -0
  34. earthcatalog/tests/test_passthrough_hook.py +196 -0
  35. earthcatalog/tests/test_pipeline.py +684 -0
  36. earthcatalog/tests/test_pipeline_components.py +665 -0
  37. earthcatalog/tests/test_schema_generator.py +506 -0
  38. earthcatalog/tests/test_spatial_resolver.py +413 -0
  39. earthcatalog/tests/test_stac_hooks.py +776 -0
  40. earthcatalog/tests/test_statistics.py +477 -0
  41. earthcatalog/tests/test_storage_backends.py +236 -0
  42. earthcatalog/tests/test_validation.py +435 -0
  43. earthcatalog/tests/test_workers.py +653 -0
  44. earthcatalog/validation.py +921 -0
  45. earthcatalog/workers.py +682 -0
  46. earthcatalog-0.2.0.dist-info/METADATA +333 -0
  47. earthcatalog-0.2.0.dist-info/RECORD +50 -0
  48. earthcatalog-0.2.0.dist-info/WHEEL +5 -0
  49. earthcatalog-0.2.0.dist-info/entry_points.txt +3 -0
  50. earthcatalog-0.2.0.dist-info/top_level.txt +1 -0
earthcatalog/tests/test_multi_file_input.py
@@ -0,0 +1,336 @@
+ """Tests for multi-file input pattern support."""
+
+ import json
+ from pathlib import Path
+
+ import pytest
+
+ from earthcatalog.ingestion_pipeline import ProcessingConfig
+ from earthcatalog.input_readers import ReaderFactory
+
+
+ class TestMultiFileInput:
+     """Tests for glob pattern-based multi-file input."""
+
+     @pytest.fixture
+     def synthetic_bulk_dir(self, tmp_path: Path) -> Path:
+         """Create a temporary directory with synthetic bulk data files.
+
+         Creates files following the ITS_LIVE bulk data pattern:
+         - {year}_{chunk_no}.ndjson (e.g., 2020_1.ndjson, 2020_2.ndjson)
+         - Each file contains 5 STAC items with a url field
+
+         Args:
+             tmp_path: Pytest temporary directory fixture.
+
+         Returns:
+             Path to the temporary directory containing synthetic data.
+         """
+         bulk_dir = tmp_path / "bulk_data"
+         bulk_dir.mkdir()
+
+         # Create synthetic data files for different years and chunks
+         synthetic_data = {
+             "2020_1.ndjson": [
+                 {"url": f"https://example.com/item_{i}.json", "id": f"item_{i}", "year": 2020} for i in range(5)
+             ],
+             "2020_2.ndjson": [
+                 {"url": f"https://example.com/item_{i}.json", "id": f"item_{i}", "year": 2020} for i in range(5, 10)
+             ],
+             "2021_1.ndjson": [
+                 {"url": f"https://example.com/item_{i}.json", "id": f"item_{i}", "year": 2021} for i in range(10, 15)
+             ],
+             "2021_2.ndjson": [
+                 {"url": f"https://example.com/item_{i}.json", "id": f"item_{i}", "year": 2021} for i in range(15, 20)
+             ],
+             "2022_1.ndjson": [
+                 {"url": f"https://example.com/item_{i}.json", "id": f"item_{i}", "year": 2022} for i in range(20, 25)
+             ],
+         }
+
+         # Write each file
+         for filename, items in synthetic_data.items():
+             file_path = bulk_dir / filename
+             with file_path.open("w") as f:
+                 for item in items:
+                     f.write(json.dumps(item) + "\n")
+
+         return bulk_dir
+
+     def test_reader_factory_ndjson_support(self):
+         """Test that ReaderFactory supports ndjson and jsonl formats."""
+         formats = ReaderFactory.get_supported_formats()
+         assert "ndjson" in formats
+         assert "jsonl" in formats
+
+     def test_auto_detect_ndjson_format(self, tmp_path: Path):
+         """Test auto-detection of .ndjson files."""
+         test_file = tmp_path / "test.ndjson"
+         test_file.write_text('{"url": "https://example.com/item.json"}\n')
+
+         format_detected = ReaderFactory.auto_detect_format(str(test_file))
+         assert format_detected == "ndjson"
+
+     def test_auto_detect_jsonl_format(self, tmp_path: Path):
+         """Test auto-detection of .jsonl files."""
+         test_file = tmp_path / "test.jsonl"
+         test_file.write_text('{"url": "https://example.com/item.json"}\n')
+
+         format_detected = ReaderFactory.auto_detect_format(str(test_file))
+         assert format_detected == "jsonl"
+
+     def test_read_single_ndjson_file(self, synthetic_bulk_dir: Path):
+         """Test reading URLs from a single NDJSON file."""
+         test_file = synthetic_bulk_dir / "2020_1.ndjson"
+
+         reader = ReaderFactory.get_reader("ndjson")
+         urls = reader.read_urls(str(test_file), "url")
+
+         assert len(urls) == 5
+         assert all(url.startswith("https://example.com/item_") for url in urls)
+
+     def test_processing_config_with_pattern(self):
+         """Test ProcessingConfig accepts input_pattern field."""
+         config = ProcessingConfig(
+             input_file="./data",
+             output_catalog="./catalog",
+             scratch_location="./scratch",
+             input_pattern="./data/2020_*.ndjson",
+         )
+
+         assert config.input_pattern == "./data/2020_*.ndjson"
+         # validate() should not raise when pattern is provided
+         # (skips file existence check for input_file)
+         try:
+             config.validate()
+         except (ValueError, TypeError, OSError, RuntimeError) as e:
+             pytest.fail(f"validate() raised unexpected exception: {e}")
+
+     def test_processing_config_pattern_validation(self):
+         """Test that validation passes when input_pattern is provided."""
+         config = ProcessingConfig(
+             input_file="./nonexistent",  # Can be non-existent when pattern is provided
+             output_catalog="./catalog",
+             scratch_location="./scratch",
+             input_pattern="./real_data/*.ndjson",
+         )
+
+         # Should not raise FileNotFoundError since pattern is provided
+         # Note: Actual file discovery happens during processing, not validation
+         assert config.input_pattern == "./real_data/*.ndjson"
+
+     def test_glob_pattern_local_filesystem(self, synthetic_bulk_dir: Path):
+         """Test glob pattern matching on local filesystem."""
+         import glob as glob_module
+
+         pattern = str(synthetic_bulk_dir / "2020_*.ndjson")
+         matching_files = glob_module.glob(pattern)
+
+         assert len(matching_files) == 2
+         assert all("2020_" in f for f in matching_files)
+         assert all(f.endswith(".ndjson") for f in matching_files)
+
+     def test_glob_pattern_all_years(self, synthetic_bulk_dir: Path):
+         """Test glob pattern matching across all years."""
+         import glob as glob_module
+
+         pattern = str(synthetic_bulk_dir / "*.ndjson")
+         matching_files = sorted(glob_module.glob(pattern))
+
+         assert len(matching_files) == 5
+         # Check we get all expected files
+         expected_files = [
+             "2020_1.ndjson",
+             "2020_2.ndjson",
+             "2021_1.ndjson",
+             "2021_2.ndjson",
+             "2022_1.ndjson",
+         ]
+         actual_files = [Path(f).name for f in matching_files]
+         assert actual_files == expected_files
+
+     def test_read_urls_from_multiple_files(self, synthetic_bulk_dir: Path):
+         """Test reading URLs from multiple files using a pattern."""
+         import glob as glob_module
+
+         pattern = str(synthetic_bulk_dir / "2020_*.ndjson")
+         matching_files = glob_module.glob(pattern)
+
+         all_urls = []
+         reader = ReaderFactory.get_reader("ndjson")
+
+         for file_path in matching_files:
+             urls = reader.read_urls(file_path, "url")
+             all_urls.extend(urls)
+
+         assert len(all_urls) == 10  # 5 from each of 2 files
+         # Check that URLs from different files are all included
+         assert "https://example.com/item_0.json" in all_urls
+         assert "https://example.com/item_9.json" in all_urls
+
+     def test_pattern_year_specific(self, synthetic_bulk_dir: Path):
+         """Test pattern matching specific year."""
+         import glob as glob_module
+
+         pattern = str(synthetic_bulk_dir / "2021_*.ndjson")
+         matching_files = glob_module.glob(pattern)
+
+         assert len(matching_files) == 2
+         for f in matching_files:
+             assert "2021_" in Path(f).name
+
+     def test_pattern_chunk_specific(self, synthetic_bulk_dir: Path):
+         """Test pattern matching specific chunk number across years."""
+         import glob as glob_module
+
+         pattern = str(synthetic_bulk_dir / "*_1.ndjson")
+         matching_files = sorted(glob_module.glob(pattern))
+
+         assert len(matching_files) == 3
+         # Should get 2020_1.ndjson, 2021_1.ndjson, 2022_1.ndjson
+         expected_names = ["2020_1.ndjson", "2021_1.ndjson", "2022_1.ndjson"]
+         actual_names = [Path(f).name for f in matching_files]
+         assert actual_names == expected_names
+
+     def test_synthetic_data_cleanup(self, synthetic_bulk_dir: Path):
+         """Test that synthetic data is in temporary directory and will be cleaned up.
+
+         This test verifies that the synthetic_bulk_dir fixture creates files
+         within pytest's tmp_path, which will be automatically cleaned up.
+         """
+         # Verify files exist
+         assert synthetic_bulk_dir.exists()
+         assert (synthetic_bulk_dir / "2020_1.ndjson").exists()
+         assert (synthetic_bulk_dir / "2021_1.ndjson").exists()
+
+         # Verify files are in pytest's temporary directory
+         # The tmp_path fixture is managed by pytest and will be cleaned up
+         # Parent directory should be part of pytest's temp directory structure
+         assert "pytest-of-" in str(synthetic_bulk_dir) or "tmp" in str(synthetic_bulk_dir).lower()
+
+     def test_pattern_no_matches(self, tmp_path: Path):
+         """Test behavior when pattern matches no files."""
+         import glob as glob_module
+
+         pattern = str(tmp_path / "nonexistent_*.ndjson")
+         matching_files = glob_module.glob(pattern)
+
+         assert matching_files == []
+
+     def test_empty_directory_handling(self, tmp_path: Path):
+         """Test handling of empty directory with pattern."""
+         empty_dir = tmp_path / "empty"
+         empty_dir.mkdir()
+
+         import glob as glob_module
+
+         pattern = str(empty_dir / "*.ndjson")
+         matching_files = glob_module.glob(pattern)
+
+         assert matching_files == []
+
+     def test_mixed_file_extensions(self, synthetic_bulk_dir: Path):
+         """Test that pattern only matches .ndjson files."""
+         # Create a non-ndjson file
+         (synthetic_bulk_dir / "readme.txt").write_text("This is a readme")
+
+         import glob as glob_module
+
+         pattern = str(synthetic_bulk_dir / "*.ndjson")
+         matching_files = glob_module.glob(pattern)
+
+         # Should only match .ndjson files, not .txt
+         assert len(matching_files) == 5
+         assert all(f.endswith(".ndjson") for f in matching_files)
+
+     def test_pattern_recursive_directory(self, tmp_path: Path):
+         """Test recursive pattern matching with subdirectories."""
+         # Create nested directory structure
+         nested_dir = tmp_path / "level1" / "level2"
+         nested_dir.mkdir(parents=True)
+
+         # Create file in nested directory
+         nested_file = nested_dir / "nested_1.ndjson"
+         nested_file.write_text('{"url": "https://example.com/nested.json"}\n')
+
+         # Create file in top level
+         top_file = tmp_path / "top_1.ndjson"
+         top_file.write_text('{"url": "https://example.com/top.json"}\n')
+
+         import glob as glob_module
+
+         # Non-recursive should only find top level
+         pattern = str(tmp_path / "*_1.ndjson")
+         matching_files = sorted(glob_module.glob(pattern))
+
+         assert len(matching_files) == 1
+         assert "top_1.ndjson" in matching_files[0]
+
+         # Recursive should find both
+         pattern_recursive = str(tmp_path / "**" / "*_1.ndjson")
+         matching_files_recursive = sorted(glob_module.glob(pattern_recursive, recursive=True))
+
+         assert len(matching_files_recursive) == 2
+
+
+ class TestPatternValidation:
+     """Tests for pattern validation and edge cases."""
+
+     def test_config_with_empty_pattern(self):
+         """Test ProcessingConfig with empty pattern string."""
+         config = ProcessingConfig(
+             input_file="./data.parquet",
+             output_catalog="./catalog",
+             scratch_location="./scratch",
+             input_pattern="",  # Empty pattern
+         )
+
+         assert config.input_pattern == ""
+         # Empty pattern should be treated as no pattern (single file mode)
+
+     def test_config_pattern_with_s3_wildcard(self):
+         """Test S3 pattern configuration."""
+         config = ProcessingConfig(
+             input_file="s3://bucket/bulk",
+             output_catalog="s3://bucket/catalog",
+             scratch_location="s3://bucket/scratch",
+             input_pattern="s3://bucket/bulk/2020_*.ndjson",
+         )
+
+         assert config.input_pattern == "s3://bucket/bulk/2020_*.ndjson"
+
+     def test_config_to_dict_includes_pattern(self):
+         """Test that input_pattern is included in to_dict()."""
+         config = ProcessingConfig(
+             input_file="./data",
+             output_catalog="./catalog",
+             scratch_location="./scratch",
+             input_pattern="./data/*.ndjson",
+         )
+
+         config_dict = config.to_dict()
+         assert "input_pattern" in config_dict
+         assert config_dict["input_pattern"] == "./data/*.ndjson"
+
+     def test_config_from_dict_with_pattern(self):
+         """Test creating config from dict with input_pattern."""
+         config_data = {
+             "input_file": "./data",
+             "output_catalog": "./catalog",
+             "scratch_location": "./scratch",
+             "input_pattern": "./data/2020_*.ndjson",
+         }
+
+         config = ProcessingConfig.from_dict(config_data)
+         assert config.input_pattern == "./data/2020_*.ndjson"
+
+     def test_config_from_dict_without_pattern(self):
+         """Test creating config from dict without input_pattern (backward compatibility)."""
+         config_data = {
+             "input_file": "./data.parquet",
+             "output_catalog": "./catalog",
+             "scratch_location": "./scratch",
+         }
+
+         config = ProcessingConfig.from_dict(config_data)
+         assert config.input_pattern == ""  # Default value
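Taken together, these tests pin down the multi-file contract: input_pattern is expanded with standard glob semantics, each matching file is read with the ndjson reader, and an empty pattern falls back to single-file mode. Below is a minimal sketch of that flow stitched together from only the calls the tests make; expand_pattern is a hypothetical helper, not part of the package, and the package's own file discovery (which the tests note happens during processing, not validation) may differ.

    # Sketch only: drives the glob-pattern input mode using the APIs the
    # tests above exercise. expand_pattern is a hypothetical helper.
    import glob

    from earthcatalog.ingestion_pipeline import ProcessingConfig
    from earthcatalog.input_readers import ReaderFactory


    def expand_pattern(config: ProcessingConfig) -> list[str]:
        """Collect item URLs from every file matching config.input_pattern."""
        if config.input_pattern:
            paths = sorted(glob.glob(config.input_pattern, recursive=True))
        else:
            # Empty pattern means single-file mode, per TestPatternValidation.
            paths = [config.input_file]
        reader = ReaderFactory.get_reader("ndjson")
        urls: list[str] = []
        for path in paths:
            urls.extend(reader.read_urls(path, "url"))  # "url" is the field to extract
        return urls


    config = ProcessingConfig(
        input_file="./bulk_data",
        output_catalog="./catalog",
        scratch_location="./scratch",
        input_pattern="./bulk_data/2020_*.ndjson",
    )
    urls = expand_pattern(config)  # 10 URLs with the fixture above (2 files x 5 items)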
earthcatalog/tests/test_passthrough_hook.py
@@ -0,0 +1,196 @@
+ """Tests for PassthroughSTACHook."""
+
+ import json
+
+ from earthcatalog.stac_hooks import (
+     PassthroughSTACHook,
+     parse_hook_config,
+     serialize_hook,
+ )
+
+
+ class TestPassthroughSTACHook:
+     """Tests for the passthrough STAC hook."""
+
+     def test_fetch_valid_stac_item(self):
+         """Test fetching a valid STAC item from JSON string."""
+         hook = PassthroughSTACHook()
+         stac_json = json.dumps(
+             {
+                 "type": "Feature",
+                 "id": "test_item",
+                 "geometry": {"type": "Point", "coordinates": [0, 0]},
+                 "properties": {"datetime": "2024-01-01T00:00:00Z"},
+             }
+         )
+
+         result = hook.fetch(stac_json)
+
+         assert result is not None
+         assert result["id"] == "test_item"
+         assert result["type"] == "Feature"
+
+     def test_fetch_missing_type_field(self):
+         """Test that missing 'type' field returns None."""
+         hook = PassthroughSTACHook()
+         invalid_json = json.dumps({"id": "test", "geometry": {}, "properties": {}})
+
+         result = hook.fetch(invalid_json)
+
+         assert result is None
+
+     def test_fetch_missing_geometry(self):
+         """Test that missing 'geometry' field returns None."""
+         hook = PassthroughSTACHook()
+         invalid_json = json.dumps({"type": "Feature", "id": "test", "properties": {}})
+
+         result = hook.fetch(invalid_json)
+
+         assert result is None
+
+     def test_fetch_missing_properties(self):
+         """Test that missing 'properties' field returns None."""
+         hook = PassthroughSTACHook()
+         invalid_json = json.dumps({"type": "Feature", "id": "test", "geometry": {}})
+
+         result = hook.fetch(invalid_json)
+
+         assert result is None
+
+     def test_fetch_invalid_json(self):
+         """Test that invalid JSON string returns None."""
+         hook = PassthroughSTACHook()
+         invalid_json = "not valid json"
+
+         result = hook.fetch(invalid_json)
+
+         assert result is None
+
+     def test_fetch_non_dict_json(self):
+         """Test that non-dict JSON returns None."""
+         hook = PassthroughSTACHook()
+         # JSON array instead of object
+         invalid_json = json.dumps([{"id": "test"}])
+
+         result = hook.fetch(invalid_json)
+
+         assert result is None
+
+     def test_fetch_batch(self):
+         """Test batch fetching multiple STAC items."""
+         hook = PassthroughSTACHook()
+
+         stac_items = [
+             json.dumps({"type": "Feature", "id": "item1", "geometry": {}, "properties": {}}),
+             json.dumps({"type": "Feature", "id": "item2", "geometry": {}, "properties": {}}),
+             json.dumps({"type": "Feature", "id": "item3", "geometry": {}, "properties": {}}),
+         ]
+
+         results = hook.fetch_batch(stac_items)
+
+         assert len(results) == 3
+         assert results[0]["id"] == "item1"
+         assert results[1]["id"] == "item2"
+         assert results[2]["id"] == "item3"
+
+     def test_fetch_batch_with_invalid_items(self):
+         """Test batch fetching with some invalid items."""
+         hook = PassthroughSTACHook()
+
+         items = [
+             json.dumps({"type": "Feature", "id": "valid1", "geometry": {}, "properties": {}}),
+             "invalid json",
+             json.dumps({"type": "Feature", "id": "valid2", "geometry": {}, "properties": {}}),
+             '{"type": "NotFeature", "geometry": {}, "properties": {}}',  # Wrong type
+         ]
+
+         results = hook.fetch_batch(items)
+
+         assert len(results) == 4
+         assert results[0]["id"] == "valid1"
+         assert results[1] is None  # invalid json
+         assert results[2]["id"] == "valid2"
+         assert results[3] is None  # wrong type
+
+     def test_to_config(self):
+         """Test serialization to config string."""
+         hook = PassthroughSTACHook()
+
+         config_str = hook.to_config()
+
+         assert config_str == "passthrough"
+
+     def test_parse_hook_config_passthrough(self):
+         """Test parsing 'passthrough' string returns PassthroughSTACHook."""
+         hook = parse_hook_config("passthrough")
+
+         assert isinstance(hook, PassthroughSTACHook)
+
+     def test_parse_hook_config_default(self):
+         """Test parsing 'default' string returns DefaultSTACHook."""
+         from earthcatalog.stac_hooks import DefaultSTACHook
+
+         hook = parse_hook_config("default")
+
+         assert isinstance(hook, DefaultSTACHook)
+         assert not isinstance(hook, PassthroughSTACHook)
+
+     def test_serialize_passthrough_hook(self):
+         """Test serializing PassthroughSTACHook."""
+         hook = PassthroughSTACHook()
+
+         config_str = serialize_hook(hook)
+
+         assert config_str == "passthrough"
+
+     def test_serialize_and_parse_passthrough(self):
+         """Test round-trip serialization for passthrough hook."""
+         original = PassthroughSTACHook()
+
+         config_str = serialize_hook(original)
+         restored = parse_hook_config(config_str)
+
+         assert isinstance(restored, PassthroughSTACHook)
+
+
+ class TestPassthroughIntegration:
+     """Integration tests for passthrough hook usage."""
+
+     def test_passthrough_with_its_live_like_data(self):
+         """Test passthrough hook with ITS_LIVE-style bulk data."""
+         hook = PassthroughSTACHook()
+
+         # Simulate ITS_LIVE NDJSON line with STAC item
+         its_live_item = {
+             "type": "Feature",
+             "id": "ITS_LIVE_test_item",
+             "geometry": {
+                 "type": "Polygon",
+                 "coordinates": [[[-180, -90], [180, -90], [180, 90], [-180, 90], [-180, -90]]],
+             },
+             "properties": {
+                 "datetime": "2020-01-01T00:00:00Z",
+                 "dataset_id": "TEST_DATASET",
+             },
+         }
+
+         url = json.dumps(its_live_item)
+         result = hook.fetch(url)
+
+         assert result is not None
+         assert result["id"] == "ITS_LIVE_test_item"
+         assert result["properties"]["dataset_id"] == "TEST_DATASET"
+
+     def test_passthrough_performance_skip_http(self):
+         """Test that passthrough doesn't make HTTP requests."""
+         hook = PassthroughSTACHook()
+
+         # Even with timeout/retry params, they're ignored
+         result = hook.fetch(
+             '{"type": "Feature", "id": "test", "geometry": {}, "properties": {}}',
+             timeout=999,  # Should be ignored
+             retry_attempts=999,  # Should be ignored
+         )
+
+         assert result is not None
+         assert result["id"] == "test"
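For reference, the behavior these tests pin down is small enough to state as code: the hook treats the "url" value as an inline STAC item, validates it minimally, and never touches the network. The sketch below is an illustrative re-implementation of that contract only, not the package's actual PassthroughSTACHook (which lives in earthcatalog/stac_hooks.py).

    # Sketch only: the contract TestPassthroughSTACHook pins down,
    # re-implemented for illustration. Not the package's code.
    import json
    from typing import Any, Optional


    class PassthroughSketch:
        """Treat the 'url' value as an inline STAC item; skip HTTP entirely."""

        def fetch(self, url: str, **kwargs: Any) -> Optional[dict]:
            # Extra params such as timeout/retry_attempts are accepted and ignored.
            try:
                item = json.loads(url)
            except json.JSONDecodeError:
                return None
            if not isinstance(item, dict):
                return None  # e.g. a JSON array
            # Reject anything that is not a minimally valid STAC item.
            if item.get("type") != "Feature":
                return None
            if "geometry" not in item or "properties" not in item:
                return None
            return item

        def fetch_batch(self, urls: list) -> list:
            # Position-preserving: invalid entries become None, not dropped.
            return [self.fetch(u) for u in urls]

        def to_config(self) -> str:
            return "passthrough"

Keeping invalid entries as None in fetch_batch (rather than filtering them out) is what lets the batch tests assert on result positions, and it keeps outputs aligned with inputs for downstream bookkeeping.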