opteryx-catalog 0.4.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of opteryx-catalog might be problematic.
- opteryx_catalog/__init__.py +31 -0
- opteryx_catalog/catalog/__init__.py +4 -0
- opteryx_catalog/catalog/compaction.py +529 -0
- opteryx_catalog/catalog/dataset.py +1201 -0
- opteryx_catalog/catalog/manifest.py +438 -0
- opteryx_catalog/catalog/metadata.py +81 -0
- opteryx_catalog/catalog/metastore.py +68 -0
- opteryx_catalog/catalog/view.py +12 -0
- opteryx_catalog/exceptions.py +38 -0
- opteryx_catalog/iops/__init__.py +6 -0
- opteryx_catalog/iops/base.py +42 -0
- opteryx_catalog/iops/fileio.py +125 -0
- opteryx_catalog/iops/gcs.py +255 -0
- opteryx_catalog/opteryx_catalog.py +979 -0
- opteryx_catalog/webhooks/__init__.py +230 -0
- opteryx_catalog/webhooks/events.py +177 -0
- opteryx_catalog-0.4.13.dist-info/METADATA +466 -0
- opteryx_catalog-0.4.13.dist-info/RECORD +28 -0
- opteryx_catalog-0.4.13.dist-info/WHEEL +5 -0
- opteryx_catalog-0.4.13.dist-info/licenses/LICENSE +201 -0
- opteryx_catalog-0.4.13.dist-info/top_level.txt +3 -0
- scripts/create_dataset.py +201 -0
- scripts/read_dataset.py +268 -0
- tests/test_compaction.py +233 -0
- tests/test_dataset_metadata.py +29 -0
- tests/test_import.py +5 -0
- tests/test_pyproject.py +8 -0
- tests/test_webhooks.py +177 -0
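
For orientation, the following is a minimal usage sketch assembled only from the imports and keyword arguments that appear in the bundled tests further down (tests/test_dataset_metadata.py). It is illustrative, not documented API; the identifier and GCS path are placeholders.

from opteryx_catalog.catalog.dataset import SimpleDataset
from opteryx_catalog.catalog.metadata import DatasetMetadata

# Describe a dataset by a dotted identifier and its storage location.
meta = DatasetMetadata(
    dataset_identifier="tests_temp.example",
    location="gs://bucket/ws/tests_temp/example",
    schema=None,
    properties={},
)

# Wrap the metadata in a dataset object; with no snapshots loaded there is
# no current snapshot and the snapshot history is empty.
ds = SimpleDataset(identifier="tests_temp.example", _metadata=meta)
print(ds.snapshot())         # None
print(list(ds.snapshots()))  # []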
tests/test_compaction.py
ADDED
@@ -0,0 +1,233 @@
"""
Test script for compaction functionality.

This tests the DatasetCompactor class with both brute and performance strategies.
"""

from unittest.mock import Mock

import pyarrow as pa

from opteryx_catalog.catalog.compaction import DatasetCompactor
from opteryx_catalog.catalog.metadata import DatasetMetadata, Snapshot


def create_test_table(num_rows: int, value_range: tuple = (0, 100)) -> pa.Table:
    """Create a simple test table with a timestamp column for sorting."""
    import random

    timestamps = sorted([random.randint(value_range[0], value_range[1]) for _ in range(num_rows)])
    values = [f"value_{i}" for i in range(num_rows)]

    return pa.table({"timestamp": timestamps, "value": values})


def test_brute_compaction():
    """Test brute force compaction strategy."""
    print("Testing brute force compaction...")

    # Create mock dataset
    dataset = Mock()
    dataset.metadata = DatasetMetadata(
        dataset_identifier="test_dataset",
        location="/tmp/test_data",
    )
    dataset.metadata.sort_orders = []  # No sort order for brute
    dataset.metadata.snapshots = []
    dataset.metadata.current_snapshot = None

    # Create mock entries - small files that should be combined
    mock_entries = [
        {
            "file_path": "/tmp/file1.parquet",
            "file_size_in_bytes": 30 * 1024 * 1024,  # 30MB compressed
            "uncompressed_size_in_bytes": 40 * 1024 * 1024,  # 40MB uncompressed
            "record_count": 1000,
        },
        {
            "file_path": "/tmp/file2.parquet",
            "file_size_in_bytes": 35 * 1024 * 1024,  # 35MB compressed
            "uncompressed_size_in_bytes": 50 * 1024 * 1024,  # 50MB uncompressed
            "record_count": 1200,
        },
        {
            "file_path": "/tmp/file3.parquet",
            "file_size_in_bytes": 110 * 1024 * 1024,  # 110MB compressed (acceptable)
            "uncompressed_size_in_bytes": 130 * 1024 * 1024,  # 130MB uncompressed
            "record_count": 3000,
        },
    ]

    # Create current snapshot with manifest
    dataset.metadata.current_snapshot = Snapshot(
        snapshot_id=1000,
        timestamp_ms=1000,
        manifest_list="/tmp/manifest.parquet",
    )

    # Mock IO and catalog
    dataset.io = Mock()
    dataset.catalog = Mock()

    # Create compactor
    compactor = DatasetCompactor(dataset, strategy="brute", author="test", agent="test-agent")

    # Verify strategy selection
    assert compactor.strategy == "brute", "Strategy should be brute"
    assert compactor.decision == "user", "Decision should be user"

    # Test selection logic directly
    plan = compactor._select_brute_compaction(mock_entries)

    assert plan is not None, "Should find files to compact"
    assert plan["type"] == "combine", "Should plan to combine small files"
    assert len(plan["files"]) == 2, "Should select 2 small files"

    print("✓ Brute force compaction test passed")


def test_performance_compaction():
    """Test performance compaction strategy."""
    print("Testing performance compaction...")

    # Create mock dataset with sort order
    dataset = Mock()
    dataset.metadata = DatasetMetadata(
        dataset_identifier="test_dataset",
        location="/tmp/test_data",
    )
    dataset.metadata.sort_orders = [0]  # Sort by first column
    dataset.metadata.schema = Mock()
    dataset.metadata.schema.fields = [Mock(name="timestamp")]
    dataset.metadata.snapshots = []
    dataset.metadata.current_snapshot = None

    # Create mock entries with overlapping ranges
    mock_entries = [
        {
            "file_path": "/tmp/file1.parquet",
            "file_size_in_bytes": 30 * 1024 * 1024,
            "uncompressed_size_in_bytes": 40 * 1024 * 1024,
            "record_count": 1000,
            "lower_bounds": {"timestamp": 1},
            "upper_bounds": {"timestamp": 100},
        },
        {
            "file_path": "/tmp/file2.parquet",
            "file_size_in_bytes": 35 * 1024 * 1024,
            "uncompressed_size_in_bytes": 50 * 1024 * 1024,
            "record_count": 1200,
            "lower_bounds": {"timestamp": 50},  # Overlaps with file1
            "upper_bounds": {"timestamp": 150},
        },
        {
            "file_path": "/tmp/file3.parquet",
            "file_size_in_bytes": 110 * 1024 * 1024,
            "uncompressed_size_in_bytes": 130 * 1024 * 1024,
            "record_count": 3000,
            "lower_bounds": {"timestamp": 200},  # No overlap
            "upper_bounds": {"timestamp": 300},
        },
    ]

    dataset.metadata.current_snapshot = Snapshot(
        snapshot_id=1000,
        timestamp_ms=1000,
        manifest_list="/tmp/manifest.parquet",
    )

    # Mock IO and catalog
    dataset.io = Mock()
    dataset.catalog = Mock()

    # Create compactor (auto-detect should choose performance)
    compactor = DatasetCompactor(dataset, strategy=None, author="test", agent="test-agent")

    # Verify strategy selection
    assert compactor.strategy == "performance", "Should auto-select performance strategy"
    assert compactor.decision == "auto", "Decision should be auto"

    # Test selection logic directly
    plan = compactor._select_performance_compaction(mock_entries)

    assert plan is not None, "Should find overlapping files"
    assert plan["type"] == "combine-split", "Should plan to combine and split"
    assert len(plan["files"]) == 2, "Should select 2 overlapping files"
    assert plan["sort_column"] == "timestamp", "Should identify sort column"

    print("✓ Performance compaction test passed")


def test_large_file_splitting():
    """Test that large files are identified for splitting."""
    print("Testing large file splitting...")

    dataset = Mock()
    dataset.metadata = DatasetMetadata(
        dataset_identifier="test_dataset",
        location="/tmp/test_data",
    )
    dataset.metadata.sort_orders = []

    # Create entry for a large file
    mock_entries = [
        {
            "file_path": "/tmp/large_file.parquet",
            "file_size_in_bytes": 180 * 1024 * 1024,
            "uncompressed_size_in_bytes": 200 * 1024 * 1024,  # 200MB > 196MB threshold
            "record_count": 5000,
        }
    ]

    compactor = DatasetCompactor(dataset, strategy="brute")
    plan = compactor._select_brute_compaction(mock_entries)

    assert plan is not None, "Should identify large file"
    assert plan["type"] == "split", "Should plan to split"
    assert plan["reason"] == "file-too-large", "Reason should be file too large"

    print("✓ Large file splitting test passed")


def test_no_compaction_needed():
    """Test when no compaction is needed."""
    print("Testing no compaction scenario...")

    dataset = Mock()
    dataset.metadata = DatasetMetadata(
        dataset_identifier="test_dataset",
        location="/tmp/test_data",
    )
    dataset.metadata.sort_orders = []

    # All files are in acceptable range
    mock_entries = [
        {
            "file_path": "/tmp/file1.parquet",
            "file_size_in_bytes": 100 * 1024 * 1024,
            "uncompressed_size_in_bytes": 110 * 1024 * 1024,
            "record_count": 2000,
        },
        {
            "file_path": "/tmp/file2.parquet",
            "file_size_in_bytes": 120 * 1024 * 1024,
            "uncompressed_size_in_bytes": 135 * 1024 * 1024,
            "record_count": 2500,
        },
    ]

    compactor = DatasetCompactor(dataset, strategy="brute")
    plan = compactor._select_brute_compaction(mock_entries)

    assert plan is None, "Should not find anything to compact"

    print("✓ No compaction test passed")


if __name__ == "__main__":
    print("Running compaction tests...\n")
    test_brute_compaction()
    test_performance_compaction()
    test_large_file_splitting()
    test_no_compaction_needed()
    print("\n✅ All tests passed!")
tests/test_dataset_metadata.py
ADDED
@@ -0,0 +1,29 @@
from opteryx_catalog.catalog.metadata import DatasetMetadata
from opteryx_catalog.catalog.dataset import SimpleDataset


def test_dataset_metadata_and_simpledataset():
    meta = DatasetMetadata(
        dataset_identifier="tests_temp.test",
        location="gs://bucket/ws/tests_temp/test",
        schema=None,
        properties={},
    )
    ds = SimpleDataset(identifier="tests_temp.test", _metadata=meta)
    assert ds.metadata.dataset_identifier == "tests_temp.test"
    assert ds.snapshot() is None
    assert list(ds.snapshots()) == []


def test_sequence_number_requires_history():
    """Test that _next_sequence_number works with empty snapshots."""
    meta = DatasetMetadata(
        dataset_identifier="tests_temp.test",
        location="gs://bucket/ws/tests_temp/test",
        schema=None,
        properties={},
    )
    ds = SimpleDataset(identifier="tests_temp.test", _metadata=meta)

    # Should return 1 when no snapshots are loaded (first snapshot)
    assert ds._next_sequence_number() == 1
tests/test_import.py
ADDED
tests/test_pyproject.py
ADDED
tests/test_webhooks.py
ADDED
@@ -0,0 +1,177 @@
"""Tests for the webhook system."""

import os
from unittest.mock import MagicMock
from unittest.mock import patch

import pytest


def test_webhook_manager_disabled_without_domain():
    """Test that webhook manager is disabled when no domain is configured."""
    from opteryx_catalog.webhooks import WebhookManager

    # Clear any existing env vars
    os.environ.pop("OPTERYX_WEBHOOK_DOMAIN", None)
    os.environ.pop("OPTERYX_WEBHOOK_QUEUE", None)

    manager = WebhookManager()
    assert not manager.enabled

    # Should return False without making any HTTP calls
    result = manager.send(
        action="create",
        workspace="test",
        collection="test",
        resource_type="dataset",
        resource_name="test",
    )
    assert result is False


def test_webhook_manager_direct_http():
    """Test that webhooks are sent via direct HTTP when queue is not configured."""
    from opteryx_catalog.webhooks import WebhookManager

    with patch("opteryx_catalog.webhooks.requests.post") as mock_post:
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_post.return_value = mock_response

        manager = WebhookManager(domain="router.example.com", queue_path=None)
        assert manager.enabled
        assert manager._tasks_client is None

        result = manager.send(
            action="create",
            workspace="test-workspace",
            collection="test-collection",
            resource_type="dataset",
            resource_name="test-dataset",
            payload={"location": "gs://bucket/path"},
        )

        assert result is True
        mock_post.assert_called_once()

        # Verify the call arguments
        call_args = mock_post.call_args
        assert call_args.args[0] == "https://router.example.com/event"
        assert call_args.kwargs["json"]["event"]["action"] == "create"
        assert call_args.kwargs["json"]["event"]["resource_type"] == "dataset"
        assert call_args.kwargs["json"]["event"]["resource_name"] == "test-dataset"
        assert call_args.kwargs["json"]["data"]["location"] == "gs://bucket/path"


def test_webhook_manager_payload_building():
    """Test that webhook payloads are built correctly."""
    from opteryx_catalog.webhooks import WebhookManager

    manager = WebhookManager(domain="hook.example.com")

    payload = manager._build_payload(
        action="update",
        workspace="ws",
        collection="col",
        resource_type="dataset",
        resource_name="ds",
        additional={"description": "New description"},
    )

    assert payload["event"]["action"] == "update"
    assert payload["event"]["workspace"] == "ws"
    assert payload["event"]["collection"] == "col"
    assert payload["event"]["resource_type"] == "dataset"
    assert payload["event"]["resource_name"] == "ds"
    assert "timestamp" in payload["event"]
    assert payload["data"]["description"] == "New description"


def test_webhook_http_failure_returns_false():
    """Test that HTTP failures return False without raising exceptions."""
    from opteryx_catalog.webhooks import WebhookManager

    with patch("opteryx_catalog.webhooks.requests.post") as mock_post:
        # Simulate HTTP error
        mock_post.side_effect = Exception("Connection failed")

        manager = WebhookManager(domain="router.example.com")
        result = manager.send(
            action="create",
            workspace="test",
            collection="test",
            resource_type="dataset",
            resource_name="test",
        )

        assert result is False


def test_send_webhook_convenience_function():
    """Test the convenience send_webhook function."""
    from opteryx_catalog.webhooks import send_webhook

    with patch("opteryx_catalog.webhooks.requests.post") as mock_post:
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_post.return_value = mock_response

        os.environ["OPTERYX_WEBHOOK_DOMAIN"] = "router.example.com"
        os.environ.pop("OPTERYX_WEBHOOK_QUEUE", None)

        # Reset the global manager to pick up new env vars
        import opteryx_catalog.webhooks as webhook_module

        webhook_module._webhook_manager = None

        result = send_webhook(
            action="create",
            workspace="test",
            collection="test",
            resource_type="dataset",
            resource_name="test",
            payload={"snapshot_id": 123},
        )

        assert result is True
        mock_post.assert_called_once()

        # Clean up
        os.environ.pop("OPTERYX_WEBHOOK_DOMAIN", None)


def test_event_payload_builders():
    """Test the event payload builder functions."""
    from opteryx_catalog.webhooks.events import dataset_commit_payload
    from opteryx_catalog.webhooks.events import dataset_created_payload
    from opteryx_catalog.webhooks.events import view_created_payload
    from opteryx_catalog.webhooks.events import view_executed_payload

    # Test dataset created
    payload = dataset_created_payload(
        schema=None, location="gs://bucket/path", properties={"key": "value"}
    )
    assert payload["location"] == "gs://bucket/path"
    assert payload["properties"]["key"] == "value"

    # Test dataset commit
    payload = dataset_commit_payload(
        snapshot_id=123, sequence_number=5, record_count=1000, file_count=2
    )
    assert payload["snapshot_id"] == 123
    assert payload["sequence_number"] == 5
    assert payload["record_count"] == 1000
    assert payload["file_count"] == 2

    # Test view created
    payload = view_created_payload(definition="SELECT * FROM table", properties={})
    assert payload["definition"] == "SELECT * FROM table"

    # Test view executed
    payload = view_executed_payload(execution_time_ms=1500, row_count=100)
    assert payload["execution_time_ms"] == 1500
    assert payload["row_count"] == 100


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
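
Taken together, the webhook tests above suggest a small configuration surface: setting OPTERYX_WEBHOOK_DOMAIN enables delivery as an HTTP POST to https://<domain>/event, OPTERYX_WEBHOOK_QUEUE (when set) routes through a task queue instead, and send_webhook() returns True or False rather than raising on delivery failure. The following is a hedged sketch of that flow inferred from those tests only; the domain and resource names are placeholders.

import os

from opteryx_catalog.webhooks import send_webhook

# Set the domain before the first call: the module appears to cache a global
# WebhookManager (the tests reset _webhook_manager to pick up env changes).
os.environ["OPTERYX_WEBHOOK_DOMAIN"] = "router.example.com"

delivered = send_webhook(
    action="create",
    workspace="my-workspace",
    collection="my-collection",
    resource_type="dataset",
    resource_name="my-dataset",
    payload={"location": "gs://bucket/path"},
)
print("delivered:", delivered)  # False if the POST fails; no exception is raised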