fleet-python 0.2.28 → 0.2.32 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fleet-python might be problematic.
- examples/diff_example.py +30 -20
- examples/dsl_example.py +12 -7
- examples/example.py +4 -4
- examples/example_account.py +8 -0
- examples/example_action_log.py +2 -2
- examples/example_client.py +2 -2
- examples/example_mcp_anthropic.py +8 -5
- examples/example_mcp_openai.py +2 -2
- examples/example_sync.py +4 -4
- examples/example_task.py +16 -6
- examples/example_tasks.py +3 -6
- examples/example_verifier.py +16 -3
- examples/gemini_example.py +6 -6
- examples/json_tasks_example.py +2 -2
- examples/nova_act_example.py +2 -2
- examples/openai_example.py +3 -3
- examples/openai_simple_example.py +3 -3
- examples/query_builder_example.py +11 -7
- fleet/__init__.py +60 -5
- fleet/_async/__init__.py +258 -1
- fleet/_async/base.py +2 -1
- fleet/_async/client.py +194 -127
- fleet/_async/env/client.py +5 -1
- fleet/_async/global_client.py +43 -0
- fleet/_async/instance/client.py +1 -1
- fleet/_async/models.py +172 -171
- fleet/_async/resources/base.py +1 -1
- fleet/_async/resources/mcp.py +55 -0
- fleet/_async/resources/sqlite.py +141 -130
- fleet/_async/tasks.py +71 -16
- fleet/_async/verifiers/__init__.py +2 -2
- fleet/_async/verifiers/bundler.py +18 -14
- fleet/_async/verifiers/verifier.py +77 -71
- fleet/base.py +2 -1
- fleet/client.py +176 -136
- fleet/config.py +3 -2
- fleet/env/__init__.py +10 -1
- fleet/env/client.py +5 -1
- fleet/global_client.py +43 -0
- fleet/instance/__init__.py +1 -1
- fleet/instance/client.py +2 -4
- fleet/models.py +172 -171
- fleet/resources/base.py +1 -1
- fleet/resources/mcp.py +27 -33
- fleet/resources/sqlite.py +136 -131
- fleet/tasks.py +197 -16
- fleet/types.py +1 -1
- fleet/verifiers/__init__.py +2 -2
- fleet/verifiers/bundler.py +18 -14
- fleet/verifiers/code.py +1 -1
- fleet/verifiers/decorator.py +25 -34
- fleet/verifiers/parse.py +98 -68
- fleet/verifiers/verifier.py +77 -78
- {fleet_python-0.2.28.dist-info → fleet_python-0.2.32.dist-info}/METADATA +9 -9
- fleet_python-0.2.32.dist-info/RECORD +74 -0
- scripts/fix_sync_imports.py +87 -59
- scripts/unasync.py +10 -9
- fleet_python-0.2.28.dist-info/RECORD +0 -70
- {fleet_python-0.2.28.dist-info → fleet_python-0.2.32.dist-info}/WHEEL +0 -0
- {fleet_python-0.2.28.dist-info → fleet_python-0.2.32.dist-info}/licenses/LICENSE +0 -0
- {fleet_python-0.2.28.dist-info → fleet_python-0.2.32.dist-info}/top_level.txt +0 -0
fleet/resources/sqlite.py
CHANGED
@@ -14,12 +14,17 @@ if TYPE_CHECKING:
 
 
 # Import types from verifiers module
-from fleet.verifiers.db import
+from fleet.verifiers.db import (
+    IgnoreConfig,
+    _get_row_identifier,
+    _format_row_for_error,
+    _values_equivalent,
+)
 
 
 class SyncDatabaseSnapshot:
     """Async database snapshot that fetches data through API and stores locally for diffing."""
-
+
     def __init__(self, resource: "SQLiteResource", name: str | None = None):
         self.resource = resource
         self.name = name or f"snapshot_{datetime.utcnow().isoformat()}"
@@ -27,51 +32,52 @@ class SyncDatabaseSnapshot:
         self._data: dict[str, list[dict[str, Any]]] = {}
         self._schemas: dict[str, list[str]] = {}
         self._fetched = False
-
+
     def _ensure_fetched(self):
         """Fetch all data from remote database if not already fetched."""
         if self._fetched:
             return
-
+
         # Get all tables
         tables_response = self.resource.query(
             "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
         )
-
+
         if not tables_response.rows:
            self._fetched = True
            return
-
+
        table_names = [row[0] for row in tables_response.rows]
-
+
        # Fetch data from each table
        for table in table_names:
            # Get table schema
            schema_response = self.resource.query(f"PRAGMA table_info({table})")
            if schema_response.rows:
-                self._schemas[table] = [
-
+                self._schemas[table] = [
+                    row[1] for row in schema_response.rows
+                ]  # Column names
+
            # Get all data
            data_response = self.resource.query(f"SELECT * FROM {table}")
            if data_response.rows and data_response.columns:
                self._data[table] = [
-                    dict(zip(data_response.columns, row))
-                    for row in data_response.rows
+                    dict(zip(data_response.columns, row)) for row in data_response.rows
                ]
            else:
                self._data[table] = []
-
+
        self._fetched = True
-
+
    def tables(self) -> list[str]:
        """Get list of all tables in the snapshot."""
        self._ensure_fetched()
        return list(self._data.keys())
-
+
    def table(self, table_name: str) -> "SyncSnapshotQueryBuilder":
        """Create a query builder for snapshot data."""
        return SyncSnapshotQueryBuilder(self, table_name)
-
+
    def diff(
        self,
        other: "SyncDatabaseSnapshot",
@@ -85,7 +91,7 @@ class SyncDatabaseSnapshot:
 
 class SyncSnapshotQueryBuilder:
     """Query builder that works on local snapshot data."""
-
+
     def __init__(self, snapshot: SyncDatabaseSnapshot, table: str):
         self._snapshot = snapshot
         self._table = table
@@ -94,62 +100,59 @@ class SyncSnapshotQueryBuilder:
         self._limit: int | None = None
         self._order_by: str | None = None
         self._order_desc: bool = False
-
+
     def _get_data(self) -> list[dict[str, Any]]:
         """Get table data from snapshot."""
         self._snapshot._ensure_fetched()
         return self._snapshot._data.get(self._table, [])
-
+
     def eq(self, column: str, value: Any) -> "SyncSnapshotQueryBuilder":
         qb = self._clone()
         qb._conditions.append((column, "=", value))
         return qb
-
+
     def limit(self, n: int) -> "SyncSnapshotQueryBuilder":
         qb = self._clone()
         qb._limit = n
         return qb
-
+
     def sort(self, column: str, desc: bool = False) -> "SyncSnapshotQueryBuilder":
         qb = self._clone()
         qb._order_by = column
         qb._order_desc = desc
         return qb
-
+
     def first(self) -> dict[str, Any] | None:
         rows = self.all()
         return rows[0] if rows else None
-
+
     def all(self) -> list[dict[str, Any]]:
         data = self._get_data()
-
+
         # Apply filters
         filtered = data
         for col, op, val in self._conditions:
             if op == "=":
                 filtered = [row for row in filtered if row.get(col) == val]
-
+
         # Apply sorting
         if self._order_by:
             filtered = sorted(
-                filtered,
-                key=lambda r: r.get(self._order_by),
-                reverse=self._order_desc
+                filtered, key=lambda r: r.get(self._order_by), reverse=self._order_desc
             )
-
+
         # Apply limit
         if self._limit is not None:
-            filtered = filtered[:self._limit]
-
+            filtered = filtered[: self._limit]
+
         # Apply column selection
         if self._select_cols != ["*"]:
             filtered = [
-                {col: row.get(col) for col in self._select_cols}
-                for row in filtered
+                {col: row.get(col) for col in self._select_cols} for row in filtered
             ]
-
+
         return filtered
-
+
     def assert_exists(self):
         row = self.first()
         if row is None:
@@ -164,7 +167,7 @@ class SyncSnapshotQueryBuilder:
             error_msg += f"\nConditions: {conditions_str}"
             raise AssertionError(error_msg)
         return self
-
+
     def _clone(self) -> "SyncSnapshotQueryBuilder":
         qb = SyncSnapshotQueryBuilder(self._snapshot, self._table)
         qb._select_cols = list(self._select_cols)
@@ -177,7 +180,7 @@ class SyncSnapshotQueryBuilder:
 
 class SyncSnapshotDiff:
     """Compute & validate changes between two snapshots fetched via API."""
-
+
     def __init__(
         self,
         before: SyncDatabaseSnapshot,
@@ -188,62 +191,62 @@ class SyncSnapshotDiff:
         self.after = after
         self.ignore_config = ignore_config or IgnoreConfig()
         self._cached: dict[str, Any] | None = None
-
+
     def _get_primary_key_columns(self, table: str) -> list[str]:
         """Get primary key columns for a table."""
         # Try to get from schema
         schema_response = self.after.resource.query(f"PRAGMA table_info({table})")
         if not schema_response.rows:
             return ["id"]  # Default fallback
-
+
         pk_columns = []
         for row in schema_response.rows:
             # row format: (cid, name, type, notnull, dflt_value, pk)
             if row[5] > 0:  # pk > 0 means it's part of primary key
                 pk_columns.append((row[5], row[1]))  # (pk_position, column_name)
-
+
         if not pk_columns:
             # Try common defaults
             all_columns = [row[1] for row in schema_response.rows]
             if "id" in all_columns:
                 return ["id"]
             return ["rowid"]
-
+
         # Sort by primary key position and return just the column names
         pk_columns.sort(key=lambda x: x[0])
         return [col[1] for col in pk_columns]
-
+
     def _collect(self):
         """Collect all differences between snapshots."""
         if self._cached is not None:
             return self._cached
-
+
         all_tables = set(self.before.tables()) | set(self.after.tables())
         diff: dict[str, dict[str, Any]] = {}
-
+
         for tbl in all_tables:
             if self.ignore_config.should_ignore_table(tbl):
                 continue
-
+
             # Get primary key columns
             pk_columns = self._get_primary_key_columns(tbl)
-
+
             # Get data from both snapshots
             before_data = self.before._data.get(tbl, [])
             after_data = self.after._data.get(tbl, [])
-
+
             # Create indexes by primary key
             def make_key(row: dict, pk_cols: list[str]) -> Any:
                 if len(pk_cols) == 1:
                     return row.get(pk_cols[0])
                 return tuple(row.get(col) for col in pk_cols)
-
+
             before_index = {make_key(row, pk_columns): row for row in before_data}
             after_index = {make_key(row, pk_columns): row for row in after_data}
-
+
             before_keys = set(before_index.keys())
             after_keys = set(after_index.keys())
-
+
             # Find changes
             result = {
                 "table_name": tbl,
@@ -254,27 +257,23 @@ class SyncSnapshotDiff:
                 "unchanged_count": 0,
                 "total_changes": 0,
             }
-
+
             # Added rows
             for key in after_keys - before_keys:
-                result["added_rows"].append({
-
-                    "data": after_index[key]
-                })
-
+                result["added_rows"].append({"row_id": key, "data": after_index[key]})
+
             # Removed rows
             for key in before_keys - after_keys:
-                result["removed_rows"].append(
-                    "row_id": key,
-
-
-
+                result["removed_rows"].append(
+                    {"row_id": key, "data": before_index[key]}
+                )
+
             # Modified rows
             for key in before_keys & after_keys:
                 before_row = before_index[key]
                 after_row = after_index[key]
                 changes = {}
-
+
                 for field in set(before_row.keys()) | set(after_row.keys()):
                     if self.ignore_config.should_ignore_field(tbl, field):
                         continue
@@ -282,31 +281,33 @@ class SyncSnapshotDiff:
                     after_val = after_row.get(field)
                     if not _values_equivalent(before_val, after_val):
                         changes[field] = {"before": before_val, "after": after_val}
-
+
                 if changes:
-                    result["modified_rows"].append(
-
-
-
-
+                    result["modified_rows"].append(
+                        {
+                            "row_id": key,
+                            "changes": changes,
+                            "data": after_row,  # Current state
+                        }
+                    )
                 else:
                     result["unchanged_count"] += 1
-
+
             result["total_changes"] = (
-                len(result["added_rows"])
-                len(result["removed_rows"])
-                len(result["modified_rows"])
+                len(result["added_rows"])
+                + len(result["removed_rows"])
+                + len(result["modified_rows"])
            )
-
+
            diff[tbl] = result
-
+
        self._cached = diff
        return diff
-
+
    def expect_only(self, allowed_changes: list[dict[str, Any]]):
        """Ensure only specified changes occurred."""
        diff = self._collect()
-
+
        def _is_change_allowed(
            table: str, row_id: Any, field: str | None, after_value: Any
        ) -> bool:
@@ -317,7 +318,7 @@ class SyncSnapshotDiff:
                pk_match = (
                    str(allowed_pk) == str(row_id) if allowed_pk is not None else False
                )
-
+
                if (
                    allowed["table"] == table
                    and pk_match
@@ -326,57 +327,65 @@ class SyncSnapshotDiff:
                ):
                    return True
            return False
-
+
        # Collect all unexpected changes
        unexpected_changes = []
-
+
        for tbl, report in diff.items():
            for row in report.get("modified_rows", []):
                for f, vals in row["changes"].items():
                    if self.ignore_config.should_ignore_field(tbl, f):
                        continue
                    if not _is_change_allowed(tbl, row["row_id"], f, vals["after"]):
-                        unexpected_changes.append(
-
+                        unexpected_changes.append(
+                            {
+                                "type": "modification",
+                                "table": tbl,
+                                "row_id": row["row_id"],
+                                "field": f,
+                                "before": vals.get("before"),
+                                "after": vals["after"],
+                                "full_row": row,
+                            }
+                        )
+
+            for row in report.get("added_rows", []):
+                if not _is_change_allowed(tbl, row["row_id"], None, "__added__"):
+                    unexpected_changes.append(
+                        {
+                            "type": "insertion",
                            "table": tbl,
                            "row_id": row["row_id"],
-                            "field":
-                            "
-                            "after": vals["after"],
+                            "field": None,
+                            "after": "__added__",
                            "full_row": row,
-                        }
-
-
-            if not _is_change_allowed(tbl, row["row_id"], None, "__added__"):
-                unexpected_changes.append({
-                    "type": "insertion",
-                    "table": tbl,
-                    "row_id": row["row_id"],
-                    "field": None,
-                    "after": "__added__",
-                    "full_row": row,
-                })
-
+                        }
+                    )
+
            for row in report.get("removed_rows", []):
                if not _is_change_allowed(tbl, row["row_id"], None, "__removed__"):
-                    unexpected_changes.append(
-
-
-
-
-
-
-
-
+                    unexpected_changes.append(
+                        {
+                            "type": "deletion",
+                            "table": tbl,
+                            "row_id": row["row_id"],
+                            "field": None,
+                            "after": "__removed__",
+                            "full_row": row,
+                        }
+                    )
+
        if unexpected_changes:
            # Build comprehensive error message
            error_lines = ["Unexpected database changes detected:"]
            error_lines.append("")
-
+
            for i, change in enumerate(unexpected_changes[:5], 1):
-                error_lines.append(
+                error_lines.append(
+                    f"{i}. {change['type'].upper()} in table '{change['table']}':"
+                )
                error_lines.append(f" Row ID: {change['row_id']}")
-
+
                if change["type"] == "modification":
                    error_lines.append(f" Field: {change['field']}")
                    error_lines.append(f" Before: {repr(change['before'])}")
@@ -385,7 +394,7 @@ class SyncSnapshotDiff:
                    error_lines.append(" New row added")
                elif change["type"] == "deletion":
                    error_lines.append(" Row deleted")
-
+
                # Show some context from the row
                if "full_row" in change and change["full_row"]:
                    row_data = change["full_row"]
@@ -394,13 +403,15 @@ class SyncSnapshotDiff:
                        row_data.get("data", {}), max_fields=5
                    )
                    error_lines.append(f" Row data: {formatted_row}")
-
+
                error_lines.append("")
-
+
            if len(unexpected_changes) > 5:
-                error_lines.append(
+                error_lines.append(
+                    f"... and {len(unexpected_changes) - 5} more unexpected changes"
+                )
                error_lines.append("")
-
+
            # Show what changes were allowed
            error_lines.append("Allowed changes were:")
            if allowed_changes:
@@ -412,18 +423,20 @@ class SyncSnapshotDiff:
                        f"After: {repr(allowed.get('after'))}"
                    )
                if len(allowed_changes) > 3:
-                    error_lines.append(
+                    error_lines.append(
+                        f" ... and {len(allowed_changes) - 3} more allowed changes"
+                    )
            else:
                error_lines.append(" (No changes were allowed)")
-
+
            raise AssertionError("\n".join(error_lines))
-
+
        return self
 
 
 class SyncQueryBuilder:
     """Async query builder that translates DSL to SQL and executes through the API."""
-
+
     def __init__(self, resource: "SQLiteResource", table: str):
         self._resource = resource
         self._table = table
@@ -508,10 +521,7 @@ class SyncQueryBuilder:
 
         # Joins
         for tbl, onmap in self._joins:
-            join_clauses = [
-                f"{self._table}.{l} = {tbl}.{r}"
-                for l, r in onmap.items()
-            ]
+            join_clauses = [f"{self._table}.{l} = {tbl}.{r}" for l, r in onmap.items()]
            sql.append(f"JOIN {tbl} ON {' AND '.join(join_clauses)}")
 
        # WHERE
@@ -558,10 +568,7 @@ class SyncQueryBuilder:
        if not response.rows:
            return []
        # Convert List[List] to List[dict] using column names
-        return [
-            dict(zip(response.columns or [], row))
-            for row in response.rows
-        ]
+        return [dict(zip(response.columns or [], row)) for row in response.rows]
 
    # Assertions
    def assert_exists(self):
@@ -644,9 +651,7 @@ class SQLiteResource(Resource):
        )
        return DescribeResponse(**response.json())
 
-    def query(
-        self, query: str, args: Optional[List[Any]] = None
-    ) -> QueryResponse:
+    def query(self, query: str, args: Optional[List[Any]] = None) -> QueryResponse:
        return self._query(query, args, read_only=True)
 
    def exec(self, query: str, args: Optional[List[Any]] = None) -> QueryResponse:
@@ -679,17 +684,17 @@ class SQLiteResource(Resource):
        ignore_config: IgnoreConfig | None = None,
    ) -> SyncSnapshotDiff:
        """Compare this database with another AsyncSQLiteResource.
-
+
        Args:
            other: Another AsyncSQLiteResource to compare against
            ignore_config: Optional configuration for ignoring specific tables/fields
-
+
        Returns:
            AsyncSnapshotDiff: Object containing the differences between the two databases
        """
        # Create snapshots of both databases
        before_snapshot = self.snapshot(name=f"before_{datetime.utcnow().isoformat()}")
        after_snapshot = other.snapshot(name=f"after_{datetime.utcnow().isoformat()}")
-
+
        # Return the diff between the snapshots
        return before_snapshot.diff(after_snapshot, ignore_config)