furu 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- furu/__init__.py +82 -0
- furu/adapters/__init__.py +3 -0
- furu/adapters/submitit.py +195 -0
- furu/config.py +98 -0
- furu/core/__init__.py +4 -0
- furu/core/furu.py +999 -0
- furu/core/list.py +123 -0
- furu/dashboard/__init__.py +9 -0
- furu/dashboard/__main__.py +7 -0
- furu/dashboard/api/__init__.py +7 -0
- furu/dashboard/api/models.py +170 -0
- furu/dashboard/api/routes.py +135 -0
- furu/dashboard/frontend/dist/assets/index-CbdDfSOZ.css +1 -0
- furu/dashboard/frontend/dist/assets/index-DDv_TYB_.js +67 -0
- furu/dashboard/frontend/dist/favicon.svg +10 -0
- furu/dashboard/frontend/dist/index.html +22 -0
- furu/dashboard/main.py +134 -0
- furu/dashboard/scanner.py +931 -0
- furu/errors.py +76 -0
- furu/migrate.py +48 -0
- furu/migration.py +926 -0
- furu/runtime/__init__.py +27 -0
- furu/runtime/env.py +8 -0
- furu/runtime/logging.py +301 -0
- furu/runtime/tracebacks.py +64 -0
- furu/serialization/__init__.py +20 -0
- furu/serialization/migrations.py +246 -0
- furu/serialization/serializer.py +233 -0
- furu/storage/__init__.py +32 -0
- furu/storage/metadata.py +282 -0
- furu/storage/migration.py +81 -0
- furu/storage/state.py +1107 -0
- furu-0.0.1.dist-info/METADATA +502 -0
- furu-0.0.1.dist-info/RECORD +36 -0
- furu-0.0.1.dist-info/WHEEL +4 -0
- furu-0.0.1.dist-info/entry_points.txt +2 -0
furu/migration.py
ADDED
|
@@ -0,0 +1,926 @@
|
|
|
1
|
+
from __future__ import annotations

import datetime as _dt
import importlib
import json
import shutil
from collections import deque
from collections.abc import Iterable, Mapping
from dataclasses import dataclass
from pathlib import Path
from typing import Literal, TypeAlias, cast, overload

import chz
from chz.util import MISSING as CHZ_MISSING, MISSING_TYPE
from chz.validators import for_all_fields, typecheck

from .config import FURU_CONFIG
from .core.furu import Furu
from .runtime.logging import get_logger
from .serialization import FuruSerializer
from .serialization.serializer import JsonValue
from .storage import MetadataManager, MigrationManager, MigrationRecord, StateManager
from .storage.state import _StateAttemptRunning, _StateResultMigrated
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# JSON-compatible scalar types accepted as migration values.
Primitive: TypeAlias = str | int | float | bool | None
# Values that may be supplied as migration defaults: primitives, Furu
# objects, and arbitrarily nested tuples/dicts of the same.
MigrationValue: TypeAlias = (
    Primitive | Furu | tuple["MigrationValue", ...] | dict[str, "MigrationValue"]
)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass(frozen=True)
class NamespacePair:
    """Explicit source/target namespace pair for cross-namespace migrations."""

    from_namespace: str  # dotted namespace to read stored objects from
    to_namespace: str  # dotted namespace of the migration target class
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@dataclass(frozen=True)
class FuruRef:
    """Location of one stored Furu object on disk."""

    namespace: str  # dotted class namespace
    furu_hash: str  # content hash; also the object's directory name
    root: Literal["data", "git"]  # which storage root the directory lives under
    directory: Path  # path to the object's directory
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@dataclass(frozen=True)
class MigrationCandidate:
    """A validated, not-yet-applied migration of one stored object.

    Produced by ``find_migration_candidates`` and friends; applied with
    ``apply_migration``.
    """

    from_ref: FuruRef  # existing stored source object
    to_ref: FuruRef  # computed target location (hash of ``to_config``)
    to_namespace: str  # dotted namespace of the target class
    to_config: dict[str, JsonValue]  # serialized target config (includes "__class__")
    defaults_applied: dict[str, MigrationValue]  # default values injected so far
    fields_dropped: list[str]  # source fields removed during conversion
    missing_fields: list[str]  # target fields absent from the config
    extra_fields: list[str]  # config fields absent from the target schema

    def with_default_values(
        self, values: Mapping[str, MigrationValue]
    ) -> "MigrationCandidate":
        """Return a copy with *values* applied as additional defaults.

        The resulting config is re-validated against the target schema and
        its hash (hence ``to_ref``) recomputed.  Returns ``self`` unchanged
        when *values* is empty.  Raises ValueError if a value collides with
        an existing config field or is not in the target schema.
        """
        if not values:
            return self
        updated_defaults = dict(self.defaults_applied)
        updated_defaults.update(values)
        return _rebuild_candidate_with_defaults(self, dict(values), updated_defaults)
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@dataclass(frozen=True)
class MigrationSkip:
    """Result entry for a candidate not applied under ``conflict='skip'``."""

    candidate: MigrationCandidate  # the candidate that was skipped
    reason: str  # human-readable explanation of the skip
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _rebuild_candidate_with_defaults(
    candidate: MigrationCandidate,
    new_defaults: dict[str, MigrationValue],
    defaults_applied: dict[str, MigrationValue],
) -> MigrationCandidate:
    """Re-validate *candidate* after injecting *new_defaults* into its config.

    Mirrors the schema checks in ``_build_candidate``: defaults may not
    collide with fields already present in the config and must exist in the
    target schema; the updated config must then cover the target schema
    exactly.  The target hash (and so ``to_ref``) is recomputed.

    Raises:
        ValueError: on any schema mismatch described above.
    """
    target_class = _resolve_target_class(candidate.to_namespace)
    updated_config = _typed_config(dict(candidate.to_config))

    target_fields = _target_field_names(target_class)
    config_keys = set(updated_config.keys()) - {"__class__"}
    # NOTE: previously-applied defaults are already in the config, so
    # re-supplying the same field twice is rejected here.
    conflicts = set(new_defaults) & config_keys
    if conflicts:
        raise ValueError(
            "migration: default_values provided for existing fields: "
            f"{_format_fields(conflicts)}"
        )
    unknown = set(new_defaults) - set(target_fields)
    if unknown:
        raise ValueError(
            "migration: default_values contains fields not in target schema: "
            f"{_format_fields(unknown)}"
        )

    for field, value in new_defaults.items():
        updated_config[field] = _serialize_value(value)

    updated_config["__class__"] = candidate.to_namespace
    _typecheck_config(updated_config)

    # Recompute coverage after injection; both checks raise, so a returned
    # candidate always has empty missing_fields/extra_fields.
    config_keys = set(updated_config.keys()) - {"__class__"}
    missing_fields = sorted(set(target_fields) - config_keys)
    if missing_fields:
        raise ValueError(
            "migration: missing required fields for target class: "
            f"{_format_fields(missing_fields)}"
        )
    extra_fields = sorted(config_keys - set(target_fields))
    if extra_fields:
        raise ValueError(
            "migration: extra fields present; use drop_fields to remove: "
            f"{_format_fields(extra_fields)}"
        )

    to_hash = FuruSerializer.compute_hash(updated_config)
    to_ref = _build_target_ref(target_class, candidate.to_namespace, to_hash)
    return MigrationCandidate(
        from_ref=candidate.from_ref,
        to_ref=to_ref,
        to_namespace=candidate.to_namespace,
        to_config=updated_config,
        defaults_applied=defaults_applied,
        fields_dropped=candidate.fields_dropped,
        missing_fields=missing_fields,
        extra_fields=extra_fields,
    )
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
# How a migration materializes the target: "alias" records the mapping only,
# "move" relocates the payload, "copy" duplicates it.
MigrationPolicy = Literal["alias", "move", "copy"]
# What to do when the target directory already holds a result.
MigrationConflict = Literal["throw", "skip", "overwrite"]
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
# Calling convention 1: str namespace plus an explicit target class.
@overload
def find_migration_candidates(
    *,
    namespace: str,
    to_obj: type[Furu],
    default_values: Mapping[str, MigrationValue] | None = None,
    default_fields: Iterable[str] | None = None,
    drop_fields: Iterable[str] | None = None,
) -> list[MigrationCandidate]: ...


# Calling convention 2: NamespacePair; the target class is resolved by name.
@overload
def find_migration_candidates(
    *,
    namespace: NamespacePair,
    to_obj: None = None,
    default_values: Mapping[str, MigrationValue] | None = None,
    default_fields: Iterable[str] | None = None,
    drop_fields: Iterable[str] | None = None,
) -> list[MigrationCandidate]: ...


def find_migration_candidates(
    *,
    namespace: str | NamespacePair,
    to_obj: type[Furu] | None = None,
    default_values: Mapping[str, MigrationValue] | None = None,
    default_fields: Iterable[str] | None = None,
    drop_fields: Iterable[str] | None = None,
) -> list[MigrationCandidate]:
    """Enumerate stored objects in a namespace that can migrate to a target.

    Args:
        namespace: Source namespace as a str (requires ``to_obj``), or a
            :class:`NamespacePair` naming both sides (``to_obj`` forbidden).
        to_obj: Target Furu class when ``namespace`` is a str.
        default_values: Explicit values for fields new in the target schema.
        default_fields: Target fields to fill from class-level defaults.
        drop_fields: Source fields to remove before validation.

    Returns:
        One validated :class:`MigrationCandidate` per stored source object
        found under either storage root.

    Raises:
        ValueError: On invalid argument combinations, or when any source
            config cannot be reconciled with the target schema.
    """
    # Defensive: the keyword-only signature forbids None, but untyped
    # callers may still pass it.
    if namespace is None:
        raise ValueError("migration: namespace is required")
    if isinstance(namespace, NamespacePair):
        if to_obj is not None:
            raise ValueError("migration: to_obj cannot be used with NamespacePair")
        from_namespace = namespace.from_namespace
        to_namespace = namespace.to_namespace
        target_class = _resolve_target_class(to_namespace)
    elif isinstance(namespace, str):
        if not _is_furu_class(to_obj):
            raise ValueError(
                "migration: to_obj must be a class (use find_migration_candidates_initialized_target for instances)"
            )
        from_namespace = namespace
        target_class = to_obj
        # Redundant with the _is_furu_class check above, but narrows the
        # type for static checkers.
        if target_class is None:
            raise ValueError(
                "migration: to_obj must be a class (use find_migration_candidates_initialized_target for instances)"
            )
        to_namespace = _namespace_str(target_class)
    else:
        raise ValueError("migration: namespace must be str or NamespacePair")

    candidates: list[MigrationCandidate] = []
    for from_ref, config in _iter_source_configs(from_namespace):
        candidate = _build_candidate(
            from_ref,
            config,
            to_namespace=to_namespace,
            target_class=target_class,
            default_values=default_values,
            default_fields=default_fields,
            drop_fields=drop_fields,
            default_source=None,
        )
        candidates.append(candidate)
    return candidates
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def find_migration_candidates_initialized_target(
    *,
    to_obj: Furu,
    from_namespace: str | None = None,
    default_fields: Iterable[str] | None = None,
    drop_fields: Iterable[str] | None = None,
) -> list[MigrationCandidate]:
    """Enumerate candidates using a concrete Furu *instance* as the target.

    Defaults for ``default_fields`` are read off *to_obj* itself, and each
    candidate is aligned against the instance's serialized config via
    ``_align_candidate_to_target``; candidates that helper rejects
    (returns None for) are omitted from the result.

    Args:
        to_obj: Target Furu instance (not a class).
        from_namespace: Source namespace; defaults to *to_obj*'s own.
        default_fields: Target fields to fill from *to_obj*'s attributes.
        drop_fields: Source fields to remove before validation.

    Raises:
        ValueError: If *to_obj* is a class or not a Furu instance.
        TypeError: If *to_obj* does not serialize to a dict.
    """
    if isinstance(to_obj, type):
        raise ValueError(
            "migration: to_obj must be an instance (use find_migration_candidates for classes)"
        )
    if not isinstance(to_obj, Furu):
        raise ValueError(
            "migration: to_obj must be an instance (use find_migration_candidates for classes)"
        )

    target_class = to_obj.__class__
    to_namespace = _namespace_str(target_class)
    source_namespace = from_namespace or to_namespace

    target_config = FuruSerializer.to_dict(to_obj)
    if not isinstance(target_config, dict):
        raise TypeError("migration: to_obj must serialize to a dict")
    target_config = _typed_config(target_config)
    target_config["__class__"] = to_namespace
    _typecheck_config(target_config)

    candidates: list[MigrationCandidate] = []
    for from_ref, config in _iter_source_configs(source_namespace):
        candidate = _build_candidate(
            from_ref,
            config,
            to_namespace=to_namespace,
            target_class=target_class,
            default_values=None,
            default_fields=default_fields,
            drop_fields=drop_fields,
            default_source=to_obj,
        )
        aligned = _align_candidate_to_target(candidate, target_config)
        if aligned is not None:
            candidates.append(aligned)
    return candidates
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
# conflict="throw"/"overwrite": every result is a MigrationRecord.
@overload
def apply_migration(
    candidate: MigrationCandidate,
    *,
    policy: MigrationPolicy = "alias",
    cascade: bool = True,
    origin: str | None = None,
    note: str | None = None,
    conflict: Literal["throw", "overwrite"] = "throw",
) -> list[MigrationRecord]: ...


# conflict="skip": results may include MigrationSkip entries.
@overload
def apply_migration(
    candidate: MigrationCandidate,
    *,
    policy: MigrationPolicy = "alias",
    cascade: bool = True,
    origin: str | None = None,
    note: str | None = None,
    conflict: Literal["skip"],
) -> list[MigrationRecord | MigrationSkip]: ...


def apply_migration(
    candidate: MigrationCandidate,
    *,
    policy: MigrationPolicy = "alias",
    cascade: bool = True,
    origin: str | None = None,
    note: str | None = None,
    conflict: MigrationConflict = "throw",
) -> list[MigrationRecord | MigrationSkip]:
    """Apply *candidate* — and, by default, its dependents — to storage.

    With ``cascade=True`` every stored object that transitively embeds the
    source config is migrated as well, parents before children (BFS order
    from ``_build_cascade_candidates``).

    Conflict handling when a target already has a recorded status:
      * ``"throw"`` (default): raise before anything is written.
      * ``"skip"``: skip conflicting candidates and everything that
        depends on them, emitting a MigrationSkip for each.
      * ``"overwrite"``: delete the existing target and proceed.

    Returns:
        Per-candidate results in cascade order.

    Raises:
        ValueError: On an unsupported policy, or on conflict="throw" when
            any target already exists.
    """
    if policy not in {"alias", "move", "copy"}:
        raise ValueError(f"Unsupported migration policy: {policy}")

    if not cascade:
        get_logger().warning(
            "migration: cascade disabled; dependents will not be migrated"
        )

    cascade_nodes = (
        _build_cascade_candidates(candidate)
        if cascade
        else [_CascadeNode(candidate=candidate, parent=None)]
    )
    parent_map = {node.key: node.parent for node in cascade_nodes}

    # Pre-scan all targets so "throw" can fail before any side effects.
    conflict_statuses: dict[_CandidateKey, str] = {}
    for node in cascade_nodes:
        status = _target_status(node.candidate)
        if status is not None:
            conflict_statuses[node.key] = status

    if conflict == "throw" and conflict_statuses:
        status = next(iter(conflict_statuses.values()))
        raise ValueError(
            f"migration: target exists with status {status}; pass conflict='overwrite' or conflict='skip'"
        )

    skip_keys: set[_CandidateKey] = set()
    if conflict == "skip" and conflict_statuses:
        # Skipping a node also skips everything that depends on it.
        skip_keys = _expand_skip_keys(conflict_statuses.keys(), parent_map)
        for key in conflict_statuses:
            status = conflict_statuses[key]
            get_logger().warning(
                "migration: skipping candidate due to target status %s",
                status,
            )

    results: list[MigrationRecord | MigrationSkip] = []
    for node in cascade_nodes:
        if node.key in skip_keys:
            reason = "migration: skipping candidate due to skipped dependency"
            if node.key in conflict_statuses:
                status = conflict_statuses[node.key]
                reason = f"migration: skipping candidate due to target status {status}"
            results.append(MigrationSkip(candidate=node.candidate, reason=reason))
            continue
        record = _apply_single_migration(
            node.candidate,
            policy=policy,
            origin=origin,
            note=note,
            conflict=conflict,
            conflict_status=conflict_statuses.get(node.key),
        )
        results.append(record)
    return results
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
@dataclass(frozen=True)
class _CascadeNode:
    """A candidate in the cascade, linked to the node that introduced it."""

    candidate: MigrationCandidate
    # Key of the candidate this one depends on; None for the cascade root.
    # (_CandidateKey is defined below; fine under lazy annotations.)
    parent: _CandidateKey | None

    @property
    def key(self) -> "_CandidateKey":
        """Identity key of this node's candidate."""
        return _candidate_key(self.candidate)
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
# (namespace, hash, root) triple identifying a candidate's source object.
_CandidateKey: TypeAlias = tuple[str, str, str]
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def _candidate_key(candidate: MigrationCandidate) -> _CandidateKey:
|
|
352
|
+
return (
|
|
353
|
+
candidate.from_ref.namespace,
|
|
354
|
+
candidate.from_ref.furu_hash,
|
|
355
|
+
candidate.from_ref.root,
|
|
356
|
+
)
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
def _build_cascade_candidates(root: MigrationCandidate) -> list[_CascadeNode]:
    """Expand *root* into itself plus all transitive dependents (BFS).

    Nodes are returned in breadth-first order, so a parent always precedes
    the candidates that depend on it; each node carries the key of the
    node that pulled it in.  The ``seen`` set collapses diamonds/cycles to
    a single visit.
    """
    nodes: list[_CascadeNode] = []
    # deque.popleft() is O(1); list.pop(0) shifts the whole list (O(n))
    # on every dequeue, making the BFS accidentally quadratic.
    queue: deque[_CascadeNode] = deque([_CascadeNode(candidate=root, parent=None)])
    seen: set[_CandidateKey] = {_candidate_key(root)}

    while queue:
        node = queue.popleft()
        nodes.append(node)
        for dependent in _find_dependents(node.candidate):
            key = _candidate_key(dependent)
            if key in seen:
                continue
            seen.add(key)
            queue.append(_CascadeNode(candidate=dependent, parent=node.key))
    return nodes
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
def _find_dependents(candidate: MigrationCandidate) -> list[MigrationCandidate]:
    """Find stored objects whose configs embed this candidate's source.

    Scans every stored config under both roots and substitutes the
    candidate's new config wherever an embedded sub-config hashes to the
    source hash.  Dependents whose own hash is unchanged by the
    substitution are omitted (nothing to migrate).

    NOTE: this is a full-store scan; cost grows with store size.
    """
    # Guard: if the source object has no readable metadata it cannot be a
    # dependency anyone recorded, so there is nothing to cascade.
    metadata = MetadataManager.read_metadata_raw(candidate.from_ref.directory)
    if metadata is None:
        return []
    furu_obj = metadata.get("furu_obj")
    if not isinstance(furu_obj, dict):
        return []

    from_hash = candidate.from_ref.furu_hash
    dependents: list[MigrationCandidate] = []

    for ref, config in _iter_all_configs():
        updated_config, changed = _replace_dependency(
            config, from_hash, candidate.to_config
        )
        if not changed:
            continue
        dependent_namespace = _extract_namespace(updated_config)
        target_class = _resolve_target_class(dependent_namespace)
        # Dependents keep their own class; only the embedded config changes.
        dependent_candidate = _build_candidate(
            ref,
            updated_config,
            to_namespace=dependent_namespace,
            target_class=target_class,
            default_values=None,
            default_fields=None,
            drop_fields=None,
            default_source=None,
        )
        if (
            dependent_candidate.to_ref.furu_hash
            == dependent_candidate.from_ref.furu_hash
        ):
            continue
        dependents.append(dependent_candidate)
    return dependents
|
|
412
|
+
|
|
413
|
+
|
|
414
|
+
def _replace_dependency(
|
|
415
|
+
value: JsonValue,
|
|
416
|
+
from_hash: str,
|
|
417
|
+
to_config: dict[str, JsonValue],
|
|
418
|
+
) -> tuple[JsonValue, bool]:
|
|
419
|
+
if isinstance(value, dict):
|
|
420
|
+
if "__class__" in value:
|
|
421
|
+
if FuruSerializer.compute_hash(value) == from_hash:
|
|
422
|
+
return dict(to_config), True
|
|
423
|
+
changed = False
|
|
424
|
+
updated: dict[str, JsonValue] = {}
|
|
425
|
+
for key, item in value.items():
|
|
426
|
+
new_value, was_changed = _replace_dependency(item, from_hash, to_config)
|
|
427
|
+
if was_changed:
|
|
428
|
+
changed = True
|
|
429
|
+
updated[key] = new_value
|
|
430
|
+
return updated, changed
|
|
431
|
+
if isinstance(value, list):
|
|
432
|
+
updated_list: list[JsonValue] = []
|
|
433
|
+
changed = False
|
|
434
|
+
for item in value:
|
|
435
|
+
new_value, was_changed = _replace_dependency(item, from_hash, to_config)
|
|
436
|
+
if was_changed:
|
|
437
|
+
changed = True
|
|
438
|
+
updated_list.append(new_value)
|
|
439
|
+
return updated_list, changed
|
|
440
|
+
return value, False
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
def _apply_single_migration(
    candidate: MigrationCandidate,
    *,
    policy: MigrationPolicy,
    origin: str | None,
    note: str | None,
    conflict: MigrationConflict,
    conflict_status: str | None,
) -> MigrationRecord:
    """Materialize one candidate on disk and write its migration records.

    Side-effect order matters: optional overwrite cleanup, payload/state
    transfer, target metadata, migration records (target then source),
    then state-log events.  Returns the record written for the target.
    """
    from_dir = candidate.from_ref.directory
    to_dir = candidate.to_ref.directory

    # Clear a pre-existing target before writing anything into it.
    if conflict == "overwrite" and to_dir.exists():
        shutil.rmtree(to_dir)

    to_dir.mkdir(parents=True, exist_ok=True)
    now = _dt.datetime.now(_dt.timezone.utc).isoformat(timespec="seconds")

    if policy in {"move", "copy"}:
        _transfer_payload(from_dir, to_dir, policy)
        # "move" additionally tombstones the source state.
        _copy_state(from_dir, to_dir, clear_source=policy == "move")
    else:
        # "alias": no payload transfer; the target is marked as migrated.
        _write_migrated_state(to_dir)

    to_obj = FuruSerializer.from_dict(candidate.to_config)
    metadata = MetadataManager.create_metadata(to_obj, to_dir, ignore_diff=True)
    MetadataManager.write_metadata(metadata, to_dir)

    default_values = _serialize_default_values(candidate.defaults_applied)
    record = MigrationRecord(
        kind=_kind_for_policy(policy),
        policy=policy,
        from_namespace=candidate.from_ref.namespace,
        from_hash=candidate.from_ref.furu_hash,
        from_root=candidate.from_ref.root,
        to_namespace=candidate.to_ref.namespace,
        to_hash=candidate.to_ref.furu_hash,
        to_root=candidate.to_ref.root,
        migrated_at=now,
        overwritten_at=None,
        default_values=default_values,
        origin=origin,
        note=note,
    )
    MigrationManager.write_migration(record, to_dir)

    # For alias/move the source also gets a "migrated" record pointing at
    # the target; a copy leaves the source untouched.
    if policy != "copy":
        from_record = MigrationRecord(
            kind="migrated",
            policy=policy,
            from_namespace=candidate.from_ref.namespace,
            from_hash=candidate.from_ref.furu_hash,
            from_root=candidate.from_ref.root,
            to_namespace=candidate.to_ref.namespace,
            to_hash=candidate.to_ref.furu_hash,
            to_root=candidate.to_ref.root,
            migrated_at=now,
            overwritten_at=None,
            default_values=default_values,
            origin=origin,
            note=note,
        )
        MigrationManager.write_migration(from_record, from_dir)

    # Append a "migrated" event to both state logs.
    event: dict[str, str | int] = {
        "type": "migrated",
        "policy": policy,
        "from": f"{candidate.from_ref.namespace}:{candidate.from_ref.furu_hash}",
        "to": f"{candidate.to_ref.namespace}:{candidate.to_ref.furu_hash}",
    }
    if default_values is not None:
        event["default_values"] = json.dumps(default_values, sort_keys=True)
    StateManager.append_event(to_dir, event)
    StateManager.append_event(from_dir, event.copy())

    # Record that an existing target was clobbered, when that happened.
    if conflict == "overwrite" and conflict_status is not None:
        overwrite_event = {
            "type": "migration_overwrite",
            "policy": policy,
            "from": f"{candidate.from_ref.namespace}:{candidate.from_ref.furu_hash}",
            "to": f"{candidate.to_ref.namespace}:{candidate.to_ref.furu_hash}",
            "reason": "force_overwrite",
        }
        StateManager.append_event(to_dir, overwrite_event)
        StateManager.append_event(from_dir, overwrite_event.copy())

    get_logger().info(
        "migration: %s -> %s (%s)",
        from_dir,
        to_dir,
        policy,
    )
    return record
|
|
536
|
+
|
|
537
|
+
|
|
538
|
+
def _transfer_payload(from_dir: Path, to_dir: Path, policy: MigrationPolicy) -> None:
    """Move or copy every payload entry from *from_dir* into *to_dir*.

    The internal state directory is excluded; that bookkeeping is handled
    separately by ``_copy_state``.
    """
    for entry in from_dir.iterdir():
        if entry.name == StateManager.INTERNAL_DIR:
            continue
        target = to_dir / entry.name
        if policy == "move":
            shutil.move(str(entry), target)
        elif entry.is_dir():
            shutil.copytree(entry, target, dirs_exist_ok=True)
        else:
            shutil.copy2(entry, target)
|
|
550
|
+
|
|
551
|
+
|
|
552
|
+
def _copy_state(from_dir: Path, to_dir: Path, *, clear_source: bool) -> None:
    """Duplicate the internal state files into *to_dir*.

    With ``clear_source=True`` (a "move"), the source is left as a
    tombstone: its state is rewritten to "migrated" and its success
    marker removed.
    """
    source_internal = from_dir / StateManager.INTERNAL_DIR
    if not source_internal.exists():
        return
    (to_dir / StateManager.INTERNAL_DIR).mkdir(parents=True, exist_ok=True)

    source_state = StateManager.get_state_path(from_dir)
    if source_state.is_file():
        shutil.copy2(source_state, StateManager.get_state_path(to_dir))
    source_marker = StateManager.get_success_marker_path(from_dir)
    if source_marker.is_file():
        shutil.copy2(source_marker, StateManager.get_success_marker_path(to_dir))

    if clear_source:
        _write_migrated_state(from_dir)
        StateManager.get_success_marker_path(from_dir).unlink(missing_ok=True)
|
|
567
|
+
|
|
568
|
+
|
|
569
|
+
def _write_migrated_state(directory: Path) -> None:
    """Rewrite *directory*'s state to a terminal "migrated" result."""

    def _mark_migrated(state) -> None:
        # Once migrated there is no live attempt and the result is final.
        state.result = _StateResultMigrated(status="migrated")
        state.attempt = None

    StateManager.update_state(directory, _mark_migrated)
|
|
575
|
+
|
|
576
|
+
|
|
577
|
+
def _kind_for_policy(policy: MigrationPolicy) -> Literal["alias", "moved", "copied"]:
|
|
578
|
+
if policy == "alias":
|
|
579
|
+
return "alias"
|
|
580
|
+
if policy == "move":
|
|
581
|
+
return "moved"
|
|
582
|
+
if policy == "copy":
|
|
583
|
+
return "copied"
|
|
584
|
+
raise ValueError(f"Unsupported migration policy: {policy}")
|
|
585
|
+
|
|
586
|
+
|
|
587
|
+
def _iter_source_configs(
    namespace: str,
) -> Iterable[tuple[FuruRef, dict[str, JsonValue]]]:
    """Yield (ref, config) for every stored object under *namespace*.

    Scans the data root and the version-controlled root in that order;
    entries without readable metadata or a dict-valued ``furu_obj`` are
    skipped silently.
    """
    relative = Path(*namespace.split("."))
    for version_controlled in (False, True):
        class_dir = FURU_CONFIG.get_root(version_controlled=version_controlled) / relative
        if not class_dir.exists():
            continue
        for entry in class_dir.iterdir():
            if not entry.is_dir():
                continue
            metadata = MetadataManager.read_metadata_raw(entry)
            if metadata is None:
                continue
            furu_obj = metadata.get("furu_obj")
            if not isinstance(furu_obj, dict):
                continue
            yield (
                FuruRef(
                    namespace=namespace,
                    furu_hash=entry.name,
                    root="git" if version_controlled else "data",
                    directory=entry,
                ),
                _typed_config(furu_obj),
            )
|
|
613
|
+
|
|
614
|
+
|
|
615
|
+
def _iter_all_configs() -> Iterable[tuple[FuruRef, dict[str, JsonValue]]]:
    """Yield (ref, config) for every stored object under either root.

    An object directory is recognized by the presence of a state file;
    the namespace is reconstructed from its path relative to the root
    (layout: ``<root>/<namespace parts...>/<hash>``).
    """
    for version_controlled in (False, True):
        root = FURU_CONFIG.get_root(version_controlled=version_controlled)
        if not root.exists():
            continue
        for object_dir in root.rglob("*"):
            if not object_dir.is_dir():
                continue
            if not StateManager.get_state_path(object_dir).is_file():
                continue
            metadata = MetadataManager.read_metadata_raw(object_dir)
            if metadata is None:
                continue
            furu_obj = metadata.get("furu_obj")
            if not isinstance(furu_obj, dict):
                continue
            namespace = ".".join(object_dir.relative_to(root).parts[:-1])
            yield (
                FuruRef(
                    namespace=namespace,
                    furu_hash=object_dir.name,
                    root="git" if version_controlled else "data",
                    directory=object_dir,
                ),
                _typed_config(furu_obj),
            )
|
|
641
|
+
|
|
642
|
+
|
|
643
|
+
def _build_candidate(
    from_ref: FuruRef,
    source_config: dict[str, JsonValue],
    *,
    to_namespace: str,
    target_class: type[Furu],
    default_values: Mapping[str, MigrationValue] | None,
    default_fields: Iterable[str] | None,
    drop_fields: Iterable[str] | None,
    default_source: Furu | None,
) -> MigrationCandidate:
    """Validate and assemble a MigrationCandidate for one source config.

    Pipeline: drop requested fields, inject explicit ``default_values``,
    fill ``default_fields`` from *default_source* (when given) or the
    target class defaults, then require the config to cover the target
    schema exactly.  Every mismatch raises, so a returned candidate
    always has empty ``missing_fields``/``extra_fields``.

    Raises:
        ValueError: On overlapping/unknown/conflicting default fields or
            any schema coverage mismatch.
    """
    config = dict(source_config)
    defaults_applied: dict[str, MigrationValue] = {}

    fields_dropped = _drop_fields(config, drop_fields)
    target_fields = _target_field_names(target_class)

    default_values_map = dict(default_values) if default_values is not None else {}
    default_fields_list = list(default_fields) if default_fields is not None else []

    # A field may be defaulted explicitly or from class defaults, not both.
    overlap = set(default_values_map) & set(default_fields_list)
    if overlap:
        raise ValueError(
            f"migration: default_fields and default_values overlap: {_format_fields(overlap)}"
        )

    existing_fields = set(config.keys()) - {"__class__"}
    remaining_fields = existing_fields - set(fields_dropped)
    if default_values_map:
        conflicts = set(default_values_map) & remaining_fields
        if conflicts:
            raise ValueError(
                f"migration: default_values provided for existing fields: {_format_fields(conflicts)}"
            )
        unknown = set(default_values_map) - set(target_fields)
        if unknown:
            raise ValueError(
                f"migration: default_values contains fields not in target schema: {_format_fields(unknown)}"
            )

    if default_fields_list:
        conflicts = set(default_fields_list) & remaining_fields
        if conflicts:
            raise ValueError(
                f"migration: default_fields provided for existing fields: {_format_fields(conflicts)}"
            )
        unknown = set(default_fields_list) - set(target_fields)
        if unknown:
            raise ValueError(
                f"migration: default_fields contains fields not in target schema: {_format_fields(unknown)}"
            )

    # Without an instance to read from, every default_field must have a
    # class-level default or factory.
    if default_fields_list and default_source is None:
        missing_defaults = _missing_class_defaults(target_class, default_fields_list)
        if missing_defaults:
            raise ValueError(
                f"migration: default_fields missing defaults for fields: {_format_fields(missing_defaults)}"
            )

    for field, value in default_values_map.items():
        defaults_applied[field] = value
        config[field] = _serialize_value(value)

    for field in default_fields_list:
        value = _default_value_for_field(target_class, default_source, field)
        defaults_applied[field] = value
        config[field] = _serialize_value(value)

    # Final coverage check against the target schema.
    config_keys = set(config.keys()) - {"__class__"}
    missing_fields = sorted(set(target_fields) - config_keys)
    if missing_fields:
        raise ValueError(
            f"migration: missing required fields for target class: {_format_fields(missing_fields)}"
        )
    extra_fields = sorted(config_keys - set(target_fields))
    if extra_fields:
        raise ValueError(
            f"migration: extra fields present; use drop_fields to remove: {_format_fields(extra_fields)}"
        )

    config["__class__"] = to_namespace
    to_config = _typed_config(config)
    _typecheck_config(to_config)

    to_hash = FuruSerializer.compute_hash(to_config)
    to_ref = _build_target_ref(target_class, to_namespace, to_hash)

    return MigrationCandidate(
        from_ref=from_ref,
        to_ref=to_ref,
        to_namespace=to_namespace,
        to_config=to_config,
        defaults_applied=defaults_applied,
        fields_dropped=fields_dropped,
        missing_fields=missing_fields,
        extra_fields=extra_fields,
    )
|
|
740
|
+
|
|
741
|
+
|
|
742
|
+
def _drop_fields(
|
|
743
|
+
config: dict[str, JsonValue], drop_fields: Iterable[str] | None
|
|
744
|
+
) -> list[str]:
|
|
745
|
+
if drop_fields is None:
|
|
746
|
+
return []
|
|
747
|
+
fields = list(drop_fields)
|
|
748
|
+
unknown = [field for field in fields if field not in config]
|
|
749
|
+
if unknown:
|
|
750
|
+
raise ValueError(
|
|
751
|
+
f"migration: drop_fields contains unknown fields: {_format_fields(unknown)}"
|
|
752
|
+
)
|
|
753
|
+
for field in fields:
|
|
754
|
+
config.pop(field, None)
|
|
755
|
+
return fields
|
|
756
|
+
|
|
757
|
+
|
|
758
|
+
def _target_field_names(target_class: type[Furu]) -> list[str]:
    """List the logical field names declared on *target_class*."""
    names: list[str] = []
    for spec in chz.chz_fields(target_class).values():
        names.append(spec.logical_name)
    return names
|
|
760
|
+
|
|
761
|
+
|
|
762
|
+
def _missing_class_defaults(
    target_class: type[Furu],
    default_fields: list[str],
) -> list[str]:
    """Return the names in *default_fields* that *target_class* cannot default.

    A field can be defaulted when it carries either a default value or a
    default factory; names with neither are reported (in input order).
    """
    specs = chz.chz_fields(target_class)
    missing: list[str] = []
    for name in default_fields:
        spec = specs[name]
        has_default = spec._default is not CHZ_MISSING
        has_factory = not isinstance(spec._default_factory, MISSING_TYPE)
        if not (has_default or has_factory):
            missing.append(name)
    return missing
|
|
776
|
+
|
|
777
|
+
|
|
778
|
+
def _default_value_for_field(
    target_class: type[Furu],
    default_source: Furu | None,
    field_name: str,
) -> MigrationValue:
    """Resolve the value to use when the migration must fill in *field_name*.

    Resolution order: an explicit *default_source* instance wins, then the
    class-level default value, then the class-level default factory.

    Raises:
        ValueError: when no default is available from any source.
    """
    if default_source is not None:
        return getattr(default_source, field_name)
    spec = chz.chz_fields(target_class)[field_name]
    if spec._default is not CHZ_MISSING:
        return spec._default
    if not isinstance(spec._default_factory, MISSING_TYPE):
        return spec._default_factory()
    raise ValueError(
        f"migration: default_fields missing defaults for fields: {_format_fields([field_name])}"
    )
|
|
794
|
+
|
|
795
|
+
|
|
796
|
+
def _serialize_default_values(
|
|
797
|
+
values: Mapping[str, MigrationValue],
|
|
798
|
+
) -> dict[str, JsonValue] | None:
|
|
799
|
+
if not values:
|
|
800
|
+
return None
|
|
801
|
+
return {key: _serialize_value(value) for key, value in values.items()}
|
|
802
|
+
|
|
803
|
+
|
|
804
|
+
def _serialize_value(value: MigrationValue) -> JsonValue:
    """Serialize *value* via FuruSerializer, enforcing a JSON-compatible result.

    Raises:
        TypeError: when the serializer produced a non-JSON-compatible object.
    """
    result = FuruSerializer.to_dict(value)
    # None and the JSON scalar/container types pass straight through.
    if result is None or isinstance(result, (str, int, float, bool, list, dict)):
        return result
    raise TypeError(f"Unsupported migration value type: {type(result)}")
|
|
811
|
+
|
|
812
|
+
|
|
813
|
+
def _align_candidate_to_target(
|
|
814
|
+
candidate: MigrationCandidate,
|
|
815
|
+
target_config: dict[str, JsonValue],
|
|
816
|
+
) -> MigrationCandidate | None:
|
|
817
|
+
if candidate.to_config != target_config:
|
|
818
|
+
return None
|
|
819
|
+
return candidate
|
|
820
|
+
|
|
821
|
+
|
|
822
|
+
def _typecheck_config(config: dict[str, JsonValue]) -> None:
    """Deserialize *config* and run chz typechecking across all of its fields.

    Lists are coerced to tuples first (see _normalize_tuple_fields) so that
    declared tuple fields validate cleanly.
    """
    instance = _normalize_tuple_fields(FuruSerializer.from_dict(config))
    for_all_fields(typecheck)(instance)
|
|
826
|
+
|
|
827
|
+
|
|
828
|
+
def _normalize_tuple_fields(obj: Furu) -> Furu:
    """Coerce list values into tuples on fields whose declared type is tuple.

    NOTE(review): presumably needed because serialized configs store tuples
    as JSON lists — confirm against FuruSerializer round-trip behavior.
    """
    coerced: dict[str, object] = {}
    for spec in chz.chz_fields(obj).values():
        declared = spec.final_type
        if declared is not tuple and getattr(declared, "__origin__", None) is not tuple:
            continue
        current = getattr(obj, spec.logical_name)
        if isinstance(current, list):
            coerced[spec.logical_name] = tuple(current)
    return chz.replace(obj, **coerced) if coerced else obj
|
|
840
|
+
|
|
841
|
+
|
|
842
|
+
def _build_target_ref(
    target_class: type[Furu],
    namespace: str,
    furu_hash: str,
) -> FuruRef:
    """Construct the FuruRef describing where the migrated target will live.

    The storage root is chosen by the class's version_controlled flag
    ("git" root for version-controlled classes, "data" otherwise), and the
    directory nests the dotted namespace segments followed by the hash.
    """
    versioned = target_class.version_controlled
    storage_root = FURU_CONFIG.get_root(version_controlled=versioned)
    target_dir = storage_root / Path(*namespace.split(".")) / furu_hash
    return FuruRef(
        namespace=namespace,
        furu_hash=furu_hash,
        root="git" if versioned else "data",
        directory=target_dir,
    )
|
|
856
|
+
|
|
857
|
+
|
|
858
|
+
def _resolve_target_class(namespace: str) -> type[Furu]:
|
|
859
|
+
module_path, _, class_name = namespace.rpartition(".")
|
|
860
|
+
if not module_path:
|
|
861
|
+
raise ValueError(f"migration: unable to resolve target class: {namespace}")
|
|
862
|
+
try:
|
|
863
|
+
module = importlib.import_module(module_path)
|
|
864
|
+
except Exception as exc: # pragma: no cover - import errors
|
|
865
|
+
raise ValueError(
|
|
866
|
+
f"migration: unable to resolve target class: {namespace}"
|
|
867
|
+
) from exc
|
|
868
|
+
obj = getattr(module, class_name, None)
|
|
869
|
+
if obj is None:
|
|
870
|
+
raise ValueError(f"migration: unable to resolve target class: {namespace}")
|
|
871
|
+
if not _is_furu_class(obj):
|
|
872
|
+
raise ValueError(f"migration: unable to resolve target class: {namespace}")
|
|
873
|
+
return cast(type[Furu], obj)
|
|
874
|
+
|
|
875
|
+
|
|
876
|
+
def _is_furu_class(value: object) -> bool:
|
|
877
|
+
return isinstance(value, type) and issubclass(value, Furu)
|
|
878
|
+
|
|
879
|
+
|
|
880
|
+
def _namespace_str(target_class: type[Furu]) -> str:
|
|
881
|
+
return ".".join(target_class._namespace().parts)
|
|
882
|
+
|
|
883
|
+
|
|
884
|
+
def _extract_namespace(config: dict[str, JsonValue]) -> str:
|
|
885
|
+
class_name = config.get("__class__")
|
|
886
|
+
if isinstance(class_name, str):
|
|
887
|
+
return class_name
|
|
888
|
+
raise ValueError("migration: unable to resolve target class: <unknown>")
|
|
889
|
+
|
|
890
|
+
|
|
891
|
+
def _target_status(candidate: MigrationCandidate) -> str | None:
    """Report whether the migration target already completed or is in flight.

    Returns "success" when the target state already finished successfully,
    "running" when an attempt is currently underway, and None otherwise
    (including when the existing result is itself a migrated marker).
    """
    target = FuruSerializer.from_dict(candidate.to_config)
    state = target.get_state(candidate.to_ref.directory)
    # A migrated result is checked first so it never reports "success".
    if isinstance(state.result, _StateResultMigrated):
        return None
    if state.result.status == "success":
        return "success"
    if isinstance(state.attempt, _StateAttemptRunning):
        return "running"
    return None
|
|
902
|
+
|
|
903
|
+
|
|
904
|
+
def _expand_skip_keys(
|
|
905
|
+
conflicts: Iterable[_CandidateKey],
|
|
906
|
+
parent_map: dict[_CandidateKey, _CandidateKey | None],
|
|
907
|
+
) -> set[_CandidateKey]:
|
|
908
|
+
skipped = set(conflicts)
|
|
909
|
+
changed = True
|
|
910
|
+
while changed:
|
|
911
|
+
changed = False
|
|
912
|
+
for key, parent in parent_map.items():
|
|
913
|
+
if parent is None:
|
|
914
|
+
continue
|
|
915
|
+
if parent in skipped and key not in skipped:
|
|
916
|
+
skipped.add(key)
|
|
917
|
+
changed = True
|
|
918
|
+
return skipped
|
|
919
|
+
|
|
920
|
+
|
|
921
|
+
def _format_fields(fields: Iterable[str]) -> str:
|
|
922
|
+
return ", ".join(sorted(fields))
|
|
923
|
+
|
|
924
|
+
|
|
925
|
+
def _typed_config(config: dict[str, JsonValue]) -> dict[str, JsonValue]:
|
|
926
|
+
return {str(key): value for key, value in config.items()}
|