half-orm-dev 0.17.3a8__py3-none-any.whl → 0.17.3a9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- half_orm_dev/migrations/0/17/4/00_toml_dict_format.py +204 -0
- half_orm_dev/patch_manager.py +264 -61
- half_orm_dev/release_file.py +32 -18
- half_orm_dev/release_manager.py +168 -36
- half_orm_dev/repo.py +124 -0
- half_orm_dev/version.txt +1 -1
- {half_orm_dev-0.17.3a8.dist-info → half_orm_dev-0.17.3a9.dist-info}/METADATA +1 -1
- {half_orm_dev-0.17.3a8.dist-info → half_orm_dev-0.17.3a9.dist-info}/RECORD +12 -11
- {half_orm_dev-0.17.3a8.dist-info → half_orm_dev-0.17.3a9.dist-info}/WHEEL +0 -0
- {half_orm_dev-0.17.3a8.dist-info → half_orm_dev-0.17.3a9.dist-info}/licenses/AUTHORS +0 -0
- {half_orm_dev-0.17.3a8.dist-info → half_orm_dev-0.17.3a9.dist-info}/licenses/LICENSE +0 -0
- {half_orm_dev-0.17.3a8.dist-info → half_orm_dev-0.17.3a9.dist-info}/top_level.txt +0 -0
half_orm_dev/migrations/0/17/4/00_toml_dict_format.py
ADDED
@@ -0,0 +1,204 @@
+"""
+Migration: Convert TOML patches to dict format with merge_commit.
+
+This migration converts the old TOML format:
+    [patches]
+    "1-auth" = "staged"
+    "2-api" = "candidate"
+
+To the new dict format:
+    [patches]
+    "1-auth" = { status = "staged", merge_commit = "abc123de" }
+    "2-api" = { status = "candidate" }
+
+For staged patches, the merge_commit hash is retrieved from git history.
+"""
+
+from pathlib import Path
+import subprocess
+import sys
+
+try:
+    import tomli
+except ImportError:
+    import tomllib as tomli
+
+try:
+    import tomli_w
+except ImportError:
+    raise ImportError(
+        "tomli_w is required for this migration. "
+        "Install it with: pip install tomli_w"
+    )
+
+
+def get_description():
+    """Return migration description."""
+    return "Convert TOML patches to dict format with merge_commit"
+
+
+def find_merge_commit(repo, patch_id: str, version: str) -> str:
+    """
+    Find the merge commit hash for a staged patch.
+
+    Searches git history for the commit that merged the patch branch
+    into the release branch.
+
+    Args:
+        repo: Repo instance
+        patch_id: Patch identifier (e.g., "456-user-auth")
+        version: Release version (e.g., "0.17.0")
+
+    Returns:
+        Commit hash (8 characters) or empty string if not found
+    """
+    release_branch = f"ho-release/{version}"
+
+    try:
+        # Search for merge commit message pattern
+        # Pattern: [HOP] Merge #PATCH_ID into %"VERSION"
+        result = subprocess.run(
+            ['git', 'log', '--all', '--grep', f'Merge #{patch_id}',
+             '--format=%H', '-n', '1'],
+            cwd=repo.base_dir,
+            capture_output=True,
+            text=True,
+            check=True
+        )
+
+        commit_hash = result.stdout.strip()
+        if commit_hash:
+            return commit_hash[:8]
+
+        # Fallback: search for the move to stage commit
+        # Pattern: [HOP] move patch #PATCH_ID from candidate to stage
+        result = subprocess.run(
+            ['git', 'log', '--all', '--grep', f'move patch #{patch_id}',
+             '--format=%H', '-n', '1'],
+            cwd=repo.base_dir,
+            capture_output=True,
+            text=True,
+            check=True
+        )
+
+        commit_hash = result.stdout.strip()
+        if commit_hash:
+            # Get the parent commit (the merge commit is the one before the move)
+            result = subprocess.run(
+                ['git', 'rev-parse', f'{commit_hash}^'],
+                cwd=repo.base_dir,
+                capture_output=True,
+                text=True,
+                check=True
+            )
+            parent_hash = result.stdout.strip()
+            if parent_hash:
+                return parent_hash[:8]
+
+    except subprocess.CalledProcessError:
+        pass
+
+    return ""
+
+
+def migrate(repo):
+    """
+    Execute migration: Convert TOML patches to dict format.
+
+    For each X.Y.Z-patches.toml file:
+    1. Read current content
+    2. Check if already in dict format
+    3. Convert to dict format:
+       - candidates: { status = "candidate" }
+       - staged: { status = "staged", merge_commit = "..." }
+    4. Find merge_commit from git history for staged patches
+    5. Write updated TOML file
+
+    Args:
+        repo: Repo instance
+    """
+    print("Migrating TOML patches to dict format with merge_commit...")
+
+    releases_dir = Path(repo.releases_dir)
+    if not releases_dir.exists():
+        print("  No releases directory found, skipping migration.")
+        return
+
+    # Find all TOML patches files
+    toml_files = list(releases_dir.glob("*-patches.toml"))
+
+    if not toml_files:
+        print("  No TOML patches files found, skipping migration.")
+        return
+
+    migrated_count = 0
+
+    for toml_file in toml_files:
+        # Extract version from filename
+        version = toml_file.stem.replace('-patches', '')
+
+        print(f"  Processing {version}...")
+
+        try:
+            # Read current TOML content
+            with toml_file.open('rb') as f:
+                data = tomli.load(f)
+
+            patches = data.get("patches", {})
+
+            if not patches:
+                print(f"    No patches in {version}, skipping")
+                continue
+
+            # Check if already in dict format
+            first_value = next(iter(patches.values()))
+            if isinstance(first_value, dict):
+                print(f"    Already in dict format, skipping")
+                continue
+
+            # Convert to dict format
+            new_patches = {}
+            staged_without_commit = []
+
+            for patch_id, status in patches.items():
+                if status == "candidate":
+                    new_patches[patch_id] = {"status": "candidate"}
+                elif status == "staged":
+                    # Find merge commit from git history
+                    merge_commit = find_merge_commit(repo, patch_id, version)
+                    if merge_commit:
+                        new_patches[patch_id] = {
+                            "status": "staged",
+                            "merge_commit": merge_commit
+                        }
+                    else:
+                        # No merge_commit found, store without it
+                        new_patches[patch_id] = {"status": "staged"}
+                        staged_without_commit.append(patch_id)
+                else:
+                    # Unknown status, preserve as-is in dict format
+                    new_patches[patch_id] = {"status": status}
+
+            # Update data and write
+            data["patches"] = new_patches
+
+            with toml_file.open('wb') as f:
+                tomli_w.dump(data, f)
+
+            print(f"    Converted {len(patches)} patch(es)")
+            if staged_without_commit:
+                print(f"    Warning: No merge_commit found for: {', '.join(staged_without_commit)}",
+                      file=sys.stderr)
+
+            migrated_count += 1
+
+        except Exception as e:
+            print(f"    Error processing {version}: {e}", file=sys.stderr)
+            continue
+
+    repo.hgit.add('.hop')
+
+    if migrated_count > 0:
+        print(f"\nMigration complete: {migrated_count} file(s) converted to dict format")
+    else:
+        print("\nNo files needed migration")
half_orm_dev/patch_manager.py
CHANGED
@@ -488,61 +488,84 @@ class PatchManager:
         Apply patch with full release context.

         Workflow:
-        1. Restore DB from
-        2. Apply
-        3.
-        4. If current patch is NOT in release, apply it at the end
-        5. Generate Python code
+        1. Restore DB from release schema (includes all staged patches)
+        2. Apply only the current patch
+        3. Generate Python code

-
-
-
+        If release schema doesn't exist (backward compatibility), falls back to:
+        1. Restore DB from production baseline
+        2. Apply all staged patches in order
+        3. Apply current patch
+        4. Generate Python code

-
+        Examples:
+            # With release schema (new workflow):
+            apply_patch_complete_workflow("999")
             # Execution:
-            # 1. Restore DB
-            # 2. Apply
-            # 3.
-
-            #
-            # 6. Generate code
-
-            # Current patch: 999 (NOT in release)
+            # 1. Restore DB from release-0.17.1.sql (includes staged patches)
+            # 2. Apply 999
+            # 3. Generate code
+
+            # Without release schema (backward compat):
             apply_patch_complete_workflow("999")
             # Execution:
-            # 1. Restore DB (
-            # 2. Apply
-            # 3. Apply
-            # 4.
-            # 5. Apply 234
-            # 6. Apply 999 ← At the end
-            # 7. Generate code
+            # 1. Restore DB from schema.sql (prod)
+            # 2. Apply all staged patches
+            # 3. Apply 999
+            # 4. Generate code
         """

         try:
-            #
-            self.
-
-
-
+            # Get release version for this patch
+            version = self._find_version_for_candidate(patch_id)
+            if not version:
+                # Try to find from staged patches
+                version = self._repo.release_manager.get_next_release_version()

             applied_release_files = []
             applied_current_files = []
             patch_was_in_release = False

-            #
-
-
-
-
-
+            # Check if release schema exists
+            release_schema_path = None
+            if version:
+                release_schema_path = self._repo.get_release_schema_path(version)
+
+            if release_schema_path and release_schema_path.exists():
+                # New workflow: restore from release schema (includes all staged patches)
+                self._repo.restore_database_from_release_schema(version)

-
-            if not patch_was_in_release:
+                # Apply only the current patch
                 files = self.apply_patch_files(patch_id, self._repo.model)
                 applied_current_files = files
+            else:
+                # Backward compatibility: old workflow
+                # Also generates release schema for migration of existing projects
+                self._repo.restore_database_from_schema()

-
+                # Get and apply all staged release patches
+                release_patches = self._repo.release_manager.get_all_release_context_patches()
+
+                for patch in release_patches:
+                    if patch == patch_id:
+                        patch_was_in_release = True
+                    files = self.apply_patch_files(patch, self._repo.model)
+                    applied_release_files.extend(files)
+
+                # Generate release schema for existing projects migration
+                # This captures the state after all staged patches are applied
+                if version:
+                    try:
+                        self._repo.generate_release_schema(version)
+                    except Exception:
+                        pass  # Non-critical, continue with apply
+
+                # If current patch not in release (candidate), apply it now
+                if not patch_was_in_release:
+                    files = self.apply_patch_files(patch_id, self._repo.model)
+                    applied_current_files = files
+
+            # Generate Python code
             # Track generated files
             package_dir = Path(self._base_dir) / self._repo_name
             files_before = set()
@@ -557,14 +580,14 @@ class PatchManager:

             generated_files = [str(f.relative_to(self._base_dir)) for f in files_after]

-            #
+            # Return success
             return {
                 'patch_id': patch_id,
-                'release_patches': [p for p in release_patches if p != patch_id],
                 'applied_release_files': applied_release_files,
                 'applied_current_files': applied_current_files,
                 'patch_was_in_release': patch_was_in_release,
                 'generated_files': generated_files,
+                'used_release_schema': release_schema_path is not None and release_schema_path.exists(),
                 'status': 'success',
                 'error': None
             }
@@ -619,9 +642,11 @@ class PatchManager:
         # Apply files in lexicographic order
         for patch_file in structure.files:
             if patch_file.is_sql:
+                print('XXX', patch_file.name)
                 self._execute_sql_file(patch_file.path, database_model)
                 applied_files.append(patch_file.name)
             elif patch_file.is_python:
+                print('XXX', patch_file.name)
                 self._execute_python_file(patch_file.path)
                 applied_files.append(patch_file.name)
             # Other file types are ignored (not executed)
@@ -1909,8 +1934,17 @@ class PatchManager:
                 f"You may need to resolve conflicts manually."
             )

-        #
-        self.
+        # 5b. Get merge commit hash
+        merge_commit = self._repo.hgit.last_commit()
+
+        # 6. Move from candidates to stage (with merge commit hash)
+        self._move_patch_to_stage(patch_id, version, merge_commit)
+
+        # 6b. Regenerate release schema (DB is already in correct state after validation)
+        try:
+            self._update_release_schemas(version)
+        except Exception as e:
+            raise PatchManagerError(f"Failed to update release schema: {e}")

         # 7. Commit changes on release branch (TOML file is in .hop/releases/)
         # This also syncs .hop/ to all active branches automatically via decorator
@@ -1921,6 +1955,9 @@ class PatchManager:
         except Exception as e:
             raise PatchManagerError(f"Failed to commit/push changes: {e}")

+        # 7b. Propagate release schema to higher version releases (now that commit is done)
+        self._propagate_release_schema_to_higher_versions(version)
+
         # 8. Delete patch branch (local and remote)
         try:
             self._repo.hgit.delete_local_branch(patch_branch)
@@ -1936,6 +1973,129 @@ class PatchManager:
             'merged_into': release_branch
         }

+    def _update_release_schemas(self, version: str) -> None:
+        """
+        Update release schema for current version and propagate to higher versions.
+
+        After a patch is merged, regenerates the release schema file for the
+        current version and updates all higher version releases that depend on it.
+
+        Args:
+            version: Current release version (e.g., "0.17.1")
+
+        Workflow:
+            1. Add release schema to staging (already generated during validation)
+            2. Find all release branches with higher versions
+            3. For each higher version:
+               - Checkout to that branch
+               - Restore DB from current release schema
+               - Apply all staged patches for that release
+               - Regenerate its release schema
+               - Commit the updated schema
+            4. Return to original branch
+        """
+        from packaging.version import Version
+        from half_orm_dev.release_file import ReleaseFile
+
+        original_branch = self._repo.hgit.branch
+        current_ver = Version(version)
+
+        # 1. Write and add release schema to staging area (if not hotfix mode)
+        # Schema content was saved during validation with correct DB state
+        release_schema_path = self._repo.get_release_schema_path(version)
+        if hasattr(self, '_pending_release_schema_content') and self._pending_release_schema_content:
+            click.echo(f"  • Writing release schema ({len(self._pending_release_schema_content)} bytes)")
+            release_schema_path.write_text(self._pending_release_schema_content, encoding='utf-8')
+            self._pending_release_schema_content = None  # Clear after use
+            self._repo.hgit.add(str(release_schema_path))
+        else:
+            # Hotfix mode or no content - skip release schema
+            click.echo(f"  • Skipping release schema (hotfix mode)")
+
+        # NOTE: Do NOT checkout other branches here!
+        # The commit will be done later by commit_and_sync_to_active_branches()
+        # Propagation to higher releases is disabled for now as it causes issues
+        # with uncommitted changes being lost during checkout.
+        # TODO: Re-enable propagation after the main commit is done.
+
+        # 2. Find higher version releases (disabled - propagation moved to after commit)
+        releases_dir = Path(self._repo.releases_dir)
+        higher_releases = []
+
+        for toml_file in releases_dir.glob("*-patches.toml"):
+            rel_version = toml_file.stem.replace('-patches', '')
+            try:
+                rel_ver = Version(rel_version)
+                if rel_ver > current_ver:
+                    higher_releases.append(rel_version)
+            except Exception:
+                continue
+
+        # Sort by version (ascending)
+        higher_releases.sort(key=lambda v: Version(v))
+
+        # Store higher releases for later propagation (after commit)
+        # This avoids losing uncommitted changes when checking out other branches
+        self._pending_higher_releases = higher_releases if higher_releases else None
+
+    def _propagate_release_schema_to_higher_versions(self, version: str) -> None:
+        """
+        Propagate release schema changes to higher version releases.
+
+        Called after commit to update release schemas for all releases
+        with version > current version.
+
+        Args:
+            version: Current release version that was just updated
+        """
+        from packaging.version import Version
+        from half_orm_dev.release_file import ReleaseFile
+
+        if not hasattr(self, '_pending_higher_releases') or not self._pending_higher_releases:
+            return
+
+        higher_releases = self._pending_higher_releases
+        self._pending_higher_releases = None  # Clear after use
+
+        original_branch = self._repo.hgit.branch
+        releases_dir = Path(self._repo.releases_dir)
+
+        for higher_version in higher_releases:
+            higher_branch = f"ho-release/{higher_version}"
+
+            if not self._repo.hgit.branch_exists(higher_branch):
+                continue
+
+            click.echo(f"  • Propagating to {higher_branch}...")
+
+            try:
+                # Checkout to higher version branch
+                self._repo.hgit.checkout(higher_branch)
+
+                # Restore DB from current release schema (which includes the new patch)
+                self._repo.restore_database_from_release_schema(version)
+
+                # Apply all staged patches for this higher release
+                release_file = ReleaseFile(higher_version, releases_dir)
+                if release_file.exists():
+                    staged_patches = release_file.get_patches(status="staged")
+                    for pid in staged_patches:
+                        patch_dir = Path(self._base_dir) / "Patches" / pid
+                        if patch_dir.exists():
+                            self.apply_patch_files(pid, self._repo.model)
+
+                # Regenerate release schema for this higher version
+                higher_schema_path = self._repo.generate_release_schema(higher_version)
+                self._repo.hgit.add(str(higher_schema_path))
+                self._repo.hgit.commit('-m', f"[HOP] Update release schema from %{version}")
+                self._repo.hgit.push()
+
+            except Exception as e:
+                click.echo(f"  ⚠️  Warning: Failed to propagate to {higher_branch}: {e}")
+
+        # Return to original branch
+        self._repo.hgit.checkout(original_branch)
+
     def _validate_patch_before_merge(
         self,
         patch_id: str,
@@ -1976,6 +2136,8 @@ class PatchManager:
         # Save current branch
         original_branch = self._repo.hgit.branch
         temp_branch = f"ho-validate/{patch_id}"
+        release_schema_content = None
+        release_schema_path = None

         try:
             click.echo(f"\n🔍 Validating patch {utils.Color.bold(patch_id)} before merge...")
@@ -1998,24 +2160,36 @@ class PatchManager:
             # 3. Run patch apply and verify no modifications
             click.echo(f"  • Running patch apply to verify idempotency...")
             try:
-                #
-
-                staged_patches = []
-                if release_file.exists():
-                    staged_patches = release_file.get_patches(status="staged")
+                # Check if release schema exists
+                release_schema_path = self._repo.get_release_schema_path(version)

-
-
+                if release_schema_path.exists():
+                    # New workflow: restore from release schema (includes all staged patches)
+                    self._repo.restore_database_from_release_schema(version)

-
-
-
-                for pid in all_patches:
-                    patch_dir = Path(self._repo.base_dir) / "Patches" / pid
+                    # Apply only the current patch
+                    patch_dir = Path(self._repo.base_dir) / "Patches" / patch_id
                     if patch_dir.exists():
-                        self.apply_patch_files(
+                        self.apply_patch_files(patch_id, self._repo.model)
+                else:
+                    # Fallback: old workflow for backward compatibility
+                    release_file = ReleaseFile(version, Path(self._repo.releases_dir))
+                    staged_patches = []
+                    if release_file.exists():
+                        staged_patches = release_file.get_patches(status="staged")
+
+                    # Apply all staged patches + current patch
+                    all_patches = staged_patches + [patch_id]
+
+                    # Restore database and apply patches
+                    self._repo.restore_database_from_schema()

-
+                    for pid in all_patches:
+                        patch_dir = Path(self._repo.base_dir) / "Patches" / pid
+                        if patch_dir.exists():
+                            self.apply_patch_files(pid, self._repo.model)
+
+                # Generate modules
                 modules.generate(self._repo)

                 # Check if any files were modified
@@ -2043,10 +2217,33 @@ class PatchManager:
             # 4. Run tests (best-effort)
             self._run_tests_if_available()

+            # 5. Generate release schema while DB is in correct state
+            # This captures prod + all staged patches + current patch
+            # Skip for hotfix releases (detected by presence of X.Y.Z.txt production file)
+            prod_file = Path(self._repo.releases_dir) / f"{version}.txt"
+            is_hotfix = prod_file.exists()
+
+            if is_hotfix:
+                click.echo(f"  • Skipping release schema (hotfix mode)")
+                release_schema_content = None
+            else:
+                click.echo(f"  • Generating release schema...")
+                release_schema_path = self._repo.generate_release_schema(version)
+
+                # Save schema content to restore after branch checkout
+                # (the file will be lost when switching branches)
+                release_schema_content = release_schema_path.read_text(encoding='utf-8')
+
+                # Delete the file to avoid checkout conflicts
+                # (content is saved in memory and will be written after checkout)
+                release_schema_path.unlink()
+
+                click.echo(f"  • {utils.Color.green('✓')} Release schema generated")
+
             click.echo(f"  • {utils.Color.green('✓')} Validation passed!\n")

         finally:
-            #
+            # 6. Cleanup: Delete temp branch and return to original branch
             try:
                 # Return to original branch
                 if self._repo.hgit.branch != original_branch:
@@ -2059,6 +2256,10 @@ class PatchManager:
                 # Cleanup errors are non-critical, just warn
                 click.echo(f"⚠️  Warning: Failed to cleanup temp branch {temp_branch}: {e}")

+        # Store release schema content for later use in _update_release_schemas
+        # (after merge, when we're on the release branch)
+        self._pending_release_schema_content = release_schema_content
+
    def _run_tests_if_available(self) -> None:
        """
        Run tests if test configuration is available.
@@ -2631,7 +2832,7 @@ class PatchManager:

         return '\n'.join(lines)

-    def _move_patch_to_stage(self, patch_id: str, version: str) -> None:
+    def _move_patch_to_stage(self, patch_id: str, version: str, merge_commit: str) -> None:
         """
         Move patch from candidate to staged status.

@@ -2641,19 +2842,21 @@ class PatchManager:
         Args:
             patch_id: Patch identifier to move
             version: Release version
+            merge_commit: Git commit hash of the merge commit

         Raises:
             PatchManagerError: If operation fails

         Examples:
-            self._move_patch_to_stage("456-user-auth", "0.17.0")
-            # Changes "456-user-auth" =
+            self._move_patch_to_stage("456-user-auth", "0.17.0", "abc123de")
+            # Changes "456-user-auth" = {status = "candidate"}
+            # to "456-user-auth" = {status = "staged", merge_commit = "abc123de"}
            # Order is preserved!
        """
        release_file = ReleaseFile(version, self._releases_dir)

        try:
-            release_file.move_to_staged(patch_id)
+            release_file.move_to_staged(patch_id, merge_commit)

            # Stage file for commit
            self._repo.hgit.add(str(release_file.file_path))
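The schema-propagation code above scans .hop/releases for releases whose version is strictly higher than the one just updated. That scan can be read in isolation as a small pure function; higher_release_versions is a hypothetical name used only for illustration, while the glob pattern and version comparison are the ones from the diff.

    from pathlib import Path
    from packaging.version import Version

    def higher_release_versions(releases_dir, current):
        """Versions of X.Y.Z-patches.toml files greater than `current`, ascending."""
        current_ver = Version(current)
        found = []
        for toml_file in Path(releases_dir).glob("*-patches.toml"):
            rel_version = toml_file.stem.replace('-patches', '')
            try:
                if Version(rel_version) > current_ver:
                    found.append(rel_version)
            except Exception:
                continue
        return sorted(found, key=Version)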
half_orm_dev/release_file.py
CHANGED
@@ -38,8 +38,8 @@ class ReleaseFile:

    File format:
        [patches]
-        "1-auth" = "
-        "2-api" = "
+        "1-auth" = { status = "staged", merge_commit = "abc123def" }
+        "2-api" = { status = "candidate" }

    The order of patches in the file is preserved and represents the application order.
    """
@@ -144,7 +144,7 @@ class ReleaseFile:

        if before is None:
            # Add at end
-            patches[patch_id] = "candidate"
+            patches[patch_id] = {"status": "candidate"}
        else:
            # Insert before specified patch
            if before not in patches:
@@ -156,7 +156,7 @@ class ReleaseFile:
            new_patches = {}
            for key, value in patches.items():
                if key == before:
-                    new_patches[patch_id] = "candidate"
+                    new_patches[patch_id] = {"status": "candidate"}
                new_patches[key] = value

            patches = new_patches
@@ -164,7 +164,7 @@ class ReleaseFile:
        data["patches"] = patches
        self._write(data)

-    def move_to_staged(self, patch_id: str) -> None:
+    def move_to_staged(self, patch_id: str, merge_commit: str) -> None:
        """
        Change patch status from candidate to staged.

@@ -173,13 +173,15 @@ class ReleaseFile:

        Args:
            patch_id: Patch identifier to move
+            merge_commit: Git commit hash of the merge commit

        Raises:
            ReleaseFileError: If patch not found or operation fails

        Examples:
-            release_file.move_to_staged("1-auth")
-            # Changes "1-auth" =
+            release_file.move_to_staged("1-auth", "abc123def")
+            # Changes "1-auth" = {status = "candidate"}
+            # to "1-auth" = {status = "staged", merge_commit = "abc123def"}
        """
        data = self._read()
        patches = data.get("patches", {})
@@ -189,12 +191,13 @@ class ReleaseFile:
                f"Patch {patch_id} not found in {self.version}"
            )

-
+        patch_data = patches[patch_id]
+        if patch_data.get("status") == "staged":
            raise ReleaseFileError(
                f"Patch {patch_id} is already staged"
            )

-        patches[patch_id] = "staged"
+        patches[patch_id] = {"status": "staged", "merge_commit": merge_commit}
        data["patches"] = patches
        self._write(data)

@@ -225,8 +228,8 @@ class ReleaseFile:
            return list(patches.keys())

        return [
-            patch_id for patch_id,
-            if
+            patch_id for patch_id, patch_data in patches.items()
+            if patch_data.get("status") == status
        ]

    def get_patch_status(self, patch_id: str) -> Optional[str]:
@@ -246,7 +249,10 @@ class ReleaseFile:
        """
        data = self._read()
        patches = data.get("patches", {})
-
+        patch_data = patches.get(patch_id)
+        if patch_data is None:
+            return None
+        return patch_data.get("status")

    def remove_patch(self, patch_id: str) -> None:
        """
@@ -276,19 +282,27 @@ class ReleaseFile:
        """Check if release file exists."""
        return self.file_path.exists()

-    def
+    def get_merge_commit(self, patch_id: str) -> Optional[str]:
        """
-        Get
+        Get merge commit hash of a staged patch.
+
+        Args:
+            patch_id: Patch identifier

        Returns:
-
+            Merge commit hash, or None if patch not found or not staged

        Examples:
-
-
+            commit = release_file.get_merge_commit("1-auth")
+            if commit:
+                print(f"Patch was merged in commit {commit}")
        """
        data = self._read()
-
+        patches = data.get("patches", {})
+        patch_data = patches.get(patch_id)
+        if patch_data is None:
+            return None
+        return patch_data.get("merge_commit")

    def set_metadata(self, metadata: Dict) -> None:
        """
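With the dict format, reading a release file back is a plain TOML load. The sketch below is a hypothetical standalone equivalent of combining get_patches(status="staged") with get_merge_commit() for every staged patch.

    import tomllib   # use the tomli backport on Python < 3.11

    def staged_with_commits(toml_path):
        """Map each staged patch id to its recorded merge_commit (None if absent)."""
        with open(toml_path, 'rb') as f:
            patches = tomllib.load(f).get("patches", {})
        return {
            patch_id: entry.get("merge_commit")
            for patch_id, entry in patches.items()
            if entry.get("status") == "staged"
        }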
half_orm_dev/release_manager.py
CHANGED
@@ -680,6 +680,8 @@ class ReleaseManager:
        """
        Lit les patch IDs d'un fichier de release.

+        Format: patch_id:merge_commit (one per line)
+
        Ignore:
        - Lignes vides
        - Commentaires (#)
@@ -695,41 +697,107 @@ class ReleaseManager:
            for line in f:
                line = line.strip()
                if line and not line.startswith('#'):
-
+                    patch_id = line.split(':')[0]
+                    patch_ids.append(patch_id)

        return patch_ids

-    def
+    def read_release_patches_with_commits(self, filename: str) -> dict:
+        """
+        Lit les patch IDs et merge_commits d'un fichier de release.
+
+        Format: patch_id:merge_commit (one per line)
+
+        Returns:
+            dict: {patch_id: merge_commit}
+
+        Example:
+            # File content:
+            # 1-premier:ce96282f
+            # 2-second:8e10f11b
+            patches = read_release_patches_with_commits("0.1.0-rc1.txt")
+            # → {"1-premier": "ce96282f", "2-second": "8e10f11b"}
+        """
+        file_path = self._releases_dir / filename
+
+        if not file_path.exists():
+            return {}
+
+        patches = {}
+        with open(file_path, 'r', encoding='utf-8') as f:
+            for line in f:
+                line = line.strip()
+                if line and not line.startswith('#'):
+                    patch_id, merge_commit = line.split(':', 1)
+                    patches[patch_id] = merge_commit
+
+        return patches
+
+    def _apply_release_patches(self, version: str, hotfix=False, force_apply=False) -> None:
        """
        Apply all patches for a release version to the database.

-
+        If a release schema exists (release-X.Y.Z.sql) and force_apply=False,
+        uses it directly. Otherwise, restores database from baseline and
+        applies patches in order:
        1. All RC patches (rc1, rc2, etc.)
        2. Stage patches

+        For staged patches with merge_commit recorded, checks out each commit
+        before applying the patch to ensure the correct Python code context.
+
        Args:
            version: Release version (e.g., "0.1.0")
+            hotfix: If True, skip RC patches (hotfix workflow)
+            force_apply: If True, always apply patches individually even if
+                release schema exists (used for production validation)

        Raises:
            ReleaseManagerError: If patch application fails
        """
-        #
+        # Check if release schema exists (new workflow)
+        release_schema_path = self._repo.get_release_schema_path(version)
+        if release_schema_path.exists() and not force_apply:
+            # New workflow: restore from release schema (already contains all staged patches)
+            self._repo.restore_database_from_release_schema(version)
+            return
+
+        # Fallback: old workflow - restore database from baseline
        self._repo.restore_database_from_schema()

-
+        current_branch = self._repo.hgit.branch
+
+        # Collect patches already applied from RC files
+        applied_patches = set()
+
+        # Apply all RC patches in order (with merge_commit checkout)
        if not hotfix:
            rc_files = self._get_label_files(version, 'rc')
            for rc_file in rc_files:
-                rc_patches = self.
-                for patch_id in rc_patches:
+                rc_patches = self.read_release_patches_with_commits(rc_file.name)
+                for patch_id, merge_commit in rc_patches.items():
+                    if merge_commit:
+                        self._repo.hgit.checkout(merge_commit)
                    self._repo.patch_manager.apply_patch_files(patch_id, self._repo.model)
+                    applied_patches.add(patch_id)

-        # Apply staged patches from TOML file
-        #
+        # Apply staged patches from TOML file that are NOT already in RC files
+        # (patches added after promote_to_rc)
        release_file = ReleaseFile(version, self._releases_dir)
        if release_file.exists():
            # Development: read from TOML
            stage_patches = release_file.get_patches(status="staged")
+
+            # Apply only patches not already applied from RC
+            for patch_id in stage_patches:
+                if patch_id in applied_patches:
+                    continue  # Already applied from RC file
+
+                merge_commit = release_file.get_merge_commit(patch_id)
+                if merge_commit:
+                    # Checkout the merge commit to have correct Python code
+                    self._repo.hgit.checkout(merge_commit)
+                self._repo.patch_manager.apply_patch_files(patch_id, self._repo.model)
        else:
            # Production: read from hotfix snapshot if it exists
            # This handles the case where we're applying a hotfix release
@@ -741,8 +809,14 @@ class ReleaseManager:
                # No patches to apply
                stage_patches = []

-
-
+            # For production snapshots (no TOML), apply patches without checkout
+            # (merge_commit info not available in .txt files)
+            for patch_id in stage_patches:
+                if patch_id not in applied_patches:
+                    self._repo.patch_manager.apply_patch_files(patch_id, self._repo.model)
+
+        # Return to original branch
+        self._repo.hgit.checkout(current_branch)

    def _collect_all_version_patches(self, version: str) -> List[str]:
        """
@@ -2249,6 +2323,24 @@ class ReleaseManager:
        except Exception as e:
            raise ReleaseManagerError(f"Failed to checkout release branch: {e}")

+        # Generate release schema file
+        # Use existing release schema as base if available, otherwise use prod
+        try:
+            base_release = self._find_base_release_schema(version)
+            if base_release:
+                self._repo.restore_database_from_release_schema(base_release)
+            else:
+                self._repo.restore_database_from_schema()
+
+            release_schema_path = self._repo.generate_release_schema(version)
+
+            # Commit release schema on release branch
+            self._repo.hgit.add(str(release_schema_path))
+            self._repo.hgit.commit('-m', f"[HOP] Add release schema for %{version}")
+            self._repo.hgit.push()
+        except Exception as e:
+            raise ReleaseManagerError(f"Failed to generate release schema: {e}")
+
        return {
            'version': version,
            'branch': release_branch,
@@ -2289,6 +2381,53 @@ class ReleaseManager:
        patches_files.sort(key=version_key)
        return patches_files[0].stem.replace('-patches', '')

+    def _find_base_release_schema(self, new_version: str) -> Optional[str]:
+        """
+        Find the base release schema for a new release.
+
+        When creating a new release, determines which schema to use as base:
+        - If a release with lower version exists and has a release schema, use it
+        - Otherwise, return None (will use production schema)
+
+        This handles parallel releases:
+        - Creating 0.18.0 (minor) when 0.17.1 (patch) exists → use release-0.17.1.sql
+
+        Note: Only one release per level can exist at a time (sequential promotion rule).
+
+        Args:
+            new_version: Version being created (e.g., "0.18.0")
+
+        Returns:
+            Version string of base release, or None if should use prod schema
+        """
+        from packaging.version import Version
+
+        new_ver = Version(new_version)
+        model_dir = Path(self._repo.model_dir)
+
+        # Find all existing release schema files
+        release_schemas = list(model_dir.glob("release-*.sql"))
+        if not release_schemas:
+            return None
+
+        # Find the release with highest version lower than new_version
+        best_match = None
+        best_ver = None
+
+        for schema_file in release_schemas:
+            match = re.match(r'release-(\d+\.\d+\.\d+)\.sql$', schema_file.name)
+            if match:
+                ver_str = match.group(1)
+                try:
+                    ver = Version(ver_str)
+                    if ver < new_ver and (best_ver is None or ver > best_ver):
+                        best_ver = ver
+                        best_match = ver_str
+                except Exception:
+                    continue
+
+        return best_match
+
    @with_dynamic_branch_lock(lambda self: "ho-prod")
    def promote_to_rc(self) -> dict:
        """
@@ -2355,10 +2494,14 @@ class ReleaseManager:
        # Stay on release branch for rename operations
        # (allows continued development on this release)

-        # Create RC snapshot from staged patches
+        # Create RC snapshot from staged patches (with merge_commit)
        rc_file = self._releases_dir / f"{version}-rc{rc_number}.txt"
        staged_patches = release_file.get_patches(status="staged")
-
+        lines = []
+        for patch_id in staged_patches:
+            merge_commit = release_file.get_merge_commit(patch_id)
+            lines.append(f"{patch_id}:{merge_commit}" if merge_commit else patch_id)
+        rc_file.write_text("\n".join(lines) + "\n" if lines else "", encoding='utf-8')

        # Keep TOML file for continued development (don't delete it)

@@ -2498,29 +2641,13 @@ class ReleaseManager:
                "ho-prod has been restored to its previous state."
            )

-        # 3. Apply
-        #
-
-
-
-
-
-            latest_rc = self._get_latest_label_number(version, "rc")
-            rc_schema_version = f"{version}-rc{latest_rc}" if latest_rc > 0 else "0.0.0"
-
-            # Restore from RC schema if it exists, otherwise from baseline
-            try:
-                self._repo.restore_database_from_schema(rc_schema_version)
-            except:
-                self._repo.restore_database_from_schema()
-
-            # Apply new stage patches
-            for patch_id in stage_patches:
-                self._repo.patch_manager.apply_patch_files(patch_id, self._repo.model)
-        else:
-            # No new patches - just restore from latest RC
-            # The database should already be in the correct state from RC
-            pass
+        # 3. Apply all patches (RC + staged) to database
+        # Uses _apply_release_patches which handles:
+        # - Reading RC files with merge_commits (patch_id:merge_commit format)
+        # - Checking out each merge_commit before applying patch
+        # - Reading staged patches from TOML with merge_commits
+        # force_apply=True to validate by applying patches even if release schema exists
+        self._apply_release_patches(version, force_apply=True)

        # Register the release version in half_orm_meta.hop_release
        version_parts = version.split('.')
@@ -2542,6 +2669,11 @@ class ReleaseManager:
        if toml_file.exists():
            toml_file.unlink()

+        # Delete release schema file (no longer needed - prod schema takes over)
+        release_schema_file = model_dir / f"release-{version}.sql"
+        if release_schema_file.exists():
+            release_schema_file.unlink()
+
        # Generate model/data-X.Y.Z.sql if any patches have @HOP:data files
        # This file is used for from-scratch installations (clone, restore)
        prod_patches = self.read_release_patches(prod_file.name)
half_orm_dev/repo.py
CHANGED
@@ -2282,6 +2282,130 @@ See docs/half_orm_dev.md for complete documentation.
            # Catch any unexpected errors
            raise RepoError(f"Database restoration failed: {e}") from e

+    def generate_release_schema(self, version: str) -> Path:
+        """
+        Generate release schema SQL dump.
+
+        Creates .hop/model/release-{version}.sql with current database structure,
+        metadata, and data. This file represents the complete state of a release
+        in development (prod + all staged patches).
+
+        Used by:
+        - release create: Generate initial release schema from prod baseline
+        - patch merge: Update release schema after patch integration
+
+        Args:
+            version: Release version string (e.g., "0.17.1", "0.18.0")
+
+        Returns:
+            Path to generated release schema file
+
+        Raises:
+            RepoError: If pg_dump fails or model_dir doesn't exist
+
+        Examples:
+            # After merging patch into release
+            schema_path = repo.generate_release_schema("0.17.1")
+            # Creates: .hop/model/release-0.17.1.sql
+        """
+        model_dir = Path(self.model_dir)
+
+        if not model_dir.exists():
+            raise RepoError(f"Model directory does not exist: {model_dir}")
+
+        release_schema_file = model_dir / f"release-{version}.sql"
+        temp_file = model_dir / f".release-{version}.sql.tmp"
+
+        try:
+            # Dump complete database (schema + data) to temp file
+            self.database.execute_pg_command(
+                'pg_dump',
+                self.name,
+                '--no-owner',
+                '-f',
+                str(temp_file)
+            )
+
+            # Filter out version-specific lines for cross-version compatibility
+            content = temp_file.read_text()
+            filtered_lines = []
+            version_specific_sets = (
+                'SET transaction_timeout',  # PG17+
+            )
+            for line in content.split('\n'):
+                if line.startswith('\\restrict') or line.startswith('\\unrestrict'):
+                    continue
+                if line.startswith('-- Dumped from') or line.startswith('-- Dumped by'):
+                    continue
+                if any(line.startswith(s) for s in version_specific_sets):
+                    continue
+                filtered_lines.append(line)
+
+            release_schema_file.write_text('\n'.join(filtered_lines))
+
+            return release_schema_file
+
+        except Exception as e:
+            raise RepoError(f"Failed to generate release schema: {e}") from e
+        finally:
+            if temp_file.exists():
+                temp_file.unlink()
+
+    def restore_database_from_release_schema(self, version: str) -> None:
+        """
+        Restore database from release schema file.
+
+        Restores database from .hop/model/release-{version}.sql which contains
+        the complete state of a release in development (prod + staged patches).
+
+        If the release schema file doesn't exist, falls back to
+        restore_database_from_schema() for backward compatibility.
+
+        Args:
+            version: Release version string (e.g., "0.17.1")
+
+        Raises:
+            RepoError: If restoration fails
+
+        Examples:
+            # Before applying a candidate patch
+            repo.restore_database_from_release_schema("0.17.1")
+            # Database now has prod schema + all staged patches for 0.17.1
+        """
+        release_schema_path = Path(self.model_dir) / f"release-{version}.sql"
+
+        # Fallback to production schema if release schema doesn't exist
+        if not release_schema_path.exists():
+            self.restore_database_from_schema()
+            return
+
+        try:
+            # Drop all user schemas
+            self._reset_database_schemas()
+
+            # Load release schema
+            self.database.execute_pg_command(
+                'psql', '-d', self.name, '-f', str(release_schema_path)
+            )
+
+            # Reload half_orm metadata cache
+            self.model.reconnect(reload=True)
+
+        except Exception as e:
+            raise RepoError(f"Failed to restore from release schema: {e}") from e
+
+    def get_release_schema_path(self, version: str) -> Path:
+        """
+        Get path to release schema file.
+
+        Args:
+            version: Release version string
+
+        Returns:
+            Path to .hop/model/release-{version}.sql (may not exist)
+        """
+        return Path(self.model_dir) / f"release-{version}.sql"
+
    def _deduce_metadata_path(self, schema_path: Path) -> Path | None:
        """
        Deduce metadata file path from schema.sql symlink target.
half_orm_dev/version.txt
CHANGED
@@ -1 +1 @@
-0.17.3-
+0.17.3-a9
{half_orm_dev-0.17.3a8.dist-info → half_orm_dev-0.17.3a9.dist-info}/RECORD
CHANGED
@@ -5,13 +5,13 @@ half_orm_dev/decorators.py,sha256=JKv_Z_JZUr-s-Vz551temHZhhecPfbvyhTbByRDjVAQ,49
 half_orm_dev/hgit.py,sha256=VdzCCQ__xG1IGJaGq4-rrhbA1bNkDw_dBqkUNIeTONg,58045
 half_orm_dev/migration_manager.py,sha256=9RpciH8nyQrF0xV31kAeaYKkQl24Di1VHt-mAjjHhzM,14854
 half_orm_dev/modules.py,sha256=4jfVb2yboRgb9mcO0sMF-iLigcZFTHEm4VRLN6GQXM4,16796
-half_orm_dev/patch_manager.py,sha256=
+half_orm_dev/patch_manager.py,sha256=yFVaSDid6VJAclQre-J-QWjyDIDNHi6EzRybtMEguow,110669
 half_orm_dev/patch_validator.py,sha256=QNe1L6k_xwsnrOTcb3vkW2D0LbqrCRcZOGPnVyspVRk,10871
-half_orm_dev/release_file.py,sha256=
-half_orm_dev/release_manager.py,sha256=
-half_orm_dev/repo.py,sha256=
+half_orm_dev/release_file.py,sha256=2sfWYqjqfOuMtPDH6qNanXUD9UMaZ7yt912o6aZ0GYI,10570
+half_orm_dev/release_manager.py,sha256=IBjQ26ostzXYu5PF-cx-Z7Wezt8SpG9SkeoTnTNFt2c,130985
+half_orm_dev/repo.py,sha256=W14IR-G737QgVgWV6FMC3zVJfG87X9VbzOC3DLJoJng,100601
 half_orm_dev/utils.py,sha256=M3yViUFfsO7Cp9MYSoUSkCZ6R9w_4jW45UDZUOT8FhI,1493
-half_orm_dev/version.txt,sha256=
+half_orm_dev/version.txt,sha256=mcwJgjmZIdLLkWl4QWMi2Zy-eHfrnal7--Y_GUibulg,10
 half_orm_dev/cli/__init__.py,sha256=0CbMj8OIhZmglWakK7NhYPn302erUTEg2VHOdm1hRTQ,163
 half_orm_dev/cli/main.py,sha256=3SVTl5WraNTSY6o7LfvE1dUHKg_RcuVaHHDIn_oINv4,11701
 half_orm_dev/cli/commands/__init__.py,sha256=UhWf0AnWqy4gyFo2SJQv8pL_YJ43pE_c9TgopcjzKDg,1490
@@ -30,6 +30,7 @@ half_orm_dev/cli/commands/update.py,sha256=OarpDkC8hF08UUv1l2i-2qpHICgpB6WDn9ziY
 half_orm_dev/cli/commands/upgrade.py,sha256=9puvnanp6D2n3cQGcmK1-AI6Z_GlTYPZcaZgN1m5wDE,6297
 half_orm_dev/migrations/0/17/1/00_move_to_hop.py,sha256=esXYcdgNUJfUQUCtD_BBxHbdQi7_s4nWJ8bJYNIUfY4,4000
 half_orm_dev/migrations/0/17/1/01_txt_to_toml.py,sha256=CJQo4-DTI0tPBD6VXNo0HsNkG2fkSxHAcQ-5cwXVt_4,4995
+half_orm_dev/migrations/0/17/4/00_toml_dict_format.py,sha256=5LYVExrAhVmS_Fkp2uhs9j5xtWrp3FhzDhrwn3_CQdI,6224
 half_orm_dev/patches/log,sha256=n7MNnGR09Obd87gXLzIi6zA76sI4RhOJzC25wb0TbKE,22
 half_orm_dev/patches/0/1/0/00_half_orm_meta.database.sql,sha256=gMZ94YlyrftxcqDn0l-ToCTee4A_bnP58DpHcIT_T1w,1074
 half_orm_dev/patches/0/1/0/01_alter_half_orm_meta.hop_release.sql,sha256=nhRbDi6sUenvVfOnoRuWSbLEC1cEfzrXbxDof2weq04,183
@@ -51,9 +52,9 @@ half_orm_dev/templates/sql_adapter,sha256=kAP5y7Qml3DKsbZLUeoVpeXjbQcWltHjkDznED
 half_orm_dev/templates/warning,sha256=4hlZ_rRdpmkXxOeRoVd9xnXBARYXn95e-iXrD1f2u7k,490
 half_orm_dev/templates/git-hooks/pre-commit,sha256=Hf084pqeiOebrv4xzA0aiaHbIXswmmNO-dSIXUfzMK0,4707
 half_orm_dev/templates/git-hooks/prepare-commit-msg,sha256=zknOGGoaWKC97zfga2Xl2i_psnNo9MJbrEBuN91eHNw,1070
-half_orm_dev-0.17.
-half_orm_dev-0.17.
-half_orm_dev-0.17.
-half_orm_dev-0.17.
-half_orm_dev-0.17.
-half_orm_dev-0.17.
+half_orm_dev-0.17.3a9.dist-info/licenses/AUTHORS,sha256=eWxqzRdLOt2gX0FMQj_wui03Od3jdlwa8xNe9tl84g0,113
+half_orm_dev-0.17.3a9.dist-info/licenses/LICENSE,sha256=ufhxlSi6mttkGQTsGWrEoB3WA_fCPJ6-k07GSVBgyPw,644
+half_orm_dev-0.17.3a9.dist-info/METADATA,sha256=5j_555Q9_75zj0GZADXj-PUEf-lXJ7xL-v5tZJzWO7w,16149
+half_orm_dev-0.17.3a9.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+half_orm_dev-0.17.3a9.dist-info/top_level.txt,sha256=M5hEsWfn5Kw0HL-VnNmS6Jw-3cwRyjims5a8cr18eTM,13
+half_orm_dev-0.17.3a9.dist-info/RECORD,,