bl-odoo 0.2.6__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bl/freezer.py +2 -4
- bl/spec_parser.py +41 -114
- bl/spec_processor.py +224 -213
- bl/types.py +86 -0
- bl/utils.py +2 -2
- {bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/METADATA +1 -1
- bl_odoo-0.3.0.dist-info/RECORD +13 -0
- bl_odoo-0.2.6.dist-info/RECORD +0 -12
- {bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/WHEEL +0 -0
- {bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/entry_points.txt +0 -0
- {bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/top_level.txt +0 -0
bl/freezer.py
CHANGED
@@ -1,13 +1,11 @@
 import asyncio
 import yaml
-from operator import countOf
 from pathlib import Path
-from typing import TextIO
 
 from rich.console import Console
 from rich.live import Live
 from rich.progress import BarColumn, MofNCompleteColumn, Progress, TaskID, TextColumn
-from bl.
+from bl.types import RepoInfo, ProjectSpec
 from bl.utils import get_local_ref, get_module_path, run_git
 
 console = Console()
@@ -18,7 +16,7 @@ async def freeze_spec(
     progress: Progress,
     task_id: TaskID,
     module_name: str,
-    module_spec:
+    module_spec: RepoInfo,
     workdir: Path,
 ):
     result = {module_name: {}}
bl/spec_parser.py
CHANGED
@@ -1,18 +1,13 @@
 import re
 import warnings
+from dataclasses import dataclass
 from enum import Enum
 from pathlib import Path
 from typing import Any, Dict, List, Optional
 
 import yaml
 
-
-class OriginType(Enum):
-    """Type of origin reference."""
-
-    BRANCH = "branch"
-    PR = "pr"
-    REF = "ref"
+from bl.types import RepoInfo, OriginType, ProjectSpec, RefspecInfo
 
 
 def make_remote_merge_from_src(src: str) -> tuple[dict, list]:
@@ -52,60 +47,26 @@ def get_origin_type(origin_value: str) -> OriginType:
         return OriginType.BRANCH
 
 
-
-
-
-
-
-
-
-
-
-    )
-        self.remote = remote
-        self.refspec = ref_str
-        """ The refspec string (branch name, PR ref, or commit hash). """
-        self.type = type
-        self.ref_name = ref_name
-
-    def __repr__(self) -> str:
-        return f"RefspecInfo(remote={self.remote!r}, origin={self.refspec!r}, type={self.type.value})"
-
-
-class ModuleSpec:
-    """Represents the specification for a set of modules."""
-
-    def __init__(
-        self,
-        modules: List[str],
-        remotes: Optional[Dict[str, str]] = {},
-        origins: Optional[List[RefspecInfo]] = [],
-        shell_commands: Optional[List[str]] = [],
-        patch_globs_to_apply: Optional[List[str]] = None,
-        target_folder: Optional[str] = None,
-        frozen_modules: Optional[Dict[str, Dict[str, str]]] = None,
-    ):
-        self.modules = modules
-        self.remotes = remotes
-        self.refspec_info = origins
-        self.shell_commands = shell_commands
-        self.patch_globs_to_apply = patch_globs_to_apply
-        self.frozen_modules = frozen_modules
-        self.target_folder = None
-
-    def __repr__(self) -> str:
-        return f"ModuleSpec(modules={self.modules}, remotes={self.remotes}, origins={self.refspec_info})"
-
+def parse_remote_refspec_from_parts(parts: List[str], frozen_repo: Dict[str, Dict[str, str]]):
+    if len(parts) == 2:
+        parts.insert(1, "")
+    else:
+        warnings.warn(
+            "Deprecated src format: use <url> <sha> format for the src property",
+            DeprecationWarning,
+        )
+    remote_key, _, ref_spec = parts
+    ref_type = get_origin_type(ref_spec)
 
-
-
+    ref_name = None
+    remote_freezes = frozen_repo.get(remote_key, {})
 
-
-
-
+    if ref_spec in remote_freezes:
+        ref_type = OriginType.REF
+        ref_name = ref_spec
+        ref_spec = remote_freezes.get(ref_name)
 
-
-        return f"ProjectSpec(specs={self.specs}, workdir={self.workdir})"
+    return RefspecInfo(remote_key, ref_spec, ref_type, ref_name)
 
 
 def load_spec_file(config: Path, frozen: Path, workdir: Path) -> Optional[ProjectSpec]:
@@ -123,6 +84,7 @@ def load_spec_file(config: Path, frozen: Path, workdir: Path) -> Optional[Projec
     config = config.resolve()
     # If the file is not in the current directory, check inside the odoo subdirectory
    odoo_config = config.parent / "odoo" / config.name
+    # TODO(franz): should use rich console for prettiness
     if not odoo_config.exists():
         print(f"Error: Neither '{config}' nor '{odoo_config}' exists.")
         return None
@@ -151,22 +113,21 @@ def load_spec_file(config: Path, frozen: Path, workdir: Path) -> Optional[Projec
     except yaml.YAMLError as e:
         print(f"Error parsing frozen YAML file '{frozen_path}': {e}")
 
-
-    for
-        modules =
-        src =
-        remotes =
-        merges =
-        shell_commands =
-        patch_globs_to_apply =
-
-
-
-
-        )
+    repos: Dict[str, RepoInfo] = {}
+    for repo_name, repo_data in data.items():
+        modules = repo_data.get("modules", [])
+        src = repo_data.get("src")
+        remotes = repo_data.get("remotes") or {}
+        merges = repo_data.get("merges") or []
+        shell_commands = repo_data.get("shell_command_after") or None
+        patch_globs_to_apply = repo_data.get("patch_globs") or None
+        target_folder = repo_data.get("target_folder") or None
+        locales = repo_data.get("locales", [])
+
+        frozen_repo = frozen_mapping.get(repo_name, {})
 
         # Parse merges into RefspecInfo objects
-
+        refspec_infos: List[RefspecInfo] = []
         if src:
             # If src is defined, create a remote and merge entry from it
             src_remotes, src_merges = make_remote_merge_from_src(src)
@@ -175,51 +136,17 @@ def load_spec_file(config: Path, frozen: Path, workdir: Path) -> Optional[Projec
 
         for merge_entry in merges:
             parts = merge_entry.split(" ", 2)
-
-
-
-
-            ref_type = get_origin_type(ref_spec)
-
-            ref_name = None
-            if frozen_for_section:
-                remote_freezes = frozen_for_section.get(remote_key) or {}
-                ref_name = ref_spec
-                ref_type = OriginType.REF
-                frozen_ref = remote_freezes.get(ref_spec)
-                ref_spec = frozen_ref or ref_spec
-
-            origins.append(
-                RefspecInfo(
-                    remote_key,
-                    ref_spec,
-                    ref_type,
-                    ref_name,
-                )
-            )
-        elif len(parts) == 3:
-            warnings.warn(
-                "Deprecated src format: use <url> <sha> format for the src property",
-                DeprecationWarning,
-            )
-            remote_key, _, ref_spec = parts
-            ref_type = get_origin_type(ref_spec)
-
-            ref_name = None
-            if frozen_for_section:
-                remote_freezes = frozen_for_section.get(remote_key) or {}
-                ref_name = ref_spec
-                ref_spec = remote_freezes.get(ref_spec)
-
-            origins.append(RefspecInfo(remote_key, ref_spec, ref_type, ref_name))
-
-        specs[section_name] = ModuleSpec(
+            refspec_info = parse_remote_refspec_from_parts(parts, frozen_repo)
+            refspec_infos.append(refspec_info)
+
+        repos[repo_name] = RepoInfo(
             modules,
             remotes,
-
+            refspec_infos,
             shell_commands,
             patch_globs_to_apply,
-
+            target_folder,
+            locales,
         )
 
-    return ProjectSpec(
+    return ProjectSpec(repos, workdir)
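Note: the per-entry parsing that was previously inlined now lives in parse_remote_refspec_from_parts. A minimal sketch of the new flow; the remote name, branch, and frozen sha below are made up for the example, while the function name, argument shapes, and OriginType values come from the diff above.

from bl.spec_parser import parse_remote_refspec_from_parts

# A spec.yaml merge entry such as "oca 16.0" splits into two parts; the
# parser inserts an empty placeholder so the three-way unpack stays uniform.
parts = "oca 16.0".split(" ", 2)

# A frozen mapping pins the branch name to a sha, turning the refspec into
# an OriginType.REF that points at the frozen commit (hypothetical values).
frozen_repo = {"oca": {"16.0": "0123abcd"}}

info = parse_remote_refspec_from_parts(parts, frozen_repo)
print(info)  # RefspecInfo(remote='oca', origin='0123abcd', type='ref')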
bl/spec_processor.py
CHANGED
@@ -1,12 +1,8 @@
 import asyncio
-import hashlib
-from logging import root
 import os
-from posix import link
 import warnings
-import shutil
 from pathlib import Path
-from typing import
+from typing import Dict, List
 
 from rich.console import Console
 from rich.live import Live
@@ -16,10 +12,17 @@ from typing_extensions import deprecated
 
 from bl.utils import english_env, get_local_ref, get_module_path, run_git
 
-from .
+from bl.types import CloneFlags, CloneInfo, OriginType, ProjectSpec, RefspecInfo, RepoInfo
 
 console = Console()
 
+# TODO(franz): it's a bit better now but better keep an eye on it
+# TODO(franz): Error handling should be watch carefully because if
+# we don't exit on some error code due to the fact that git resolve to
+# the parent repo we could activate sparse checkout on a parent folder
+# should probably make a function that handles the error in a unified manner
+# and crash if the error is on a vital part of the process
+
 
 def rich_warning(message, category, filename, lineno, file=None, line=None):
     console.print(f"[yellow]Warning:[/] {category.__name__}: {message}\n[dim]{filename}:{lineno}[/]")
@@ -29,24 +32,44 @@ warnings.showwarning = rich_warning
 warnings.simplefilter("default", DeprecationWarning)
 
 
-
-#
-
-
+def check_path_is_repo(module_path: Path):
+    # TODO(franz): add check for .git folder
+    return not module_path.exists() or not module_path.is_dir()
+
+
+def clone_info_from_repo(name: str, repo_info: RepoInfo):
+    flags = CloneFlags.SHALLOW if name == "odoo" or len(repo_info.refspec_info) == 1 else 0
+    flags |= CloneFlags.SPARSE if name != "odoo" or len(repo_info.locales) > 0 else 0
+    root_refspec_info = repo_info.refspec_info[0]
+    remote_url = repo_info.remotes.get(root_refspec_info.remote)
+
+    return CloneInfo(
+        remote_url,
+        flags,
+        root_refspec_info,
+    )
+
+
+# for single branch we should clone shallow but for other we should clone deep
+# this allows us to get merge-base to work and git can then merge by pulling the minimum
+# amount of data
+def create_clone_args(clone_info: CloneInfo) -> List[str]:
     """Creates git clone arguments based on the base origin."""
     args = [
         "clone",
         "--filter=tree:0",
     ]
 
-    if
+    if clone_info.clone_flags & CloneFlags.SHALLOW:
         args += [
             "--depth",
             "1",
         ]
-
+    if clone_info.clone_flags & CloneFlags.SPARSE:
         args += ["--sparse"]
 
+    ref_spec_info = clone_info.root_refspec_info
+
     if ref_spec_info.type == OriginType.REF:
         args += [
             "--revision",
@@ -61,7 +84,7 @@ def create_clone_args(name: str, ref_spec_info: RefspecInfo, remote_url: str, sh
     ]
 
     args += [
-
+        clone_info.url,
     ]
 
     return args
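Note: a sketch of how the CloneFlags bits on CloneInfo drive create_clone_args. The URL and sha below are placeholders, and the middle of the function (between the --revision branch and the final URL argument) is elided in this diff, so the resulting list is only partially shown.

from bl.spec_processor import create_clone_args
from bl.types import CloneFlags, CloneInfo, OriginType, RefspecInfo

# A frozen REF origin cloned both shallow and sparse (hypothetical values).
refspec = RefspecInfo("origin", "0123abcd", OriginType.REF, "16.0")
clone_info = CloneInfo(
    "https://example.com/repo.git",
    CloneFlags.SHALLOW | CloneFlags.SPARSE,
    refspec,
)

args = create_clone_args(clone_info)
# The branches visible in the diff contribute:
# ["clone", "--filter=tree:0", "--depth", "1", "--sparse", "--revision", ...,
#  "https://example.com/repo.git"]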
@@ -74,22 +97,37 @@ def normalize_merge_result(ret: int, out: str, err: str):
     return ret, err
 
 
-class
+class RepoProcessor:
     """
     Processes a ProjectSpec by concurrently cloning and merging modules.
     """
 
-    def __init__(
+    def __init__(
+        self,
+        workdir: Path,
+        name: str,
+        semaphore: asyncio.Semaphore,
+        repo_info: RepoInfo,
+        progress: Progress,
+        count_progress: Progress,
+        count_task: TaskID,
+        concurrency: int,
+    ):
         self.workdir = workdir
+        self.name = name
+        self.semaphore = semaphore
+        self.repo_info = repo_info
+        self.progress = progress
+        self.count_progress = count_progress
+        self.count_task = count_task
         self.concurrency = concurrency
-        self.semaphore = asyncio.Semaphore(concurrency)
 
     @deprecated(
         "run_shell_commands is deprecated if used to apply patches. Use patch_globs properties in spec.yaml instead."
     )
-    async def run_shell_commands(self,
-        for cmd in
-            progress.update(task_id, status=f"Running shell command: {cmd}...")
+    async def run_shell_commands(self, repo_info: RepoInfo, module_path: Path) -> int:
+        for cmd in repo_info.shell_commands:
+            self.progress.update(self.task_id, status=f"Running shell command: {cmd}...")
             proc = await asyncio.create_subprocess_shell(
                 cmd,
                 cwd=str(module_path),
@@ -102,128 +140,74 @@ class SpecProcessor:
             # This is a sanity check because people usually put "git am" commands
             # in shell_commands, so we abort any ongoing git am
             await run_git("am", "--abort", cwd=str(module_path))
-            progress.update(
-                task_id,
+            self.progress.update(
+                self.task_id,
                 status=f"[red]Shell command failed: {cmd}\nError: {stderr.decode().strip()}",
             )
             return -1
         return 0
 
-    async def fetch_local_ref(
-        self,
-        origin: RefspecInfo,
-        local_ref: str,
-        module_path: Path,
-    ) -> tuple[int, str, str]:
-        return await run_git(
-            "fetch",
-            origin.remote,
-            f"{origin.refspec}:{local_ref}",
-            cwd=module_path,
-        )
-
-    async def clone_base_repo_ref(
-        self, name: str, ref_spec_info: RefspecInfo, remote_url: str, module_path: Path, shallow: bool
-    ) -> tuple[int, str, str]:
-        args = create_clone_args(name, ref_spec_info, remote_url, shallow)
-
-        ret, out, err = await run_git(
-            *args,
-            str(module_path),
-        )
-
-        # if it's a ref we need to manually create a base branch because we cannot
-        # merge in a detached head
-        local_ref = get_local_ref(ref_spec_info)
-        ret, out, err = await run_git(
-            "checkout",
-            "-b",
-            local_ref,
-            cwd=str(module_path),
-        )
-
-        return ret, out, err
-
-    async def try_merge(
-        self,
-        progress: Progress,
-        task_id: TaskID,
-        remote_url: str,
-        local_ref: str,
-        module_path: Path,
-        origin: RefspecInfo,
-    ) -> tuple[int, str]:
-        # Merge
-        # I think the idea would be to not fetch shallow but fetch treeless and do a merge-base
-        # then fetch the required data and then merge
-        progress.update(task_id, status=f"Merging {local_ref}", advance=0.1)
-        ret, out, err = await run_git("merge", "--no-edit", local_ref, cwd=module_path)
-        ret, err = normalize_merge_result(ret, out, err)
-
-        if "CONFLICT" in err:
-            progress.update(task_id, status=f"[red]Merge conflict in {origin.refspec}: {err}")
-            # In case of conflict, we might want to abort the merge
-            await run_git("merge", "--abort", cwd=module_path)
-        return ret, err
-
     async def setup_new_repo(
         self,
-
-        task_id: TaskID,
-        spec: ModuleSpec,
-        name: str,
-        root_refspec_info: RefspecInfo,
-        remote_url: str,
+        clone_info: CloneInfo,
         module_path: Path,
     ) -> int:
-
-
-
+        root_refspec_info = clone_info.root_refspec_info
+        remote = root_refspec_info.remote
+        root_refspec = root_refspec_info.refspec
+
+        self.progress.update(
+            self.task_id,
+            status=(f"Cloning {remote}/{root_refspec}"),
         )
 
-
-
-        # User --revision for specific commit checkout if needed
-        shallow_clone = len(spec.refspec_info) == 1
-        ret, out, err = await self.clone_base_repo_ref(name, root_refspec_info, remote_url, module_path, shallow_clone)
+        clone_args = create_clone_args(clone_info)
+        ret, out, err = await run_git(*clone_args, module_path)
 
         if ret != 0:
             status_message = (
-                f"[red]Clone failed {root_refspec_info.remote}({
+                f"[red]Clone failed {root_refspec_info.remote}({clone_info.url})/{root_refspec_info.refspec}"
                 + f" -> {module_path}:\n{err}"
             )
-            progress.update(task_id, status=status_message)
+            self.progress.update(self.task_id, status=status_message)
             return ret
 
-
-
-
+        local_ref = get_local_ref(root_refspec_info)
+        ret, out, err = await run_git("checkout", "-b", local_ref, cwd=module_path)
+
+        return 0
+
+    async def reset_repo_for_work(self, module_path: Path) -> int:
+        # TODO(franz): we should test if the folder is a git repo or not
+
         ret, out, err = await run_git("status", "--porcelain", cwd=module_path)
 
         if out != "":
-            progress.update(task_id, status=f"[red]Repo is dirty:\n{out}")
+            self.progress.update(self.task_id, status=f"[red]Repo is dirty:\n{out}")
+            return ret
+        if ret != 0:
+            self.progress.update(self.task_id, status="[red]Repo does not exist")
             return ret
         # Reset all the local origin to their remote origins
-
-
+        repo_info = self.repo_info
+        root_refspec_info = repo_info.refspec_info[0]
+
+        self.progress.update(
+            self.task_id,
             status=(f"Resetting existing repository for {root_refspec_info.remote}/{root_refspec_info.refspec}"),
         )
 
         s_ret, s_out, s_err = await run_git("rev-parse", "--is-shallow-repository", cwd=module_path)
-        if len(
+        if len(repo_info.refspec_info) > 1 and s_out == "true":
             await run_git("fetch", "--unshallow", cwd=module_path)
 
-        reset_target =
+        reset_target = get_local_ref(root_refspec_info)
         ret, out, err = await run_git("reset", "--hard", reset_target, cwd=module_path)
         if ret != 0:
-            progress.update(task_id, status=f"[red]Reset failed: {err}")
+            self.progress.update(self.task_id, status=f"[red]Reset failed: {err}")
             return ret
 
-
-        local_ref = get_local_ref(refspec_info)
-        # This is probably the best thing but for now this works good enough
-        # TODO(franz): find something better
-        ret, out, err = await run_git("branch", "-d", local_ref, cwd=module_path)
+        return 0
 
     def link_all_modules(self, module_list: List[str], module_path: Path) -> tuple[int, str]:
         links_path = self.workdir / "links"
@@ -239,7 +223,7 @@ class SpecProcessor:
                 if path_dest_symlink.is_symlink():
                     path_dest_symlink.unlink()
 
-                os.symlink(path_src_symlink.relative_to(
+                os.symlink(path_src_symlink.relative_to(links_path, walk_up=True), path_dest_symlink, True)
             except OSError as e:
                 return -1, str(e)
 
@@ -247,9 +231,7 @@ class SpecProcessor:
 
     async def merge_spec_into_tree(
         self,
-
-        task_id: TaskID,
-        spec: ModuleSpec,
+        spec: RepoInfo,
         refspec_info: RefspecInfo,
         root_refspec_info: RefspecInfo,
         module_path: Path,
@@ -260,11 +242,23 @@ class SpecProcessor:
         local_ref = get_local_ref(refspec_info)
         remote_ref = refspec_info.refspec
 
-
+        # Merge
+        # I think the idea would be to not fetch shallow but fetch treeless and do a merge-base
+        # then fetch the required data and then merge
+        self.progress.update(self.task_id, status=f"Merging {local_ref}", advance=0.1)
+        ret, out, err = await run_git("merge", "--no-edit", local_ref, cwd=module_path)
+        ret, err = normalize_merge_result(ret, out, err)
+
+        if "CONFLICT" in err:
+            self.progress.update(self.task_id, status=f"[red]Merge conflict {local_ref} in {remote_ref}: {err}")
+            # In case of conflict, we might want to abort the merge
+            await run_git("merge", "--abort", cwd=module_path)
+            return ret, err
+
         if ret != 0:
+            self.progress.update(self.task_id, status=f"[red]Merge error {local_ref} in {remote_ref}: {err}")
             return ret, err
 
-        progress.advance(task_id)
         return 0, ""
 
     def get_refspec_by_remote(self, refspec_info_list: List[RefspecInfo]) -> Dict[str, List[RefspecInfo]]:
@@ -293,7 +287,7 @@ class SpecProcessor:
 
         return ret, out, err
 
-    def filter_non_link_module(self, spec:
+    def filter_non_link_module(self, spec: RepoInfo):
         result = []
         base_path_links = self.workdir / "links"
         for module in spec.modules:
@@ -307,153 +301,170 @@ class SpecProcessor:
         )
         return result
 
-    async def
-
-
-        """
-
+    async def setup_odoo_sparse(self, module_spec: RepoInfo, module_path: Path):
+        list_modules = module_spec.modules
+
+        await run_git("sparse-checkout", "init", "--no-cone", cwd=module_path)
+        included_po = [f"{locale}.po" for locale in module_spec.locales]
+        included_modules = [f"/addons/{module}/*" for module in list_modules]
+        await run_git(
+            "sparse-checkout",
+            "set",
+            "/*",
+            "!/addons/*",
+            *included_modules,
+            "!*.po",
+            *included_po,
+            cwd=module_path,
+        )
 
-
+    async def setup_sparse_checkout(self, symlink_modules: List[str], module_path: Path):
+        # 2. Sparse Checkout setup
+        if self.name != "odoo":
+            self.progress.update(self.task_id, status="Configuring sparse checkout...")
+            await run_git("sparse-checkout", "init", "--cone", cwd=module_path)
+            if symlink_modules:
+                await run_git("sparse-checkout", "set", *self.repo_info.modules, cwd=module_path)
+        elif len(self.repo_info.locales) > 0:
+            # TODO(franz): We should still set sparse if there is no locales but there is a module list
+            self.progress.update(self.task_id, status="Configuring sparse odoo checkout...")
+            await self.setup_odoo_sparse(self.repo_info, module_path)
+
+    async def process_repo(self) -> int:
+        """Processes a single ModuleSpec."""
+        symlink_modules = self.filter_non_link_module(self.repo_info)
+        module_path = get_module_path(self.workdir, self.name, self.repo_info)
 
         async with self.semaphore:
-            task_id = progress.add_task(f"[cyan]{name}", status="Waiting...", total=total_steps)
             try:
-
-
+                self.task_id = self.progress.add_task(
+                    f"[cyan]{self.name}", status="Waiting...", total=len(self.repo_info.refspec_info) + 1
+                )
+                if not self.repo_info.refspec_info:
+                    self.progress.update(self.task_id, status="[yellow]No origins defined", completed=1)
                     return -1
 
-
-
-
-
-
-
-                if not module_path.exists() or not module_path.is_dir():
-                    await self.setup_new_repo(progress, task_id, spec, name, root_refspec_info, remote_url, module_path)
+                # TODO(franz) the shallow and sparseness of repo should be unify
+                # so that we don't have all those stupid conditions
+                if check_path_is_repo(module_path):
+                    clone_info = clone_info_from_repo(self.name, self.repo_info)
+                    ret = await self.setup_new_repo(clone_info, module_path)
                 else:
-                    await self.reset_repo_for_work(
-
-                if name != "odoo":
-                    # We don't do sparse checkout for odoo because the odoo repo does not work at
-                    # all like the other repos (modules are in addons/ and src/addons/) instead of
-                    # at the root of the repo
+                    ret = await self.reset_repo_for_work(module_path)
 
-
-
+                if ret != 0:
+                    return -1
 
-
-                    progress.update(task_id, status="Configuring sparse checkout...")
-                    await run_git("sparse-checkout", "init", "--cone", cwd=module_path)
-                    if symlink_modules:
-                        await run_git("sparse-checkout", "set", *spec.modules, cwd=module_path)
+                await self.setup_sparse_checkout(symlink_modules, module_path)
 
                 checkout_target = "merged"
 
                 await run_git("checkout", "-b", checkout_target, cwd=module_path)
-                progress.advance(task_id)
+                self.progress.advance(self.task_id)
 
-                for remote, remote_url in
+                for remote, remote_url in self.repo_info.remotes.items():
                     await run_git("remote", "add", remote, remote_url, cwd=module_path)
                     await run_git("config", f"remote.{remote}.partialCloneFilter", "tree:0", cwd=module_path)
                     await run_git("config", f"remote.{remote}.promisor", "true", cwd=module_path)
 
-
-
-
-                refspec_by_remote: Dict[str, List[RefspecInfo]] = self.get_refspec_by_remote(spec.refspec_info[1:])
+                refspec_by_remote: Dict[str, List[RefspecInfo]] = self.get_refspec_by_remote(
+                    self.repo_info.refspec_info
+                )
 
+                # TODO(franz): right now we fetch everything so when the repo is just cloned
+                # we fetch the base branch twice. Since we fetch with multi this is probably not
+                # a big issue but it could be better
                 for remote, refspec_list in refspec_by_remote.items():
-                    progress.update(task_id, status=f"Fetching multi from {remote}")
+                    self.progress.update(self.task_id, status=f"Fetching multi from {remote}")
                    await self.fetch_multi(remote, refspec_list, module_path)
 
                 # 4. Fetch and Merge remaining origins
-                for refspec_info in
+                for refspec_info in self.repo_info.refspec_info[1:]:
                     ret, err = await self.merge_spec_into_tree(
-
+                        self.repo_info, refspec_info, self.repo_info.refspec_info[0], module_path
                     )
                     if ret != 0:
-                        progress.update(task_id, status=f"[purple]Merge failed from {refspec_info.refspec}: {err}")
                         return -1
+                    self.progress.advance(self.task_id)
 
-                if
-                    ret = await self.run_shell_commands(
+                if self.repo_info.shell_commands:
+                    ret = await self.run_shell_commands(self.repo_info, module_path)
                     if ret != 0:
                         return ret
 
-                if
-                    for glob in
-                        progress.update(task_id, status=f"Applying patches: {glob}...", advance=0.1)
+                if self.repo_info.patch_globs_to_apply:
+                    for glob in self.repo_info.patch_globs_to_apply:
+                        self.progress.update(self.task_id, status=f"Applying patches: {glob}...", advance=0.1)
                         ret, out, err = await run_git("am", glob, cwd=module_path)
                         if ret != 0:
                             await run_git("am", "--abort", cwd=module_path)
-                            progress.update(task_id, status=f"[red]Applying patches failed: {err}")
+                            self.progress.update(self.task_id, status=f"[red]Applying patches failed: {err}")
                             return ret
 
-                progress.update(task_id, status="Linking directory")
-                if name != "odoo":
+                self.progress.update(self.task_id, status="Linking directory")
+                if self.name != "odoo":
                     ret, err = self.link_all_modules(symlink_modules, module_path)
                     if ret != 0:
-                        progress.update(task_id, status=f"[red]Could not link modules: {err}")
+                        self.progress.update(self.task_id, status=f"[red]Could not link modules: {err}")
                         return ret
 
-                progress.update(task_id, status="[green]Complete")
-                progress.remove_task(task_id)
-                count_progress.advance(count_task)
+                self.progress.update(self.task_id, status="[green]Complete", advance=1)
+                self.progress.remove_task(self.task_id)
+                self.count_progress.advance(self.count_task)
 
             except Exception as e:
-                progress.update(task_id, status=f"[red]Error: {str(e)}")
+                self.progress.update(self.task_id, status=f"[red]Error: {str(e)}")
+                raise e
                 return -1
 
         return 0
 
-    async def process_project(self, project_spec: ProjectSpec) -> None:
-        """Processes all modules in a ProjectSpec."""
-        (self.workdir / "external-src").mkdir(parents=True, exist_ok=True)
-
-        task_list_progress = Progress(
-            SpinnerColumn(),
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
-            TextColumn("{task.fields[status]}", table_column=Column(ratio=2)),
-        )
 
-
-
-
-            MofNCompleteColumn(),
-        )
-        count_task = task_count_progress.add_task("Processing Modules", total=len(project_spec.specs))
+async def process_project(project_spec: ProjectSpec, concurrency: int) -> None:
+    """Processes all modules in a ProjectSpec."""
+    (project_spec.workdir / "external-src").mkdir(parents=True, exist_ok=True)
 
-
-
-
-        )
-
-
-
+    task_list_progress = Progress(
+        SpinnerColumn(),
+        TextColumn("[progress.description]{task.description}"),
+        BarColumn(),
+        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+        TextColumn("{task.fields[status]}", table_column=Column(ratio=2)),
+    )
 
-
-
-
-
-
-
-            spec,
-            task_list_progress,
-            task_count_progress,
-            count_task,
-        )
-    )
+    task_count_progress = Progress(
+        TextColumn("[progress.description]{task.description}"),
+        BarColumn(),
+        MofNCompleteColumn(),
+    )
+    count_task = task_count_progress.add_task("Processing Modules", total=len(project_spec.repos))
 
-
-
-
-
+    progress_table = Table.grid()
+    progress_table.add_row(
+        task_list_progress,
+    )
+    progress_table.add_row(
+        task_count_progress,
+    )
 
+    semaphore = asyncio.Semaphore(concurrency)
+    with Live(progress_table, console=console, refresh_per_second=10):
+        tasks = []
+        for name, repo_info in project_spec.repos.items():
+            total_steps = len(repo_info.refspec_info) + 1
+            repo_processor = RepoProcessor(
+                project_spec.workdir,
+                name,
+                semaphore,
+                repo_info,
+                task_list_progress,
+                task_count_progress,
+                count_task,
+                concurrency,
+            )
+            tasks.append(repo_processor.process_repo())
 
-
-
-
-
-        return await processor.process_project(project_spec)
+        # this should error if a task crashes
+        return_codes = await asyncio.gather(*tasks)
+        if any(return_codes):
+            raise Exception()
bl/types.py
ADDED
@@ -0,0 +1,86 @@
+from pathlib import Path
+from dataclasses import dataclass
+from enum import Enum, IntEnum
+from typing import Dict, List, Optional
+
+
+class OriginType(Enum):
+    """Type of origin reference."""
+
+    BRANCH = "branch"
+    PR = "pr"
+    REF = "ref"
+
+
+@dataclass
+class Remote:
+    name: str
+    url: str
+
+
+class CloneFlags(IntEnum):
+    SHALLOW = 1
+    SPARSE = 2
+
+
+class RefspecInfo:
+    """A git refspec with its remote, type and optional frozen sha."""
+
+    def __init__(
+        self,
+        remote: str,
+        ref_str: str,
+        type: OriginType,
+        ref_name: Optional[str],
+    ):
+        self.remote = remote
+        self.refspec = ref_str
+        """ The refspec string (branch name, PR ref, or commit hash). """
+        self.type = type
+        self.ref_name = ref_name
+
+    def __repr__(self) -> str:
+        return f"RefspecInfo(remote={self.remote!r}, origin={self.refspec!r}, type={self.type.value})"
+
+
+@dataclass
+class CloneInfo:
+    url: str
+    clone_flags: int
+    root_refspec_info: RefspecInfo
+
+
+class RepoInfo:
+    """Represents the specification for a set of modules."""
+
+    def __init__(
+        self,
+        modules: List[str],
+        remotes: List[str] = {},
+        refspecs: List[RefspecInfo] = [],
+        shell_commands: List[str] = [],
+        patch_globs_to_apply: List[str] = [],
+        target_folder: Optional[str] = None,
+        locales: List[str] = [],
+    ):
+        self.modules = modules
+        self.remotes = remotes
+        self.refspec_info = refspecs
+        self.shell_commands = shell_commands
+        self.patch_globs_to_apply = patch_globs_to_apply
+        self.target_folder = target_folder
+        self.locales = locales
+
+    def __repr__(self) -> str:
+        return f"ModuleSpec(modules={self.modules}, remotes={self.remotes}, origins={self.refspec_info})"
+
+
+class ProjectSpec:
+    """Represents the overall project specification from the YAML file."""
+
+    def __init__(self, repos: Dict[str, RepoInfo], workdir: Path = Path(".")):
+        self.repos = repos
+        self.workdir = workdir
+
+    def __repr__(self) -> str:
+        return f"ProjectSpec(specs={self.repos}, workdir={self.workdir})"
bl/utils.py
CHANGED
@@ -4,7 +4,7 @@ from pathlib import Path
 from typing import Optional
 
 import warnings
-from bl.
+from bl.types import RepoInfo, OriginType, RefspecInfo
 
 
 english_env = os.environ.copy()
@@ -12,7 +12,7 @@ english_env = os.environ.copy()
 english_env["LANG"] = "en_US.UTF-8"
 
 
-def get_module_path(workdir: Path, module_name: str, module_spec:
+def get_module_path(workdir: Path, module_name: str, module_spec: RepoInfo) -> Path:
     """Returns the path to the module directory."""
     if module_name == "odoo" and module_spec.target_folder is None:
         warnings.warn(
bl_odoo-0.3.0.dist-info/RECORD
ADDED
@@ -0,0 +1,13 @@
+bl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bl/__main__.py,sha256=v1d-voJ7N1QBLGJJh8JdrTxXtzf0JFHQv4RUBxlCkcg,1428
+bl/freezer.py,sha256=wUYNd0zKU-0OGdIXSLJol-_xJmxSSkXvzV_VbF2HJyg,2512
+bl/spec_parser.py,sha256=nZTwMh_ja4Mc7CKqB7-5cwECHrP15xd1S8zb_LW79BU,4953
+bl/spec_processor.py,sha256=yejkFFig_Yfbv1Ail1xTpteFv-f7dE7gasF_YU1oTXs,17867
+bl/types.py,sha256=h14FXDVCrYRxY7lYTEu8jhdrEHr1PvSNyZRIHm33CTk,2158
+bl/utils.py,sha256=JNLsxgJgWaa71Xs62gcoOwJnPjNHkrW0q9HFB0vQ60E,1600
+bl_odoo-0.3.0.dist-info/licenses/LICENSE,sha256=GTVQl3vH6ht70wJXKC0yMT8CmXKHxv_YyO_utAgm7EA,1065
+bl_odoo-0.3.0.dist-info/METADATA,sha256=VRKm91QHoxE_y5hSphmULSNRdjzvyvcSfBbQ5iRwafs,391
+bl_odoo-0.3.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+bl_odoo-0.3.0.dist-info/entry_points.txt,sha256=fmdGhYYJlP-XByamgaZdM0bo3JK4LJFswU_Nilq6SSw,39
+bl_odoo-0.3.0.dist-info/top_level.txt,sha256=1o4tN3wszdw7U5SnGgdF5P2sTYA0Schf0vKFy9_2D6A,3
+bl_odoo-0.3.0.dist-info/RECORD,,
bl_odoo-0.2.6.dist-info/RECORD
DELETED
@@ -1,12 +0,0 @@
-bl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bl/__main__.py,sha256=v1d-voJ7N1QBLGJJh8JdrTxXtzf0JFHQv4RUBxlCkcg,1428
-bl/freezer.py,sha256=SXqfTXTys7tu-m94TftUjDLt7usNMkYoO10ZX62fRJg,2577
-bl/spec_parser.py,sha256=23_J5qf-20uSrKOu3drT0u2I1c9UwOGvp2Y7Ql8K5GE,7461
-bl/spec_processor.py,sha256=2_-nIlXd_M_eUpY44Q5VMtqPr-eyJn-nMZMDiDH9L0s,17579
-bl/utils.py,sha256=d6pmkwlMLU4jm94JMisd6LT31YJ_oyqgX50O3g5yzq4,1610
-bl_odoo-0.2.6.dist-info/licenses/LICENSE,sha256=GTVQl3vH6ht70wJXKC0yMT8CmXKHxv_YyO_utAgm7EA,1065
-bl_odoo-0.2.6.dist-info/METADATA,sha256=AZqNx-YmD-ERnmFVv7gUSgCcnQAbciiIem39qIDh3LU,391
-bl_odoo-0.2.6.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-bl_odoo-0.2.6.dist-info/entry_points.txt,sha256=fmdGhYYJlP-XByamgaZdM0bo3JK4LJFswU_Nilq6SSw,39
-bl_odoo-0.2.6.dist-info/top_level.txt,sha256=1o4tN3wszdw7U5SnGgdF5P2sTYA0Schf0vKFy9_2D6A,3
-bl_odoo-0.2.6.dist-info/RECORD,,
{bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/WHEEL: file without changes
{bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/entry_points.txt: file without changes
{bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/licenses/LICENSE: file without changes
{bl_odoo-0.2.6.dist-info → bl_odoo-0.3.0.dist-info}/top_level.txt: file without changes