bl-odoo 0.2.7__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: bl-odoo
- Version: 0.2.7
+ Version: 0.3.0
  Summary: A command-line tool for managing Odoo dependencies.
  Author-email: Your Name <your.email@example.com>
  License-Expression: MIT
@@ -1,13 +1,11 @@
  import asyncio
  import yaml
- from operator import countOf
  from pathlib import Path
- from typing import TextIO

  from rich.console import Console
  from rich.live import Live
  from rich.progress import BarColumn, MofNCompleteColumn, Progress, TaskID, TextColumn
- from bl.spec_parser import ModuleSpec, ProjectSpec
+ from bl.types import RepoInfo, ProjectSpec
  from bl.utils import get_local_ref, get_module_path, run_git

  console = Console()
@@ -18,7 +16,7 @@ async def freeze_spec(
      progress: Progress,
      task_id: TaskID,
      module_name: str,
-     module_spec: ModuleSpec,
+     module_spec: RepoInfo,
      workdir: Path,
  ):
      result = {module_name: {}}
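
The bl.types module that the files below import is not itself shown in this diff. The following sketch reconstructs what it plausibly contains, inferred only from how its names are used in the new code (dataclass fields follow the positional constructor calls and attribute accesses); it is a reconstruction for orientation, not the released code:

    # Hypothetical reconstruction of bl/types.py, inferred from usage in this diff.
    from dataclasses import dataclass
    from enum import Enum, IntFlag, auto
    from pathlib import Path
    from typing import Dict, List, Optional


    class OriginType(Enum):
        BRANCH = auto()  # a branch name such as "16.0"
        PR = auto()      # a GitHub pull request ref: refs/pull/{pr_id}/head
        REF = auto()     # a 40-character commit hash


    class CloneFlags(IntFlag):
        SHALLOW = auto()
        SPARSE = auto()


    @dataclass
    class RefspecInfo:
        remote: str                     # remote name, e.g. "origin"
        refspec: str                    # branch, PR ref, or commit sha
        type: OriginType
        ref_name: Optional[str] = None  # original ref name when frozen to a sha


    @dataclass
    class CloneInfo:
        url: str
        clone_flags: CloneFlags
        root_refspec_info: RefspecInfo


    @dataclass
    class RepoInfo:
        modules: List[str]
        remotes: Dict[str, str]          # remote name -> URL
        refspec_info: List[RefspecInfo]  # first entry is the base branch
        shell_commands: Optional[List[str]]
        patch_globs_to_apply: Optional[List[str]]
        target_folder: Optional[str]
        locales: List[str]


    @dataclass
    class ProjectSpec:
        repos: Dict[str, RepoInfo]
        workdir: Path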
@@ -0,0 +1,152 @@
+ import re
+ import warnings
+ from dataclasses import dataclass
+ from enum import Enum
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional
+
+ import yaml
+
+ from bl.types import RepoInfo, OriginType, ProjectSpec, RefspecInfo
+
+
+ def make_remote_merge_from_src(src: str) -> tuple[dict, list]:
+     """
+     Creates a remote and merge entry from the src string.
+     """
+     remotes = {}
+     merges = []
+
+     parts = src.split(" ", 1)
+     remotes["origin"] = parts[0]
+     merges.append(f"origin {parts[1]}")
+
+     return remotes, merges
+
+
+ def get_origin_type(origin_value: str) -> OriginType:
+     """
+     Determines the origin type based on the origin value.
+
+     Args:
+         origin_value: The origin string to evaluate.
+
+     Returns:
+         The corresponding OriginType.
+     """
+     # Pattern that matches GitHub PR references: refs/pull/{pr_id}/head
+     pr_pattern = re.compile(r"^refs/pull/\d+/head$")
+     # Pattern that matches git reference hashes (40 hex characters)
+     ref_pattern = re.compile(r"^[a-z0-9]{40}$")
+
+     if pr_pattern.match(origin_value):
+         return OriginType.PR
+     elif ref_pattern.match(origin_value):
+         return OriginType.REF
+     else:
+         return OriginType.BRANCH
+
+
+ def parse_remote_refspec_from_parts(parts: List[str], frozen_repo: Dict[str, Dict[str, str]]):
+     if len(parts) == 2:
+         parts.insert(1, "")
+     else:
+         warnings.warn(
+             "Deprecated src format: use <url> <sha> format for the src property",
+             DeprecationWarning,
+         )
+     remote_key, _, ref_spec = parts
+     ref_type = get_origin_type(ref_spec)
+
+     ref_name = None
+     remote_freezes = frozen_repo.get(remote_key, {})
+
+     if ref_spec in remote_freezes:
+         ref_type = OriginType.REF
+         ref_name = ref_spec
+         ref_spec = remote_freezes.get(ref_name)
+
+     return RefspecInfo(remote_key, ref_spec, ref_type, ref_name)
+
+
+ def load_spec_file(config: Path, frozen: Path, workdir: Path) -> Optional[ProjectSpec]:
+     """
+     Loads and parses the project specification from a YAML file.
+
+     Args:
+         config: The path to the YAML specification file.
+
+     Returns:
+         A ProjectSpec object if successful, None otherwise.
+     """
+     if not config.exists():
+         if config.is_relative_to("."):
+             config = config.resolve()
+             # If the file is not in the current directory, check inside the odoo subdirectory
+             odoo_config = config.parent / "odoo" / config.name
+             # TODO(franz): should use rich console for prettiness
+             if not odoo_config.exists():
+                 print(f"Error: Neither '{config}' nor '{odoo_config}' exists.")
+                 return None
+             config = odoo_config
+         else:
+             print(f"Error: File '{config}' does not exist.")
+             return None
+
+     workdir = workdir or config.parent
+
+     with config.open("r") as f:
+         try:
+             data: Dict[str, Any] = yaml.safe_load(f)
+         except yaml.YAMLError as e:
+             print(f"Error parsing YAML file '{config}': {e}")
+             return None
+
+     frozen_mapping: Dict[str, Dict[str, Dict[str, str]]] = {}
+     frozen_path = frozen or Path(config).with_name("frozen.yaml")
+     if frozen_path.exists():
+         try:
+             with frozen_path.open("r") as frozen_file:
+                 loaded_freezes = yaml.safe_load(frozen_file) or {}
+                 if isinstance(loaded_freezes, dict):
+                     frozen_mapping = loaded_freezes
+         except yaml.YAMLError as e:
+             print(f"Error parsing frozen YAML file '{frozen_path}': {e}")
+
+     repos: Dict[str, RepoInfo] = {}
+     for repo_name, repo_data in data.items():
+         modules = repo_data.get("modules", [])
+         src = repo_data.get("src")
+         remotes = repo_data.get("remotes") or {}
+         merges = repo_data.get("merges") or []
+         shell_commands = repo_data.get("shell_command_after") or None
+         patch_globs_to_apply = repo_data.get("patch_globs") or None
+         target_folder = repo_data.get("target_folder") or None
+         locales = repo_data.get("locales", [])
+
+         frozen_repo = frozen_mapping.get(repo_name, {})
+
+         # Parse merges into RefspecInfo objects
+         refspec_infos: List[RefspecInfo] = []
+         if src:
+             # If src is defined, create a remote and merge entry from it
+             src_remotes, src_merges = make_remote_merge_from_src(src)
+             remotes.update(src_remotes)
+             merges = src_merges + merges
+
+         for merge_entry in merges:
+             parts = merge_entry.split(" ", 2)
+             refspec_info = parse_remote_refspec_from_parts(parts, frozen_repo)
+             refspec_infos.append(refspec_info)
+
+         repos[repo_name] = RepoInfo(
+             modules,
+             remotes,
+             refspec_infos,
+             shell_commands,
+             patch_globs_to_apply,
+             target_folder,
+             locales,
+         )
+
+     return ProjectSpec(repos, workdir)
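
load_spec_file reads a top-level mapping of repo names to repo settings; the recognized keys are exactly the ones pulled out with .get() above. A hypothetical spec.yaml and frozen.yaml pair it would accept (the repo name, URL, PR number, and sha are invented for illustration):

    # spec.yaml (illustrative only)
    server-tools:
      modules:
        - base_technical_features
      src: https://github.com/OCA/server-tools 16.0
      merges:
        - origin refs/pull/1234/head
      patch_globs:
        - patches/server-tools/*.patch
      locales:
        - fr

    # frozen.yaml (illustrative only): repo -> remote -> ref name -> pinned sha
    server-tools:
      origin:
        "16.0": 0123456789abcdef0123456789abcdef01234567

When "16.0" appears under origin in frozen.yaml, parse_remote_refspec_from_parts rewrites that merge entry to the pinned sha, records the original name in ref_name, and marks it as OriginType.REF.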
@@ -0,0 +1,470 @@
+ import asyncio
+ import os
+ import warnings
+ from pathlib import Path
+ from typing import Dict, List
+
+ from rich.console import Console
+ from rich.live import Live
+ from rich.progress import BarColumn, MofNCompleteColumn, Progress, SpinnerColumn, TaskID, TextColumn
+ from rich.table import Column, Table
+ from typing_extensions import deprecated
+
+ from bl.utils import english_env, get_local_ref, get_module_path, run_git
+
+ from bl.types import CloneFlags, CloneInfo, OriginType, ProjectSpec, RefspecInfo, RepoInfo
+
+ console = Console()
+
+ # TODO(franz): it's a bit better now, but keep an eye on it
+ # TODO(franz): Error handling should be watched carefully: if we don't
+ # exit on some error code because git resolved to the parent repo, we
+ # could activate sparse checkout on a parent folder. We should probably
+ # make a function that handles errors in a unified manner and crashes
+ # if the error is on a vital part of the process.
+
+
+ def rich_warning(message, category, filename, lineno, file=None, line=None):
+     console.print(f"[yellow]Warning:[/] {category.__name__}: {message}\n[dim]{filename}:{lineno}[/]")
+
+
+ warnings.showwarning = rich_warning
+ warnings.simplefilter("default", DeprecationWarning)
+
+
+ def check_path_is_repo(module_path: Path):
+     # TODO(franz): add check for .git folder
+     return not module_path.exists() or not module_path.is_dir()
+
+
+ def clone_info_from_repo(name: str, repo_info: RepoInfo):
+     flags = CloneFlags.SHALLOW if name == "odoo" or len(repo_info.refspec_info) == 1 else 0
+     flags |= CloneFlags.SPARSE if name != "odoo" or len(repo_info.locales) > 0 else 0
+     root_refspec_info = repo_info.refspec_info[0]
+     remote_url = repo_info.remotes.get(root_refspec_info.remote)
+
+     return CloneInfo(
+         remote_url,
+         flags,
+         root_refspec_info,
+     )
+
+
+ # For a single branch we should clone shallow, but for the others we should
+ # clone deep; this allows merge-base to work, and git can then merge by
+ # pulling the minimum amount of data.
+ def create_clone_args(clone_info: CloneInfo) -> List[str]:
+     """Creates git clone arguments based on the base origin."""
+     args = [
+         "clone",
+         "--filter=tree:0",
+     ]
+
+     if clone_info.clone_flags & CloneFlags.SHALLOW:
+         args += [
+             "--depth",
+             "1",
+         ]
+     if clone_info.clone_flags & CloneFlags.SPARSE:
+         args += ["--sparse"]
+
+     ref_spec_info = clone_info.root_refspec_info
+
+     if ref_spec_info.type == OriginType.REF:
+         args += [
+             "--revision",
+             ref_spec_info.refspec,
+         ]
+     else:
+         args += [
+             "--origin",
+             ref_spec_info.remote,
+             "--branch",
+             ref_spec_info.refspec,
+         ]
+
+     args += [
+         clone_info.url,
+     ]
+
+     return args
+
+
+ def normalize_merge_result(ret: int, out: str, err: str):
+     if "CONFLICT" in out:
+         return -1, out
+
+     return ret, err
+
+
+ class RepoProcessor:
+     """
+     Processes a ProjectSpec by concurrently cloning and merging modules.
+     """
+
+     def __init__(
+         self,
+         workdir: Path,
+         name: str,
+         semaphore: asyncio.Semaphore,
+         repo_info: RepoInfo,
+         progress: Progress,
+         count_progress: Progress,
+         count_task: TaskID,
+         concurrency: int,
+     ):
+         self.workdir = workdir
+         self.name = name
+         self.semaphore = semaphore
+         self.repo_info = repo_info
+         self.progress = progress
+         self.count_progress = count_progress
+         self.count_task = count_task
+         self.concurrency = concurrency
+
+     @deprecated(
+         "run_shell_commands is deprecated if used to apply patches. Use patch_globs properties in spec.yaml instead."
+     )
+     async def run_shell_commands(self, repo_info: RepoInfo, module_path: Path) -> int:
+         for cmd in repo_info.shell_commands:
+             self.progress.update(self.task_id, status=f"Running shell command: {cmd}...")
+             proc = await asyncio.create_subprocess_shell(
+                 cmd,
+                 cwd=str(module_path),
+                 stdout=asyncio.subprocess.PIPE,
+                 stderr=asyncio.subprocess.PIPE,
+                 env=english_env,
+             )
+             stdout, stderr = await proc.communicate()
+             if proc.returncode != 0:
+                 # This is a sanity check because people usually put "git am" commands
+                 # in shell_commands, so we abort any ongoing git am
+                 await run_git("am", "--abort", cwd=str(module_path))
+                 self.progress.update(
+                     self.task_id,
+                     status=f"[red]Shell command failed: {cmd}\nError: {stderr.decode().strip()}",
+                 )
+                 return -1
+         return 0
+
+     async def setup_new_repo(
+         self,
+         clone_info: CloneInfo,
+         module_path: Path,
+     ) -> int:
+         root_refspec_info = clone_info.root_refspec_info
+         remote = root_refspec_info.remote
+         root_refspec = root_refspec_info.refspec
+
+         self.progress.update(
+             self.task_id,
+             status=(f"Cloning {remote}/{root_refspec}"),
+         )
+
+         clone_args = create_clone_args(clone_info)
+         ret, out, err = await run_git(*clone_args, module_path)
+
+         if ret != 0:
+             status_message = (
+                 f"[red]Clone failed {root_refspec_info.remote}({clone_info.url})/{root_refspec_info.refspec}"
+                 + f" -> {module_path}:\n{err}"
+             )
+             self.progress.update(self.task_id, status=status_message)
+             return ret
+
+         local_ref = get_local_ref(root_refspec_info)
+         ret, out, err = await run_git("checkout", "-b", local_ref, cwd=module_path)
+
+         return 0
+
+     async def reset_repo_for_work(self, module_path: Path) -> int:
+         # TODO(franz): we should test if the folder is a git repo or not
+
+         ret, out, err = await run_git("status", "--porcelain", cwd=module_path)
+
+         if out != "":
+             self.progress.update(self.task_id, status=f"[red]Repo is dirty:\n{out}")
+             return -1
+         if ret != 0:
+             self.progress.update(self.task_id, status="[red]Repo does not exist")
+             return ret
+         # Reset all the local origins to their remote origins
+         repo_info = self.repo_info
+         root_refspec_info = repo_info.refspec_info[0]
+
+         self.progress.update(
+             self.task_id,
+             status=(f"Resetting existing repository for {root_refspec_info.remote}/{root_refspec_info.refspec}"),
+         )
+
+         s_ret, s_out, s_err = await run_git("rev-parse", "--is-shallow-repository", cwd=module_path)
+         if len(repo_info.refspec_info) > 1 and s_out == "true":
+             await run_git("fetch", "--unshallow", cwd=module_path)
+
+         reset_target = get_local_ref(root_refspec_info)
+         ret, out, err = await run_git("reset", "--hard", reset_target, cwd=module_path)
+         if ret != 0:
+             self.progress.update(self.task_id, status=f"[red]Reset failed: {err}")
+             return ret
+
+         return 0
+
+     def link_all_modules(self, module_list: List[str], module_path: Path) -> tuple[int, str]:
+         links_path = self.workdir / "links"
+         links_path.mkdir(exist_ok=True)
+
+         # Remove any existing symlinks before re-creating them
+
+         for module_name in module_list:
+             try:
+                 path_src_symlink = module_path / module_name
+                 path_dest_symlink = links_path / module_name
+
+                 if path_dest_symlink.is_symlink():
+                     path_dest_symlink.unlink()
+
+                 os.symlink(path_src_symlink.relative_to(links_path, walk_up=True), path_dest_symlink, True)
+             except OSError as e:
+                 return -1, str(e)
+
+         return 0, ""
+
+     async def merge_spec_into_tree(
+         self,
+         spec: RepoInfo,
+         refspec_info: RefspecInfo,
+         root_refspec_info: RefspecInfo,
+         module_path: Path,
+     ) -> tuple[int, str]:
+         # This is weird...
+         remote_url = spec.remotes.get(refspec_info.remote) or refspec_info.remote
+
+         local_ref = get_local_ref(refspec_info)
+         remote_ref = refspec_info.refspec
+
+         # Merge
+         # I think the idea would be to not fetch shallow but fetch treeless and do a merge-base
+         # then fetch the required data and then merge
+         self.progress.update(self.task_id, status=f"Merging {local_ref}", advance=0.1)
+         ret, out, err = await run_git("merge", "--no-edit", local_ref, cwd=module_path)
+         ret, err = normalize_merge_result(ret, out, err)
+
+         if "CONFLICT" in err:
+             self.progress.update(self.task_id, status=f"[red]Merge conflict {local_ref} in {remote_ref}: {err}")
+             # In case of conflict, we might want to abort the merge
+             await run_git("merge", "--abort", cwd=module_path)
+             return ret, err
+
+         if ret != 0:
+             self.progress.update(self.task_id, status=f"[red]Merge error {local_ref} in {remote_ref}: {err}")
+             return ret, err
+
+         return 0, ""
+
+     def get_refspec_by_remote(self, refspec_info_list: List[RefspecInfo]) -> Dict[str, List[RefspecInfo]]:
+         result = {}
+
+         for spec in refspec_info_list:
+             spec_list = result.get(spec.remote, [])
+             spec_list.append(spec)
+             result[spec.remote] = spec_list
+
+         return result
+
+     async def fetch_multi(self, remote: str, refspec_info_list: List[RefspecInfo], module_path: Path):
+         args = [
+             "fetch",
+             "-j",
+             str(self.concurrency),
+             remote,
+         ]
+
+         for refspec_info in refspec_info_list:
+             local_ref = get_local_ref(refspec_info)
+             args += [f"{refspec_info.refspec}:{local_ref}"]
+
+         ret, out, err = await run_git(*args, cwd=module_path)
+
+         return ret, out, err
+
+     def filter_non_link_module(self, spec: RepoInfo):
+         result = []
+         base_path_links = self.workdir / "links"
+         for module in spec.modules:
+             path = base_path_links / module
+             if path.is_symlink() or not path.exists():
+                 result.append(module)
+             else:
+                 console.print(
+                     f"[purple]Watch out![/] {module} is not a symlink and will be assumed "
+                     + "to be a local module\nIt will not be fetched or linked"
+                 )
+         return result
+
+     async def setup_odoo_sparse(self, module_spec: RepoInfo, module_path: Path):
+         list_modules = module_spec.modules
+
+         await run_git("sparse-checkout", "init", "--no-cone", cwd=module_path)
+         included_po = [f"{locale}.po" for locale in module_spec.locales]
+         included_modules = [f"/addons/{module}/*" for module in list_modules]
+         await run_git(
+             "sparse-checkout",
+             "set",
+             "/*",
+             "!/addons/*",
+             *included_modules,
+             "!*.po",
+             *included_po,
+             cwd=module_path,
+         )
+
+     async def setup_sparse_checkout(self, symlink_modules: List[str], module_path: Path):
+         # 2. Sparse checkout setup
+         if self.name != "odoo":
+             self.progress.update(self.task_id, status="Configuring sparse checkout...")
+             await run_git("sparse-checkout", "init", "--cone", cwd=module_path)
+             if symlink_modules:
+                 await run_git("sparse-checkout", "set", *self.repo_info.modules, cwd=module_path)
+         elif len(self.repo_info.locales) > 0:
+             # TODO(franz): We should still set sparse if there are no locales but there is a module list
+             self.progress.update(self.task_id, status="Configuring sparse odoo checkout...")
+             await self.setup_odoo_sparse(self.repo_info, module_path)
+
+     async def process_repo(self) -> int:
+         """Processes a single RepoInfo."""
+         symlink_modules = self.filter_non_link_module(self.repo_info)
+         module_path = get_module_path(self.workdir, self.name, self.repo_info)
+
+         async with self.semaphore:
+             try:
+                 self.task_id = self.progress.add_task(
+                     f"[cyan]{self.name}", status="Waiting...", total=len(self.repo_info.refspec_info) + 1
+                 )
+                 if not self.repo_info.refspec_info:
+                     self.progress.update(self.task_id, status="[yellow]No origins defined", completed=1)
+                     return -1
+
+                 # TODO(franz): the shallowness and sparseness of the repo should be unified
+                 # so that we don't need all these conditions
+                 if check_path_is_repo(module_path):
+                     clone_info = clone_info_from_repo(self.name, self.repo_info)
+                     ret = await self.setup_new_repo(clone_info, module_path)
+                 else:
+                     ret = await self.reset_repo_for_work(module_path)
+
+                 if ret != 0:
+                     return -1
+
+                 await self.setup_sparse_checkout(symlink_modules, module_path)
+
+                 checkout_target = "merged"
+
+                 await run_git("checkout", "-b", checkout_target, cwd=module_path)
+                 self.progress.advance(self.task_id)
+
+                 for remote, remote_url in self.repo_info.remotes.items():
+                     await run_git("remote", "add", remote, remote_url, cwd=module_path)
+                     await run_git("config", f"remote.{remote}.partialCloneFilter", "tree:0", cwd=module_path)
+                     await run_git("config", f"remote.{remote}.promisor", "true", cwd=module_path)
+
+                 refspec_by_remote: Dict[str, List[RefspecInfo]] = self.get_refspec_by_remote(
+                     self.repo_info.refspec_info
+                 )
+
+                 # TODO(franz): right now we fetch everything, so when the repo has just been cloned
+                 # we fetch the base branch twice. Since we fetch with multi this is probably not
+                 # a big issue, but it could be better
+                 for remote, refspec_list in refspec_by_remote.items():
+                     self.progress.update(self.task_id, status=f"Fetching multi from {remote}")
+                     await self.fetch_multi(remote, refspec_list, module_path)
+
+                 # 4. Fetch and merge remaining origins
+                 for refspec_info in self.repo_info.refspec_info[1:]:
+                     ret, err = await self.merge_spec_into_tree(
+                         self.repo_info, refspec_info, self.repo_info.refspec_info[0], module_path
+                     )
+                     if ret != 0:
+                         return -1
+                     self.progress.advance(self.task_id)
+
+                 if self.repo_info.shell_commands:
+                     ret = await self.run_shell_commands(self.repo_info, module_path)
+                     if ret != 0:
+                         return ret
+
+                 if self.repo_info.patch_globs_to_apply:
+                     for glob in self.repo_info.patch_globs_to_apply:
+                         self.progress.update(self.task_id, status=f"Applying patches: {glob}...", advance=0.1)
+                         ret, out, err = await run_git("am", glob, cwd=module_path)
+                         if ret != 0:
+                             await run_git("am", "--abort", cwd=module_path)
+                             self.progress.update(self.task_id, status=f"[red]Applying patches failed: {err}")
+                             return ret
+
+                 self.progress.update(self.task_id, status="Linking directory")
+                 if self.name != "odoo":
+                     ret, err = self.link_all_modules(symlink_modules, module_path)
+                     if ret != 0:
+                         self.progress.update(self.task_id, status=f"[red]Could not link modules: {err}")
+                         return ret
+
+                 self.progress.update(self.task_id, status="[green]Complete", advance=1)
+                 self.progress.remove_task(self.task_id)
+                 self.count_progress.advance(self.count_task)
+
+             except Exception as e:
+                 self.progress.update(self.task_id, status=f"[red]Error: {str(e)}")
+                 raise e
+                 return -1
+
+             return 0
+
+
+
+ async def process_project(project_spec: ProjectSpec, concurrency: int) -> None:
+     """Processes all modules in a ProjectSpec."""
+     (project_spec.workdir / "external-src").mkdir(parents=True, exist_ok=True)
+
+     task_list_progress = Progress(
+         SpinnerColumn(),
+         TextColumn("[progress.description]{task.description}"),
+         BarColumn(),
+         TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+         TextColumn("{task.fields[status]}", table_column=Column(ratio=2)),
+     )
+
+     task_count_progress = Progress(
+         TextColumn("[progress.description]{task.description}"),
+         BarColumn(),
+         MofNCompleteColumn(),
+     )
+     count_task = task_count_progress.add_task("Processing Modules", total=len(project_spec.repos))
+
+     progress_table = Table.grid()
+     progress_table.add_row(
+         task_list_progress,
+     )
+     progress_table.add_row(
+         task_count_progress,
+     )
+
+     semaphore = asyncio.Semaphore(concurrency)
+     with Live(progress_table, console=console, refresh_per_second=10):
+         tasks = []
+         for name, repo_info in project_spec.repos.items():
+             total_steps = len(repo_info.refspec_info) + 1
+             repo_processor = RepoProcessor(
+                 project_spec.workdir,
+                 name,
+                 semaphore,
+                 repo_info,
+                 task_list_progress,
+                 task_count_progress,
+                 count_task,
+                 concurrency,
+             )
+             tasks.append(repo_processor.process_repo())
+
+         # this should error if a task crashes
+         return_codes = await asyncio.gather(*tasks)
+         if any(return_codes):
+             raise Exception()
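
End to end, a minimal driver for the two new modules might look like the sketch below. The module paths are assumptions, since file names are not visible in this diff: load_spec_file plausibly lives in bl/spec_parser.py given the old import in sync.py, while the home of process_project is unknown.

    # Hypothetical driver; module paths are guesses, not taken from the release.
    import asyncio
    from pathlib import Path

    from bl.spec_parser import load_spec_file  # assumed location
    from bl.repo_processor import process_project  # assumed location


    def main() -> None:
        # Parse spec.yaml (and frozen.yaml, if present) into a ProjectSpec.
        spec = load_spec_file(Path("spec.yaml"), Path("frozen.yaml"), Path("."))
        if spec is None:
            raise SystemExit(1)
        # Clone/reset, fetch, merge, patch, and link every repo with up to
        # four git operations in flight at once.
        asyncio.run(process_project(spec, concurrency=4))


    if __name__ == "__main__":
        main()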