odoo-addon-odoo-repository 16.0.1.3.0.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. odoo/addons/odoo_repository/README.rst +81 -0
  2. odoo/addons/odoo_repository/__init__.py +2 -0
  3. odoo/addons/odoo_repository/__manifest__.py +58 -0
  4. odoo/addons/odoo_repository/controllers/__init__.py +1 -0
  5. odoo/addons/odoo_repository/controllers/main.py +32 -0
  6. odoo/addons/odoo_repository/data/ir_cron.xml +38 -0
  7. odoo/addons/odoo_repository/data/odoo.repository.csv +216 -0
  8. odoo/addons/odoo_repository/data/odoo_branch.xml +82 -0
  9. odoo/addons/odoo_repository/data/odoo_module.xml +16 -0
  10. odoo/addons/odoo_repository/data/odoo_repository.xml +71 -0
  11. odoo/addons/odoo_repository/data/odoo_repository_addons_path.xml +59 -0
  12. odoo/addons/odoo_repository/data/odoo_repository_org.xml +14 -0
  13. odoo/addons/odoo_repository/data/queue_job.xml +56 -0
  14. odoo/addons/odoo_repository/lib/__init__.py +0 -0
  15. odoo/addons/odoo_repository/lib/scanner.py +1302 -0
  16. odoo/addons/odoo_repository/migrations/16.0.1.1.0/post-migration.py +26 -0
  17. odoo/addons/odoo_repository/migrations/16.0.1.2.0/pre-migration.py +43 -0
  18. odoo/addons/odoo_repository/migrations/16.0.1.3.0/post-migration.py +19 -0
  19. odoo/addons/odoo_repository/models/__init__.py +18 -0
  20. odoo/addons/odoo_repository/models/authentication_token.py +12 -0
  21. odoo/addons/odoo_repository/models/odoo_author.py +16 -0
  22. odoo/addons/odoo_repository/models/odoo_branch.py +111 -0
  23. odoo/addons/odoo_repository/models/odoo_license.py +16 -0
  24. odoo/addons/odoo_repository/models/odoo_maintainer.py +31 -0
  25. odoo/addons/odoo_repository/models/odoo_module.py +24 -0
  26. odoo/addons/odoo_repository/models/odoo_module_branch.py +873 -0
  27. odoo/addons/odoo_repository/models/odoo_module_branch_version.py +123 -0
  28. odoo/addons/odoo_repository/models/odoo_module_category.py +15 -0
  29. odoo/addons/odoo_repository/models/odoo_module_dev_status.py +15 -0
  30. odoo/addons/odoo_repository/models/odoo_python_dependency.py +16 -0
  31. odoo/addons/odoo_repository/models/odoo_repository.py +664 -0
  32. odoo/addons/odoo_repository/models/odoo_repository_addons_path.py +40 -0
  33. odoo/addons/odoo_repository/models/odoo_repository_branch.py +98 -0
  34. odoo/addons/odoo_repository/models/odoo_repository_org.py +23 -0
  35. odoo/addons/odoo_repository/models/res_company.py +23 -0
  36. odoo/addons/odoo_repository/models/res_config_settings.py +23 -0
  37. odoo/addons/odoo_repository/models/ssh_key.py +12 -0
  38. odoo/addons/odoo_repository/readme/CONTRIBUTORS.rst +2 -0
  39. odoo/addons/odoo_repository/readme/DESCRIPTION.rst +1 -0
  40. odoo/addons/odoo_repository/security/ir.model.access.csv +27 -0
  41. odoo/addons/odoo_repository/security/res_groups.xml +25 -0
  42. odoo/addons/odoo_repository/static/description/README +4 -0
  43. odoo/addons/odoo_repository/static/description/icon.png +0 -0
  44. odoo/addons/odoo_repository/static/description/index.html +430 -0
  45. odoo/addons/odoo_repository/tests/__init__.py +6 -0
  46. odoo/addons/odoo_repository/tests/common.py +162 -0
  47. odoo/addons/odoo_repository/tests/test_base_scanner.py +214 -0
  48. odoo/addons/odoo_repository/tests/test_odoo_module_branch.py +97 -0
  49. odoo/addons/odoo_repository/tests/test_odoo_repository_scan.py +242 -0
  50. odoo/addons/odoo_repository/tests/test_repository_scanner.py +215 -0
  51. odoo/addons/odoo_repository/tests/test_sync_node.py +55 -0
  52. odoo/addons/odoo_repository/tests/test_utils.py +25 -0
  53. odoo/addons/odoo_repository/utils/__init__.py +0 -0
  54. odoo/addons/odoo_repository/utils/github.py +30 -0
  55. odoo/addons/odoo_repository/utils/module.py +25 -0
  56. odoo/addons/odoo_repository/utils/scanner.py +90 -0
  57. odoo/addons/odoo_repository/views/authentication_token.xml +63 -0
  58. odoo/addons/odoo_repository/views/menu.xml +38 -0
  59. odoo/addons/odoo_repository/views/odoo_author.xml +54 -0
  60. odoo/addons/odoo_repository/views/odoo_branch.xml +84 -0
  61. odoo/addons/odoo_repository/views/odoo_license.xml +40 -0
  62. odoo/addons/odoo_repository/views/odoo_maintainer.xml +69 -0
  63. odoo/addons/odoo_repository/views/odoo_module.xml +90 -0
  64. odoo/addons/odoo_repository/views/odoo_module_branch.xml +353 -0
  65. odoo/addons/odoo_repository/views/odoo_module_category.xml +40 -0
  66. odoo/addons/odoo_repository/views/odoo_module_dev_status.xml +40 -0
  67. odoo/addons/odoo_repository/views/odoo_python_dependency.xml +40 -0
  68. odoo/addons/odoo_repository/views/odoo_repository.xml +165 -0
  69. odoo/addons/odoo_repository/views/odoo_repository_addons_path.xml +49 -0
  70. odoo/addons/odoo_repository/views/odoo_repository_branch.xml +60 -0
  71. odoo/addons/odoo_repository/views/odoo_repository_org.xml +54 -0
  72. odoo/addons/odoo_repository/views/res_config_settings.xml +123 -0
  73. odoo/addons/odoo_repository/views/ssh_key.xml +63 -0
  74. odoo_addon_odoo_repository-16.0.1.3.0.13.dist-info/METADATA +100 -0
  75. odoo_addon_odoo_repository-16.0.1.3.0.13.dist-info/RECORD +77 -0
  76. odoo_addon_odoo_repository-16.0.1.3.0.13.dist-info/WHEEL +5 -0
  77. odoo_addon_odoo_repository-16.0.1.3.0.13.dist-info/top_level.txt +1 -0
odoo/addons/odoo_repository/lib/scanner.py (new file, 1302 lines added)
# Copyright 2023 Camptocamp SA
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl)

import ast
import contextlib
import json
import logging
import os
import pathlib
import re
import shutil
import tempfile
import time
from urllib.parse import urlparse, urlunparse

import git
import oca_port
from odoo_addons_parser import ModuleParser

# Disable logging from 'pygount' (used by odoo_addons_parser)
logging.getLogger("pygount").setLevel(logging.ERROR)

_logger = logging.getLogger(__name__)

# Paths ending with these patterns are ignored: if all scanned commits only
# update such files, the underlying module won't be scanned, to preserve resources.
IGNORE_FILES = [".po", ".pot", "README.rst", "index.html"]

MANIFEST_FILES = ("__manifest__.py", "__openerp__.py")

AUTHOR_EMAILS_TO_SKIP = [
    "transbot@odoo-community.org",
    "noreply@weblate.org",
    "oca-git-bot@odoo-community.org",
    "oca+oca-travis@odoo-community.org",
    "oca-ci@odoo-community.org",
    "shopinvader-git-bot@shopinvader.com",
]

SUMMARY_TERMS_TO_SKIP = [
    "Translated using Weblate",
    "Added translation using Weblate",
]

@contextlib.contextmanager
def set_env(**environ):
    """
    Temporarily set the process environment variables.

    >>> with set_env(PLUGINS_DIR='test/plugins'):
    ...     "PLUGINS_DIR" in os.environ
    True

    >>> "PLUGINS_DIR" in os.environ
    False

    :type environ: dict[str, unicode]
    :param environ: Environment variables to set
    """
    # Copied from:
    # https://stackoverflow.com/questions/2059482/
    # temporarily-modify-the-current-processs-environment
    old_environ = dict(os.environ)
    os.environ.update(environ)
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(old_environ)

class BaseScanner:
    _dirname = "odoo-repositories"

    def __init__(
        self,
        org: str,
        name: str,
        clone_url: str,
        branches: list,
        repositories_path: str = None,
        repo_type: str = None,
        ssh_key: str = None,
        token: str = None,
        workaround_fs_errors: bool = False,
        clone_name: str = None,
    ):
        self.org = org
        self.name = name
        self.clone_url = self._prepare_clone_url(repo_type, clone_url, token)
        self.branches = branches
        self.repositories_path = self._prepare_repositories_path(repositories_path)
        self.clone_name = clone_name
        self.path = self.repositories_path.joinpath(
            self.org, self.clone_name or self.name
        )
        self.repo_type = repo_type
        self.ssh_key = ssh_key
        self.token = token
        self.workaround_fs_errors = workaround_fs_errors
    def sync(self, fetch=True):
        res = True
        self._apply_git_global_config()
        # Clone or update the repository
        if not self.is_cloned:
            res = self._clone()
        if self.is_cloned:
            with self.repo() as repo:
                self._apply_git_config(repo)
                self._set_git_remote_url(repo, "origin", self.clone_url)
                if fetch:
                    res = self._fetch(repo)
        return res

    @contextlib.contextmanager
    def _get_git_env(self):
        """Context manager yielding env variables used by Git invocations."""
        git_env = {}
        if self.ssh_key:
            with self._get_ssh_key() as ssh_key_path:
                git_ssh_cmd = f"ssh -o StrictHostKeyChecking=no -i {ssh_key_path}"
                git_env.update(GIT_SSH_COMMAND=git_ssh_cmd, GIT_TRACE="true")
                yield git_env
        else:
            yield git_env

    @contextlib.contextmanager
    def _get_ssh_key(self):
        """Save the SSH key in a temporary file and yield its path."""
        with tempfile.NamedTemporaryFile() as fp:
            fp.write(self.ssh_key.encode())
            fp.flush()
            ssh_key_path = fp.name
            yield ssh_key_path
    @staticmethod
    def _prepare_clone_url(repo_type, clone_url, token):
        """Return the URL used to clone/fetch the repository.

        If a token is provided it will be inserted automatically.
        """
        if repo_type in ("github", "gitlab") and token:
            parts = list(urlparse(clone_url))
            if parts[0].startswith("http"):
                # Update 'netloc' part to prefix it with the OAuth token
                parts[1] = f"oauth2:{token}@" + parts[1]
                clone_url = urlunparse(parts)
        return clone_url

    @classmethod
    def _prepare_repositories_path(cls, repositories_path=None):
        if not repositories_path:
            default_data_dir_path = (
                pathlib.Path.home().joinpath(".local").joinpath("share")
            )
            repositories_path = pathlib.Path(
                os.environ.get("XDG_DATA_HOME", default_data_dir_path),
                cls._dirname,
            )
        repositories_path = pathlib.Path(repositories_path)
        repositories_path.mkdir(parents=True, exist_ok=True)
        return repositories_path
    def _apply_git_global_config(self):
        # Avoid 'fatal: detected dubious ownership in repository' errors
        # when performing operations in git repositories in case they are
        # cloned on a mounted filesystem with specific options.
        if self.workaround_fs_errors:
            # NOTE: ensure to unset existing entry before adding one, as git doesn't
            # check if an entry already exists, generating duplicates
            os.system(r"git config --global --unset safe.directory '\*'")
            os.system("git config --global --add safe.directory '*'")

    def _apply_git_config(self, repo):
        with repo.config_writer() as writer:
            # This avoids too high memory consumption (default git config could
            # crash the Odoo workers when the scanner is run by Odoo itself).
            # This is especially useful to checkout big repositories like odoo/odoo.
            writer.set_value("core", "packedGitLimit", "128m")
            writer.set_value("core", "packedGitWindowSize", "32m")
            writer.set_value("pack", "windowMemory", "64m")
            writer.set_value("pack", "threads", "1")
            # Avoid issues with file permissions for mounted filesystems
            # with specific options.
            writer.set_value("core", "filemode", "false")
            # Disable some GC features for performance (memory and IO)
            # Reflog clean up is triggered automatically by some commands.
            # We assume that we scan upstream branches that will never contain
            # orphaned commits to clean up, so some GC features are useless in
            # this context.
            writer.set_value("gc", "pruneExpire", "never")
            writer.set_value("gc", "worktreePruneExpire", "never")
            writer.set_value("gc", "reflogExpire", "never")
            writer.set_value("gc", "reflogExpireUnreachable", "never")

    def _set_git_remote_url(self, repo, remote, url):
        """Ensure that `remote` has `url` set."""
        # Check the URL first before setting it, as this triggers a 'chmod'
        # command on the '.git/config' file (to protect sensitive data) that
        # might not be allowed on some mounted file systems.
        if remote in repo.remotes:
            if repo.remotes[remote].url != url:
                repo.remotes[remote].set_url(url)
        else:
            repo.create_remote(remote, url)

    @property
    def is_cloned(self):
        return self.path.joinpath(".git").exists()

    @contextlib.contextmanager
    def repo(self):
        repo = git.Repo(self.path)
        try:
            yield repo
        finally:
            del repo

    @property
    def full_name(self):
        return f"{self.org}/{self.name}"
    def _clone_params(self, **extra):
        params = {
            "url": self.clone_url,
            "to_path": self.path,
            "no_checkout": True,
            # Avoid issues with file permissions
            # "allow_unsafe_options": True,
            # "multi_options": ["--config core.filemode=false"],
        }
        if self.branches:
            params["branch"] = self.branches[0]
        params.update(extra)
        return params

    def _clone(self):
        _logger.info("Cloning %s...", self.full_name)
        tmp_git_dir_path = None
        repo_git_dir_path = pathlib.Path(self.path, ".git")
        with tempfile.TemporaryDirectory() as tmp_git_dir:
            if self.workaround_fs_errors:
                tmp_git_dir_path = pathlib.Path(tmp_git_dir).joinpath(".git")
            with self._get_git_env() as git_env:
                extra = {"env": git_env}
                if self.workaround_fs_errors:
                    extra["separate_git_dir"] = str(tmp_git_dir_path)
                params = self._clone_params(**extra)
                try:
                    git.Repo.clone_from(**params)
                except git.exc.GitCommandError as exc:
                    _logger.error(exc)
                    if "not found in upstream origin" in str(exc):
                        _logger.info(
                            "Couldn't clone remote branch from %s, skipping.",
                            self.full_name,
                        )
                    return False
                else:
                    if tmp_git_dir_path:
                        # {repo_path}/.git folder is a hardlink, replace
                        # it by the .git folder created in /tmp
                        # NOTE: use shutil instead of 'pathlib.Path.replace()' as
                        # file systems could be different
                        if repo_git_dir_path.exists():
                            repo_git_dir_path.unlink()
                        shutil.move(tmp_git_dir_path, repo_git_dir_path)
                    return True
    def _fetch(self, repo):
        _logger.info(
            "%s: fetch branch(es) %s", self.full_name, ", ".join(self.branches)
        )
        branches_fetched = []
        for branch in self.branches:
            # Do not block the process if the branch doesn't exist on this repo
            try:
                with self._get_git_env() as git_env:
                    with repo.git.custom_environment(**git_env):
                        repo.remotes.origin.fetch(branch)
            except git.exc.GitCommandError as exc:
                _logger.error(exc)
                if "couldn't find remote ref" in str(exc):
                    _logger.info(
                        "Couldn't find remote branch %s, skipping.", self.full_name
                    )
                    return False
                raise
            else:
                _logger.info("%s: branch %s fetched", self.full_name, branch)
                branches_fetched.append(branch)
        # Return True as soon as we fetched at least one branch
        return bool(branches_fetched)

    def _branch_exists(self, repo, branch, remote="origin"):
        refs = [r.name for r in repo.remotes[remote].refs]
        branch = f"{remote}/{branch}"
        return branch in refs

    def _checkout_branch(self, repo, branch, remote="origin"):
        # Ensure to clean up the repository before a checkout
        index_lock_path = pathlib.Path(repo.common_dir).joinpath("index.lock")
        if index_lock_path.exists():
            index_lock_path.unlink()
        repo.git.reset("--hard")
        repo.git.clean("-xdf")
        repo.git.checkout("-f", f"remotes/{remote}/{branch}")

    def _get_last_fetched_commit(self, repo, branch, remote="origin"):
        """Return the last fetched commit for the given `branch`."""
        return repo.rev_parse(f"remotes/{remote}/{branch}").hexsha
    def _get_module_paths(self, repo, relative_path, branch, remote="origin"):
        """Return the list of modules available in `branch`."""
        # Clean up 'relative_path' to make it compatible with 'git.Tree' object
        relative_tree_path = "/".join(
            [dir_ for dir_ in relative_path.split("/") if dir_ and dir_ != "."]
        )
        # Return all available modules from 'relative_tree_path'
        branch_commit = repo.remotes[remote].refs[branch].commit
        addons_trees = branch_commit.tree.trees
        if relative_tree_path:
            try:
                addons_trees = (branch_commit.tree / relative_tree_path).trees
            except KeyError:
                # 'relative_tree_path' doesn't exist
                return []
        module_paths = [tree.path for tree in addons_trees if self._odoo_module(tree)]
        return sorted(module_paths)

    def _get_module_paths_updated(
        self,
        repo,
        relative_path,
        from_commit,
        to_commit,
        branch,
    ):
        """Return modules updated between `from_commit` and `to_commit`.

        It returns a list of modules.
        """
        # Clean up 'relative_path' to make it compatible with 'git.Tree' object
        relative_tree_path = "/".join(
            [dir_ for dir_ in relative_path.split("/") if dir_ and dir_ != "."]
        )
        module_paths = set()
        # Same commits: nothing has changed
        if from_commit == to_commit:
            return list(module_paths)
        # Get only modules updated between the two commits
        from_commit = repo.commit(from_commit)
        to_commit = repo.commit(to_commit)
        diffs = to_commit.diff(from_commit, R=True)
        for diff in diffs:
            # Skip diffs that do not belong to the scanned relative path
            if not diff.a_path.startswith(relative_tree_path):
                continue
            # Skip diffs that relate to irrelevant files
            if not self._filter_file_path(diff.a_path):
                continue
            # Exclude files located in root folder
            if "/" not in diff.a_path:
                continue
            # Remove the relative_path (e.g. 'addons/') from the diff path
            rel_path = pathlib.Path(relative_path)
            diff_path = pathlib.Path(diff.a_path)
            module_path = pathlib.Path(*diff_path.parts[: len(rel_path.parts) + 1])
            tree = self._get_subtree(to_commit.tree, str(module_path))
            if tree:
                # Module still exists
                if self._odoo_module(tree):
                    module_paths.add(tree.path)
            else:
                # Module removed
                tree = self._get_subtree(from_commit.tree, str(module_path))
                if self._odoo_module(tree):
                    module_paths.add(tree.path)
        return sorted(module_paths)

    def _filter_file_path(self, path):
        for ext in (".po", ".pot", ".rst", ".html"):
            if path.endswith(ext):
                return False
        return True
    def _get_last_commit_of_git_tree(self, ref, tree):
        return tree.repo.git.log("--pretty=%H", "-n 1", ref, "--", tree.path)

    def _get_commits_of_git_tree(self, from_, to_, tree, patterns=None):
        """Return commits between `from_` and `to_` in chronological order.

        The list of commits can be limited to a `tree`.
        """
        if not patterns:
            patterns = tuple()
        rev_pattern = f"{from_}..{to_}"
        if not from_:
            rev_pattern = to_
        elif not to_:
            rev_pattern = from_
        cmd = [
            "--pretty=%H",
            "-r",
            rev_pattern,
            "--reverse",
            "--",
            tree.path,
            *patterns,
        ]
        if patterns:
            # It's mandatory to use shell here to leverage file patterns
            commits = tree.repo.git.execute(" ".join(["git", "log"] + cmd), shell=True)
        else:
            commits = tree.repo.git.log(cmd)
        return commits.split()

    def _odoo_module(self, tree):
        """Check if the `git.Tree` object is an Odoo module."""
        # NOTE: it seems we could have data only modules without '__init__.py'
        # like 'odoo/addons/test_data_module/', so the Python package check
        # is maybe not useful
        return self._manifest_exists(tree)  # and self._python_package(tree)

    def _python_package(self, tree):
        """Check if the `git.Tree` object is a Python package."""
        return bool(self._get_subtree(tree, "__init__.py"))

    def _manifest_exists(self, tree):
        """Check if the `git.Tree` object contains an Odoo manifest file."""
        manifest_found = False
        for manifest_file in MANIFEST_FILES:
            if self._get_subtree(tree, manifest_file):
                manifest_found = True
                break
        return manifest_found

    def _get_subtree(self, tree, path):
        """Return the subtree `tree / path` if it exists, or `None`."""
        try:
            return tree / path
        except KeyError:  # pylint: disable=except-pass
            pass

class MigrationScanner(BaseScanner):
    def __init__(
        self,
        org: str,
        name: str,
        clone_url: str,
        # FIXME: put specific branch names to clone in 'migration_path':
        # E.g. [('14.0', 'master'), ('18.0', '18.0-mig')]
        migration_path: tuple[str],
        new_repo_name: str = None,
        new_repo_url: str = None,
        repositories_path: str = None,
        repo_type: str = None,
        ssh_key: str = None,
        token: str = None,
        workaround_fs_errors: bool = False,
        clone_name: str = None,
    ):
        branches = [mp[1] for mp in sorted(migration_path)]
        super().__init__(
            org,
            name,
            clone_url,
            branches,
            repositories_path,
            repo_type,
            ssh_key,
            token,
            workaround_fs_errors,
            clone_name,
        )
        self.migration_path = migration_path
        self.new_repo_name = new_repo_name
        self.new_repo_url = (
            self._prepare_clone_url(repo_type, new_repo_url, token)
            if new_repo_url
            else None
        )

    def sync(self, fetch=True):
        res = super().sync(fetch=fetch)
        # Set the new repository as remote
        if self.is_cloned and self.new_repo_name and self.new_repo_url:
            with self.repo() as repo:
                self._set_git_remote_url(repo, self.new_repo_name, self.new_repo_url)
        return res
    def scan(self, addons_path=".", target_addons_path=".", module_names=None):
        # Clone/fetch has been done during the repository scan, the migration
        # scan will be processed on the current history of commits
        res = self.sync(fetch=False)
        # 'sync()' could return False if the branch to scan doesn't exist,
        # there is nothing to scan then.
        if not res:
            return False
        source_version = self.migration_path[0][0]
        source_branch = self.migration_path[0][1]
        target_version = self.migration_path[1][0]
        target_branch = self.migration_path[1][1]
        target_remote = "origin"
        with self.repo() as repo:
            if self.new_repo_name and self.new_repo_url:
                target_remote = self.new_repo_name
                # Fetch target branch from new repo
                with self._get_git_env() as git_env:
                    with repo.git.custom_environment(**git_env):
                        repo.remotes[target_remote].fetch(target_branch)
            if self._branch_exists(repo, source_branch) and self._branch_exists(
                repo, target_branch, remote=target_remote
            ):
                return self._scan_migration_path(
                    repo,
                    source_version,
                    source_branch,
                    target_remote,
                    target_version,
                    target_branch,
                    addons_path=addons_path,
                    target_addons_path=target_addons_path,
                    module_names=module_names,
                )
        return res
    def _scan_migration_path(
        self,
        repo,
        source_version,
        source_branch,
        target_remote,
        target_version,
        target_branch,
        addons_path=".",
        target_addons_path=".",
        module_names=None,
    ):
        repo_source_commit = self._get_last_fetched_commit(repo, source_branch)
        repo_target_commit = self._get_last_fetched_commit(
            repo, target_branch, remote=target_remote
        )
        if not module_names:
            module_names = self._get_module_paths(repo, addons_path, source_branch)
        res = []
        for module in module_names:
            if isinstance(module, tuple):
                module, target_module = module
            else:
                target_module = module
            if self._is_module_blacklisted(module):
                _logger.info(
                    "%s: '%s' is blacklisted (no migration scan)",
                    self.full_name,
                    module,
                )
                continue
            repo_id = self._get_odoo_repository_id()
            module_branch_id = self._get_odoo_module_branch_id(
                repo_id, module, source_version
            )
            if not module_branch_id:
                _logger.warning(
                    "Module '%s' for version %s does not exist on Odoo, "
                    "a new scan of the repository is required. Aborted"
                    % (module, source_version)
                )
                continue
            # For each module and source/target branch:
            # - get commit of 'module' relative to the last fetched commit
            # - get commit of 'module' relative to the last scanned commit
            module_path = str(pathlib.Path(addons_path).joinpath(module))
            target_module_path = str(
                pathlib.Path(target_addons_path).joinpath(target_module)
            )
            module_source_tree = self._get_subtree(
                repo.commit(repo_source_commit).tree, module_path
            )
            module_target_tree = self._get_subtree(
                repo.commit(repo_target_commit).tree, target_module_path
            )
            module_source_commit = self._get_last_commit_of_git_tree(
                repo_source_commit, module_source_tree
            )
            module_target_commit = (
                module_target_tree
                and self._get_last_commit_of_git_tree(
                    repo_target_commit, module_target_tree
                )
                or False
            )
            # Retrieve existing migration data if any and check if it is outdated
            data = self._get_odoo_module_branch_migration_data(
                repo_id, module, source_version, target_version
            )
            if (
                data.get("last_source_mig_scanned_commit") != module_source_commit
                or data.get("last_target_mig_scanned_commit") != module_target_commit
            ):
                scanned_data = self._scan_module(
                    repo,
                    addons_path,
                    target_addons_path,
                    module,
                    target_module,
                    module_branch_id,
                    source_version,
                    source_branch,
                    target_remote,
                    target_version,
                    target_branch,
                    module_source_commit,
                    module_target_commit,
                    data.get("last_source_scanned_commit"),
                    data.get("last_target_scanned_commit"),
                    data.get("last_source_mig_scanned_commit"),
                    data.get("last_target_mig_scanned_commit"),
                )
                res.append(scanned_data)
        return res
    def _scan_module(
        self,
        repo: git.Repo,
        addons_path: str,
        target_addons_path: str,
        module: str,
        target_module: str,
        module_branch_id: int,
        source_version: str,
        source_branch: str,
        target_remote: str,
        target_version: str,
        target_branch: str,
        source_commit: str,
        target_commit: str,
        source_last_scanned_commit: str,
        target_last_scanned_commit: str,
        source_last_mig_scanned_commit: str,
        target_last_mig_scanned_commit: str,
    ):
        """Collect the migration data of a module."""
        data = {
            "addons_path": addons_path,
            "target_addons_path": addons_path,
            "module": module,
            "source_version": source_version,
            "source_branch": source_branch,
            "target_version": target_version,
            "target_branch": target_branch,
            "source_commit": source_last_scanned_commit,
            "target_commit": target_last_scanned_commit,
        }
        module_path = str(pathlib.Path(addons_path).joinpath(module))
        target_module_path = str(
            pathlib.Path(target_addons_path).joinpath(target_module)
        )
        # If files updated in the module since the last scan are not relevant
        # (e.g. all new commits are updating PO files), we skip the scan.
        source_scan_relevant = self._is_scan_module_relevant(
            repo,
            module_path,
            source_last_mig_scanned_commit,
            source_commit,
        )
        target_scan_relevant = self._is_scan_module_relevant(
            repo,
            target_module_path,
            target_last_mig_scanned_commit,
            target_commit,
        )
        # We push the last source/target scanned commits (the ones scanned by
        # RepositoryScanner) to Odoo only if a scan is relevant.
        # Having the same scanned commit both for code analysis and migration
        # stored in Odoo means the migration scan is not needed.
        if source_scan_relevant:
            data["source_commit"] = source_last_scanned_commit
        if target_scan_relevant:
            data["target_commit"] = target_last_scanned_commit
        scan_relevant = source_scan_relevant or target_scan_relevant
        if scan_relevant:
            _logger.info(
                "%s: relevant changes detected in '%s' (%s -> %s)",
                self.full_name,
                module if source_scan_relevant else target_module,
                source_version,
                target_version,
            )
            oca_port_data = self._run_oca_port(
                module_path,
                target_module_path,
                source_version,
                source_branch,
                target_remote,
                target_version,
                target_branch,
            )
            data["report"] = oca_port_data
        self._push_scanned_data(module_branch_id, data)
        # Mitigate "GH API rate limit exceeds" error
        if scan_relevant:
            time.sleep(4)
        return data
    def _is_scan_module_relevant(
        self,
        repo: git.Repo,
        module_path: str,
        last_scanned_commit: str,
        last_fetched_commit: str,
    ):
        """Determine if scanning the module is relevant.

        As the scan of a module can be quite time consuming, we first check
        the files impacted among all new commits since the last scan.
        If all the files are irrelevant, then we can bypass the scan.
        """
        # The first time we want to scan the module obviously
        if not last_scanned_commit:
            return True
        # Module still not available on target branch, no need to re-run a scan
        # as it is still "To migrate" in this case
        if not last_fetched_commit:
            return False
        # Other cases: check files impacted by new commits both on source & target
        # branches to tell if a scan should be processed
        tree = self._get_subtree(repo.commit(last_fetched_commit).tree, module_path)
        new_commits = self._get_commits_of_git_tree(
            last_scanned_commit, last_fetched_commit, tree
        )
        return self._check_relevant_commits(repo, module_path, new_commits)

    def _check_relevant_commits(self, repo, module_path, commits):
        paths = set()
        for commit_sha in commits:
            commit = repo.commit(commit_sha)
            if commit.parents:
                diffs = commit.diff(commit.parents[0], paths=[module_path], R=True)
            else:
                diffs = commit.diff(git.NULL_TREE)
            for diff in diffs:
                paths.add(diff.a_path)
                paths.add(diff.b_path)
        for path in paths:
            if all(not path.endswith(pattern) for pattern in IGNORE_FILES):
                return True
        return False
    def _run_oca_port(
        self,
        module_path,
        target_module_path,
        source_version,
        source_branch,
        target_remote,
        target_version,
        target_branch,
    ):
        _logger.info(
            "%s: collect migration data for '%s' (%s -> %s)",
            self.full_name,
            module_path,
            source_branch,
            target_branch,
        )
        # Initialize the oca-port app
        params = {
            "source": f"origin/{source_branch}",
            "source_version": source_version,
            "target": f"{target_remote}/{target_branch}",
            "target_version": target_version,
            "addon_path": module_path,
            "target_addon_path": target_module_path,
            "upstream_org": self.org,
            "repo_path": self.path,
            "repo_name": self.name,
            "output": "json",
            "fetch": False,
            "github_token": self.repo_type == "github" and self.token or None,
        }
        # Store the oca_port cache in the same folder as the cloned repositories
        # to boost performance of further calls
        with set_env(XDG_CACHE_HOME=str(self.repositories_path)):
            scan = oca_port.App(**params)
            try:
                json_data = scan.run()
            except ValueError as exc:
                _logger.warning(exc)
            else:
                return json.loads(json_data)

    # Hook methods to override in the client class

    def _get_odoo_repository_id(self) -> int:
        """Return the ID of the 'odoo.repository' record."""
        raise NotImplementedError

    def _get_odoo_repository_branches(self, repo_id) -> list[str]:
        """Return the relevant branches based on 'odoo.repository.branch'."""
        raise NotImplementedError

    def _get_odoo_migration_paths(self, branches) -> list[tuple[str]]:
        """Return the available migration paths corresponding to `branches`."""
        raise NotImplementedError

    def _get_odoo_module_branch_id(self, repo_id, module, branch) -> int:
        """Return the ID of the 'odoo.module.branch' record."""
        raise NotImplementedError

    def _get_odoo_module_branch_migration_id(
        self, module, source_branch, target_branch
    ) -> int:
        """Return the ID of the 'odoo.module.branch.migration' record."""
        raise NotImplementedError

    def _get_odoo_module_branch_migration_data(
        self, repo_id, module, source_version, target_version
    ) -> dict:
        """Return last scanned commits regarding `module`."""
        raise NotImplementedError

    def _push_scanned_data(self, module_branch_id, data):
        """Push the scanned module data to Odoo.

        It has to use the 'odoo.module.branch.migration.push_scanned_data'
        RPC endpoint.
        """
        raise NotImplementedError

class RepositoryScanner(BaseScanner):
    def __init__(
        self,
        org: str,
        name: str,
        clone_url: str,
        version: str,
        branch: str,
        addons_paths_data: list,
        repositories_path: str = None,
        repo_type: str = None,
        ssh_key: str = None,
        token: str = None,
        workaround_fs_errors: bool = False,
        clone_name: str = None,
    ):
        super().__init__(
            org,
            name,
            clone_url,
            [branch],
            repositories_path,
            repo_type,
            ssh_key,
            token,
            workaround_fs_errors,
            clone_name,
        )
        self.version = version
        self.branch = branch
        self.addons_paths_data = addons_paths_data
    def detect_modules_to_scan(self):
        res = self.sync()
        # 'sync()' could return False if the branch to scan doesn't exist,
        # there is nothing to scan then.
        if not res:
            return {}
        repo_id = self._get_odoo_repository_id()
        with self.repo() as repo:
            return self._detect_modules_to_scan(repo, repo_id)

    def _detect_modules_to_scan(self, repo, repo_id):
        if not self._branch_exists(repo, self.branch):
            return
        branch_id = self._get_odoo_branch_id(self.version)
        cloned_branch = None
        if self.version != self.branch:
            cloned_branch = self.branch
        repo_branch_id = self._create_odoo_repository_branch(
            repo_id, branch_id, cloned_branch=cloned_branch
        )
        last_fetched_commit = self._get_last_fetched_commit(repo, self.branch)
        last_scanned_commit = self._get_repo_last_scanned_commit(repo_branch_id)
        data = {
            "repo_branch_id": repo_branch_id,
            "last_fetched_commit": last_fetched_commit,
            "last_scanned_commit": last_scanned_commit,
            "addons_paths": {},
        }
        if last_fetched_commit != last_scanned_commit:
            # Checkout the source branch to get the last commit of a module working tree
            self._checkout_branch(repo, self.branch)
            # Scan relevant subfolders of the repository
            for addons_path_data in self.addons_paths_data:
                addons_path = addons_path_data["relative_path"]
                data["addons_paths"][addons_path] = {
                    "specs": addons_path_data,
                    "modules_to_scan": self._detect_modules_to_scan_in_addons_path(
                        repo,
                        addons_path,
                        repo_branch_id,
                        last_fetched_commit,
                        last_scanned_commit,
                    ),
                }
        return data

    def _detect_modules_to_scan_in_addons_path(
        self,
        repo,
        addons_path,
        repo_branch_id,
        last_fetched_commit,
        last_scanned_commit,
    ):
        if not last_scanned_commit:
            # Get all module paths
            modules_to_scan = sorted(
                self._get_module_paths(repo, addons_path, self.branch)
            )
        else:
            # Get module paths updated since the last scanned commit
            modules_to_scan = self._get_module_paths_updated(
                repo,
                addons_path,
                from_commit=last_scanned_commit,
                to_commit=last_fetched_commit,
                branch=self.branch,
            )
        extra_log = ""
        if addons_path != ".":
            extra_log = f" in {addons_path}"
        _logger.info(
            "%s: %s module(s) updated on %s" + extra_log,
            self.full_name,
            len(modules_to_scan),
            self.branch,
        )
        return modules_to_scan
    def scan_module(self, module_path, specs):
        self._apply_git_global_config()
        repo_id = self._get_odoo_repository_id()
        branch_id = self._get_odoo_branch_id(self.version)
        cloned_branch = None
        if self.version != self.branch:
            cloned_branch = self.branch
        repo_branch_id = self._create_odoo_repository_branch(
            repo_id, branch_id, cloned_branch=cloned_branch
        )
        with self.repo() as repo:
            # Checkout the source branch to perform module code analysis
            branch_commit = self._get_last_fetched_commit(repo, self.branch)
            if repo.head.commit.hexsha != branch_commit:
                self._checkout_branch(repo, self.branch)
            # Get last commit of 'module_path'
            module_tree = self._get_subtree(
                repo.commit(branch_commit).tree, module_path
            )
            last_module_commit = (
                self._get_last_commit_of_git_tree(f"{branch_commit}", module_tree)
                if module_tree
                else None
            )
            return self._scan_module(
                repo,
                repo_branch_id,
                module_path,
                last_module_commit,
                specs,
            )

    def _scan_module(
        self,
        repo,
        repo_branch_id,
        module_path,
        last_module_commit,
        specs,
    ):
        module = module_path.split("/")[-1]
        if self._is_module_blacklisted(module):
            _logger.info(
                "%s#%s: '%s' is blacklisted (no scan)",
                self.full_name,
                self.branch,
                module_path,
            )
            return
        last_module_scanned_commit = self._get_module_last_scanned_commit(
            repo_branch_id, module
        )
        # Do not scan if the module didn't change since the last scan
        # NOTE we also do this check at the model level so if the process
        # is interrupted (time limit, not enough memory...) we could
        # resume the work where it stopped by skipping already scanned
        # modules.
        if last_module_scanned_commit == last_module_commit:
            return
        data = {}
        if last_module_commit:
            _logger.info(
                "%s#%s: scan '%s' ",
                self.full_name,
                self.branch,
                module_path,
            )
            data = self._run_module_code_analysis(
                repo,
                module_path,
                self.branch,
                last_module_scanned_commit,
                last_module_commit,
            )
        else:
            _logger.info(
                "%s#%s: '%s' removed",
                self.full_name,
                self.branch,
                module_path,
            )
        # Insert all flags 'is_standard', 'is_enterprise', etc
        data.update(specs)
        # Set the last fetched commit as last scanned commit
        data["last_scanned_commit"] = last_module_commit
        self._push_scanned_data(repo_branch_id, module, data)
        return data
    def _run_module_code_analysis(
        self, repo, module_path, branch, from_commit, to_commit
    ):
        """Perform a code analysis of `module_path`."""
        # Get current code analysis data
        parser = ModuleParser(f"{self.path}/{module_path}", scan_models=False)
        data = parser.to_dict()
        # Append the history of versions
        versions = self._read_module_versions(
            repo, module_path, branch, from_commit, to_commit
        )
        data["versions"] = versions
        return data

    def _read_module_versions(self, repo, module_path, branch, from_commit, to_commit):
        """Return versions data introduced between `from_commit` and `to_commit`."""
        versions = {}
        for manifest_file in MANIFEST_FILES:
            manifest_path = "/".join([module_path, manifest_file])
            manifest_tree = self._get_subtree(
                repo.commit(to_commit).tree, manifest_path
            )
            if not manifest_tree:
                continue
            new_commits = self._get_commits_of_git_tree(
                from_commit, to_commit, manifest_tree
            )
            versions_ = self._parse_module_versions_from_commits(
                repo, module_path, manifest_path, branch, new_commits
            )
            versions.update(versions_)
        return versions
    def _parse_module_versions_from_commits(
        self, repo, module_path, manifest_path, branch, new_commits
    ):
        """Parse module versions introduced in `new_commits`."""
        versions = {}
        for commit_sha in new_commits:
            commit = repo.commit(commit_sha)
            if commit.parents:
                diffs = commit.diff(commit.parents[0], R=True)
            else:
                diffs = commit.diff(git.NULL_TREE)
            for diff in diffs:
                # Check only diffs that update the manifest file
                diff_manifest = diff.a_path.endswith(
                    manifest_path
                ) or diff.b_path.endswith(manifest_path)
                if not diff_manifest:
                    continue
                # Try to parse the manifest file
                try:
                    manifest_a = ast.literal_eval(
                        diff.a_blob and diff.a_blob.data_stream.read().decode() or "{}"
                    )
                    manifest_b = ast.literal_eval(
                        diff.b_blob and diff.b_blob.data_stream.read().decode() or "{}"
                    )
                except SyntaxError:
                    _logger.warning(f"Unable to parse {manifest_path} on {branch}")
                    continue
                # Detect version change (added or updated)
                if manifest_a.get("version") == manifest_b.get("version"):
                    continue
                if not manifest_b.get("version"):
                    # Module has been removed? Skipping
                    continue
                version = manifest_b["version"]
                # Skip versions that contain special characters
                # (often human errors fixed afterwards)
                clean_version = re.sub(r"[^0-9\.]", "", version)
                if clean_version != version:
                    continue
                # Detect migration script and bind the version to the commit sha
                migration_path = "/".join([module_path, "migrations", version])
                migration_tree = self._get_subtree(
                    repo.tree(f"origin/{branch}"), migration_path
                )
                values = {
                    "commit": commit_sha,
                    "migration_script": bool(migration_tree),
                }
                versions[version] = values
        return versions
    # Hook methods to override in the client class

    def _get_odoo_repository_id(self):
        """Return the ID of the 'odoo.repository' record."""
        raise NotImplementedError

    def _get_odoo_branch_id(self, version):
        """Return the ID of the relevant 'odoo.branch' record."""
        raise NotImplementedError

    def _get_odoo_repository_branch_id(self, repo_id, branch_id):
        """Return the ID of the 'odoo.repository.branch' record."""
        raise NotImplementedError

    def _create_odoo_repository_branch(self, repo_id, branch_id):
        """Create an 'odoo.repository.branch' record and return its ID."""
        raise NotImplementedError

    def _get_repo_last_scanned_commit(self, repo_branch_id):
        """Return the last scanned commit of the repository/branch."""
        raise NotImplementedError

    def _is_module_blacklisted(self, module):
        """Check if `module` is blacklisted (and should not be scanned)."""
        raise NotImplementedError

    def _get_module_last_scanned_commit(self, repo_branch_id, module):
        """Return the last scanned commit of the module."""
        raise NotImplementedError

    def _push_scanned_data(self, repo_branch_id, module, data):
        """Push the scanned module data to Odoo.

        It has to use the 'odoo.module.branch.push_scanned_data' RPC endpoint.
        """
        raise NotImplementedError

    def _update_last_scanned_commit(self, repo_branch_id, last_scanned_commit):
        """Update the last scanned commit for the repository/branch."""
        raise NotImplementedError

class ChangelogScanner(BaseScanner):
    """Generate a changelog for a repository used in a project."""

    def __init__(
        self,
        org: str,
        name: str,
        clone_url: str,
        odoo_project_repository_id: int,
        repositories_path: str = None,
        repo_type: str = None,
        ssh_key: str = None,
        token: str = None,
    ):
        self.odoo_project_repository_id = odoo_project_repository_id
        data = self._get_odoo_project_repository_data(odoo_project_repository_id)
        self.branch = data["branch"]
        self.source_commit = data["source_commit"]
        self.target_commit = data["target_commit"] or f"origin/{self.branch}"
        self.modules = data["modules"]
        super().__init__(
            org,
            name,
            clone_url,
            [self.branch],
            repositories_path,
            repo_type,
            ssh_key,
            token,
        )

    def scan(self):
        res = self.sync()
        changelog = self._generate_changelog()
        self._push_odoo_project_repository_changelog(
            self.odoo_project_repository_id, changelog
        )
        return res

    def _generate_changelog(self):
        with self.repo() as repo:
            if not self._branch_exists(repo, self.branch):
                return
            last_commit = self._get_last_fetched_commit(repo, self.branch)
            changelog = {
                "source_commit": self.source_commit,
                "target_commit": last_commit,
                "modules": {},
            }
            for module_data in self.modules:
                module_path = module_data["path"]
                _logger.info(
                    "%s#%s: generate changelog for %s",
                    self.full_name,
                    self.branch,
                    module_path,
                )
                module_changelog = self._generate_module_changelog(repo, module_path)
                if module_changelog:
                    changelog["modules"][module_data["id"]] = module_changelog
            return changelog
    def _generate_module_changelog(self, repo, module_path):
        changelog = []
        tree = self._get_subtree(repo.commit(self.source_commit).tree, module_path)
        if not tree:
            return changelog
        # Leverage git pathspec magic (patterns) as it is faster than checking
        # the content (diffs) within the Python process to get only relevant commits.
        commits = self._get_commits_of_git_tree(
            self.source_commit,
            self.target_commit,
            tree,
            patterns=(
                "':^*/i18n/*'",
                "':^*/i18n_extra/*'",
                "':^*.html'",
                "':^*.rst'",
                "':^*/tests/*'",
                "':^*/demo/*'",
                "':^*/doc/*'",
            ),
        )
        for commit_sha in commits:
            commit = repo.commit(commit_sha)
            if self._skip_commit(commit):
                continue
            changelog.append(self._prepare_module_changelog(commit))
        return changelog

    @staticmethod
    def _skip_commit(commit):
        """Check if a commit should be skipped or not.

        E.g. merge or translation commits are skipped.
        """
        return (
            # Skip merge commits
            len(commit.parents) > 1
            or commit.author.email in AUTHOR_EMAILS_TO_SKIP
            or any([term in commit.summary for term in SUMMARY_TERMS_TO_SKIP])
        )
    def _prepare_module_changelog(self, commit):
        message = commit.message.split("\n")
        message.pop(0)  # Remove redundant summary (first line)
        message = "\n".join(message).strip()
        return {
            "hexsha": commit.hexsha,
            "authored_datetime": commit.authored_datetime.replace(
                tzinfo=None
            ).isoformat(),
            "summary": commit.summary,
            "message": message,
        }

    def _get_odoo_project_repository_data(self, project_repo_id):
        """Return required data to generate the changelog.

        Return a dictionary such as:

            {
                "odoo_project_id": 10,
                "branch": "17.0",
                "source_commit": "7b58a288b3d79fbdc91dbf14aaeac0d69d65c327",
                "target_commit": None,
                "modules": [
                    # List of dicts {"id": PROJECT_MODULE_ID, ...}
                    {"id": 1, "name": "base", "path": "odoo/addons/base"},
                    {"id": 2, "name": "account", "path": "addons/account"},
                ]
            }
        """
        raise NotImplementedError

    def _push_odoo_project_repository_changelog(self, project_repo_id, changelog):
        """Push the resulting changelog to its 'odoo.project.repository' object.

        It has to use the 'odoo.project.repository.push_changelog' RPC endpoint.
        """
        raise NotImplementedError
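
The scanner classes above are deliberately backend-agnostic: every method under the "Hook methods to override" sections raises NotImplementedError, and the docstrings only state which Odoo models and RPC endpoints a client class is expected to use (e.g. 'odoo.module.branch.push_scanned_data'). As a rough, hypothetical sketch only (not shipped in this package), a client running outside Odoo could wire a few RepositoryScanner hooks to Odoo's standard XML-RPC external API; the model names come from the docstrings, while the search domains, field names and exact method signatures below are assumptions.

# Hypothetical client-side subclass (illustration only, not part of the wheel).
import xmlrpc.client

from odoo.addons.odoo_repository.lib.scanner import RepositoryScanner


class XmlRpcRepositoryScanner(RepositoryScanner):
    """Sketch of a few hook methods backed by Odoo's XML-RPC external API."""

    def __init__(self, *args, url, db, login, password, **kwargs):
        super().__init__(*args, **kwargs)
        common = xmlrpc.client.ServerProxy(f"{url}/xmlrpc/2/common")
        self._db, self._password = db, password
        self._uid = common.authenticate(db, login, password, {})
        self._models = xmlrpc.client.ServerProxy(f"{url}/xmlrpc/2/object")

    def _execute(self, model, method, *args, **kwargs):
        # Thin passthrough to execute_kw(db, uid, password, model, method, args, kwargs)
        return self._models.execute_kw(
            self._db, self._uid, self._password, model, method, list(args), kwargs
        )

    def _get_odoo_repository_id(self):
        # Assumed lookup: one 'odoo.repository' record per org/name pair.
        ids = self._execute(
            "odoo.repository",
            "search",
            [("org_id.name", "=", self.org), ("name", "=", self.name)],
        )
        return ids and ids[0] or False

    def _get_module_last_scanned_commit(self, repo_branch_id, module):
        # Assumed field names on 'odoo.module.branch'.
        records = self._execute(
            "odoo.module.branch",
            "search_read",
            [
                ("repository_branch_id", "=", repo_branch_id),
                ("module_id.name", "=", module),
            ],
            fields=["last_scanned_commit"],
            limit=1,
        )
        return records and records[0]["last_scanned_commit"] or False

    def _push_scanned_data(self, repo_branch_id, module, data):
        # The docstring above mandates the 'odoo.module.branch.push_scanned_data'
        # endpoint; the exact argument order is an assumption.
        return self._execute(
            "odoo.module.branch", "push_scanned_data", repo_branch_id, module, data
        )

Within the addon itself, these hooks are presumably implemented directly with ORM calls on the models listed in the file table above (odoo_repository.py, odoo_module_branch.py, ...), so an RPC layer like this sketch would only be needed for an external scanner node.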