alibuild 1.17.14__tar.gz → 1.17.16__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. {alibuild-1.17.14 → alibuild-1.17.16}/.github/workflows/documentation.yml +1 -1
  2. {alibuild-1.17.14 → alibuild-1.17.16}/.github/workflows/pr-check.yml +20 -17
  3. {alibuild-1.17.14 → alibuild-1.17.16}/PKG-INFO +2 -2
  4. {alibuild-1.17.14 → alibuild-1.17.16}/alfaBuild +1 -1
  5. {alibuild-1.17.14 → alibuild-1.17.16}/aliBuild +1 -1
  6. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild.egg-info/PKG-INFO +2 -2
  7. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild.egg-info/requires.txt +1 -1
  8. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/_version.py +9 -4
  9. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/analytics.py +4 -2
  10. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/args.py +1 -1
  11. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/build.py +5 -7
  12. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/clean.py +2 -2
  13. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/cmd.py +1 -1
  14. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/deps.py +4 -4
  15. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/doctor.py +6 -4
  16. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/git.py +1 -1
  17. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/init.py +3 -2
  18. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/log.py +6 -6
  19. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/sync.py +26 -27
  20. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/templating_plugin.py +1 -1
  21. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/utilities.py +54 -21
  22. {alibuild-1.17.14 → alibuild-1.17.16}/codecov.yml +3 -0
  23. {alibuild-1.17.14 → alibuild-1.17.16}/docs/docs/reference.md +4 -1
  24. alibuild-1.17.16/docs/docs/stylesheets/extra.css +9 -0
  25. {alibuild-1.17.14 → alibuild-1.17.16}/pyproject.toml +1 -1
  26. alibuild-1.17.16/requirements.txt +6 -0
  27. {alibuild-1.17.14 → alibuild-1.17.16}/setup.py +1 -5
  28. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_analytics.py +2 -2
  29. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_args.py +3 -3
  30. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_build.py +3 -5
  31. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_clean.py +0 -1
  32. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_deps.py +8 -5
  33. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_doctor.py +40 -36
  34. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_git.py +5 -5
  35. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_hashing.py +1 -1
  36. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_init.py +17 -13
  37. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_log.py +1 -1
  38. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_packagelist.py +68 -47
  39. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_parseRecipe.py +7 -7
  40. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_sync.py +3 -3
  41. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_utilities.py +104 -21
  42. {alibuild-1.17.14 → alibuild-1.17.16}/tox.ini +19 -19
  43. alibuild-1.17.14/docs/docs/stylesheets/extra.css +0 -7
  44. alibuild-1.17.14/requirements.txt +0 -7
  45. {alibuild-1.17.14 → alibuild-1.17.16}/.flake8 +0 -0
  46. {alibuild-1.17.14 → alibuild-1.17.16}/.github/workflows/release.yml +0 -0
  47. {alibuild-1.17.14 → alibuild-1.17.16}/.gitignore +0 -0
  48. {alibuild-1.17.14 → alibuild-1.17.16}/.pylintrc +0 -0
  49. {alibuild-1.17.14 → alibuild-1.17.16}/ANALYTICS.md +0 -0
  50. {alibuild-1.17.14 → alibuild-1.17.16}/DESIGN.md +0 -0
  51. {alibuild-1.17.14 → alibuild-1.17.16}/LICENSE.md +0 -0
  52. {alibuild-1.17.14 → alibuild-1.17.16}/MANIFEST.in +0 -0
  53. {alibuild-1.17.14 → alibuild-1.17.16}/PACKAGING.md +0 -0
  54. {alibuild-1.17.14 → alibuild-1.17.16}/README.rst +0 -0
  55. {alibuild-1.17.14 → alibuild-1.17.16}/aliDeps +0 -0
  56. {alibuild-1.17.14 → alibuild-1.17.16}/aliDoctor +0 -0
  57. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild.egg-info/SOURCES.txt +0 -0
  58. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild.egg-info/dependency_links.txt +0 -0
  59. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild.egg-info/top_level.txt +0 -0
  60. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/__init__.py +0 -0
  61. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/build_template.sh +0 -0
  62. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/scm.py +0 -0
  63. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/sl.py +0 -0
  64. {alibuild-1.17.14 → alibuild-1.17.16}/alibuild_helpers/workarea.py +0 -0
  65. {alibuild-1.17.14 → alibuild-1.17.16}/alienv +0 -0
  66. {alibuild-1.17.14 → alibuild-1.17.16}/debian/changelog +0 -0
  67. {alibuild-1.17.14 → alibuild-1.17.16}/debian/compat +0 -0
  68. {alibuild-1.17.14 → alibuild-1.17.16}/debian/control +0 -0
  69. {alibuild-1.17.14 → alibuild-1.17.16}/debian/copyright +0 -0
  70. {alibuild-1.17.14 → alibuild-1.17.16}/debian/files +0 -0
  71. {alibuild-1.17.14 → alibuild-1.17.16}/debian/rules +0 -0
  72. {alibuild-1.17.14 → alibuild-1.17.16}/docs/README.md +0 -0
  73. {alibuild-1.17.14 → alibuild-1.17.16}/docs/SUPPORT +0 -0
  74. {alibuild-1.17.14 → alibuild-1.17.16}/docs/docs/alice_logo.png +0 -0
  75. {alibuild-1.17.14 → alibuild-1.17.16}/docs/docs/deps.png +0 -0
  76. {alibuild-1.17.14 → alibuild-1.17.16}/docs/docs/index.md +0 -0
  77. {alibuild-1.17.14 → alibuild-1.17.16}/docs/docs/quick.md +0 -0
  78. {alibuild-1.17.14 → alibuild-1.17.16}/docs/docs/troubleshooting.md +0 -0
  79. {alibuild-1.17.14 → alibuild-1.17.16}/docs/docs/user.md +0 -0
  80. {alibuild-1.17.14 → alibuild-1.17.16}/docs/mkdocs.yml +0 -0
  81. {alibuild-1.17.14 → alibuild-1.17.16}/pb +0 -0
  82. {alibuild-1.17.14 → alibuild-1.17.16}/setup.cfg +0 -0
  83. {alibuild-1.17.14 → alibuild-1.17.16}/templates/alibuild_to_please.jnj +0 -0
  84. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_cmd.py +0 -0
  85. {alibuild-1.17.14 → alibuild-1.17.16}/tests/test_workarea.py +0 -0
  86. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/broken1.sh +0 -0
  87. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/broken2.sh +0 -0
  88. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/broken3.sh +0 -0
  89. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/broken4.sh +0 -0
  90. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/broken5.sh +0 -0
  91. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/broken6.sh +0 -0
  92. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/broken7.sh +0 -0
  93. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/clobber-initdotsh.sh +0 -0
  94. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/defaults-o2.sh +0 -0
  95. {alibuild-1.17.14 → alibuild-1.17.16}/tests/testdist/delete-etc.sh +0 -0
@@ -17,7 +17,7 @@ jobs:
  - uses: actions/setup-python@v5
  with:
  python-version: 3.x
- - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
+ - run: echo "cache_id=$(date --utc '+%V')" >> "$GITHUB_ENV"
  - uses: actions/cache@v4
  with:
  key: mkdocs-material-${{ env.cache_id }}
@@ -74,6 +74,8 @@ jobs:
  matrix:
  python-version:
  - '3.11'
+ - '3.12'
+ - '3.13'

  steps:
  - uses: actions/checkout@v4
@@ -110,20 +112,21 @@ jobs:
  with:
  files: coverage.json

- lint:
- name: lint
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v4
-
- - name: Set up Python
- uses: actions/setup-python@v5
- with:
- python-version: 3.x
-
- - name: Install dependencies
- run: python -m pip install --upgrade tox tox-gh-actions
-
- - name: Run linters
- run: tox -e lint
+ # Temporarily disabled until we decide whether to follow PEP8 or not
+ # lint:
+ # name: lint
+ # runs-on: ubuntu-latest
+ #
+ # steps:
+ # - uses: actions/checkout@v4
+ #
+ # - name: Set up Python
+ # uses: actions/setup-python@v5
+ # with:
+ # python-version: 3.x
+ #
+ # - name: Install dependencies
+ # run: python -m pip install --upgrade tox tox-gh-actions
+ #
+ # - name: Run linters
+ # run: tox -e lint
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: alibuild
- Version: 1.17.14
+ Version: 1.17.16
  Summary: ALICE Build Tool
  Home-page: https://alisw.github.io/alibuild
  Author: Giulio Eulisse
@@ -25,7 +25,7 @@ Requires-Dist: pyyaml
  Requires-Dist: requests
  Requires-Dist: distro
  Requires-Dist: jinja2
- Requires-Dist: boto3==1.23.10
+ Requires-Dist: boto3<1.36.0
  Dynamic: author
  Dynamic: description
  Dynamic: description-content-type
@@ -57,7 +57,7 @@ def doMain(args, parser):
  error(e.message)
  exit(1)

- if args.action == "version" or args.action == None:
+ if args.action == "version" or args.action is None:
  print("aliBuild version: {version} ({arch})".format(
  version=__version__ or "unknown", arch=args.architecture or "unknown"))
  sys.exit(0)
@@ -57,7 +57,7 @@ def doMain(args, parser):
  error(e.message)
  exit(1)

- if args.action == "version" or args.action == None:
+ if args.action == "version" or args.action is None:
  print("aliBuild version: {version} ({arch})".format(
  version=__version__ or "unknown", arch=args.architecture or "unknown"))
  sys.exit(0)
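The `args.action == None` → `args.action is None` change in both launcher scripts is more than style: `is None` checks object identity and cannot be fooled by a custom `__eq__`, whereas `== None` can. A minimal illustration, using a hypothetical class that is not part of alibuild:

```python
# Hypothetical example: why "is None" is safer than "== None".
class AlwaysEqual:
    def __eq__(self, other):
        return True  # claims to equal anything, including None

action = AlwaysEqual()
print(action == None)  # True  -- misleading result from the overridden __eq__
print(action is None)  # False -- identity check cannot be overridden

# The pattern used by the launchers after this change:
if action == "version" or action is None:
    print("would print the version here")
```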
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: alibuild
- Version: 1.17.14
+ Version: 1.17.16
  Summary: ALICE Build Tool
  Home-page: https://alisw.github.io/alibuild
  Author: Giulio Eulisse
@@ -25,7 +25,7 @@ Requires-Dist: pyyaml
  Requires-Dist: requests
  Requires-Dist: distro
  Requires-Dist: jinja2
- Requires-Dist: boto3==1.23.10
+ Requires-Dist: boto3<1.36.0
  Dynamic: author
  Dynamic: description
  Dynamic: description-content-type
@@ -2,4 +2,4 @@ pyyaml
  requests
  distro
  jinja2
- boto3==1.23.10
+ boto3<1.36.0
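The boto3 pin is relaxed from an exact `==1.23.10` to an upper bound `<1.36.0`, so any release below 1.36.0 now satisfies the requirement. A small sketch of how the two specifiers compare, using the third-party `packaging` library (an assumption for illustration, not an alibuild dependency listed here):

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

old_pin = SpecifierSet("==1.23.10")   # exactly one release allowed
new_pin = SpecifierSet("<1.36.0")     # anything below 1.36.0 allowed

for candidate in ("1.23.10", "1.35.99", "1.36.0"):
    v = Version(candidate)
    print(candidate, "old:", v in old_pin, "new:", v in new_pin)
```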
@@ -1,8 +1,13 @@
- # file generated by setuptools_scm
+ # file generated by setuptools-scm
  # don't change, don't track in version control
+
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
  TYPE_CHECKING = False
  if TYPE_CHECKING:
- from typing import Tuple, Union
+ from typing import Tuple
+ from typing import Union
+
  VERSION_TUPLE = Tuple[Union[int, str], ...]
  else:
  VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '1.17.14'
- __version_tuple__ = version_tuple = (1, 17, 14)
+ __version__ = version = '1.17.16'
+ __version_tuple__ = version_tuple = (1, 17, 16)
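`_version.py` is regenerated by setuptools-scm on every release, which is why the version string, the version tuple, and now an `__all__` declaration change here. A minimal sketch of how such a generated module is typically consumed (the fallback branch is an assumption for environments where alibuild is not importable):

```python
try:
    from alibuild_helpers._version import __version__
except ImportError:  # assumption: alibuild is not installed in this environment
    __version__ = None

print("aliBuild version:", __version__ or "unknown")
```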
@@ -1,5 +1,7 @@
  #!/usr/bin/env python3
- import os, subprocess, sys
+ import os
+ import subprocess
+ import sys
  from os.path import exists, expanduser
  from os import unlink

@@ -104,7 +106,7 @@ def report_exception(e):
  exd = e.__class__.__name__,
  exf = "1")

- def enable_analytics():
+ def enable_analytics() -> None:
  if exists(expanduser("~/.config/alibuild/disable-analytics")):
  unlink(expanduser("~/.config/alibuild/disable-analytics"))
  if not exists(expanduser("~/.config/alibuild/analytics-uuid")):
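Many functions in this release gain an explicit `-> None` return annotation (here `enable_analytics`, and likewise in `build.py`, `log.py`, `sync.py` and others below). The annotation documents that the function is called only for its side effects, and lets a type checker such as mypy flag accidental use of the return value. A small, self-contained sketch of the idea; the names and path below are made up:

```python
def write_marker(path: str) -> None:
    # Side effect only; there is deliberately no return statement.
    with open(path, "w") as f:
        f.write("enabled\n")

result = write_marker("/tmp/alibuild-annotation-demo")  # a type checker would warn about using the result
assert result is None  # at runtime the value is simply None
```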
@@ -474,7 +474,7 @@ def finaliseArgs(args, parser):
  args.writeStore = args.remoteStore

  if args.action in ["build", "init"]:
- if "develPrefix" in args and args.develPrefix == None:
+ if "develPrefix" in args and args.develPrefix is None:
  if "chdir" in args:
  args.develPrefix = basename(abspath(args.chdir))
  else:
@@ -11,7 +11,6 @@ from alibuild_helpers.utilities import parseDefaults, readDefaults
  from alibuild_helpers.utilities import getPackageList, asList
  from alibuild_helpers.utilities import validateDefaults
  from alibuild_helpers.utilities import Hasher
- from alibuild_helpers.utilities import yamlDump
  from alibuild_helpers.utilities import resolve_tag, resolve_version, short_commit_hash
  from alibuild_helpers.git import Git, git
  from alibuild_helpers.sl import Sapling
@@ -31,11 +30,10 @@ import socket
  import os
  import re
  import shutil
- import sys
  import time


- def writeAll(fn, txt):
+ def writeAll(fn, txt) -> None:
  f = open(fn, "w")
  f.write(txt)
  f.close()
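`writeAll` above still opens the file handle manually; the change only adds the `-> None` annotation. For reference, an equivalent helper written with a context manager (a sketch, not the code alibuild uses) closes the file even if the write raises:

```python
def write_all(fn: str, txt: str) -> None:
    # Equivalent behaviour, but the file is closed on any exit path.
    with open(fn, "w") as f:
        f.write(txt)

write_all("/tmp/alibuild-writeall-demo.txt", "hello\n")  # hypothetical path for the demo
```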
@@ -476,7 +474,7 @@ def doBuild(args, parser):
  checkedOutCommitName = scm.checkedOutCommitName(directory=args.configDir)
  except SCMError:
  dieOnError(True, "Cannot find SCM directory in %s." % args.configDir)
- os.environ["ALIBUILD_ALIDIST_HASH"] = checkedOutCommitName # type: ignore
+ os.environ["ALIBUILD_ALIDIST_HASH"] = checkedOutCommitName

  debug("Building for architecture %s", args.architecture)
  debug("Number of parallel builds: %d", args.jobs)
@@ -512,9 +510,9 @@ def doBuild(args, parser):
  ("\n- ".join(sorted(failed)), args.defaults, " ".join(args.pkgname)))

  for x in specs.values():
- x["requires"] = [r for r in x["requires"] if not r in args.disable]
- x["build_requires"] = [r for r in x["build_requires"] if not r in args.disable]
- x["runtime_requires"] = [r for r in x["runtime_requires"] if not r in args.disable]
+ x["requires"] = [r for r in x["requires"] if r not in args.disable]
+ x["build_requires"] = [r for r in x["build_requires"] if r not in args.disable]
+ x["runtime_requires"] = [r for r in x["runtime_requires"] if r not in args.disable]

  if systemPackages:
  banner("aliBuild can take the following packages from the system and will not build them:\n %s",
@@ -47,14 +47,14 @@ def decideClean(workDir, architecture, aggressiveCleanup):
  "%s/SOURCES" % (workDir)]
  allBuildStuff = glob.glob("%s/BUILD/*" % workDir)
  toDelete += [x for x in allBuildStuff
- if not path.islink(x) and not basename(x) in symlinksBuild]
+ if not path.islink(x) and basename(x) not in symlinksBuild]
  installGlob ="%s/%s/*/" % (workDir, architecture)
  installedPackages = set([dirname(x) for x in glob.glob(installGlob)])
  symlinksInstall = []
  for x in installedPackages:
  symlinksInstall += [path.realpath(y) for y in glob.glob(x + "/latest*")]
  toDelete += [x for x in glob.glob(installGlob+ "*")
- if not path.islink(x) and not path.realpath(x) in symlinksInstall]
+ if not path.islink(x) and path.realpath(x) not in symlinksInstall]
  toDelete = [x for x in toDelete if path.exists(x)]
  return toDelete

@@ -78,7 +78,7 @@ class DockerRunner:
  instead.
  """

- def __init__(self, docker_image, docker_run_args=()):
+ def __init__(self, docker_image, docker_run_args=()) -> None:
  self._docker_image = docker_image
  self._docker_run_args = docker_run_args
  self._container = None
@@ -41,9 +41,9 @@ def doDeps(args, parser):

  for s in specs.values():
  # Remove disabled packages
- s["requires"] = [r for r in s["requires"] if not r in args.disable and r != "defaults-release"]
- s["build_requires"] = [r for r in s["build_requires"] if not r in args.disable and r != "defaults-release"]
- s["runtime_requires"] = [r for r in s["runtime_requires"] if not r in args.disable and r != "defaults-release"]
+ s["requires"] = [r for r in s["requires"] if r not in args.disable and r != "defaults-release"]
+ s["build_requires"] = [r for r in s["build_requires"] if r not in args.disable and r != "defaults-release"]
+ s["runtime_requires"] = [r for r in s["runtime_requires"] if r not in args.disable and r != "defaults-release"]

  # Determine which pacakages are only build/runtime dependencies
  all_build = set()
@@ -97,7 +97,7 @@ def doDeps(args, parser):
  # Check if we have dot in PATH
  try:
  execute(["dot", "-V"])
- except Exception as e:
+ except Exception:
  dieOnError(True, "Could not find dot in PATH. Please install graphviz and add it to PATH.")
  try:
  if args.neat:
@@ -1,5 +1,7 @@
  #!/usr/bin/env python3
- import os, re, sys
+ import os
+ import re
+ import sys
  from os.path import exists, abspath, expanduser
  import logging
  from alibuild_helpers.log import debug, error, banner, info, success, warning
@@ -7,9 +9,9 @@ from alibuild_helpers.log import logger
  from alibuild_helpers.utilities import getPackageList, parseDefaults, readDefaults, validateDefaults
  from alibuild_helpers.cmd import getstatusoutput, DockerRunner

- def prunePaths(workDir):
+ def prunePaths(workDir) -> None:
  for x in ["PATH", "LD_LIBRARY_PATH", "DYLD_LIBRARY_PATH"]:
- if not x in os.environ:
+ if x not in os.environ:
  continue
  workDirEscaped = re.escape("%s" % workDir) + "[^:]*:?"
  os.environ[x] = re.sub(workDirEscaped, "", os.environ[x])
@@ -52,7 +54,7 @@ def checkRequirements(spec, cmd, homebrew_replacement, getstatusoutput_docker):
  spec.get("system_requirement_missing"))
  return (err, "")

- def systemInfo():
+ def systemInfo() -> None:
  _,out = getstatusoutput("env")
  debug("Environment:\n%s", out)
  _,out = getstatusoutput("uname -a")
@@ -96,7 +96,7 @@ def git(args, directory=".", check=True, prompt=True):
  directory=quote(directory),
  args=" ".join(map(quote, args)),
  # GIT_TERMINAL_PROMPT is only supported in git 2.3+.
- prompt_var=f"GIT_TERMINAL_PROMPT=0" if not prompt else "",
+ prompt_var="GIT_TERMINAL_PROMPT=0" if not prompt else "",
  directory_safe_var=f"GIT_CONFIG_COUNT={lastGitOverride+2} GIT_CONFIG_KEY_{lastGitOverride}=safe.directory GIT_CONFIG_VALUE_{lastGitOverride}=$PWD GIT_CONFIG_KEY_{lastGitOverride+1}=gc.auto GIT_CONFIG_VALUE_{lastGitOverride+1}=0" if directory else "",
  ), timeout=GIT_CMD_TIMEOUTS.get(args[0] if len(args) else "*", GIT_COMMAND_TIMEOUT_SEC))
  if check and err != 0:
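The `git.py` hunk drops an `f` prefix from a string that contains no placeholders; an f-string without substitutions is just a plain string literal (pyflakes reports it as F541), so behaviour is unchanged. A tiny demonstration:

```python
prompt = False
old_style = f"GIT_TERMINAL_PROMPT=0" if not prompt else ""  # f-string with no placeholders
new_style = "GIT_TERMINAL_PROMPT=0" if not prompt else ""
assert old_style == new_style == "GIT_TERMINAL_PROMPT=0"
```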
@@ -2,11 +2,12 @@ from alibuild_helpers.git import git, Git
  from alibuild_helpers.utilities import getPackageList, parseDefaults, readDefaults, validateDefaults
  from alibuild_helpers.log import debug, error, warning, banner, info
  from alibuild_helpers.log import dieOnError
- from alibuild_helpers.workarea import cleanup_git_log, updateReferenceRepoSpec
+ from alibuild_helpers.workarea import updateReferenceRepoSpec

  from os.path import join
  import os.path as path
- import os, sys
+ import os
+ import sys

  def parsePackagesDefinition(pkgname):
  return [ dict(zip(["name","ver"], y.split("@")[0:2]))
@@ -6,13 +6,13 @@ import datetime

  debug, error, warning, info, success = (None, None, None, None, None)

- def dieOnError(err, msg):
+ def dieOnError(err, msg) -> None:
  if err:
  error("%s", msg)
  sys.exit(1)

  class LogFormatter(logging.Formatter):
- def __init__(self, fmtstr):
+ def __init__(self, fmtstr) -> None:
  self.fmtstr = fmtstr
  self.COLOR_RESET = "\033[m" if sys.stdout.isatty() else ""
  self.LEVEL_COLORS = { logging.WARNING: "\033[4;33m",
@@ -35,7 +35,7 @@ class LogFormatter(logging.Formatter):
  } for x in record.msg.split("\n"))


- def log_current_package(package, main_package, specs, devel_prefix):
+ def log_current_package(package, main_package, specs, devel_prefix) -> None:
  """Show PACKAGE as the one currently being processed in future log messages."""
  if logger_handler.level > logging.DEBUG:
  return
@@ -55,14 +55,14 @@ def log_current_package(package, main_package, specs, devel_prefix):


  class ProgressPrint:
- def __init__(self, begin_msg=""):
+ def __init__(self, begin_msg="") -> None:
  self.count = -1
  self.lasttime = 0
  self.STAGES = ".", "..", "...", "....", ".....", "....", "...", ".."
  self.begin_msg = begin_msg
  self.percent = -1

- def __call__(self, txt, *args):
+ def __call__(self, txt, *args) -> None:
  if logger.level <= logging.DEBUG or not sys.stdout.isatty():
  debug(txt, *args)
  return
@@ -88,7 +88,7 @@ class ProgressPrint:
  self.lasttime = time.time()
  sys.stderr.flush()

- def erase(self):
+ def erase(self) -> None:
  nerase = len(self.STAGES[self.count]) if self.count > -1 else 0
  if self.percent > -1:
  nerase = nerase + 7
@@ -31,15 +31,15 @@ def remote_from_url(read_url, write_url, architecture, work_dir, insecure=False)

  class NoRemoteSync:
  """Helper class which does not do anything to sync"""
- def fetch_symlinks(self, spec):
+ def fetch_symlinks(self, spec) -> None:
  pass
- def fetch_tarball(self, spec):
+ def fetch_tarball(self, spec) -> None:
  pass
- def upload_symlinks_and_tarball(self, spec):
+ def upload_symlinks_and_tarball(self, spec) -> None:
  pass

  class PartialDownloadError(Exception):
- def __init__(self, downloaded, size):
+ def __init__(self, downloaded, size) -> None:
  self.downloaded = downloaded
  self.size = size
  def __str__(self):
@@ -47,7 +47,7 @@ class PartialDownloadError(Exception):


  class HttpRemoteSync:
- def __init__(self, remoteStore, architecture, workdir, insecure):
+ def __init__(self, remoteStore, architecture, workdir, insecure) -> None:
  self.remoteStore = remoteStore
  self.writeStore = ""
  self.architecture = architecture
@@ -137,7 +137,7 @@ class HttpRemoteSync:
  pass
  return None

- def fetch_tarball(self, spec):
+ def fetch_tarball(self, spec) -> None:
  # Check for any existing tarballs we can use instead of fetching new ones.
  for pkg_hash in spec["remote_hashes"]:
  try:
@@ -184,7 +184,7 @@ class HttpRemoteSync:
  destPath, session=session, progress=progress)
  progress.end("done")

- def fetch_symlinks(self, spec):
+ def fetch_symlinks(self, spec) -> None:
  links_path = resolve_links_path(self.architecture, spec["package"])
  os.makedirs(os.path.join(self.workdir, links_path), exist_ok=True)

@@ -232,20 +232,20 @@ class HttpRemoteSync:
  symlink("../../" + target.lstrip("./"),
  os.path.join(self.workdir, links_path, linkname))

- def upload_symlinks_and_tarball(self, spec):
+ def upload_symlinks_and_tarball(self, spec) -> None:
  pass


  class RsyncRemoteSync:
  """Helper class to sync package build directory using RSync."""

- def __init__(self, remoteStore, writeStore, architecture, workdir):
+ def __init__(self, remoteStore, writeStore, architecture, workdir) -> None:
  self.remoteStore = re.sub("^ssh://", "", remoteStore)
  self.writeStore = re.sub("^ssh://", "", writeStore)
  self.architecture = architecture
  self.workdir = workdir

- def fetch_tarball(self, spec):
+ def fetch_tarball(self, spec) -> None:
  info("Downloading tarball for %s@%s, if available", spec["package"], spec["version"])
  debug("Updating remote store for package %s with hashes %s", spec["package"],
  ", ".join(spec["remote_hashes"]))
@@ -273,7 +273,7 @@ class RsyncRemoteSync:
  for pkg_hash in spec["remote_hashes"])))
  dieOnError(err, "Unable to fetch tarball from specified store.")

- def fetch_symlinks(self, spec):
+ def fetch_symlinks(self, spec) -> None:
  links_path = resolve_links_path(self.architecture, spec["package"])
  os.makedirs(os.path.join(self.workdir, links_path), exist_ok=True)
  err = execute("rsync -rlvW --delete {remote_store}/{links_path}/ {workdir}/{links_path}/".format(
@@ -283,7 +283,7 @@ class RsyncRemoteSync:
  ))
  dieOnError(err, "Unable to fetch symlinks from specified store.")

- def upload_symlinks_and_tarball(self, spec):
+ def upload_symlinks_and_tarball(self, spec) -> None:
  if not self.writeStore:
  return
  dieOnError(execute("""\
@@ -313,16 +313,16 @@ class CVMFSRemoteSync:
  means unpacking the symlink to the wanted package.
  """

- def __init__(self, remoteStore, writeStore, architecture, workdir):
+ def __init__(self, remoteStore, writeStore, architecture, workdir) -> None:
  self.remoteStore = re.sub("^cvmfs://", "", remoteStore)
  # We do not support uploading directly to CVMFS, for obvious
  # reasons.
- assert(writeStore == None)
+ assert(writeStore is None)
  self.writeStore = None
  self.architecture = architecture
  self.workdir = workdir

- def fetch_tarball(self, spec):
+ def fetch_tarball(self, spec) -> None:
  info("Downloading tarball for %s@%s-%s, if available", spec["package"], spec["version"], spec["revision"])
  # If we already have a tarball with any equivalent hash, don't check S3.
  for pkg_hash in spec["remote_hashes"] + spec["local_hashes"]:
@@ -334,7 +334,7 @@ class CVMFSRemoteSync:
  info("Could not find prebuilt tarball for %s@%s-%s, will be rebuilt",
  spec["package"], spec["version"], spec["revision"])

- def fetch_symlinks(self, spec):
+ def fetch_symlinks(self, spec) -> None:
  # When using CVMFS, we create the symlinks grass by reading the .
  info("Fetching available build hashes for %s, from %s", spec["package"], self.remoteStore)
  links_path = resolve_links_path(self.architecture, spec["package"])
@@ -364,7 +364,6 @@ class CVMFSRemoteSync:
  done
  """.format(
  workDir=self.workdir,
- b=self.remoteStore,
  architecture=self.architecture,
  cvmfs_architecture=cvmfs_architecture,
  package=spec["package"],
@@ -372,7 +371,7 @@ class CVMFSRemoteSync:
  links_path=links_path,
  ))

- def upload_symlinks_and_tarball(self, spec):
+ def upload_symlinks_and_tarball(self, spec) -> None:
  dieOnError(True, "CVMFS backend does not support uploading directly")

  class S3RemoteSync:
@@ -381,13 +380,13 @@ class S3RemoteSync:
  s3cmd must be installed separately in order for this to work.
  """

- def __init__(self, remoteStore, writeStore, architecture, workdir):
+ def __init__(self, remoteStore, writeStore, architecture, workdir) -> None:
  self.remoteStore = re.sub("^s3://", "", remoteStore)
  self.writeStore = re.sub("^s3://", "", writeStore)
  self.architecture = architecture
  self.workdir = workdir

- def fetch_tarball(self, spec):
+ def fetch_tarball(self, spec) -> None:
  info("Downloading tarball for %s@%s, if available", spec["package"], spec["version"])
  debug("Updating remote store for package %s with hashes %s",
  spec["package"], ", ".join(spec["remote_hashes"]))
@@ -410,7 +409,7 @@ class S3RemoteSync:
  ))
  dieOnError(err, "Unable to fetch tarball from specified store.")

- def fetch_symlinks(self, spec):
+ def fetch_symlinks(self, spec) -> None:
  err = execute("""\
  mkdir -p "{workDir}/{linksPath}"
  find "{workDir}/{linksPath}" -type l -delete
@@ -432,7 +431,7 @@ class S3RemoteSync:
  ))
  dieOnError(err, "Unable to fetch symlinks from specified store.")

- def upload_symlinks_and_tarball(self, spec):
+ def upload_symlinks_and_tarball(self, spec) -> None:
  if not self.writeStore:
  return
  dieOnError(execute("""\
@@ -486,14 +485,14 @@ class Boto3RemoteSync:
  time.
  """

- def __init__(self, remoteStore, writeStore, architecture, workdir):
+ def __init__(self, remoteStore, writeStore, architecture, workdir) -> None:
  self.remoteStore = re.sub("^b3://", "", remoteStore)
  self.writeStore = re.sub("^b3://", "", writeStore)
  self.architecture = architecture
  self.workdir = workdir
  self._s3_init()

- def _s3_init(self):
+ def _s3_init(self) -> None:
  # This is a separate method so that we can patch it out for unit tests.
  # Import boto3 here, so that if we don't use this remote store, we don't
  # have to install it in the first place.
@@ -530,7 +529,7 @@ class Boto3RemoteSync:
  raise
  return True

- def fetch_tarball(self, spec):
+ def fetch_tarball(self, spec) -> None:
  debug("Updating remote store for package %s with hashes %s", spec["package"],
  ", ".join(spec["remote_hashes"]))

@@ -568,7 +567,7 @@ class Boto3RemoteSync:
  debug("Remote has no tarballs for %s with hashes %s", spec["package"],
  ", ".join(spec["remote_hashes"]))

- def fetch_symlinks(self, spec):
+ def fetch_symlinks(self, spec) -> None:
  from botocore.exceptions import ClientError
  links_path = resolve_links_path(self.architecture, spec["package"])
  os.makedirs(os.path.join(self.workdir, links_path), exist_ok=True)
@@ -614,7 +613,7 @@ class Boto3RemoteSync:
  target = "../../" + target
  symlink(target, link_path)

- def upload_symlinks_and_tarball(self, spec):
+ def upload_symlinks_and_tarball(self, spec) -> None:
  if not self.writeStore:
  return

@@ -11,7 +11,7 @@ import sys
  from jinja2.sandbox import SandboxedEnvironment


- def build_plugin(specs, args, build_order):
+ def build_plugin(specs, args, build_order) -> None:
  """Read a user-provided template from stdin and render it."""
  print(SandboxedEnvironment(autoescape=False)
  .from_string(sys.stdin.read())