alibuild 1.17.19 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. alibuild-1.17.19.data/scripts/aliBuild +137 -0
  2. alibuild-1.17.19.data/scripts/aliDeps +7 -0
  3. alibuild-1.17.19.data/scripts/aliDoctor +7 -0
  4. alibuild-1.17.19.data/scripts/alienv +344 -0
  5. alibuild-1.17.19.data/scripts/pb +7 -0
  6. alibuild-1.17.19.dist-info/METADATA +78 -0
  7. alibuild-1.17.19.dist-info/RECORD +74 -0
  8. alibuild-1.17.19.dist-info/WHEEL +5 -0
  9. alibuild-1.17.19.dist-info/licenses/LICENSE.md +674 -0
  10. alibuild-1.17.19.dist-info/top_level.txt +5 -0
  11. alibuild_helpers/__init__.py +21 -0
  12. alibuild_helpers/_version.py +21 -0
  13. alibuild_helpers/analytics.py +120 -0
  14. alibuild_helpers/args.py +493 -0
  15. alibuild_helpers/build.py +1209 -0
  16. alibuild_helpers/build_template.sh +314 -0
  17. alibuild_helpers/clean.py +83 -0
  18. alibuild_helpers/cmd.py +154 -0
  19. alibuild_helpers/deps.py +116 -0
  20. alibuild_helpers/doctor.py +195 -0
  21. alibuild_helpers/git.py +104 -0
  22. alibuild_helpers/init.py +103 -0
  23. alibuild_helpers/log.py +132 -0
  24. alibuild_helpers/scm.py +31 -0
  25. alibuild_helpers/sl.py +62 -0
  26. alibuild_helpers/sync.py +693 -0
  27. alibuild_helpers/templating_plugin.py +18 -0
  28. alibuild_helpers/utilities.py +662 -0
  29. alibuild_helpers/workarea.py +179 -0
  30. debian/changelog +11 -0
  31. debian/compat +1 -0
  32. debian/control +14 -0
  33. debian/copyright +10 -0
  34. debian/files +1 -0
  35. debian/rules +7 -0
  36. docs/README.md +1 -0
  37. docs/SUPPORT +3 -0
  38. docs/docs/alice_logo.png +0 -0
  39. docs/docs/deps.png +0 -0
  40. docs/docs/index.md +75 -0
  41. docs/docs/quick.md +89 -0
  42. docs/docs/reference.md +430 -0
  43. docs/docs/stylesheets/extra.css +9 -0
  44. docs/docs/troubleshooting.md +346 -0
  45. docs/docs/user.md +413 -0
  46. docs/mkdocs.yml +37 -0
  47. templates/alibuild_to_please.jnj +63 -0
  48. tests/test_analytics.py +42 -0
  49. tests/test_args.py +119 -0
  50. tests/test_build.py +426 -0
  51. tests/test_clean.py +154 -0
  52. tests/test_cmd.py +73 -0
  53. tests/test_deps.py +79 -0
  54. tests/test_doctor.py +128 -0
  55. tests/test_git.py +48 -0
  56. tests/test_hashing.py +67 -0
  57. tests/test_init.py +103 -0
  58. tests/test_log.py +50 -0
  59. tests/test_packagelist.py +235 -0
  60. tests/test_parseRecipe.py +132 -0
  61. tests/test_sync.py +332 -0
  62. tests/test_utilities.py +383 -0
  63. tests/test_workarea.py +101 -0
  64. tests/testdist/broken1.sh +1 -0
  65. tests/testdist/broken2.sh +1 -0
  66. tests/testdist/broken3.sh +3 -0
  67. tests/testdist/broken4.sh +2 -0
  68. tests/testdist/broken5.sh +2 -0
  69. tests/testdist/broken6.sh +2 -0
  70. tests/testdist/broken7.sh +5 -0
  71. tests/testdist/clobber-initdotsh.sh +4 -0
  72. tests/testdist/defaults-o2.sh +10 -0
  73. tests/testdist/delete-etc.sh +4 -0
  74. tests/testdist/tracking-env.sh +6 -0

alibuild_helpers/build_template.sh
@@ -0,0 +1,314 @@
+ #!/bin/bash
+
+ # Automatically generated build script
+ unset DYLD_LIBRARY_PATH
+
+ # Clean up variables which should not be exposed to user code
+ unset AWS_ACCESS_KEY_ID
+ unset AWS_SECRET_ACCESS_KEY
+
+ set -e
+ # Disable command hashing (and stub out `hash`), so freshly installed
+ # executables are picked up immediately.
+ set +h
+ function hash() { true; }
+ export WORK_DIR="${WORK_DIR_OVERRIDE:-%(workDir)s}"
+ export ALIBUILD_CONFIG_DIR="${ALIBUILD_CONFIG_DIR_OVERRIDE:-%(configDir)s}"
+
+ # Insert our own wrapper scripts into $PATH, patched to use the system OpenSSL
+ # instead of the one we build ourselves.
+ export PATH=$WORK_DIR/wrapper-scripts:$PATH
+
+ # The following environment variables are set up by the aliBuild script itself:
+ #
+ # - ARCHITECTURE
+ # - BUILD_REQUIRES
+ # - CACHED_TARBALL
+ # - CAN_DELETE
+ # - COMMIT_HASH
+ # - DEPS_HASH
+ # - DEVEL_HASH
+ # - DEVEL_PREFIX
+ # - INCREMENTAL_BUILD_HASH
+ # - JOBS
+ # - PKGHASH
+ # - PKGNAME
+ # - PKGREVISION
+ # - PKGVERSION
+ # - REQUIRES
+ # - RUNTIME_REQUIRES
+
+ export PKG_NAME="$PKGNAME"
+ export PKG_VERSION="$PKGVERSION"
+ export PKG_BUILDNUM="$PKGREVISION"
+
+ export PKGPATH=${ARCHITECTURE}/${PKGNAME}/${PKGVERSION}-${PKGREVISION}
+ mkdir -p "$WORK_DIR/BUILD" "$WORK_DIR/SOURCES" "$WORK_DIR/TARS" \
+   "$WORK_DIR/SPECS" "$WORK_DIR/INSTALLROOT"
+ # If we are in development mode, install directly in $WORK_DIR/$PKGPATH, so
+ # that we can do "make install" directly into BUILD/$PKGPATH and have changes
+ # propagated. Moreover, devel packages should always go in the official
+ # WORK_DIR.
+ if [ -n "$DEVEL_HASH" ]; then
+   export ALIBUILD_BUILD_WORK_DIR="${WORK_DIR}"
+   export INSTALLROOT="$WORK_DIR/$PKGPATH"
+ else
+   export INSTALLROOT="$WORK_DIR/INSTALLROOT/$PKGHASH/$PKGPATH"
+   export ALIBUILD_BUILD_WORK_DIR="${ALIBUILD_BUILD_WORK_DIR:-$WORK_DIR}"
+ fi
+
+ export BUILDROOT="$ALIBUILD_BUILD_WORK_DIR/BUILD/$PKGHASH"
+ export SOURCEDIR="$WORK_DIR/SOURCES/$PKGNAME/$PKGVERSION/$COMMIT_HASH"
+ export BUILDDIR="$BUILDROOT/$PKGNAME"
+
+ rm -fr "$WORK_DIR/INSTALLROOT/$PKGHASH"
+ # We remove the build directory only if we are not in incremental mode.
+ if [[ "$INCREMENTAL_BUILD_HASH" == 0 ]] && ! rm -rf "$BUILDROOT"; then
+   # Golang sometimes installs files without write permission for ourselves.
+   # This makes the `rm -rf` above fail, so give ourselves write permission.
+   chmod -R o+w "$BUILDROOT" || :
+   rm -rf "$BUILDROOT"
+ fi
+ mkdir -p "$INSTALLROOT" "$BUILDROOT" "$BUILDDIR" "$WORK_DIR/INSTALLROOT/$PKGHASH/$PKGPATH"
+
+ cd "$WORK_DIR/INSTALLROOT/$PKGHASH"
+ cat > "$INSTALLROOT/.meta.json" <<\EOF
+ %(provenance)s
+ EOF
+
+ # Add "source" commands for dependencies to init.sh.
+ # Install init.sh now, so that it is available for debugging in case the build fails.
+ mkdir -p "$INSTALLROOT/etc/profile.d"
+ rm -f "$INSTALLROOT/etc/profile.d/init.sh"
+ cat <<\EOF > "$INSTALLROOT/etc/profile.d/init.sh"
+ %(initdotsh_deps)s
+ EOF
+
+ # Apply dependency initialisation now, but skip setting the variables below until after the build.
+ . "$INSTALLROOT/etc/profile.d/init.sh"
+
+ # Add support for direnv: https://github.com/direnv/direnv/
+ #
+ # This is beneficial whenever the build step requires some environment to be
+ # properly set up in order to work, e.g. to support ninja or protoc.
+ cat << EOF > "$BUILDDIR/.envrc"
+ # Source the build environment which was used for this package
+ WORK_DIR=\${WORK_DIR:-$WORK_DIR} source "\${WORK_DIR:-$WORK_DIR}/${INSTALLROOT#$WORK_DIR/}/etc/profile.d/init.sh"
+ source_up
+ # On macOS we build with the proper installation-relative RPATH, so
+ # DYLD_LIBRARY_PATH is not actually used; it is in fact harmful, since the
+ # extra overhead in the dynamic loader increases startup time a lot.
+ unset DYLD_LIBRARY_PATH
+ EOF
+
+ cd "$BUILDROOT"
+ ln -snf $PKGHASH "${BUILDROOT}-latest"
+ if [[ $DEVEL_PREFIX ]]; then
+   ln -snf $PKGHASH "${BUILDROOT}-latest-$DEVEL_PREFIX"
+ fi
+
+ cd "$BUILDDIR"
+
+ # Actual build script, as defined in the recipe
+
+ # This actually does the build, taking into account shortcuts like having a
+ # pre-built tarball or an incremental recipe (in the case of development
+ # mode).
+ #
+ # - If the build was never done and we do not have a cached tarball,
+ #   build everything as usual.
+ # - If the build was started, we do not have a tarball, and we have a
+ #   non-trivial incremental recipe, use it to continue the build.
+ # - If the build was started, but we do not have an incremental build recipe,
+ #   simply rebuild as usual.
+ # - In case we have a cached tarball, skip the build, expand the tarball, and
+ #   adjust the relocation script so that it takes the new location into
+ #   account.
+ if [[ "$CACHED_TARBALL" == "" && ! -f $BUILDROOT/log ]]; then
+   set -o pipefail
+   (set -x; unset DYLD_LIBRARY_PATH; source "$WORK_DIR/SPECS/$ARCHITECTURE/$PKGNAME/$PKGVERSION-$PKGREVISION/$PKGNAME.sh" 2>&1) | tee "$BUILDROOT/log"
+ elif [[ "$CACHED_TARBALL" == "" && $INCREMENTAL_BUILD_HASH != "0" && -f "$BUILDDIR/.build_succeeded" ]]; then
+   set -o pipefail
+   (%(incremental_recipe)s) 2>&1 | tee "$BUILDROOT/log"
+ elif [[ "$CACHED_TARBALL" == "" ]]; then
+   set -o pipefail
+   (set -x; unset DYLD_LIBRARY_PATH; source "$WORK_DIR/SPECS/$ARCHITECTURE/$PKGNAME/$PKGVERSION-$PKGREVISION/$PKGNAME.sh" 2>&1) | tee "$BUILDROOT/log"
+ else
+   # Unpack the cached tarball in the $INSTALLROOT and remove the unrelocated
+   # files.
+   rm -rf "$BUILDROOT/log"
+   mkdir -p $WORK_DIR/TMP/$PKGHASH
+   tar -xzf "$CACHED_TARBALL" -C "$WORK_DIR/TMP/$PKGHASH"
+   mkdir -p $(dirname $INSTALLROOT)
+   rm -rf $INSTALLROOT
+   mv $WORK_DIR/TMP/$PKGHASH/$ARCHITECTURE/$PKGNAME/$PKGVERSION-* $INSTALLROOT
+   pushd $WORK_DIR/INSTALLROOT/$PKGHASH
+   if [ -w "$INSTALLROOT" ]; then
+     WORK_DIR=$WORK_DIR/INSTALLROOT/$PKGHASH bash -ex $INSTALLROOT/relocate-me.sh
+   fi
+   popd
+   find $INSTALLROOT -name "*.unrelocated" -delete
+   rm -rf $WORK_DIR/TMP/$PKGHASH
+ fi
+
+ # Regenerate init.sh, in case the package build clobbered it. This
+ # particularly happens in the AliEn-Runtime package, since it copies other
+ # packages into its installroot wholesale.
+ # Notice how we only do it if $INSTALLROOT is writable. If it is not, we
+ # assume it points to a CVMFS store, which should be left untouched.
+ if [ -w $INSTALLROOT ]; then
+   mkdir -p "$INSTALLROOT/etc/profile.d"
+   rm -f "$INSTALLROOT/etc/profile.d/init.sh"
+   cat <<\EOF > "$INSTALLROOT/etc/profile.d/init.sh"
+ %(initdotsh_full)s
+ EOF
+
+   cd "$WORK_DIR/INSTALLROOT/$PKGHASH"
+   # Replace the .envrc to point to the final installation directory.
+   cat << EOF > "$BUILDDIR/.envrc"
+ # Source the build environment which was used for this package
+ WORK_DIR=\${WORK_DIR:-$WORK_DIR} source ../../../$PKGPATH/etc/profile.d/init.sh
+ source_up
+ # On macOS we build with the proper installation-relative RPATH, so
+ # DYLD_LIBRARY_PATH is not actually used; it is in fact harmful, since the
+ # extra overhead in the dynamic loader increases startup time a lot.
+ unset DYLD_LIBRARY_PATH
+ EOF
+
+   cat > "$INSTALLROOT/.meta.json" <<\EOF
+ %(provenance)s
+ EOF
+
+   cd "$WORK_DIR/INSTALLROOT/$PKGHASH/$PKGPATH"
+   # Find which files need relocation.
+   { grep -I -H -l -R "\(INSTALLROOT/$PKGHASH\|[@][@]PKGREVISION[@]$PKGHASH[@][@]\)" . || true; } | sed -e 's|^\./||' > "$INSTALLROOT/.original-unrelocated"
+
+   # Relocation script for the <arch>/<pkgname>/<pkgver> structure
+   cat > "$INSTALLROOT/relocate-me.sh" <<EoF
+ #!/bin/bash -e
+ if [[ "\$WORK_DIR" == '' ]]; then
+   echo 'Please define \$WORK_DIR'
+   exit 1
+ fi
+ OP=${PKGPATH}
+ PP=\${PKGPATH:-${PKGPATH}}
+ PH=${PKGHASH}
+ EoF
+
+   while read -r unrelocated; do
+     echo "sed -e \"s|/[^ ;:]*INSTALLROOT/\$PH/\$OP|\$WORK_DIR/\$PP|g; s|[@][@]PKGREVISION[@]\$PH[@][@]|$PKGREVISION|g\"" \
+       "\$PP/$unrelocated.unrelocated > \$PP/$unrelocated"
+   done < "$INSTALLROOT/.original-unrelocated" >> "$INSTALLROOT/relocate-me.sh"
+
+   # Always relocate the modulefile (if present) so that it also works in devel mode.
+   if [[ ! -s "$INSTALLROOT/.original-unrelocated" && -f "$INSTALLROOT/etc/modulefiles/$PKGNAME" ]]; then
+     echo "mv -f \$PP/etc/modulefiles/$PKGNAME \$PP/etc/modulefiles/${PKGNAME}.forced-relocation && sed -e \"s|[@][@]PKGREVISION[@]\$PH[@][@]|$PKGREVISION|g\" \$PP/etc/modulefiles/${PKGNAME}.forced-relocation > \$PP/etc/modulefiles/$PKGNAME" >> "$INSTALLROOT/relocate-me.sh"
+   fi
+
+   # Find libraries and executables needing relocation on macOS
+   if [[ ${ARCHITECTURE:0:3} == "osx" ]]; then
+     otool_arch=$(echo "${ARCHITECTURE#osx_}" | tr - _)  # otool knows x86_64, not x86-64
+
+     /usr/bin/find ${RELOCATE_PATHS:-bin lib lib64} -type d \( -name '*.dist-info' -o -path '*/pytz/zoneinfo' \) -prune -false -o -type f \
+       -not -name '*.py' -not -name '*.pyc' -not -name '*.pyi' -not -name '*.pxd' -not -name '*.inc' -not -name '*.js' -not -name '*.json' \
+       -not -name '*.xml' -not -name '*.xsl' -not -name '*.txt' -not -name '*.dat' -not -name '*.mat' -not -name '*.sav' -not -name '*.csv' \
+       -not -name '*.wav' -not -name '*.png' -not -name '*.svg' -not -name '*.css' -not -name '*.html' -not -name '*.woff' -not -name '*.woff2' -not -name '*.ttf' \
+       -not -name LICENSE -not -name COPYING -not -name '*.c' -not -name '*.cc' -not -name '*.cxx' -not -name '*.cpp' -not -name '*.h' -not -name '*.hpp' |
+     while read -r BIN; do
+       MACHOTYPE=$(set +o pipefail; otool -arch "$otool_arch" -h "$PWD/$BIN" 2> /dev/null | grep filetype -A1 | awk 'END{print $5}')
+
+       # See mach-o/loader.h from XNU sources: 2 == executable, 6 == dylib, 8 == bundle
+       if [[ $MACHOTYPE == 6 || $MACHOTYPE == 8 ]]; then
+         # Only dylibs: relocate LC_ID_DYLIB
+         if otool -arch "$otool_arch" -D "$PWD/$BIN" 2> /dev/null | tail -n1 | grep -q "$PKGHASH"; then
+           cat <<EOF >> "$INSTALLROOT/relocate-me.sh"
+ install_name_tool -id "\$(otool -arch $otool_arch -D "\$PP/$BIN" | tail -n1 | sed -e "s|/[^ ]*INSTALLROOT/\$PH/\$OP|\$WORK_DIR/\$PP|g")" "\$PP/$BIN"
+ EOF
+         elif otool -arch "$otool_arch" -D "$PWD/$BIN" 2> /dev/null | tail -n1 | grep -vq /; then
+           cat <<EOF >> "$INSTALLROOT/relocate-me.sh"
+ install_name_tool -id "\$WORK_DIR/\$PP/$BIN" "\$PP/$BIN"
+ EOF
+         fi
+       fi
+
+       if [[ $MACHOTYPE == 2 || $MACHOTYPE == 6 || $MACHOTYPE == 8 ]]; then
+         # Both libs and binaries: relocate LC_RPATH
+         if otool -arch "$otool_arch" -l "$PWD/$BIN" 2> /dev/null | grep -A2 LC_RPATH | grep path | grep -q "$PKGHASH"; then
+           cat <<EOF >> "$INSTALLROOT/relocate-me.sh"
+ OLD_RPATHS=\$(otool -arch $otool_arch -l "\$PP/$BIN" | grep -A2 LC_RPATH | grep path | grep "\$PH" | sed -e 's|^.*path ||' -e 's| .*$||' | sort -u)
+ for OLD_RPATH in \$OLD_RPATHS; do
+   NEW_RPATH=\${OLD_RPATH/#*INSTALLROOT\/\$PH\/\$OP/\$WORK_DIR/\$PP}
+   install_name_tool -rpath "\$OLD_RPATH" "\$NEW_RPATH" "\$PP/$BIN"
+ done
+ EOF
+         fi
+
+         # Both libs and binaries: relocate LC_LOAD_DYLIB
+         if otool -arch "$otool_arch" -l "$PWD/$BIN" 2> /dev/null | grep -A2 LC_LOAD_DYLIB | grep name | grep -q $PKGHASH; then
+           cat <<EOF >> "$INSTALLROOT/relocate-me.sh"
+ OLD_LOAD_DYLIBS=\$(otool -arch $otool_arch -l "\$PP/$BIN" | grep -A2 LC_LOAD_DYLIB | grep name | grep "\$PH" | sed -e 's|^.*name ||' -e 's| .*$||' | sort -u)
+ for OLD_LOAD_DYLIB in \$OLD_LOAD_DYLIBS; do
+   NEW_LOAD_DYLIB=\${OLD_LOAD_DYLIB/#*INSTALLROOT\/\$PH\/\$OP/\$WORK_DIR/\$PP}
+   install_name_tool -change "\$OLD_LOAD_DYLIB" "\$NEW_LOAD_DYLIB" "\$PP/$BIN"
+ done
+ EOF
+         fi
+       fi
+     done || true
+   fi
+
+   cat "$INSTALLROOT/relocate-me.sh"
+   xargs -n1 -I{} cp '{}' '{}'.unrelocated < "$INSTALLROOT/.original-unrelocated"
+ fi
+ cd "$WORK_DIR/INSTALLROOT/$PKGHASH"
+
+ # Archive creation
+ HASHPREFIX=$(echo $PKGHASH | cut -b1,2)
+ HASH_PATH=$ARCHITECTURE/store/$HASHPREFIX/$PKGHASH
+ mkdir -p "${WORK_DIR}/TARS/$HASH_PATH" \
+   "${WORK_DIR}/TARS/$ARCHITECTURE/$PKGNAME"
+
+ PACKAGE_WITH_REV=$PKGNAME-$PKGVERSION-$PKGREVISION.$ARCHITECTURE.tar.gz
+ # Copy and tar/compress (if applicable) in parallel.
+ # Use -H to match tar's behaviour of preserving hardlinks.
+ rsync -aH "$WORK_DIR/INSTALLROOT/$PKGHASH/" "$WORK_DIR" & rsync_pid=$!
+ if [ "$CAN_DELETE" = 1 ]; then
+   # We're deleting the tarball anyway, so there is no point in creating a new
+   # one. There might be an old existing tarball, and we should delete it.
+   rm -f "$WORK_DIR/TARS/$HASH_PATH/$PACKAGE_WITH_REV"
+ elif [ -z "$CACHED_TARBALL" ]; then
+   # Use pigz to compress, if we can, because it's multicore.
+   gzip=$(command -v pigz) || gzip=$(command -v gzip)
+   # We don't have an existing tarball, and we want to keep the one we create now.
+   tar -cC "$WORK_DIR/INSTALLROOT/$PKGHASH" . |
+     # Write to a ".processing" file first, to avoid leaving broken leftovers
+     # behind if the tar fails.
+     $gzip -c > "$WORK_DIR/TARS/$HASH_PATH/$PACKAGE_WITH_REV.processing"
+   mv "$WORK_DIR/TARS/$HASH_PATH/$PACKAGE_WITH_REV.processing" \
+     "$WORK_DIR/TARS/$HASH_PATH/$PACKAGE_WITH_REV"
+   ln -nfs "../../$HASH_PATH/$PACKAGE_WITH_REV" \
+     "$WORK_DIR/TARS/$ARCHITECTURE/$PKGNAME/$PACKAGE_WITH_REV"
+ fi
+ wait "$rsync_pid"
+
+ # We've copied files into their final place; now relocate.
+ cd "$WORK_DIR"
+ if [ -w "$WORK_DIR/$ARCHITECTURE/$PKGNAME/$PKGVERSION-$PKGREVISION" ]; then
+   bash -ex "$ARCHITECTURE/$PKGNAME/$PKGVERSION-$PKGREVISION/relocate-me.sh"
+ fi
+ # The last package built gets a "latest" mark.
+ ln -snf $PKGVERSION-$PKGREVISION $ARCHITECTURE/$PKGNAME/latest
+
+ # The latest package built for a given devel prefix gets latest-$BUILD_FAMILY.
+ if [[ $BUILD_FAMILY ]]; then
+   ln -snf $PKGVERSION-$PKGREVISION $ARCHITECTURE/$PKGNAME/latest-$BUILD_FAMILY
+ fi
+
+ # Once the package is fully installed, create the file that marks it as
+ # successful.
+ if [ -w "$WORK_DIR/$PKGPATH" ]; then
+   echo "$PKGHASH" > "$WORK_DIR/$PKGPATH/.build-hash"
+ fi
+ # Mark the build as successful with a placeholder. This allows running the
+ # incremental recipe in case the package is in development mode.
+ echo "${DEVEL_HASH}${DEPS_HASH}" > "$BUILDDIR/.build_succeeded"

alibuild_helpers/clean.py
@@ -0,0 +1,83 @@
+ # Import as functions if they do not have any side effects
+ from os.path import dirname, basename
+
+ # Import as modules if we need to mock them later
+ import os.path as path
+ import os
+ import glob
+ import sys
+ import shutil
+ from alibuild_helpers import log
+
+
+ def decideClean(workDir, architecture, aggressiveCleanup):
+   """Decide what to delete, without actually doing it.
+
+   To clean up obsolete build directories:
+   - Find all the symlinks in "BUILD".
+   - Find all the directories in "BUILD".
+   - Schedule a directory for deletion if it does not have a symlink.
+
+   Installed packages are deleted from the final installation directory
+   according to the same scheme.
+
+   The temporary directory and temporary install roots are always cleaned up.
+
+   In aggressive mode, the following are also cleaned up:
+
+   - Tarballs (but not their symlinks), since these are expected to either be
+     unpacked in the installation directory, available from the remote store
+     for download, or not needed any more if their installation directory is
+     gone.
+   - Git checkouts for specific tags, since we expect to be able to rebuild
+     those easily from the mirror directory.
+
+   In the case of installed packages and tarballs, only those for the given
+   architecture are considered for deletion.
+   """
+   symlinksBuild = [os.readlink(x) for x in glob.glob("%s/BUILD/*-latest*" % workDir)]
+   # $WORK_DIR/TMP should always be cleaned up; the only case where it is left
+   # behind is when we run out of space while unpacking. INSTALLROOT is
+   # similar, though it is not cleaned up automatically in case of build
+   # errors. $WORK_DIR/<architecture>/store can be cleaned up as well, because
+   # we do not need the actual tarballs after they have been built.
+   toDelete = ["%s/TMP" % workDir, "%s/INSTALLROOT" % workDir]
+   if aggressiveCleanup:
+     toDelete += ["%s/TARS/%s/store" % (workDir, architecture),
+                  "%s/SOURCES" % workDir]
+   allBuildStuff = glob.glob("%s/BUILD/*" % workDir)
+   toDelete += [x for x in allBuildStuff
+                if not path.islink(x) and basename(x) not in symlinksBuild]
+   installGlob = "%s/%s/*/" % (workDir, architecture)
+   installedPackages = set(dirname(x) for x in glob.glob(installGlob))
+   symlinksInstall = []
+   for x in installedPackages:
+     symlinksInstall += [path.realpath(y) for y in glob.glob(x + "/latest*")]
+   toDelete += [x for x in glob.glob(installGlob + "*")
+                if not path.islink(x) and path.realpath(x) not in symlinksInstall]
+   toDelete = [x for x in toDelete if path.exists(x)]
+   return toDelete
+
+
+ def doClean(workDir, architecture, aggressiveCleanup, dryRun):
+   """CLI entry point: clean up the build area."""
+   toDelete = decideClean(workDir, architecture, aggressiveCleanup)
+   if not toDelete:
+     log.info("Nothing to delete.")
+     sys.exit(0)
+
+   log.banner("This %s delete the following directories:\n%s",
+              "would" if dryRun else "will", "\n".join(toDelete))
+   if dryRun:
+     log.info("--dry-run / -n specified. Doing nothing.")
+     sys.exit(0)
+
+   have_error = False
+   for directory in toDelete:
+     try:
+       shutil.rmtree(directory)
+     except OSError as exc:
+       have_error = True
+       log.error("Unable to delete %s:", directory, exc_info=exc)
+
+   sys.exit(1 if have_error else 0)
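
For orientation, a minimal sketch of how these two helpers compose; the work directory and architecture strings are illustrative (the real values come from the aliBuild command line):

    # Hypothetical usage of the helpers above.
    from alibuild_helpers.clean import decideClean, doClean

    # Preview what would be removed, without touching anything.
    for victim in decideClean(workDir="sw", architecture="slc8_x86-64",
                              aggressiveCleanup=False):
      print("would remove:", victim)

    # doClean() makes the same decision, logs a banner, honours dry-run mode,
    # and exits with status 1 if any directory could not be removed.
    doClean("sw", "slc8_x86-64", aggressiveCleanup=False, dryRun=True)

Note that doClean() always calls sys.exit(), so it is only suitable as a CLI entry point.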

alibuild_helpers/cmd.py
@@ -0,0 +1,154 @@
+ import os
+ import os.path
+ import time
+ from subprocess import Popen, PIPE, STDOUT
+ from textwrap import dedent
+ from subprocess import TimeoutExpired
+ from shlex import quote
+
+ from alibuild_helpers.log import debug, warning, dieOnError
+
+ def decode_with_fallback(data):
+   """Try to decode DATA as utf-8; if that doesn't work, fall back to latin-1.
+
+   This combination should cover every possible byte string, as latin-1 covers
+   every possible single byte.
+   """
+   if isinstance(data, bytes):
+     try:
+       return data.decode("utf-8")
+     except UnicodeDecodeError:
+       return data.decode("latin-1")
+   else:
+     return str(data)
+
+
+ def getoutput(command, timeout=None):
+   """Run command, check it succeeded, and return its stdout as a string."""
+   proc = Popen(command, shell=isinstance(command, str), stdout=PIPE, stderr=PIPE)
+   try:
+     stdout, stderr = proc.communicate(timeout=timeout)
+   except TimeoutExpired:
+     warning("Process %r timed out; terminated", command)
+     proc.terminate()
+     stdout, stderr = proc.communicate()
+   dieOnError(proc.returncode, "Command %s failed with code %d: %s" %
+              (command, proc.returncode, decode_with_fallback(stderr)))
+   return decode_with_fallback(stdout)
+
+
+ def getstatusoutput(command, timeout=None, cwd=None):
+   """Run command and return its return code and output (stdout and stderr)."""
+   proc = Popen(command, shell=isinstance(command, str), stdout=PIPE, stderr=STDOUT, cwd=cwd)
+   try:
+     merged_output, _ = proc.communicate(timeout=timeout)
+   except TimeoutExpired:
+     warning("Process %r timed out; terminated", command)
+     proc.terminate()
+     merged_output, _ = proc.communicate()
+   merged_output = decode_with_fallback(merged_output)
+   # Strip a single trailing newline, if one exists, to match the behaviour of
+   # subprocess.getstatusoutput.
+   if merged_output.endswith("\n"):
+     merged_output = merged_output[:-1]
+   return proc.returncode, merged_output
+
+
+ def execute(command, printer=debug, timeout=None):
+   popen = Popen(command, shell=isinstance(command, str), stdout=PIPE, stderr=STDOUT)
+   start_time = time.time()
+   for line in iter(popen.stdout.readline, b""):
+     printer("%s", decode_with_fallback(line).strip("\n"))
+     if timeout is not None and time.time() > start_time + timeout:
+       popen.terminate()
+       break
+   out = decode_with_fallback(popen.communicate()[0]).strip("\n")
+   if out:
+     printer("%s", out)
+   return popen.returncode
+
+
+ # Prefer /bin/bash; if running it fails, fall back to whatever "bash" is on $PATH.
+ BASH = "bash" if getstatusoutput("/bin/bash --version")[0] else "/bin/bash"
+
+
+ class DockerRunner:
+   """A context manager for running commands inside a Docker container.
+
+   If the Docker image given is None or empty, the commands are run on the
+   host instead.
+   """
+
+   def __init__(self, docker_image, docker_run_args=(), extra_env={}, extra_volumes=[]) -> None:
+     self._docker_image = docker_image
+     self._docker_run_args = docker_run_args
+     self._container = None
+     self._extra_env = extra_env
+     self._extra_volumes = extra_volumes
+
+   def __enter__(self):
+     if self._docker_image:
+       envOpts = []
+       volumes = []
+       for key, value in self._extra_env.items():
+         envOpts.append("-e")
+         envOpts.append(f"{key}={value}")
+       for v in self._extra_volumes:
+         volumes.append("-v")
+         volumes.append(v)
+       # "sleep inf" pauses forever, until we kill it.
+       cmd = ["docker", "run", "--detach"] + envOpts + volumes + ["--rm", "--entrypoint="]
+       cmd += self._docker_run_args
+       cmd += [self._docker_image, "sleep", "inf"]
+       self._container = getoutput(cmd).strip()
+
+     def getstatusoutput_docker(cmd, cwd=None):
+       if self._container is None:
+         command_prefix = ""
+         if self._extra_env:
+           command_prefix = "env " + " ".join("{}={}".format(k, v) for (k, v) in self._extra_env.items()) + " "
+         return getstatusoutput("{}{} -c {}".format(command_prefix, BASH, quote(cmd)), cwd=cwd)
+       envOpts = []
+       for key, value in self._extra_env.items():
+         envOpts.append("-e")
+         envOpts.append("{}={}".format(key, value))
+       exec_cmd = ["docker", "container", "exec"] + envOpts + [self._container, "bash", "-c", cmd]
+       return getstatusoutput(exec_cmd, cwd=cwd)
+
+     return getstatusoutput_docker
+
+   def __exit__(self, exc_type, exc_value, traceback):
+     if self._container is not None:
+       # 'docker container stop' sends SIGTERM, which doesn't work on 'sleep'
+       # for some reason. Kill it directly instead, so we don't have to wait.
+       getstatusoutput("docker container kill " + quote(self._container))
+       self._container = None
+     return False  # propagate any exception that may have occurred
+
+
+ def install_wrapper_script(name, work_dir):
+   script_dir = os.path.join(work_dir, "wrapper-scripts")
+   try:
+     os.makedirs(script_dir)
+   except OSError as exc:
+     # Errno 17 (EEXIST) means the directory already exists.
+     if exc.errno != 17:
+       raise
+   # Create a wrapper script that cleans up the environment, so we don't see
+   # the OpenSSL built by aliBuild.
+   with open(os.path.join(script_dir, name), "w") as scriptf:
+     # Compute the "real" executable path each time, as the wrapper script
+     # might be called on the host or in a container.
+     scriptf.write(dedent("""\
+     #!/bin/sh
+     exec env -u LD_LIBRARY_PATH -u DYLD_LIBRARY_PATH \\
+       "$(which -a "$(basename "$0")" | grep -Fxv "$0" | head -1)" "$@"
+     """))
+     os.fchmod(scriptf.fileno(), 0o755)  # make the wrapper script executable
+   # If $PATH is empty, this is bad, because we need to fall back to the "real"
+   # executable that our script is wrapping.
+   dieOnError(not os.environ.get("PATH"),
+              "$PATH is unset or empty. Cannot find any executables. Try "
+              "rerunning this command inside a login shell (e.g. `bash -l`). "
+              "If that doesn't work, run `export PATH` manually.")
+   os.environ["PATH"] = script_dir + ":" + os.environ["PATH"]

alibuild_helpers/deps.py
@@ -0,0 +1,116 @@
+ #!/usr/bin/env python3
+
+ from alibuild_helpers.log import debug, error, info, dieOnError
+ from alibuild_helpers.utilities import parseDefaults, readDefaults, getPackageList, validateDefaults
+ from alibuild_helpers.cmd import DockerRunner, execute
+ from tempfile import NamedTemporaryFile
+ from os import remove, path
+
+ def doDeps(args, parser):
+   # Check if we have an output file
+   if not args.outgraph:
+     parser.error("Specify a PDF output file with --outgraph")
+
+   # Resolve all the package parsing boilerplate
+   specs = {}
+   defaultsReader = lambda: readDefaults(args.configDir, args.defaults, parser.error, args.architecture)
+   (err, overrides, taps) = parseDefaults(args.disable, defaultsReader, debug)
+   with DockerRunner(args.dockerImage, args.docker_extra_args,
+                     extra_env={"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else path.abspath(args.configDir)},
+                     extra_volumes=[f"{path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
+     def performCheck(pkg, cmd):
+       return getstatusoutput_docker(cmd)
+
+     systemPackages, ownPackages, failed, validDefaults = \
+       getPackageList(packages = [args.package],
+                      specs = specs,
+                      configDir = args.configDir,
+                      preferSystem = args.preferSystem,
+                      noSystem = args.noSystem,
+                      architecture = args.architecture,
+                      disable = args.disable,
+                      defaults = args.defaults,
+                      performPreferCheck = performCheck,
+                      performRequirementCheck = performCheck,
+                      performValidateDefaults = lambda spec: validateDefaults(spec, args.defaults),
+                      overrides = overrides,
+                      taps = taps,
+                      log = debug)
+
+   dieOnError(validDefaults and args.defaults not in validDefaults,
+              "Specified default `%s' is not compatible with the packages you want to build.\n" % args.defaults +
+              "Valid defaults:\n\n- " +
+              "\n- ".join(sorted(validDefaults)))
+
+   for s in specs.values():
+     # Remove disabled packages
+     s["requires"] = [r for r in s["requires"] if r not in args.disable and r != "defaults-release"]
+     s["build_requires"] = [r for r in s["build_requires"] if r not in args.disable and r != "defaults-release"]
+     s["runtime_requires"] = [r for r in s["runtime_requires"] if r not in args.disable and r != "defaults-release"]
+
+   # Determine which packages are only build/runtime dependencies
+   all_build = set()
+   all_runtime = set()
+   for spec in specs.values():
+     all_build.update(spec["build_requires"])
+     all_runtime.update(spec["runtime_requires"])
+   all_both = all_build.intersection(all_runtime)
+
+   dot = "digraph {\n"
+   dot += "ratio=\"0.52\"\n"
+   dot += 'graph [nodesep=0.25, ranksep=0.2];\n'
+   dot += 'node [width=1.5, height=1, fontsize=46, margin=0.1];\n'
+   dot += 'edge [penwidth=2];\n'
+
+   for k, spec in specs.items():
+     if k == "defaults-release":
+       continue
+
+     # Determine node color based on its dependency status
+     color = None
+     if k in all_both:
+       color = "tomato1"
+     elif k in all_runtime:
+       color = "greenyellow"
+     elif k in all_build:
+       color = "plum"
+     elif k == args.package:
+       color = "gold"
+     else:
+       assert color, "This should not happen (happened for %s)" % k
+
+     # Node definition
+     dot += '"%s" [shape=box, style="rounded,filled", fontname="helvetica", fillcolor=%s]\n' % (k, color)
+
+     # Connections (different depending on whether it's a build or a runtime dependency)
+     for dep in spec["build_requires"]:
+       dot += '"%s" -> "%s" [color=grey70]\n' % (k, dep)
+     for dep in spec["runtime_requires"]:
+       dot += '"%s" -> "%s" [color=dodgerblue3]\n' % (k, dep)
+
+   dot += "}\n"
+
+   if args.outdot:
+     fp = open(args.outdot, "wt")
+   else:
+     fp = NamedTemporaryFile(delete=False, mode="wt")
+   fp.write(dot)
+   fp.close()
+
+   # Check if we have dot in PATH
+   try:
+     execute(["dot", "-V"])
+   except Exception:
+     dieOnError(True, "Could not find dot in PATH. Please install Graphviz and add it to PATH.")
+   try:
+     if args.neat:
+       execute("tred {dotFile} > {dotFile}.0 && mv {dotFile}.0 {dotFile}".format(dotFile=fp.name))
+     execute(["dot", fp.name, "-Tpdf", "-o", args.outgraph])
+   except Exception as e:
+     error("Error generating dependencies with dot: %s: %s", type(e).__name__, e)
+   else:
+     info("Dependencies graph generated: %s", args.outgraph)
+     if fp.name != args.outdot:
+       remove(fp.name)
+     else:
+       info("Intermediate dot file for Graphviz saved: %s", args.outdot)
+   return True
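
doDeps() backs the aliDeps wrapper script listed above. A hypothetical driver, using only the args attributes referenced in this file; the attribute values are illustrative, and the real namespace is built by alibuild_helpers/args.py:

    # Hypothetical driver for doDeps(); values are illustrative.
    import argparse
    from alibuild_helpers.deps import doDeps

    parser = argparse.ArgumentParser()
    args = argparse.Namespace(
      package="O2", outgraph="deps.pdf", outdot=None, neat=True,
      configDir="alidist", defaults="o2", disable=[], architecture="slc8_x86-64",
      preferSystem=False, noSystem=False, docker=False, dockerImage=None,
      docker_extra_args=[],
    )
    doDeps(args, parser)  # writes deps.pdf via Graphviz dot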