hashicorp-checkpoint 0.1.5 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3402 @@
+ #!/usr/bin/env bash
+ # Copyright (c) HashiCorp, Inc.
+ # SPDX-License-Identifier: MPL-2.0
+ #
+ # shellcheck disable=SC2119
+ # shellcheck disable=SC2164
+
+ # If the bash version isn't at least 4, bail
+ if [ "${BASH_VERSINFO:-0}" -lt "4" ]; then
+   printf "ERROR: Expected bash version >= 4 (is: %d)" "${BASH_VERSINFO:-0}"
+   exit 1
+ fi
+
+ # Let's have some emojis
+ WARNING_ICON="⚠️"
+ ERROR_ICON="🛑"
+
+ # Coloring
+ # shellcheck disable=SC2034
+ TEXT_BOLD='\e[1m'
+ TEXT_RED='\e[31m'
+ # shellcheck disable=SC2034
+ TEXT_GREEN='\e[32m'
+ TEXT_YELLOW='\e[33m'
+ TEXT_CYAN='\e[36m'
+ TEXT_CLEAR='\e[0m'
+
+ # Common variables
+ export full_sha="${GITHUB_SHA}"
+ export short_sha="${full_sha:0:8}"
+ export ident_ref="${GITHUB_REF#*/*/}"
+ export repository="${GITHUB_REPOSITORY}"
+ export repo_owner="${repository%/*}"
+ export repo_name="${repository#*/}"
+ # shellcheck disable=SC2153
+ export asset_cache="${ASSETS_PRIVATE_SHORTTERM}/${repository}/${GITHUB_ACTION}"
+ export run_number="${GITHUB_RUN_NUMBER}"
+ export run_id="${GITHUB_RUN_ID}"
+ export job_id="${run_id}-${run_number}"
+ readonly hc_releases_metadata_filename="release-meta.json"
+ # This value is used in our cleanup trap to restore the value in cases
+ # where a function call may have failed and did not restore it
+ readonly _repository_backup="${repository}"
+
+ if [ -z "${ci_bin_dir}" ]; then
+   if ci_bin_dir="$(realpath ./.ci-bin)"; then
+     export ci_bin_dir
+   else
+     echo "ERROR: Failed to create the local CI bin directory"
+     exit 1
+   fi
+ fi
+
+ # We are always noninteractive
+ export DEBIAN_FRONTEND=noninteractive
+
+ # If we are on a runner and debug mode is enabled,
+ # enable debug mode for ourselves too
+ if [ -n "${RUNNER_DEBUG}" ]; then
+   DEBUG=1
+ fi
+
+ # If DEBUG is enabled and we are running tests,
+ # flag it so we can adjust where output is sent.
+ if [ -n "${DEBUG}" ] && [ -n "${BATS_TEST_FILENAME}" ]; then
+   DEBUG_WITH_BATS=1
+ fi
+
+ # Write debug output to stderr. Message template
+ # and arguments are passed to `printf` for formatting.
+ #
+ # $1: message template
+ # $#: message arguments
+ #
+ # NOTE: Debug output is only displayed when DEBUG is set
+ function debug() {
+   if [ -n "${DEBUG}" ]; then
+     local msg_template="${1}"
+     local i=$(( ${#} - 1 ))
+     local msg_args=("${@:2:$i}")
+     # Update template to include caller information
+     msg_template=$(printf "<%s(%s:%d)> %s" "${FUNCNAME[1]}" "${BASH_SOURCE[1]}" "${BASH_LINENO[0]}" "${msg_template}")
+     #shellcheck disable=SC2059
+     msg="$(printf "${msg_template}" "${msg_args[@]}")"
+
+     if [ -n "${DEBUG_WITH_BATS}" ]; then
+       printf "%b%s%b\n" "${TEXT_CYAN}" "${msg}" "${TEXT_CLEAR}" >&3
+     else
+       printf "%b%s%b\n" "${TEXT_CYAN}" "${msg}" "${TEXT_CLEAR}" >&2
+     fi
+   fi
+ }
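+
+ # Usage sketch (hypothetical values; output only appears when DEBUG is set):
+ #   DEBUG=1 debug "processing %d assets for %s" "${#assets[@]}" "${repo_name}"
+ #   # stderr -> <main(ci.sh:10)> processing 3 assets for vagrant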
+
+ # Wrap the pushd command so we fail
+ # if the pushd command fails. Arguments
+ # are just passed through.
+ function pushd() {
+   debug "executing 'pushd %s'" "${*}"
+   command builtin pushd "${@}" > /dev/null || exit 1
+ }
+
+ # Wrap the popd command so we fail
+ # if the popd command fails. Arguments
+ # are just passed through.
+ # shellcheck disable=SC2120
+ function popd() {
+   debug "executing 'popd %s'" "${*}"
+   command builtin popd "${@}" || exit 1
+ }
+
+ # Wraps the aws CLI command to support
+ # role based access. It will check for
+ # expected environment variables when
+ # a role has been assumed. If they are
+ # not found, it will assume the configured
+ # role. If the role has already been
+ # assumed, it will check that the credentials
+ # have not timed out, and re-assume the
+ # role if so. If no role information is
+ # provided, it will just pass the command
+ # through directly
+ #
+ # NOTE: Required environment variable: AWS_ASSUME_ROLE_ARN
+ # NOTE: This was a wrapper for the AWS command that would properly
+ #       handle the assume role process and automatically refresh
+ #       if close to expiry. With credentials being handled by the doormat
+ #       action now, this is no longer needed but remains in case it's
+ #       needed for some reason in the future.
+ function aws_deprecated() {
+   # Grab the actual aws cli path
+   if ! aws_path="$(which aws)"; then
+     (>&2 echo "AWS error: failed to locate aws cli executable")
+     return 1
+   fi
+   # First, check if the role ARN environment variable is
+   # configured. If it is not, just pass through.
+   if [ "${AWS_ASSUME_ROLE_ARN}" = "" ]; then
+     "${aws_path}" "${@}"
+     return $?
+   fi
+   # Check if a role has already been assumed. If it
+   # has, validate the credentials have not timed out
+   # and pass through.
+   if [ "${AWS_SESSION_TOKEN}" != "" ]; then
+     # Cut off part of the expiration so we don't end up hitting
+     # the expiration just as we make our call
+     expires_at=$(date -d "${AWS_SESSION_EXPIRATION} - 20 sec" "+%s")
+     if (( "${expires_at}" > $(date +%s) )); then
+       "${aws_path}" "${@}"
+       return $?
+     fi
+     # If we are here then the credentials were not
+     # valid so clear the session token and restore
+     # original credentials
+     unset AWS_SESSION_TOKEN
+     unset AWS_SESSION_EXPIRATION
+     export AWS_ACCESS_KEY_ID="${CORE_AWS_ACCESS_KEY_ID}"
+     export AWS_SECRET_ACCESS_KEY="${CORE_AWS_SECRET_ACCESS_KEY}"
+   fi
+   # Now let's assume the role
+   if aws_output="$("${aws_path}" sts assume-role --role-arn "${AWS_ASSUME_ROLE_ARN}" --role-session-name "VagrantCI@${repo_name}-${job_id}")"; then
+     export CORE_AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID}"
+     export CORE_AWS_SECRET_ACCESS_KEY="${AWS_SECRET_ACCESS_KEY}"
+     id="$(printf '%s' "${aws_output}" | jq -r .Credentials.AccessKeyId)" || failed=1
+     key="$(printf '%s' "${aws_output}" | jq -r .Credentials.SecretAccessKey)" || failed=1
+     token="$(printf '%s' "${aws_output}" | jq -r .Credentials.SessionToken)" || failed=1
+     expire="$(printf '%s' "${aws_output}" | jq -r .Credentials.Expiration)" || failed=1
+     if [ "${failed}" = "1" ]; then
+       (>&2 echo "Failed to extract assume role credentials")
+       return 1
+     fi
+     unset aws_output
+     export AWS_ACCESS_KEY_ID="${id}"
+     export AWS_SECRET_ACCESS_KEY="${key}"
+     export AWS_SESSION_TOKEN="${token}"
+     export AWS_SESSION_EXPIRATION="${expire}"
+   else
+     (>&2 echo "AWS assume role error: ${aws_output}")
+     return 1
+   fi
+   # And we can execute!
+   "${aws_path}" "${@}"
+ }
+
+ # Path to file used for output redirect
+ # and extracting messages for warning and
+ # failure information sent to slack
+ function output_file() {
+   if [ "${1}" = "clean" ] && [ -f "${ci_output_file_path}" ]; then
+     rm -f "${ci_output_file_path}"
+     unset ci_output_file_path
+   fi
+   if [ -z "${ci_output_file_path}" ] || [ ! -f "${ci_output_file_path}" ]; then
+     ci_output_file_path="$(mktemp)"
+   fi
+
+   printf "%s" "${ci_output_file_path}"
+ }
+
+ # Write failure message, send error to configured
+ # slack, and exit with non-zero status. If an
+ # "$(output_file)" file exists, the last 5 lines will be
+ # included in the slack message.
+ #
+ # $1: Failure message
+ function failure() {
+   local msg_template="${1}"
+   local i=$(( ${#} - 1 ))
+   local msg_args=("${@:2:$i}")
+
+   # Update template to include caller information if in DEBUG mode
+   if [ -n "${DEBUG}" ]; then
+     msg_template=$(printf "<%s(%s:%d)> %s" "${FUNCNAME[1]}" "${BASH_SOURCE[1]}" "${BASH_LINENO[0]}" "${msg_template}")
+   fi
+   #shellcheck disable=SC2059
+   msg="$(printf "${msg_template}" "${msg_args[@]}")"
+
+   if [ -n "${DEBUG_WITH_BATS}" ]; then
+     printf "%s %b%s%b\n" "${ERROR_ICON}" "${TEXT_RED}" "${msg}" "${TEXT_CLEAR}" >&3
+   else
+     printf "%s %b%s%b\n" "${ERROR_ICON}" "${TEXT_RED}" "${msg}" "${TEXT_CLEAR}" >&2
+   fi
+
+   if [ -n "${SLACK_WEBHOOK}" ]; then
+     if [ -f "$(output_file)" ]; then
+       slack -s error -m "ERROR: ${msg}" -f "$(output_file)" -T 5
+     else
+       slack -s error -m "ERROR: ${msg}"
+     fi
+   fi
+   exit 1
+ }
+
+ # Write warning message, send warning to configured
+ # slack
+ #
+ # $1: Warning message
+ function warn() {
+   local msg_template="${1}"
+   local i=$(( ${#} - 1 ))
+   local msg_args=("${@:2:$i}")
+
+   #shellcheck disable=SC2059
+   msg="$(printf "${msg_template}" "${msg_args[@]}")"
+
+   printf "%s %b%s%b\n" "${WARNING_ICON}" "${TEXT_YELLOW}" "${msg}" "${TEXT_CLEAR}" >&2
+
+   if [ -n "${SLACK_WEBHOOK}" ]; then
+     if [ -f "$(output_file)" ]; then
+       slack -s warn -m "WARNING: ${msg}" -f "$(output_file)"
+     else
+       slack -s warn -m "WARNING: ${msg}"
+     fi
+   fi
+ }
+
+ # Write an informational message
+ function info() {
+   local msg_template="${1}\n"
+   local i=$(( ${#} - 1 ))
+   local msg_args=("${@:2:$i}")
+
+   #shellcheck disable=SC2059
+   printf "${msg_template}" "${msg_args[@]}" >&2
+ }
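+
+ # Usage sketch (hypothetical values):
+ #   warn "disk usage at %d%%" "${usage}"          # prints a warning, notifies Slack if SLACK_WEBHOOK is set
+ #   failure "unable to stage %s" "${artifact}"    # prints an error, notifies Slack, exits 1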
+
+ # Execute command while redirecting all output to
+ # a file (file content is included in the failure message
+ # when the command is unsuccessful). Final argument is the
+ # error message used when the command fails.
+ #
+ # $@{1:$#-1}: Command to execute
+ # $@{$#}: Failure message
+ function wrap() {
+   local i=$((${#} - 1))
+   if ! wrap_raw "${@:1:$i}"; then
+     cat "$(output_file)"
+     failure "${@:$#}"
+   fi
+   rm "$(output_file)"
+ }
+
+ # Execute command while redirecting all output to
+ # a file. Exit status is returned.
+ function wrap_raw() {
+   output_file "clean" > /dev/null 2>&1
+   "${@}" > "$(output_file)" 2>&1
+   return $?
+ }
+
+ # Execute command while redirecting all output to
+ # a file (file content is included in the failure message
+ # when the command is unsuccessful). Command output will be
+ # streamed during execution. Final argument is the
+ # error message used when the command fails.
+ #
+ # $@{1:$#-1}: Command to execute
+ # $@{$#}: Failure message
+ function wrap_stream() {
+   i=$((${#} - 1))
+   if ! wrap_stream_raw "${@:1:$i}"; then
+     failure "${@:$#}"
+   fi
+   rm "$(output_file)"
+ }
+
+ # Execute command while redirecting all output
+ # to a file. Command output will be streamed
+ # during execution. Exit status is returned
+ function wrap_stream_raw() {
+   output_file "clean"
+   "${@}" > "$(output_file)" 2>&1 &
+   pid=$!
+   until [ -f "$(output_file)" ]; do
+     sleep 0.1
+   done
+   tail -f --quiet --pid "${pid}" "$(output_file)"
+   wait "${pid}"
+   return $?
+ }
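+
+ # Usage sketch (hypothetical command):
+ #   wrap git fetch origin "Failed to fetch from origin"
+ #   # on failure: captured output is printed and failure() exits the script;
+ #   # on success: the capture file is removed silently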
+
+
+ # Send command to packet device and wrap
+ # execution
+ # $@{1:$#-1}: Command to execute
+ # $@{$#}: Failure message
+ function pkt_wrap() {
+   wrap packet-exec run -quiet -- "${@}"
+ }
+
+ # Send command to packet device and wrap
+ # execution
+ # $@: Command to execute
+ function pkt_wrap_raw() {
+   wrap_raw packet-exec run -quiet -- "${@}"
+ }
+
+ # Send command to packet device and wrap
+ # execution with output streaming
+ # $@{1:$#-1}: Command to execute
+ # $@{$#}: Failure message
+ function pkt_wrap_stream() {
+   wrap_stream packet-exec run -quiet -- "${@}"
+ }
+
+ # Send command to packet device and wrap
+ # execution with output streaming
+ # $@: Command to execute
+ function pkt_wrap_stream_raw() {
+   wrap_stream_raw packet-exec run -quiet -- "${@}"
+ }
+
+ # Get the full path directory for a given
+ # file path. File is not required to exist.
+ # NOTE: Parent directories of given path will
+ #       be created.
+ #
+ # $1: file path
+ function file_directory() {
+   local path="${1?File path is required}"
+   local dir
+   if [[ "${path}" != *"/"* ]]; then
+     dir="."
+   else
+     dir="${path%/*}"
+   fi
+   if [ ! -d "${dir}" ]; then
+     mkdir -p "${dir}" ||
+       failure "Could not create directory (%s)" "${dir}"
+   fi
+   pushd "${dir}"
+   dir="$(pwd)" ||
+     failure "Could not read directory path (%s)" "${dir}"
+   popd
+   printf "%s" "${dir}"
+ }
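+
+ # Usage sketch (hypothetical path):
+ #   dir="$(file_directory "./build/artifacts/output.zip")"
+ #   # prints the absolute path of ./build/artifacts, creating it when missing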
+
+ # Wait until the number of background jobs falls below
+ # the maximum number provided. If the max number was reached
+ # and waiting was performed until a process completed, the
+ # string "waited" will be printed to stdout.
+ #
+ # NOTE: using `wait -n` would be cleaner but only became
+ #       available in bash as of 4.3
+ #
+ # $1: maximum number of jobs
+ function background_jobs_limit() {
+   local max="${1}"
+   if [ -z "${max}" ] || [[ "${max}" = *[!0123456789]* ]]; then
+     failure "Maximum number of background jobs required"
+   fi
+
+   local debug_printed
+   local jobs
+   mapfile -t jobs <<< "$(jobs -p)" ||
+     failure "Could not read background job list"
+   while [ "${#jobs[@]}" -ge "${max}" ]; do
+     if [ -z "${debug_printed}" ]; then
+       debug "max background jobs reached (%d), waiting for free process" "${max}"
+       debug_printed="1"
+     fi
+     sleep 1
+     jobs=()
+     local j_pids
+     mapfile -t j_pids <<< "$(jobs -p)" ||
+       failure "Could not read background job list"
+     for j in "${j_pids[@]}"; do
+       if kill -0 "${j}" > /dev/null 2>&1; then
+         jobs+=( "${j}" )
+       fi
+     done
+   done
+   if [ -n "${debug_printed}" ]; then
+     debug "background jobs count (%s) under max, continuing" "${#jobs[@]}"
+     printf "waited"
+   fi
+ }
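+
+ # Usage sketch (hypothetical worker loop limited to 4 concurrent jobs):
+ #   for target in "${targets[@]}"; do
+ #     build_target "${target}" &
+ #     background_jobs_limit 4 > /dev/null
+ #   done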
+
+ # Reap a completed background process. If the process is
+ # not complete, the process is ignored. The success/failure
+ # returned from this function only applies to the process
+ # identified by the provided PID _if_ the matching PID value
+ # was written to stdout
+ #
+ # $1: PID
+ function reap_completed_background_job() {
+   local pid="${1}"
+   if [ -z "${pid}" ]; then
+     failure "PID of process to reap is required"
+   fi
+   if kill -0 "${pid}" > /dev/null 2>&1; then
+     debug "requested pid to reap (%d) has not completed, ignoring" "${pid}"
+     return 0
+   fi
+   # The pid can be reaped so output the pid to indicate
+   # any error is from the job
+   printf "%s" "${pid}"
+   local code=0
+   wait "${pid}" || code="${?}"
+   if [ "${code}" -ne 0 ]; then
+     debug "wait error code %d returned for pid %d" "${code}" "${pid}"
+     return "${code}"
+   fi
+
+   return 0
+ }
+
+ # Creates a cache and adds the provided items
+ #
+ # -d Optional description
+ # -f Force cache (deletes cache if already exists)
+ #
+ # $1: name of cache
+ # $2: artifact(s) to cache (path to artifact or directory containing artifacts)
+ function create-cache() {
+   local body
+   local force
+   local opt
+   while getopts ":d:f" opt; do
+     case "${opt}" in
+       "d") body="${OPTARG}" ;;
+       "f") force="1" ;;
+       *) failure "Invalid flag provided" ;;
+     esac
+   done
+   shift $((OPTIND-1))
+
+   cache_name="${1}"
+   artifact_path="${2}"
+
+   if [ -z "${cache_name}" ]; then
+     failure "Cache name is required"
+   fi
+   if [ -z "${artifact_path}" ]; then
+     failure "Artifact path is required"
+   fi
+
+   # Check for the cache
+   if github_draft_release_exists "${repo_name}" "${cache_name}"; then
+     # If forcing, delete the cache
+     if [ -n "${force}" ]; then
+       debug "cache '%s' found and force is set, removing" "${cache_name}"
+       github_delete_draft_release "${cache_name}"
+     else
+       failure "Cache already exists (name: %s repo: %s)" "${cache_name}" "${repo_name}"
+     fi
+   fi
+
+   # If no description is provided, then provide a default
+   if [ -z "${body}" ]; then
+     body="$(printf "Cache name: %s\nCreate time: %s\nSource run: %s/%s/actions/runs/%s" \
+       "${cache_name}" "$(date)" "${GITHUB_SERVER_URL}" "${GITHUB_REPOSITORY}" "${GITHUB_RUN_ID}")"
+   fi
+
+   # Make sure body is formatted
+   if [ -n "${body}" ]; then
+     body="$(printf "%b" "${body}")"
+   fi
+
+   response="$(github_create_release -o "${repo_owner}" -r "${repo_name}" -n "${cache_name}" -b "${body}")" ||
+     failure "Failed to create GitHub release"
+ }
+
+ # Retrieve items from cache
+ #
+ # -r Require cache to exist (failure if not found)
+ #
+ # $1: cache name
+ # $2: destination directory
+ function restore-cache() {
+   local required
+
+   while getopts ":r" opt; do
+     case "${opt}" in
+       "r") required="1" ;;
+       *) failure "Invalid flag provided" ;;
+     esac
+   done
+   shift $((OPTIND-1))
+
+   cache_name="${1}"
+   destination="${2}"
+
+   if [ -z "${cache_name}" ]; then
+     failure "Cache name is required"
+   fi
+   if [ -z "${destination}" ]; then
+     failure "Destination is required"
+   fi
+
+   # If required, check for the draft release and error if not found
+   if [ -n "${required}" ]; then
+     if ! github_draft_release_exists "${repo_name}" "${cache_name}"; then
+       failure "Cache '%s' does not exist" "${cache_name}"
+     fi
+   fi
+
+   mkdir -p "${destination}" ||
+     failure "Could not create destination directory (%s)" "${destination}"
+
+   pushd "${destination}"
+   github_draft_release_assets "${repo_name}" "${cache_name}"
+   popd
+ }
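+
+ # Usage sketch (hypothetical cache name and paths):
+ #   create-cache -d "nightly build artifacts" "build-${short_sha}" ./pkg
+ #   restore-cache -r "build-${short_sha}" ./pkg-restored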
+
+ # Submit given file to Apple's notarization service and
+ # staple the notarization ticket.
+ #
+ # -i UUID: app store connect issuer ID (optional)
+ # -j PATH: JSON file containing API key
+ # -k ID: app store connect API key ID (optional)
+ # -m SECS: maximum number of seconds to wait (optional, defaults to 600)
+ # -o PATH: path to write notarized file (optional, will modify input by default)
+ #
+ # $1: file to notarize
+ function notarize_file() {
+   local creds_api_key_id
+   local creds_api_key_path
+   local creds_issuer_id
+   local output_file
+   local max_wait="600"
+
+   local opt
+   while getopts ":i:j:k:m:o:" opt; do
+     case "${opt}" in
+       "i") creds_issuer_id="${OPTARG}" ;;
+       "j") creds_api_key_path="${OPTARG}" ;;
+       "k") creds_api_key_id="${OPTARG}" ;;
+       "m") max_wait="${OPTARG}" ;;
+       "o") output_file="${OPTARG}" ;;
+       *) failure "Invalid flag provided" ;;
+     esac
+   done
+   shift $((OPTIND-1))
+
+   # Validate credentials were provided
+   if [ -z "${creds_api_key_path}" ]; then
+     failure "App store connect key path required for notarization"
+   fi
+   if [ ! -f "${creds_api_key_path}" ]; then
+     failure "Invalid path provided for app store connect key path (%s)" "${creds_api_key_path}"
+   fi
+
+   # Collect auth related arguments
+   local base_args=( "--api-key-path" "${creds_api_key_path}" )
+   if [ -n "${creds_api_key_id}" ]; then
+     base_args+=( "--api-key" "${creds_api_key_id}" )
+   fi
+   if [ -n "${creds_issuer_id}" ]; then
+     base_args+=( "--api-issuer" "${creds_issuer_id}" )
+   fi
+
+   local input_file="${1}"
+
+   # Validate the input file
+   if [ -z "${input_file}" ]; then
+     failure "Input file is required for signing"
+   fi
+   if [ ! -f "${input_file}" ]; then
+     failure "Cannot find input file (%s)" "${input_file}"
+   fi
+
+   # Check that rcodesign is available, and install
+   # it if it is not
+   if ! command -v rcodesign > /dev/null; then
+     debug "rcodesign executable not found, installing..."
+     install_github_tool "indygreg" "apple-platform-rs" "rcodesign"
+   fi
+
+   local notarize_file
+   # If an output file path was defined, copy file
+   # to output location before notarizing
+   if [ -n "${output_file}" ]; then
+     file_directory "${output_file}"
+     # Remove file if it already exists
+     rm -f "${output_file}" ||
+       failure "Could not modify output file (%s)" "${output_file}"
+     cp -f "${input_file}" "${output_file}" ||
+       failure "Could not write to output file (%s)" "${output_file}"
+     notarize_file="${output_file}"
+     debug "notarizing file '%s' and writing to '%s'" "${input_file}" "${output_file}"
+   else
+     notarize_file="${input_file}"
+     debug "notarizing file in place '%s'" "${input_file}"
+   fi
+
+   # Notarize the file
+   local notarize_output
+   if notarize_output="$(rcodesign \
+     notary-submit \
+     "${base_args[@]}" \
+     --max-wait-seconds "${max_wait}" \
+     --staple \
+     "${notarize_file}" 2>&1)"; then
+     return 0
+   fi
+
+   debug "notarization output: %s" "${notarize_output}"
+
+   # Still here means notarization failure. Pull
+   # the logs from the service before failing
+   local submission_id="${notarize_output##*submission ID: }"
+   submission_id="${submission_id%%$'\n'*}"
+   rcodesign \
+     notary-log \
+     "${base_args[@]}" \
+     "${submission_id}"
+
+   failure "Failed to notarize file (%s)" "${input_file}"
+ }
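+
+ # Usage sketch (hypothetical paths; requires an App Store Connect API key file):
+ #   notarize_file -j ./.secrets/appstore-key.json -m 900 -o ./pkg/notarized.dmg ./pkg/unsigned.dmg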
+
+ # Sign a file using signore. Will automatically apply
+ # modified retry settings when larger files are submitted.
+ #
+ # -b NAME: binary identifier (macOS only)
+ # -e PATH: path to entitlements file (macOS only)
+ # -o PATH: path to write signed file (optional, will overwrite input by default)
+ # $1: file to sign
+ #
+ # NOTE: If signore is not installed, a HASHIBOT_TOKEN is
+ #       required for downloading the signore release. The
+ #       token can also be set in SIGNORE_GITHUB_TOKEN if
+ #       the HASHIBOT_TOKEN is already set
+ #
+ # NOTE: SIGNORE_CLIENT_ID, SIGNORE_CLIENT_SECRET, and SIGNORE_SIGNER
+ #       environment variables must be set prior to calling this function
+ function sign_file() {
+   # Set 50M to be a largish file
+   local largish_file_size="52428800"
+
+   # Signore environment variables are required. Check
+   # that they are set.
+   if [ -z "${SIGNORE_CLIENT_ID}" ]; then
+     failure "Cannot sign file, SIGNORE_CLIENT_ID is not set"
+   fi
+   if [ -z "${SIGNORE_CLIENT_SECRET}" ]; then
+     failure "Cannot sign file, SIGNORE_CLIENT_SECRET is not set"
+   fi
+   if [ -z "${SIGNORE_SIGNER}" ]; then
+     failure "Cannot sign file, SIGNORE_SIGNER is not set"
+   fi
+
+   local binary_identifier=""
+   local entitlements=""
+   local output_file=""
+
+   local opt
+   while getopts ":b:e:o:" opt; do
+     case "${opt}" in
+       "b") binary_identifier="${OPTARG}" ;;
+       "e") entitlements="${OPTARG}" ;;
+       "o") output_file="${OPTARG}" ;;
+       *) failure "Invalid flag provided" ;;
+     esac
+   done
+   shift $((OPTIND-1))
+
+   local input_file="${1}"
+
+   # Check that a good input file was given
+   if [ -z "${input_file}" ]; then
+     failure "Input file is required for signing"
+   fi
+   if [ ! -f "${input_file}" ]; then
+     failure "Cannot find input file (%s)" "${input_file}"
+   fi
+
+   # If the output file is not set it's a replacement
+   if [ -z "${output_file}" ]; then
+     debug "output file is unset, will replace input file (%s)" "${input_file}"
+     output_file="${input_file}"
+   fi
+
+   # This will ensure parent directories exist
+   file_directory "${output_file}" > /dev/null
+
+   # If signore command is not installed, install it
+   if ! command -v "signore" > /dev/null; then
+     local hashibot_token_backup="${HASHIBOT_TOKEN}"
+     # If the signore github token is set, apply it
+     if [ -n "${SIGNORE_GITHUB_TOKEN}" ]; then
+       HASHIBOT_TOKEN="${SIGNORE_GITHUB_TOKEN}"
+     fi
+
+     install_hashicorp_tool "signore"
+
+     # Restore the hashibot token if it was modified
+     HASHIBOT_TOKEN="${hashibot_token_backup}"
+   fi
+
+   # Define base set of arguments
+   local signore_args=( "sign" "--file" "${input_file}" "--out" "${output_file}" "--match-file-mode" )
+
+   # Check the size of the file to be signed. If it's relatively
+   # large, push up the max retries and lengthen the retry interval
+   # NOTE: Only checked if `wc` is available
+   local file_size="0"
+   if command -v wc > /dev/null; then
+     file_size="$(wc -c <"${input_file}")" ||
+       failure "Could not determine input file size"
+   fi
+
+   if [ "${file_size}" -gt "${largish_file_size}" ]; then
+     debug "largish file detected, adjusting retry settings"
+     signore_args+=( "--max-retries" "30" "--retry-interval" "10s" )
+   fi
+
+   # If a binary identifier was provided then it's a macos signing
+   if [ -n "${binary_identifier}" ]; then
+     # shellcheck disable=SC2016
+     template='{type: "macos", input_format: "EXECUTABLE", binary_identifier: $identifier}'
+     payload="$(jq -n --arg identifier "${binary_identifier}" "${template}")" ||
+       failure "Could not create signore payload for macOS signing"
+     signore_args+=( "--signer-options" "${payload}" )
+   fi
+
+   # If an entitlement was provided, validate the path
+   # and add it to the args
+   if [ -n "${entitlements}" ]; then
+     if [ ! -f "${entitlements}" ]; then
+       failure "Invalid path for entitlements provided (%s)" "${entitlements}"
+     fi
+     signore_args+=( "--entitlements" "${entitlements}" )
+   fi
+
+   debug "signing file '%s' with arguments - %s" "${input_file}" "${signore_args[*]}"
+
+   signore "${signore_args[@]}" ||
+     failure "Failed to sign file '%s'" "${input_file}"
+
+   info "successfully signed file (%s)" "${input_file}"
+ }
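+
+ # Usage sketch (hypothetical identifiers; requires SIGNORE_* credentials in the environment):
+ #   sign_file -b com.example.mytool -e ./entitlements.plist -o ./pkg/mytool-signed ./pkg/mytool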
+
+ # Create a GPG signature. This uses signore to generate a
+ # gpg signature for a given file. If the destination
+ # path for the signature is not provided, it will
+ # be stored at the origin path with a .sig suffix
+ #
+ # $1: Path to origin file
+ # $2: Path to store signature (optional)
+ function gpg_sign_file() {
+   # Check that we have something to sign
+   if [ -z "${1}" ]; then
+     failure "Origin file is required for signing"
+   fi
+
+   if [ ! -f "${1}" ]; then
+     failure "Origin file does not exist (${1})"
+   fi
+
+   # Validate environment has required signore variables set
+   if [ -z "${SIGNORE_CLIENT_ID}" ]; then
+     failure "Cannot sign file, SIGNORE_CLIENT_ID is not set"
+   fi
+   if [ -z "${SIGNORE_CLIENT_SECRET}" ]; then
+     failure "Cannot sign file, SIGNORE_CLIENT_SECRET is not set"
+   fi
+   if [ -z "${SIGNORE_SIGNER}" ]; then
+     failure "Cannot sign file, SIGNORE_SIGNER is not set"
+   fi
+
+   local origin="${1}"
+   local destination="${2}"
+   if [ -z "${destination}" ]; then
+     destination="${origin}.sig"
+     debug "destination automatically set (%s)" "${destination}"
+   fi
+
+   if ! command -v signore > /dev/null; then
+     debug "installing signore tool"
+     install_hashicorp_tool "signore"
+   fi
+
+   if [ -e "${destination}" ]; then
+     failure "File already exists at signature destination path (${destination})"
+   fi
+
+   wrap_stream signore sign --dearmor --file "${origin}" --out "${destination}" \
+     "Failed to sign file"
+ }
+
+ # Validate arguments for GitHub release. Checks for
+ # two arguments and that the second argument is an existing
+ # file asset, or directory.
+ #
+ # $1: GitHub tag name
+ # $2: Asset file or directory of assets
+ function release_validate() {
+   if [ "${1}" = "" ]; then
+     failure "Missing required position 1 argument (TAG) for release"
+   fi
+   if [ "${2}" = "" ]; then
+     failure "Missing required position 2 argument (PATH) for release"
+   fi
+   if [ ! -e "${2}" ]; then
+     failure "Path provided for release (${2}) does not exist"
+   fi
+ }
+
+ # Generate a GitHub release
+ #
+ # $1: GitHub tag name
+ # $2: Asset file or directory of assets
+ function release() {
+   release_validate "${@}"
+   local tag_name="${1}"
+   local assets="${2}"
+   local body
+
+   if [ -z "${body}" ]; then
+     body="$(release_details "${tag_name}")"
+   fi
+
+   response="$(github_create_release -o "${repo_owner}" -r "${repo_name}" -t "${tag_name}" -n "${tag_name}" -b "${body}")" ||
+     failure "Failed to create GitHub release"
+   local release_id
+   release_id="$(printf "%s" "${response}" | jq -r '.id')" ||
+     failure "Failed to extract release ID from response for %s on %s" "${tag_name}" "${repository}"
+
+   github_upload_release_artifacts "${repo_name}" "${release_id}" "${assets}"
+ }
+
+ # Generate a GitHub prerelease
+ #
+ # $1: GitHub tag name
+ # $2: Asset file or directory of assets
+ function prerelease() {
+   release_validate "${@}"
+   local ptag
+   if [[ "${1}" != *"+"* ]]; then
+     ptag="${1}+${short_sha}"
+   else
+     ptag="${1}"
+   fi
+   local assets="${2}"
+
+   response="$(github_create_release -o "${repo_owner}" -r "${repo_name}" -t "${ptag}" -n "${ptag}" -b "${body}" -p -m)" ||
+     failure "Failed to create GitHub prerelease"
+   local release_id
+   release_id="$(printf "%s" "${response}" | jq -r '.id')" ||
+     failure "Failed to extract prerelease ID from response for %s on %s" "${ptag}" "${repository}"
+
+   github_upload_release_artifacts "${repo_name}" "${release_id}" "${assets}"
+
+   printf "New prerelease published to %s @ %s\n" "${repo_name}" "${ptag}" >&2
+
+   printf "%s" "${ptag}"
+ }
+
+ # Generate a GitHub draft release
+ #
+ # $1: GitHub release name
+ # $2: Asset file or directory of assets
+ function draft_release() {
+   local ptag="${1}"
+   local assets="${2}"
+
+   response="$(github_create_release -o "${repo_owner}" -r "${repo_name}" -t "${ptag}" -n "${ptag}" -b "${body}" -d)" ||
+     failure "Failed to create GitHub draft release"
+   local release_id
+   release_id="$(printf "%s" "${response}" | jq -r '.id')" ||
+     failure "Failed to extract draft release ID from response for %s on %s" "${ptag}" "${repository}"
+
+   github_upload_release_artifacts "${repo_name}" "${release_id}" "${assets}"
+
+   printf "%s" "${ptag}"
+ }
+
+
+ # Generate details of the release. This will consist
+ # of a link to the changelog if we can properly detect
+ # it based on current location.
+ #
+ # $1: Tag name
+ #
+ # Returns: details content
+ function release_details() {
+   local tag_name="${1}"
+   local proj_root
+   if ! proj_root="$(git rev-parse --show-toplevel)"; then
+     return
+   fi
+   if [ -z "$(git tag -l "${tag_name}")" ] || [ ! -f "${proj_root}/CHANGELOG.md" ]; then
+     return
+   fi
+   printf "CHANGELOG:\n\nhttps://github.com/%s/blob/%s/CHANGELOG.md" "${repository}" "${tag_name}"
+ }
+
+ # Check if version string is valid for release
+ #
+ # $1: Version
+ # Returns: 0 if valid, 1 if invalid
+ function valid_release_version() {
+   if [[ "${1}" =~ ^v?[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+     return 0
+   else
+     return 1
+   fi
+ }
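+
+ # Usage sketch:
+ #   valid_release_version "v1.2.3" && echo "valid"        # matches
+ #   valid_release_version "1.2.3-beta" || echo "invalid"  # pre-release style strings do not match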
+
+ # Validate arguments for HashiCorp release. Ensures asset
+ # directory exists, and checks that the SHASUMS and SHASUM.sig
+ # files are present.
+ #
+ # $1: Asset directory
+ function hashicorp_release_validate() {
+   local directory="${1}"
+   local sums
+   local sigs
+
+   # Directory checks
+   debug "checking asset directory was provided"
+   if [ -z "${directory}" ]; then
+     failure "No asset directory was provided for HashiCorp release"
+   fi
+   debug "checking that asset directory exists"
+   if [ ! -d "${directory}" ]; then
+     failure "Asset directory for HashiCorp release does not exist (${directory})"
+   fi
+
+   # SHASUMS checks
+   debug "checking for shasums file"
+   sums=("${directory}/"*SHA256SUMS)
+   if [ ${#sums[@]} -lt 1 ]; then
+     failure "Asset directory is missing SHASUMS file"
+   fi
+   debug "checking for shasums signature file"
+   sigs=("${directory}/"*SHA256SUMS.sig)
+   if [ ${#sigs[@]} -lt 1 ]; then
+     failure "Asset directory is missing SHASUMS signature file"
+   fi
+ }
+
+ # Verify release assets by validating that checksums properly
+ # match and that the signature file is valid
+ #
+ # $1: Asset directory
+ function hashicorp_release_verify() {
+   if [ -z "${HASHICORP_PUBLIC_GPG_KEY_ID}" ]; then
+     failure "Cannot verify release without GPG key ID. Set HASHICORP_PUBLIC_GPG_KEY_ID."
+   fi
+
+   local directory="${1}"
+   local gpghome
+
+   pushd "${directory}"
+
+   # First do a checksum validation
+   debug "validating shasums are correct"
+   wrap shasum -a 256 -c ./*_SHA256SUMS \
+     "Checksum validation of release assets failed"
+   # Next check that the signature is valid
+   gpghome=$(mktemp -qd)
+   export GNUPGHOME="${gpghome}"
+   debug "verifying shasums signature file using key: %s" "${HASHICORP_PUBLIC_GPG_KEY_ID}"
+   wrap gpg --keyserver keyserver.ubuntu.com --recv "${HASHICORP_PUBLIC_GPG_KEY_ID}" \
+     "Failed to import HashiCorp public GPG key"
+   wrap gpg --verify ./*SHA256SUMS.sig ./*SHA256SUMS \
+     "Validation of SHA256SUMS signature failed"
+   rm -rf "${gpghome}"
+   popd
+ }
+
+ # Generate releases-api metadata
+ #
+ # $1: Product Version
+ # $2: Asset directory
+ function hashicorp_release_generate_release_metadata() {
+   local version="${1}"
+   local directory="${2}"
+
+   if ! command -v bob > /dev/null; then
+     debug "bob executable not found, installing"
+     install_hashicorp_tool "bob"
+   fi
+
+   local hc_releases_input_metadata="input-meta.json"
+   # The '-metadata-file' flag expects valid json. Contents are not used for Vagrant.
+   echo "{}" > "${hc_releases_input_metadata}"
+
+   debug "generating release metadata information"
+   wrap_stream bob generate-release-metadata \
+     -metadata-file "${hc_releases_input_metadata}" \
+     -in-dir "${directory}" \
+     -version "${version}" \
+     -out-file "${hc_releases_metadata_filename}" \
+     "Failed to generate release metadata"
+
+   rm -f "${hc_releases_input_metadata}"
+ }
+
+ # Upload release metadata and assets to the staging api
+ #
+ # $1: Product Name (e.g. "vagrant")
+ # $2: Product Version
+ # $3: Asset directory
+ function hashicorp_release_upload_to_staging() {
+   local product="${1}"
+   local version="${2}"
+   local directory="${3}"
+
+   if ! command -v "hc-releases" > /dev/null; then
+     debug "releases-api executable not found, installing"
+     install_hashicorp_tool "releases-api"
+   fi
+
+   if [ -z "${HC_RELEASES_STAGING_HOST}" ]; then
+     failure "Missing required environment variable HC_RELEASES_STAGING_HOST"
+   fi
+   if [ -z "${HC_RELEASES_STAGING_KEY}" ]; then
+     failure "Missing required environment variable HC_RELEASES_STAGING_KEY"
+   fi
+
+   export HC_RELEASES_HOST="${HC_RELEASES_STAGING_HOST}"
+   export HC_RELEASES_KEY="${HC_RELEASES_STAGING_KEY}"
+
+   pushd "${directory}"
+
+   # Create -file parameter list for hc-releases upload
+   local fileParams=()
+   for file in *; do
+     fileParams+=("-file=${file}")
+   done
+
+   debug "uploading release assets to staging"
+   wrap_stream hc-releases upload \
+     -product "${product}" \
+     -version "${version}" \
+     "${fileParams[@]}" \
+     "Failed to upload HashiCorp release assets"
+
+   popd
+
+   debug "creating release metadata"
+
+   wrap_stream hc-releases metadata create \
+     -product "${product}" \
+     -input "${hc_releases_metadata_filename}" \
+     "Failed to create metadata for HashiCorp release"
+
+   unset HC_RELEASES_HOST
+   unset HC_RELEASES_KEY
+ }
+
+ # Promote release from staging to production
+ #
+ # $1: Product Name (e.g. "vagrant")
+ # $2: Product Version
+ function hashicorp_release_promote_to_production() {
+   local product="${1}"
+   local version="${2}"
+
+   if ! command -v "hc-releases" > /dev/null; then
+     debug "releases-api executable not found, installing"
+     install_hashicorp_tool "releases-api"
+   fi
+
+   if [ -z "${HC_RELEASES_PROD_HOST}" ]; then
+     failure "Missing required environment variable HC_RELEASES_PROD_HOST"
+   fi
+   if [ -z "${HC_RELEASES_PROD_KEY}" ]; then
+     failure "Missing required environment variable HC_RELEASES_PROD_KEY"
+   fi
+   if [ -z "${HC_RELEASES_STAGING_KEY}" ]; then
+     failure "Missing required environment variable HC_RELEASES_STAGING_KEY"
+   fi
+
+   export HC_RELEASES_HOST="${HC_RELEASES_PROD_HOST}"
+   export HC_RELEASES_KEY="${HC_RELEASES_PROD_KEY}"
+   export HC_RELEASES_SOURCE_ENV_KEY="${HC_RELEASES_STAGING_KEY}"
+
+   debug "promoting release to production"
+   wrap_stream hc-releases promote \
+     -product "${product}" \
+     -version "${version}" \
+     -source-env staging \
+     "Failed to promote HashiCorp release to Production"
+
+   unset HC_RELEASES_HOST
+   unset HC_RELEASES_KEY
+   unset HC_RELEASES_SOURCE_ENV_KEY
+ }
+
+ # Send the post-publish sns message
+ #
+ # $1: Product name (e.g. "vagrant") defaults to $repo_name
+ # $2: AWS Region of SNS (defaults to us-east-1)
+ function hashicorp_release_sns_publish() {
+   local message
+   local product="${1}"
+   local region="${2}"
+
+   if [ -z "${product}" ]; then
+     product="${repo_name}"
+   fi
+
+   if [ -z "${region}" ]; then
+     region="us-east-1"
+   fi
+
+   # Validate the creds properly assume role and function
+   wrap aws_deprecated configure list \
+     "Failed to reconfigure AWS credentials for release notification"
+
+   # Now send the release notification
+   debug "sending release notification to package repository"
+   message=$(jq --null-input --arg product "$product" '{"product": $product}')
+   wrap_stream aws sns publish --region "${region}" --topic-arn "${HC_RELEASES_PROD_SNS_TOPIC}" --message "${message}" \
+     "Failed to send SNS message for package repository update"
+
+   return 0
+ }
+
+ # Check if a release for the given version
+ # has been published to the HashiCorp
+ # releases site.
+ #
+ # $1: Product Name
+ # $2: Product Version
+ function hashicorp_release_exists() {
+   local product="${1}"
+   local version="${2}"
+
+   if curl --silent --fail --head "https://releases.hashicorp.com/${product}/${product}_${version}/" > /dev/null ; then
+     debug "hashicorp release of %s@%s found" "${product}" "${version}"
+     return 0
+   fi
+   debug "hashicorp release of %s@%s not found" "${product}" "${version}"
+   return 1
+ }
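+
+ # Usage sketch (hypothetical version):
+ #   if hashicorp_release_exists "vagrant" "2.4.0"; then
+ #     echo "release already published, skipping"
+ #   fi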
+
+ # Generate the SHA256SUMS file for assets
+ # in a given directory.
+ #
+ # $1: Asset Directory
+ # $2: Product Name
+ # $3: Product Version
+ function generate_shasums() {
+   local directory="${1}"
+   local product="${2}"
+   local version="${3}"
+
+   pushd "${directory}"
+
+   local shacontent
+   debug "generating shasums file for %s@%s" "${product}" "${version}"
+   shacontent="$(shasum -a256 ./*)" ||
+     failure "Failed to generate shasums in ${directory}"
+
+   sed 's/\.\///g' <( printf "%s" "${shacontent}" ) > "${product}_${version}_SHA256SUMS" ||
+     failure "Failed to write shasums file"
+
+   popd
+ }
+
+ # Generate a HashiCorp releases-api compatible release
+ #
+ # $1: Asset directory
+ # $2: Product Name (e.g. "vagrant")
+ # $3: Product Version
+ function hashicorp_release() {
+   local directory="${1}"
+   local product="${2}"
+   local version="${3}"
+
+   # If no version is provided, use the discovered release version
+   if [[ "${version}" == "" ]]; then
+     version="${release_version}"
+   fi
+
+   debug "creating hashicorp release - product: %s version: %s assets: %s" "${product}" "${version}" "${directory}"
+
+   if ! hashicorp_release_exists "${product}" "${version}"; then
+     # Jump into our artifact directory
+     pushd "${directory}"
+
+     # If any sig files happen to have been included in here,
+     # just remove them as they won't be using the correct
+     # signing key
+     rm -f ./*.sig
+
+     # Generate our shasums file
+     debug "generating shasums file for %s@%s" "${product}" "${version}"
+     generate_shasums ./ "${product}" "${version}"
+
+     # Grab the shasums file and sign it
+     local shasum_files=(./*SHA256SUMS)
+     local shasum_file="${shasum_files[0]}"
+     # Remove relative prefix if found
+     shasum_file="${shasum_file##*/}"
+     debug "signing shasums file for %s@%s" "${product}" "${version}"
+     gpg_sign_file "${shasum_file}"
+
+     # Jump back out of our artifact directory
+     popd
+
+     # Run validation and verification on release assets before
+     # we actually do the release.
+     debug "running release validation for %s@%s" "${product}" "${version}"
+     hashicorp_release_validate "${directory}"
+     debug "running release verification for %s@%s" "${product}" "${version}"
+     hashicorp_release_verify "${directory}"
+
+     # Now that the assets have been validated and verified,
+     # perform the release steps
+     debug "generating release metadata for %s@%s" "${product}" "${version}"
+     hashicorp_release_generate_release_metadata "${version}" "${directory}"
+     debug "uploading release artifacts to staging for %s@%s" "${product}" "${version}"
+     hashicorp_release_upload_to_staging "${product}" "${version}" "${directory}"
+     debug "promoting release to production for %s@%s" "${product}" "${version}"
+     hashicorp_release_promote_to_production "${product}" "${version}"
+
+     printf "HashiCorp release created (%s@%s)\n" "${product}" "${version}"
+   else
+     printf "hashicorp release not published, already exists (%s@%s)\n" "${product}" "${version}"
+   fi
+
+   # Send a notification to update the package repositories
+   # with the new release.
+   debug "sending packaging notification for %s@%s" "${product}" "${version}"
+   hashicorp_release_sns_publish "${product}"
+ }
+
+ # Check if gem version is already published to RubyGems
+ #
+ # $1: Name of RubyGem
+ # $2: Version of RubyGem
+ # $3: Custom gem server to search (optional)
+ function is_version_on_rubygems() {
+   local name="${1}"
+   local version="${2}"
+   local gemstore="${3}"
+
+   if [ -z "${name}" ]; then
+     failure "Name is required for version check on %s" "${gemstore:-RubyGems.org}"
+   fi
+
+   if [ -z "${version}" ]; then
+     failure "Version is required for version check on %s" "${gemstore:-RubyGems.org}"
+   fi
+
+   debug "checking rubygem %s at version %s is currently published" "${name}" "${version}"
+   local cmd_args=("gem" "search")
+   if [ -n "${gemstore}" ]; then
+     debug "checking rubygem publication at custom source: %s" "${gemstore}"
+     cmd_args+=("--clear-sources" "--source" "${gemstore}")
+   fi
+   cmd_args+=("--remote" "--exact" "--all")
+
+   local result
+   result="$("${cmd_args[@]}" "${name}")" ||
+     failure "Failed to retrieve remote version list from RubyGems"
+   local versions="${result##*\(}"
+   versions="${versions%%)*}"
+   local oifs="${IFS}"
+   IFS=', '
+   local r=1
+   for v in $versions; do
+     if [ "${v}" = "${version}" ]; then
+       r=0
+       debug "rubygem %s at version %s was found" "${name}" "${version}"
+       break
+     fi
+   done
+   IFS="${oifs}"
+   return $r
+ }
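+
+ # Usage sketch (hypothetical gem and version):
+ #   if is_version_on_rubygems "vagrant_cloud" "3.1.0"; then
+ #     echo "already on RubyGems.org, skipping push"
+ #   fi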
+
+ # Check if gem version is already published to hashigems
+ #
+ # $1: Name of RubyGem
+ # $2: Version of RubyGem
+ function is_version_on_hashigems() {
+   is_version_on_rubygems "${1}" "${2}" "https://gems.hashicorp.com"
+ }
+
+ # Publish a built project gem to RubyGems.org
+ #
+ # $1: Path to gem file to publish
+ function publish_to_rubygems() {
+   if [ -z "${RUBYGEMS_API_KEY}" ]; then
+     failure "RUBYGEMS_API_KEY is required for publishing to RubyGems.org"
+   fi
+
+   local gem_file="${1}"
+
+   if [ -z "${gem_file}" ]; then
+     failure "RubyGem file is required for publishing to RubyGems.org"
+   fi
+
+   if [ ! -f "${gem_file}" ]; then
+     failure "Path provided does not exist or is not a file (%s)" "${gem_file}"
+   fi
+
+   # NOTE: Newer versions of rubygems support setting the
+   #       api key via the GEM_HOST_API_KEY environment
+   #       variable. Config file is still used so that older
+   #       versions can be used for doing pushes.
+   gem_config="$(mktemp -p ./)" ||
+     failure "Could not create gem configuration file"
+   # NOTE: The `--` are required due to the double dash
+   #       start of the first argument
+   printf -- "---\n:rubygems_api_key: %s\n" "${RUBYGEMS_API_KEY}" > "${gem_config}"
+
+   gem push --config-file "${gem_config}" "${gem_file}" ||
+     failure "Failed to publish RubyGem at '%s' to RubyGems.org" "${gem_file}"
+   rm -f "${gem_config}"
+ }
+
+ # Publish gem to the hashigems repository
+ #
+ # $1: Path to gem file to publish
+ function publish_to_hashigems() {
+   local path="${1}"
+   if [ -z "${path}" ]; then
+     failure "Path to built gem required for publishing to hashigems"
+   fi
+
+   debug "publishing '%s' to hashigems" "${path}"
+
+   # Define all the variables we'll need
+   local user_bin
+   local reaper
+   local invalid
+   local invalid_id
+
+   wrap_stream gem install --user-install --no-document reaper-man \
+     "Failed to install dependency for hashigem generation"
+   user_bin="$(ruby -e 'puts Gem.user_dir')/bin"
+   reaper="${user_bin}/reaper-man"
+
+   debug "using reaper-man installation at: %s" "${reaper}"
+
+   # Create a temporary directory to work from
+   local tmpdir
+   tmpdir="$(mktemp -d -p ./)" ||
+     failure "Failed to create working directory for hashigems publish"
+   mkdir -p "${tmpdir}/hashigems/gems" ||
+     failure "Failed to create gems directory"
+   wrap cp "${path}" "${tmpdir}/hashigems/gems" \
+     "Failed to copy gem to working directory"
+   pushd "${tmpdir}"
+
+   # Run quick test to ensure bucket is accessible
+   wrap aws s3 ls "s3://${HASHIGEMS_METADATA_BUCKET}" \
+     "Failed to access hashigems asset bucket"
+
+   # Grab our remote metadata. If the file doesn't exist, that is always an error.
+   debug "fetching hashigems metadata file from %s" "${HASHIGEMS_METADATA_BUCKET}"
+   wrap aws s3 cp "s3://${HASHIGEMS_METADATA_BUCKET}/vagrant-rubygems.list" ./ \
+     "Failed to retrieve hashigems metadata list"
+
+   # Add the new gem to the metadata file
+   debug "adding new gem to the metadata file"
+   wrap_stream "${reaper}" package add -S rubygems -p vagrant-rubygems.list ./hashigems/gems/*.gem \
+     "Failed to add new gem to hashigems metadata list"
+   # Generate the repository
+   debug "generating the new hashigems repository content"
+   wrap_stream "${reaper}" repo generate -p vagrant-rubygems.list -o hashigems -S rubygems \
+     "Failed to generate the hashigems repository"
+   # Upload the updated repository
+   pushd ./hashigems
+   debug "uploading new hashigems repository content to %s" "${HASHIGEMS_PUBLIC_BUCKET}"
+   wrap_stream aws s3 sync . "s3://${HASHIGEMS_PUBLIC_BUCKET}" \
+     "Failed to upload the hashigems repository"
+   # Store the updated metadata
+   popd
+   debug "uploading updated hashigems metadata file to %s" "${HASHIGEMS_METADATA_BUCKET}"
+   wrap_stream aws s3 cp vagrant-rubygems.list "s3://${HASHIGEMS_METADATA_BUCKET}/vagrant-rubygems.list" \
+     "Failed to upload the updated hashigems metadata file"
+
+   # Invalidate cloudfront so the new content is available
+   debug "invalidating hashigems cloudfront distribution (%s)" "${HASHIGEMS_CLOUDFRONT_ID}"
+   invalid="$(aws cloudfront create-invalidation --distribution-id "${HASHIGEMS_CLOUDFRONT_ID}" --paths "/*")" ||
+     failure "Invalidation of hashigems CDN distribution failed"
+   invalid_id="$(printf '%s' "${invalid}" | jq -r ".Invalidation.Id")"
+   if [ -z "${invalid_id}" ]; then
+     failure "Failed to determine the ID of the hashigems CDN invalidation request"
+   fi
+   debug "hashigems cloudfront distribution invalidation identifier - %s" "${invalid_id}"
+
+   # Wait for the invalidation process to complete
+   debug "starting wait for hashigems cloudfront distribution invalidation to complete (id: %s)" "${invalid_id}"
+   wrap aws cloudfront wait invalidation-completed --distribution-id "${HASHIGEMS_CLOUDFRONT_ID}" --id "${invalid_id}" \
+     "Failure encountered while waiting for hashigems CDN invalidation request to complete (ID: ${invalid_id})"
+   debug "hashigems cloudfront distribution invalidation complete (id: %s)" "${invalid_id}"
+
+   # Clean up and we are done
+   popd
+   rm -rf "${tmpdir}"
+ }
+
+ # Configures git for hashibot usage
+ function hashibot_git() {
+   wrap git config user.name "${HASHIBOT_USERNAME}" \
+     "Failed to setup git for hashibot usage (username)"
+   wrap git config user.email "${HASHIBOT_EMAIL}" \
+     "Failed to setup git for hashibot usage (email)"
+   wrap git remote set-url origin "https://${HASHIBOT_USERNAME}:${HASHIBOT_TOKEN}@github.com/${repository}" \
+     "Failed to setup git for hashibot usage (remote)"
+ }
+
+ # Get the default branch name for the current repository
+ function default_branch() {
+   local s
+   s="$(git symbolic-ref refs/remotes/origin/HEAD)" ||
+     failure "Failed to determine default branch (is the working directory a git repository?)"
+   printf "%s" "${s##*origin/}"
+ }
+
+
+ # Send a notification to slack. All flag values can be set with
+ # environment variables using the upcased name prefixed with SLACK_,
+ # for example: --channel -> SLACK_CHANNEL
+ #
+ # -c --channel CHAN       Send to channel
+ # -u --username USER      Send as username
+ # -i --icon URL           User icon image
+ # -s --state STATE        Message state (success, warn, error, or color code)
+ # -m --message MESSAGE    Message to send
+ # -M --message-file PATH  Use file contents as message
+ # -f --file PATH          Send raw contents of file in message (displayed in code block)
+ # -t --title TITLE        Message title
+ # -T --tail NUMBER        Send last NUMBER lines of content from raw message file
+ # -w --webhook URL        Slack webhook
+ function slack() {
+   # Convert any long names to short names
+   for arg in "$@"; do
+     shift
+     case "${arg}" in
+       "--channel") set -- "${@}" "-c" ;;
+       "--username") set -- "${@}" "-u" ;;
+       "--icon") set -- "${@}" "-i" ;;
+       "--state") set -- "${@}" "-s" ;;
+       "--message") set -- "${@}" "-m" ;;
+       "--message-file") set -- "${@}" "-M" ;;
+       "--file") set -- "${@}" "-f" ;;
+       "--title") set -- "${@}" "-t" ;;
+       "--tail") set -- "${@}" "-T" ;;
+       "--webhook") set -- "${@}" "-w" ;;
+       *) set -- "${@}" "${arg}" ;;
+     esac
+   done
+   local OPTIND opt
+   # Default all options to values provided by environment variables
+   local channel="${SLACK_CHANNEL}"
+   local username="${SLACK_USERNAME}"
+   local icon="${SLACK_ICON}"
+   local state="${SLACK_STATE}"
+   local message="${SLACK_MESSAGE}"
+   local message_file="${SLACK_MESSAGE_FILE}"
+   local file="${SLACK_FILE}"
+   local title="${SLACK_TITLE}"
+   local tail="${SLACK_TAIL}"
+   local webhook="${SLACK_WEBHOOK}"
+   while getopts ":c:u:i:s:m:M:f:t:T:w:" opt; do
+     case "${opt}" in
+       "c") channel="${OPTARG}" ;;
+       "u") username="${OPTARG}" ;;
+       "i") icon="${OPTARG}" ;;
+       "s") state="${OPTARG}" ;;
+       "m") message="${OPTARG}" ;;
+       "M") message_file="${OPTARG}" ;;
+       "f") file="${OPTARG}" ;;
+       "t") title="${OPTARG}" ;;
+       "T") tail="${OPTARG}" ;;
+       "w") webhook="${OPTARG}" ;;
+       *) failure "Invalid flag provided to slack" ;;
+     esac
+   done
+   shift $((OPTIND-1))
+
+   # If we don't have a webhook provided, stop here
+   if [ -z "${webhook}" ]; then
+     (>&2 echo "ERROR: Cannot send Slack notification, webhook unset")
+     return 1
+   fi
+
+   local footer footer_icon ts
+
+   # If we are using GitHub actions, format the footer
+   if [ -n "${GITHUB_ACTIONS}" ]; then
+     if [ -z "${icon}" ]; then
+       icon="https://ca.slack-edge.com/T024UT03C-WG8NDATGT-f82ae03b9fca-48"
+     fi
+     if [ -z "${username}" ]; then
+       username="GitHub"
+     fi
+     footer_icon="https://ca.slack-edge.com/T024UT03C-WG8NDATGT-f82ae03b9fca-48"
+     footer="Actions - <https://github.com/${GITHUB_REPOSITORY}/commit/${GITHUB_SHA}/checks|${GITHUB_REPOSITORY}>"
+   fi
+
+   # If no state was provided, default to good state
+   if [ -z "${state}" ]; then
+     state="good"
+   fi
+
+   # Convert state aliases
+   case "${state}" in
+     "success" | "good")
+       state="good";;
+     "warn" | "warning")
+       state="warning";;
+     "error" | "danger")
+       state="danger";;
+   esac
+
+   # If we have a message file, read it
+   if [ -n "${message_file}" ]; then
+     local message_file_content
+     message_file_content="$(<"${message_file}")"
+     if [ -z "${message}" ]; then
+       message="${message_file_content}"
+     else
+       message="${message}\n\n${message_file_content}"
+     fi
+   fi
+
+   # If we have a file to include, add it now. Files are
+   # displayed as raw content, so be sure to wrap with
+   # backticks
+   if [ -n "${file}" ]; then
+     local file_content
+     # If tail is provided, then only include the last n number
+     # of lines in the file
+     if [ -n "${tail}" ]; then
+       if ! file_content="$(tail -n "${tail}" "${file}")"; then
+         file_content="UNEXPECTED ERROR: Failed to tail content in file ${file}"
+       fi
+     else
+       file_content="$(<"${file}")"
+     fi
+     if [ -n "${file_content}" ]; then
+       message="${message}\n\n\`\`\`\n${file_content}\n\`\`\`"
+     fi
+   fi
+
+   local attach attach_template payload payload_template
+   ts="$(date '+%s')"
+
+   # shellcheck disable=SC2016
+   attach_template='{text: $msg, color: $state, mrkdwn_in: ["text"], ts: $time'
+   if [ -n "${title}" ]; then
+     # shellcheck disable=SC2016
+     attach_template+=', title: $title'
+   fi
+   if [ -n "${footer}" ]; then
+     # shellcheck disable=SC2016
+     attach_template+=', footer: $footer'
+   fi
+   if [ -n "${footer_icon}" ]; then
+     # shellcheck disable=SC2016
+     attach_template+=', footer_icon: $footer_icon'
+   fi
+   attach_template+='}'
+
+   attach=$(jq -n \
+     --arg msg "$(printf "%b" "${message}")" \
+     --arg title "${title}" \
+     --arg state "${state}" \
+     --arg time "${ts}" \
+     --arg footer "${footer}" \
+     --arg footer_icon "${footer_icon}" \
+     "${attach_template}" \
+   )
+
+   # shellcheck disable=SC2016
+   payload_template='{attachments: [$attachment]'
+   if [ -n "${username}" ]; then
+     # shellcheck disable=SC2016
+     payload_template+=', username: $username'
+   fi
+   if [ -n "${channel}" ]; then
+     # shellcheck disable=SC2016
+     payload_template+=', channel: $channel'
+   fi
+   if [ -n "${icon}" ]; then
+     # shellcheck disable=SC2016
+     payload_template+=', icon_url: $icon'
+   fi
+   payload_template+='}'
+
+   payload=$(jq -n \
+     --argjson attachment "${attach}" \
+     --arg username "${username}" \
+     --arg channel "${channel}" \
+     --arg icon "${icon}" \
+     "${payload_template}" \
+   )
+
+   debug "sending slack message with payload: %s" "${payload}"
+   if ! curl -SsL --fail -X POST -H "Content-Type: application/json" -d "${payload}" "${webhook}"; then
+     echo "ERROR: Failed to send slack notification" >&2
+   fi
+ }
1635
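+
+ # Example usage (illustrative values only; flags map to the getopts list above):
+ #   slack -s warn -m "Tests failed on ${short_sha}" -T 20 -f ./test.log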
+
1636
+ # Install internal HashiCorp tools. These tools are expected to
1637
+ # be located in HashiCorp repositories (usually private, though not required).
1638
+ # It will attempt to download the correct artifact for the current
1639
+ # platform based on HashiCorp naming conventions. It expects that
1640
+ # the name of the repository is the name of the tool.
1641
+ #
1642
+ # $1: Name of repository
1643
+ function install_hashicorp_tool() {
1644
+ local tool_name="${1}"
1645
+ local extensions=("zip" "tar.gz")
1646
+ local asset release_content tmp
1647
+
1648
+ if [ -z "${tool_name}" ]; then
1649
+ failure "Repository name is required for hashicorp tool install"
1650
+ fi
1651
+
1652
+ debug "installing hashicorp tool: %s" "${tool_name}"
1653
+
1654
+ # Swap out repository to force correct github token
1655
+ local repository_bak="${repository}"
1656
+ repository="${repo_owner}/${release_repo}"
1657
+
1658
+ tmp="$(mktemp -d --tmpdir vagrantci-XXXXXX)" ||
1659
+ failure "Failed to create temporary working directory"
1660
+ pushd "${tmp}"
1661
+
1662
+ local platform
1663
+ platform="$(uname -s)" || failure "Failed to get local platform name"
1664
+ platform="${platform,,}" # downcase the platform name
1665
+
1666
+ local arches=()
1667
+
1668
+ local arch
1669
+ arch="$(uname -m)" || failure "Failed to get local platform architecture"
1670
+ arches+=("${arch}")
1671
+
1672
+ # If the architecture is listed as x86_64, add amd64 to the
1673
+ # arches collection. Hashicorp naming scheme is to use amd64 in
1674
+ # the file name, but isn't always followed
1675
+ if [ "${arch}" = "x86_64" ]; then
1676
+ arches+=("amd64")
1677
+ fi
1678
+
1679
+ release_content=$(github_request -H "Content-Type: application/json" \
1680
+ "https://api.github.com/repos/hashicorp/${tool_name}/releases/latest") ||
1681
+ failure "Failed to request latest releases for hashicorp/${tool_name}"
1682
+
1683
+ local exten
1684
+ for exten in "${extensions[@]}"; do
1685
+ for arch in "${arches[@]}"; do
1686
+ local suffix="${platform}_${arch}.${exten}"
1687
+ debug "checking for release artifact with suffix: %s" "${suffix}"
1688
+ asset=$(printf "%s" "${release_content}" | jq -r \
1689
+ '.assets[] | select(.name | contains("'"${suffix}"'")) | .url')
1690
+ if [ -n "${asset}" ]; then
1691
+ debug "release artifact found: %s" "${asset}"
1692
+ break
1693
+ fi
1694
+ done
1695
+ if [ -n "${asset}" ]; then
1696
+ break
1697
+ fi
1698
+ done
1699
+
1700
+ if [ -z "${asset}" ]; then
1701
+ failure "Failed to find release of hashicorp/${tool_name} for ${platform} ${arch[0]}"
1702
+ fi
1703
+
1704
+ debug "tool artifact match found for install: %s" "${asset}"
1705
+
1706
+ github_request -o "${tool_name}.${exten}" \
1707
+ -H "Accept: application/octet-stream" "${asset}" ||
1708
+ "Failed to download latest release for hashicorp/${tool_name}"
1709
+
1710
+ if [ "${exten}" = "zip" ]; then
1711
+ wrap unzip "${tool_name}.${exten}" \
1712
+ "Failed to unpack latest release for hashicorp/${tool_name}"
1713
+ else
1714
+ wrap tar xf "${tool_name}.${exten}" \
1715
+ "Failed to unpack latest release for hashicorp/${tool_name}"
1716
+ fi
1717
+
1718
+ rm -f "${tool_name}.${exten}"
1719
+
1720
+ local files=( ./* )
1721
+ wrap chmod 0755 ./* \
1722
+ "Failed to change mode on latest release for hashicorp/${tool_name}"
1723
+
1724
+ wrap mv ./* "${ci_bin_dir}" \
1725
+ "Failed to install latest release for hashicorp/${tool_name}"
1726
+
1727
+ debug "new files added to path: %s" "${files[*]}"
1728
+ popd
1729
+ rm -rf "${tmp}"
1730
+
1731
+ repository="${repository_bak}" # restore the repository value
1732
+ }
1733
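+
+ # Example usage (tool name must match its repository name; see packet-setup below):
+ #   install_hashicorp_tool "packet-exec"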
+
1734
+ # Install tool from GitHub releases. It will fetch the latest release
1735
+ # of the tool and install it. The proper release artifact will be matched
1736
+ # by a "linux_amd64" string. This command is best effort and may not work.
1737
+ #
1738
+ # $1: Organization name
1739
+ # $2: Repository name
1740
+ # $3: Tool name (optional)
1741
+ function install_github_tool() {
1742
+ local org_name="${1}"
1743
+ local r_name="${2}"
1744
+ local tool_name="${3}"
1745
+
1746
+ if [ -z "${tool_name}" ]; then
1747
+ tool_name="${r_name}"
1748
+ fi
1749
+
1750
+ local asset release_content tmp
1751
+ local artifact_list artifact basen
1752
+
1753
+ tmp="$(mktemp -d --tmpdir vagrantci-XXXXXX)" ||
1754
+ failure "Failed to create temporary working directory"
1755
+ pushd "${tmp}"
1756
+
1757
+ debug "installing github tool %s from %s/%s" "${tool_name}" "${org_name}" "${r_name}"
1758
+
1759
+ release_content=$(github_request -H "Content-Type: application/json" \
1760
+ "https://api.github.com/repos/${org_name}/${r_name}/releases/latest") ||
1761
+ failure "Failed to request latest releases for ${org_name}/${r_name}"
1762
+
1763
+ asset=$(printf "%s" "${release_content}" | jq -r \
1764
+ '.assets[] | select( ( (.name | contains("amd64")) or (.name | contains("x86_64")) or (.name | contains("x86-64")) ) and (.name | contains("linux")) and (.name | endswith("sha256") | not) and (.name | endswith("sig") | not)) | .url') ||
1765
+ failure "Failed to detect latest release for ${org_name}/${r_name}"
1766
+
1767
+ artifact="${asset##*/}"
1768
+ github_request -o "${artifact}" -H "Accept: application/octet-stream" "${asset}" ||
1769
+ "Failed to download latest release for ${org_name}/${r_name}"
1770
+
1771
+ basen="${artifact##*.}"
1772
+ if [ "${basen}" = "zip" ]; then
1773
+ wrap unzip "${artifact}" \
1774
+ "Failed to unpack latest release for ${org_name}/${r_name}"
1775
+ rm -f "${artifact}"
1776
+ elif [ -n "${basen}" ]; then
1777
+ wrap tar xf "${artifact}" \
1778
+ "Failed to unpack latest release for ${org_name}/${r_name}"
1779
+ rm -f "${artifact}"
1780
+ fi
1781
+
1782
+ artifact_list=(./*)
1783
+
1784
+ # If the artifact only contained a directory, get
1785
+ # the contents of the directory
1786
+ if [ "${#artifact_list[@]}" -eq "1" ] && [ -d "${artifact_list[0]}" ]; then
1787
+ debug "unpacked artifact contained only directory, inspecting contents"
1788
+ artifact_list=( "${artifact_list[0]}/"* )
1789
+ fi
1790
+
1791
+ local tool_match tool_glob_match executable_match
1792
+ local item
1793
+ for item in "${artifact_list[@]}"; do
1794
+ if [ "${item##*/}" = "${tool_name}" ]; then
1795
+ debug "tool name match found: %s" "${item}"
1796
+ tool_match="${item}"
1797
+ elif [ -e "${item}" ]; then
1798
+ debug "executable match found: %s" "${item}"
1799
+ executable_match="${item}"
1800
+ elif [[ "${item}" = "${tool_name}"* ]]; then
1801
+ debug "tool name glob match found: %s" "${item}"
1802
+ tool_glob_match="${item}"
1803
+ fi
1804
+ done
1805
+
1806
+ # Install based on best match to worst match
1807
+ if [ -n "${tool_match}" ]; then
1808
+ debug "installing %s from tool name match (%s)" "${tool_name}" "${tool_match}"
1809
+ mv -f "${tool_match}" "${ci_bin_dir}/${tool_name}" ||
1810
+ "Failed to install latest release of %s from %s/%s" "${tool_name}" "${org_name}" "${r_name}"
1811
+ elif [ -n "${tool_glob_match}" ]; then
1812
+ debug "installing %s from tool name glob match (%s)" "${tool_name}" "${tool_glob_match}"
1813
+ mv -f "${tool_glob_match}" "${ci_bin_dir}/${tool_name}" ||
1814
+ "Failed to install latest release of %s from %s/%s" "${tool_name}" "${org_name}" "${r_name}"
1815
+ elif [ -n "${executable_match}" ]; then
1816
+ debug "installing %s from executable file match (%s)" "${tool_name}" "${executable_match}"
1817
+ mv -f "${executable_match}" "${ci_bin_dir}/${tool_name}" ||
1818
+ "Failed to install latest release of %s from %s/%s" "${tool_name}" "${org_name}" "${r_name}"
1819
+ else
1820
+ failure "Failed to locate tool '%s' in latest release from %s/%s" "${org_name}" "${r_name}"
1821
+ fi
1822
+
1823
+ popd
1824
+ rm -rf "${tmp}"
1825
+ }
1826
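+
+ # Example usage (hypothetical repository; installs the binary as ${ci_bin_dir}/gh):
+ #   install_github_tool "cli" "cli" "gh"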
+
1827
+ # Prepare host for packet use. It will validate the
1828
+ # required environment variables are set, ensure
1829
+ # packet-exec is installed, and setup the SSH key.
1830
+ function packet-setup() {
1831
+ # First check that we have the environment variables
1832
+ if [ -z "${PACKET_EXEC_TOKEN}" ]; then
1833
+ failure "Cannot setup packet, missing token"
1834
+ fi
1835
+ if [ -z "${PACKET_EXEC_PROJECT_ID}" ]; then
1836
+ failure "Cannot setup packet, missing project"
1837
+ fi
1838
+ if [ -z "${PACKET_SSH_KEY_CONTENT}" ]; then
1839
+ failure "Cannot setup packet, missing ssh key"
1840
+ fi
1841
+
1842
+ install_hashicorp_tool "packet-exec"
1843
+
1844
+ # Write the ssh key to disk
1845
+ local content
1846
+ content="$(base64 --decode - <<< "${PACKET_SSH_KEY_CONTENT}")" ||
1847
+ failure "Cannot setup packet, failed to decode key"
1848
+ touch ./packet-key
1849
+ chmod 0600 ./packet-key
1850
+ printf "%s" "${content}" > ./packet-key
1851
+ local working_directory
1852
+ working_directory="$(pwd)" ||
1853
+ failure "Cannot setup packet, failed to determine working directory"
1854
+ export PACKET_EXEC_SSH_KEY="${working_directory}/packet-key"
1855
+ }
1856
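+
+ # Example usage (assumes PACKET_EXEC_TOKEN, PACKET_EXEC_PROJECT_ID, and
+ # PACKET_SSH_KEY_CONTENT are already exported):
+ #   packet-setup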
+
1857
+ # Download artifact(s) from GitHub release. The artifact pattern is simply
1858
+ # a substring that is matched against the artifact download URL. Artifact(s)
1859
+ # will be downloaded to the working directory.
1860
+ #
1861
+ # $1: repository name
1862
+ # $2: release tag name
1863
+ # $3: artifact pattern (optional, all artifacts downloaded if omitted)
1864
+ function github_release_assets() {
1865
+ local req_args
1866
+ req_args=()
1867
+
1868
+ local release_repo="${1}"
1870
+ local release_name="${2}"
1871
+ local asset_pattern="${3}"
1872
+
1873
+ # Swap out repository to force correct github token
1874
+ local repository_bak="${repository}"
1875
+ repository="${repo_owner}/${release_repo}"
1876
+
1877
+ req_args+=("-H" "Accept: application/vnd.github+json")
1878
+ req_args+=("https://api.github.com/repos/${repository}/releases/tags/${release_name}")
1879
+
1880
+ debug "fetching release asset list for release %s on %s" "${release_name}" "${repository}"
1881
+
1882
+ local release_content
1883
+ release_content=$(github_request "${req_args[@]}") ||
1884
+ failure "Failed to request release (${release_name}) for ${repository}"
1885
+
1886
+ local query=".assets[]"
1887
+ if [ -n "${asset_pattern}" ]; then
1888
+ debug "applying release asset list filter %s" "${asset_pattern}"
1889
+ query+="$(printf ' | select(.name | contains("%s"))' "${asset_pattern}")"
1890
+ fi
1891
+
1892
+ local asset_list
1893
+ asset_list=$(printf "%s" "${release_content}" | jq -r "${query} | .url") ||
1894
+ failure "Failed to detect asset in release (${release_name}) for ${release_repo}"
1895
+
1896
+ local name_list
1897
+ name_list=$(printf "%s" "${release_content}" | jq -r "${query} | .name") ||
1898
+ failure "Failed to detect asset in release (${release_name}) for ${release_repo}"
1899
+
1900
+ req_args=()
1901
+ req_args+=("-H" "Accept: application/octet-stream")
1902
+
1903
+ local assets asset_names
1904
+ readarray -t assets < <(printf "%s" "${asset_list}")
1905
+ readarray -t asset_names < <(printf "%s" "${name_list}")
1906
+
1907
+ local idx
1908
+ for ((idx=0; idx<"${#assets[@]}"; idx++ )); do
1909
+ local asset="${assets[$idx]}"
1910
+ local artifact="${asset_names[$idx]}"
1911
+
1912
+ github_request "${req_args[@]}" -o "${artifact}" "${asset}" ||
1913
+ "Failed to download asset (${artifact}) in release ${release_name} for ${repository}"
1914
+ printf "downloaded release asset %s from release %s on %s" "${artifact}" "${release_name}" "${repository}"
1915
+ done
1916
+
1917
+ repository="${repository_bak}" # restore the repository value
1918
+ }
1919
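+
+ # Example usage (hypothetical tag and pattern; assets land in the working directory):
+ #   github_release_assets "vagrant" "v2.4.0" "linux_amd64"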
+
1920
+ # Basic helper to create a GitHub prerelease
1921
+ #
1922
+ # $1: repository name
1923
+ # $2: tag name for release
1924
+ # $3: path to artifact(s) - single file or directory
1925
+ function github_prerelease() {
1926
+ local prerelease_repo="${1}"
1927
+ local tag_name="${2}"
1928
+ local artifacts="${3}"
1929
+
1930
+ if [ -z "${prerelease_repo}" ]; then
1931
+ failure "Name of repository required for prerelease release"
1932
+ fi
1933
+
1934
+ if [ -z "${tag_name}" ]; then
1935
+ failure "Name is required for prerelease release"
1936
+ fi
1937
+
1938
+ if [ -z "${artifacts}" ]; then
1939
+ failure "Artifacts path is required for prerelease release"
1940
+ fi
1941
+
1942
+ if [ ! -e "${artifacts}" ]; then
1943
+ failure "No artifacts found at provided path (${artifacts})"
1944
+ fi
1945
+
1946
+ local prerelease_target="${repo_owner}/${prerelease_repo}"
1947
+
1948
+ # Create the prerelease
1949
+ local response
1950
+ response="$(github_create_release -p -t "${tag_name}" -o "${repo_owner}" -r "${prerelease_repo}" )" ||
1951
+ failure "Failed to create prerelease on %s/%s" "${repo_owner}" "${prerelease_repo}"
1952
+
1953
+ # Extract the release ID from the response
1954
+ local release_id
1955
+ release_id="$(printf "%s" "${response}" | jq -r '.id')" ||
1956
+ failure "Failed to extract prerelease ID from response for ${tag_name} on ${prerelease_target}"
1957
+
1958
+ github_upload_release_artifacts "${prerelease_repo}" "${release_id}" "${artifacts}"
1959
+
1960
+ }
1961
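+
+ # Example usage (hypothetical repository and tag; uploads every file found in ./pkg):
+ #   github_prerelease "vagrant-builders" "build-${short_sha}" "./pkg"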
+
1962
+ # Upload artifacts to a release
1963
+ #
1964
+ # $1: target repository name
1965
+ # $2: release ID
1966
+ # $3: path to artifact(s) - single file or directory
1967
+ function github_upload_release_artifacts() {
1968
+ local target_repo_name="${1}"
1969
+ local release_id="${2}"
1970
+ local artifacts="${3}"
1971
+
1972
+ if [ -z "${target_repo_name}" ]; then
1973
+ failure "Repository name required for release artifact upload"
1974
+ fi
1975
+
1976
+ if [ -z "${release_id}" ]; then
1977
+ failure "Release ID require for release artifact upload"
1978
+ fi
1979
+
1980
+ if [ -z "${artifacts}" ]; then
1981
+ failure "Artifacts required for release artifact upload"
1982
+ fi
1983
+
1984
+ if [ ! -e "${artifacts}" ]; then
1985
+ failure "No artifacts found at provided path for release artifact upload (%s)" "${artifacts}"
1986
+ fi
1987
+
1988
+ # Swap out repository to force correct github token
1989
+ local repository_bak="${repository}"
1990
+ repository="${repo_owner}/${target_repo_name}"
1991
+
1992
+ local req_args=("-X" "POST" "-H" "Content-Type: application/octet-stream")
1993
+
1994
+ # Now upload the artifacts to the draft release
1995
+ local artifact_name
1996
+ if [ -f "${artifacts}" ]; then
1997
+ debug "uploading %s to release ID %s on %s" "${artifact}" "${release_id}" "${repository}"
1998
+ artifact_name="${artifacts##*/}"
1999
+ req_args+=("https://uploads.github.com/repos/${repository}/releases/${release_id}/assets?name=${artifact_name}"
2000
+ "--data-binary" "@${artifacts}")
2001
+ if ! github_request "${req_args[@]}" > /dev/null ; then
2002
+ failure "Failed to upload artifact '${artifacts}' to draft release on ${repository}"
2003
+ fi
2004
+ printf "Uploaded release artifact: %s\n" "${artifact_name}" >&2
2005
+ # Everything is done so get on outta here
2006
+ return 0
2007
+ fi
2008
+
2009
+ # Push into the directory
2010
+ pushd "${artifacts}"
2011
+
2012
+ local artifact_path
2013
+ # Walk through each item and upload
2014
+ for artifact_path in * ; do
2015
+ if [ ! -f "${artifact_path}" ]; then
2016
+ debug "skipping '%s' as it is not a file" "${artifact_path}"
2017
+ continue
2018
+ fi
2019
+ artifact_name="${artifact_path##*/}"
2020
+ debug "uploading %s/%s to release ID %s on %s" "${artifacts}" "${artifact_name}" "${release_id}" "${repository}"
2021
+ local r_args=( "${req_args[@]}" )
2022
+ r_args+=("https://uploads.github.com/repos/${repository}/releases/${release_id}/assets?name=${artifact_name}"
2023
+ "--data-binary" "@${artifact_path}")
2024
+ if ! github_request "${r_args[@]}" > /dev/null ; then
2025
+ failure "Failed to upload artifact '${artifact_name}' in '${artifacts}' to draft release on ${repository}"
2026
+ fi
2027
+ printf "Uploaded release artifact: %s\n" "${artifact_name}" >&2
2028
+ done
2029
+
2030
+ repository="${repository_bak}"
2031
+ }
2032
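+
+ # Example usage (hypothetical release ID; a directory path uploads each file within):
+ #   github_upload_release_artifacts "vagrant" "123456" "./pkg"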
+
2033
+ # Basic helper to create a GitHub draft release
2034
+ #
2035
+ # $1: repository name
2036
+ # $2: tag name for release
2037
+ # $3: path to artifact(s) - single file or directory
2038
+ function github_draft_release() {
2039
+ local draft_repo="${1}"
2040
+ local tag_name="${2}"
2041
+ local artifacts="${3}"
2042
+
2043
+ if [ -z "${draft_repo}" ]; then
2044
+ failure "Name of repository required for draft release"
2045
+ fi
2046
+
2047
+ if [ -z "${tag_name}" ]; then
2048
+ failure "Name is required for draft release"
2049
+ fi
2050
+
2051
+ if [ -z "${artifacts}" ]; then
2052
+ failure "Artifacts path is required for draft release"
2053
+ fi
2054
+
2055
+ if [ ! -e "${artifacts}" ]; then
2056
+ failure "No artifacts found at provided path (%s)" "${artifacts}"
2057
+ fi
2058
+
2059
+ # Create the draft release
2060
+ local response
2061
+ response="$(github_create_release -d -t "${tag_name}" -o "${repo_owner}" -r "${draft_repo}" )" ||
2062
+ failure "Failed to create draft release on %s" "${repo_owner}/${draft_repo}"
2063
+
2064
+ # Extract the release ID from the response
2065
+ local release_id
2066
+ release_id="$(printf "%s" "${response}" | jq -r '.id')" ||
2067
+ failure "Failed to extract draft release ID from response for %s on %s" "${tag_name}" "${repo_owner}/${draft_repo}"
2068
+
2069
+ github_upload_release_artifacts "${draft_repo}" "${release_id}" "${artifacts}"
2070
+ }
2071
+
2072
+ # Create a GitHub release
2073
+ #
2074
+ # -b BODY - body of release
2075
+ # -c COMMITISH - commitish of release
2076
+ # -n NAME - name of the release
2077
+ # -o OWNER - repository owner (required)
2078
+ # -r REPO - repository name (required)
2079
+ # -t TAG_NAME - tag name for release (required)
2080
+ # -d - draft release
2081
+ # -p - prerelease
2082
+ # -g - generate release notes
2083
+ # -m - make release latest
2084
+ #
2085
+ # NOTE: Artifacts for release must be uploaded using `github_upload_release_artifacts`
2086
+ function github_create_release() {
2087
+ local OPTIND opt owner repo tag_name
2088
+ # Values that can be null
2089
+ local body commitish name
2090
+ # Values we default
2091
+ local draft="false"
2092
+ local generate_notes="false"
2093
+ local make_latest="false"
2094
+ local prerelease="false"
2095
+
2096
+ while getopts ":b:c:n:o:r:t:dpgm" opt; do
2097
+ case "${opt}" in
2098
+ "b") body="${OPTARG}" ;;
2099
+ "c") commitish="${OPTARG}" ;;
2100
+ "n") name="${OPTARG}" ;;
2101
+ "o") owner="${OPTARG}" ;;
2102
+ "r") repo="${OPTARG}" ;;
2103
+ "t") tag_name="${OPTARG}" ;;
2104
+ "d") draft="true" ;;
2105
+ "p") prerelease="true" ;;
2106
+ "g") generate_notes="true" ;;
2107
+ "m") make_latest="true" ;;
2108
+ *) failure "Invalid flag provided to github_create_release" ;;
2109
+ esac
2110
+ done
2111
+ shift $((OPTIND-1))
2112
+
2113
+ # Sanity check
2114
+ if [ -z "${owner}" ]; then
2115
+ failure "Repository owner value is required for GitHub release"
2116
+ fi
2117
+
2118
+ if [ -z "${repo}" ]; then
2119
+ failure "Repository name is required for GitHub release"
2120
+ fi
2121
+
2122
+ if [ -z "${tag_name}" ] && [ "${draft}" != "true" ]; then
2123
+ failure "Tag name is required for GitHub release"
2124
+ fi
2125
+
2126
+ if [ "${draft}" = "true" ] && [ "${prerelease}" = "true" ]; then
2127
+ failure "Release cannot be both draft and prerelease"
2128
+ fi
2129
+
2130
+ # If no name is provided, use the tag name value
2131
+ if [ -z "${name}" ]; then
2132
+ name="${tag_name}"
2133
+ fi
2134
+
2135
+ # shellcheck disable=SC2016
2136
+ local payload_template='{tag_name: $tag_name, draft: $draft, prerelease: $prerelease, generate_release_notes: $generate_notes, make_latest: $make_latest'
2137
+ local jq_args=("-n"
2138
+ "--arg" "tag_name" "${tag_name}"
2139
+ "--arg" "make_latest" "${make_latest}"
2140
+ "--argjson" "draft" "${draft}"
2141
+ "--argjson" "generate_notes" "${generate_notes}"
2142
+ "--argjson" "prerelease" "${prerelease}"
2143
+ )
2144
+
2145
+ if [ -n "${commitish}" ]; then
2146
+ # shellcheck disable=SC2016
2147
+ payload_template+=', target_commitish: $commitish'
2148
+ jq_args+=("--arg" "commitish" "${commitish}")
2149
+ fi
2150
+ if [ -n "${name}" ]; then
2151
+ # shellcheck disable=SC2016
2152
+ payload_template+=', name: $name'
2153
+ jq_args+=("--arg" "name" "${name}")
2154
+ fi
2155
+ if [ -n "${body}" ]; then
2156
+ # shellcheck disable=SC2016
2157
+ payload_template+=', body: $body'
2158
+ jq_args+=("--arg" "body" "${body}")
2159
+ fi
2160
+ payload_template+='}'
2161
+
2162
+ # Generate the payload
2163
+ local payload
2164
+ payload="$(jq "${jq_args[@]}" "${payload_template}" )" ||
2165
+ failure "Could not generate GitHub release JSON payload"
2166
+
2167
+ local target_repo="${owner}/${repo}"
2168
+ # Set repository to get correct token behavior on request
2169
+ local repository_bak="${repository}"
2170
+ repository="${target_repo}"
2171
+
2172
+ # Craft our request arguments
2173
+ local req_args=("-X" "POST" "https://api.github.com/repos/${target_repo}/releases" "-d" "${payload}")
2174
+
2175
+ # Create the draft release
2176
+ local response
2177
+ if ! response="$(github_request "${req_args[@]}")"; then
2178
+ failure "Could not create github release on ${target_repo}"
2179
+ fi
2180
+
2181
+ # Restore the repository
2182
+ repository="${repository_bak}"
2183
+
2184
+ local rel_type
2185
+ if [ "${draft}" = "true" ]; then
2186
+ rel_type="draft release"
2187
+ elif [ "${prerelease}" = "true" ]; then
2188
+ rel_type="prerelease"
2189
+ else
2190
+ rel_type="release"
2191
+ fi
2192
+
2193
+ # Report new draft release was created
2194
+ printf "New %s '%s' created on '%s'\n" "${rel_type}" "${tag_name}" "${target_repo}" >&2
2195
+
2196
+ # Print the response
2197
+ printf "%s" "${response}"
2198
+ }
2199
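+
+ # Example usage (hypothetical values; response JSON is printed so the ID can be captured):
+ #   response="$(github_create_release -p -t "v1.0.0-rc1" -o "${repo_owner}" -r "${repo_name}")"
+ #   release_id="$(printf "%s" "${response}" | jq -r '.id')"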
+
2200
+ # Check if a github release exists by tag name
2201
+ # NOTE: This can be used for release and prerelease checks.
2202
+ # Draft releases must use the github_draft_release_exists
2203
+ # function.
2204
+ #
2205
+ # $1: repository name
2206
+ # $2: release tag name
2207
+ function github_release_exists() {
2208
+ local release_repo="${1}"
2209
+ local release_name="${2}"
2210
+
2211
+ if [ -z "${release_repo}" ]; then
2212
+ failure "Repository name required for release lookup"
2213
+ fi
2214
+ if [ -z "${release_name}" ]; then
2215
+ failure "Release name required for release lookup"
2216
+ fi
2217
+
2218
+ # Override repository value to get correct token automatically
2219
+ local repository_bak="${repository}"
2220
+ repository="${repo_owner}/${release_repo}"
2221
+
2222
+ local result="1"
2223
+ if github_request \
2224
+ -H "Accept: application/vnd.github+json" \
2225
+ "https://api.github.com/repos/${repository}/releases/tags/${release_name}" > /dev/null; then
2226
+ debug "release '${release_name}' found in ${repository}"
2227
+ result="0"
2228
+ else
2229
+ debug "release '${release_name}' not found in ${repository}"
2230
+ fi
2231
+
2232
+ # Restore repository value
2233
+ repository="${repository_bak}"
2234
+
2235
+ return "${result}"
2236
+ }
2237
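+
+ # Example usage (hypothetical tag; exit status signals existence):
+ #   if github_release_exists "vagrant" "v2.4.0"; then
+ #     debug "release already published, skipping"
+ #   fi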
+
2238
+ # Check if a github release exists using fuzzy match
2239
+ #
2240
+ # $1: repository name
2241
+ # $2: release name
2242
+ function github_release_exists_fuzzy() {
2243
+ local release_repo="${1}"
2244
+ local release_name="${2}"
2245
+
2246
+ if [ -z "${release_repo}" ]; then
2247
+ failure "Repository name required for draft release lookup"
2248
+ fi
2249
+ if [ -z "${release_name}" ]; then
2250
+ failure "Release name required for draft release lookup"
2251
+ fi
2252
+
2253
+ # Override repository value to get correct token automatically
2254
+ local repository_bak="${repository}"
2255
+ repository="${repo_owner}/${release_repo}"
2256
+
2257
+ local page=$((1))
2258
+ local matched_name
2259
+
2260
+ while [ -z "${release_content}" ]; do
2261
+ local release_list
2262
+ release_list="$(github_request \
2263
+ -H "Content-Type: application/json" \
2264
+ "https://api.github.com/repos/${repository}/releases?per_page=100&page=${page}")" ||
2265
+ failure "Failed to request releases list for ${repository}"
2266
+
2267
+ # If there's no more results, just bust out of the loop
2268
+ if [ "$(jq 'length' <( printf "%s" "${release_list}" ))" -lt "1" ]; then
2269
+ break
2270
+ fi
2271
+
2272
+ local names name_list n
2273
+ name_list="$(printf "%s" "${release_list}" | jq '.[] | .name')" ||
2274
+ failure "Could not generate name list"
2275
+
2276
+ # shellcheck disable=SC2206
2277
+ names=( $name_list )
2278
+ for n in "${names[@]}"; do
2279
+ if [[ "${n}" =~ $release_name ]]; then
2280
+ matched_name="${n}"
2281
+ break
2282
+ fi
2283
+ done
2284
+
2285
+ if [ -n "${matched_name}" ]; then
2286
+ break
2287
+ fi
2288
+
2289
+ ((page++))
2290
+ done
2291
+
2292
+ # Restore the $repository value
2293
+ repository="${repository_bak}"
2294
+
2295
+ if [ -z "${matched_name}" ]; then
2296
+ debug "did not locate release named %s for %s" "${release_name}" "${repo_owner}/${release_repo}"
2297
+ return 1
2298
+ fi
2299
+
2300
+ debug "found release name %s in %s (pattern: %s)" "${matched_name}" "${repo_owner}/${release_repo}" "${release_name}"
2301
+ return 0
2302
+ }
2303
+
2304
+ # Check if a draft release exists by name
2305
+ #
2306
+ # $1: repository name
2307
+ # $2: release name
2308
+ function github_draft_release_exists() {
2309
+ local release_repo="${1}"
2310
+ local release_name="${2}"
2311
+
2312
+ if [ -z "${release_repo}" ]; then
2313
+ failure "Repository name required for draft release lookup"
2314
+ fi
2315
+ if [ -z "${release_name}" ]; then
2316
+ failure "Release name required for draft release lookup"
2317
+ fi
2318
+
2319
+ # Override repository value to get correct token automatically
2320
+ local repository_bak="${repository}"
2321
+ repository="${repo_owner}/${release_repo}"
2322
+
2323
+ local page=$((1))
2324
+ local release_content
2325
+
2326
+ while [ -z "${release_content}" ]; do
2327
+ local release_list
2328
+ release_list="$(github_request \
2329
+ -H "Content-Type: application/json" \
2330
+ "https://api.github.com/repos/${repository}/releases?per_page=100&page=${page}")" ||
2331
+ failure "Failed to request releases list for ${repository}"
2332
+
2333
+ # If there's no more results, just bust out of the loop
2334
+ if [ "$(jq 'length' <( printf "%s" "${release_list}" ))" -lt "1" ]; then
2335
+ break
2336
+ fi
2337
+
2338
+ query="$(printf '.[] | select(.name == "%s")' "${release_name}")"
2339
+
2340
+ release_content=$(printf "%s" "${release_list}" | jq -r "${query}")
2341
+
2342
+ ((page++))
2343
+ done
2344
+
2345
+ # Restore the $repository value
2346
+ repository="${repository_bak}"
2347
+
2348
+ if [ -z "${release_content}" ]; then
2349
+ debug "did not locate draft release named %s for %s" "${release_name}" "${repo_owner}/${release_repo}"
2350
+ return 1
2351
+ fi
2352
+
2353
+ debug "found draft release name %s in %s" "${release_name}" "${repo_owner}/${release_repo}"
2354
+ return 0
2355
+ }
2356
+
2357
+ # Download artifact(s) from GitHub draft release. A draft release is not
2358
+ # attached to a tag and therefore is referenced by the release name directly.
2359
+ # The artifact pattern is simply a substring that is matched against the
2360
+ # artifact download URL. Artifact(s) will be downloaded to the working directory.
2361
+ #
2362
+ # $1: repository name
2363
+ # $2: release name
2364
+ # $3: artifact pattern (optional, all artifacts downloaded if omitted)
2365
+ function github_draft_release_assets() {
2366
+ local release_repo_name="${1}"
2367
+ local release_name="${2}"
2368
+ local asset_pattern="${3}"
2369
+
2370
+ if [ -z "${release_repo_name}" ]; then
2371
+ failure "Repository name is required for draft release asset fetching"
2372
+ fi
2373
+ if [ -z "${release_name}" ]; then
2374
+ failure "Draft release name is required for draft release asset fetching"
2375
+ fi
2376
+
2377
+ # Override repository value to get correct token automatically
2378
+ local repository_bak="${repository}"
2379
+ repository="${repo_owner}/${release_repo_name}"
2380
+
2381
+ local page=$((1))
2382
+ local release_content query
2383
+ while [ -z "${release_content}" ]; do
2384
+ local release_list
2385
+ release_list=$(github_request -H "Content-Type: application/json" \
2386
+ "https://api.github.com/repos/${repository}/releases?per_page=100&page=${page}") ||
2387
+ failure "Failed to request releases list for ${repository}"
2388
+
2389
+ # If there's no more results, just bust out of the loop
2390
+ if [ "$(jq 'length' <( printf "%s" "${release_list}" ))" -lt "1" ]; then
2391
+ debug "did not locate draft release named %s in %s" "${release_name}" "${repository}"
2392
+ break
2393
+ fi
2394
+
2395
+ query="$(printf '.[] | select(.name == "%s")' "${release_name}")"
2396
+ release_content=$(printf "%s" "${release_list}" | jq -r "${query}")
2397
+
2398
+ ((page++))
2399
+ done
2400
+
2401
+ query=".assets[]"
2402
+ if [ -n "${asset_pattern}" ]; then
2403
+ debug "apply pattern filter to draft assets: %s" "${asset_pattern}"
2404
+ query+="$(printf ' | select(.name | contains("%s"))' "${asset_pattern}")"
2405
+ fi
2406
+
2407
+ local asset_list
2408
+ asset_list=$(printf "%s" "${release_content}" | jq -r "${query} | .url") ||
2409
+ failure "Failed to detect asset in release (${release_name}) for ${repository}"
2410
+
2411
+ local name_list
2412
+ name_list=$(printf "%s" "${release_content}" | jq -r "${query} | .name") ||
2413
+ failure "Failed to detect asset in release (${release_name}) for ${repository}"
2414
+
2415
+ debug "draft release assets list: %s" "${name_list}"
2416
+
2417
+ local assets asset_names
2418
+ readarray -t assets < <(printf "%s" "${asset_list}")
2419
+ readarray -t asset_names < <(printf "%s" "${name_list}")
2420
+
2421
+ if [ "${#assets[@]}" -ne "${#asset_names[@]}" ]; then
2422
+ failure "Failed to match download assets with names in release list for ${repository}"
2423
+ fi
2424
+
2425
+ local idx
2426
+ for ((idx=0; idx<"${#assets[@]}"; idx++ )); do
2427
+ local asset="${assets[$idx]}"
2428
+ local artifact="${asset_names[$idx]}"
2429
+ github_request -o "${artifact}" \
2430
+ -H "Accept: application/octet-stream" "${asset}" ||
2431
+ "Failed to download asset in release (${release_name}) for ${repository} - ${artifact}"
2432
+
2433
+ printf "downloaded draft release asset at %s\n" "${artifact}" >&2
2434
+ done
2435
+
2436
+ repository_bak="${repository}" # restore repository value
2437
+ }
2438
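+
+ # Example usage (hypothetical draft name; only assets matching the pattern download):
+ #   github_draft_release_assets "vagrant" "build-${short_sha}" ".zip"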
+
2439
+ # This function is identical to the github_draft_release_assets
2440
+ # function above with one caveat: it does not download the files.
2441
+ # Each file that would be downloaded is simply touched in the
2442
+ # current directory. This provides an easy way to check the
2443
+ # files that would be downloaded without actually downloading
2444
+ # them.
2445
+ #
2446
+ # An example usage of this can be seen in the vagrant package
2447
+ # building where we use this to enable building missing substrates
2448
+ # or packages on re-runs and only download the artifacts if
2449
+ # actually needed.
2450
+ function github_draft_release_asset_names() {
2451
+ local release_reponame="${1}"
2452
+ local release_name="${2}"
2453
+ local asset_pattern="${3}"
2454
+
2455
+ if [ -z "${release_reponame}" ]; then
2456
+ failure "Repository name is required for draft release assets names"
2457
+ fi
2458
+
2459
+ if [ -z "${release_name}" ]; then
2460
+ failure "Release name is required for draft release asset names"
2461
+ fi
2462
+
2463
+ # Override repository value to get correct token automatically
2464
+ local repository_bak="${repository}"
2465
+ repository="${repo_owner}/${release_reponame}"
2466
+
2467
+ local page=$((1))
2468
+ local release_content query
2469
+ while [ -z "${release_content}" ]; do
2470
+ local release_list
2471
+ release_list=$(github_request H "Content-Type: application/json" \
2472
+ "https://api.github.com/repos/${repository}/releases?per_page=100&page=${page}") ||
2473
+ failure "Failed to request releases list for ${repository}"
2474
+
2475
+ # If there's no more results, just bust out of the loop
2476
+ if [ "$(jq 'length' <( printf "%s" "${release_list}" ))" -lt "1" ]; then
2477
+ debug "did not locate draft release named %s in %s" "${release_name}" "${repository}"
2478
+ break
2479
+ fi
2480
+
2481
+ query="$(printf '.[] | select(.name == "%s")' "${release_name}")"
2482
+ release_content=$(printf "%s" "${release_list}" | jq -r "${query}")
2483
+
2484
+ ((page++))
2485
+ done
2486
+
2487
+ query=".assets[]"
2488
+ if [ -n "${asset_pattern}" ]; then
2489
+ debug "apply pattern filter to draft assets: %s" "${asset_pattern}"
2490
+ query+="$(printf ' | select(.name | contains("%s"))' "${asset_pattern}")"
2491
+ fi
2492
+
2493
+ local name_list
2494
+ name_list=$(printf "%s" "${release_content}" | jq -r "${query} | .name") ||
2495
+ failure "Failed to detect asset in release (${release_name}) for ${repository}"
2496
+
2497
+ debug "draft release assets list: %s" "${name_list}"
2498
+
2499
+ local asset_names
2500
+ readarray -t asset_names < <(printf "%s" "${name_list}")
2501
+
2502
+ local idx
2503
+ for ((idx=0; idx<"${#asset_names[@]}"; idx++ )); do
2504
+ local artifact="${asset_names[$idx]}"
2505
+ touch "${artifact}" ||
2506
+ failure "Failed to touch release asset at path: %s" "${artifact}"
2507
+ printf "touched draft release asset at %s\n" "${artifact}" >&2
2508
+ done
2509
+
2510
+ repository_bak="${repository}" # restore repository value
2511
+ }
2512
+
2513
+ # Delete a github release by tag name
2514
+ # NOTE: Releases and prereleases can be deleted using this
2515
+ # function. For draft releases use github_delete_draft_release
2516
+ #
2517
+ # $1: tag name of release
2518
+ # $2: repository name (optional, defaults to current repository name)
2519
+ function github_delete_release() {
2520
+ local release_name="${1}"
2521
+ local release_repo="${2:-$repo_name}"
2522
+
2523
+ if [ -z "${release_name}" ]; then
2524
+ failure "Release name is required for deletion"
2525
+ fi
2526
+ if [ -z "${release_repo}" ]; then
2527
+ failure "Repository is required for release deletion"
2528
+ fi
2529
+
2530
+ # Override repository value to get correct token automatically
2531
+ local repository_bak="${repository}"
2532
+ repository="${repo_owner}/${release_repo}"
2533
+
2534
+ # Fetch the release first
2535
+ local release_content
2536
+ release_content="$(github_request \
2537
+ -H "Accept: application/vnd.github+json" \
2538
+ "https://api.github.com/repos/${repository}/releases/tags/${release_name}")" ||
2539
+ failure "Failed to fetch release information for '${release_name}' in ${repository}"
2540
+
2541
+ # Get the release id to reference in delete request
2542
+ local rel_id
2543
+ rel_id="$(jq -r '.id' <( printf "%s" "${release_content}" ) )" ||
2544
+ failure "Failed to read release id for '${release_name}' in ${repository}"
2545
+
2546
+ debug "deleting github release '${release_name}' in ${repository} with id ${rel_id}"
2547
+
2548
+ # Send the deletion request
2549
+ github_request \
2550
+ -X "DELETE" \
2551
+ -H "Accept: application/vnd.github+json" \
2552
+ "https://api.github.com/repos/${repository}/releases/${rel_id}" > /dev/null ||
2553
+ failure "Failed to delete release '${release_name}' in ${repository}"
2554
+
2555
+ # Restore repository value
2556
+ repository="${repository_bak}"
2557
+ }
2558
+
2559
+ # Delete draft release with given name
2560
+ #
2561
+ # $1: name of draft release
2562
+ # $2: repository name (optional, defaults to current repository name)
2563
+ function github_delete_draft_release() {
2564
+ local draft_name="${1}"
2565
+ local delete_repo="${2:-$repo_name}"
2566
+
2567
+ if [ -z "${draft_name}" ]; then
2568
+ failure "Draft name is required for deletion"
2569
+ fi
2570
+
2571
+ if [ -z "${delete_repo}" ]; then
2572
+ failure "Repository is required for draft deletion"
2573
+ fi
2574
+
2575
+ # Override repository value to get correct token automatically
2576
+ local repository_bak="${repository}"
2577
+ repository="${repo_owner}/${delete_repo}"
2578
+
2579
+ local draft_ids=()
2580
+ local page=$((1))
2581
+ while true; do
2582
+ local release_list list_length
2583
+ release_list=$(github_request -H "Content-Type: application/json" \
2584
+ "https://api.github.com/repos/${repository}/releases?per_page=100&page=${page}") ||
2585
+ failure "Failed to request releases list for draft deletion on ${repository}"
2586
+ list_length="$(jq 'length' <( printf "%s" "${release_list}" ))" ||
2587
+ failure "Failed to calculate release length for draft deletion on ${repository}"
2588
+
2589
+ # If the list is empty then there are no more releases to process
2590
+ if [ -z "${list_length}" ] || [ "${list_length}" -lt 1 ]; then
2591
+ debug "no releases returned for page %d in repository %s" "${page}" "${repository}"
2592
+ break
2593
+ fi
2594
+
2595
+ local entry i release_draft release_id release_name
2596
+ for (( i=0; i < "${list_length}"; i++ )); do
2597
+ entry="$(jq ".[$i]" <( printf "%s" "${release_list}" ))" ||
2598
+ failure "Failed to read entry for draft deletion on ${repository}"
2599
+ release_draft="$(jq -r '.draft' <( printf "%s" "${entry}" ))" ||
2600
+ failure "Failed to read entry draft for draft deletion on ${repository}"
2601
+ release_id="$(jq -r '.id' <( printf "%s" "${entry}" ))" ||
2602
+ failure "Failed to read entry ID for draft deletion on ${repository}"
2603
+ release_name="$(jq -r '.name' <( printf "%s" "${entry}" ))" ||
2604
+ failure "Failed to read entry name for draft deletion on ${repository}"
2605
+
2606
+ # If the names don't match, skip
2607
+ if [ "${release_name}" != "${draft_name}" ]; then
2608
+ debug "skipping release deletion, name mismatch (%s != %s)" "${release_name}" "${draft_name}"
2609
+ continue
2610
+ fi
2611
+
2612
+ # If the release is not a draft, fail
2613
+ if [ "${release_draft}" != "true" ]; then
2614
+ debug "skipping release '%s' (ID: %s) from '%s' - release is not a draft" "${draft_name}" "${release_id}" "${repository}"
2615
+ continue
2616
+ fi
2617
+
2618
+ # If we are here, we found a match
2619
+ draft_ids+=( "${release_id}" )
2620
+ done
2621
+ ((page++))
2622
+ done
2623
+
2624
+ # If no draft ids were found, the release was not found
2625
+ # so we can just return success
2626
+ if [ "${#draft_ids[@]}" -lt "1" ]; then
2627
+ debug "no draft releases found matching name %s in %s" "${draft_name}" "${repository}"
2628
+ repository="${repository_bak}" # restore repository value before return
2629
+ return 0
2630
+ fi
2631
+
2632
+ # Still here? Okay! Delete the draft(s)
2633
+ local draft_id
2634
+ for draft_id in "${draft_ids[@]}"; do
2635
+ info "Deleting draft release %s from %s (ID: %d)\n" "${draft_name}" "${repository}" "${draft_id}"
2636
+ github_request -X DELETE "https://api.github.com/repos/${repository}/releases/${draft_id}" ||
2637
+ failure "Failed to prune draft release ${draft_name} from ${repository}"
2638
+ done
2639
+
2640
+ repository="${repository_bak}" # restore repository value before return
2641
+ }
2642
+
2643
+ # Delete prerelease with given name
2644
+ #
2645
+ # $1: tag name of prerelease
2646
+ # $2: repository name (optional, defaults to current repository name)
2647
+ function github_delete_prerelease() {
2648
+ local tag_name="${1}"
2649
+ local delete_repo="${2:-$repo_name}"
2650
+
2651
+ if [ -z "${tag_name}" ]; then
2652
+ failure "Tag name is required for deletion"
2653
+ fi
2654
+
2655
+ if [ -z "${delete_repo}" ]; then
2656
+ failure "Repository is required for prerelease deletion"
2657
+ fi
2658
+
2659
+ # Override repository value to get correct token automatically
2660
+ local repository_bak="${repository}"
2661
+ repository="${repo_owner}/${delete_repo}"
2662
+
2663
+ local prerelease
2664
+ prerelease=$(github_request -H "Content-Type: application/vnd.github+json" \
2665
+ "https://api.github.com/repos/${repository}/releases/tags/${tag_name}") ||
2666
+ failure "Failed to get prerelease %s from %s" "${tag_name}" "${repository}"
2667
+ local prerelease_id
2668
+ prerelease_id="$(jq -r '.id' <( printf "%s" "${prerelease}" ))" ||
2669
+ failure "Failed to read prerelease ID for %s on %s" "${tag_name}" "${repository}"
2670
+ local is_prerelease
2671
+ is_prerelease="$(jq -r '.prerelease' <( printf "%s" "${prerelease}" ))" ||
2672
+ failure "Failed to read prerelease status for %s on %s" "${tag_name}" "${repository}"
2673
+
2674
+ # Validate the matched release is a prerelease
2675
+ if [ "${is_prerelease}" != "true" ]; then
2676
+ failure "Prerelease %s on %s is not marked as a prerelease, cannot delete" "${tag_name}" "${repository}"
2677
+ fi
2678
+
2679
+ info "Deleting prerelease %s from repository %s" "${tag_name}" "${repository}"
2680
+ github_request -X DELETE "https://api.github.com/repos/${repository}/releases/${prerelease_id}" ||
2681
+ failure "Failed to delete prerelease %s from %s" "${tag_name}" "${repository}"
2682
+
2683
+ repository="${repository_bak}" # restore repository value before return
2684
+ }
2685
+
2686
+ # Delete any draft releases that are older than the
2687
+ # given number of days
2688
+ #
2689
+ # $1: days
2690
+ # $2: repository name (optional, defaults to current repository name)
2691
+ function github_draft_release_prune() {
2692
+ github_release_prune "draft" "${@}"
2693
+ }
2694
+
2695
+ # Delete any prereleases that are older than the
2696
+ # given number of days
2697
+ #
2698
+ # $1: days
2699
+ # $2: repository name (optional, defaults to current repository name)
2700
+ function github_prerelease_prune() {
2701
+ github_release_prune "prerelease" "${@}"
2702
+ }
2703
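+
+ # Example usage (hypothetical retention windows):
+ #   github_draft_release_prune 7
+ #   github_prerelease_prune 30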
+
2704
+ # Delete any releases of provided type that are older than the
2705
+ # given number of days
2706
+ #
2707
+ # $1: type (prerelease or draft)
2708
+ # $2: days
2709
+ # $3: repository name (optional, defaults to current repository name)
2710
+ function github_release_prune() {
2711
+ local prune_type="${1}"
2712
+ if [ -z "${prune_type}" ]; then
2713
+ failure "Type is required for release pruning"
2714
+ fi
2715
+ if [ "${prune_type}" != "draft" ] && [ "${prune_type}" != "prerelease" ]; then
2716
+ failure "Invalid release pruning type provided '%s' (supported: draft or prerelease)" "${prune_type}"
2717
+ fi
2718
+
2719
+ local days="${2}"
2720
+ if [ -z "${days}" ]; then
2721
+ failure "Number of days to retain is required for pruning"
2722
+ fi
2723
+ if [[ "${days}" = *[!0123456789]* ]]; then
2724
+ failure "Invalid value provided for days to retain when pruning (%s)" "${days}"
2725
+ fi
2726
+
2727
+ local prune_repo="${3:-$repo_name}"
2728
+ if [ -z "${prune_repo}" ]; then
2729
+ failure "Repository name is required for pruning"
2730
+ fi
2731
+
2732
+ local prune_seconds now
2733
+ now="$(date '+%s')"
2734
+ prune_seconds=$(("${now}"-("${days}" * 86400)))
2735
+
2736
+ # Override repository value to get correct token automatically
2737
+ local repository_bak="${repository}"
2738
+ repository="${repo_owner}/${prune_repo}"
2739
+
2740
+ debug "deleting %ss over %d days old from %s" "${prune_type}" "${days}" "${repository}"
2741
+
2742
+ local page=$((1))
2743
+ while true; do
2744
+ local release_list list_length
2745
+
2746
+ release_list=$(github_request -H "Accept: application/vnd.github+json" \
2747
+ "https://api.github.com/repos/${repository}/releases?per_page=100&page=${page}") ||
2748
+ failure "Failed to request releases list for pruning on ${repository}"
2749
+
2750
+ list_length="$(jq 'length' <( printf "%s" "${release_list}" ))" ||
2751
+ failure "Failed to calculate release length for pruning on ${repository}"
2752
+
2753
+ if [ -z "${list_length}" ] || [ "${list_length}" -lt "1" ]; then
2754
+ debug "releases listing page %d for %s is empty" "${page}" "${repository}"
2755
+ break
2756
+ fi
2757
+
2758
+ local entry i release_type release_name release_id release_create date_check
2759
+ for (( i=0; i < "${list_length}"; i++ )); do
2760
+ entry="$(jq ".[${i}]" <( printf "%s" "${release_list}" ))" ||
2761
+ failure "Failed to read entry for pruning on %s" "${repository}"
2762
+ release_type="$(jq -r ".${prune_type}" <( printf "%s" "${entry}" ))" ||
2763
+ failure "Failed to read entry %s for pruning on %s" "${prune_type}" "${repository}"
2764
+ release_name="$(jq -r '.name' <( printf "%s" "${entry}" ))" ||
2765
+ failure "Failed to read entry name for pruning on %s" "${repository}"
2766
+ release_id="$(jq -r '.id' <( printf "%s" "${entry}" ))" ||
2767
+ failure "Failed to read entry ID for pruning on %s" "${repository}"
2768
+ release_create="$(jq -r '.created_at' <( printf "%s" "${entry}" ))" ||
2769
+ failure "Failed to read entry created date for pruning on %s" "${repository}"
2770
+ date_check="$(date --date="${release_create}" '+%s')" ||
2771
+ failure "Failed to parse entry created date for pruning on %s" "${repository}"
2772
+
2773
+ if [ "${release_type}" != "true" ]; then
2774
+ debug "Skipping %s on %s because release is not a %s" "${release_name}" "${repository}" "${prune_type}"
2775
+ continue
2776
+ fi
2777
+
2778
+ if [ "$(( "${date_check}" ))" -lt "${prune_seconds}" ]; then
2779
+ info "Deleting release %s from %s\n" "${release_name}" "${prune_repo}"
2780
+ github_request -X DELETE "https://api.github.com/repos/${repository}/releases/${release_id}" ||
2781
+ failure "Failed to prune %s %s from %s" "${prune_type}" "${release_name}" "${repository}"
2782
+ fi
2783
+ done
2784
+ ((page++))
2785
+ done
2786
+
2787
+ repository="${repository_bak}" # restore the repository value
2788
+ }
2789
+
2790
+ # Delete all but the latest N number of releases of the provided type
2791
+ #
2792
+ # $1: type (prerelease or draft)
2793
+ # $2: number of releases to retain
2794
+ # $3: repository name (optional, defaults to current repository name)
2795
+ function github_release_prune_retain() {
2796
+ local prune_type="${1}"
2797
+ if [ -z "${prune_type}" ]; then
2798
+ failure "Type is required for release pruning"
2799
+ fi
2800
+ if [ "${prune_type}" != "draft" ] && [ "${prune_type}" != "prerelease" ]; then
2801
+ failure "Invalid release pruning type provided '%s' (supported: draft or prerelease)" "${prune_type}"
2802
+ fi
2803
+
2804
+ local retain="${2}"
2805
+ if [ -z "${retain}" ]; then
2806
+ failure "Number of releases to retain is required for pruning"
2807
+ fi
2808
+ if [[ "${retain}" = *[!0123456789]* ]]; then
2809
+ failure "Invalid value provided for number of releases to retain when pruning (%s)" "${days}"
2810
+ fi
2811
+
2812
+ local prune_repo="${3:-$repo_name}"
2813
+ if [ -z "${prune_repo}" ]; then
2814
+ failure "Repository name is required for pruning"
2815
+ fi
2816
+
2817
+ # Override repository value to get correct token automatically
2818
+ local repository_bak="${repository}"
2819
+ repository="${repo_owner}/${prune_repo}"
2820
+
2821
+ debug "pruning all %s type releases except latest %d releases" "${prune_type}" "${retain}"
2822
+ local prune_list=()
2823
+ local page=$((1))
2824
+ while true; do
2825
+ local release_list list_length
2826
+
2827
+ release_list=$(github_request -H "Accept: application/vnd.github+json" \
2828
+ "https://api.github.com/repos/${repository}/releases?per_page=100&page=${page}&sort=created_at&direction=desc") ||
2829
+ failure "Failed to request releases list for pruning on ${repository}"
2830
+ list_length="$(jq 'length' <( printf "%s" "${release_list}" ))" ||
2831
+ failure "Failed to calculate release length for pruning on ${repository}"
2832
+ if [ -z "${list_length}" ] || [ "${list_length}" -lt "1" ]; then
2833
+ debug "releases listing page %d for %s is empty" "${page}" "${repository}"
2834
+ break
2835
+ fi
2836
+
2837
+ local entry i release_type release_name release_id release_create date_check
2838
+ for (( i=0; i < "${list_length}"; i++ )); do
2839
+ entry="$(jq ".[${i}]" <( printf "%s" "${release_list}" ))" ||
2840
+ failure "Failed to read entry for pruning on %s" "${repository}"
2841
+ release_type="$(jq -r ".${prune_type}" <( printf "%s" "${entry}" ))" ||
2842
+ failure "Failed to read entry %s for pruning on %s" "${prune_type}" "${repository}"
2843
+ release_name="$(jq -r '.name' <( printf "%s" "${entry}" ))" ||
2844
+ failure "Failed to read entry name for pruning on %s" "${repository}"
2845
+ release_id="$(jq -r '.id' <( printf "%s" "${entry}" ))" ||
2846
+ failure "Failed to read entry ID for pruning on %s" "${repository}"
2847
+
2848
+ if [ "${release_type}" != "true" ]; then
2849
+ debug "Skipping %s on %s because release is not a %s" "${release_name}" "${repository}" "${prune_type}"
2850
+ continue
2851
+ fi
2852
+
2853
+ debug "adding %s '%s' to prune list (ID: %s)" "${prune_type}" "${release_name}" "${release_id}"
2854
+ prune_list+=( "${release_id}" )
2855
+ done
2856
+ (( page++ ))
2857
+ done
2858
+
2859
+ local prune_count="${#prune_list[@]}"
2860
+ local prune_trim=$(( "${prune_count}" - "${retain}" ))
2861
+
2862
+ # If there won't be any remaining items in the list, bail
2863
+ if [ "${prune_trim}" -le 0 ]; then
2864
+ debug "no %ss in %s to prune" "${prune_type}" "${repository}"
2865
+ repository="${repository_bak}" # restore the repository value
2866
+ return 0
2867
+ fi
2868
+
2869
+ # Trim down the list to what should be deleted
2870
+ prune_list=("${prune_list[@]:$retain:$prune_trim}")
2871
+
2872
+ # Now delete what is left in the list
2873
+ local r_id
2874
+ for r_id in "${prune_list[@]}"; do
2875
+ debug "deleting release (ID: %s) from %s" "${r_id}" "${repository}"
2876
+ github_request -X DELETE "https://api.github.com/repos/${repository}/releases/${r_id}" ||
2877
+ failure "Failed to prune %s %s from %s" "${prune_type}" "${r_id}" "${repository}"
2878
+ done
2879
+
2880
+ repository="${repository_bak}" # restore the repository value
2881
+ }
2882
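+
+ # Example usage (hypothetical count; keeps only the five newest draft releases):
+ #   github_release_prune_retain "draft" 5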
+
2883
+ # Grab the correct github token to use for authentication. The
2884
+ # rules used for the token to return are as follows:
2885
+ #
2886
+ # * only $GITHUB_TOKEN is set: $GITHUB_TOKEN
2887
+ # * only $HASHIBOT_TOKEN is set: $HASHIBOT_TOKEN
2888
+ #
2889
+ # when both $GITHUB_TOKEN and $HASHIBOT_TOKEN are set:
2890
+ #
2891
+ # * $repository value matches $GITHUB_REPOSITORY: $GITHUB_TOKEN
2892
+ # * $repository value does not match $GITHUB_REPOSITORY: $HASHIBOT_TOKEN
2893
+ #
2894
+ # Will return `0` when a token is returned, `1` when no token is returned
2895
+ function github_token() {
2896
+ local gtoken
2897
+
2898
+ # Return immediately if no tokens are available
2899
+ if [ -z "${GITHUB_TOKEN}" ] && [ -z "${HASHIBOT_TOKEN}" ]; then
2900
+ debug "no github or hashibot token set"
2901
+ return 1
2902
+ fi
2903
+
2904
+ # Return token if only one token exists
2905
+ if [ -n "${GITHUB_TOKEN}" ] && [ -z "${HASHIBOT_TOKEN}" ]; then
2906
+ debug "only github token set"
2907
+ printf "%s\n" "${GITHUB_TOKEN}"
2908
+ return 0
2909
+ elif [ -n "${HASHIBOT_TOKEN}" ] && [ -z "${GITHUB_TOKEN}" ]; then
2910
+ debug "only hashibot token set"
2911
+ printf "%s\n" "${HASHIBOT_TOKEN}"
2912
+ return 0
2913
+ fi
2914
+
2915
+ # If the $repository matches the original $GITHUB_REPOSITORY use the local token
2916
+ if [ "${repository}" = "${GITHUB_REPOSITORY}" ]; then
2917
+ debug "prefer github token "
2918
+ printf "%s\n" "${GITHUB_TOKEN}"
2919
+ return 0
2920
+ fi
2921
+
2922
+ # Still here, then we send back that hashibot token
2923
+ printf "%s\n" "${HASHIBOT_TOKEN}"
2924
+ return 0
2925
+ }
2926
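+
+ # Example usage (token is only printed when one is available):
+ #   if gtoken="$(github_token)"; then
+ #     curl_args+=("-H" "Authorization: token ${gtoken}")
+ #   fi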
+
2927
+ # This function is used to make requests to the GitHub API. It
2928
+ # accepts the same argument list that would be provided to the
2929
+ # curl executable. It will check the response status and if a
2930
+ # 429 is received (rate limited) it will pause until the defined
2931
+ # rate limit reset time and then try again.
2932
+ #
2933
+ # NOTE: Informative information (like rate limit pausing) will
2934
+ # be printed to stderr. The response body will be printed to
2935
+ # stdout. Return value of the function will be the exit code
2936
+ # from the curl process.
2937
+ function github_request() {
2938
+ local request_exit=0
2939
+ local info_prefix="__info__"
2940
+ local info_tmpl="${info_prefix}:code=%{response_code}:header=%{size_header}:download=%{size_download}:file=%{filename_effective}"
2941
+ local raw_response_content
2942
+
2943
+ local curl_cmd=("curl" "-w" "${info_tmpl}" "-i" "-SsL" "--fail")
2944
+ local gtoken
2945
+
2946
+ # Only add the authentication token if we have one
2947
+ if gtoken="$(github_token)"; then
2948
+ curl_cmd+=("-H" "Authorization: token ${gtoken}")
2949
+ fi
2950
+
2951
+ # Attach the rest of the arguments
2952
+ curl_cmd+=("${@#}")
2953
+
2954
+ debug "initial request: %s" "${curl_cmd[*]}"
2955
+
2956
+ # Make our request
2957
+ raw_response_content="$("${curl_cmd[@]}")" || request_exit="${?}"
2958
+
2959
+ # Define the status here since we will set it in
2960
+ # the conditional below if something weird happens
2961
+ local status
2962
+
2963
+ # Check if our response content starts with the info prefix.
2964
+ # If it does, we need to extract the headers from the file.
2965
+ if [[ "${raw_response_content}" = "${info_prefix}"* ]]; then
2966
+ debug "extracting request information from: %s" "${raw_response_content}"
2967
+ raw_response_content="${raw_response_content#"${info_prefix}":code=}"
2968
+ local response_code="${raw_response_content%%:*}"
2969
+ debug "response http code: %s" "${response_code}"
2970
+ raw_response_content="${raw_response_content#*:header=}"
2971
+ local header_size="${raw_response_content%%:*}"
2972
+ debug "response header size: %s" "${header_size}"
2973
+ raw_response_content="${raw_response_content#*:download=}"
2974
+ local download_size="${raw_response_content%%:*}"
2975
+ debug "response file size: %s" "${download_size}"
2976
+ raw_response_content="${raw_response_content#*:file=}"
2977
+ local file_name="${raw_response_content}"
2978
+ debug "response file name: %s" "${file_name}"
2979
+ if [ -f "${file_name}" ]; then
2980
+ # Read the headers from the file and place them in the
2981
+ # raw_response_content to be processed
2982
+ local download_fd
2983
+ exec {download_fd}<"${file_name}"
2984
+ debug "file descriptor created for header grab (source: %s): %q" "${file_name}" "${download_fd}"
2985
+ debug "reading response header content from %s" "${file_name}"
2986
+ read -r -N "${header_size}" -u "${download_fd}" raw_response_content
2987
+ # Close our descriptor
2988
+ debug "closing file descriptor: %q" "${download_fd}"
2989
+ exec {download_fd}<&-
2990
+ # Now trim the headers from the file content
2991
+ debug "trimming response header content from %s" "${file_name}"
2992
+ tail -c "${download_size}" "${file_name}" > "${file_name}.trimmed" ||
2993
+ failure "Could not trim headers from downloaded file (%s)" "${file_name}"
2994
+ mv -f "${file_name}.trimmed" "${file_name}" ||
2995
+ failure "Could not replace downloaded file with trimmed file (%s)" "${file_name}"
2996
+ else
2997
+ debug "expected file not found (%s)" "${file_name}"
2998
+ status="${response_code}"
2999
+ fi
3000
+ else
3001
+ # Since the response wasn't written to a file, trim the
3002
+ # info from the end of the response
3003
+ if [[ "${raw_response_content}" != *"${info_prefix}"* ]]; then
3004
+ debug "github request response does not include information footer"
3005
+ failure "Unexpected error encountered, partial GitHub response returned"
3006
+ fi
3007
+ raw_response_content="${raw_response_content%"${info_prefix}"*}"
3008
+ fi
3009
+
3010
+ local ratelimit_reset
3011
+ local response_content=""
3012
+
3013
+ # Read the response into lines for processing
3014
+ local lines
3015
+ mapfile -t lines < <( printf "%s" "${raw_response_content}" )
3016
+
3017
+ # Process the lines to extract out status and rate
3018
+ # limit information. Populate the response_content
3019
+ # variable with the actual response value
3020
+ local i
3021
+ for (( i=0; i < "${#lines[@]}"; i++ )); do
3022
+ # The line will have a trailing `\r` so just
3023
+ # trim it off
3024
+ local line="${lines[$i]%%$'\r'*}"
3025
+ # strip any leading/trailing whitespace characters
3026
+ read -rd '' line <<< "${line}"
3027
+
3028
+ if [ -z "${line}" ] && [[ "${status}" = "2"* ]]; then
3029
+ local start="$(( i + 1 ))"
3030
+ local remain="$(( "${#lines[@]}" - "${start}" ))"
3031
+ local response_lines=("${lines[@]:$start:$remain}")
3032
+ response_content="${response_lines[*]}"
3033
+ break
3034
+ fi
3035
+
3036
+ if [[ "${line}" == "HTTP/"* ]]; then
3037
+ status="${line##* }"
3038
+ debug "http status found: %d" "${status}"
3039
+ fi
3040
+ if [[ "${line}" == "x-ratelimit-reset"* ]]; then
3041
+ ratelimit_reset="${line##*ratelimit-reset: }"
3042
+ debug "ratelimit reset time found: %s" "${ratelimit_reset}"
3043
+ fi
3044
+ done
3045
+
3046
+ # If the status was not detected, force an error
3047
+ if [ -z "${status}" ]; then
3048
+ failure "Failed to detect response status for GitHub request"
3049
+ fi
3050
+
3051
+ # If the status was a 2xx code then everything is good
3052
+ # and we can return the response and be done
3053
+ if [[ "${status}" = "2"* ]]; then
3054
+ printf "%s" "${response_content}"
3055
+ return 0
3056
+ fi
3057
+
3058
+ # If we are being rate limited, print a notice and then
3059
+ # wait until the rate limit resets
3060
+ if [[ "${status}" = "429" ]] || [[ "${status}" = "403" ]]; then
3061
+ debug "request returned %d status, checking for rate limiting" "${status}"
3062
+
3063
+ # If the ratelimit reset was not detected, force an error
3064
+ if [ -z "${ratelimit_reset}" ]; then
3065
+ if [ "${status}" = "403" ]; then
3066
+ failure "Request failed with 403 status response"
3067
+ fi
3068
+ failure "Failed to detect rate limit reset time for GitHub request"
3069
+ fi
3070
+
3071
+ debug "rate limiting has been detected on request"
3072
+
3073
+ local reset_date
3074
+ reset_date="$(date --date="@${ratelimit_reset}")" ||
3075
+ failure "Failed to GitHub parse ratelimit reset timestamp (${ratelimit_reset})"
3076
+
3077
+ local now
3078
+ now="$( date '+%s' )" || failure "Failed to get current timestamp in ratelimit check"
3079
+ local reset_wait="$(( "${ratelimit_reset}" - "${now}" + 2))"
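+ # Hypothetical example: ratelimit_reset=1700000120 with now=1700000000
+ # gives reset_wait=122; the extra 2 seconds appear to pad for clock skew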
3080
+
3081
+ printf "GitHub rate limit encountered, reset at %s (waiting %d seconds)\n" \
3082
+ "${reset_date}" "${reset_wait}" >&2
3083
+
3084
+ sleep "${reset_wait}" || failure "Pause for GitHub rate limited request retry failed"
3085
+
3086
+ github_request "${@}"
3087
+ return "${?}"
3088
+ fi
3089
+
3090
+ # At this point we just need to return error information
3091
+ printf "GitHub request returned HTTP status: %d\n" "${status}" >&2
3092
+ printf "Response body: %s\n" "${response_content}" >&2
3093
+
3094
+ return "${request_exit}"
3095
+ }
3096
+
3097
+ # Lock issues which have been closed for longer than
3098
+ # the provided number of days. A date can optionally be
3099
+ # provided which will be used as the earliest date to
3100
+ # search. A message can optionally be provided which
3101
+ # will be added as a comment in the issue before locking.
3102
+ #
3103
+ # -d: number of days
3104
+ # -m: message to include when locking the issue (optional)
3105
+ # -s: date to begin searching from (optional)
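+ #
+ # A usage sketch (all flag values are hypothetical examples):
+ #
+ #   lock_issues -d 30 -s "2023-01-01" \
+ #     -m "Locked because this issue has been closed for 30 days."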
3106
+ function lock_issues() {
3107
+ local OPTIND opt days start since message
3108
+ while getopts ":d:s:m:" opt; do
3109
+ case "${opt}" in
3110
+ "d") days="${OPTARG}" ;;
3111
+ "s") start="${OPTARG}" ;;
3112
+ "m") message="${OPTARG}" ;;
3113
+ *) failure "Invalid flag provided to lock_issues" ;;
3114
+ esac
3115
+ done
3116
+ shift $((OPTIND-1))
3117
+
3118
+ # If days were not provided, return an error
3119
+ if [ -z "${days}" ]; then
3120
+ failure "Number of days since closed required for locking issues"
3121
+ fi
3122
+ # If a start date was provided, check that it is a format we can read
3123
+ if [ -n "${start}" ]; then
3124
+ if ! since="$(date --iso-8601=seconds --date="${start}" 2> /dev/null)"; then
3125
+ failure "$(printf "Start date provided for issue locking could not be parsed (%s)" "${start}")"
3126
+ fi
3127
+ fi
3128
+
3129
+ debug "locking issues that have been closed for at least %d days" "${days}"
3130
+
3131
+ local req_args=()
3132
+ # Start with basic setup
3133
+ req_args+=("-H" "Accept: application/vnd.github+json")
3134
+ # Add authorization header
3135
+ req_args+=("-H" "Authorization: token ${GITHUB_TOKEN}")
3136
+ # Construct our request endpoint
3137
+ local req_endpoint="https://api.github.com/repos/${repository}/issues"
3138
+ # Page counter for requests
3139
+ local page=$(( 1 ))
3140
+ # Request arguments
3141
+ local req_params=("per_page=20" "state=closed")
3142
+
3143
+ # If we have a start time, include it
3144
+ if [ -n "${since}" ]; then
3145
+ req_params+=("since=${since}")
3146
+ fi
3147
+
3148
+ # Compute upper bound on closed time for issues we can lock
3149
+ local lock_seconds now
3150
+ now="$(date '+%s')"
3151
+ lock_seconds=$(("${now}"-("${days}" * 86400)))
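+ # e.g. with hypothetical values days=30 and now=1700000000 this is
+ # lock_seconds=1700000000-(30*86400)=1697408000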
3152
+
3153
+ while true; do
3154
+ # Join all request parameters with '&'
3155
+ local IFS_BAK="${IFS}"
3156
+ IFS="&"
3157
+ local all_params=("${req_params[*]}" "page=${page}")
3158
+ local params="${all_params[*]}"
3159
+ IFS="${IFS_BAK}"
3160
+
3161
+ local issue_list issue_count
3162
+ # Make our request to get a page of issues
3163
+ issue_list="$(github_request "${req_args[@]}" "${req_endpoint}?${params}")" ||
3164
+ failure "Failed to get repository issue list for ${repository}"
3165
+ issue_count="$(jq 'length' <( printf "%s" "${issue_list}" ))" ||
3166
+ failure "Failed to compute count of issues in list for ${repository}"
3167
+
3168
+ if [ -z "${issue_count}" ] || [ "${issue_count}" -lt 1 ]; then
3169
+ break
3170
+ fi
3171
+
3172
+ # Iterate through the list
3173
+ local i
3174
+ for (( i=0; i < "${issue_count}"; i++ )); do
3175
+ # Extract the issue we are going to process
3176
+ local issue
3177
+ issue="$(jq ".[${i}]" <( printf "%s" "${issue_list}" ))" ||
3178
+ failure "Failed to extract issue from list for ${repository}"
3179
+
3180
+ # Grab the number of this issue (the REST API addresses issues
+ # by number, not by the global database id)
3181
+ local issue_id
3182
+ issue_id="$(jq -r '.number' <( printf "%s" "${issue}" ))" ||
3183
+ failure "Failed to read number of issue for ${repository}"
3184
+
3185
+ # First check if issue is already locked
3186
+ local issue_locked
3187
+ issue_locked="$(jq -r '.locked' <( printf "%s" "${issue}" ))" ||
3188
+ failure "Failed to read locked state of issue for ${repository}"
3189
+
3190
+ if [ "${issue_locked}" == "true" ]; then
3191
+ debug "Skipping %s#%s because it is already locked" "${repository}" "${issue_id}"
3192
+ continue
3193
+ fi
3194
+
3195
+ # Get the closed date
3196
+ local issue_closed
3197
+ issue_closed="$(jq -r '.closed_at' <( printf "%s" "${issue}" ))" ||
3198
+ failure "Failed to read closed at date of issue for ${repository}"
3199
+
3200
+ # Convert closed date to unix timestamp
3201
+ local date_check
3202
+ date_check="$( date --date="${issue_closed}" '+%s' )" ||
3203
+ failure "Failed to parse closed at date of issue for ${repository}"
3204
+
3205
+ # Check if the issue is old enough to be locked
3206
+ if [ "$(( "${date_check}" ))" -lt "${lock_seconds}" ]; then
3207
+ printf "Locking issue %s#%s\n" "${repository}" "${issue_id}" >&2
3208
+
3209
+ # If we have a comment to add before locking, do that now
3210
+ if [ -n "${message}" ]; then
3211
+ local message_json
3212
+ message_json=$(jq -n \
3213
+ --arg msg "$(printf "%b" "${message}")" \
3214
+ '{body: $msg}'
3215
+ ) || failure "Failed to create issue comment JSON content for ${repository}"
3216
+
3217
+ debug "adding issue comment before locking on %s#%s" "${repository}" "${issue_id}"
3218
+
3219
+ github_request "${req_args[@]}" -X POST "${req_endpoint}/${issue_id}/comments" -d "${message_json}" ||
3220
+ failure "Failed to create issue comment on ${repository}#${issue_id}"
3221
+ fi
3222
+
3223
+ # Lock the issue
3224
+ github_request "${req_args[@]}" -X PUT "${req_endpoint}/${issue_id}/lock" -d '{"lock_reason":"resolved"}' ||
3225
+ failure "Failed to lock issue ${repository}#${issue_id}"
3226
+ fi
3227
+ done
3228
+
3229
+ ((page++))
3230
+ done
3231
+ }
3232
+
3233
+ # Send a repository dispatch to the defined repository
3234
+ #
3235
+ # $1: repository name
3236
+ # $2: event type (single word string)
3237
+ # $n: "key=value" pairs to build payload (optional)
3238
+ #
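+ # A usage sketch (repository name and payload values are hypothetical):
+ #
+ #   github_repository_dispatch "vagrant-acceptance" "prerelease" \
+ #     "version=1.0.0" "commit=${short_sha}"
+ #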
3239
+ function github_repository_dispatch() {
3240
+ local drepo_name="${1}"
3241
+ local event_type="${2}"
3242
+
3243
+ if [ -z "${drepo_name}" ]; then
3244
+ failure "Repository name is required for repository dispatch"
3245
+ fi
3246
+
3247
+ # shellcheck disable=SC2016
3248
+ local payload_template='{"vagrant-ci": $vagrant_ci'
3249
+ local jqargs=("--arg" "vagrant_ci" "true")
3250
+ local arg
3251
+ for arg in "${@:3}"; do
3252
+ local payload_key="${arg%%=*}"
3253
+ # Split on the first '=' so values may themselves contain '='
+ local payload_value="${arg#*=}"
3254
+ payload_template+=", \"${payload_key}\": \$${payload_key}"
3255
+ # shellcheck disable=SC2089
3256
+ jqargs+=("--arg" "${payload_key}" "${payload_value}")
3257
+ done
3258
+ payload_template+="}"
3259
+
3260
+ # NOTE: we want the arguments to be expanded below
3261
+ local payload
3262
+ payload=$(jq -n "${jqargs[@]}" "${payload_template}" ) ||
3263
+ failure "Failed to generate repository dispatch payload"
3264
+
3265
+ # shellcheck disable=SC2016
3266
+ local msg_template='{event_type: $event_type, client_payload: $payload}'
3267
+ local msg
3268
+ msg=$(jq -n \
3269
+ --argjson payload "${payload}" \
3270
+ --arg event_type "${event_type}" \
3271
+ "${msg_template}" \
3272
+ ) || failure "Failed to generate repository dispatch message"
3273
+
3274
+ # Update repository value to get correct token
3275
+ local repository_bak="${repository}"
3276
+ repository="${repo_owner}/${drepo_name}"
3277
+
3278
+ github_request -X "POST" \
3279
+ -H 'Accept: application/vnd.github.everest-v3+json' \
3280
+ --data "${msg}" \
3281
+ "https://api.github.com/repos/${repo_owner}/${drepo_name}/dispatches" ||
3282
+ failure "Repository dispatch to ${repo_owner}/${drepo_name} failed"
3283
+
3284
+ # Restore the repository value
3285
+ repository="${repository_bak}"
3286
+ }
3287
+
3288
+ # Copy a function to a new name
3289
+ #
3290
+ # $1: Original function name
3291
+ # $2: Copy function name
3292
+ function copy_function() {
3293
+ local orig="${1}"
3294
+ local new="${2}"
3295
+ local fn
3296
+ fn="$(declare -f "${orig}")" ||
3297
+ failure "Orignal function (${orig}) not defined"
3298
+ fn="${new}${fn#*"${orig}"}"
3299
+ eval "${fn}"
3300
+ }
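+ # Illustration of the mechanism (hypothetical names): for
+ # `copy_function original_fn copy_fn`, `declare -f original_fn` prints
+ # "original_fn () { ...body... }"; everything through the original name
+ # is replaced with "copy_fn" and the result is eval'd to define the copy.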
3301
+
3302
+ # Rename a function to a new name
3303
+ #
3304
+ # $1: Original function name
3305
+ # $2: New function name
3306
+ function rename_function() {
3307
+ local orig="${1}"
3308
+ copy_function "${@}"
3309
+ unset -f "${orig}"
3310
+ }
3311
+
3312
+ # Cleanup wrapper so we emit some output when cleanup starts
3313
+ function _cleanup() {
3314
+ debug "* Running cleanup task..."
3315
+ # Always restore this value for cases where a failure
3316
+ # happened within a function while this value was in
3317
+ # a modified state
3318
+ repository="${_repository_backup}"
3319
+ cleanup
3320
+ }
3321
+
3322
+ # Stub cleanup method which can be redefined
3323
+ # within actual script
3324
+ function cleanup() {
3325
+ debug "** No cleanup tasks defined"
3326
+ }
3327
+
3328
+ # Only set up our cleanup trap and fail alias when not in testing
3329
+ if [ -z "${BATS_TEST_FILENAME}" ]; then
3330
+ trap _cleanup EXIT
3331
+ # This is a compatibility alias for existing scripts which
3332
+ # use the common.sh library. BATS support defines a `fail`
3333
+ # function so it has been renamed `failure` to prevent the
3334
+ # name collision. When not running under BATS we enable the
3335
+ # `fail` function so any scripts that have not been updated
3336
+ # will not be affected.
3337
+ copy_function "failure" "fail"
3338
+ fi
3339
+
3340
+ # Make sure the CI bin directory exists
3341
+ if [ ! -d "${ci_bin_dir}" ]; then
3342
+ wrap mkdir -p "${ci_bin_dir}" \
3343
+ "Failed to create CI bin directory"
3344
+ fi
3345
+
3346
+ # Always ensure CI bin directory is in PATH
3347
+ if [[ "${PATH}" != *"${ci_bin_dir}"* ]]; then
3348
+ export PATH="${PATH}:${ci_bin_dir}"
3349
+ fi
3350
+
3351
+ # Enable debugging. This needs to be enabled with
3352
+ # extreme caution when used on public repositories.
3353
+ # Output with debugging enabled will likely include
3354
+ # secret values which should not be publicly exposed.
3355
+ #
3356
+ # If repository is public, FORCE_PUBLIC_DEBUG environment
3357
+ # variable must also be set.
3358
+
3359
+ priv_args=("-H" "Accept: application/json")
3360
+ # If we have a token available, use it for the check query
3361
+ if [ -n "${HASHIBOT_TOKEN}" ]; then
3362
+ priv_args+=("-H" "Authorization: token ${GITHUB_TOKEN}")
3363
+ elif [ -n "${GITHUB_TOKEN}" ]; then
3364
+ priv_args+=("-H" "Authorization: token ${HASHIBOT_TOKEN}")
3365
+ fi
3366
+
3367
+ if [ -n "${GITHUB_ACTIONS}" ]; then
3368
+ priv_check="$(curl "${priv_args[@]}" -s "https://api.github.com/repos/${GITHUB_REPOSITORY}" | jq .private)" ||
3369
+ failure "Repository visibility check failed"
3370
+ fi
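+ # Note: `jq .private` prints "true" or "false" from the repository
+ # payload, or "null" when the field is absent (e.g. an API error body);
+ # the check below treats anything but "true" as not private.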
3371
+
3372
+ # If the value wasn't true, mark the repository as public. The
3373
+ # repository might actually be private, but if we weren't supplied a
3374
+ # token (or one with the correct permissions) we fall back to the safe
3375
+ # assumption that it is not private.
3376
+ if [ "${priv_check}" != "true" ]; then
3377
+ readonly is_public="1"
3378
+ readonly is_private=""
3379
+ else
3380
+ # shellcheck disable=SC2034
3381
+ readonly is_public=""
3382
+ # shellcheck disable=SC2034
3383
+ readonly is_private="1"
3384
+ fi
3385
+
3386
+ # Check if we are running a job created by a tag. If so,
3387
+ # mark this as being a release job and set the release_version
3388
+ if [[ "${GITHUB_REF}" == *"refs/tags/"* ]]; then
3389
+ export tag="${GITHUB_REF##*tags/}"
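+ # e.g. GITHUB_REF="refs/tags/v1.2.3" yields tag="v1.2.3" and, for a
+ # valid release version, release_version="1.2.3" below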
3390
+ if valid_release_version "${tag}"; then
3391
+ readonly release=1
3392
+ # Strip only the leading "v" so versions containing a later "v" survive
+ export release_version="${tag#v}"
3393
+ else
3394
+ readonly release
3395
+ fi
3396
+ else
3397
+ # shellcheck disable=SC2034
3398
+ readonly release
3399
+ fi
3400
+
3401
+ # Seed an initial output file
3402
+ output_file > /dev/null 2>&1