@colisweb/rescript-toolkit 5.35.0 → 5.35.2
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -92,63 +92,6 @@ aws_ecr_token() {
   aws ecr get-authorization-token --region=eu-west-1 --output text --query 'authorizationData[].authorizationToken'
 }

-# you will need jq to use these commands. You can install it using "brew install jq"
-# delete_images colisweb_api 8
-# will delete images older than 8 weeks
-delete_images() {
-
-  REPO=$1
-  WEEKS=${2:-16}
-
-  WEEKS_AGO=$(date -v-${WEEKS}w +%F)
-
-  #Get all ecr images
-  IMAGES=$(aws ecr describe-images --repository-name $REPO --output json)
-
-  #Filter unnecessary values and map `imagePushedAt` to EPOCH
-  NON_LATEST_IMAGES=$(echo $IMAGES | jq '[.imageDetails[] | select(.imageTags | any(endswith("latest")) | not)]')
-
-  #Filter on EPOCH
-  OLD_IMAGES=$(echo $NON_LATEST_IMAGES | jq --arg date $WEEKS_AGO '.[] | select(.imagePushedAt[0:10] < $date).imageDigest')
-  while IFS= read -r IMAGE; do
-    if [ "$IMAGE" != "" ]; then
-      echo "Deleting $IMAGE from $REPO"
-      AWS_PAGER="" aws ecr batch-delete-image --repository-name $REPO --image-ids imageDigest=$IMAGE
-    fi
-  done <<< "$OLD_IMAGES"
-}
-
-# delete_images_all_repos 12
-# will delete images in all repositories older than 12 weeks
-delete_images_all_repos() {
-  REPOSITORIES=$(aws ecr describe-repositories --output json | jq -r '.[]|.[].repositoryName')
-
-  while IFS= read -r REPO; do
-    echo "processing ECR repository $REPO"
-    delete_images $REPO $1
-  done <<< "$REPOSITORIES"
-}
-
-delete_old_cache() {
-  DATE=${1:-$(date -v-1m +%F)}
-  CACHE_BUCKET=${2:-"s3://gitlab-colisweb-distributed-cache/project/"}
-
-  echo "deleting from cache $CACHE_BUCKET all older than $DATE"
-
-  aws_ecr_login
-
-  while read -r line; do
-    datum=$(echo $line | cut -c1-10)
-    if [[ "$datum" < "$DATE" ]] ; then
-      # Shell Parameter Expansion: ${parameter##word}
-      # Allow to return the result from "word" to the end of "parameters"
-      # Here we need the end of the string after "project/" (corresponding to the S3 gitlab project id and filename)
-      TO_DELETE="$CACHE_BUCKET${line##* project/}"
-      echo $TO_DELETE
-      aws s3 rm $TO_DELETE
-    fi
-  done < <(aws s3 ls $CACHE_BUCKET --recursive)
-}

 #!/usr/bin/env bash

@@ -250,11 +193,11 @@ entries:
 cronjob:
 EOT

-  #
-
-
-
-
+  # the helm stable repo has changed and must be updated manually in versions < v2.17.0
+  helm repo add colisweb s3://colisweb-helm-charts/colisweb --force-update
+  helm repo add stable https://charts.helm.sh/stable --force-update
+  helm repo update
+  helm dependency update ${ROOT_PATH}/${CHART_PATH}

 # Gather values/*.yaml files
 VALUES_PATH="${ROOT_PATH}/${CHART_NAME}/values"
@@ -262,7 +205,7 @@ EOT
   [ -d $VALUES_PATH ] && VALUES_FILES=$(find $VALUES_PATH -type f -maxdepth 1 -name "*.yaml" | sed 's/^/ -f /' | tr -d \\n | sed 's/%//')

   # Deploy
-
+  helm upgrade --install \
     --namespace ${ENVIRONMENT} \
     ${VALUES_FILES} \
     -f ${ROOT_PATH}/${CONFIG_PATH}/common.yaml \
@@ -272,7 +215,7 @@ EOT
     ${CHART_NAME} ${ROOT_PATH}/${CHART_PATH}


-
+  verify_deployments -t 10m $ENVIRONMENT $CHART_NAME

 }

@@ -509,12 +452,12 @@ configure_kubectl_for() {
 database_k8s() {
   MODE=$1
   case $MODE in
-    "tests") SSH_LOCAL_PORT=2224;PG_LOCAL_PORT=24440;CA_LOCAL_PORT=25430;ENV="tests";;
-    "testing") SSH_LOCAL_PORT=2225;PG_LOCAL_PORT=24441;CA_LOCAL_PORT=25431;ENV="testing";;
-    "staging") SSH_LOCAL_PORT=2226;PG_LOCAL_PORT=24442;CA_LOCAL_PORT=25432;ENV="staging";;
-    "production") SSH_LOCAL_PORT=2227;PG_LOCAL_PORT=24443;CA_LOCAL_PORT=25433;ENV="production";;
-    "production_rw") SSH_LOCAL_PORT=2227;PG_LOCAL_PORT=24444;CA_LOCAL_PORT=25434;ENV="production";;
-    "recette") SSH_LOCAL_PORT=2228;PG_LOCAL_PORT=24446;CA_LOCAL_PORT=25436;ENV="recette";;
+    "tests") SSH_LOCAL_PORT=2224;COMP_LOCAL_PORT=25550;PG_LOCAL_PORT=24440;CA_LOCAL_PORT=25430;ENV="tests";;
+    "testing") SSH_LOCAL_PORT=2225;COMP_LOCAL_PORT=25551;PG_LOCAL_PORT=24441;CA_LOCAL_PORT=25431;ENV="testing";;
+    "staging") SSH_LOCAL_PORT=2226;COMP_LOCAL_PORT=25552;PG_LOCAL_PORT=24442;CA_LOCAL_PORT=25432;ENV="staging";;
+    "production") SSH_LOCAL_PORT=2227;COMP_LOCAL_PORT=25553;PG_LOCAL_PORT=24443;CA_LOCAL_PORT=25433;ENV="production";;
+    "production_rw") SSH_LOCAL_PORT=2227;COMP_LOCAL_PORT=25554;PG_LOCAL_PORT=24444;CA_LOCAL_PORT=25434;ENV="production";;
+    "recette") SSH_LOCAL_PORT=2228;COMP_LOCAL_PORT=25556;PG_LOCAL_PORT=24446;CA_LOCAL_PORT=25436;ENV="recette";;
     *) echo "Unsupported ENV : $MODE"; return 1 ;;
   esac

@@ -535,23 +478,27 @@ database_k8s() {
   HostName 127.0.0.1
   Port 2225
   LocalForward 24441 toutatis-testing-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
+  LocalForward 25551 toutatis-testing-composite-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
   LocalForward 25431 toutatis-testing-mysql-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
   LocalForward 25531 testapirds.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
 Host bastion_staging
   HostName 127.0.0.1
   Port 2226
   LocalForward 24442 toutatis-staging-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
+  LocalForward 25552 toutatis-staging-composite-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
   LocalForward 25432 toutatis-staging-mysql-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
 Host bastion_recette
   HostName 127.0.0.1
   Port 2228
   LocalForward 24446 toutatis-recette-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
+  LocalForward 25556 toutatis-recette-composite-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
   LocalForward 25436 toutatis-recette-mysql-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
   LocalForward 25536 testapirds.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
 Host bastion_production
   HostName 127.0.0.1
   Port 2227
   LocalForward 24443 toutatis-production-db-replica.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
+  LocalForward 25553 toutatis-production-composite-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
   LocalForward 25433 toutatis-production-mysql-db-replica.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
 EOF
 if [ "$MODE" = "production_rw" ] ; then
@@ -565,6 +512,7 @@ EOF
     -F "$bastion_config" \
     "bastion_$ENV"

+  echo "sample command (composite) : 'psql postgres://postgres@127.0.0.1:$COMP_LOCAL_PORT'"
   echo "sample command : 'psql postgres://postgres@127.0.0.1:$PG_LOCAL_PORT'"
   echo "sample command : 'mysql -u colisweb -h 127.0.0.1 -P $CA_LOCAL_PORT -p db_name'"

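
A quick sketch of what the new forward enables (ports taken from the staging case above; the database names are whatever exists on those hosts):

    database_k8s staging
    # main PostgreSQL DB through the existing forward
    psql postgres://postgres@127.0.0.1:24442
    # composite DB through the newly added forward
    psql postgres://postgres@127.0.0.1:25552
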
@@ -612,8 +560,6 @@ kube_init_database_once() {
   echo " Initializing Database '$db_database' for namespace $namespace"
   echo "======================="

-  set -x
-
   echo "Checking if Database '$db_database' exists"
   set +e
   psql_on_k8 $namespace once "$db_init_username:$db_init_password@$db_host:$db_port" -lqtA | cut -d\| -f1 | grep "^$db_database$"
@@ -843,8 +789,6 @@ kube_init_service_database() {

   local db_connection="$db_init_username:$db_init_password@$db_host:$db_port"

-  set -x
-
   echo "Checking if Database '$db_database' exists"
   set +e
   psql_on_k8 $namespace $service $db_connection -lqtA | cut -d\| -f1 | grep "^$db_database$"
@@ -912,6 +856,17 @@ EOF
 }
 #!/usr/bin/env bash

+function kstatus() {
+  if [ -z "$3" ]
+  then
+    configure_kubectl_for $1 && watch -n 1 "kubectl -n $1 get $2"
+  else
+    configure_kubectl_for $1 && watch -n 1 "kubectl -n $1 get $2 | grep $3"
+  fi
+}
+
+#!/usr/bin/env bash
+
 k8_nodes_stats() {
   kubectl get nodes -o name |
     xargs kubectl describe |
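
Usage sketch for the new kstatus helper (the environment and filter values are illustrative):

    # refresh the pod list for the staging namespace every second
    kstatus staging pods
    # same view, filtered through grep via the optional third argument
    kstatus staging pods api
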
@@ -999,6 +954,45 @@ pick_pod() {
   fi
 }

+# pods_resources $ENV
+# Will output a CSV (;) of all deployments on this environment with cpu and memory requests and limits.
+# Errors and null outputs are ignored and won't be in the output.
+pods_resources() {
+  ENV=$1
+  configure_kubectl_for $ENV
+  DEPLOYMENTS=(
+    $(kubectl -n $ENV get deployments | grep -Eo '^[^ ]+' | grep -v 'NAME')
+  )
+  echo "deployment; request_cpu; request_memory; limits_cpu; limits_memory"
+  for D in "${DEPLOYMENTS[@]}"; do
+    info=$(kubectl -n $ENV get deployment -o yaml $D |
+      yq '.spec.template.spec.containers[].resources' |
+      yq '.L = .requests.cpu + "; " + .requests.memory + "; " + .limits.cpu + "; " + .limits.memory' |
+      yq ".L" 2>/dev/null)
+    if ! [ "$info" = "null" ]; then
+      echo "$D; $info"
+    fi
+  done
+}
+
+pods_strategies() {
+  ENV=$1
+  configure_kubectl_for $ENV
+  DEPLOYMENTS=(
+    $(kubectl -n $ENV get deployments | grep -Eo '^[^ ]+' | grep -v 'NAME')
+  )
+  echo "deployment; max_surge; max_unavailable"
+  for D in "${DEPLOYMENTS[@]}"; do
+    info=$(kubectl -n $ENV get deployment -o yaml $D |
+      yq '.spec.strategy' |
+      yq '.L = .rollingUpdate.maxSurge + "; " + .rollingUpdate.maxUnavailable' |
+      yq ".L" 2>/dev/null)
+    if ! [ "$info" = "null" ]; then
+      echo "$D; $info"
+    fi
+  done
+}
+
 #!/usr/bin/env bash

 bastion_config_for_redis_ca() {
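
Both helpers print semicolon-separated CSV on stdout, so a run can be redirected straight to a file (environment name illustrative):

    pods_resources staging > staging-resources.csv
    pods_strategies staging > staging-strategies.csv
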
@@ -1551,6 +1545,368 @@ jwt_token() {

 #!/usr/bin/env bash

+SCRIPT_PATH=$SCRIPT_FULL_PATH/shell/run
+PATH="$PATH:$SCRIPT_PATH/script"
+
+function get_token {
+  local ENV=$1
+  local LOGIN_FILE="$HOME/scriptlogin"
+
+  if [ ! -f "$LOGIN_FILE" ]; then
+    cat > "$LOGIN_FILE" <<-'EOF'
+#!/bin/bash
+case $ENV in
+  "testing")
+    local BO_USERNAME=""
+    local BO_PASSWORD=""
+    ;;
+  "recette")
+    local BO_USERNAME=""
+    local BO_PASSWORD=""
+    ;;
+  "staging")
+    local BO_USERNAME=""
+    local BO_PASSWORD=""
+    ;;
+  *)
+    local BO_USERNAME=""
+    local BO_PASSWORD=""
+    echo "unknown ENV ${ENV}"
+    return
+    ;;
+esac
+EOF
+  fi
+
+  source "${LOGIN_FILE}"
+
+  if [ -z "$BO_PASSWORD" ] || [ -z "$BO_USERNAME" ]
+  then
+    echo "please edit the file $LOGIN_FILE"
+    return 1
+  fi
+
+  curl -o /dev/null -D - "https://api.$ENV.colisweb.com/api/v6/authent/external/session" \
+    --data-raw '{"username":"'"${BO_USERNAME}"'","password":"'"${BO_PASSWORD/\"/\\\"}"'"}' \
+    --compressed 2> /dev/null | grep set-cook | sed -e 's/.*session=//g;s/;.*//g'
+}
+
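
A minimal usage sketch: the first call writes the ~/scriptlogin template and asks you to fill in the credentials; once it is filled in, the function prints the session cookie value for the given environment:

    TOKEN=$(get_token testing)
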
+function bash_array_to_json {
+  function join {
+    local IFS="$1"
+    shift
+    echo "$*"
+  }
+
+  echo '["'"$(join , $* | sed -e 's/,/","/g')"'"]' | jq
+}
+
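
For example (barcodes invented), bash_array_to_json turns its arguments into a JSON array, pretty-printed by jq:

    bash_array_to_json aaaa00001 aaaa00002
    # => ["aaaa00001","aaaa00002"]
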
+function get_random_street {
+  local CODE_POSTAL=${1:-59000}
+  if [[ ! "$CODE_POSTAL" =~ ^[0-9]{5}$ ]]; then
+    echo "CODE_POSTAL must be exactly 5 digits"
+    exit 1
+  fi
+
+  FILENAME="rue-$CODE_POSTAL.lst"
+  if [ ! -f "$FILENAME" ]; then
+    curl --output tmp1.gz https://adresse.data.gouv.fr/data/ban/adresses/latest/csv/adresses-"${CODE_POSTAL:0:2}".csv.gz
+    gzip -d tmp1.gz
+    cut -d\; -f3,5,6,8 tmp1 | sed "/;$CODE_POSTAL;/!d" > "$FILENAME"
+    rm tmp1
+  fi
+
+  sort -R "$FILENAME" | head -n 1
+}
+
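
Example (output invented; real lines come from the BAN open-data CSV, cached locally as rue-<postcode>.lst in "number;street;postcode;city" form):

    get_random_street 59000
    # => 12;Rue Nationale;59000;Lille
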
+function rand_slot {
+
+  local SCENARIO=$2
+  if [ -f "$SCENARIO" ]; then
+    source "$SCENARIO"
+  fi
+  local ORDER_DATE="$1"
+
+  DEFAULT=(
+    "06:00+01:00[Europe/Paris]-08:00+01:00[Europe/Paris]"
+    "08:00+01:00[Europe/Paris]-10:00+01:00[Europe/Paris]"
+    "10:00+01:00[Europe/Paris]-12:00+01:00[Europe/Paris]"
+    "16:00+01:00[Europe/Paris]-18:00+01:00[Europe/Paris]"
+    "18:00+01:00[Europe/Paris]-20:00+01:00[Europe/Paris]"
+  )
+  USAGE=${DELIVERY_SLOTS:-${DEFAULT[@]}}
+
+  IFS="-" read -r start_time end_time < <(echo "${USAGE[@]}" | tr " " "\n" | sort -u -R | head -n 1)
+
+  echo '{"start":"'"${ORDER_DATE}T${start_time}"'", "end":"'"${ORDER_DATE}T${end_time}"'" }'
+}
+
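
Example (date illustrative; the slot is drawn from DELIVERY_SLOTS when the scenario file defines it, otherwise from DEFAULT):

    rand_slot 2024-05-01
    # => {"start":"2024-05-01T10:00+01:00[Europe/Paris]", "end":"2024-05-01T12:00+01:00[Europe/Paris]" }
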
+function call_create_sfh_order {
+  local ENV=$1
+  local TOKEN=$2
+  source "$3"
+  local POS=$4
+  local BARCODES="$5"
+  local CODE_POSTAL="$6"
+  local PACKAGES=$(echo "$BARCODES" | jq '[{
+      "barcode": .[],
+      "length": 10.5,
+      "height": 9.0,
+      "width": 9.0,
+      "weight": 10.11,
+      "description": "test parcel",
+      "options": [],
+      "productTypology": "Classical",
+      "packageType": "Parcel"
+    }
+  ]')
+
+  DELIVERY_OPTIONS_P='['
+  for option in "${DELIVERY_OPTIONS[@]}"; do
+    if [ "$DELIVERY_OPTIONS_P" != '[' ]; then
+      DELIVERY_OPTIONS_P+=", "
+    fi
+    DELIVERY_OPTIONS_P+="\"$option\""
+  done
+  DELIVERY_OPTIONS_P+=']'
+
+  IFS=";" read -r nu rue code_postal ville < <(get_random_street "$CODE_POSTAL")
+  JSON='{
+    "primaryOrderReference": "'"${PRIMARY_REF}${POS}"'",
+    "secondaryOrderReference": null,
+    "stages": [
+      {
+        "type": "Pickup",
+        "packageBarcodes": '"$BARCODES"',
+        "location": {
+          "type": "Warehouse",
+          "warehouseCode": "'"$PICKUP_WAREHOUSE_CODE"'"
+        }
+      },
+      {
+        "type": "Dropoff",
+        "packageBarcodes": '"$BARCODES"',
+        "location": {
+          "type": "Address",
+          "address": {
+            "address1": "'"$nu $rue"'",
+            "postalCode": "'"$code_postal"'",
+            "city": "'"$ville"'",
+            "country": "France",
+            "floor": 0,
+            "lift": "with_lift"
+          },
+          "contact": {
+            "name": "John Doe",
+            "primaryPhone": "+33606060606"
+          }
+        }
+      }
+    ],
+    "packages": '"$PACKAGES"',
+    "owner": {
+      "accountIdentifier": "'$ACCOUNT_IDENTIFIER'"
+    },
+    "deliveryOptions": '"$DELIVERY_OPTIONS_P"',
+    "ecommerceValidationDate": "'"${ORDER_DATE}"'"
+  }'
+
+  RESULT=$(curl -s -X POST https://api.$ENV.colisweb.com/api/v6/order/external/warehouse/orders -H 'content-type: application/json' --cookie session="$TOKEN" --data-raw "$JSON")
+  ORDER_ID=$(jq ".orderId" -r <<< "$RESULT")
+
+  echo "new order : https://bo.$ENV.colisweb.com/admin/orders/$ORDER_ID" >&2
+
+  echo "$RESULT"
+}
+
+
+function call_scan {
+  local ENV=$1
+  local TOKEN=$2
+  source "$3"
+  local BARCODES="$4"
+  local SCAN=$(echo "$BARCODES" | jq '[{"barcode": .[], "context": "shuttle"}]')
+
+  JSON='{"scans":'$SCAN'}'
+
+  curl -X POST https://api.$ENV.colisweb.com/api/v6/parcel/external/units/scans/bulk -H 'content-type: application/json' --cookie session="$TOKEN" --data-raw "$JSON"
+}
+
+
+function call_register_delivery {
+  local ENV=$1
+  local TOKEN=$2
+
+  SCENARIO=$3
+  source "$SCENARIO"
+
+  local ORDER_ID=$4
+  local BARCODES="$5"
+
+  curl -X POST https://api.$ENV.colisweb.com/api/v6/order/external/warehouse/orders/"$ORDER_ID"/deliveries \
+    --cookie session="$TOKEN" --data-raw '{
+      "slot": '"$(rand_slot "${DELIVERY_DATE}" "$SCENARIO")"',
+      "storeIdOwner":"'"$STORE_ID_OWNER"'",
+      "pickup":{"type":"hub","code":"'"$HUB"'"},
+      "barcodes":'"$BARCODES"',
+      "price":{"origin":"auto","amount":25.9},
+      "allowCustomerSlotUpdate":false
+    }'
+}
+
+
+
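
Putting the helpers together, a hypothetical end-to-end run against testing (scenario.env is a placeholder name for the file generated by _create_scenario_file_if_not_exist below; barcodes invented):

    TOKEN=$(get_token testing)
    BARCODES=$(bash_array_to_json aaaa00001 aaaa00002)
    RESULT=$(call_create_sfh_order testing "$TOKEN" scenario.env 1 "$BARCODES" 59000)
    ORDER_ID=$(jq -r ".orderId" <<< "$RESULT")
    call_scan testing "$TOKEN" scenario.env "$BARCODES"
    call_register_delivery testing "$TOKEN" scenario.env "$ORDER_ID" "$BARCODES"
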
+function _create_scenario_file_if_not_exist () {
+  if [ ! -f "$SCENARIO" ]
+  then
+    cat > "$SCENARIO" <<-'EOF'
+DELIVERY_DATE=$(date -v+7d '+%Y-%m-%d') # here we ask for today's date + 7 days;
+# you can replace -v+7d with -v+1d for a delivery scheduled tomorrow
+# only used by create_many_sfh_order_and_delivery
+ENV="testing" # which environment to run the scripts on
+# ENV="staging"
+# ENV="recette"
+
+ACCOUNT_IDENTIFIER="102" # forces owner.accountIdentifier when creating the order
+# on the api/v6/order/external/warehouse/orders call
+# (order creation)
+HUB="duck" # for the api/v6/order/external/warehouse/orders call,
+# pickup.code parameter (the type is "hub")
+STORE_ID_OWNER="184" # pickup.storeIdOwner parameter
+PICKUP_WAREHOUSE_CODE="422" # on the api/v6/order/external/warehouse/orders call,
+# stages[0].location.warehouseCode parameter
+
+BARCODES_COUNT=5 # number of packages
+PREF="aaaa" # must be 4 characters, used to generate the barcodes
+# of the packages
+
+CODE_POSTAL="59000" # postal code in which a random address will be picked
+# (order creation)
+DELIVERY_SLOTS=( # list of delivery time slots, one is picked at random
+  "06:00+01:00[Europe/Paris]-08:00+01:00[Europe/Paris]"
+  "08:00+01:00[Europe/Paris]-10:00+01:00[Europe/Paris]"
+  "10:00+01:00[Europe/Paris]-12:00+01:00[Europe/Paris]"
+  "16:00+01:00[Europe/Paris]-18:00+01:00[Europe/Paris]"
+  "18:00+01:00[Europe/Paris]-20:00+01:00[Europe/Paris]"
+)
+
+# DELIVERY_OPTIONS=("skill1" "skill2") # list of skills - uncomment to use
+
+# normally no need to modify anything below
+ORDER_DATE=$(date '+%Y-%m-%d') # today's date
+RAND=$(date +%y%m%d%H%M%S) # pseudo-random value (here date-based), must be 17 characters
+BARCODE_PART=0000$RAND # used to generate the barcodes; the barcodes are:
+# {BARCODE_PART}{00000} to {BARCODE_PART}{BARCODES_COUNT}
+PRIMARY_REF=$PREF$RAND # the order's primaryOrderReference
+EOF
+    echo "please edit the file $SCENARIO"
+    return 1
+  fi
+}
+
+#!/usr/bin/env bash
+
+cleanup_merged_mr() {
+  COLISWEB_IDL_GROUP=3054234
+
+  BEFORE=${1:-$(date -I -v-2y)}
+
+  for (( COUNTER=1; COUNTER<=12; COUNTER+=2 )); do
+    cleanup_grouped_merged_mr $COLISWEB_IDL_GROUP $BEFORE $COUNTER &
+  done
+
+}
+
+cleanup_grouped_merged_mr() {
+  GROUP=$1
+  BEFORE=$2
+  PAGE_COUNT=$3
+  MERGED_MRS=($(curl --header "PRIVATE-TOKEN: $GITLAB_PAT" \
+    --url "https://gitlab.com/api/v4/groups/$GROUP/merge_requests?updated_before=${BEFORE}T08:00:00Z&status=merged&per_page=50&page=$PAGE_COUNT" |
+    jq -r '.[] | {iid: .iid|tostring, pid: .project_id|tostring} | (.pid + "/merge_requests/" + .iid)'))
+
+  for MR in ${MERGED_MRS[@]}; do
+    echo "https://gitlab.com/api/v4/projects/$MR"
+    curl --request DELETE \
+      --header "PRIVATE-TOKEN: $GITLAB_PAT" \
+      --url "https://gitlab.com/api/v4/projects/$MR"
+  done
+}
+
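
Usage sketch (needs a GitLab personal access token with API scope in GITLAB_PAT; the cutoff date is illustrative):

    export GITLAB_PAT="glpat-..."
    # delete merged MRs in the group last updated before 2022-01-01,
    # scanning result pages 1,3,5,7,9,11 in parallel
    cleanup_merged_mr 2022-01-01
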
+# you will need jq to use these commands. You can install it using "brew install jq"
+# cleanup_all_ecr_images 12
+# will delete images in all repositories older than 12 weeks
+# cleanup_single_ecr_repository colisweb-api 8
+# will delete images older than 8 weeks in the colisweb-api repository
+cleanup_all_ecr_images() {
+  WEEKS=$1
+
+  # the two commands are OR'd so this works with both BSD (macOS) and GNU (Linux) date
+  CLEAN_BEFORE=$(date -v-${WEEKS}w +%F || date --date="-${WEEKS} weeks" +'%Y-%m-%d')
+  REPOSITORIES=$(aws ecr describe-repositories --output json | jq -r '.[] |.[].repositoryName')
+
+  while read -r REPOSITORY; do
+    echo "processing ECR repository $REPOSITORY before $CLEAN_BEFORE"
+    cleanup_single_ecr_repository $CLEAN_BEFORE $REPOSITORY
+  done <<< "$REPOSITORIES"
+}
+
+cleanup_single_ecr_repository() {
+  BEFORE=$1
+  REPOSITORY=$2
+
+  ALL_TAGS=$(aws ecr describe-images --repository-name "$REPOSITORY" --output json |
+    jq '.imageDetails' |
+    jq '. |= sort_by(.imagePushedAt)' |
+    jq --arg date $BEFORE '.[] | select(.imagePushedAt[0:10] < $date)' |
+    jq 'select((.imageTags != null) or (.imageTags == []))' |
+    jq 'select(.imageTags | any(endswith("latest")) | not)' |
+    jq -r '.imageTags | join(" ")' |
+    sort -u)
+
+  while read image_tags; do
+    SINGLE_TAG=$(echo $image_tags | grep -o '^\S*')
+
+    DIGESTS_TO_DELETE=$(docker buildx imagetools inspect \
+      949316342391.dkr.ecr.eu-west-1.amazonaws.com/$REPOSITORY:$SINGLE_TAG --raw |
+      jq -r '[.manifests | .[].digest] | join(" imageDigest=") | "imageDigest=" + .')
+
+    TAGS_TO_DELETE=$(echo "$image_tags" | sed 's/[^ ]* */imageTag=&/g')
+
+    export AWS_PAGER=""
+
+    aws ecr batch-delete-image --repository-name "$REPOSITORY" --image-ids $(echo $TAGS_TO_DELETE) > /dev/null 2>&1
+    aws ecr batch-delete-image --repository-name "$REPOSITORY" --image-ids $(echo $DIGESTS_TO_DELETE) > /dev/null 2>&1
+  done <<< $ALL_TAGS
+
+  echo "deleted $(echo "$ALL_TAGS" | wc -l) tags"
+
+}
+
+
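
The jq join in cleanup_single_ecr_repository turns a multi-arch manifest list into the --image-ids argument string; a sketch of the transformation (digests invented):

    echo '{"manifests":[{"digest":"sha256:aaa"},{"digest":"sha256:bbb"}]}' |
      jq -r '[.manifests | .[].digest] | join(" imageDigest=") | "imageDigest=" + .'
    # => imageDigest=sha256:aaa imageDigest=sha256:bbb
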
+cleanup_ci_cache() {
+  DATE=${1:-$(date -v-1m +%F)}
+  CACHE_BUCKET=${2:-"s3://gitlab-colisweb-distributed-cache/project/"}
+
+  echo "deleting from cache $CACHE_BUCKET all older than $DATE"
+
+  aws_ecr_login
+
+  while read -r line; do
+    datum=$(echo $line | cut -c1-10)
+    if [[ "$datum" < "$DATE" ]] ; then
+      # Shell Parameter Expansion: ${parameter##word}
+      # returns what remains of "parameter" after removing the longest prefix matching "word"
+      # Here we need the end of the string after "project/" (corresponding to the S3 gitlab project id and filename)
+      TO_DELETE="$CACHE_BUCKET${line##* project/}"
+      echo $TO_DELETE
+      aws s3 rm $TO_DELETE
+    fi
+  done < <(aws s3 ls $CACHE_BUCKET --recursive)
+}
+
+#!/usr/bin/env bash
+
 ftp_ikea_k8s() {
   SSH_LOCAL_PORT=2230
   FTP_LOCAL_PORT=25500
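
A tiny worked example of that parameter expansion (the listing line is invented, in "aws s3 ls --recursive" format):

    line="2024-03-01 10:22:33    1234 project/8141053/cache.zip"
    echo "${line##* project/}"
    # => 8141053/cache.zip
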
@@ -1928,6 +2284,7 @@ datadog_schedule_downtime_single() {
 docker_build_push() {
   read -r -a BUILD_ARGS <<< "$1"
   DOCKER_BUILD_ARGS="--build-arg VCS_REF=$(git rev-parse --short HEAD)"
+
   for ARG_NAME in "${BUILD_ARGS[@]}"
   do
     DOCKER_BUILD_ARGS="$DOCKER_BUILD_ARGS --build-arg $ARG_NAME=${!ARG_NAME}"
@@ -1936,13 +2293,17 @@ docker_build_push() {
   if ! image_exists $DOCKER_REGISTRY_ID $APPLICATION $CI_COMMIT_SHORT_SHA ; then
     docker pull $DOCKER_IMAGE || true
     SOURCE_URL=${CI_PROJECT_URL:8} # without "https://" protocol, like gitlab.com/colisweb-idl/colisweb/back/packing
-
+
+    docker buildx create --use
+
+    docker buildx build $DOCKER_BUILD_ARGS \
       -t $DOCKER_IMAGE_SHA \
+      --platform "linux/arm64,linux/amd64" \
       --label org.opencontainers.image.revision=$(git rev-parse HEAD) \
       --label org.opencontainers.image.source=$SOURCE_URL \
-      --
+      --provenance=false \
+      --push \
       $DOCKER_STAGE_PATH
-    docker push $DOCKER_IMAGE_SHA
   fi
 }

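
The switch to buildx builds and pushes a multi-arch (arm64 + amd64) image in one step, which is why the separate docker push disappears; --provenance=false presumably keeps buildx from attaching provenance attestations, which would otherwise show up as extra "unknown/unknown" entries in the manifest list. A standalone sketch with a placeholder image tag:

    docker buildx create --use
    docker buildx build \
      -t 949316342391.dkr.ecr.eu-west-1.amazonaws.com/colisweb-api:abc1234 \
      --platform "linux/arm64,linux/amd64" \
      --provenance=false \
      --push \
      .
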
@@ -2078,7 +2439,7 @@ init_migrate_db() {

   unset KUBECONFIG

-
+  configure_kubectl_for ${ENVIRONMENT}

   kube_init_service_database \
     --namespace ${ENVIRONMENT} \
@@ -2124,7 +2485,7 @@ flyway_migrate() {
   CONFIGMAP_NAME="$service-flyway-migration-sql"
   POD_NAME="$service-flyway-migration"

-
+  configure_kubectl_for $environment

   kubectl -n $namespace delete configmap $CONFIGMAP_NAME --ignore-not-found
   kubectl -n $namespace delete pod $POD_NAME --ignore-not-found
@@ -2177,7 +2538,7 @@ flyway_migrate() {

   flyway_sql_folder=$(pwd)/${MIGRATION_SQL_PATH}

-
+  configure_kubectl_for "${ENVIRONMENT}"
   POD_NAME="${APPLICATION}-flyway-repair"
   CONFIGMAP_NAME="${APPLICATION}-flyway-repair-sql"

@@ -2243,11 +2604,11 @@ git_reveal() {
 }
 #!/usr/bin/env bash

-helm_deploy_v3() {
+helm_deploy() {
   APPLICATION=$1
   ENVIRONMENT=$2
   VERSION=$3
-  deploy_chart_v3 \
+  deploy_chart \
     --path_configs deploy \
     --path_chart deploy/$APPLICATION \
     --application $APPLICATION \
@@ -2256,7 +2617,7 @@ helm_deploy_v3() {
   --helm_extra_args --set global.version=$VERSION
 }

-deploy_chart_v3() {
+deploy_chart() {
   set -e
   set -x

@@ -2303,15 +2664,15 @@ deploy_chart_v3() {
   unset KUBECONFIG

   # Configure Kubectl
-
+  configure_kubectl_for ${environment}

-  # Configure
-
-  #
-
-
-
-
+  # Configure helm
+  helm version --namespace ${namespace} || true
+  # the helm stable repo has changed and must be updated manually in versions < v2.17.0
+  helm repo add colisweb s3://colisweb-helm-charts/colisweb
+  helm repo add stable https://charts.helm.sh/stable
+  helm repo update
+  helm dependency update ${root_path}/${path_chart}

   # Gather values/*.yaml files
   values_path="${root_path}/${path_chart}/values"
@@ -2319,7 +2680,7 @@ deploy_chart_v3() {
   [ -d $values_path ] && values_files=$(find $values_path -type f -maxdepth 1 -name "*.yaml" | sed 's/^/ -f /' | tr -d \\n | sed 's/%//')

   # Deploy
-
+  helm upgrade --install \
     --namespace ${namespace} \
     ${values_files} \
     -f ${root_path}/${path_configs}/common.yaml \
@@ -2341,7 +2702,7 @@ deploy_chart_v3() {
   set +x
 }

-verify_deployments_v3() {
+verify_deployments() {
   set -e

   # usage :
@@ -2361,7 +2722,7 @@ verify_deployments_v3() {

   # Get all Deployments names from the deployed chart
   DEPLOYMENTS=(
-    $(
+    $(helm get manifest --namespace $NAMESPACE $RELEASE | yq --no-doc -r 'select(.kind=="Deployment").metadata.name')
   )

   echo "verifying on $NAMESPACE deployments ${DEPLOYMENTS[@]} with a timeout of $TIMEOUT"
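
The yq filter keeps only Deployment documents from the rendered manifest and prints their names; a sketch with a two-document input (resource names invented):

    printf 'kind: Deployment\nmetadata:\n  name: api\n---\nkind: Service\nmetadata:\n  name: api-svc\n' |
      yq --no-doc -r 'select(.kind=="Deployment").metadata.name'
    # => api
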
@@ -2404,40 +2765,6 @@ check_config_file() {
   fi
 }

-#!/usr/bin/env bash
-
-configure_kubectl_for_ci() {
-  if [ -z ${GITLAB_PAT} ]; then
-    echo "Cannot configure kubectl: no GITLAB_PAT configured"
-    exit 1
-  fi
-
-  infra_env="$1"
-  valid_envs="[testing][staging][production][performance][tests][recette]"
-  echo "$valid_envs" | grep -q "\[$infra_env\]"
-
-  if [ $? -ne 0 ]; then
-    echo "Cannot configure kubectl for invalid env : $infra_env"
-    echo "choose one of $valid_envs"
-    exit 1
-  fi
-
-  mkdir -p ~/.kube
-  curl -fsS \
-    --header "PRIVATE-TOKEN: $GITLAB_PAT" \
-    "https://gitlab.com/api/v4/projects/8141053/jobs/artifacts/$infra_env/raw/$infra_env.kubeconfig?job=4_kubernetes_config_output" \
-    > ~/.kube/$infra_env.kubeconfig
-
-  curl_return_code=$?
-  if [ ${curl_return_code} -ne 0 ]; then
-    echo "Cannot configure kubectl for $infra_env, get configuration failed with code $curl_return_code"
-    exit ${curl_return_code}
-  fi
-
-  rm -f ~/.kube/config
-  ln -s ~/.kube/$infra_env.kubeconfig ~/.kube/config
-  echo "Configured kubectl for env : $infra_env"
-}
 notify_new_deployment() {
   jq --version || (apt update && apt install -y jq)

package/package.json CHANGED
@@ -6206,3 +6206,27 @@ module FaHome = {
   @module("react-icons/fa") @react.component
   external make: (~size: int=?, ~color: string=?, ~className: string=?) => React.element = "FaHome"
 }
+module FaWifi = {
+  @module("react-icons/fa") @react.component
+  external make: (~size: int=?, ~color: string=?, ~className: string=?) => React.element = "FaWifi"
+}
+module FaBackward = {
+  @module("react-icons/fa") @react.component
+  external make: (~size: int=?, ~color: string=?, ~className: string=?) => React.element =
+    "FaBackward"
+}
+module FaForward = {
+  @module("react-icons/fa") @react.component
+  external make: (~size: int=?, ~color: string=?, ~className: string=?) => React.element =
+    "FaForward"
+}
+module FaExclamation = {
+  @module("react-icons/fa") @react.component
+  external make: (~size: int=?, ~color: string=?, ~className: string=?) => React.element =
+    "FaExclamation"
+}
+module FaArrowLeft = {
+  @module("react-icons/fa") @react.component
+  external make: (~size: int=?, ~color: string=?, ~className: string=?) => React.element =
+    "FaArrowLeft"
+}