@colisweb/rescript-toolkit 5.42.4 → 5.42.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.gitlab-ci.yml CHANGED
@@ -1,5 +1,5 @@
 include:
-  - "https://colisweb-idl.gitlab.io/colisweb-open-source/ci-common/v18.0.1/templates/front.yml"
+  - "https://colisweb-idl.gitlab.io/colisweb-open-source/ci-common/v20.3.1/templates/front.yml"
 
 variables:
   GIT_COMMIT_FILES: ""
@@ -46,8 +46,8 @@ publish rescript-toolkit:
 # Build storybook
 # -----------------------------------------------
 pages:
-  image: node:16-bullseye
-  extends: .rules-only-for-master
+  image: node:lts-bullseye
+  extends: .rules-only-for-main
   tags:
     - aws
   stage: deploy
@@ -92,63 +92,6 @@ aws_ecr_token() {
   aws ecr get-authorization-token --region=eu-west-1 --output text --query 'authorizationData[].authorizationToken'
 }
 
-# you will need jq to use these commands. You can install it using "brew install jq"
-# delete_images colisweb_api 8
-# will delete images older than 8 weeks
-delete_images() {
-
-  REPO=$1
-  WEEKS=${2:-16}
-
-  WEEKS_AGO=$(date -v-${WEEKS}w +%F)
-
-  #Get all ecr images
-  IMAGES=$(aws ecr describe-images --repository-name $REPO --output json)
-
-  #Filter unnecessary values and map `imagePushedAt` to EPOCH
-  NON_LATEST_IMAGES=$(echo $IMAGES | jq '[.imageDetails[] | select(.imageTags | any(endswith("latest")) | not)]')
-
-  #Filter on EPOCH
-  OLD_IMAGES=$(echo $NON_LATEST_IMAGES | jq --arg date $WEEKS_AGO '.[] | select(.imagePushedAt[0:10] < $date).imageDigest')
-  while IFS= read -r IMAGE; do
-    if [ "$IMAGE" != "" ]; then
-      echo "Deleting $IMAGE from $REPO"
-      AWS_PAGER="" aws ecr batch-delete-image --repository-name $REPO --image-ids imageDigest=$IMAGE
-    fi
-  done <<< "$OLD_IMAGES"
-}
-
-# delete_images_all_repos 12
-# will delete images in all repositories older than 12 weeks
-delete_images_all_repos() {
-  REPOSITORIES=$(aws ecr describe-repositories --output json | jq -r '.[]|.[].repositoryName')
-
-  while IFS= read -r REPO; do
-    echo "processing ECR repository $REPO"
-    delete_images $REPO $1
-  done <<< "$REPOSITORIES"
-}
-
-delete_old_cache() {
-  DATE=${1:-$(date -v-1m +%F)}
-  CACHE_BUCKET=${2:-"s3://gitlab-colisweb-distributed-cache/project/"}
-
-  echo "deleting from cache $CACHE_BUCKET all older than $DATE"
-
-  aws_ecr_login
-
-  while read -r line; do
-    datum=$(echo $line | cut -c1-10)
-    if [[ "$datum" < "$DATE" ]] ; then
-      # Shell Parameter Expansion: ${parameter##word}
-      # Allow to return the result from "word" to the end of "parameters"
-      # Here we need the end of the string after "project/" (corresponding to the S3 gitlab project id and filename)
-      TO_DELETE="$CACHE_BUCKET${line##* project/}"
-      echo $TO_DELETE
-      aws s3 rm $TO_DELETE
-    fi
-  done < <(aws s3 ls $CACHE_BUCKET --recursive)
-}
 
 #!/usr/bin/env bash
 
@@ -250,11 +193,11 @@ entries:
   cronjob:
 EOT
 
-  # helm3 stable repo have changed and must be updated manually, in versions < v2.17.0
-  helm3 repo add colisweb s3://colisweb-helm-charts/colisweb --force-update
-  helm3 repo add stable https://charts.helm.sh/stable --force-update
-  helm3 repo update
-  helm3 dependency update ${ROOT_PATH}/${CHART_PATH}
+  # helm stable repo has changed and must be updated manually in versions < v2.17.0
+  helm repo add colisweb s3://colisweb-helm-charts/colisweb --force-update
+  helm repo add stable https://charts.helm.sh/stable --force-update
+  helm repo update
+  helm dependency update ${ROOT_PATH}/${CHART_PATH}
 
   # Gather values/*.yaml files
   VALUES_PATH="${ROOT_PATH}/${CHART_NAME}/values"
@@ -262,7 +205,7 @@ EOT
   [ -d $VALUES_PATH ] && VALUES_FILES=$(find $VALUES_PATH -type f -maxdepth 1 -name "*.yaml" | sed 's/^/ -f /' | tr -d \\n | sed 's/%//')
 
   # Deploy
-  helm3 upgrade --install \
+  helm upgrade --install \
     --namespace ${ENVIRONMENT} \
     ${VALUES_FILES} \
     -f ${ROOT_PATH}/${CONFIG_PATH}/common.yaml \
@@ -272,7 +215,7 @@ EOT
   ${CHART_NAME} ${ROOT_PATH}/${CHART_PATH}
 
 
-  verify_deployments_v3 -t 10m $ENVIRONMENT $CHART_NAME
+  verify_deployments -t 10m $ENVIRONMENT $CHART_NAME
 
 }
 
@@ -509,12 +452,12 @@ configure_kubectl_for() {
 database_k8s() {
   MODE=$1
   case $MODE in
-    "tests") SSH_LOCAL_PORT=2224;PG_LOCAL_PORT=24440;CA_LOCAL_PORT=25430;ENV="tests";;
-    "testing") SSH_LOCAL_PORT=2225;PG_LOCAL_PORT=24441;CA_LOCAL_PORT=25431;ENV="testing";;
-    "staging") SSH_LOCAL_PORT=2226;PG_LOCAL_PORT=24442;CA_LOCAL_PORT=25432;ENV="staging";;
-    "production") SSH_LOCAL_PORT=2227;PG_LOCAL_PORT=24443;CA_LOCAL_PORT=25433;ENV="production";;
-    "production_rw") SSH_LOCAL_PORT=2227;PG_LOCAL_PORT=24444;CA_LOCAL_PORT=25434;ENV="production";;
-    "recette") SSH_LOCAL_PORT=2228;PG_LOCAL_PORT=24446;CA_LOCAL_PORT=25436;ENV="recette";;
+    "tests") SSH_LOCAL_PORT=2224;COMP_LOCAL_PORT=25550;PG_LOCAL_PORT=24440;CA_LOCAL_PORT=25430;ENV="tests";;
+    "testing") SSH_LOCAL_PORT=2225;COMP_LOCAL_PORT=25551;PG_LOCAL_PORT=24441;CA_LOCAL_PORT=25431;ENV="testing";;
+    "staging") SSH_LOCAL_PORT=2226;COMP_LOCAL_PORT=25552;PG_LOCAL_PORT=24442;CA_LOCAL_PORT=25432;ENV="staging";;
+    "production") SSH_LOCAL_PORT=2227;COMP_LOCAL_PORT=25553;PG_LOCAL_PORT=24443;CA_LOCAL_PORT=25433;ENV="production";;
+    "production_rw") SSH_LOCAL_PORT=2227;COMP_LOCAL_PORT=25554;PG_LOCAL_PORT=24444;CA_LOCAL_PORT=25434;ENV="production";;
+    "recette") SSH_LOCAL_PORT=2228;COMP_LOCAL_PORT=25556;PG_LOCAL_PORT=24446;CA_LOCAL_PORT=25436;ENV="recette";;
     *) echo "Unsupported ENV : $MODE"; return 1 ;;
   esac
 
@@ -535,23 +478,28 @@ database_k8s() {
   HostName 127.0.0.1
   Port 2225
   LocalForward 24441 toutatis-testing-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
+  LocalForward 25551 toutatis-testing-composite-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
   LocalForward 25431 toutatis-testing-mysql-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
   LocalForward 25531 testapirds.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
+  LocalForward 25561 toutatis-testing-oracle-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:1521
 Host bastion_staging
   HostName 127.0.0.1
   Port 2226
   LocalForward 24442 toutatis-staging-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
+  LocalForward 25552 toutatis-staging-composite-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
   LocalForward 25432 toutatis-staging-mysql-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
 Host bastion_recette
   HostName 127.0.0.1
   Port 2228
   LocalForward 24446 toutatis-recette-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
+  LocalForward 25556 toutatis-recette-composite-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
   LocalForward 25436 toutatis-recette-mysql-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
   LocalForward 25536 testapirds.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
 Host bastion_production
   HostName 127.0.0.1
   Port 2227
   LocalForward 24443 toutatis-production-db-replica.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
+  LocalForward 25553 toutatis-production-composite-db.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:5432
   LocalForward 25433 toutatis-production-mysql-db-replica.ca0rjdmnxf1x.eu-west-1.rds.amazonaws.com:3306
 EOF
 if [ "$MODE" = "production_rw" ] ; then
@@ -565,6 +513,7 @@ EOF
     -F "$bastion_config" \
     "bastion_$ENV"
 
+  echo "sample command (composite) : 'psql postgres://postgres@127.0.0.1:$COMP_LOCAL_PORT'"
   echo "sample command : 'psql postgres://postgres@127.0.0.1:$PG_LOCAL_PORT'"
   echo "sample command : 'mysql -u colisweb -h 127.0.0.1 -P $CA_LOCAL_PORT -p db_name'"
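For reference, a minimal sketch of how the new composite forward is meant to be used, with the COMP_LOCAL_PORT value of the testing mapping above:

    database_k8s testing
    psql postgres://postgres@127.0.0.1:25551    # composite DB over the testing tunnel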
 
@@ -612,8 +561,6 @@ kube_init_database_once() {
   echo " Initializing Database '$db_database' for namespace $namespace"
   echo "======================="
 
-  set -x
-
   echo "Checking if Database '$db_database' exists"
   set +e
   psql_on_k8 $namespace once "$db_init_username:$db_init_password@$db_host:$db_port" -lqtA | cut -d\| -f1 | grep "^$db_database$"
@@ -843,8 +790,6 @@ kube_init_service_database() {
 
   local db_connection="$db_init_username:$db_init_password@$db_host:$db_port"
 
-  set -x
-
   echo "Checking if Database '$db_database' exists"
   set +e
   psql_on_k8 $namespace $service $db_connection -lqtA | cut -d\| -f1 | grep "^$db_database$"
@@ -912,14 +857,29 @@ EOF
 }
 #!/usr/bin/env bash
 
+function kstatus() {
+  if [ -z "$3" ]
+  then
+    configure_kubectl_for $1 && watch -n 1 "kubectl -n $1 get $2"
+  else
+    configure_kubectl_for $1 && watch -n 1 "kubectl -n $1 get $2 | grep $3"
+  fi
+}
+
+#!/usr/bin/env bash
+
 k8_nodes_stats() {
-  kubectl get nodes -o name |
-    xargs kubectl describe |
-    grep "^Name\|workType\|cpu \|memory " |
-    sed -r 's/[ :=]+/\t/g' |
-    sed 's/\tworkType\t//g' |
-    sed -r 's/^Name/---\nName/g' |
-    grep --color "Name\|web\|workers\|cpu\|memory\|---"
+  ENV=${1:-testing}
+
+  configure_kubectl_for "${ENV}"
+
+  kubectl get nodes -o name |
+    xargs kubectl describe |
+    grep "^Name\|workType\|cpu \|memory " |
+    sed -r 's/[ :=]+/\t/g' |
+    sed 's/\tworkType\t//g' |
+    sed -r 's/^Name/---\nName/g' |
+    grep --color "Name\|web\|workers\|cpu\|memory\|---"
 }
 
 #!/usr/bin/env bash
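A quick usage sketch of the two helpers in this hunk (environment names and the grep pattern are illustrative):

    kstatus testing pods        # watch all pods in the testing namespace
    kstatus testing pods api    # same view, filtered through grep
    k8_nodes_stats staging      # node stats now target an explicit environment (defaults to testing)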
@@ -999,6 +959,45 @@ pick_pod() {
   fi
 }
 
+# pods_resources $ENV
+# Outputs a semicolon-separated CSV of all deployments in this environment with their cpu and memory requests and limits.
+# Errors and null outputs are ignored and won't be in the output.
+pods_resources() {
+  ENV=$1
+  configure_kubectl_for $ENV
+  DEPLOYMENTS=(
+    $(kubectl -n $ENV get deployments | grep -Eo '^[^ ]+' | grep -v 'NAME')
+  )
+  echo "deployment; request_cpu; request_memory; limits_cpu; limits_memory"
+  for D in "${DEPLOYMENTS[@]}"; do
+    info=$(kubectl -n $ENV get deployment -o yaml $D |
+      yq '.spec.template.spec.containers[].resources' |
+      yq '.L = .requests.cpu + "; " + .requests.memory + "; " + .limits.cpu + "; " + .limits.memory' |
+      yq ".L" 2>/dev/null)
+    if ! [ "$info" = "null" ]; then
+      echo "$D; $info"
+    fi
+  done
+}
+
+pods_strategies() {
+  ENV=$1
+  configure_kubectl_for $ENV
+  DEPLOYMENTS=(
+    $(kubectl -n $ENV get deployments | grep -Eo '^[^ ]+' | grep -v 'NAME')
+  )
+  echo "deployment; max_surge; max_unavailable"
+  for D in "${DEPLOYMENTS[@]}"; do
+    info=$(kubectl -n $ENV get deployment -o yaml $D |
+      yq '.spec.strategy' |
+      yq '.L = .rollingUpdate.maxSurge + "; " + .rollingUpdate.maxUnavailable' |
+      yq ".L" 2>/dev/null)
+    if ! [ "$info" = "null" ]; then
+      echo "$D; $info"
+    fi
+  done
+}
+
 #!/usr/bin/env bash
 
 bastion_config_for_redis_ca() {
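A minimal usage sketch for the two new audit helpers (environment name illustrative); both print their semicolon-separated CSV to stdout:

    pods_resources testing > testing-resources.csv
    pods_strategies testing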
@@ -1551,6 +1550,406 @@ jwt_token() {
 
 #!/usr/bin/env bash
 
+alias update_devtool="git -C ~/.oh-my-zsh/custom/dev-tools/ pull"
+
+SCRIPT_PATH=$SCRIPT_FULL_PATH/shell/run
+PATH="$PATH:$SCRIPT_PATH/script"
+
+function get_token {
+  local ENV=$1
+  local LOGIN_FILE="$HOME/scriptlogin"
+
+  if [ ! -f "$LOGIN_FILE" ]; then
+    cat > "$LOGIN_FILE" <<-'EOF'
+    #!/bin/bash
+    case $ENV in
+      "testing")
+        local BO_USERNAME=""
+        local BO_PASSWORD=""
+        ;;
+      "recette")
+        local BO_USERNAME=""
+        local BO_PASSWORD=""
+        ;;
+      "staging")
+        local BO_USERNAME=""
+        local BO_PASSWORD=""
+        ;;
+      *)
+        local BO_USERNAME=""
+        local BO_PASSWORD=""
+        echo "unknown ENV ${ENV}"
+        return
+        ;;
+    esac
+    EOF
+  fi
+
+  source "${LOGIN_FILE}"
+
+  if [ -z "$BO_PASSWORD" ] || [ -z "$BO_USERNAME" ]
+  then
+    echo "edit the file $LOGIN_FILE"
+    return 1
+  fi
+
+  curl -o /dev/null -D - "https://api.$ENV.colisweb.com/api/v6/authent/external/session" \
+    --data-raw '{"username":"'"${BO_USERNAME}"'","password":"'"${BO_PASSWORD/\"/\\\"}"'"}' \
+    --compressed 2> /dev/null | grep set-cook | sed -e 's/.*session=//g;s/;.*//g'
+}
+
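Usage sketch, assuming ~/scriptlogin has been filled in; the function prints only the session cookie, so it can be captured directly:

    TOKEN=$(get_token testing)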
+function bash_array_to_json {
+  function join {
+    local IFS="$1"
+    shift
+    echo "$*"
+  }
+
+  echo '["'"$(join , $* | sed -e 's/,/","/g')"'"]' | jq
+}
+
+function get_random_street {
+  local CODE_POSTAUX_ARG=${1:-59000}
+  IFS=',' read -r -a CODE_POSTAUX <<< "$CODE_POSTAUX_ARG"
+  for CODE_POSTAL in "${CODE_POSTAUX[@]}"; do
+    if [[ ! "$CODE_POSTAL" =~ ^[0-9]{5}$ ]]; then
+      echo "Each CODE_POSTAL must be exactly 5 digits: $CODE_POSTAL"
+      exit 1
+    fi
+  done
+  local CODE_POSTAL=$(echo "${CODE_POSTAUX[@]}" | tr " " "\n" | sort -u -R | head -n 1)
+
+  get_random_street_in_cp $CODE_POSTAL
+}
+
+function get_random_street_in_cp {
+  local CODE_POSTAL=$1
+
+  FILENAME="rue-$CODE_POSTAL.lst"
+  if [ ! -f "$FILENAME" ]; then
+    curl --output tmp1.gz https://adresse.data.gouv.fr/data/ban/adresses/latest/csv/adresses-"${CODE_POSTAL:0:2}".csv.gz
+    gzip -d tmp1.gz
+    cut -d\; -f3,5,6,8 tmp1 | sed "/;$CODE_POSTAL;/!d" > "$FILENAME"
+    rm tmp1
+  fi
+
+  sort -R "$FILENAME" | head -n 1
+}
+
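Illustrative call: the result is a single "number;street;postal_code;city" line drawn at random from the downloaded BAN export (the address shown is made up):

    get_random_street "59000,75001"
    # e.g. 12;Rue Nationale;59000;Lille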
+function rand_slot {
+
+  local SCENARIO=$2
+  if [ -f "$SCENARIO" ]; then
+    source "$SCENARIO"
+  fi
+  local ORDER_DATE="$1"
+
+  DEFAULT=(
+    "06:00+01:00[Europe/Paris]-08:00+01:00[Europe/Paris]"
+    "08:00+01:00[Europe/Paris]-10:00+01:00[Europe/Paris]"
+    "10:00+01:00[Europe/Paris]-12:00+01:00[Europe/Paris]"
+    "16:00+01:00[Europe/Paris]-18:00+01:00[Europe/Paris]"
+    "18:00+01:00[Europe/Paris]-20:00+01:00[Europe/Paris]"
+  )
+  USAGE=${DELIVERY_SLOTS:-${DEFAULT[@]}}
+
+  IFS="-" read -r start_time end_time < <(echo "${USAGE[@]}" | tr " " "\n" | sort -u -R | head -n 1)
+
+  echo '{"start":"'"${ORDER_DATE}T${start_time}"'", "end":"'"${ORDER_DATE}T${end_time}"'" }'
+}
+
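Example invocation (date illustrative); the slot below is one possible random draw from the DEFAULT list:

    rand_slot 2024-03-01
    # {"start":"2024-03-01T06:00+01:00[Europe/Paris]", "end":"2024-03-01T08:00+01:00[Europe/Paris]" }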
+function call_create_sfh_order {
+  local ENV=$1
+  local TOKEN=$2
+  source "$3"
+  local POS=$4
+  local BARCODES="$5"
+  local CODE_POSTAUX="$6"
+  local PACKAGES=$(echo "$BARCODES" | jq '[{
+    "barcode": .[],
+    "length": 20.0,
+    "height": 15.0,
+    "width": 4.0,
+    "weight": 1.5,
+    "description": "test parcel",
+    "options": [],
+    "productTypology": "Classical",
+    "packageType": "Parcel"
+    }
+  ]')
+
+  DELIVERY_OPTIONS_P='['
+  for option in "${DELIVERY_OPTIONS[@]}"; do
+    if [ "$DELIVERY_OPTIONS_P" != '[' ]; then
+      DELIVERY_OPTIONS_P+=", "
+    fi
+    DELIVERY_OPTIONS_P+="\"$option\""
+  done
+  DELIVERY_OPTIONS_P+=']'
+
+  IFS=";" read -r nu rue code_postal ville < <(get_random_street "$CODE_POSTAUX")
+
+  if [ -n "$PICKUP_STORE_CODE" ]; then
+    PICKUP_LOCATION='{
+      "type": "store",
+      "storeCode": "'"$PICKUP_STORE_CODE"'"
+    }'
+  elif [ -n "$PICKUP_WAREHOUSE_CODE" ]; then
+    PICKUP_LOCATION='{
+      "type": "Warehouse",
+      "warehouseCode": "'"$PICKUP_WAREHOUSE_CODE"'"
+    }'
+  else
+    echo "PICKUP_WAREHOUSE_CODE or PICKUP_STORE_CODE must be defined in $3"
+    exit 1
+  fi
+  JSON='{
+    "primaryOrderReference": "'"${PRIMARY_REF}${POS}"'",
+    "secondaryOrderReference": null,
+    "stages": [
+      {
+        "type": "Pickup",
+        "packageBarcodes": '"$BARCODES"',
+        "location": '"$PICKUP_LOCATION"'
+      },
+      {
+        "type": "Dropoff",
+        "packageBarcodes": '"$BARCODES"',
+        "location": {
+          "type": "Address",
+          "address": {
+            "address1": "'"$nu $rue"'",
+            "postalCode": "'"$code_postal"'",
+            "city": "'"$ville"'",
+            "country": "France",
+            "floor": 0,
+            "lift": "with_lift"
+          },
+          "contact": {
+            "name": "John Doe",
+            "primaryPhone": "+33606060606"
+          }
+        }
+      }
+    ],
+    "packages": '"$PACKAGES"',
+    "owner": {
+      "accountIdentifier": "'$ACCOUNT_IDENTIFIER'"
+    },
+    "deliveryOptions": '"$DELIVERY_OPTIONS_P"',
+    "ecommerceValidationDate": "'"${ORDER_DATE}"'"
+  }'
+
+  RESULT=$(curl -s -X POST https://api.$ENV.colisweb.com/api/v6/order/external/warehouse/orders -H 'content-type: application/json' --cookie session="$TOKEN" --data-raw "$JSON")
+  ORDER_ID=$(jq ".orderId" -r <<< "$RESULT")
+
+  echo "new order: https://bo.$ENV.colisweb.com/admin/orders/$ORDER_ID" >&2
+
+  echo "$RESULT"
+}
+
+
+function call_scan {
+  local ENV=$1
+  local TOKEN=$2
+  source "$3"
+  local BARCODES="$4"
+  local SCAN=$(echo "$BARCODES" | jq '[{"barcode": .[], "context": "shuttle"}]')
+
+  JSON='{"scans":'$SCAN'}'
+
+  curl -X POST https://api.$ENV.colisweb.com/api/v6/parcel/external/units/scans/bulk -H 'content-type: application/json' --cookie session="$TOKEN" --data-raw "$JSON"
+}
+
+
+function call_register_delivery {
+  local ENV=$1
+  local TOKEN=$2
+
+  SCENARIO=$3
+  source "$SCENARIO"
+
+  local ORDER_ID=$4
+  local BARCODES="$5"
+
+  DATA='{
+    "slot": '"$(rand_slot "${DELIVERY_DATE}" "$SCENARIO")"',
+    "storeIdOwner":"'"$STORE_ID_OWNER"'",
+    "pickup":{"type":"hub","code":"'"$HUB"'"},
+    "barcodes":'"$BARCODES"',
+    "price":{"origin":"auto","amount":25.9},
+    "allowCustomerSlotUpdate":false,
+    "withForcedSlot": false
+  }'
+
+  curl -X POST https://api.$ENV.colisweb.com/api/v6/order/external/warehouse/orders/"$ORDER_ID"/deliveries \
+    --cookie session="$TOKEN" --data-raw "$DATA"
+}
+
+
+
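Pieced together, the helpers above appear intended to chain like this; the scenario file path, position suffix, and barcodes are placeholders:

    TOKEN=$(get_token testing)
    BARCODES=$(bash_array_to_json 000012345678901200000 000012345678901200001)
    RESULT=$(call_create_sfh_order testing "$TOKEN" ./scenario 01 "$BARCODES" "59000,75001")
    ORDER_ID=$(jq -r '.orderId' <<< "$RESULT")
    call_scan testing "$TOKEN" ./scenario "$BARCODES"
    call_register_delivery testing "$TOKEN" ./scenario "$ORDER_ID" "$BARCODES"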
+function _create_scenario_file_if_not_exist () {
+  if [ ! -f "$SCENARIO" ]
+  then
+    cat > "$SCENARIO" <<-'EOF'
+    DELIVERY_DATE=$(date -v+7d '+%Y-%m-%d') # requested delivery date: today + 7 days
+                  # replace -v+7d with -v+1d for a delivery scheduled tomorrow
+                  # only used by create_many_sfh_order_and_delivery
+    ENV="testing" # which environment the scripts run against
+    # ENV="staging"
+    # ENV="recette"
+
+    ACCOUNT_IDENTIFIER="102" # forces owner.accountIdentifier at order creation
+                  # on the api/v6/order/external/warehouse/orders call
+    HUB="duck"    # pickup.code on the api/v6/order/external/warehouse/orders call
+                  # (type is "hub")
+    STORE_ID_OWNER="184" # pickup.storeIdOwner
+                  # on the api/v6/order/external/warehouse/orders call
+    # PICKUP_STORE_CODE="2" # uncomment for a pickup from a store
+    PICKUP_WAREHOUSE_CODE="422" # for a pickup from a warehouse
+
+    BARCODES_COUNT=5 # number of packages
+    PREF="aaaa"   # must be 4 characters, used to generate the package barcodes
+
+    CODE_POSTAUX=("59000", "75001") # postal codes a random address will be picked from
+                  # (order creation)
+    DELIVERY_SLOTS=( # delivery time slots, one is picked at random
+      "06:00+01:00[Europe/Paris]-08:00+01:00[Europe/Paris]"
+      "08:00+01:00[Europe/Paris]-10:00+01:00[Europe/Paris]"
+      "10:00+01:00[Europe/Paris]-12:00+01:00[Europe/Paris]"
+      "16:00+01:00[Europe/Paris]-18:00+01:00[Europe/Paris]"
+      "18:00+01:00[Europe/Paris]-20:00+01:00[Europe/Paris]"
+    )
+
+    # DELIVERY_OPTIONS=("skill1" "skill2") # list of skill names - uncomment to use
+
+    # normally nothing below needs to be modified
+    ORDER_DATE=$(date '+%Y-%m-%d') # today's date
+    RAND=$(date +%y%m%d%H%M%S) # pseudo-random value (derived from the date), must be 17 characters
+    BARCODE_PART=0000$RAND # used to generate the barcodes, which run from
+                  # {BARCODE_PART}{00000} to {BARCODE_PART}{BARCODES_COUNT}
+    PRIMARY_REF=$PREF$RAND # primaryOrderReference of the order
+    EOF
+    echo "edit the file $SCENARIO"
+    return 1
+  fi
+}
+
+#!/usr/bin/env bash
+
+cleanup_merged_mr() {
+  COLISWEB_IDL_GROUP=3054234
+
+  BEFORE=${1:-$(date -I -v -2y)}
+
+  for (( COUNTER=1; COUNTER<=12; COUNTER+=2 )); do
+    cleanup_grouped_merged_mr $COLISWEB_IDL_GROUP $BEFORE $COUNTER &
+  done
+
+}
+
+cleanup_grouped_merged_mr() {
+  GROUP=$1
+  BEFORE=$2
+  PAGE_COUNT=$3
+  MERGED_MRS=($(curl --header "PRIVATE-TOKEN: $GITLAB_PAT" \
+    --url "https://gitlab.com/api/v4/groups/$GROUP/merge_requests?updated_before=${BEFORE}T08:00:00Z&status=merged&per_page=50&page=$PAGE_COUNT" |
+    jq -r '.[] | {iid: .iid|tostring, pid: .project_id|tostring} | (.pid + "/merge_requests/" + .iid)'))
+
+  for MR in ${MERGED_MRS[@]}; do
+    echo "https://gitlab.com/api/v4/projects/$MR"
+    curl --request DELETE \
+      --header "PRIVATE-TOKEN: $GITLAB_PAT" \
+      --url "https://gitlab.com/api/v4/projects/$MR"
+  done
+}
+
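Usage sketch (date illustrative): deletes merged MRs in the group that were last updated before the given day (defaults to two years ago), walking several result pages in parallel:

    cleanup_merged_mr 2022-01-01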
+# you will need jq to use these commands. You can install it using "brew install jq"
+# cleanup_all_ecr_images 12
+# will delete images in all repositories older than 12 weeks
+# cleanup_single_ecr_repository $(date -v-8w +%F) colisweb-api
+# will delete images older than 8 weeks in the colisweb-api repository
+cleanup_all_ecr_images() {
+  WEEKS=$1
+
+  # try BSD date (macOS) first, fall back to GNU date so this works everywhere
+  CLEAN_BEFORE=$(date -v-${WEEKS}w +%F || date --date="-${WEEKS} weeks" +'%Y-%m-%d')
+  REPOSITORIES=$(aws ecr describe-repositories --output json | jq -r '.[] |.[].repositoryName')
+
+  while read -r REPOSITORY; do
+    echo "processing ECR repository $REPOSITORY before $CLEAN_BEFORE"
+    cleanup_single_ecr_repository "$CLEAN_BEFORE" "$REPOSITORY"
+  done <<< "$REPOSITORIES"
+}
+
+cleanup_single_ecr_repository() {
+  BEFORE=$1
+  REPOSITORY=$2
+
+  echo "getting tags for repository $REPOSITORY before $BEFORE"
+
+  ALL_TAGS=$(aws ecr describe-images --repository-name "$REPOSITORY" --output json |
+    jq '.imageDetails' |
+    jq '. |= sort_by(.imagePushedAt)' |
+    jq --arg date $BEFORE '.[] | select(.imagePushedAt[0:10] < $date)' |
+    jq 'select((.imageTags != null) or (.imageTags == []))' |
+    jq 'select(.imageTags | any(endswith("latest")) | not)' |
+    jq -r '.imageTags | join(" ")' |
+    sort -u)
+
+  if [ -z "${ALL_TAGS}" ]; then
+    echo "no tag to delete for repository $REPOSITORY"
+  else
+    echo "deleting $(echo $ALL_TAGS | wc -l) tags for $REPOSITORY"
+
+    while read image_tags; do
+      SINGLE_TAG=$(echo $image_tags | grep -o '^\S*')
+
+      DIGESTS_TO_DELETE=$(docker buildx imagetools inspect \
+        949316342391.dkr.ecr.eu-west-1.amazonaws.com/$REPOSITORY:$SINGLE_TAG --raw |
+        jq -r '[.manifests | .[].digest] | join(" imageDigest=") | "imageDigest=" + .' ||
+        echo "")
+
+      TAGS_TO_DELETE=$(echo "$image_tags" | sed 's/[^ ]* */imageTag=&/g')
+
+      export AWS_PAGER=""
+
+      aws ecr batch-delete-image --repository-name "$REPOSITORY" --image-ids $(echo $TAGS_TO_DELETE) > /dev/null 2>&1
+      test -z $DIGESTS_TO_DELETE ||
+        aws ecr batch-delete-image --repository-name "$REPOSITORY" --image-ids $(echo $DIGESTS_TO_DELETE) > /dev/null 2>&1
+    done <<< $ALL_TAGS
+
+    echo "deleted $(echo $ALL_TAGS | wc -l) tags"
+  fi
+
+}
+
+
+cleanup_ci_cache() {
+  DATE=${1:-$(date -v-1m +%F)}
+  CACHE_BUCKET=${2:-"s3://gitlab-colisweb-distributed-cache/project/"}
+
+  echo "deleting from cache $CACHE_BUCKET all older than $DATE"
+
+  aws_ecr_login
+
+  while read -r line; do
+    datum=$(echo $line | cut -c1-10)
+    if [[ "$datum" < "$DATE" ]] ; then
+      # Shell Parameter Expansion: ${parameter##word}
+      # Returns what follows "word" up to the end of "parameter"
+      # Here we need the end of the string after "project/" (the S3 gitlab project id and filename)
+      TO_DELETE="$CACHE_BUCKET${line##* project/}"
+      echo $TO_DELETE
+      aws s3 rm $TO_DELETE
+    fi
+  done < <(aws s3 ls $CACHE_BUCKET --recursive)
+}
+
+#!/usr/bin/env bash
+
 ftp_ikea_k8s() {
   SSH_LOCAL_PORT=2230
   FTP_LOCAL_PORT=25500
@@ -1926,8 +2325,12 @@ datadog_schedule_downtime_single() {
 #!/usr/bin/env bash
 
 docker_build_push() {
+  PLATFORMS=$1
+  shift
+
   read -r -a BUILD_ARGS <<< "$1"
   DOCKER_BUILD_ARGS="--build-arg VCS_REF=$(git rev-parse --short HEAD)"
+
   for ARG_NAME in "${BUILD_ARGS[@]}"
   do
     DOCKER_BUILD_ARGS="$DOCKER_BUILD_ARGS --build-arg $ARG_NAME=${!ARG_NAME}"
@@ -1936,13 +2339,17 @@ docker_build_push() {
   if ! image_exists $DOCKER_REGISTRY_ID $APPLICATION $CI_COMMIT_SHORT_SHA ; then
     docker pull $DOCKER_IMAGE || true
     SOURCE_URL=${CI_PROJECT_URL:8} # without "https://" protocol, like gitlab.com/colisweb-idl/colisweb/back/packing
-    docker build $DOCKER_BUILD_ARGS \
+
+    docker buildx create --use
+
+    docker buildx build $DOCKER_BUILD_ARGS \
       -t $DOCKER_IMAGE_SHA \
+      --platform "$PLATFORMS" \
       --label org.opencontainers.image.revision=$(git rev-parse HEAD) \
       --label org.opencontainers.image.source=$SOURCE_URL \
-      --cache-from $DOCKER_IMAGE \
+      --provenance=false \
+      --push \
       $DOCKER_STAGE_PATH
-    docker push $DOCKER_IMAGE_SHA
   fi
 }
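With the switch to buildx, the platform list becomes the first argument and the push happens inside the build; a usage sketch (platforms and build-arg name illustrative):

    docker_build_push "linux/amd64,linux/arm64" "SBT_OPTS"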
 
@@ -2243,11 +2650,11 @@ git_reveal() {
 }
 #!/usr/bin/env bash
 
-helm_deploy_v3() {
+helm_deploy() {
   APPLICATION=$1
   ENVIRONMENT=$2
   VERSION=$3
-  deploy_chart_v3 \
+  deploy_chart \
     --path_configs deploy \
     --path_chart deploy/$APPLICATION \
     --application $APPLICATION \
@@ -2256,7 +2663,7 @@ helm_deploy_v3() {
     --helm_extra_args --set global.version=$VERSION
 }
 
-deploy_chart_v3() {
+deploy_chart() {
   set -e
   set -x
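Call sites simply drop the _v3 suffix; for example (application name and version illustrative):

    helm_deploy my-app staging 1.2.3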
 
@@ -2305,13 +2712,13 @@ deploy_chart_v3() {
   # Configure Kubectl
   configure_kubectl_for ${environment}
 
-  # Configure helm3
-  helm3 version --namespace ${namespace} || true
-  # helm3 stable repo have changed and must be updated manually, in versions < v2.17.0
-  helm3 repo add colisweb s3://colisweb-helm-charts/colisweb
-  helm3 repo add stable https://charts.helm.sh/stable
-  helm3 repo update
-  helm3 dependency update ${root_path}/${path_chart}
+  # Configure helm
+  helm version --namespace ${namespace} || true
+  # helm stable repo has changed and must be updated manually in versions < v2.17.0
+  helm repo add colisweb s3://colisweb-helm-charts/colisweb
+  helm repo add stable https://charts.helm.sh/stable
+  helm repo update
+  helm dependency update ${root_path}/${path_chart}
 
   # Gather values/*.yaml files
   values_path="${root_path}/${path_chart}/values"
2319
2726
  [ -d $values_path ] && values_files=$(find $values_path -type f -maxdepth 1 -name "*.yaml" | sed 's/^/ -f /' | tr -d \\n | sed 's/%//')
2320
2727
 
2321
2728
  # Deploy
2322
- helm3 upgrade --install \
2729
+ helm upgrade --install \
2323
2730
  --namespace ${namespace} \
2324
2731
  ${values_files} \
2325
2732
  -f ${root_path}/${path_configs}/common.yaml \
@@ -2341,7 +2748,7 @@ deploy_chart_v3() {
2341
2748
  set +x
2342
2749
  }
2343
2750
 
2344
- verify_deployments_v3() {
2751
+ verify_deployments() {
2345
2752
  set -e
2346
2753
 
2347
2754
  # usage :
@@ -2361,7 +2768,7 @@ verify_deployments_v3() {
 
   # Get all Deployments names from the deployed chart
   DEPLOYMENTS=(
-    $(helm3 get manifest --namespace $NAMESPACE $RELEASE | yq -rs '.[] | select(.kind=="Deployment") | .metadata.name')
+    $(helm get manifest --namespace $NAMESPACE $RELEASE | yq --no-doc -r 'select(.kind=="Deployment").metadata.name')
   )
 
   echo "verifying on $NAMESPACE deployments ${DEPLOYMENTS[@]} with a timeout of $TIMEOUT"
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@colisweb/rescript-toolkit",
-  "version": "5.42.4",
+  "version": "5.42.9",
   "type": "module",
   "scripts": {
     "clean": "rescript clean",
@@ -5,6 +5,7 @@ external make: (
   ~countryCallingCodeEditable: bool=?,
   ~international: bool=?,
   ~defaultCountry: string=?,
+  ~initialValueFormat: string=?,
   ~onChange: string => unit=?,
   ~onBlur: 'a => unit=?,
   ~className: string=?,
@@ -22,3 +23,6 @@ type phoneNumber = {
 }
 @module("react-phone-number-input")
 external parsePhoneNumber: string => option<phoneNumber> = "parsePhoneNumber"
+
+@module("react-phone-number-input")
+external parsePhoneNumberWithOptions: (string, 'a) => option<phoneNumber> = "parsePhoneNumber"
package/vite.config.js CHANGED
@@ -14,7 +14,8 @@ const appDirectory = fs.realpathSync(process.cwd());
 const isProduction = process.env.NODE_ENV === "production";
 
 export default defineConfig({
-  base: isProduction ? "/colisweb-open-source/rescript-toolkit/" : "/",
+  // base: isProduction ? "/colisweb-open-source/rescript-toolkit/" : "/",
+  base: "/",
   build: {
     sourcemap: true,
   },