cl-magic 1.2.10 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile.lock +2 -2
- data/README.md +5 -3
- data/bin/cl +2 -1
- data/bin/common/process_pids +19 -0
- data/bin/world-scripts/build +135 -0
- data/bin/world-scripts/down +78 -0
- data/bin/world-scripts/get +112 -0
- data/bin/world-scripts/rm +76 -0
- data/bin/world-scripts/up +83 -0
- data/lib/cl/magic/cl-curl +130 -0
- data/lib/cl/magic/cl-dk +4 -0
- data/lib/cl/magic/cl-dk-make +4 -1
- data/lib/cl/magic/cl-dk-make-world +3 -2
- data/lib/cl/magic/cl-dk-world +32 -18
- data/lib/cl/magic/common/load_runner.rb +127 -0
- data/lib/cl/magic/dk/world_settings.rb +8 -0
- data/lib/cl/magic/dk/yaml_arg_munger.rb +60 -9
- data/lib/cl/magic/version.rb +1 -1
- metadata +10 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bb8d14f6e383683f5e14fe5ada204b1f78c7024b22663ff05080fddfa7d23f40
+  data.tar.gz: 2ab5fb3e7faca8be5fae4d9e2c9ea503cab062d7fc496496574ba21e4dbfdd29
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 49d6817adf444b4c77bf546d5d4468cd0fed212e17e27703971a8172561deb58d501ad9f03b3a24d5f00621e009ecc94018ca31f091eb1260353524b8d4752c9
+  data.tar.gz: 874d23bca143e1fd4c34ffa77da6e969fa2e4ac3fe5789e9b026712ca6bc8dfb429e969da2025f937d19cc0dff10bf2cb2a4b6753d66783f231920d6e03df69c
data/Gemfile.lock
CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    cl-magic (1.
+    cl-magic (1.3.1)
       activesupport
       baran
       concurrent-ruby
@@ -23,7 +23,7 @@ GEM
       tzinfo (~> 2.0)
     baran (0.1.9)
     byebug (11.1.3)
-    concurrent-ruby (1.2.
+    concurrent-ruby (1.2.3)
     i18n (1.14.1)
       concurrent-ruby (~> 1.0)
     minitest (5.20.0)
data/README.md
CHANGED
@@ -24,19 +24,21 @@ ln -s $MAGIC_DIR/bin/cl /usr/local/bin
 
 ## Development
 
-
+For local development
+
+Remove any previously installed simlink
 
 ```
 rm /usr/local/bin/cl
 ```
 
-Then sim-link
+Then sim-link the source code instead
 
 ```
 ln -s $(pwd)/bin/cl /usr/local/bin
 ```
 
-if using `dk make-world`, you'll need to reset your world path
+NOTE: if using `dk make-world`, you'll need to reset your world path
 
 ```
 dk world set --path ./dk-world
data/bin/cl
CHANGED
@@ -11,7 +11,8 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
 [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
 MAGIC_DIR="$( cd -P "$( dirname "$SOURCE" )/../lib/cl/magic" >/dev/null 2>&1 && pwd )"
+MAGIC_BIN="$MAGIC_DIR/../../../bin"
 
 # make magic - with the correct version of ruby & the bundle
 cd $MAGIC_DIR
-RBENV_VERSION=3.1.2 CL_WORKING_DIR=$CL_WORKING_DIR bundle exec cl $@
+RBENV_VERSION=3.1.2 MAGIC_BIN=$MAGIC_BIN CL_WORKING_DIR=$CL_WORKING_DIR bundle exec cl $@
data/bin/common/process_pids
ADDED
@@ -0,0 +1,19 @@
+#!/bin/bash
+set -euo pipefail
+
+LOG_FILEPATHS_ARRAY=()
+PIDS_ARRAY=()
+
+process_pids () {
+
+  for pid in "${PIDS_ARRAY[@]}"
+  do
+    werr=0
+    wait $pid || werr=$?
+    if [ "$werr" != 0 ] && [ "$werr" != 127 ] ; then
+      echo " ℹ failed - ${LOG_FILEPATHS_ARRAY[$pid]}"
+    else
+      echo " ✔ ${LOG_FILEPATHS_ARRAY[$pid]}"
+    fi
+  done
+}
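The world-scripts added below all follow the same pattern around this helper: background a job with its output redirected to a log file, record the pid and the log path, then call `process_pids` to wait on everything and report per-job status. A minimal sketch of that pattern (the job, path, and sourcing location are illustrative, not from the package):

```
#!/bin/bash
set -euo pipefail
source ./process_pids            # illustrative path; provides PIDS_ARRAY, LOG_FILEPATHS_ARRAY, process_pids

log_filepath="/tmp/make.demo.output"
sleep 2 > $log_filepath 2>&1 &   # stand-in for a long-running clone/build job

# save the pid and its log file, exactly as the world-scripts do
pid=$!
PIDS_ARRAY+=($pid)
LOG_FILEPATHS_ARRAY[$pid]=$log_filepath

process_pids                     # waits on each saved pid and prints ✔ or "ℹ failed" per log file
```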
data/bin/world-scripts/build
ADDED
@@ -0,0 +1,135 @@
+#!/bin/bash
+set -euo pipefail
+
+# source world constants
+source "$WORLD_DIR/_bin/common/repo_constants"
+
+# script path
+SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd $SCRIPT_PATH
+
+# source scripts
+source ../common/process_pids
+
+# code path
+if [ -z "$CODE_DIR" ]; then
+  echo "missing CODE_DIR"
+  exit 1
+fi
+code_path=$CODE_DIR
+
+# ensure shared net
+docker network create shared > /dev/null 2>&1 || true
+
+echo ""
+echo "------------------------"
+echo " make build"
+echo "------------------------"
+echo ""
+
+REPO_ARRAY=$(get_repo_array "$1")
+for repo in ${REPO_ARRAY[@]};
+do
+  repo_base_name=$(basename "${repo##*/}" .git)
+  repo_path=$code_path/$repo_base_name
+
+  echo " $repo_base_name"
+
+  if [ ! -d $repo_path ]; then
+    echo " ℹ skip, project not cloned"
+  else
+    if [[ "$repo_base_name" =~ $(get_skip_build_regex) ]]; then
+      echo " ℹ skip; please build manually"
+    else
+
+      cd $repo_path
+
+      # compose settings?
+      if cl dk config &> /dev/null; then
+
+        # has make init?
+        if echo $(cl dk make) | grep -q init; then
+
+          # start: make init
+          log_filepath="/tmp/make.build.$repo_base_name.output"
+          echo " ⌛ start make init"
+
+          # set command
+          MAKE_INIT_CMD="cl dk make init"
+
+          # log header
+          echo "" >> $log_filepath
+          echo "start make init..." > $log_filepath
+          echo $MAKE_INIT_CMD >> $log_filepath
+          echo "" >> $log_filepath
+
+          # go!
+          $MAKE_INIT_CMD >> $log_filepath 2>&1 &
+
+          # save
+          pid=$!
+          PIDS_ARRAY+=($pid)
+          LOG_FILEPATHS_ARRAY[$pid]=$log_filepath
+        else
+          echo " ℹ skip, no 'dk make init'"
+        fi
+      else
+
+        # docker file?
+        dockerfile_filepath="$repo_path/Dockerfile"
+        if test -f "$dockerfile_filepath"; then
+          cd $repo_path
+
+          # arm version?
+          ARCH=$(uname -m)
+          if [[ ${ARCH} == 'arm64' ]]; then
+            if test -f "$dockerfile_filepath.arm"; then
+              echo " ℹ found Dockerfile.arm"
+              dockerfile_filepath="$repo_path/Dockerfile.arm"
+            fi
+          fi
+
+          # start: docker build
+          log_filepath="/tmp/make.build.$repo_base_name.output"
+          echo " ⌛ start docker build"
+
+          # command
+          DOCKER_BUILD_CMD="docker build -t dev-$repo_base_name -f $dockerfile_filepath ."
+
+          # log header
+          echo "" >> $log_filepath
+          echo "start docker build..." > $log_filepath
+          echo $DOCKER_BUILD_CMD >> $log_filepath
+          echo "" >> $log_filepath
+
+          # go!
+          $DOCKER_BUILD_CMD >> $log_filepath 2>&1 &
+
+          # save
+          pid=$!
+          PIDS_ARRAY+=($pid)
+          LOG_FILEPATHS_ARRAY[$pid]=$log_filepath
+        else
+          echo " ℹ skip, no docker"
+        fi
+      fi
+    fi
+  fi
+  echo ""
+done
+
+if [ ! -z "${PIDS_ARRAY:-}" ]; then
+  echo ""
+  echo " waiting"
+  echo " 👀 tail -f /tmp/make.build.*.output"
+  process_pids
+else
+  echo " nothing to do.. did you 'make get' yet?"
+fi
+
+#
+# DONE
+#
+
+printf " DONE"
+echo ""
data/bin/world-scripts/down
ADDED
@@ -0,0 +1,78 @@
+#!/bin/bash
+set -euo pipefail
+
+# source world constants
+source "$WORLD_DIR/_bin/common/repo_constants"
+
+# script path
+SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd $SCRIPT_PATH
+
+# source scripts
+source ../common/process_pids
+
+# code path
+if [ -z "$CODE_DIR" ]; then
+  echo "missing CODE_DIR"
+  exit 1
+fi
+code_path=$CODE_DIR
+
+
+log_filepaths=()
+pids=()
+
+echo ""
+echo "------------------------"
+echo " bring down services"
+echo "------------------------"
+echo ""
+
+REPO_ARRAY=$(get_repo_array "$1")
+for repo in ${REPO_ARRAY[@]};
+do
+  repo_base_name=$(basename "${repo##*/}" .git)
+  repo_path=$code_path/$repo_base_name
+
+  cd $repo_path
+
+  # compose settings?
+  if cl dk config &> /dev/null; then
+
+    # has make init?
+    if echo $(cl dk make) | grep -q down; then
+      echo " $repo_base_name"
+      dk_parts=$(get_repo_dk_parts $repo_base_name)
+
+      # run
+      log_filepath="/tmp/make.down.$repo_base_name.output"
+      echo " ⌛ running dk down"
+      echo ""
+
+      # clear parts
+      cl dk parts clear > /dev/null 2>&1 || true
+
+      # down stack
+      cl dk make $dk_parts down > $log_filepath 2>&1 &
+
+      # save
+      pid=$!
+      PIDS_ARRAY+=($pid)
+      LOG_FILEPATHS_ARRAY[$pid]=$log_filepath
+    fi
+  fi
+done
+
+if [ ! -z "${PIDS_ARRAY:-}" ]; then
+  echo ""
+  echo " waiting"
+  echo " 👀 tail -f /tmp/make.down.*.output"
+  process_pids
+fi
+
+#
+# DONE
+#
+
+printf " DONE"
+echo ""
data/bin/world-scripts/get
ADDED
@@ -0,0 +1,112 @@
+#!/bin/bash
+set -euo pipefail
+
+# source world constants
+source "$WORLD_DIR/_bin/common/repo_constants"
+
+# script path
+SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd $SCRIPT_PATH
+
+# source scripts
+source ../common/process_pids
+
+# code path
+if [ -z "$CODE_DIR" ]; then
+  echo "missing CODE_DIR"
+  exit 1
+fi
+code_path=$CODE_DIR
+
+# Check if the SSH agent is running
+if [ -z "$SSH_AUTH_SOCK" ]; then
+  SSH_VALID=false
+else
+  # Check if the SSH key is added to the agent
+  if ssh-add -l > /dev/null 2>&1; then
+    SSH_VALID=true
+  else
+    SSH_VALID=false
+  fi
+fi
+
+# Check if SSH_VALID is false and display an error message
+if [ "$SSH_VALID" = false ]; then
+  echo ""
+  echo "ERROR - SSH agent is not running"
+  echo ""
+  echo "This script clones multiple repositories asyncronously"
+  echo "You must have an SSH agent running with a valid key"
+  echo ""
+  echo "learn more: "
+  echo "https://docs.github.com/en/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent"
+  echo ""
+  exit 1
+fi
+
+echo ""
+echo "------------------------"
+echo " make get"
+echo "------------------------"
+echo ""
+
+REPO_ARRAY=$(get_repo_array "$1")
+for repo in ${REPO_ARRAY[@]};
+do
+  cd $code_path
+  repo_base_name=$(basename "${repo##*/}" .git)
+  repo_path=$code_path/$repo_base_name
+
+  echo " $repo_base_name"
+
+  log_filepath="/tmp/make.get.$repo_base_name.output"
+
+  if [ ! -d $repo_path ]; then
+    echo " ⌛ clone repo"
+    git clone $repo >> $log_filepath 2>&1 &
+
+    # save
+    pid=$!
+    PIDS_ARRAY+=($pid)
+    LOG_FILEPATHS_ARRAY[$pid]=$log_filepath
+  else
+    # main branch?
+    cd $repo_path
+    co_branch_name=$(git rev-parse --abbrev-ref HEAD)
+    if echo $co_branch_name | grep -qE 'main|master'; then
+
+      # no local changes
+      if [[ ! `git status --porcelain` ]]; then
+
+        # then pull latest
+        echo " ⌛ pull latest (main)"
+        git pull --rebase >> $log_filepath 2>&1 &
+
+        # save
+        pid=$!
+        PIDS_ARRAY+=($pid)
+        LOG_FILEPATHS_ARRAY[$pid]=$log_filepath
+      else
+        echo " ℹ skip pull (repo has local changes)"
+      fi
+    else
+      echo " ℹ skip pull ($co_branch_name)"
+    fi
+  fi
+  echo ""
+
+done
+
+if [ ! -z "${PIDS_ARRAY:-}" ]; then
+  echo ""
+  echo " waiting"
+  echo " 👀 tail -f /tmp/make.get.*.output"
+  process_pids
+fi
+
+#
+# DONE
+#
+
+printf " DONE"
+echo ""
data/bin/world-scripts/rm
ADDED
@@ -0,0 +1,76 @@
+#!/bin/bash
+set -euo pipefail
+
+# source world constants
+source "$WORLD_DIR/_bin/common/repo_constants"
+
+# script path
+SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd $SCRIPT_PATH
+
+# source scripts
+source ../common/process_pids
+
+# code path
+if [ -z "$CODE_DIR" ]; then
+  echo "missing CODE_DIR"
+  exit 1
+fi
+code_path=$CODE_DIR
+
+
+echo ""
+echo "🔥 WARNING: This script will delete all the $1."
+echo ""
+read -p "Type \"DELETE EVERYTHING\" to proceed: " confirmation
+echo ""
+if [ "$confirmation" != "DELETE EVERYTHING" ]; then
+  echo "Aborting!"
+  exit 1
+fi
+
+echo ""
+echo "------------------------"
+echo " make rm"
+echo "------------------------"
+echo ""
+
+REPO_ARRAY=$(get_repo_array "$1")
+for repo in ${REPO_ARRAY[@]};
+do
+  cd $code_path
+  repo_base_name=$(basename "${repo##*/}" .git)
+  repo_path=$code_path/$repo_base_name
+
+  echo " $repo_base_name"
+
+  log_filepath="/tmp/make.rm.$repo_base_name.output"
+
+  if [ -d $repo_path ]; then
+    echo " ⌛ removing project"
+    rm -rf $repo_path >> $log_filepath 2>&1 &
+
+    # save
+    pid=$!
+    PIDS_ARRAY+=($pid)
+    LOG_FILEPATHS_ARRAY[$pid]=$log_filepath
+  else
+    echo " ℹ skip, no local copy"
+  fi
+  echo ""
+
+done
+
+if [ ! -z "${PIDS_ARRAY:-}" ]; then
+  echo ""
+  echo " waiting"
+  echo " 👀 tail -f /tmp/make.rm.*.output"
+  process_pids
+fi
+
+#
+# DONE
+#
+
+printf " DONE"
+echo ""
data/bin/world-scripts/up
ADDED
@@ -0,0 +1,83 @@
+#!/bin/bash
+set -euo pipefail
+
+# source world constants
+source "$WORLD_DIR/_bin/common/repo_constants"
+
+# script path
+SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd $SCRIPT_PATH
+
+# source scripts
+source ../common/process_pids
+
+# code path
+if [ -z "$CODE_DIR" ]; then
+  echo "missing CODE_DIR"
+  exit 1
+fi
+code_path=$CODE_DIR
+
+# ensure shared net
+docker network create shared > /dev/null 2>&1 || true
+
+echo ""
+echo "------------------------"
+echo " bring up compose stacks"
+echo "------------------------"
+echo ""
+
+REPO_ARRAY=$(get_repo_array "$1")
+for repo in ${REPO_ARRAY[@]};
+do
+  repo_base_name=$(basename "${repo##*/}" .git)
+  repo_path=$code_path/$repo_base_name
+
+  cd $repo_path
+
+  # compose settings?
+  if cl dk config &> /dev/null; then
+
+    # has make init?
+    if echo $(cl dk make) | grep -q up; then
+      echo " $repo_base_name"
+      dk_parts=$(get_repo_dk_parts $repo_base_name)
+
+      # run
+      log_filepath="/tmp/make.up.$repo_base_name.output"
+      echo " ⌛ up compose stack"
+
+      # set parts
+      cl dk parts set $dk_parts > /dev/null 2>&1 || true
+
+      # up stack
+      if [[ "$repo_base_name" =~ $(get_non_async_repo_regex) ]]; then
+        echo " 👀 tail -f $log_filepath"
+        cl dk make down up > $log_filepath 2>&1
+        echo " ✔ complete"
+        echo ""
+      else
+        echo ""
+        cl dk make down up > $log_filepath 2>&1 &
+
+        # save
+        pid=$!
+        PIDS_ARRAY+=($pid)
+        LOG_FILEPATHS_ARRAY[$pid]=$log_filepath
+      fi
+    fi
+  fi
+done
+
+if [ ! -z "${PIDS_ARRAY:-}" ]; then
+  echo ""
+  echo " waiting"
+  echo " 👀 tail -f /tmp/make.up.*.output"
+  process_pids
+fi
+
+#
+# DONE
+#
+
+echo ""
data/lib/cl/magic/cl-curl
ADDED
@@ -0,0 +1,130 @@
+#!/usr/bin/env ruby
+# curl web pages by id
+require 'tty-command'
+require 'tty-prompt'
+
+require 'cl/magic/common/sub_command.rb'
+require 'cl/magic/common/common_options.rb'
+require 'cl/magic/common/logging.rb'
+require 'cl/magic/common/load_runner.rb'
+
+@logger = get_logger()
+@se_cmd_name = File.basename(__FILE__).split('-').join(' ')
+
+#
+# Features
+#
+
+def do_curls(options)
+  options[:ids].shuffle.each do |id|
+    uri = "#{options[:uri]}/#{id}"
+    status_code = `curl -s -o /dev/null -w \"%{http_code}\" #{uri}`
+    @logger.info "uri=#{uri}|status=#{status_code}"
+  end
+end
+
+def load_and_run(q, options)
+  q.load(options[:threads]) { do_curls(options) }
+  q.run
+end
+
+def run_for_durration(q, options)
+  num_seconds = 10
+  max_sleep = options[:max_sleep]
+  q.load(options[:threads]) { do_curls(options) }
+  q.run_for_durration(num_seconds, max_sleep)
+end
+
+def run_and_stagger(q, options)
+  max_sleep = options[:max_sleep]
+  q.load(options[:threads]) { do_curls(options) }
+  q.run_for_durration(max_sleep)
+end
+
+def do_work(options)
+  q = LoadRunner::Queue.new(@logger)
+  if options[:durration]
+    @logger.info "run for durration"
+    run_for_durration(q, options)
+  else
+    if options[:max_sleep] > 0
+      @logger.info "run and stagger"
+      run_and_stagger(q, options)
+    else
+      @logger.info "load and run"
+      load_and_run(q, options)
+    end
+  end
+end
+
+#
+# Options
+#
+
+options = {
+  threads: 1,
+  max_sleep: 0
+}
+global_banner = <<DOC
+
+curl web pages by id
+
+Usage: #{@se_cmd_name} [options]
+
+DOC
+
+global = OptionParser.new do |g|
+  g.banner = global_banner
+  add_help_and_verbose(g)
+
+  g.on("--base-uri URI", "uri to curl") do |v|
+    options[:uri] = v
+  end
+
+  g.on("--ids CSV", "comma separated list of ids") do |v|
+    options[:ids] = v.split(',')
+  end
+
+  g.on("-t", "--thread NUMBER", "number of threads (default: 1)") do |v|
+    options[:threads] = v.to_i
+  end
+
+  g.on("--max-sleep NUMBER", "max number of seconds to sleep; randomly staggers requests") do |v|
+    options[:max_sleep] = v.to_i
+  end
+
+  g.on("--durration SECONDS", "run threads for a specific amount of time") do |v|
+    options[:durration] = v.to_i
+  end
+end
+
+#
+# Run
+#
+
+@working_dir = ENV['SE_WORKING_DIR'] # passed through se-magic to here
+global.parse(ARGV)
+
+if options[:uri].nil?
+  @logger.error "missing --base-uri"
+  exit
+end
+if options[:ids].nil?
+  @logger.error "missing --ids"
+  exit
+end
+
+history_command = """#{@se_cmd_name} \\
+  --base-uri #{options[:uri]} \\
+  --ids #{options[:ids].join(',')} \\
+  --thread #{options[:threads]} \\
+  --max-sleep #{options[:max_sleep]}
+"""
+
+history_command = """#{history_command.strip} \\
+  --durration #{options[:durration]}
+""" if options[:durration]
+
+write_history(history_command)
+
+do_work(options)
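cl-curl drives simple load against a list of URL ids via the LoadRunner queue added further down. A hypothetical invocation built only from the flags defined above (presumably exposed as `cl curl`; the host and ids below are made up):

```
# 4 threads, requests randomly staggered by up to 3s, run for 60 seconds
cl curl --base-uri https://example.test/articles \
        --ids 101,102,103 \
        --thread 4 \
        --max-sleep 3 \
        --durration 60
```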
data/lib/cl/magic/cl-dk
CHANGED
@@ -44,6 +44,10 @@ def try_print_dk_help(dk_parts_hash, dk_make_hash, args)
     puts " - <dk-project-path> absolute filepath to world/project directory"
     puts " - <dk-working-path> absolute filepath to location dk command was run from"
     puts ""
+    puts "PATHS"
+    puts " - world-path: #{@world_settings.get_world_path_from_settings()}"
+    puts " - project-path: #{@world_settings.get_world_project_path()}"
+    puts " - working-path: #{@working_dir}"
     puts "-------------------------"
   end
 end
data/lib/cl/magic/cl-dk-make
CHANGED
@@ -97,7 +97,7 @@ def prep_make_command(c, selected_parts)
 
   # run command
   c = interpolate_parts_into_command(c, selected_parts)
-  cmd = "cd #{@working_dir} && #{c}"
+  cmd = "cd #{@working_dir} && WORLD_DIR=#{@world_path} CODE_DIR=#{@code_path} WORKING_DIR=#{@working_dir} #{c}"
 end
 
 def interpolate_parts_into_command(cmd, selected_parts)
@@ -135,6 +135,9 @@ def do_work()
   @yaml_arg_munger = YamlArgMunger.new(@working_dir, @world_settings)
   @parts_merger = PartsMerger.new(@working_dir, @yaml_arg_munger, @help_printer, @logger)
 
+  @world_path = @world_settings.get_world_path_from_settings()
+  @code_path = @world_settings.get_code_path_from_settings()
+
   # world files
   compose_hash, dk_parts_hash, dk_make_hash = @yaml_arg_munger.get_base_compose_parts_and_make_hashes()
   if compose_hash
data/lib/cl/magic/cl-dk-make-world
CHANGED
@@ -1,6 +1,6 @@
-
 #!/usr/bin/env ruby
 # cli for your world
+
 require 'yaml'
 require 'tty-command'
 
@@ -94,7 +94,7 @@ def prep_make_command(c)
   @logger.wait(c.gsub("cl dk", "dk"))
 
   # run command - from world path
-  cmd = "cd #{@world_path} && WORKING_DIR=#{@working_dir} #{c}"
+  cmd = "cd #{@world_path} && WORLD_DIR=#{@world_path} CODE_DIR=#{@code_path} WORKING_DIR=#{@working_dir} #{c}"
 end
 
 #
@@ -153,5 +153,6 @@ global.parse(ARGV)
 
 # world path
 @world_path = @world_settings.get_world_path_from_settings()
+@code_path = @world_settings.get_code_path_from_settings()
 
 do_work()
data/lib/cl/magic/cl-dk-world
CHANGED
@@ -13,6 +13,14 @@ require_relative 'dk/world_settings'
 @logger = get_logger()
 @cl_cmd_name = File.basename(__FILE__).split('-').join(' ')
 
+def expand_path(path)
+  if path.start_with?("/") or path.start_with?("~")
+    return File.expand_path(path)
+  else
+    return File.expand_path(File.join(@working_dir, path))
+  end
+end
+
 #
 # Features
 #
@@ -20,23 +28,19 @@
 def set(options, world_settings)
   world_settings_hash = world_settings.get_world_settings_hash()
 
-  #
-  world_path = options[:world_path]
+  # expand paths
+  world_path = expand_path(options[:world_path])
+  code_path = expand_path(options[:code_path])
 
-
-
-
-    world_path = File.expand_path(File.join(@working_dir, world_path))
-  end
-
-  if TTY::Prompt.new.yes?("Set world to: #{world_path}?")
-    world_settings_hash[:world_path] = world_path
-    world_settings.save_world_settings(world_settings_hash)
-    switch_context(world_settings)
-  else
-    exit
-  end
+  # confirm
+  exit unless TTY::Prompt.new.yes?("Set world to: #{world_path}?")
+  exit unless TTY::Prompt.new.yes?("Set code to: #{code_path}?")
 
+  # set & switch context
+  world_settings_hash[:code_path] = code_path
+  world_settings_hash[:world_path] = world_path
+  world_settings.save_world_settings(world_settings_hash)
+  switch_context(world_settings)
 end
 
 def switch_context(world_settings)
@@ -97,9 +101,13 @@ global = OptionParser.new do |g|
     s.banner = dk_world_set_banner
     options[:action] = :set
 
-    s.on("-
+    s.on("-w", "--world PATH", "path to world folder") do |v|
      options[:world_path] = v
     end
+
+    s.on("-c", "--code PATH", "path to clone repos into") do |v|
+      options[:code_path] = v
+    end
   end
 
   g.subcommand 'switch_context' do |s|
@@ -122,12 +130,18 @@ world_settings = WorldSettings.new(@working_dir)
 case options[:action]
 when :set
   if options[:world_path].nil?
-    @logger.error("missing --
+    @logger.error("missing --world argument")
+    exit 1
+  end
+
+  if options[:code_path].nil?
+    @logger.error("missing --code argument")
     exit 1
   end
 
   history_command = """#{@cl_cmd_name} world set \\
-    --
+    --world #{options[:world_path]} \
+    --code #{options[:code_path]}
   """
   write_history(history_command)
 
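With these changes, `world set` records a code path alongside the world path, and both are expanded relative to the working directory when not absolute. A sketch of the new invocation with placeholder paths (the exact alias may differ from the README's older `dk world set --path` form):

```
# both flags are now required; relative paths are expanded from the working directory
cl dk world set --world ./dk-world --code ~/code
```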
data/lib/cl/magic/common/load_runner.rb
ADDED
@@ -0,0 +1,127 @@
+
+module LoadRunner
+
+  class Queue
+    attr_accessor :failures
+
+    def initialize(logger = nil)
+      @logger = logger if logger!=nil
+
+      # setup
+      @threads = []
+      @transactions = 0
+      @num_threads = 1
+      @failures = 0
+    end
+
+    def load(num_threads=1, &work)
+      @num_threads = num_threads
+      @work = work # save work for new threads
+
+      # create threads
+      num_threads.times do
+
+        # do work
+        create_thread
+
+        # log
+        @logger.debug "action=create|name=thread" if @logger!=nil
+      end
+    end
+
+    def run()
+      @start_time = Time.now
+      start_all
+      wait_until_finished
+    end
+
+    def run_and_stagger(max_sleep=10)
+      @start_time = Time.now
+      start_all { rand_sleep(max_sleep) }
+      wait_until_finished
+    end
+
+    def run_for_durration(num_seconds, max_sleep=0)
+      @start_time = Time.now
+
+      # keep them running
+      to_time = Time.now + num_seconds
+
+      # log
+      @logger.info "action=run_for_durration|until=#{to_time}" if @logger!=nil
+
+      # durration
+      while (Time.now <= to_time)
+
+        # start threads
+        @threads.each do |t|
+          break if Time.now > to_time
+
+          # restart threads
+          if (t.status=="sleep")
+            rand_sleep(max_sleep)
+            t.run
+          end
+
+          if (t.status==false or t.status == nil)
+            @transactions += 1
+            @threads.delete t
+            thread = create_thread
+            rand_sleep(max_sleep)
+            thread.run
+
+            # log
+            @logger.debug "thread_count=#{@threads.count}" if @logger!=nil
+          end
+        end
+      end
+    end
+
+    private
+
+    def create_thread()
+      thread = Thread.new do
+        @work.call
+      end
+      @threads << thread
+      sleep(0.1)
+      return thread
+    end
+
+    def start_all(&block)
+      @threads.each do |t|
+
+        # pausing block?
+        block
+
+        # log
+        @logger.debug "action=start|name=thread|status=#{t.status}" if @logger!=nil
+
+        # run thread
+        t.run unless t.status==false
+      end
+    end
+
+    # wait for threads to complete
+    def wait_until_finished()
+      @threads.each do |t|
+        @logger.debug "action=check_status|name=thread|status=#{t.status}" if @logger!=nil
+        if (t.status=="run" or t.status=="sleep")
+          t.join; # wait to finish
+          @logger.debug "action=done|name=thread|status=#{t.status}" if @logger!=nil
+        end
+      end
+
+      # log
+      @logger.success "transactions=#{@transactions}|threads=#{@num_threads}|failures=#{@failures}|durration=#{Time.now - @start_time}" if @logger!=nil
+    end
+
+    # random sleep
+    def rand_sleep(max_sleep=10)
+      if max_sleep > 0
+        time_to_sleep = rand(max_sleep)
+        sleep(time_to_sleep)
+      end
+    end
+  end
+end
data/lib/cl/magic/dk/world_settings.rb
CHANGED
@@ -52,6 +52,14 @@ class WorldSettings
     return ""
   end
 
+  def get_code_path_from_settings()
+    world_settings = get_world_settings_hash()
+    if world_settings.key?(:code_path)
+      return File.join(world_settings[:code_path])
+    end
+    return ""
+  end
+
   private
 
   def get_repo_basename()
data/lib/cl/magic/dk/yaml_arg_munger.rb
CHANGED
@@ -12,11 +12,10 @@ class YamlArgMunger
     compose_hash = get_base_compose_hash()
     dk_parts_hash = {}
     dk_make_hash = {}
-
-
-
-
-    end
+
+    compose_hash = merge_world_files(compose_hash, show_help=ARGV.include?("--help"))
+    dk_parts_hash = compose_hash['x-dk-parts'] ? compose_hash.delete('x-dk-parts') : {}
+    dk_make_hash = compose_hash['x-dk-make'] ? compose_hash.delete('x-dk-make') : {}
     return compose_hash, dk_parts_hash, dk_make_hash
   end
 
@@ -48,11 +47,27 @@ class YamlArgMunger
   private
 
   def get_base_compose_hash()
-
-
-
+    p2_file_path=".cl-dk-config.output"
+    cmd = "cd #{@working_dir} && docker compose config --no-normalize 2> #{p2_file_path}"
+    config_output = `#{cmd}`
+    if $?.success?
+      hash = YAML.load(config_output)
+      hotfix_environment_array(hash)
+      hotfix_x_dk_settings(hash)
+      return hash ? hash : {}
+    else
+
+      # is config invalid?
+      p2_output = `cd #{@working_dir} && cat #{p2_file_path}` # get p2 output
+      `cd #{@working_dir} && rm #{p2_file_path}` # clean up file
+      unless p2_output.include? "no configuration file provided"
+        puts "🔥 invalid docker config"
+        exit 1
+      end
 
-
+      # otherwise support no base config
+      return {}
+    end
   end
 
   def merge_world_files(compose_hash, show_help=false)
@@ -105,6 +120,42 @@ class YamlArgMunger
     end
   end
 
+  #
+  # docker compose config - alphabetizes everything
+  # so we'll only get x-dk settings from a file docker-compose.yml
+  #
+
+  def hotfix_x_dk_settings(compose_hash)
+
+    # remove x-dk settings
+    compose_hash.delete('x-dk-parts')
+    compose_hash.delete('x-dk-make')
+
+    # fetch them directly
+    supported_paths = [
+      'compose.yaml',
+      'compose.yml',
+      'docker-compose.yaml',
+      'docker-compose.yml'
+    ]
+    raw_compose_hash = nil
+    supported_paths.each do |filepath|
+      full_filepath = File.join(@working_dir, filepath)
+      if File.exist?(full_filepath)
+        begin
+          raw_compose_hash = YAML.safe_load(File.read(full_filepath))
+          break;
+        rescue StandardError => e
+          # Handle any errors during parsing
+          puts "Error parsing YAML file #{full_filepath}: #{e.message}"
+          exit 1
+        end
+      end
+    end
+    compose_hash['x-dk-parts'] = raw_compose_hash['x-dk-parts']
+    compose_hash['x-dk-make'] = raw_compose_hash['x-dk-make']
+  end
+
   #
   # our dk hash merging process
   #
data/lib/cl/magic/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: cl-magic
 version: !ruby/object:Gem::Version
-  version: 1.
+  version: 1.3.1
 platform: ruby
 authors:
 - Don Najd
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-
+date: 2024-04-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
@@ -189,9 +189,15 @@ files:
 - Gemfile.lock
 - README.md
 - bin/cl
+- bin/common/process_pids
 - bin/console
 - bin/install_gem
 - bin/setup
+- bin/world-scripts/build
+- bin/world-scripts/down
+- bin/world-scripts/get
+- bin/world-scripts/rm
+- bin/world-scripts/up
 - cl-magic.gemspec
 - lib/cl/magic.rb
 - lib/cl/magic/cl
@@ -200,6 +206,7 @@ files:
 - lib/cl/magic/cl-ai-store-jira
 - lib/cl/magic/cl-auth
 - lib/cl/magic/cl-chrome
+- lib/cl/magic/cl-curl
 - lib/cl/magic/cl-dk
 - lib/cl/magic/cl-dk-make
 - lib/cl/magic/cl-dk-make-world
@@ -233,6 +240,7 @@ files:
 - lib/cl/magic/common/gcloud.rb
 - lib/cl/magic/common/jira.rb
 - lib/cl/magic/common/kubectl.rb
+- lib/cl/magic/common/load_runner.rb
 - lib/cl/magic/common/logging.rb
 - lib/cl/magic/common/milvus.rb
 - lib/cl/magic/common/parse_and_pick.rb