cl-magic 0.4.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile.lock +16 -2
- data/cl-magic.gemspec +5 -0
- data/lib/cl/magic/cl +21 -14
- data/lib/cl/magic/cl-ai-chat +117 -0
- data/lib/cl/magic/cl-ai-query +116 -0
- data/lib/cl/magic/cl-ai-store-jira +158 -0
- data/lib/cl/magic/cl-dk +23 -456
- data/lib/cl/magic/cl-dk-make +174 -0
- data/lib/cl/magic/cl-dk-make-world +163 -0
- data/lib/cl/magic/cl-dk-parts +253 -0
- data/lib/cl/magic/cl-dk-world +140 -0
- data/lib/cl/magic/cl-jira-fetch +15 -47
- data/lib/cl/magic/cl-jira-fetch-by-epics +112 -0
- data/lib/cl/magic/cl-jira-to-elastic +126 -0
- data/lib/cl/magic/cl-jira-to-markdown +68 -0
- data/lib/cl/magic/{cl-jira-stats → cl-jira-to-stats} +13 -10
- data/lib/cl/magic/cl-kube-logs +3 -1
- data/lib/cl/magic/common/ai_prompt.rb +169 -0
- data/lib/cl/magic/common/ai_text_splitter.rb +78 -0
- data/lib/cl/magic/common/common_options.rb +1 -1
- data/lib/cl/magic/common/elastic.rb +41 -0
- data/lib/cl/magic/common/jira.rb +169 -42
- data/lib/cl/magic/common/milvus.rb +78 -0
- data/lib/cl/magic/dk/help_printer.rb +29 -0
- data/lib/cl/magic/dk/parts_merger.rb +67 -0
- data/lib/cl/magic/dk/world_settings.rb +52 -0
- data/lib/cl/magic/dk/yaml_arg_munger.rb +107 -0
- data/lib/cl/magic/version.rb +1 -1
- metadata +77 -3
data/lib/cl/magic/common/jira.rb
CHANGED
@@ -1,11 +1,70 @@
+require 'tty-progressbar'
+require 'tty-spinner'
+require 'concurrent'
 
 class Jira
 
+  MAX_THREADS = 20 # set to 1 to debug without concurrency
+
   def initialize(base_uri, username, token, break_at_one_page=false)
     @base_uri = base_uri.chomp("/")
     @username = username
     @token = token
     @break_at_one_page = break_at_one_page
+
+    @thread_pool = Concurrent::ThreadPoolExecutor.new(
+      min_threads: 0,
+      max_threads: MAX_THREADS,
+      max_queue: 0,
+      fallback_policy: :caller_runs
+    )
+  end
+
+  #
+  # Formatter
+  #
+
+  def self.jira_to_markdown(issue)
+
+    md = []
+    md << ""
+    md << "# #{issue['key']}"
+    md << "project: #{issue['fields']['project']['key']}"
+    md << "created: #{issue['fields']['created']}"
+    md << "updated: #{issue['fields']['updated']}"
+    md << "status: #{issue['fields']['status']['statusCategory']['name']}" unless issue['fields']["status"].nil?
+    md << "priority: #{issue['fields']['priority']['name']}"
+    md << "labels: #{issue['fields']['labels'].join(',')}"
+    md << "issue_type: #{issue['fields']['issuetype']['name']}" unless issue['fields']["issuetype"].nil?
+    md << "assignee: #{issue['fields']['assignee']['displayName']}" unless issue['fields']["assignee"].nil?
+    md << ""
+    md << "## Summary"
+    md << "#{issue['fields']['summary']}"
+    md << ""
+    md << ""
+    issue_md = md.join("\n")
+
+    comments = []
+    issue["comments"].each_with_index do |comment, i|
+      c_md = []
+      c_md << "### Comment - #{comment["author"]["displayName"]} "
+      c_md << ""
+      c_md << "created: #{comment["created"]}"
+
+      # nest markdown deeper
+      comment["body"].split("\n").each do |line|
+        c_md << if line.start_with?("#")
+          "####{line}"
+        else
+          line
+        end
+      end
+
+      c_md << ""
+      comments << [comment["id"], c_md.join("\n")]
+    end
+
+    return issue_md, comments
   end
 
   #
@@ -20,7 +79,7 @@ class Jira
     return epic_ids, epics
   end
 
-  def
+  def get_issues_by_epic_ids(project, epic_ids)
     jql_query = "project = \"#{project}\" AND parentEpic IN (#{epic_ids.join(',')})"
     return run_jql_query(jql_query)
   end
@@ -33,6 +92,14 @@ class Jira
     end
   end
 
+  def get_issue_comments(issue_key)
+    uri = URI.parse("#{@base_uri}/rest/api/2/issue/#{issue_key}/comment")
+    jira_get(uri) do |response|
+      result = JSON.parse(response.body)
+      return result["comments"]
+    end
+  end
+
   #
   # Helpers: GET & POST
   #
@@ -50,7 +117,11 @@ class Jira
       if response.code == '200'
         yield response
       else
-        raise "
+        raise """
+          Jira query failed with HTTP status code #{response.code}
+
+          #{response.body}
+        """
       end
     end
 
@@ -69,7 +140,13 @@ class Jira
       if response.code == '200'
         yield response
       else
-        raise "
+        raise """
+          Jira query failed with HTTP status code #{response.code}
+
+          BODY: #{body.to_json}
+
+          RESPONSE: #{response.body}
+        """
       end
     end
 
@@ -78,6 +155,9 @@ class Jira
   #
 
   def run_jql_query(jql)
+    spinner = TTY::Spinner.new("[:spinner] fetching ...", format: :pulse_2)
+    spinner.auto_spin # Automatic animation with default interval
+
     start_at = 0
     max_results = 50
     total_results = nil
@@ -111,53 +191,100 @@ class Jira
           start_at += max_results # else next page
         end
       end
-
-      print '.' # loop
     end
+    spinner.stop("#{all_results.count} issues")
     all_results.map {|h| h}
   end
-end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  def collect_comments(jira, issues)
+    final_issue_hashes = []
+    bar = TTY::ProgressBar.new("fetching [:bar]", total: issues.count)
+
+    issues.each do |issue|
+      do_concurently do
+        issue_key = issue["key"]
+        issue["comments"] = []
+
+        # fetch change log
+        comments = get_issue_comments(issue_key)
+        issue["comments"] = comments
+        final_issue_hashes << issue # save
+        bar.advance
+      end
+    end
+
+    # wait
+    wait_concurrently
+    return final_issue_hashes
+  end
+
+  #
+  # Collect status changelogs
+  #
+  # Given a array of jira issue hashes
+  # * fetch the change log
+  # * filter down to status changes
+  # * add it to the issue hash as ["status_changelogs"]
+  #
+
+  def collect_status_changelogs(jira, issues)
+    final_issue_hashes = []
+    bar = TTY::ProgressBar.new("fetching [:bar]", total: issues.count)
+
+    issues.each do |issue|
+      do_concurently do
+        issue_key = issue["key"]
+        issue["status_changelogs"] = []
+
+        # fetch change log
+        changelogs = get_issue_status_changelog(issue_key)
+
+        changelogs.each do |change_log|
+
+          # all items that are status changes
+          status_logs = change_log["items"].select {|i| i["field"]=="status"}
+          status_logs = status_logs.collect do |status_log|
+            {
+              "key": issue_key,
+              "created": change_log["created"],
+              "toString": status_log["toString"],
+              "fromString": status_log["fromString"]
+            }
+          end
+
+          # append them to issue
+          status_logs.each do |status_log|
+            issue["status_changelogs"] << status_log
+          end
+        end
+
+        final_issue_hashes << issue # save
+        bar.advance
       end
+    end
 
-
-
-
+    # wait
+    wait_concurrently
+    return final_issue_hashes
+  end
+
+  private
+
+  def do_concurently
+    if MAX_THREADS > 1
+      @thread_pool.post do
+        yield
       end
+    else
+      yield
     end
+  end
 
-
+  def wait_concurrently
+    if MAX_THREADS > 1
+      @thread_pool.shutdown
+      @thread_pool.wait_for_termination
+    end
   end
-
+
 end
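Illustrative usage sketch (not part of the diff): one way the reworked Jira class might be driven end to end. The base URI, credentials, project key and epic ids are placeholders, and the surrounding cl-magic scripts are assumed to require the HTTP/JSON dependencies jira.rb relies on.

require_relative 'common/jira'   # path relative to a script in lib/cl/magic

# placeholder endpoint, credentials and ids
jira = Jira.new("https://example.atlassian.net", "user@example.com", "api-token")

issues = jira.get_issues_by_epic_ids("PROJ", ["PROJ-1", "PROJ-2"])

# comment fetches fan out over the Concurrent::ThreadPoolExecutor (MAX_THREADS workers)
issues = jira.collect_comments(jira, issues)

issue_md, comments = Jira.jira_to_markdown(issues.first)
puts issue_md
puts comments.map { |_id, md| md }.join("\n")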
data/lib/cl/magic/common/milvus.rb
ADDED
@@ -0,0 +1,78 @@
+
+class Milvus
+  def initialize(host, port)
+    @host = host
+    @port = port
+  end
+
+  def search(collection_name, embedding)
+    final_url = "http://#{@host}:#{@port}/v1/vector/search"
+    data = {
+      collectionName: collection_name,
+      vector: embedding,
+      outputFields: ["id", "name", "doc_key", "distance"],
+    }
+
+    # post
+    sanitized_data = data.to_json
+    cmd = """
+      curl -s \
+      '#{final_url}' \
+      -X 'POST' \
+      -H 'accept: application/json' \
+      -H 'Content-Type: application/json' \
+      -d '#{sanitized_data}'
+    """
+    return `#{cmd}`
+  end
+
+  def create_collection(collection_name)
+    final_url = "http://#{@host}:#{@port}/v1/vector/collections/create"
+    data = {
+      dbName: "default",
+      collectionName: collection_name,
+      dimension: 1536,
+      metricType: "L2",
+      primaryField: "id",
+      vectorField: "vector"
+    }
+
+    # post
+    sanitized_data = data.to_json
+    cmd = """
+      curl -s \
+      '#{final_url}' \
+      -X 'POST' \
+      -H 'accept: application/json' \
+      -H 'Content-Type: application/json' \
+      -d '#{sanitized_data}'
+    """
+    return `#{cmd}`
+  end
+
+  def post_to_collection(collection_name, doc_key, embedding)
+    final_url = "http://#{@host}:#{@port}/v1/vector/insert"
+    data = {
+      collectionName: collection_name,
+      data: {
+        doc_key: doc_key,
+        vector: embedding
+      }
+    }
+
+    # post
+    sanitized_data = data.to_json
+    cmd = """
+      curl -s \
+      '#{final_url}' \
+      -X POST \
+      -H 'accept: application/json' \
+      -H 'Content-Type: application/json' \
+      -d '#{sanitized_data}'
+    """
+    response = `#{cmd}`
+    data = JSON.parse(response)
+    raise "Error: #{data.to_json}\n\nData #{sanitized_data}" if data.has_key?("message")
+    return data.to_json
+  end
+end
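Illustrative usage sketch (not part of the diff): how the new Milvus wrapper might be exercised. Host, port and collection name are placeholders; the class shells out to curl against Milvus' v1 REST endpoints, so a reachable server is assumed.

require 'json'
require_relative 'common/milvus'   # path is illustrative

milvus = Milvus.new("localhost", 9091)            # placeholder REST host/port

milvus.create_collection("jira_issues")           # 1536-dim vectors, L2 metric

embedding = Array.new(1536) { rand }              # stand-in for a real embedding vector
milvus.post_to_collection("jira_issues", "PROJ-1", embedding)

hits = JSON.parse(milvus.search("jira_issues", embedding))
puts hits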
data/lib/cl/magic/dk/help_printer.rb
ADDED
@@ -0,0 +1,29 @@
+
+class HelpPrinter
+
+  def initialize(logger)
+    @logger = logger
+  end
+
+  def print_dk_help_line(key, help)
+    if $stdout.isatty
+      if help.nil?
+        @logger.puts("#{key.ljust(15, ' ')} ???no help???")
+      else
+        key = key.ljust(15, ' ')
+        help_parts = help.split(";")
+
+        # first line
+        @logger.puts(key, help_parts.shift)
+
+        # following lines
+        padding = "".ljust(15, ' ')
+        help_parts.each do |p|
+          @logger.puts(padding, p)
+        end
+        @logger.puts("") if help.end_with?(";")
+      end
+    end
+  end
+
+end
data/lib/cl/magic/dk/parts_merger.rb
ADDED
@@ -0,0 +1,67 @@
+require 'tty-command'
+require 'yaml'
+
+class PartsMerger
+
+  def initialize(working_dir, yaml_arg_munger, help_printer, logger)
+    @working_dir = working_dir
+    @yaml_arg_munger = yaml_arg_munger
+    @help_printer = help_printer
+    @logger = logger
+  end
+
+  def merge_parts(compose_hash, dk_parts_hash, args)
+    selected_part_keys = []
+
+    # merge: saved parts
+    saved_part_keys = get_saved_parts(dk_parts_hash)
+    saved_part_keys.each do |potential_part_key|
+      dk_part = dk_parts_hash.fetch(potential_part_key, nil) # yml detail
+      if dk_part
+        compose_hash = print_and_merge_part(potential_part_key, dk_part, compose_hash)
+        selected_part_keys << potential_part_key
+      end
+    end
+
+    # merge: arg parts
+    while true
+      potential_part_key = args.first
+      dk_part = dk_parts_hash.fetch(potential_part_key, nil) # yml detail
+      if dk_part
+        unless selected_part_keys.include?(potential_part_key)
+          compose_hash = print_and_merge_part(potential_part_key, dk_part, compose_hash)
+          selected_part_keys << potential_part_key
+        end
+        args.shift # remove part arg
+      else
+        break
+      end
+    end
+    @logger.puts "" if $stdout.isatty # tailing line break
+
+    return compose_hash, selected_part_keys, args
+  end
+
+  def get_saved_parts(dk_parts_hash)
+
+    # get saved parts
+    cmd = "cd #{@working_dir} && cl dk parts list"
+    out, err = TTY::Command.new(:printer => :null).run("#{cmd}")
+    return out.split("\n")
+
+  end
+
+  private
+
+  def print_and_merge_part(part_key, dk_part, compose_hash)
+
+    # print
+    if $stdout.isatty
+      help_str = dk_part.fetch('help')
+      @help_printer.print_dk_help_line("#{part_key}", "#{help_str ? '- ' + help_str : ''}") if dk_part.keys.any?
+    end
+    # merge
+    return @yaml_arg_munger.dk_merge_and_remove(compose_hash, dk_part)
+  end
+
+end
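Illustrative wiring sketch (not part of the diff): one plausible way the new dk classes compose, based only on the signatures shown in this diff. Paths, the "debug" part name and the args are hypothetical; get_saved_parts shells out to `cl dk parts list` and YamlArgMunger shells out to `docker compose config`, so both CLIs are assumed to be available.

require_relative 'dk/help_printer'
require_relative 'dk/world_settings'
require_relative 'dk/yaml_arg_munger'
require_relative 'dk/parts_merger'

working_dir = Dir.pwd
world_settings = WorldSettings.new(working_dir)
munger = YamlArgMunger.new(working_dir, world_settings)
merger = PartsMerger.new(working_dir, munger, HelpPrinter.new($stdout), $stdout)

compose_hash, dk_parts_hash, _dk_make_hash = munger.get_base_compose_parts_and_make_hashes()

# leading args that name parts (e.g. a hypothetical "debug" part) are consumed
# and merged into the compose hash; the rest are passed through
args = ["debug", "up", "-d"]
compose_hash, selected_parts, args = merger.merge_parts(compose_hash, dk_parts_hash, args)

# writes .cl-dk.yml and prepends "-f <path>" to the remaining docker compose args
args = munger.save_yaml_and_adjust_args(compose_hash, args)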
data/lib/cl/magic/dk/world_settings.rb
ADDED
@@ -0,0 +1,52 @@
+require 'tty-command'
+require 'yaml'
+
+class WorldSettings
+
+  def initialize(working_dir)
+    @working_dir = working_dir
+  end
+
+  def get_world_settings_hash()
+    filepath = get_world_settings_filepath()
+    return File.exist?(filepath) ? YAML.load_file(filepath) : {}
+  end
+
+  def save_world_settings(world_settings_hash)
+    filepath = get_world_settings_filepath()
+    tempfile = File.new(filepath, 'w')
+    tempfile.write(world_settings_hash.to_yaml)
+    tempfile.close
+  end
+
+  def get_world_project_path()
+    repo_basename = get_repo_basename()
+    world_path = get_world_path_from_settings()
+    return File.join(world_path, repo_basename) if world_path and repo_basename
+    return nil
+  end
+
+  def get_world_path_from_settings()
+    world_settings = get_world_settings_hash()
+    if world_settings.key?(:world_path) and world_settings.key?(:context)
+      return File.join(world_settings[:world_path], world_settings[:context])
+    end
+    return ""
+  end
+
+  private
+
+  def get_repo_basename()
+    command = "cd #{@working_dir} && basename $(git remote get-url origin 2> /dev/null) .git"
+    repo_basename = TTY::Command.new(:printer => :null).run(command).out.gsub('.git', '').strip.chomp
+    if repo_basename==".git" or repo_basename==""
+      return File.basename(@working_dir)
+    end
+    return repo_basename
+  end
+
+  def get_world_settings_filepath()
+    return File.join(".cl-dk-world.yml")
+  end
+
+end
data/lib/cl/magic/dk/yaml_arg_munger.rb
ADDED
@@ -0,0 +1,107 @@
+require 'yaml'
+
+class YamlArgMunger
+
+  def initialize(working_dir, world_settings)
+    @working_dir = working_dir
+    @world_path = world_settings.get_world_path_from_settings()
+    @dk_proj_path = world_settings.get_world_project_path()
+  end
+
+  def get_base_compose_parts_and_make_hashes()
+    compose_hash = get_base_compose_hash()
+    dk_parts_hash = {}
+    dk_make_hash = {}
+    if compose_hash
+      compose_hash = merge_world_files(compose_hash, show_help=ARGV.include?("--help"))
+      dk_parts_hash = compose_hash['x-dk-parts'] ? compose_hash.delete('x-dk-parts') : {}
+      dk_make_hash = compose_hash['x-dk-make'] ? compose_hash.delete('x-dk-make') : {}
+    end
+    return compose_hash, dk_parts_hash, dk_make_hash
+  end
+
+  def dk_merge_and_remove(compose_hash, yml_hash)
+    # remove help & merge
+    clean_yml = yml_hash.clone
+    clean_yml.delete('help')
+    return compose_hash.dk_merge(clean_yml).dk_reject! { |k, v| v=='<dk-remove>' }
+  end
+
+  def save_yaml_and_adjust_args(compose_hash, args)
+
+    # generate final compose file
+    tempfile = File.new(File.join(@working_dir, ".cl-dk.yml"), 'w')
+    tempfile.write(compose_hash.to_yaml) # write it to the tempfile
+
+    # remove existing '-f' flag, if needed
+    file_flag_index = args.index('-f')
+    if file_flag_index==0
+      args.delete_at(file_flag_index)
+      args.delete_at(file_flag_index)
+    end
+    args.unshift('-f', tempfile.path) # add new '-f' flag
+
+    tempfile.close
+    return args
+  end
+
+  private
+
+  def get_base_compose_hash()
+    cmd = "cd #{@working_dir} && docker compose config 2> /dev/null"
+    return YAML.load(`#{cmd}`)
+  end
+
+  def merge_world_files(compose_hash, show_help=false)
+    if @dk_proj_path
+      print_dk_help_line("dk-project-path", "#{@dk_proj_path}") if show_help and $stdout.isatty
+
+      Dir.glob("#{@dk_proj_path}/*.yml").sort.each do |filepath|
+        print_dk_help_line("dk-world", "#{filepath}") if show_help and $stdout.isatty
+
+        # read file and replace
+        contents = File.read(filepath)
+        contents.gsub!('<dk-world-path>', @world_path)
+        contents.gsub!('<dk-project-path>', @dk_proj_path)
+        contents.gsub!('<dk-working-path>', @working_dir)
+
+        # yml merge
+        yml_hash = YAML.load(contents)
+        compose_hash = dk_merge_and_remove(compose_hash, yml_hash)
+      end
+    end
+    return compose_hash
+  end
+
+  class ::Hash
+    def dk_merge(second)
+      merger = proc { |_, v1, v2|
+        if Hash === v1 && Hash === v2
+          v1.merge(v2, &merger)
+        else
+          if Array === v1 && Array === v2
+            if v2.first=="<dk-replace>"
+              v2[1..-1] # everything but the first item
+            else
+              v1 | v2 # union arrays
+            end
+          else
+            if [:undefined, nil, :nil].include?(v2)
+              v1
+            else
+              v2
+            end
+          end
+        end
+      }
+      merge(second.to_h, &merger)
+    end
+    def dk_reject!(&blk)
+      self.each do |k, v|
+        v.dk_reject!(&blk) if v.is_a?(Hash)
+        self.delete(k) if blk.call(k, v)
+      end
+    end
+  end
+
+end
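The <dk-replace> and <dk-remove> sentinels in the Hash monkey-patch are easiest to see with a worked example (illustrative values, not part of the diff; assumes yaml_arg_munger.rb has been loaded so Hash#dk_merge and Hash#dk_reject! exist):

base = {
  "services" => {
    "web" => {
      "ports"       => ["3000:3000"],
      "command"     => "rails s",
      "environment" => { "RAILS_ENV" => "development" }
    }
  }
}

overlay = {
  "services" => {
    "web" => {
      "ports"       => ["<dk-replace>", "4000:4000"],  # replace the array instead of unioning
      "command"     => "<dk-remove>",                  # mark the key for removal
      "environment" => { "DEBUG" => "1" }              # hashes deep-merge
    }
  }
}

merged = base.dk_merge(overlay).dk_reject! { |k, v| v == '<dk-remove>' }
# merged["services"]["web"]["ports"]       => ["4000:4000"]
# merged["services"]["web"]                => no "command" key
# merged["services"]["web"]["environment"] => {"RAILS_ENV"=>"development", "DEBUG"=>"1"}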