makit 0.0.177 → 0.0.178
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/makit/azure-pipelines.rb +14 -3
- data/lib/makit/azure_devops.rb +378 -0
- data/lib/makit/gitlab_devops.rb +343 -0
- data/lib/makit/version.rb +1 -1
- data/lib/makit.rb +2 -0
- metadata +3 -1
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: f937ec1cf935ad85175afeb03a0581f9aa4ad4c25eb110b0e6dd3270d11450b7
|
|
4
|
+
data.tar.gz: 07cf1f1a0066881d547e4ab630100fc114d69598a61f70655cdea1b677cb6a79
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 074e28f520cd60a88ca3e19334a55b904defcb467f2f85d27cae574f34d6fb712e1c4aece3ac38bc4cd74acbf7bf3f321a5d849cfacf0e49921050e988cc0c14
|
|
7
|
+
data.tar.gz: 03b2ee40a67e007774d59c8d5a01c62d23cd5465a02f50df5c5d786116f785aab79efd7f7c1438d6ae0fa1970a739f36b0e3ccd3e85276413f696cb649067a04
|
|
@@ -1,6 +1,15 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
1
3
|
module Makit
|
|
2
|
-
|
|
3
|
-
|
|
4
|
+
# @deprecated Use {Makit::AzureDevOps} for pipeline run status, timeline, failed steps, and syncing results.
|
|
5
|
+
# This class will be removed in a future version. For NuGet publish to Azure DevOps feeds,
|
|
6
|
+
# consider using Makit::NuGet or dotnet nuget push directly.
|
|
7
|
+
class AzurePipelines
|
|
8
|
+
DEPRECATION_MESSAGE = "Makit::AzurePipelines is deprecated; use Makit::AzureDevOps for pipeline run status and results. " \
|
|
9
|
+
"See Makit::AzureDevOps#sync_pipelines and #report."
|
|
10
|
+
|
|
11
|
+
def self.status(project_name: nil, org: nil, project: nil, pipeline_id: nil, pipeline_name: nil, pat: nil)
|
|
12
|
+
warn "[DEPRECATION] #{DEPRECATION_MESSAGE}"
|
|
4
13
|
require "net/http"
|
|
5
14
|
require "uri"
|
|
6
15
|
require "json"
|
|
@@ -134,7 +143,7 @@ module Makit
|
|
|
134
143
|
exit 1
|
|
135
144
|
end
|
|
136
145
|
end
|
|
137
|
-
|
|
146
|
+
# Publishes a NuGet package to an Azure DevOps feed
|
|
138
147
|
#
|
|
139
148
|
# @param package_name [String] The name of the NuGet package
|
|
140
149
|
# @param package_version [String] The version of the package
|
|
@@ -142,7 +151,9 @@ module Makit
|
|
|
142
151
|
# @param feed_source [String] The name of the Azure DevOps feed
|
|
143
152
|
# @param api_key [String, nil] Optional API key (defaults to "az" for Azure CLI)
|
|
144
153
|
# @raise [RuntimeError] If the push fails and it's not a duplicate package error
|
|
154
|
+
# @deprecated Makit::AzurePipelines is deprecated; for pipeline status use Makit::AzureDevOps. This method remains for NuGet publish only.
|
|
145
155
|
def self.publish(package_name:, package_version:, package_file:, feed_source:, api_key: "az")
|
|
156
|
+
warn "[DEPRECATION] #{DEPRECATION_MESSAGE}"
|
|
146
157
|
puts " Checking if package #{package_name} version #{package_version} already exists in feed..."
|
|
147
158
|
|
|
148
159
|
# Check if package version already exists
|
|
@@ -0,0 +1,378 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Azure DevOps Pipelines API client for querying run status and saving results.
|
|
4
|
+
# Requires: AZURE_DEVOPS_ORG, AZURE_DEVOPS_PROJECT, AZURE_DEVOPS_PIPELINE_ID, AZURE_DEVOPS_TOKEN (PAT with Build Read).
|
|
5
|
+
require "fileutils"
|
|
6
|
+
require "json"
|
|
7
|
+
require "net/http"
|
|
8
|
+
require "stringio"
|
|
9
|
+
require "time"
|
|
10
|
+
require "uri"
|
|
11
|
+
require "zlib"
|
|
12
|
+
|
|
13
|
+
module Makit
|
|
14
|
+
class AzureDevOps
|
|
15
|
+
API_VERSION = "7.1"
|
|
16
|
+
BASE_URL = "https://dev.azure.com"
|
|
17
|
+
|
|
18
|
+
def initialize(org:, project:, pipeline_id:, token:)
|
|
19
|
+
@org = org
|
|
20
|
+
@project = project
|
|
21
|
+
@pipeline_id = pipeline_id.to_s
|
|
22
|
+
@token = token
|
|
23
|
+
end
|
|
24
|
+
|
|
25
|
+
def base_uri(path)
|
|
26
|
+
URI("#{BASE_URL}/#{URI.encode_www_form_component(@org)}/#{URI.encode_www_form_component(@project)}/_apis/#{path}?api-version=#{API_VERSION}")
|
|
27
|
+
end
|
|
28
|
+
|
|
29
|
+
def get(path)
|
|
30
|
+
uri = base_uri(path)
|
|
31
|
+
request(uri)
|
|
32
|
+
end
|
|
33
|
+
|
|
34
|
+
# GET a full URL (e.g. log URL); uses same PAT auth. Returns response body as string.
|
|
35
|
+
def get_url(full_url)
|
|
36
|
+
uri = URI(full_url)
|
|
37
|
+
request(uri, parse_json: false)
|
|
38
|
+
end
|
|
39
|
+
|
|
40
|
+
# Perform an authenticated GET against `uri` using PAT basic auth (empty username).
# Returns parsed JSON when parse_json is true, otherwise the raw body string.
# Raises RuntimeError "HTTP <code>: <body>" on any non-2xx response.
def request(uri, parse_json: true)
  http_get = Net::HTTP::Get.new(uri)
  http_get.basic_auth("", @token)
  http_get["Accept"] = parse_json ? "application/json" : "text/plain"
  response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == "https") do |http|
    http.request(http_get)
  end
  raise "HTTP #{response.code}: #{response.body}" unless response.is_a?(Net::HTTPSuccess)

  parse_json ? JSON.parse(response.body) : response.body
end
|
|
49
|
+
|
|
50
|
+
# List recent runs; returns array of runs (newest first).
|
|
51
|
+
def list_runs(top: 10)
|
|
52
|
+
path = "pipelines/#{@pipeline_id}/runs"
|
|
53
|
+
data = get(path)
|
|
54
|
+
runs = data["value"] || []
|
|
55
|
+
runs.first(top)
|
|
56
|
+
end
|
|
57
|
+
|
|
58
|
+
# Get the latest run (first in list).
|
|
59
|
+
def latest_run
|
|
60
|
+
list_runs(top: 1).first
|
|
61
|
+
end
|
|
62
|
+
|
|
63
|
+
# Get the latest run that has completed (state == "completed").
|
|
64
|
+
# If the latest run is not yet completed, returns the most recent completed run from the list.
|
|
65
|
+
# Optional date_scope: "YYYY-MM-DD" to only consider runs from that date (finishedDate or createdDate).
|
|
66
|
+
# Get the latest run that has completed (state == "completed").
# If the latest run is not yet completed, returns the most recent completed run from the list.
# Optional date_scope: "YYYY-MM-DD" to only consider runs from that date (finishedDate or createdDate).
def latest_run_or_latest_completed(top: 20, date_scope: nil)
  runs = list_runs(top: top)
  newest = runs.first

  if newest && newest["state"]&.downcase == "completed"
    return newest if !date_scope || run_on_date?(newest, date_scope)
  end

  fallback = runs.find do |candidate|
    candidate["state"]&.downcase == "completed" && (!date_scope || run_on_date?(candidate, date_scope))
  end
  fallback || newest
end
|
|
76
|
+
|
|
77
|
+
# Whether the run's timestamp (finishedDate, else createdDate) starts with the
# given "YYYY-MM-DD" date_scope prefix; a nil date_scope matches every run.
def run_on_date?(run, date_scope)
  return true unless date_scope

  (run["finishedDate"] || run["createdDate"].to_s).start_with?(date_scope)
end
|
|
83
|
+
|
|
84
|
+
# Build a timestamp-based directory name from a run (for organizing results by run time).
|
|
85
|
+
# Uses finishedDate if set, else createdDate; format: 20260224T205001Z (safe for dirs).
|
|
86
|
+
# Build a timestamp-based directory name from a run (for organizing results by run time).
# Uses finishedDate if set, else createdDate; format: 20260224T205001Z (safe for dirs).
# Returns "unknown" when the run is nil, has no timestamp, or the timestamp is unparseable.
def run_timestamp_dir(run)
  return "unknown" unless run

  ts = run["finishedDate"] || run["createdDate"]
  return "unknown" unless ts

  # Parse ISO8601 and format as compact UTC: 20260224T205001Z.
  # Rescue TypeError as well: Time.parse raises it for non-String values,
  # which JSON can hand back; previously only ArgumentError was caught.
  begin
    Time.parse(ts).utc.strftime("%Y%m%dT%H%M%SZ")
  rescue ArgumentError, TypeError
    "unknown"
  end
end
|
|
100
|
+
|
|
101
|
+
# Get a single run by id.
|
|
102
|
+
def get_run(run_id)
|
|
103
|
+
path = "pipelines/#{@pipeline_id}/runs/#{run_id}"
|
|
104
|
+
get(path)
|
|
105
|
+
end
|
|
106
|
+
|
|
107
|
+
# Get timeline for a run (uses Build API; run_id often equals build_id for YAML pipelines).
|
|
108
|
+
# Returns timeline hash with "records" or nil if not available (e.g. 404).
|
|
109
|
+
def get_timeline(build_id)
|
|
110
|
+
path = "build/builds/#{build_id}/timeline"
|
|
111
|
+
get(path)
|
|
112
|
+
rescue StandardError => e
|
|
113
|
+
return nil if e.message.include?("404")
|
|
114
|
+
|
|
115
|
+
raise
|
|
116
|
+
end
|
|
117
|
+
|
|
118
|
+
# True if the run result is succeeded (or succeeded with issues/warnings).
|
|
119
|
+
# True if the run result is succeeded (or succeeded with issues/warnings).
def run_passed?(run)
  return false unless run

  outcome = run["result"]&.downcase
  outcome == "succeeded" || outcome == "succeededwithissues"
end
|
|
122
|
+
|
|
123
|
+
# Extract detailed info for all failed steps from the timeline (for storage and display).
|
|
124
|
+
# Returns array of hashes: name, type, identifier, result, resultCode, issues, logUrl, logSnippet (if fetched).
|
|
125
|
+
def failed_steps_details(timeline, fetch_log_snippet: true, log_snippet_lines: 80)
|
|
126
|
+
return [] unless timeline && timeline["records"].is_a?(Array)
|
|
127
|
+
|
|
128
|
+
failed = timeline["records"].select { |r| r["result"]&.downcase == "failed" }
|
|
129
|
+
failed.map do |r|
|
|
130
|
+
detail = {
|
|
131
|
+
"name" => r["name"] || r["identifier"] || "Unknown",
|
|
132
|
+
"type" => r["type"],
|
|
133
|
+
"identifier" => r["identifier"],
|
|
134
|
+
"result" => r["result"],
|
|
135
|
+
"resultCode" => r["resultCode"],
|
|
136
|
+
"workerName" => r["workerName"],
|
|
137
|
+
"startTime" => r["startTime"],
|
|
138
|
+
"finishTime" => r["finishTime"],
|
|
139
|
+
"issues" => (r["issues"] || []).map { |i| { "type" => i["type"], "category" => i["category"], "message" => i["message"], "data" => i["data"] } },
|
|
140
|
+
"logUrl" => r.dig("log", "url")
|
|
141
|
+
}
|
|
142
|
+
if fetch_log_snippet && detail["logUrl"]
|
|
143
|
+
snippet = fetch_log_snippet_content(detail["logUrl"], lines: log_snippet_lines)
|
|
144
|
+
detail["logSnippet"] = snippet.to_s.encode("UTF-8", invalid: :replace, undef: :replace) if snippet && !snippet.to_s.empty?
|
|
145
|
+
end
|
|
146
|
+
detail.delete("logSnippet") if detail["logSnippet"].to_s.empty?
|
|
147
|
+
detail
|
|
148
|
+
end
|
|
149
|
+
end
|
|
150
|
+
|
|
151
|
+
# Fetch a log via its URL and return its tail (last `lines` lines), prefixed
# with an omission marker when truncated. Returns nil for empty logs; on any
# error returns a bracketed error string rather than raising.
def fetch_log_snippet_content(log_url, lines: 80)
  raw = get_url(log_url)
  return nil if raw.to_s.empty?

  text = decompress_log_body(raw)
  text = text.encode("UTF-8", invalid: :replace, undef: :replace) unless text.encoding == Encoding::UTF_8

  split = text.split("\n")
  return text if split.size <= lines

  "... (#{split.size - lines} lines omitted) ...\n" + split.last(lines).join("\n")
rescue StandardError => e
  "[Log fetch error: #{e.message}]"
end
|
|
164
|
+
|
|
165
|
+
# Azure DevOps may return log content gzip-compressed; decompress if so.
|
|
166
|
+
# Azure DevOps may return log content gzip-compressed; decompress if so.
# Detects the gzip magic bytes (0x1F 0x8B); anything else is returned as-is.
# Falls back to the raw bytes when the stream is corrupt or truncated.
def decompress_log_body(raw)
  raw = raw.to_s
  return raw if raw.empty?
  return raw unless raw.bytesize >= 2 && raw.getbyte(0) == 0x1F && raw.getbyte(1) == 0x8B

  # Zlib.gunzip replaces the previous StringIO + Zlib::GzipReader.new, which
  # never closed the reader (resource leak) — gunzip handles cleanup itself.
  Zlib.gunzip(raw)
rescue Zlib::Error
  raw
end
|
|
176
|
+
|
|
177
|
+
# Extract warning issues from all timeline records.
|
|
178
|
+
# Returns array of hashes: name, type, issues (warning-only).
|
|
179
|
+
# Extract warning issues from all timeline records.
# Returns array of hashes: name, type, issues (warning-only entries).
def warning_details(timeline)
  return [] unless timeline && timeline["records"].is_a?(Array)

  timeline["records"].each_with_object([]) do |rec, acc|
    next unless rec["issues"].is_a?(Array)

    flagged = rec["issues"].select { |issue| issue["type"]&.downcase&.include?("warning") }
    next if flagged.empty?

    acc << {
      "name" => rec["name"] || rec["identifier"] || "Unknown",
      "type" => rec["type"],
      "issues" => flagged.map { |issue| { "type" => issue["type"], "message" => issue["message"] } }
    }
  end
end
|
|
197
|
+
|
|
198
|
+
# Human-readable summary of warnings.
|
|
199
|
+
# Human-readable summary of warnings: one "  [step] message" line per issue.
def format_warnings(warnings)
  return "" if warnings.nil? || warnings.empty?

  warnings.flat_map do |entry|
    (entry["issues"] || []).map { |issue| "  [#{entry['name']}] #{issue['message']}" }
  end.join("\n")
end
|
|
210
|
+
|
|
211
|
+
# Human-readable summary of failed steps (for display and pipeline_failures.txt).
|
|
212
|
+
# Human-readable summary of failed steps (for display and pipeline_failures.txt).
# Each step becomes a "--- Failed step N ---" section with result, worker,
# issues, and an optional indented log tail, separated by blank lines.
def format_failed_steps(failures)
  return "" if failures.nil? || failures.empty?

  sections = failures.each_with_index.flat_map do |step, idx|
    section = [
      "--- Failed step #{idx + 1}: #{step['name']} (#{step['type']}) ---",
      "  Result: #{step['result']}"
    ]
    section << "  Worker: #{step['workerName']}" if step["workerName"]
    section.concat((step["issues"] || []).map { |issue| "  [#{issue['type']}] #{issue['message']}" })
    if step["logSnippet"]
      section << "  Log (last lines):"
      step["logSnippet"].to_s.each_line { |l| section << "    #{l.rstrip}" }
    end
    section << ""
  end
  sections.join("\n")
end
|
|
231
|
+
|
|
232
|
+
# Extract the first failure message from run + timeline (job/record name + result/issue).
|
|
233
|
+
# Extract the first failure message from run + timeline (record name + issue/result).
# Falls back to the overall run result when no failed record exists; nil when passed.
def first_failure(run, timeline = nil)
  records = timeline && timeline["records"].is_a?(Array) ? timeline["records"] : []
  broken = records.find { |rec| rec["result"]&.downcase == "failed" }
  if broken
    label = broken["name"] || broken["identifier"] || "Unknown"
    reason = broken["issues"]&.first&.dig("message") || broken["resultCode"] || broken["result"]
    return "#{label}: #{reason}"
  end

  outcome = run && run["result"]&.downcase
  return "Run result: #{run['result'] || 'unknown'}" if run && !%w[succeeded succeededwithissues].include?(outcome)

  nil
end
|
|
247
|
+
|
|
248
|
+
# Save run (and optional timeline) to artifacts dir for visibility.
|
|
249
|
+
# Also writes pipeline_failures.json and pipeline_failures.txt when there are failed steps.
|
|
250
|
+
# Save run (and optional timeline) to artifacts dir for visibility.
# Writes pipeline_run.json, pipeline_timeline.json, pipeline_result.txt, and —
# when there are failed steps — pipeline_failures.json / pipeline_failures.txt.
# Returns a hash of written paths (plus :failures_detail when failures exist).
def save_pipeline_result(run, timeline, artifacts_dir, fetch_log_snippet: true)
  FileUtils.mkdir_p(artifacts_dir)

  written = {
    run: File.join(artifacts_dir, "pipeline_run.json"),
    timeline: File.join(artifacts_dir, "pipeline_timeline.json"),
    summary: File.join(artifacts_dir, "pipeline_result.txt")
  }
  File.write(written[:run], JSON.pretty_generate(run || {}))
  File.write(written[:timeline], JSON.pretty_generate(timeline || {}))
  File.write(written[:summary], build_summary(run, timeline))

  failures = failed_steps_details(timeline, fetch_log_snippet: fetch_log_snippet)
  if failures.any?
    failures_json = File.join(artifacts_dir, "pipeline_failures.json")
    failures_txt = File.join(artifacts_dir, "pipeline_failures.txt")
    File.write(failures_json, JSON.pretty_generate(failures))
    File.write(failures_txt, format_failed_steps(failures))
    written[:failures] = failures_txt
    written[:failures_json] = failures_json
    written[:failures_detail] = failures
  end
  written
end
|
|
273
|
+
|
|
274
|
+
# Plain-text summary of a run and its timeline records (for pipeline_result.txt).
def build_summary(run, timeline)
  parts = []
  if run
    parts.concat([
                   "Run: #{run['name']} (id=#{run['id']})",
                   "State: #{run['state']}",
                   "Result: #{run['result']}",
                   "Created: #{run['createdDate']}",
                   "Finished: #{run['finishedDate']}"
                 ])
  end
  if timeline && timeline["records"]
    record_lines = (timeline["records"] || []).map do |rec|
      "  - #{rec['name'] || rec['identifier']}: #{rec['state']} / #{rec['result']}"
    end
    parts.push("", "Records:", *record_lines)
  end
  parts.join("\n")
end
|
|
292
|
+
|
|
293
|
+
# Timestamp dir pattern: 20260224T205001Z (from run_timestamp_dir).
TIMESTAMP_DIR_PATTERN = /\A\d{8}T\d{6}Z\z/

# Cull older pipeline data under base_dir: delete all timestamp-named subdirs except keep_timestamp_dir.
# Only call when the latest pipeline run is successful so that we keep history while failing, then purge once green.
def cull_old_pipeline_data(base_dir, keep_timestamp_dir)
  return unless File.directory?(base_dir)
  return if keep_timestamp_dir.to_s.empty?

  stale = Dir.children(base_dir).select do |entry|
    entry.match?(TIMESTAMP_DIR_PATTERN) && entry != keep_timestamp_dir
  end
  stale.each do |entry|
    dir = File.join(base_dir, entry)
    next unless File.directory?(dir)

    FileUtils.rm_rf(dir)
    puts "Culled old pipeline data: #{dir}"
  end
end
|
|
313
|
+
|
|
314
|
+
# Sync pipeline run data to a directory: fetch latest (or latest completed) run, save to base_dir/<timestamp>/,
|
|
315
|
+
# cull old data if run passed. Returns a result hash for report(); does not output. Set CI_FETCH_LOG_SNIPPET=0 to skip log snippets.
|
|
316
|
+
def sync_pipelines(base_dir, top: 20)
|
|
317
|
+
run = latest_run_or_latest_completed(top: top, date_scope: nil)
|
|
318
|
+
return { passed: false, failure_message: "check_ci: no pipeline run found" } unless run
|
|
319
|
+
|
|
320
|
+
timestamp_dir = run_timestamp_dir(run)
|
|
321
|
+
output_dir = File.join(base_dir, timestamp_dir)
|
|
322
|
+
timeline = get_timeline(run["id"])
|
|
323
|
+
fetch_log = ENV["CI_FETCH_LOG_SNIPPET"] != "0"
|
|
324
|
+
paths = save_pipeline_result(run, timeline, output_dir, fetch_log_snippet: fetch_log)
|
|
325
|
+
|
|
326
|
+
if run_passed?(run)
|
|
327
|
+
cull_old_pipeline_data(base_dir, timestamp_dir)
|
|
328
|
+
warnings = warning_details(timeline)
|
|
329
|
+
return { passed: true, paths: paths, warnings: warnings }
|
|
330
|
+
else
|
|
331
|
+
msg = first_failure(run, timeline) || "Pipeline result: #{run['result']}"
|
|
332
|
+
return { passed: false, failure_message: msg, paths: paths, failures_detail: paths[:failures_detail] }
|
|
333
|
+
end
|
|
334
|
+
rescue RuntimeError => e
|
|
335
|
+
return { error: e.message } if e.message.include?("HTTP 401")
|
|
336
|
+
raise
|
|
337
|
+
end
|
|
338
|
+
|
|
339
|
+
# Report sync result to stdout and abort on failure or 401. Result comes from sync_pipelines.
|
|
340
|
+
def report(result)
|
|
341
|
+
if result[:error]
|
|
342
|
+
if result[:error].include?("HTTP 401")
|
|
343
|
+
abort([
|
|
344
|
+
"check_ci: Azure DevOps returned 401 Unauthorized.",
|
|
345
|
+
"Use a valid PAT with Build (Read): https://dev.azure.com → User Settings → Personal access tokens.",
|
|
346
|
+
"Set AZURE_DEVOPS_TOKEN or MUSCO_AZURE_DEVOPS_TOKEN. To skip CI check: SKIP_CHECK_CI=1 rake"
|
|
347
|
+
].join("\n"))
|
|
348
|
+
end
|
|
349
|
+
abort(result[:error])
|
|
350
|
+
end
|
|
351
|
+
|
|
352
|
+
if result[:passed] == false && result[:paths].nil?
|
|
353
|
+
abort(result[:failure_message])
|
|
354
|
+
end
|
|
355
|
+
|
|
356
|
+
paths = result[:paths] || {}
|
|
357
|
+
puts "Pipeline result written to #{paths[:summary]}"
|
|
358
|
+
puts "Failed steps detail written to #{paths[:failures]}" if paths[:failures]
|
|
359
|
+
|
|
360
|
+
if result[:passed]
|
|
361
|
+
# For successful runs, always show warnings when present (from sync_pipelines warning_details).
|
|
362
|
+
warnings = result[:warnings] || []
|
|
363
|
+
if warnings.any?
|
|
364
|
+
puts "\n--- Pipeline warnings ---"
|
|
365
|
+
puts format_warnings(warnings)
|
|
366
|
+
puts ""
|
|
367
|
+
end
|
|
368
|
+
puts "success"
|
|
369
|
+
else
|
|
370
|
+
failures_detail = result[:failures_detail]
|
|
371
|
+
puts "\n--- Failed build steps ---" if failures_detail&.any?
|
|
372
|
+
puts format_failed_steps(failures_detail) if failures_detail&.any?
|
|
373
|
+
puts result[:failure_message]
|
|
374
|
+
abort(result[:failure_message])
|
|
375
|
+
end
|
|
376
|
+
end
|
|
377
|
+
end
|
|
378
|
+
end
|
|
@@ -0,0 +1,343 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# GitLab CI/CD Pipelines API client for querying pipeline status and saving results.
|
|
4
|
+
# Requires: GITLAB_URL (optional, default https://gitlab.com), GITLAB_PROJECT (path e.g. "gems-rb/makit"),
|
|
5
|
+
# GITLAB_TOKEN (personal or project access token with read_api scope).
|
|
6
|
+
require "fileutils"
|
|
7
|
+
require "json"
|
|
8
|
+
require "net/http"
|
|
9
|
+
require "time"
|
|
10
|
+
require "uri"
|
|
11
|
+
|
|
12
|
+
module Makit
|
|
13
|
+
class GitLabDevOps
|
|
14
|
+
API_VERSION = "v4"
|
|
15
|
+
DEFAULT_BASE_URL = "https://gitlab.com"
|
|
16
|
+
|
|
17
|
+
def initialize(project:, token:, base_url: nil)
|
|
18
|
+
@project = project.to_s
|
|
19
|
+
@project_encoded = URI.encode_www_form_component(@project)
|
|
20
|
+
@token = token
|
|
21
|
+
@base_url = (base_url || ENV["GITLAB_URL"] || DEFAULT_BASE_URL).to_s.sub(%r{/+$}, "")
|
|
22
|
+
end
|
|
23
|
+
|
|
24
|
+
def base_uri(path, query = {})
|
|
25
|
+
q = query.empty? ? "" : "?#{URI.encode_www_form(query)}"
|
|
26
|
+
URI("#{@base_url}/api/#{API_VERSION}/projects/#{@project_encoded}/#{path}#{q}")
|
|
27
|
+
end
|
|
28
|
+
|
|
29
|
+
def get(path, query = {})
|
|
30
|
+
uri = path.start_with?("http") ? URI(path) : base_uri(path, query)
|
|
31
|
+
request(uri)
|
|
32
|
+
end
|
|
33
|
+
|
|
34
|
+
def get_url(full_url)
|
|
35
|
+
uri = URI(full_url)
|
|
36
|
+
request(uri, parse_json: false)
|
|
37
|
+
end
|
|
38
|
+
|
|
39
|
+
def request(uri, parse_json: true)
|
|
40
|
+
req = Net::HTTP::Get.new(uri)
|
|
41
|
+
req["PRIVATE-TOKEN"] = @token
|
|
42
|
+
req["Accept"] = parse_json ? "application/json" : "text/plain"
|
|
43
|
+
resp = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == "https") { |http| http.request(req) }
|
|
44
|
+
raise "HTTP #{resp.code}: #{resp.body}" unless resp.is_a?(Net::HTTPSuccess)
|
|
45
|
+
|
|
46
|
+
parse_json ? JSON.parse(resp.body) : resp.body
|
|
47
|
+
end
|
|
48
|
+
|
|
49
|
+
# List recent pipeline runs; returns array of runs (newest first).
|
|
50
|
+
# GitLab returns array directly; we normalize to a run-like hash with id, status, created_at, updated_at, finished_at.
|
|
51
|
+
def list_runs(top: 10)
|
|
52
|
+
data = get("pipelines", { per_page: [top, 100].min, order_by: "updated_at", sort: "desc" })
|
|
53
|
+
runs = data.is_a?(Array) ? data : []
|
|
54
|
+
runs.first(top)
|
|
55
|
+
end
|
|
56
|
+
|
|
57
|
+
def latest_run
|
|
58
|
+
list_runs(top: 1).first
|
|
59
|
+
end
|
|
60
|
+
|
|
61
|
+
# Get the latest run that has completed (status in success, failed, canceled, skipped).
|
|
62
|
+
# If the latest run is not yet completed, returns the most recent completed run from the list.
|
|
63
|
+
def latest_run_or_latest_completed(top: 20, date_scope: nil)
|
|
64
|
+
runs = list_runs(top: top)
|
|
65
|
+
latest = runs.first
|
|
66
|
+
completed_statuses = %w[success failed canceled skipped]
|
|
67
|
+
if latest && completed_statuses.include?(latest["status"]&.downcase)
|
|
68
|
+
return latest if !date_scope || run_on_date?(latest, date_scope)
|
|
69
|
+
end
|
|
70
|
+
|
|
71
|
+
completed = runs.find { |r| completed_statuses.include?(r["status"]&.downcase) && (!date_scope || run_on_date?(r, date_scope)) }
|
|
72
|
+
completed || latest
|
|
73
|
+
end
|
|
74
|
+
|
|
75
|
+
def run_on_date?(run, date_scope)
|
|
76
|
+
return true unless date_scope
|
|
77
|
+
|
|
78
|
+
ts = run["finished_at"] || run["updated_at"] || run["created_at"].to_s
|
|
79
|
+
ts.to_s.start_with?(date_scope)
|
|
80
|
+
end
|
|
81
|
+
|
|
82
|
+
# Build a timestamp-based directory name from a run.
|
|
83
|
+
# Uses finished_at, updated_at, or created_at; format: 20260224T205001Z.
|
|
84
|
+
# Build a timestamp-based directory name from a run.
# Uses finished_at, updated_at, or created_at; format: 20260224T205001Z.
# Returns "unknown" for nil runs, missing timestamps, or unparseable values.
def run_timestamp_dir(run)
  return "unknown" unless run

  ts = run["finished_at"] || run["updated_at"] || run["created_at"]
  return "unknown" unless ts

  # Rescue TypeError as well: Time.parse raises it for non-String values,
  # which the API/JSON can hand back; previously only ArgumentError was caught.
  begin
    Time.parse(ts).utc.strftime("%Y%m%dT%H%M%SZ")
  rescue ArgumentError, TypeError
    "unknown"
  end
end
|
|
97
|
+
|
|
98
|
+
def get_run(pipeline_id)
|
|
99
|
+
get("pipelines/#{pipeline_id}")
|
|
100
|
+
end
|
|
101
|
+
|
|
102
|
+
# Get jobs for a pipeline (GitLab equivalent of timeline). Returns hash with "records" (array of jobs)
|
|
103
|
+
# so callers can use the same pattern as Azure DevOps (timeline["records"]).
|
|
104
|
+
def get_timeline(pipeline_id)
|
|
105
|
+
jobs = get("pipelines/#{pipeline_id}/jobs", { per_page: 100 })
|
|
106
|
+
jobs = jobs.is_a?(Array) ? jobs : []
|
|
107
|
+
{ "records" => jobs }
|
|
108
|
+
rescue StandardError => e
|
|
109
|
+
return nil if e.message.include?("404")
|
|
110
|
+
|
|
111
|
+
raise
|
|
112
|
+
end
|
|
113
|
+
|
|
114
|
+
# True if pipeline status is success.
|
|
115
|
+
# True if pipeline status is success.
def run_passed?(run)
  return false unless run

  run["status"]&.downcase == "success"
end
|
|
118
|
+
|
|
119
|
+
# Extract detailed info for all failed jobs from the timeline.
|
|
120
|
+
# Returns array of hashes: name, type (stage), identifier (id), result (status), failure_reason, logUrl, logSnippet (if fetched).
|
|
121
|
+
# Extract detailed info for all failed jobs from the timeline.
# Returns array of hashes: name, type (stage), identifier (id), result (status),
# resultCode (failure_reason), workerName, issues, logUrl, logSnippet (if fetched).
def failed_steps_details(timeline, fetch_log_snippet: true, log_snippet_lines: 80)
  return [] unless timeline && timeline["records"].is_a?(Array)

  failed = timeline["records"].select { |r| r["status"]&.downcase == "failed" }
  failed.map do |r|
    job_id = r["id"]
    # Fix: dig(...).to_s turned a missing runner into "", which is truthy in Ruby
    # and made format_failed_steps print an empty "  Worker:" line. Keep nil when absent.
    worker = r.dig("runner", "description")
    detail = {
      "name" => r["name"] || "job_#{job_id}",
      "type" => r["stage"] || "job",
      "identifier" => job_id,
      "result" => r["status"],
      "resultCode" => r["failure_reason"],
      "workerName" => (worker unless worker.to_s.empty?),
      "startTime" => r["started_at"],
      "finishTime" => r["finished_at"],
      "issues" => r["failure_reason"] ? [{ "type" => "failure", "message" => r["failure_reason"] }] : [],
      "logUrl" => job_id ? "#{@base_url}/api/#{API_VERSION}/projects/#{@project_encoded}/jobs/#{job_id}/trace" : nil
    }
    if fetch_log_snippet && detail["logUrl"]
      snippet = fetch_log_snippet_content(detail["logUrl"], lines: log_snippet_lines)
      detail["logSnippet"] = snippet.to_s.encode("UTF-8", invalid: :replace, undef: :replace) if snippet && !snippet.to_s.empty?
    end
    detail.delete("logSnippet") if detail["logSnippet"].to_s.empty?
    detail
  end
end
|
|
147
|
+
|
|
148
|
+
def fetch_log_snippet_content(trace_url, lines: 80)
|
|
149
|
+
raw = get_url(trace_url)
|
|
150
|
+
return nil if raw.to_s.empty?
|
|
151
|
+
|
|
152
|
+
body = raw.to_s.encode("UTF-8", invalid: :replace, undef: :replace)
|
|
153
|
+
all_lines = body.split("\n")
|
|
154
|
+
snippet = all_lines.size <= lines ? body : "... (#{all_lines.size - lines} lines omitted) ...\n" + all_lines.last(lines).join("\n")
|
|
155
|
+
snippet
|
|
156
|
+
rescue StandardError => e
|
|
157
|
+
"[Log fetch error: #{e.message}]"
|
|
158
|
+
end
|
|
159
|
+
|
|
160
|
+
# GitLab jobs don't have a separate "issues" array for warnings; we could parse log or use allow_failure.
|
|
161
|
+
# Return empty array for compatibility with report().
|
|
162
|
+
def warning_details(timeline)
|
|
163
|
+
return [] unless timeline && timeline["records"].is_a?(Array)
|
|
164
|
+
|
|
165
|
+
timeline["records"].select { |r| r["allow_failure"] && r["status"]&.downcase == "failed" }.map do |r|
|
|
166
|
+
{
|
|
167
|
+
"name" => r["name"] || r["id"].to_s,
|
|
168
|
+
"type" => r["stage"] || "job",
|
|
169
|
+
"issues" => [{ "type" => "allow_failure", "message" => (r["failure_reason"] || "Job failed but allow_failure is true") }]
|
|
170
|
+
}
|
|
171
|
+
end
|
|
172
|
+
end
|
|
173
|
+
|
|
174
|
+
def format_warnings(warnings)
|
|
175
|
+
return "" if warnings.nil? || warnings.empty?
|
|
176
|
+
|
|
177
|
+
out = []
|
|
178
|
+
warnings.each do |w|
|
|
179
|
+
(w["issues"] || []).each do |iss|
|
|
180
|
+
out << " [#{w['name']}] #{iss['message']}"
|
|
181
|
+
end
|
|
182
|
+
end
|
|
183
|
+
out.join("\n")
|
|
184
|
+
end
|
|
185
|
+
|
|
186
|
+
def format_failed_steps(failures)
|
|
187
|
+
return "" if failures.nil? || failures.empty?
|
|
188
|
+
|
|
189
|
+
out = []
|
|
190
|
+
failures.each_with_index do |f, i|
|
|
191
|
+
out << "--- Failed step #{i + 1}: #{f['name']} (#{f['type']}) ---"
|
|
192
|
+
out << " Result: #{f['result']}"
|
|
193
|
+
out << " Failure: #{f['resultCode']}" if f["resultCode"]
|
|
194
|
+
out << " Worker: #{f['workerName']}" if f["workerName"]
|
|
195
|
+
(f["issues"] || []).each do |iss|
|
|
196
|
+
out << " [#{iss['type']}] #{iss['message']}"
|
|
197
|
+
end
|
|
198
|
+
if f["logSnippet"]
|
|
199
|
+
out << " Log (last lines):"
|
|
200
|
+
f["logSnippet"].to_s.each_line { |line| out << " #{line.rstrip}" }
|
|
201
|
+
end
|
|
202
|
+
out << ""
|
|
203
|
+
end
|
|
204
|
+
out.join("\n")
|
|
205
|
+
end
|
|
206
|
+
|
|
207
|
+
# Extract the first failure message from run + timeline (job name + failure reason/status).
# Falls back to the overall pipeline status when no failed job exists; nil when passed.
def first_failure(run, timeline = nil)
  jobs = (timeline && timeline["records"]) || []
  culprit = jobs.find { |job| job["status"]&.downcase == "failed" }
  return "#{culprit['name'] || culprit['id']}: #{culprit['failure_reason'] || culprit['status']}" if culprit

  return nil unless run && run["status"]&.downcase != "success"

  "Pipeline result: #{run['status'] || 'unknown'}"
end
|
|
220
|
+
|
|
221
|
+
def save_pipeline_result(run, timeline, artifacts_dir, fetch_log_snippet: true)
|
|
222
|
+
FileUtils.mkdir_p(artifacts_dir)
|
|
223
|
+
run_path = File.join(artifacts_dir, "pipeline_run.json")
|
|
224
|
+
File.write(run_path, JSON.pretty_generate(run || {}))
|
|
225
|
+
timeline_path = File.join(artifacts_dir, "pipeline_timeline.json")
|
|
226
|
+
File.write(timeline_path, JSON.pretty_generate(timeline || {}))
|
|
227
|
+
summary_path = File.join(artifacts_dir, "pipeline_result.txt")
|
|
228
|
+
summary = build_summary(run, timeline)
|
|
229
|
+
File.write(summary_path, summary)
|
|
230
|
+
result = { run: run_path, timeline: timeline_path, summary: summary_path }
|
|
231
|
+
|
|
232
|
+
failures = failed_steps_details(timeline, fetch_log_snippet: fetch_log_snippet)
|
|
233
|
+
if failures.any?
|
|
234
|
+
failures_path = File.join(artifacts_dir, "pipeline_failures.json")
|
|
235
|
+
File.write(failures_path, JSON.pretty_generate(failures))
|
|
236
|
+
failures_txt_path = File.join(artifacts_dir, "pipeline_failures.txt")
|
|
237
|
+
File.write(failures_txt_path, format_failed_steps(failures))
|
|
238
|
+
result[:failures] = failures_txt_path
|
|
239
|
+
result[:failures_json] = failures_path
|
|
240
|
+
result[:failures_detail] = failures
|
|
241
|
+
end
|
|
242
|
+
result
|
|
243
|
+
end
|
|
244
|
+
|
|
245
|
+
def build_summary(run, timeline)
|
|
246
|
+
lines = []
|
|
247
|
+
if run
|
|
248
|
+
lines << "Pipeline: #{run['id']} (#{run['ref']} @ #{run['sha']&.slice(0, 8)})"
|
|
249
|
+
lines << "Status: #{run['status']}"
|
|
250
|
+
lines << "Created: #{run['created_at']}"
|
|
251
|
+
lines << "Updated: #{run['updated_at']}"
|
|
252
|
+
lines << "Finished: #{run['finished_at']}"
|
|
253
|
+
lines << "Web: #{run['web_url']}" if run["web_url"]
|
|
254
|
+
end
|
|
255
|
+
if timeline && timeline["records"]
|
|
256
|
+
lines << ""
|
|
257
|
+
lines << "Jobs:"
|
|
258
|
+
(timeline["records"] || []).each do |r|
|
|
259
|
+
lines << " - #{r['name'] || r['id']} (#{r['stage']}): #{r['status']}"
|
|
260
|
+
end
|
|
261
|
+
end
|
|
262
|
+
lines.join("\n")
|
|
263
|
+
end
|
|
264
|
+
|
|
265
|
+
# Matches artifact directory names shaped like a compact UTC timestamp,
# e.g. "20240101T000000Z".
TIMESTAMP_DIR_PATTERN = /\A\d{8}T\d{6}Z\z/

# Removes stale timestamp-named pipeline-artifact directories under
# +base_dir+, keeping only +keep_timestamp_dir+. Entries that do not match
# TIMESTAMP_DIR_PATTERN (or are not directories) are left untouched.
#
# @param base_dir [String] root directory holding timestamped artifact dirs
# @param keep_timestamp_dir [String] directory name to preserve
# @return [void]
def cull_old_pipeline_data(base_dir, keep_timestamp_dir)
  # Nothing to do when the root is missing or no keeper was named.
  return if !File.directory?(base_dir) || keep_timestamp_dir.to_s.empty?

  stale_entries = Dir.children(base_dir).select do |entry|
    entry.match?(TIMESTAMP_DIR_PATTERN) && entry != keep_timestamp_dir
  end

  stale_entries.each do |entry|
    candidate = File.join(base_dir, entry)
    # A matching *file* is ignored; only directories are culled.
    next unless File.directory?(candidate)

    FileUtils.rm_rf(candidate)
    puts "Culled old pipeline data: #{candidate}"
  end
end
|
|
282
|
+
|
|
283
|
+
# Fetches the latest pipeline run, writes its artifacts under a
# timestamped directory in +base_dir+, and returns a status hash.
#
# On success old timestamp directories are culled and warnings collected;
# on failure the first failing step (or the raw status) is reported.
# A RuntimeError mentioning 401/403 is converted into an { error: ... }
# result so callers can render an auth hint; other errors propagate.
#
# @param base_dir [String] root directory for pipeline artifacts
# @param top [Integer] how many recent runs to consider
# @return [Hash] { passed:, paths:, warnings: } on success,
#   { passed: false, failure_message:, ... } on failure, or { error: }
def sync_pipelines(base_dir, top: 20)
  latest = latest_run_or_latest_completed(top: top, date_scope: nil)
  return { passed: false, failure_message: "check_ci: no pipeline run found" } unless latest

  stamp = run_timestamp_dir(latest)
  timeline = get_timeline(latest["id"])
  # Log snippets are fetched unless explicitly disabled via env var.
  snippets_enabled = ENV["CI_FETCH_LOG_SNIPPET"] != "0"
  artifact_paths = save_pipeline_result(latest, timeline, File.join(base_dir, stamp),
                                        fetch_log_snippet: snippets_enabled)

  unless run_passed?(latest)
    message = first_failure(latest, timeline) || "Pipeline result: #{latest['status']}"
    return { passed: false, failure_message: message, paths: artifact_paths,
             failures_detail: artifact_paths[:failures_detail] }
  end

  # Only a passing run is allowed to delete older artifact directories.
  cull_old_pipeline_data(base_dir, stamp)
  { passed: true, paths: artifact_paths, warnings: warning_details(timeline) }
rescue RuntimeError => e
  raise unless e.message.include?("401") || e.message.include?("403")

  { error: e.message }
end
|
|
305
|
+
|
|
306
|
+
# Prints a CI result produced by #sync_pipelines and aborts the process
# on any failure or error condition.
#
# - { error: } aborts, with an auth hint when the message mentions 401/403.
# - { passed: false } with no :paths aborts with the failure message.
# - Otherwise artifact paths are echoed; a passing run prints warnings
#   (if any) and "success", a failing run prints failed-step details and
#   aborts with the failure message.
#
# @param result [Hash] result hash from #sync_pipelines
# @return [void] (never returns on failure paths — Kernel#abort exits)
def report(result)
  if (err = result[:error])
    if err.include?("401") || err.include?("403")
      abort(<<~HINT.chomp)
        check_ci: GitLab returned 401/403 Unauthorized.
        Use a valid token with read_api scope: GitLab → User Settings → Access Tokens.
        Set GITLAB_TOKEN or CI_JOB_TOKEN (in CI). To skip CI check: SKIP_CHECK_CI=1 rake
      HINT
    end
    abort(err)
  end

  # NOTE: the explicit `== false` matters — a missing :passed key (nil)
  # must fall through to the failure branch below, not abort here.
  abort(result[:failure_message]) if result[:passed] == false && result[:paths].nil?

  artifact_paths = result[:paths] || {}
  puts "Pipeline result written to #{artifact_paths[:summary]}"
  puts "Failed steps detail written to #{artifact_paths[:failures]}" if artifact_paths[:failures]

  if result[:passed]
    pipeline_warnings = result[:warnings] || []
    if pipeline_warnings.any?
      puts "\n--- Pipeline warnings ---"
      puts format_warnings(pipeline_warnings)
      puts ""
    end
    puts "success"
  else
    details = result[:failures_detail]
    if details&.any?
      puts "\n--- Failed build steps ---"
      puts format_failed_steps(details)
    end
    puts result[:failure_message]
    abort(result[:failure_message])
  end
end
|
|
342
|
+
end
|
|
343
|
+
end
|
data/lib/makit/version.rb
CHANGED
data/lib/makit.rb
CHANGED
|
@@ -24,6 +24,8 @@ require_relative "makit/rubygems"
|
|
|
24
24
|
require_relative "makit/azure/cli"
|
|
25
25
|
require_relative "makit/azure/blob_storage"
|
|
26
26
|
require_relative "makit/azure-pipelines"
|
|
27
|
+
require_relative "makit/azure_devops"
|
|
28
|
+
require_relative "makit/gitlab_devops"
|
|
27
29
|
require_relative "makit/github_actions"
|
|
28
30
|
require_relative "makit/humanize"
|
|
29
31
|
require_relative "makit/directories"
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: makit
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.0.177
|
|
4
|
+
version: 0.0.178
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Lou Parslow
|
|
@@ -238,6 +238,7 @@ files:
|
|
|
238
238
|
- lib/makit/azure-pipelines.rb
|
|
239
239
|
- lib/makit/azure/blob_storage.rb
|
|
240
240
|
- lib/makit/azure/cli.rb
|
|
241
|
+
- lib/makit/azure_devops.rb
|
|
241
242
|
- lib/makit/cli/base.rb
|
|
242
243
|
- lib/makit/cli/build_commands.rb
|
|
243
244
|
- lib/makit/cli/generators/base_generator.rb
|
|
@@ -315,6 +316,7 @@ files:
|
|
|
315
316
|
- lib/makit/github_actions.rb
|
|
316
317
|
- lib/makit/gitlab/pipeline.rb
|
|
317
318
|
- lib/makit/gitlab/pipeline_service_impl.rb
|
|
319
|
+
- lib/makit/gitlab_devops.rb
|
|
318
320
|
- lib/makit/gitlab_runner.rb
|
|
319
321
|
- lib/makit/humanize.rb
|
|
320
322
|
- lib/makit/indexer.rb
|