rakit 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/generated/azure.devops_pb.rb +28 -0
- data/lib/generated/example_pb.rb +18 -0
- data/lib/rakit/azure/dev_ops.rb +417 -0
- data/lib/rakit/ruby_gems.rb +48 -0
- data/lib/rakit.rb +5 -0
- metadata +64 -3
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: d44a8bebc79f4a4234f06fc398f870786dee8bc9b6eb290ad217c69fadc4da4f
|
|
4
|
+
data.tar.gz: b36d1f72c8e9378ce5cb45c6876aad1deb7e2896c90e58a959692c3b0e5557d5
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 29d7c4732c1107dcc831edd0168633c47a65a128f34825875f41fc22694c9591ba6df15a53273c3bf98361bd60133cd6b8e78fc21bb0aa608acda513f02f0a6b
|
|
7
|
+
data.tar.gz: 274fffdf4ccfdeb4b9fb2774d05f6205c3dff94e9839e0d46ac28c71ffd8a0f7e30c236dc600ce88651e2551068ab4e49003b1b4bf3597c9357d0674d1249f48
|
|
# frozen_string_literal: true
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: azure.devops.proto

require 'google/protobuf'

descriptor_data = "\n\x12\x61zure.devops.proto\x12\x0brakit.azure\"=\n\x08Pipeline\x12\x0b\n\x03org\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0bpipeline_id\x18\x03 \x01(\x05\"R\n\x18GetPipelineResultRequest\x12\'\n\x08pipeline\x18\x01 \x01(\x0b\x32\x15.rakit.azure.Pipeline\x12\r\n\x05token\x18\x02 \x01(\t\"C\n\x0ePipelineStatus\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x0e\n\x06\x65rrors\x18\x02 \x03(\t\x12\x10\n\x08warnings\x18\x03 \x03(\t\"8\n\x0ePipelineResult\x12&\n\x04runs\x18\x01 \x03(\x0b\x32\x18.rakit.azure.PipelineRun\"\x97\x01\n\x0bPipelineRun\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05state\x18\x03 \x01(\t\x12\x0e\n\x06result\x18\x04 \x01(\t\x12\x14\n\x0c\x63reated_date\x18\x05 \x01(\t\x12\x15\n\rfinished_date\x18\x06 \x01(\t\x12\"\n\x06stages\x18\x07 \x03(\x0b\x32\x12.rakit.azure.Stage\"&\n\x05Issue\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\"G\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06result\x18\x02 \x01(\t\x12\"\n\x06issues\x18\x03 \x03(\x0b\x32\x12.rakit.azure.Issue\"i\n\x05Stage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06result\x18\x02 \x01(\t\x12\x1e\n\x04jobs\x18\x03 \x03(\x0b\x32\x10.rakit.azure.Job\x12\"\n\x06issues\x18\x04 \x03(\x0b\x32\x12.rakit.azure.Issue\"`\n\x0eTimelineRecord\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0e\n\x06result\x18\x03 \x01(\t\x12\"\n\x06issues\x18\x04 \x03(\x0b\x32\x12.rakit.azure.Issue\"\xa8\x01\n\x14PipelineResultDetail\x12\x12\n\nsuccessful\x18\x01 \x01(\x08\x12\x0e\n\x06\x65rrors\x18\x02 \x01(\t\x12\x10\n\x08warnings\x18\x03 \x01(\t\x12%\n\x03run\x18\x04 \x01(\x0b\x32\x18.rakit.azure.PipelineRun\x12\x33\n\x0e\x66\x61iled_records\x18\x05 \x03(\x0b\x32\x1b.rakit.azure.TimelineRecord2i\n\x0ePipelineServer\x12W\n\x11GetPipelineResult\x12%.rakit.azure.GetPipelineResultRequest\x1a\x1b.rakit.azure.PipelineResult B\x1a\xea\x02\x17Rakit::Azure::Generatedb\x06proto3"

pool = ::Google::Protobuf::DescriptorPool.generated_pool
pool.add_serialized_file(descriptor_data)

module Rakit
  module Azure
    module Generated
      # Locals do not cross the `module` scope boundary, so the generated pool
      # is re-fetched here; lookups go through it instead of repeating the
      # full ::Google::Protobuf::DescriptorPool.generated_pool path each time.
      pool = ::Google::Protobuf::DescriptorPool.generated_pool
      Pipeline = pool.lookup("rakit.azure.Pipeline").msgclass
      GetPipelineResultRequest = pool.lookup("rakit.azure.GetPipelineResultRequest").msgclass
      PipelineStatus = pool.lookup("rakit.azure.PipelineStatus").msgclass
      PipelineResult = pool.lookup("rakit.azure.PipelineResult").msgclass
      PipelineRun = pool.lookup("rakit.azure.PipelineRun").msgclass
      Issue = pool.lookup("rakit.azure.Issue").msgclass
      Job = pool.lookup("rakit.azure.Job").msgclass
      Stage = pool.lookup("rakit.azure.Stage").msgclass
      TimelineRecord = pool.lookup("rakit.azure.TimelineRecord").msgclass
      PipelineResultDetail = pool.lookup("rakit.azure.PipelineResultDetail").msgclass
    end
  end
end
|
# frozen_string_literal: true
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: example.proto

require 'google/protobuf'

descriptor_data = "\n\rexample.proto\x12\rrakit.example\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\" \n\rHelloResponse\x12\x0f\n\x07message\x18\x01 \x01(\tB\x13\xea\x02\x10Rakit::Generatedb\x06proto3"

pool = ::Google::Protobuf::DescriptorPool.generated_pool
pool.add_serialized_file(descriptor_data)

module Rakit
  module Generated
    # Locals do not cross the `module` scope boundary, so the generated pool
    # is re-fetched here and lookups go through it rather than repeating the
    # full ::Google::Protobuf::DescriptorPool.generated_pool path.
    pool = ::Google::Protobuf::DescriptorPool.generated_pool
    HelloRequest = pool.lookup("rakit.example.HelloRequest").msgclass
    HelloResponse = pool.lookup("rakit.example.HelloResponse").msgclass
  end
end
|
# frozen_string_literal: true

# Azure DevOps Pipelines API: summarizes the latest run of a pipeline.
# Requires: AZURE_DEVOPS_ORG, AZURE_DEVOPS_PROJECT, AZURE_DEVOPS_PIPELINE_ID,
# AZURE_DEVOPS_TOKEN (PAT with Build Read) when no pipeline value is passed.
require "json"
require "net/http"
require "uri"

begin
  require "generated/azure.devops_pb"
rescue LoadError, StandardError
  # google-protobuf not available or version conflict (e.g. running without
  # bundle exec). LoadError must be listed explicitly: it is a ScriptError,
  # not a StandardError, so `rescue StandardError` alone would not catch a
  # missing gem and the require would crash the load.
end

module Rakit
  module Azure
    module DevOps
      # Use generated proto PipelineStatus when available; otherwise fallback with same interface (success, errors, warnings).
      Generated = defined?(Rakit::Azure::Generated) ? Rakit::Azure::Generated : nil
      # Pipeline at Rakit::Azure::Pipeline: proto when generated code loaded, else Struct (org, project, pipeline_id, token).
      Rakit::Azure.const_set(:Pipeline, Generated && Generated.const_defined?(:Pipeline) ? Generated::Pipeline : Struct.new(:org, :project, :pipeline_id, :token, keyword_init: true)) unless Rakit::Azure.const_defined?(:Pipeline)
      PipelineStatusFallback = Struct.new(:success, :errors, :warnings, keyword_init: true)

      # Value types matching proto/azure.devops.proto (for get_pipeline_result; PipelineStatus uses generated proto when loaded).
      Issue = Struct.new(:type, :message, keyword_init: true)
      Job = Struct.new(:name, :result, :issues, keyword_init: true)
      Stage = Struct.new(:name, :result, :jobs, :issues, keyword_init: true)
      PipelineRun = Struct.new(:id, :name, :state, :result, :created_date, :finished_date, :stages, keyword_init: true)
      # Matches proto PipelineResult: runs + optional warnings (e.g. token not set).
      PipelineResult = Struct.new(:runs, :warnings, keyword_init: true)

      API_VERSION = "7.1"
      BASE_URL = "https://dev.azure.com"

      class << self
        # Returns the latest pipeline status (PipelineStatus: success, errors, warnings).
        # When pipeline is given, uses its org, project, pipeline_id, token (token may be an ENV var name).
        # When pipeline is nil, uses ENV: AZURE_DEVOPS_ORG, AZURE_DEVOPS_PROJECT, AZURE_DEVOPS_PIPELINE_ID, AZURE_DEVOPS_TOKEN.
        # Missing configuration is reported as a warning (not an error); API failures as errors.
        #
        # @param pipeline [Object, nil] Pipeline message value (org, project, pipeline_id, token) or nil for ENV
        # @return [PipelineStatus] success (bool), errors (repeated string), warnings (repeated string)
        #
        # @example
        #   status = Rakit::Azure::DevOps.get_pipeline_status(pipeline: my_pipeline)
        #   if status.success
        #     puts "Pipeline passed"
        #   else
        #     warn "Pipeline failed: #{status.errors.join("\n")}"
        #   end
        def get_pipeline_status(pipeline: nil)
          org, project, pipeline_id, token = _resolve_pipeline_config(pipeline)

          # Incomplete config (nil or empty pieces) => warn and skip the check.
          if [org, project, pipeline_id, token].any? { |v| v.nil? || v.empty? }
            return _pipeline_status_new(success: false, errors: [], warnings: [_token_not_set_message(pipeline)])
          end

          if _token_unresolved?(pipeline, token)
            return _pipeline_status_new(success: false, errors: [], warnings: [_token_not_set_message(pipeline)])
          end

          run = _list_runs(org, project, pipeline_id.to_s, token).first
          return _pipeline_status_new(success: false, errors: ["No pipeline run found"], warnings: []) unless run

          timeline = _get_timeline_for_run(org, project, pipeline_id.to_s, token, run)
          success = _run_passed?(run)
          errors_str = _format_failed_steps(_failed_steps_details(timeline))
          # Fall back to a run-level summary when the timeline carries no failed-step detail.
          errors_str = _run_result_summary(run) if errors_str.to_s.strip.empty? && !success
          warnings_str = _format_warnings(_warning_details(timeline))
          errors_list = _split_lines(errors_str)
          errors_list = ["(no details)"] if errors_list.empty? && !success
          warnings_list = _split_lines(warnings_str)

          _pipeline_status_new(success: success, errors: errors_list, warnings: warnings_list)
        rescue StandardError => e
          _pipeline_status_new(success: false, errors: ["Pipeline status error: #{e.message}"], warnings: [])
        end

        # Returns a PipelineResult (runs + warnings) for the given pipeline.
        # For a success/errors/warnings summary use get_pipeline_status.
        # When pipeline is given, uses its org, project, pipeline_id, token (token may be an ENV var name).
        # When pipeline is nil, uses ENV.
        #
        # @param pipeline [Object, nil] Pipeline message value (org, project, pipeline_id, token) or nil for ENV
        # @return [PipelineResult] .runs (array of PipelineRun), .warnings (array of String)
        def get_pipeline_result(pipeline: nil)
          org, project, pipeline_id, token = _resolve_pipeline_config(pipeline)

          if [org, project, pipeline_id, token].any? { |v| v.nil? || v.empty? } || _token_unresolved?(pipeline, token)
            return PipelineResult.new(runs: [], warnings: [_token_not_set_message(pipeline)])
          end

          run = _list_runs(org, project, pipeline_id, token).first
          return PipelineResult.new(runs: [], warnings: []) unless run

          timeline = _get_timeline_for_run(org, project, pipeline_id, token, run)
          PipelineResult.new(runs: [_run_to_pipeline_run(run, timeline)], warnings: [])
        rescue StandardError => e
          # Surface the failure via warnings instead of silently returning an
          # empty result that is indistinguishable from "no runs".
          PipelineResult.new(runs: [], warnings: ["Pipeline result error: #{e.message}"])
        end

        # Converts a PipelineResult (Struct or proto) to a hash recursively for JSON.
        # @param result [PipelineResult]
        # @return [Hash]
        def pipeline_result_to_h(result)
          _to_h_deep(result)
        end

        # Returns pretty-printed JSON for a PipelineResult.
        # @param result [PipelineResult]
        # @return [String]
        def pipeline_result_pretty_json(result)
          JSON.pretty_generate(pipeline_result_to_h(result))
        end

        # Converts a PipelineStatus (Struct or proto) to a hash for JSON.
        # @param status [PipelineStatus]
        # @return [Hash]
        def pipeline_status_to_h(status)
          _to_h_deep(status)
        end

        # Returns pretty-printed JSON for a PipelineStatus.
        # @param status [PipelineStatus]
        # @return [String]
        def pipeline_status_pretty_json(status)
          JSON.pretty_generate(pipeline_status_to_h(status))
        end

        private

        # Recursively converts Structs, Hashes, Arrays (and anything
        # responding to #to_h, e.g. proto messages) into plain Hash/Array.
        def _to_h_deep(obj)
          case obj
          when Struct
            obj.to_h.transform_values { |v| _to_h_deep(v) }
          when Array
            obj.map { |e| _to_h_deep(e) }
          when Hash
            obj.transform_values { |v| _to_h_deep(v) }
          else
            obj.respond_to?(:to_h) ? obj.to_h.transform_values { |v| _to_h_deep(v) } : obj
          end
        end

        # Splits a newline-joined message blob into a clean list of lines.
        def _split_lines(text)
          str = text.to_s
          return [] if str.strip.empty?

          str.split("\n").map(&:strip).reject(&:empty?)
        end

        # Resolves [org, project, pipeline_id, token] from the pipeline value
        # or, when it is nil, from the AZURE_DEVOPS_* environment variables.
        # A pipeline token that names a set ENV var is dereferenced; otherwise
        # the raw value is used as-is.
        def _resolve_pipeline_config(pipeline)
          if pipeline
            org = _attr(pipeline, :org)
            project = _attr(pipeline, :project)
            pipeline_id = _attr(pipeline, :pipeline_id)&.to_s
            token_raw = _attr(pipeline, :token).to_s
            token = token_raw.empty? ? "" : (ENV[token_raw].to_s.empty? ? token_raw : ENV[token_raw].to_s)
            [org, project, pipeline_id, token]
          else
            [
              ENV["AZURE_DEVOPS_ORG"],
              ENV["AZURE_DEVOPS_PROJECT"],
              ENV["AZURE_DEVOPS_PIPELINE_ID"]&.to_s,
              ENV["AZURE_DEVOPS_TOKEN"]
            ]
          end
        end

        # Reads an attribute from either a message-like object or a Hash.
        def _attr(obj, key)
          obj.respond_to?(key) ? obj.public_send(key) : obj[key]
        end

        # True when no usable token was resolved: blank, or the pipeline's
        # token looks like an ENV var name that is not set.
        def _token_unresolved?(pipeline, token)
          return true if token.to_s.strip.empty?
          return false unless pipeline

          token_raw = _attr(pipeline, :token).to_s
          token_raw == token.to_s && ENV[token_raw].to_s.strip.empty?
        end

        # Human-readable "token missing" warning naming the expected variable.
        def _token_not_set_message(pipeline)
          name = pipeline && !_attr(pipeline, :token).to_s.empty? ? _attr(pipeline, :token).to_s : "AZURE_DEVOPS_TOKEN"
          "Token not set (e.g. set #{name}); skipping pipeline check"
        end

        # Builds a PipelineStatus: generated proto class when loaded, Struct fallback otherwise.
        def _pipeline_status_new(success:, errors:, warnings:)
          if Generated && Generated.const_defined?(:PipelineStatus)
            Generated::PipelineStatus.new(success: success, errors: errors, warnings: warnings)
          else
            PipelineStatusFallback.new(success: success, errors: errors, warnings: warnings)
          end
        end

        # Builds the dev.azure.com API URI for the given path, appending api-version.
        # NOTE(review): encode_www_form_component form-encodes path segments
        # (space -> "+"); org/project names with spaces may need path escaping
        # instead — confirm against real project names before changing.
        def _base_uri(org, project, path)
          sep = path.include?("?") ? "&" : "?"
          URI("#{BASE_URL}/#{URI.encode_www_form_component(org)}/#{URI.encode_www_form_component(project)}/_apis/#{path}#{sep}api-version=#{API_VERSION}")
        end

        # GET an _apis path for the org/project; delegates to _get_url
        # (previously a verbatim copy of it).
        def _get(org, project, token, path)
          _get_url(_base_uri(org, project, path), token)
        end

        # GET an absolute URI with PAT basic auth; raises on non-2xx,
        # returns parsed JSON (or {} for an empty body).
        def _get_url(uri, token)
          req = Net::HTTP::Get.new(uri)
          req.basic_auth("", token)
          req["Accept"] = "application/json"
          resp = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == "https") { |http| http.request(req) }
          raise "HTTP #{resp.code}: #{resp.body}" unless resp.is_a?(Net::HTTPSuccess)

          body = resp.body.to_s.strip
          body.empty? ? {} : JSON.parse(body)
        end

        # Lists the most recent runs of a pipeline (API returns newest first).
        def _list_runs(org, project, pipeline_id, token, top: 10)
          data = _get(org, project, token, "pipelines/#{pipeline_id}/runs")
          (data["value"] || []).first(top)
        end

        # Fetches a build by id; nil when the build does not exist (404).
        def _get_build(org, project, token, build_id)
          _get(org, project, token, "build/builds/#{build_id}")
        rescue StandardError => e
          return nil if e.message.include?("404")

          raise
        end

        # Resolves the build id for a run. Run ids usually equal build ids;
        # when not, falls back to matching the build by buildNumber (run name).
        # Returns [build_id, build_or_nil].
        def _resolve_build_id(org, project, token, pipeline_id, run)
          build_id = run["id"]
          build = _get_build(org, project, token, build_id)
          return build_id, build if build

          build_number = run["name"].to_s
          return build_id, nil if build_number.empty?

          data = _get(org, project, token, "build/builds?definitions=#{pipeline_id}&buildNumber=#{URI.encode_www_form_component(build_number)}&$top=1")
          builds = data["value"] || []
          first = builds.first
          return build_id, nil unless first

          resolved_id = first["id"]
          [resolved_id, _get_build(org, project, token, resolved_id)]
        end

        # Fetches the timeline for a run, trying in order: the build's
        # _links.timeline href, the orchestration plan id, the plain
        # timeline endpoint. Returns nil when the build/timeline is missing.
        def _get_timeline_for_run(org, project, pipeline_id, token, run)
          build_id, build = _resolve_build_id(org, project, token, pipeline_id, run)
          build ||= _get_build(org, project, token, build_id)
          return nil unless build

          timeline_url = build.dig("_links", "timeline", "href")
          if timeline_url
            return _get_url(URI(timeline_url), token)
          end

          plan_id = build.dig("orchestrationPlan", "planId") || build.dig("plans", 0, "planId")
          if plan_id
            return _get(org, project, token, "build/builds/#{build_id}/timeline/#{plan_id}")
          end

          _get(org, project, token, "build/builds/#{build_id}/timeline")
        rescue StandardError => e
          return nil if e.message.include?("404")

          raise
        end

        # A run counts as passed when its result is succeeded or succeededWithIssues.
        def _run_passed?(run)
          run && %w[succeeded succeededwithissues].include?(run["result"]&.downcase)
        end

        # One-line failure summary from run-level fields (used when the
        # timeline has no failed-step detail).
        def _run_result_summary(run)
          return "Pipeline run failed (no details)" unless run.is_a?(Hash)

          state = run["state"].to_s
          result = run["result"].to_s
          name = run["name"].to_s
          id = run["id"].to_s
          parts = []
          parts << "Run: #{name} (id=#{id})" if name != "" || id != ""
          parts << "State: #{state}" if state != ""
          parts << "Result: #{result}" if result != ""
          parts.empty? ? "Pipeline run failed (no details)" : parts.join(", ")
        end

        # Extracts failed timeline records as plain hashes (name/type/result/issues).
        def _failed_steps_details(timeline)
          return [] unless timeline && timeline["records"].is_a?(Array)

          timeline["records"]
            .select { |r| r["result"]&.downcase == "failed" }
            .map do |r|
              {
                "name" => r["name"] || r["identifier"] || "Unknown",
                "type" => r["type"],
                "result" => r["result"],
                "issues" => (r["issues"] || []).map { |i| { "type" => i["type"], "message" => i["message"] } }
              }
            end
        end

        # Formats Job/Task failures as "Type: name" headers plus issue messages.
        def _format_failed_steps(failures)
          return "" if failures.nil? || failures.empty?

          job_or_task = failures.select { |f| %w[job task].include?((f["type"] || "").to_s.downcase) }
          job_or_task.map do |f|
            type_label = (f["type"] || "").to_s.capitalize
            lines = ["#{type_label}: #{f['name']}"]
            (f["issues"] || []).each { |iss| lines << iss["message"].to_s.strip }
            lines.reject(&:empty?).join("\n")
          end.reject(&:empty?).join("\n\n")
        end

        # Collects timeline records that carry warning-type issues.
        def _warning_details(timeline)
          return [] unless timeline && timeline["records"].is_a?(Array)

          timeline["records"].each_with_object([]) do |r, out|
            next unless r["issues"].is_a?(Array)

            warn_issues = r["issues"].select { |i| i["type"]&.downcase&.include?("warning") }
            next if warn_issues.empty?

            out << {
              "name" => r["name"] || r["identifier"] || "Unknown",
              "issues" => warn_issues.map { |i| { "type" => i["type"], "message" => i["message"] } }
            }
          end
        end

        # Formats warning details as " [record] message" lines.
        def _format_warnings(warnings)
          return "" if warnings.nil? || warnings.empty?

          warnings.flat_map do |w|
            (w["issues"] || []).map { |iss| " [#{w['name']}] #{iss['message']}" }
          end.join("\n")
        end

        # Maps an API run hash + its timeline to a PipelineRun value.
        def _run_to_pipeline_run(run, timeline)
          stages = _timeline_to_stages(timeline)
          PipelineRun.new(
            id: run["id"],
            name: run["name"].to_s,
            state: run["state"].to_s,
            result: run["result"].to_s,
            created_date: run["createdDate"].to_s,
            finished_date: run["finishedDate"].to_s,
            stages: stages
          )
        end

        # Rebuilds the Stage -> Job -> Issue tree from flat timeline records.
        def _timeline_to_stages(timeline)
          return [] unless timeline && timeline["records"].is_a?(Array)

          records = timeline["records"]
          # Stage or Phase: top-level container; Job (or Task): child of stage/phase
          stage_records = records.select { |r| %w[stage phase].include?(r["type"].to_s.downcase) }
          job_records = records.select { |r| r["type"].to_s.casecmp("job").zero? }
          task_records = records.select { |r| r["type"].to_s.casecmp("task").zero? }
          # Group by parent id (and parentIdentifier) so lookup works whether API uses id or identifier
          child_by_parent = proc { |list|
            list.each_with_object(Hash.new { |h, k| h[k] = [] }) do |r, h|
              key = (r["parentId"] || r["parent_id"]).to_s
              h[key] << r
              key2 = (r["parentIdentifier"] || r["parent_identifier"]).to_s
              h[key2] << r if key2 != key && !key2.empty?
            end
          }
          jobs_by_parent = child_by_parent.call(job_records)
          tasks_by_parent = child_by_parent.call(task_records)

          stage_records.map do |sr|
            parent_key = sr["id"].to_s
            parent_key_alt = sr["identifier"].to_s
            job_list = (jobs_by_parent[parent_key] || []) + (jobs_by_parent[parent_key_alt] || [])
            # If no Job records (e.g. API uses different type or flat structure), treat Task records as jobs
            if job_list.empty?
              task_list = (tasks_by_parent[parent_key] || []) + (tasks_by_parent[parent_key_alt] || [])
              job_list = task_list.uniq
            end
            jobs = job_list.uniq.map do |jr|
              job_issues = _issues_from_record(jr)
              jr_id = jr["id"].to_s
              jr_ident = jr["identifier"].to_s
              child_tasks = (tasks_by_parent[jr_id] || []) + (tasks_by_parent[jr_ident] || [])
              task_issues = child_tasks.uniq.flat_map { |t| _issues_from_record(t) }
              Job.new(
                name: jr["name"].to_s,
                result: jr["result"].to_s,
                issues: job_issues + task_issues
              )
            end
            Stage.new(
              name: sr["name"].to_s,
              result: sr["result"].to_s,
              jobs: jobs,
              issues: _issues_from_record(sr)
            )
          end
        end

        # Maps a record's issue hashes to Issue values.
        def _issues_from_record(record)
          (record["issues"] || []).map { |i| Issue.new(type: i["type"].to_s, message: i["message"].to_s) }
        end
      end
    end
  end
end
|
# frozen_string_literal: true

module Rakit
  # Helpers for versioning and publishing this gem to rubygems.org.
  module RubyGems
    # Bump the last segment of the version in the gemspec file
    # (e.g. "0.1.0" -> "0.1.1", "0.1.9" -> "0.1.10"). Writes the file in
    # place. Matches any gemspec block-variable name ("s.version",
    # "spec.version", ...), not just "s".
    #
    # @param gemspec_path [String] path to the .gemspec file
    # @return [String] the new version string
    # @raise [RuntimeError] when no version assignment line is found
    def self.bump(gemspec_path)
      content = File.read(gemspec_path)
      # \w+ generalizes the receiver: gemspecs commonly use `spec` as well as `s`.
      content.sub!(/^(\s*\w+\.version\s*=\s*["'])([\d.]+)(["'])/) do
        segs = Regexp.last_match(2).split(".")
        segs[-1] = (segs[-1].to_i + 1).to_s
        "#{Regexp.last_match(1)}#{segs.join('.')}#{Regexp.last_match(3)}"
      end or raise "No version line found in #{gemspec_path}"
      File.write(gemspec_path, content)
      # Re-extract the (now bumped) version from the rewritten content.
      content[/\w+\.version\s*=\s*["']([^"']+)["']/, 1]
    end

    # True when name/version is already published on rubygems.org.
    # Network or parsing failures are treated as "not published" so the
    # caller can still attempt a push.
    #
    # @param name [String] gem name
    # @param version [String] version string, e.g. "0.1.2"
    # @return [Boolean]
    def self.version_published?(name, version)
      require "net/http"
      require "uri"
      uri = URI("https://rubygems.org/api/v2/rubygems/#{URI::DEFAULT_PARSER.escape(name)}/versions/#{URI::DEFAULT_PARSER.escape(version)}.json")
      response = Net::HTTP.get_response(uri)
      response.is_a?(Net::HTTPSuccess)
    rescue StandardError
      false
    end

    # Publish the .gem at gem_path to rubygems.org. If that version is already
    # published, warns and returns without pushing. Raises if the file is missing
    # or if gem push fails.
    #
    # @param gem_path [String] path to the built .gem file (name-version.gem)
    # @return [nil]
    # @raise [RuntimeError] when the file is missing or the push fails
    def self.publish(gem_path)
      raise "Gem not found: #{gem_path}. Run rake package first." unless File.file?(gem_path)

      # name-1.2.3.gem -> name ("-" may appear in the name, version is the last part)
      base = File.basename(gem_path, ".gem")
      parts = base.split("-")
      version = parts.pop
      name = parts.join("-")

      if version_published?(name, version)
        warn "publish: Version #{version} of #{name} is already published on rubygems.org. Skipping push. Bump the version in the gemspec to publish again."
        return
      end

      # Argument-list form of system avoids shell interpolation of gem_path.
      success = system("gem", "push", gem_path)
      raise "gem push failed" unless success
    end
  end
end
|
data/lib/rakit.rb
CHANGED
metadata
CHANGED
|
@@ -1,20 +1,81 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: rakit
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.1.0
|
|
4
|
+
version: 0.1.2
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- rakit
|
|
8
8
|
bindir: bin
|
|
9
9
|
cert_chain: []
|
|
10
10
|
date: 1980-01-02 00:00:00.000000000 Z
|
|
11
|
-
dependencies:
|
|
11
|
+
dependencies:
|
|
12
|
+
- !ruby/object:Gem::Dependency
|
|
13
|
+
name: json
|
|
14
|
+
requirement: !ruby/object:Gem::Requirement
|
|
15
|
+
requirements:
|
|
16
|
+
- - ">="
|
|
17
|
+
- !ruby/object:Gem::Version
|
|
18
|
+
version: '0'
|
|
19
|
+
type: :runtime
|
|
20
|
+
prerelease: false
|
|
21
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
22
|
+
requirements:
|
|
23
|
+
- - ">="
|
|
24
|
+
- !ruby/object:Gem::Version
|
|
25
|
+
version: '0'
|
|
26
|
+
- !ruby/object:Gem::Dependency
|
|
27
|
+
name: google-protobuf
|
|
28
|
+
requirement: !ruby/object:Gem::Requirement
|
|
29
|
+
requirements:
|
|
30
|
+
- - "~>"
|
|
31
|
+
- !ruby/object:Gem::Version
|
|
32
|
+
version: '3.25'
|
|
33
|
+
type: :runtime
|
|
34
|
+
prerelease: false
|
|
35
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
36
|
+
requirements:
|
|
37
|
+
- - "~>"
|
|
38
|
+
- !ruby/object:Gem::Version
|
|
39
|
+
version: '3.25'
|
|
40
|
+
- !ruby/object:Gem::Dependency
|
|
41
|
+
name: grpc-tools
|
|
42
|
+
requirement: !ruby/object:Gem::Requirement
|
|
43
|
+
requirements:
|
|
44
|
+
- - "~>"
|
|
45
|
+
- !ruby/object:Gem::Version
|
|
46
|
+
version: '1.72'
|
|
47
|
+
type: :development
|
|
48
|
+
prerelease: false
|
|
49
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
50
|
+
requirements:
|
|
51
|
+
- - "~>"
|
|
52
|
+
- !ruby/object:Gem::Version
|
|
53
|
+
version: '1.72'
|
|
54
|
+
- !ruby/object:Gem::Dependency
|
|
55
|
+
name: rake
|
|
56
|
+
requirement: !ruby/object:Gem::Requirement
|
|
57
|
+
requirements:
|
|
58
|
+
- - "~>"
|
|
59
|
+
- !ruby/object:Gem::Version
|
|
60
|
+
version: '13'
|
|
61
|
+
type: :development
|
|
62
|
+
prerelease: false
|
|
63
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
64
|
+
requirements:
|
|
65
|
+
- - "~>"
|
|
66
|
+
- !ruby/object:Gem::Version
|
|
67
|
+
version: '13'
|
|
12
68
|
executables: []
|
|
13
69
|
extensions: []
|
|
14
70
|
extra_rdoc_files: []
|
|
15
71
|
files:
|
|
72
|
+
- lib/generated/azure.devops_pb.rb
|
|
73
|
+
- lib/generated/example_pb.rb
|
|
16
74
|
- lib/rakit.rb
|
|
17
|
-
|
|
75
|
+
- lib/rakit/azure/dev_ops.rb
|
|
76
|
+
- lib/rakit/ruby_gems.rb
|
|
77
|
+
licenses:
|
|
78
|
+
- MIT
|
|
18
79
|
metadata: {}
|
|
19
80
|
rdoc_options: []
|
|
20
81
|
require_paths:
|