rakit 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/generated/azure.devops_pb.rb +12 -15
- data/lib/generated/data_pb.rb +18 -0
- data/lib/generated/example_pb.rb +1 -2
- data/lib/generated/shell_pb.rb +22 -0
- data/lib/rakit/azure/dev_ops.rb +18 -18
- data/lib/rakit/data.rb +173 -0
- data/lib/rakit/{ruby_gems.rb → gem.rb} +36 -14
- data/lib/rakit/git.rb +41 -0
- data/lib/rakit/protobuf.rb +38 -0
- data/lib/rakit/shell.rb +204 -0
- data/lib/rakit/task.rb +11 -0
- data/lib/rakit.rb +11 -1
- metadata +43 -7
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 6dccf1b9c3ff38dfd13effcd855e9185861e7f2be033bf6b3d4592a1280fdc63
|
|
4
|
+
data.tar.gz: 9a02e557e8d7611503598acc9040711c6af37f1ad8c315b1f6eb10e65ecd3308
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: c638ae4a880d61900c8252de617b501c3580a1b56a87c234ac2e72f9084d658012b174136f67cc8f40aa8add72aee9f4ab0a6df71e76a011a6e7ee264b5acbfb
|
|
7
|
+
data.tar.gz: 4cd01b862624d2a21e2d70a0a78163447ac1c33a1eceedbfce54dfecacb9a2b339fde3f11ce9f6f49dd7451ad9f4f2b55248646df05b5961f07b76d716773783
|
|
@@ -2,27 +2,24 @@
|
|
|
2
2
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
3
3
|
# source: azure.devops.proto
|
|
4
4
|
|
|
5
|
-
require
|
|
5
|
+
require "google/protobuf"
|
|
6
6
|
|
|
7
|
-
|
|
8
|
-
descriptor_data = "\n\x12\x61zure.devops.proto\x12\x0brakit.azure\"=\n\x08Pipeline\x12\x0b\n\x03org\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0bpipeline_id\x18\x03 \x01(\x05\"R\n\x18GetPipelineResultRequest\x12\'\n\x08pipeline\x18\x01 \x01(\x0b\x32\x15.rakit.azure.Pipeline\x12\r\n\x05token\x18\x02 \x01(\t\"C\n\x0ePipelineStatus\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x0e\n\x06\x65rrors\x18\x02 \x03(\t\x12\x10\n\x08warnings\x18\x03 \x03(\t\"8\n\x0ePipelineResult\x12&\n\x04runs\x18\x01 \x03(\x0b\x32\x18.rakit.azure.PipelineRun\"\x97\x01\n\x0bPipelineRun\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05state\x18\x03 \x01(\t\x12\x0e\n\x06result\x18\x04 \x01(\t\x12\x14\n\x0c\x63reated_date\x18\x05 \x01(\t\x12\x15\n\rfinished_date\x18\x06 \x01(\t\x12\"\n\x06stages\x18\x07 \x03(\x0b\x32\x12.rakit.azure.Stage\"&\n\x05Issue\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\"G\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06result\x18\x02 \x01(\t\x12\"\n\x06issues\x18\x03 \x03(\x0b\x32\x12.rakit.azure.Issue\"i\n\x05Stage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06result\x18\x02 \x01(\t\x12\x1e\n\x04jobs\x18\x03 \x03(\x0b\x32\x10.rakit.azure.Job\x12\"\n\x06issues\x18\x04 \x03(\x0b\x32\x12.rakit.azure.Issue\"`\n\x0eTimelineRecord\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0e\n\x06result\x18\x03 \x01(\t\x12\"\n\x06issues\x18\x04 \x03(\x0b\x32\x12.rakit.azure.Issue\"\xa8\x01\n\x14PipelineResultDetail\x12\x12\n\nsuccessful\x18\x01 \x01(\x08\x12\x0e\n\x06\x65rrors\x18\x02 \x01(\t\x12\x10\n\x08warnings\x18\x03 \x01(\t\x12%\n\x03run\x18\x04 \x01(\x0b\x32\x18.rakit.azure.PipelineRun\x12\x33\n\x0e\x66\x61iled_records\x18\x05 \x03(\x0b\x32\x1b.rakit.azure.TimelineRecord2i\n\x0ePipelineServer\x12W\n\x11GetPipelineResult\x12%.rakit.azure.GetPipelineResultRequest\x1a\x1b.rakit.azure.PipelineResultB\x1a\xea\x02\x17Rakit::Azure::Generatedb\x06proto3"
|
|
7
|
+
descriptor_data = "\n\x12\x61zure.devops.proto\x12\x0brakit.azure\"L\n\x08Pipeline\x12\x0b\n\x03org\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0bpipeline_id\x18\x03 \x01(\x05\x12\r\n\x05token\x18\x04 \x01(\t\"R\n\x18GetPipelineResultRequest\x12\'\n\x08pipeline\x18\x01 \x01(\x0b\x32\x15.rakit.azure.Pipeline\x12\r\n\x05token\x18\x02 \x01(\t\"C\n\x0ePipelineStatus\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x0e\n\x06\x65rrors\x18\x02 \x03(\t\x12\x10\n\x08warnings\x18\x03 \x03(\t\"8\n\x0ePipelineResult\x12&\n\x04runs\x18\x01 \x03(\x0b\x32\x18.rakit.azure.PipelineRun\"\x97\x01\n\x0bPipelineRun\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05state\x18\x03 \x01(\t\x12\x0e\n\x06result\x18\x04 \x01(\t\x12\x14\n\x0c\x63reated_date\x18\x05 \x01(\t\x12\x15\n\rfinished_date\x18\x06 \x01(\t\x12\"\n\x06stages\x18\x07 \x03(\x0b\x32\x12.rakit.azure.Stage\"&\n\x05Issue\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\"G\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06result\x18\x02 \x01(\t\x12\"\n\x06issues\x18\x03 \x03(\x0b\x32\x12.rakit.azure.Issue\"i\n\x05Stage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06result\x18\x02 \x01(\t\x12\x1e\n\x04jobs\x18\x03 \x03(\x0b\x32\x10.rakit.azure.Job\x12\"\n\x06issues\x18\x04 \x03(\x0b\x32\x12.rakit.azure.Issue\"`\n\x0eTimelineRecord\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0e\n\x06result\x18\x03 \x01(\t\x12\"\n\x06issues\x18\x04 \x03(\x0b\x32\x12.rakit.azure.Issue\"\xa8\x01\n\x14PipelineResultDetail\x12\x12\n\nsuccessful\x18\x01 \x01(\x08\x12\x0e\n\x06\x65rrors\x18\x02 \x01(\t\x12\x10\n\x08warnings\x18\x03 \x01(\t\x12%\n\x03run\x18\x04 \x01(\x0b\x32\x18.rakit.azure.PipelineRun\x12\x33\n\x0e\x66\x61iled_records\x18\x05 \x03(\x0b\x32\x1b.rakit.azure.TimelineRecord2i\n\x0ePipelineServer\x12W\n\x11GetPipelineResult\x12%.rakit.azure.GetPipelineResultRequest\x1a\x1b.rakit.azure.PipelineResultB\x0f\xea\x02\x0cRakit::Azureb\x06proto3"
|
|
9
8
|
|
|
10
9
|
pool = ::Google::Protobuf::DescriptorPool.generated_pool
|
|
11
10
|
pool.add_serialized_file(descriptor_data)
|
|
12
11
|
|
|
13
12
|
module Rakit
|
|
14
13
|
module Azure
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
PipelineResultDetail = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.PipelineResultDetail").msgclass
|
|
26
|
-
end
|
|
14
|
+
Pipeline = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.Pipeline").msgclass
|
|
15
|
+
GetPipelineResultRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.GetPipelineResultRequest").msgclass
|
|
16
|
+
PipelineStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.PipelineStatus").msgclass
|
|
17
|
+
PipelineResult = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.PipelineResult").msgclass
|
|
18
|
+
PipelineRun = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.PipelineRun").msgclass
|
|
19
|
+
Issue = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.Issue").msgclass
|
|
20
|
+
Job = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.Job").msgclass
|
|
21
|
+
Stage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.Stage").msgclass
|
|
22
|
+
TimelineRecord = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.TimelineRecord").msgclass
|
|
23
|
+
PipelineResultDetail = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.azure.PipelineResultDetail").msgclass
|
|
27
24
|
end
|
|
28
25
|
end
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
3
|
+
# source: data.proto
|
|
4
|
+
|
|
5
|
+
require 'google/protobuf'
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
descriptor_data = "\n\ndata.proto\x12\nrakit.data\"h\n\x05Index\x12/\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1e.rakit.data.Index.EntriesEntry\x1a.\n\x0c\x45ntriesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01*^\n\x0c\x45xportFormat\x12\x19\n\x15PROTOBUF_BINARY_FILES\x10\x00\x12\x17\n\x13PROTOBUF_JSON_FILES\x10\x01\x12\x1a\n\x16PROTOBUF_BINARY_ZIPPED\x10\x02\x42\x0e\xea\x02\x0bRakit::Datab\x06proto3"
|
|
9
|
+
|
|
10
|
+
pool = ::Google::Protobuf::DescriptorPool.generated_pool
|
|
11
|
+
pool.add_serialized_file(descriptor_data)
|
|
12
|
+
|
|
13
|
+
module Rakit
|
|
14
|
+
module Data
|
|
15
|
+
Index = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.data.Index").msgclass
|
|
16
|
+
ExportFormat = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.data.ExportFormat").enummodule
|
|
17
|
+
end
|
|
18
|
+
end
|
data/lib/generated/example_pb.rb
CHANGED
|
@@ -2,8 +2,7 @@
|
|
|
2
2
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
3
3
|
# source: example.proto
|
|
4
4
|
|
|
5
|
-
require
|
|
6
|
-
|
|
5
|
+
require "google/protobuf"
|
|
7
6
|
|
|
8
7
|
descriptor_data = "\n\rexample.proto\x12\rrakit.example\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\" \n\rHelloResponse\x12\x0f\n\x07message\x18\x01 \x01(\tB\x13\xea\x02\x10Rakit::Generatedb\x06proto3"
|
|
9
8
|
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
3
|
+
# source: shell.proto
|
|
4
|
+
|
|
5
|
+
require 'google/protobuf'
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
descriptor_data = "\n\x0bshell.proto\x12\x0brakit.shell\"\x9a\x02\n\x07\x43ommand\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x19\n\x11working_directory\x18\x03 \x01(\t\x12\x17\n\x0ftimeout_seconds\x18\x04 \x01(\x05\x12\x1a\n\x12\x65xpected_exit_code\x18\x05 \x01(\x05\x12\x17\n\x0f\x65xpected_stdout\x18\x06 \x01(\t\x12\x17\n\x0f\x65xpected_stderr\x18\x07 \x01(\t\x12<\n\x13\x61\x63\x63\x65ptance_criteria\x18\x08 \x03(\x0b\x32\x1f.rakit.shell.AcceptanceCriteria\x12\x13\n\x0b\x65xit_status\x18\t \x01(\x05\x12\x0e\n\x06stdout\x18\n \x01(\t\x12\x0e\n\x06stderr\x18\x0b \x01(\t\"1\n\x12\x41\x63\x63\x65ptanceCriteria\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"-\n\nTestResult\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x0e\n\x06\x65rrors\x18\x02 \x03(\t\"b\n\rFormatRequest\x12%\n\x07\x63ommand\x18\x01 \x01(\x0b\x32\x14.rakit.shell.Command\x12*\n\x06\x66ormat\x18\x02 \x01(\x0e\x32\x1a.rakit.shell.CommandFormat\" \n\x0e\x46ormatResponse\x12\x0e\n\x06output\x18\x01 \x01(\t*Z\n\rCommandFormat\x12\x1e\n\x1a\x43OMMAND_FORMAT_UNSPECIFIED\x10\x00\x12\x0c\n\x08ONE_LINE\x10\x01\x12\x0e\n\nMULTI_LINE\x10\x02\x12\x0b\n\x07\x43OMPACT\x10\x03\x32\xc1\x01\n\x0e\x43ommandService\x12\x35\n\x07\x45xecute\x12\x14.rakit.shell.Command\x1a\x14.rakit.shell.Command\x12\x35\n\x04Test\x12\x14.rakit.shell.Command\x1a\x17.rakit.shell.TestResult\x12\x41\n\x06\x46ormat\x12\x1a.rakit.shell.FormatRequest\x1a\x1b.rakit.shell.FormatResponseB\x0f\xea\x02\x0cRakit::Shellb\x06proto3"
|
|
9
|
+
|
|
10
|
+
pool = ::Google::Protobuf::DescriptorPool.generated_pool
|
|
11
|
+
pool.add_serialized_file(descriptor_data)
|
|
12
|
+
|
|
13
|
+
module Rakit
|
|
14
|
+
module Shell
|
|
15
|
+
Command = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.shell.Command").msgclass
|
|
16
|
+
AcceptanceCriteria = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.shell.AcceptanceCriteria").msgclass
|
|
17
|
+
TestResult = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.shell.TestResult").msgclass
|
|
18
|
+
FormatRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.shell.FormatRequest").msgclass
|
|
19
|
+
FormatResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.shell.FormatResponse").msgclass
|
|
20
|
+
CommandFormat = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("rakit.shell.CommandFormat").enummodule
|
|
21
|
+
end
|
|
22
|
+
end
|
data/lib/rakit/azure/dev_ops.rb
CHANGED
|
@@ -52,10 +52,10 @@ module Rakit
|
|
|
52
52
|
|
|
53
53
|
if [org, project, pipeline_id, token].any?(&:nil?) || [org, project, pipeline_id, token].any?(&:empty?)
|
|
54
54
|
return _pipeline_status_new(
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
55
|
+
success: false,
|
|
56
|
+
errors: [],
|
|
57
|
+
warnings: [_token_not_set_message(pipeline)],
|
|
58
|
+
)
|
|
59
59
|
end
|
|
60
60
|
|
|
61
61
|
if _token_unresolved?(pipeline, token)
|
|
@@ -167,7 +167,7 @@ module Rakit
|
|
|
167
167
|
ENV["AZURE_DEVOPS_ORG"],
|
|
168
168
|
ENV["AZURE_DEVOPS_PROJECT"],
|
|
169
169
|
ENV["AZURE_DEVOPS_PIPELINE_ID"]&.to_s,
|
|
170
|
-
ENV["AZURE_DEVOPS_TOKEN"]
|
|
170
|
+
ENV["AZURE_DEVOPS_TOKEN"],
|
|
171
171
|
]
|
|
172
172
|
end
|
|
173
173
|
end
|
|
@@ -300,13 +300,13 @@ module Rakit
|
|
|
300
300
|
timeline["records"]
|
|
301
301
|
.select { |r| r["result"]&.downcase == "failed" }
|
|
302
302
|
.map do |r|
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
303
|
+
{
|
|
304
|
+
"name" => r["name"] || r["identifier"] || "Unknown",
|
|
305
|
+
"type" => r["type"],
|
|
306
|
+
"result" => r["result"],
|
|
307
|
+
"issues" => (r["issues"] || []).map { |i| { "type" => i["type"], "message" => i["message"] } },
|
|
308
|
+
}
|
|
309
|
+
end
|
|
310
310
|
end
|
|
311
311
|
|
|
312
312
|
def _format_failed_steps(failures)
|
|
@@ -315,7 +315,7 @@ module Rakit
|
|
|
315
315
|
job_or_task = failures.select { |f| %w[job task].include?((f["type"] || "").to_s.downcase) }
|
|
316
316
|
job_or_task.map do |f|
|
|
317
317
|
type_label = (f["type"] || "").to_s.capitalize
|
|
318
|
-
lines = ["#{type_label}: #{f[
|
|
318
|
+
lines = ["#{type_label}: #{f["name"]}"]
|
|
319
319
|
(f["issues"] || []).each { |iss| lines << iss["message"].to_s.strip }
|
|
320
320
|
lines.reject(&:empty?).join("\n")
|
|
321
321
|
end.reject(&:empty?).join("\n\n")
|
|
@@ -332,7 +332,7 @@ module Rakit
|
|
|
332
332
|
|
|
333
333
|
out << {
|
|
334
334
|
"name" => r["name"] || r["identifier"] || "Unknown",
|
|
335
|
-
"issues" => warn_issues.map { |i| { "type" => i["type"], "message" => i["message"] } }
|
|
335
|
+
"issues" => warn_issues.map { |i| { "type" => i["type"], "message" => i["message"] } },
|
|
336
336
|
}
|
|
337
337
|
end
|
|
338
338
|
end
|
|
@@ -341,7 +341,7 @@ module Rakit
|
|
|
341
341
|
return "" if warnings.nil? || warnings.empty?
|
|
342
342
|
|
|
343
343
|
warnings.flat_map do |w|
|
|
344
|
-
(w["issues"] || []).map { |iss| " [#{w[
|
|
344
|
+
(w["issues"] || []).map { |iss| " [#{w["name"]}] #{iss["message"]}" }
|
|
345
345
|
end.join("\n")
|
|
346
346
|
end
|
|
347
347
|
|
|
@@ -354,7 +354,7 @@ module Rakit
|
|
|
354
354
|
result: run["result"].to_s,
|
|
355
355
|
created_date: run["createdDate"].to_s,
|
|
356
356
|
finished_date: run["finishedDate"].to_s,
|
|
357
|
-
stages: stages
|
|
357
|
+
stages: stages,
|
|
358
358
|
)
|
|
359
359
|
end
|
|
360
360
|
|
|
@@ -396,14 +396,14 @@ module Rakit
|
|
|
396
396
|
Job.new(
|
|
397
397
|
name: jr["name"].to_s,
|
|
398
398
|
result: jr["result"].to_s,
|
|
399
|
-
issues: job_issues + task_issues
|
|
399
|
+
issues: job_issues + task_issues,
|
|
400
400
|
)
|
|
401
401
|
end
|
|
402
402
|
Stage.new(
|
|
403
403
|
name: sr["name"].to_s,
|
|
404
404
|
result: sr["result"].to_s,
|
|
405
405
|
jobs: jobs,
|
|
406
|
-
issues: _issues_from_record(sr)
|
|
406
|
+
issues: _issues_from_record(sr),
|
|
407
407
|
)
|
|
408
408
|
end
|
|
409
409
|
end
|
data/lib/rakit/data.rb
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "fileutils"
|
|
4
|
+
|
|
5
|
+
require "generated/data_pb"
|
|
6
|
+
|
|
7
|
+
module Rakit
|
|
8
|
+
module Data
|
|
9
|
+
# Export all stored messages from DataService.data_dir to export_dir in the given format.
|
|
10
|
+
# If there are no .pb files under data_dir, no files are created.
|
|
11
|
+
#
|
|
12
|
+
# @param export_dir [String] target directory (created if needed)
|
|
13
|
+
# @param export_format [Rakit::Data::ExportFormat] PROTOBUF_BINARY_FILES (mirror .pb layout),
|
|
14
|
+
# PROTOBUF_JSON_FILES (same layout with .json), or PROTOBUF_BINARY_ZIPPED (single data.zip)
|
|
15
|
+
# @return [void]
|
|
16
|
+
# @raise [ArgumentError] if export_format is not a supported ExportFormat value
|
|
17
|
+
def self.export(export_dir, export_format)
|
|
18
|
+
base = DataService.data_dir
|
|
19
|
+
export_dir = File.expand_path(export_dir)
|
|
20
|
+
FileUtils.mkdir_p(export_dir)
|
|
21
|
+
|
|
22
|
+
pb_paths = Dir.glob(File.join(base, "**", "*.pb"))
|
|
23
|
+
return if pb_paths.empty?
|
|
24
|
+
|
|
25
|
+
case export_format
|
|
26
|
+
when ExportFormat::PROTOBUF_BINARY_FILES
|
|
27
|
+
_export_binary_files(pb_paths, base, export_dir)
|
|
28
|
+
when ExportFormat::PROTOBUF_JSON_FILES
|
|
29
|
+
_export_json_files(pb_paths, base, export_dir)
|
|
30
|
+
when ExportFormat::PROTOBUF_BINARY_ZIPPED
|
|
31
|
+
_export_binary_zipped(pb_paths, base, export_dir)
|
|
32
|
+
else
|
|
33
|
+
raise ArgumentError, "unsupported export_format: #{export_format.inspect}"
|
|
34
|
+
end
|
|
35
|
+
end
|
|
36
|
+
|
|
37
|
+
def self._rel_parts(base, path)
|
|
38
|
+
rel = path[(base.end_with?(File::SEPARATOR) ? base.length : base.length + 1)..]
|
|
39
|
+
parts = rel.split(File::SEPARATOR)
|
|
40
|
+
type_name = parts[0..-2].join("::")
|
|
41
|
+
unique_name = File.basename(parts[-1], ".pb")
|
|
42
|
+
[type_name, unique_name, rel]
|
|
43
|
+
end
|
|
44
|
+
|
|
45
|
+
def self._export_binary_files(pb_paths, base, export_dir)
|
|
46
|
+
pb_paths.each do |path|
|
|
47
|
+
type_name, _unique_name, rel = _rel_parts(base, path)
|
|
48
|
+
message = DataService.load(type_name, File.basename(path, ".pb"))
|
|
49
|
+
out_path = File.join(export_dir, rel)
|
|
50
|
+
FileUtils.mkdir_p(File.dirname(out_path))
|
|
51
|
+
File.binwrite(out_path, message.class.encode(message))
|
|
52
|
+
end
|
|
53
|
+
end
|
|
54
|
+
|
|
55
|
+
def self._export_json_files(pb_paths, base, export_dir)
|
|
56
|
+
pb_paths.each do |path|
|
|
57
|
+
type_name, unique_name, rel = _rel_parts(base, path)
|
|
58
|
+
message = DataService.load(type_name, unique_name)
|
|
59
|
+
out_rel = rel.sub(/\.pb\z/, ".json")
|
|
60
|
+
out_path = File.join(export_dir, out_rel)
|
|
61
|
+
FileUtils.mkdir_p(File.dirname(out_path))
|
|
62
|
+
File.write(out_path, message.class.encode_json(message))
|
|
63
|
+
end
|
|
64
|
+
end
|
|
65
|
+
|
|
66
|
+
def self._export_binary_zipped(pb_paths, base, export_dir)
|
|
67
|
+
require "zip"
|
|
68
|
+
zip_path = File.join(export_dir, "data.zip")
|
|
69
|
+
FileUtils.rm_f(zip_path)
|
|
70
|
+
Zip::File.open(zip_path, Zip::File::CREATE) do |zip|
|
|
71
|
+
pb_paths.each do |path|
|
|
72
|
+
type_name, unique_name, rel = _rel_parts(base, path)
|
|
73
|
+
message = DataService.load(type_name, unique_name)
|
|
74
|
+
zip.get_output_stream(rel) { |io| io.write(message.class.encode(message)) }
|
|
75
|
+
end
|
|
76
|
+
end
|
|
77
|
+
end
|
|
78
|
+
|
|
79
|
+
# Persist and retrieve protobuf message instances under a configurable data root.
|
|
80
|
+
#
|
|
81
|
+
# Storage layout: root is {data_dir} (default +~/.rakit/data+). Path for a message is
|
|
82
|
+
# +data_dir/TYPE_PATH/unique_name.pb+. TYPE_PATH is the Ruby class name with +::+ replaced by
|
|
83
|
+
# +File::SEPARATOR+ (e.g. +Rakit::Shell::Command+ → +Rakit/Shell/Command+). File content is
|
|
84
|
+
# binary protobuf; no character encoding.
|
|
85
|
+
module DataService
|
|
86
|
+
# @return [String] current data root (default: expanded +~/.rakit/data+)
|
|
87
|
+
def self.data_dir
|
|
88
|
+
@data_dir ||= File.expand_path("~/.rakit/data")
|
|
89
|
+
end
|
|
90
|
+
|
|
91
|
+
# @param path [String] set the data root for subsequent operations (e.g. tests use a temp dir)
|
|
92
|
+
# @return [void]
|
|
93
|
+
def self.data_dir=(path)
|
|
94
|
+
@data_dir = path
|
|
95
|
+
end
|
|
96
|
+
|
|
97
|
+
# Store a proto message under a unique name.
|
|
98
|
+
# @param message [Object] instance of a generated protobuf message class
|
|
99
|
+
# @param unique_name [String] non-empty, must not contain path separators or +..+
|
|
100
|
+
# @return [void]
|
|
101
|
+
# @raise [ArgumentError] if unique_name is empty/blank or contains path traversal
|
|
102
|
+
# @raise [Errno::EACCES] etc. on permission failure
|
|
103
|
+
def self.store(message, unique_name)
|
|
104
|
+
_validate_unique_name!(unique_name)
|
|
105
|
+
klass = message.class
|
|
106
|
+
path = _path(klass.name, unique_name)
|
|
107
|
+
FileUtils.mkdir_p(File.dirname(path))
|
|
108
|
+
File.binwrite(path, klass.encode(message))
|
|
109
|
+
end
|
|
110
|
+
|
|
111
|
+
# Load a stored message.
|
|
112
|
+
# @param type_name [String] Ruby class name (e.g. +"Rakit::Shell::Command"+)
|
|
113
|
+
# @param unique_name [String] same rules as for store
|
|
114
|
+
# @return [Object] decoded message instance
|
|
115
|
+
# @raise [ArgumentError] if unique_name invalid
|
|
116
|
+
# @raise [NameError] if type_name is not a valid constant
|
|
117
|
+
# @raise [Errno::ENOENT] if the file does not exist
|
|
118
|
+
def self.load(type_name, unique_name)
|
|
119
|
+
_validate_unique_name!(unique_name)
|
|
120
|
+
klass = Object.const_get(type_name.to_s)
|
|
121
|
+
path = _path(klass.name, unique_name.to_s)
|
|
122
|
+
raise Errno::ENOENT, path unless File.file?(path)
|
|
123
|
+
|
|
124
|
+
klass.decode(File.binread(path))
|
|
125
|
+
end
|
|
126
|
+
|
|
127
|
+
# Remove a stored message by type and unique name (no-op if file absent).
|
|
128
|
+
# @param type_name [String] Ruby class name
|
|
129
|
+
# @param unique_name [String] same rules as for store
|
|
130
|
+
# @return [void]
|
|
131
|
+
# @raise [ArgumentError] if unique_name invalid
|
|
132
|
+
def self.remove(type_name, unique_name)
|
|
133
|
+
_validate_unique_name!(unique_name)
|
|
134
|
+
path = _path(type_name.to_s, unique_name.to_s)
|
|
135
|
+
File.delete(path) if File.file?(path)
|
|
136
|
+
end
|
|
137
|
+
|
|
138
|
+
# Return unique names (without +.pb+) for the given type.
|
|
139
|
+
# @param type_name [String] Ruby class name for directory resolution
|
|
140
|
+
# @return [Array<String>] empty if directory missing or no .pb files
|
|
141
|
+
# @raise [NameError] if type_name is not a valid constant
|
|
142
|
+
# @raise [ArgumentError] if type_name yields empty path segments (e.g. from _dir_for_type)
|
|
143
|
+
def self.get_names(type_name)
|
|
144
|
+
dir = _dir_for_type(type_name.to_s)
|
|
145
|
+
return [] unless File.directory?(dir)
|
|
146
|
+
|
|
147
|
+
Dir.children(dir).select { |f| File.file?(File.join(dir, f)) && f.end_with?(".pb") }.map { |f| f.chomp(".pb") }
|
|
148
|
+
end
|
|
149
|
+
|
|
150
|
+
def self._validate_unique_name!(unique_name)
|
|
151
|
+
u = unique_name.to_s
|
|
152
|
+
raise ArgumentError, "unique_name must be a non-empty string" if u.strip.empty?
|
|
153
|
+
if u.include?("/") || u.include?("\\") || u.include?("..")
|
|
154
|
+
raise ArgumentError, "unique_name must not contain path separators or '..'"
|
|
155
|
+
end
|
|
156
|
+
end
|
|
157
|
+
|
|
158
|
+
def self._path(type_name, unique_name)
|
|
159
|
+
dir = _dir_for_type(type_name)
|
|
160
|
+
File.join(dir, "#{unique_name}.pb")
|
|
161
|
+
end
|
|
162
|
+
|
|
163
|
+
# PACKAGE_PATH/MESSAGE_NAME: e.g. Rakit::Shell::Command -> Rakit/Shell/Command
|
|
164
|
+
def self._dir_for_type(type_name)
|
|
165
|
+
parts = type_name.split("::")
|
|
166
|
+
raise ArgumentError, "type_name must be a qualified constant path" if parts.empty?
|
|
167
|
+
|
|
168
|
+
relative = parts.join(File::SEPARATOR)
|
|
169
|
+
File.join(data_dir, relative)
|
|
170
|
+
end
|
|
171
|
+
end
|
|
172
|
+
end
|
|
173
|
+
end
|
|
@@ -1,7 +1,29 @@
|
|
|
1
1
|
# frozen_string_literal: true
|
|
2
2
|
|
|
3
|
+
require "fileutils"
|
|
4
|
+
require "rubygems/package"
|
|
5
|
+
|
|
3
6
|
module Rakit
|
|
4
|
-
module
|
|
7
|
+
module Gem
|
|
8
|
+
# Build the gem for the given spec and move it into out_dir.
|
|
9
|
+
# Returns the path to the built .gem file.
|
|
10
|
+
def self.package(spec, out_dir)
|
|
11
|
+
FileUtils.mkdir_p(out_dir)
|
|
12
|
+
gem_file = ::Gem::Package.build(spec)
|
|
13
|
+
FileUtils.mv(gem_file, out_dir)
|
|
14
|
+
File.join(out_dir, gem_file)
|
|
15
|
+
end
|
|
16
|
+
|
|
17
|
+
# Publish the gem to rubygems.org. Loads the gemspec from gemspec_path and
|
|
18
|
+
# expects the .gem file in dirname(gemspec_path)/artifacts/. Run package first.
|
|
19
|
+
def self.publish(gemspec_path)
|
|
20
|
+
path = File.expand_path(gemspec_path)
|
|
21
|
+
spec = ::Gem::Specification.load(path)
|
|
22
|
+
out_dir = File.join(File.dirname(path), "artifacts")
|
|
23
|
+
gem_path = File.join(out_dir, "#{spec.full_name}.gem")
|
|
24
|
+
push(gem_path)
|
|
25
|
+
end
|
|
26
|
+
|
|
5
27
|
# Bump the last digit of the version in the gemspec file (e.g. "0.1.0" -> "0.1.1").
|
|
6
28
|
# Writes the file in place. Returns the new version string.
|
|
7
29
|
def self.bump(gemspec_path)
|
|
@@ -9,26 +31,16 @@ module Rakit
|
|
|
9
31
|
content.sub!(/^(\s*s\.version\s*=\s*["'])([\d.]+)(["'])/) do
|
|
10
32
|
segs = Regexp.last_match(2).split(".")
|
|
11
33
|
segs[-1] = (segs[-1].to_i + 1).to_s
|
|
12
|
-
"#{Regexp.last_match(1)}#{segs.join(
|
|
34
|
+
"#{Regexp.last_match(1)}#{segs.join(".")}#{Regexp.last_match(3)}"
|
|
13
35
|
end or raise "No s.version line found in #{gemspec_path}"
|
|
14
36
|
File.write(gemspec_path, content)
|
|
15
37
|
content[/s\.version\s*=\s*["']([^"']+)["']/, 1]
|
|
16
38
|
end
|
|
17
39
|
|
|
18
|
-
|
|
19
|
-
require "net/http"
|
|
20
|
-
require "uri"
|
|
21
|
-
uri = URI("https://rubygems.org/api/v2/rubygems/#{URI::DEFAULT_PARSER.escape(name)}/versions/#{URI::DEFAULT_PARSER.escape(version)}.json")
|
|
22
|
-
response = Net::HTTP.get_response(uri)
|
|
23
|
-
response.is_a?(Net::HTTPSuccess)
|
|
24
|
-
rescue StandardError
|
|
25
|
-
false
|
|
26
|
-
end
|
|
27
|
-
|
|
28
|
-
# Publish the .gem at gem_path to rubygems.org. If that version is already
|
|
40
|
+
# Push the .gem at gem_path to rubygems.org. If that version is already
|
|
29
41
|
# published, warns and returns without pushing. Raises if the file is missing
|
|
30
42
|
# or if gem push fails.
|
|
31
|
-
def self.
|
|
43
|
+
def self.push(gem_path)
|
|
32
44
|
raise "Gem not found: #{gem_path}. Run rake package first." unless File.file?(gem_path)
|
|
33
45
|
|
|
34
46
|
base = File.basename(gem_path, ".gem")
|
|
@@ -44,5 +56,15 @@ module Rakit
|
|
|
44
56
|
success = system("gem", "push", gem_path)
|
|
45
57
|
raise "gem push failed" unless success
|
|
46
58
|
end
|
|
59
|
+
|
|
60
|
+
def self.version_published?(name, version)
|
|
61
|
+
require "net/http"
|
|
62
|
+
require "uri"
|
|
63
|
+
uri = URI("https://rubygems.org/api/v2/rubygems/#{URI::DEFAULT_PARSER.escape(name)}/versions/#{URI::DEFAULT_PARSER.escape(version)}.json")
|
|
64
|
+
response = Net::HTTP.get_response(uri)
|
|
65
|
+
response.is_a?(Net::HTTPSuccess)
|
|
66
|
+
rescue StandardError
|
|
67
|
+
false
|
|
68
|
+
end
|
|
47
69
|
end
|
|
48
70
|
end
|
data/lib/rakit/git.rb
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Rakit
|
|
4
|
+
module Git
|
|
5
|
+
# Sync the current directory with the remote (git pull, then git push).
|
|
6
|
+
# Runs from Dir.pwd. Raises if not a git repo or if pull/push fails.
|
|
7
|
+
def self.sync(dir = nil)
|
|
8
|
+
require_relative "shell"
|
|
9
|
+
target = dir ? File.expand_path(dir) : Dir.pwd
|
|
10
|
+
raise "Not a git repository: #{target}" unless File.directory?(File.join(target, ".git"))
|
|
11
|
+
|
|
12
|
+
Dir.chdir(target) do
|
|
13
|
+
result = Rakit::Shell.run("git pull")
|
|
14
|
+
raise "git pull failed" unless result.exit_status == 0
|
|
15
|
+
result = Rakit::Shell.run("git push")
|
|
16
|
+
raise "git push failed" unless result.exit_status == 0
|
|
17
|
+
end
|
|
18
|
+
end
|
|
19
|
+
|
|
20
|
+
# Stage all changes and commit. commit_message defaults to "Integrate" when nil.
|
|
21
|
+
# If there is nothing to commit after staging, skips commit (no error).
|
|
22
|
+
# Raises if not a git repo or if add fails.
|
|
23
|
+
def self.integrate(commit_message = nil, dir = nil)
|
|
24
|
+
require_relative "shell"
|
|
25
|
+
target = dir ? File.expand_path(dir) : Dir.pwd
|
|
26
|
+
raise "Not a git repository: #{target}" unless File.directory?(File.join(target, ".git"))
|
|
27
|
+
|
|
28
|
+
message = commit_message || "Integrate"
|
|
29
|
+
Dir.chdir(target) do
|
|
30
|
+
result = Rakit::Shell.run("git add -A")
|
|
31
|
+
raise "git add failed" unless result.exit_status == 0
|
|
32
|
+
# Only commit if there are staged changes (--cached --quiet exits 1 when there are)
|
|
33
|
+
check = Rakit::Shell::CommandService.execute(Rakit::Shell::Command.new(name: "git", args: ["diff", "--cached", "--quiet"], working_directory: ""))
|
|
34
|
+
unless check.exit_status == 0
|
|
35
|
+
result = Rakit::Shell.run("git commit -m #{message.inspect}")
|
|
36
|
+
raise "git commit failed" unless result.exit_status == 0
|
|
37
|
+
end
|
|
38
|
+
end
|
|
39
|
+
end
|
|
40
|
+
end
|
|
41
|
+
end
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "fileutils"
|
|
4
|
+
|
|
5
|
+
module Rakit
|
|
6
|
+
module Protobuf
|
|
7
|
+
# Generate Ruby from .proto files.
|
|
8
|
+
# proto_dir: directory containing .proto files (and -I root)
|
|
9
|
+
# ruby_out: directory for generated *_pb.rb files
|
|
10
|
+
# protoc: executable name/path (default: "protoc")
|
|
11
|
+
# Returns ruby_out if generation ran, nil if no proto files found.
|
|
12
|
+
def self.generate(proto_dir:, ruby_out:, protoc: "protoc")
|
|
13
|
+
# output, in grey, " generating code from .proto files..."
|
|
14
|
+
puts "\e[30m generating code from .proto files...\e[0m"
|
|
15
|
+
expanded_proto_dir = File.expand_path(proto_dir)
|
|
16
|
+
proto_files = Dir[File.join(expanded_proto_dir, "**", "*.proto")]
|
|
17
|
+
return nil if proto_files.empty?
|
|
18
|
+
|
|
19
|
+
FileUtils.mkdir_p(ruby_out)
|
|
20
|
+
args = ["-I", expanded_proto_dir, "--ruby_out=#{ruby_out}", *proto_files]
|
|
21
|
+
system(protoc, *args) or raise "protoc failed"
|
|
22
|
+
|
|
23
|
+
# output a green checkmark and the command that was run
|
|
24
|
+
puts "\e[32m✓\e[0m #{protoc} #{args.join(" ")}"
|
|
25
|
+
# output that the files were generated
|
|
26
|
+
#puts " Generated #{proto_files.size} files in #{ruby_out}"
|
|
27
|
+
# output the files that were generated (all files in the ruby_out directory), once per line
|
|
28
|
+
ruby_out_files = Dir[File.join(ruby_out, "**", "*_pb.rb")]
|
|
29
|
+
ruby_out_files.each do |file|
|
|
30
|
+
# output, in grey, " #{File.basename(file)}"
|
|
31
|
+
puts "\e[30m #{File.basename(file)}\e[0m"
|
|
32
|
+
end
|
|
33
|
+
# output the number of files that were generated
|
|
34
|
+
#puts " Generated #{ruby_out_files.size} files in #{ruby_out}"
|
|
35
|
+
ruby_out
|
|
36
|
+
end
|
|
37
|
+
end
|
|
38
|
+
end
|
data/lib/rakit/shell.rb
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "open3"
|
|
4
|
+
require "timeout"
|
|
5
|
+
require "generated/shell_pb"
|
|
6
|
+
|
|
7
|
+
module Rakit
  module Shell
    # ANSI-colored status glyphs used in console output.
    CHECK = "\e[32m✓\e[0m" # green check mark
    CROSS = "\e[31m✗\e[0m" # red cross

    # CommandService: execute runs a Command; test runs it and checks its
    # expectations; format returns a display string.
    #
    # Commands are protobuf messages (see generated/shell_pb) carrying name,
    # args, working_directory, timeout_seconds, the expected_* fields,
    # acceptance_criteria and — after execution — exit_status/stdout/stderr.
    module CommandService
      # In-memory cache of executed Commands (result with exit_status, stdout,
      # stderr). Keyed by request identity (see _cache_key).
      @_executed_cache = {}

      class << self
        attr_reader :_executed_cache
      end

      # Returns a copy of the cache of executed Commands (key => Command with
      # result). Read-only view; use clear_cache to reset.
      def self.cache
        _executed_cache.dup
      end

      # Clears the in-memory cache of executed commands.
      def self.clear_cache
        _executed_cache.clear
      end

      # Builds the cache identity for +command+: name, args, working directory
      # and the normalized timeout (defaulted to 10s), joined with NUL so that
      # distinct field values cannot collide into the same key.
      def self._cache_key(command)
        name = (command.respond_to?(:name) ? command.name : "").to_s
        args = (command.respond_to?(:args) ? command.args.to_a : []).map(&:to_s)
        cwd = (command.respond_to?(:working_directory) ? command.working_directory : "").to_s
        timeout_sec = command.respond_to?(:timeout_seconds) ? command.timeout_seconds.to_i : 10
        timeout_sec = 10 if timeout_sec <= 0
        [name, *args, cwd, timeout_sec].join("\0")
      end

      # Format RPC: returns FormatResponse with the command formatted for
      # display according to the request's CommandFormat.
      def self.format(format_request)
        command = format_request.respond_to?(:command) ? format_request.command : nil
        fmt = format_request.respond_to?(:format) ? format_request.format : CommandFormat::ONE_LINE
        output = _format_command_to_s(command, fmt)
        FormatResponse.new(output: output)
      end

      # Display the command in the given format (default ONE_LINE). Prints to
      # stdout. Builds a FormatRequest and uses the Format RPC.
      def self.show(command, format_enum = nil)
        fmt = format_enum || CommandFormat::ONE_LINE
        req = FormatRequest.new(command: command, format: fmt)
        puts CommandService.format(req).output
      end

      # Renders +command+ as a string according to +fmt+:
      #   ONE_LINE (default) — "name arg1 arg2" (args with spaces quoted);
      #   MULTI_LINE         — one "field: value" per line;
      #   COMPACT            — ✓/✗ plus the one-liner, appending stdout/stderr
      #                        when exit_status is non-zero.
      # Returns "" when +command+ is nil.
      def self._format_command_to_s(command, fmt)
        return "" unless command

        name = (command.respond_to?(:name) ? command.name : "").to_s
        args = command.respond_to?(:args) ? command.args.to_a : []
        cwd = command.respond_to?(:working_directory) ? command.working_directory.to_s : ""
        timeout_sec = command.respond_to?(:timeout_seconds) ? command.timeout_seconds.to_i : 0

        argv = [name, *args].reject { |a| a.nil? || a.to_s.empty? }
        # Quote arguments containing spaces so the line reads unambiguously.
        cmd_line = argv.map { |a| a.include?(" ") ? a.inspect : a }.join(" ")

        case fmt
        when CommandFormat::MULTI_LINE
          lines = ["name: #{name}", "args: #{args.inspect}"]
          lines << "working_directory: #{cwd}" if cwd && !cwd.empty?
          lines << "timeout_seconds: #{timeout_sec}" if timeout_sec.positive?
          lines.join("\n")
        when CommandFormat::COMPACT
          exit_status = command.respond_to?(:exit_status) ? command.exit_status.to_i : 0
          out_str = command.respond_to?(:stdout) ? command.stdout.to_s : ""
          err_str = command.respond_to?(:stderr) ? command.stderr.to_s : ""
          if exit_status != 0
            lines = ["#{CROSS} #{cmd_line}"]
            lines << "stdout:\n#{out_str}" if out_str && !out_str.empty?
            lines << "stderr:\n#{err_str}" if err_str && !err_str.empty?
            lines.join("\n")
          else
            "#{CHECK} #{cmd_line}"
          end
        else
          cmd_line
        end
      end

      # Run the command (name + args) with optional cwd and timeout (default
      # 10s). Returns a Command with exit_status, stdout and stderr set.
      # When use_cache: true (default), returns a cached result if the same
      # command was already executed.
      #
      # On timeout the child process is killed (previously it was left
      # running), exit_status is -1 and " (timeout after Ns)" is appended to
      # stderr; the timed-out result is cached like any other.
      #
      # Raises ArgumentError when the command has no name.
      def self.execute(command, use_cache: true)
        key = _cache_key(command)
        return _executed_cache[key] if use_cache && _executed_cache.key?(key)

        name = (command.respond_to?(:name) ? command.name : "").to_s
        args = command.respond_to?(:args) ? command.args.to_a : []
        argv = [name, *args].reject { |a| a.nil? || a.to_s.empty? }
        raise ArgumentError, "Command name is required" if argv.empty?

        cwd = command.respond_to?(:working_directory) ? command.working_directory.to_s : ""
        cwd = nil if cwd.empty?
        timeout_sec = command.respond_to?(:timeout_seconds) ? command.timeout_seconds.to_i : 10
        timeout_sec = 10 if timeout_sec <= 0

        opts = {}
        opts[:chdir] = cwd if cwd

        stdout = ""
        stderr = ""
        exit_status = -1
        timed_out = false

        Open3.popen3(*argv, **opts) do |_stdin, out, err, wait_thr|
          Timeout.timeout(timeout_sec) do
            stdout = out.read
            stderr = err.read
            # exitstatus is nil when the child dies from a signal; map to -1.
            exit_status = wait_thr.value&.exitstatus || -1
          end
        rescue Timeout::Error
          timed_out = true
          # Fix: kill the child so it does not keep running past the timeout.
          begin
            Process.kill("KILL", wait_thr.pid)
          rescue Errno::ESRCH, Errno::EPERM
            nil # already exited, or not ours to kill
          end
        end

        if timed_out
          exit_status = -1
          stderr = "#{stderr} (timeout after #{timeout_sec}s)"
        end

        result = _command_with_result(command, exit_status, stdout, stderr)
        _executed_cache[key] = result
        result
      end

      # Returns a new Command copying +command+'s request fields with the
      # execution result fields (exit_status, stdout, stderr) filled in.
      def self._command_with_result(command, exit_status, stdout, stderr)
        criteria = command.respond_to?(:acceptance_criteria) ? command.acceptance_criteria.to_a : []
        Command.new(
          name: command.name,
          args: command.args.to_a,
          working_directory: command.respond_to?(:working_directory) ? command.working_directory.to_s : "",
          timeout_seconds: command.respond_to?(:timeout_seconds) ? command.timeout_seconds.to_i : 0,
          expected_exit_code: command.respond_to?(:expected_exit_code) ? command.expected_exit_code : 0,
          expected_stdout: command.respond_to?(:expected_stdout) ? command.expected_stdout.to_s : "",
          expected_stderr: command.respond_to?(:expected_stderr) ? command.expected_stderr.to_s : "",
          acceptance_criteria: criteria,
          exit_status: exit_status,
          stdout: stdout,
          stderr: stderr,
        )
      end

      # Run the command and check expected_exit_code, expected_stdout,
      # expected_stderr, and acceptance_criteria (kinds: exit_code,
      # stdout_contains, stderr_contains, stdout_matches, stderr_matches;
      # unknown kinds are ignored). Returns TestResult with success and the
      # list of failed expectations.
      def self.test(command)
        result = execute(command)
        errors = []

        expected_exit = command.respond_to?(:expected_exit_code) ? command.expected_exit_code : 0
        errors << "exit_code: expected #{expected_exit}, got #{result.exit_status}" if result.exit_status != expected_exit

        # Empty expected_stdout/expected_stderr means "no expectation".
        if command.respond_to?(:expected_stdout) && !command.expected_stdout.to_s.empty? && result.stdout != command.expected_stdout
          errors << "stdout: expected #{command.expected_stdout.inspect}, got #{result.stdout.inspect}"
        end

        if command.respond_to?(:expected_stderr) && !command.expected_stderr.to_s.empty? && result.stderr != command.expected_stderr
          errors << "stderr: expected #{command.expected_stderr.inspect}, got #{result.stderr.inspect}"
        end

        if command.respond_to?(:acceptance_criteria)
          command.acceptance_criteria.each do |crit|
            kind = (crit.respond_to?(:kind) ? crit.kind : "").to_s
            value = (crit.respond_to?(:value) ? crit.value : "").to_s
            case kind
            when "exit_code"
              errors << "acceptance exit_code: expected #{value}, got #{result.exit_status}" unless result.exit_status.to_s == value
            when "stdout_contains"
              errors << "acceptance stdout_contains #{value.inspect}" unless result.stdout.to_s.include?(value)
            when "stderr_contains"
              errors << "acceptance stderr_contains #{value.inspect}" unless result.stderr.to_s.include?(value)
            when "stdout_matches"
              errors << "acceptance stdout_matches #{value.inspect}" unless result.stdout.to_s.match?(Regexp.new(value))
            when "stderr_matches"
              errors << "acceptance stderr_matches #{value.inspect}" unless result.stderr.to_s.match?(Regexp.new(value))
            end
          end
        end

        TestResult.new(success: errors.empty?, errors: errors)
      end
    end

    # Run a shell command string (like sh). Builds a Command (sh -c "cmd")
    # and executes it through CommandService (so results are cached).
    # Prints ✓ (green) + cmd on success, or ✗ (red) + cmd then stdout/stderr
    # on failure. Returns the Command with result fields set.
    def self.run(cmd)
      command = Command.new(name: "sh", args: ["-c", cmd.to_s], working_directory: "")
      result = CommandService.execute(command)
      if result.exit_status == 0
        puts "#{CHECK} #{cmd}"
      else
        puts "#{CROSS} #{cmd}"
        puts "stdout:\n#{result.stdout}" if result.stdout && !result.stdout.empty?
        puts "stderr:\n#{result.stderr}" if result.stderr && !result.stderr.empty?
      end
      result
    end
  end
end
|
data/lib/rakit/task.rb
ADDED
data/lib/rakit.rb
CHANGED
|
@@ -18,5 +18,15 @@ module Rakit
|
|
|
18
18
|
end
|
|
19
19
|
end
|
|
20
20
|
|
|
21
|
-
require_relative "rakit/
|
|
21
|
+
require_relative "rakit/gem"
|
|
22
|
+
require_relative "rakit/git"
|
|
23
|
+
require_relative "rakit/task"
|
|
24
|
+
require_relative "rakit/protobuf"
|
|
25
|
+
# Defer loading so rake tasks that don't need Shell/Data (e.g. clobber) work without google-protobuf.
|
|
26
|
+
autoload :Shell, "rakit/shell"
|
|
27
|
+
autoload :Data, "rakit/data"
|
|
22
28
|
require_relative "rakit/azure/dev_ops"
|
|
29
|
+
|
|
30
|
+
# Top-level convenience wrapper: run +cmd+ through Rakit::Shell.run, which
# prints a ✓/✗ status line and returns the executed Command.
def run(cmd) = Rakit::Shell.run(cmd)
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: rakit
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.1.
|
|
4
|
+
version: 0.1.4
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- rakit
|
|
@@ -13,16 +13,16 @@ dependencies:
|
|
|
13
13
|
name: json
|
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|
|
15
15
|
requirements:
|
|
16
|
-
- - "
|
|
16
|
+
- - "~>"
|
|
17
17
|
- !ruby/object:Gem::Version
|
|
18
|
-
version: '0'
|
|
18
|
+
version: '2.0'
|
|
19
19
|
type: :runtime
|
|
20
20
|
prerelease: false
|
|
21
21
|
version_requirements: !ruby/object:Gem::Requirement
|
|
22
22
|
requirements:
|
|
23
|
-
- - "
|
|
23
|
+
- - "~>"
|
|
24
24
|
- !ruby/object:Gem::Version
|
|
25
|
-
version: '0'
|
|
25
|
+
version: '2.0'
|
|
26
26
|
- !ruby/object:Gem::Dependency
|
|
27
27
|
name: rake
|
|
28
28
|
requirement: !ruby/object:Gem::Requirement
|
|
@@ -51,6 +51,20 @@ dependencies:
|
|
|
51
51
|
- - "~>"
|
|
52
52
|
- !ruby/object:Gem::Version
|
|
53
53
|
version: '3.25'
|
|
54
|
+
- !ruby/object:Gem::Dependency
|
|
55
|
+
name: rubyzip
|
|
56
|
+
requirement: !ruby/object:Gem::Requirement
|
|
57
|
+
requirements:
|
|
58
|
+
- - "~>"
|
|
59
|
+
- !ruby/object:Gem::Version
|
|
60
|
+
version: '2.0'
|
|
61
|
+
type: :runtime
|
|
62
|
+
prerelease: false
|
|
63
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
64
|
+
requirements:
|
|
65
|
+
- - "~>"
|
|
66
|
+
- !ruby/object:Gem::Version
|
|
67
|
+
version: '2.0'
|
|
54
68
|
- !ruby/object:Gem::Dependency
|
|
55
69
|
name: grpc-tools
|
|
56
70
|
requirement: !ruby/object:Gem::Requirement
|
|
@@ -65,15 +79,37 @@ dependencies:
|
|
|
65
79
|
- - "~>"
|
|
66
80
|
- !ruby/object:Gem::Version
|
|
67
81
|
version: '1.72'
|
|
82
|
+
- !ruby/object:Gem::Dependency
|
|
83
|
+
name: minitest
|
|
84
|
+
requirement: !ruby/object:Gem::Requirement
|
|
85
|
+
requirements:
|
|
86
|
+
- - "~>"
|
|
87
|
+
- !ruby/object:Gem::Version
|
|
88
|
+
version: '5.0'
|
|
89
|
+
type: :development
|
|
90
|
+
prerelease: false
|
|
91
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
92
|
+
requirements:
|
|
93
|
+
- - "~>"
|
|
94
|
+
- !ruby/object:Gem::Version
|
|
95
|
+
version: '5.0'
|
|
68
96
|
executables: []
|
|
69
97
|
extensions: []
|
|
70
98
|
extra_rdoc_files: []
|
|
71
99
|
files:
|
|
72
100
|
- lib/generated/azure.devops_pb.rb
|
|
101
|
+
- lib/generated/data_pb.rb
|
|
73
102
|
- lib/generated/example_pb.rb
|
|
103
|
+
- lib/generated/shell_pb.rb
|
|
74
104
|
- lib/rakit.rb
|
|
75
105
|
- lib/rakit/azure/dev_ops.rb
|
|
76
|
-
- lib/rakit/
|
|
106
|
+
- lib/rakit/data.rb
|
|
107
|
+
- lib/rakit/gem.rb
|
|
108
|
+
- lib/rakit/git.rb
|
|
109
|
+
- lib/rakit/protobuf.rb
|
|
110
|
+
- lib/rakit/shell.rb
|
|
111
|
+
- lib/rakit/task.rb
|
|
112
|
+
homepage: https://gitlab.com/gems/rakit
|
|
77
113
|
licenses:
|
|
78
114
|
- MIT
|
|
79
115
|
metadata: {}
|
|
@@ -84,7 +120,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
|
84
120
|
requirements:
|
|
85
121
|
- - ">="
|
|
86
122
|
- !ruby/object:Gem::Version
|
|
87
|
-
version: '0'
|
|
123
|
+
version: '3.0'
|
|
88
124
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
89
125
|
requirements:
|
|
90
126
|
- - ">="
|