makit 0.0.144 → 0.0.145
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +41 -41
- data/exe/makit +5 -5
- data/lib/makit/apache.rb +28 -28
- data/lib/makit/auto.rb +48 -48
- data/lib/makit/cli/base.rb +17 -0
- data/lib/makit/cli/build_commands.rb +500 -500
- data/lib/makit/cli/generators/base_generator.rb +74 -74
- data/lib/makit/cli/generators/dotnet_generator.rb +50 -50
- data/lib/makit/cli/generators/generator_factory.rb +49 -49
- data/lib/makit/cli/generators/node_generator.rb +50 -50
- data/lib/makit/cli/generators/ruby_generator.rb +77 -77
- data/lib/makit/cli/generators/rust_generator.rb +50 -50
- data/lib/makit/cli/generators/templates/dotnet_templates.rb +167 -167
- data/lib/makit/cli/generators/templates/node_templates.rb +161 -161
- data/lib/makit/cli/generators/templates/ruby/gemfile.rb +26 -26
- data/lib/makit/cli/generators/templates/ruby/gemspec.rb +41 -40
- data/lib/makit/cli/generators/templates/ruby/main_lib.rb +33 -33
- data/lib/makit/cli/generators/templates/ruby/rakefile.rb +35 -35
- data/lib/makit/cli/generators/templates/ruby/readme.rb +63 -63
- data/lib/makit/cli/generators/templates/ruby/test.rb +39 -39
- data/lib/makit/cli/generators/templates/ruby/test_helper.rb +29 -29
- data/lib/makit/cli/generators/templates/ruby/version.rb +29 -29
- data/lib/makit/cli/generators/templates/rust_templates.rb +128 -128
- data/lib/makit/cli/main.rb +78 -69
- data/lib/makit/cli/pipeline_commands.rb +311 -0
- data/lib/makit/cli/project_commands.rb +868 -868
- data/lib/makit/cli/repository_commands.rb +661 -661
- data/lib/makit/cli/strategy_commands.rb +207 -212
- data/lib/makit/cli/utility_commands.rb +521 -521
- data/lib/makit/commands/factory.rb +359 -359
- data/lib/makit/commands/middleware/base.rb +73 -73
- data/lib/makit/commands/middleware/cache.rb +248 -248
- data/lib/makit/commands/middleware/command_logger.rb +312 -312
- data/lib/makit/commands/middleware/validator.rb +269 -269
- data/lib/makit/commands/request.rb +316 -316
- data/lib/makit/commands/result.rb +323 -323
- data/lib/makit/commands/runner.rb +386 -386
- data/lib/makit/commands/strategies/base.rb +171 -171
- data/lib/makit/commands/strategies/child_process.rb +162 -162
- data/lib/makit/commands/strategies/factory.rb +136 -136
- data/lib/makit/commands/strategies/synchronous.rb +139 -139
- data/lib/makit/commands.rb +50 -50
- data/lib/makit/configuration/dotnet_project.rb +48 -48
- data/lib/makit/configuration/gitlab_helper.rb +61 -58
- data/lib/makit/configuration/project.rb +446 -168
- data/lib/makit/configuration/rakefile_helper.rb +43 -43
- data/lib/makit/configuration/step.rb +34 -34
- data/lib/makit/configuration/timeout.rb +74 -74
- data/lib/makit/configuration.rb +21 -16
- data/lib/makit/content/default_gitignore.rb +7 -7
- data/lib/makit/content/default_gitignore.txt +225 -225
- data/lib/makit/content/default_rakefile.rb +13 -13
- data/lib/makit/content/gem_rakefile.rb +16 -16
- data/lib/makit/context.rb +1 -1
- data/lib/makit/data.rb +49 -49
- data/lib/makit/directories.rb +140 -140
- data/lib/makit/directory.rb +262 -262
- data/lib/makit/docs/files.rb +89 -89
- data/lib/makit/docs/rake.rb +102 -102
- data/lib/makit/dotnet/cli.rb +69 -69
- data/lib/makit/dotnet/project.rb +217 -217
- data/lib/makit/dotnet/solution.rb +38 -38
- data/lib/makit/dotnet/solution_classlib.rb +239 -239
- data/lib/makit/dotnet/solution_console.rb +264 -264
- data/lib/makit/dotnet/solution_maui.rb +354 -354
- data/lib/makit/dotnet/solution_wasm.rb +275 -275
- data/lib/makit/dotnet/solution_wpf.rb +304 -304
- data/lib/makit/dotnet.rb +102 -102
- data/lib/makit/email.rb +90 -90
- data/lib/makit/environment.rb +142 -142
- data/lib/makit/examples/runner.rb +370 -370
- data/lib/makit/exceptions.rb +45 -45
- data/lib/makit/fileinfo.rb +32 -24
- data/lib/makit/files.rb +43 -43
- data/lib/makit/gems.rb +40 -40
- data/lib/makit/git/cli.rb +54 -54
- data/lib/makit/git/repository.rb +266 -90
- data/lib/makit/git.rb +104 -98
- data/lib/makit/gitlab/pipeline.rb +857 -0
- data/lib/makit/gitlab/pipeline_service_impl.rb +1536 -0
- data/lib/makit/gitlab_runner.rb +59 -59
- data/lib/makit/humanize.rb +218 -137
- data/lib/makit/indexer.rb +47 -47
- data/lib/makit/io/filesystem.rb +111 -0
- data/lib/makit/io/filesystem_service_impl.rb +337 -0
- data/lib/makit/logging/configuration.rb +308 -308
- data/lib/makit/logging/format_registry.rb +84 -84
- data/lib/makit/logging/formatters/base.rb +39 -39
- data/lib/makit/logging/formatters/console_formatter.rb +140 -140
- data/lib/makit/logging/formatters/json_formatter.rb +65 -65
- data/lib/makit/logging/formatters/plain_text_formatter.rb +71 -71
- data/lib/makit/logging/formatters/text_formatter.rb +64 -64
- data/lib/makit/logging/log_request.rb +119 -119
- data/lib/makit/logging/logger.rb +199 -199
- data/lib/makit/logging/sinks/base.rb +91 -91
- data/lib/makit/logging/sinks/console.rb +72 -72
- data/lib/makit/logging/sinks/file_sink.rb +92 -92
- data/lib/makit/logging/sinks/structured.rb +123 -123
- data/lib/makit/logging/sinks/unified_file_sink.rb +296 -296
- data/lib/makit/logging.rb +565 -565
- data/lib/makit/markdown.rb +75 -75
- data/lib/makit/mp/basic_object_mp.rb +17 -17
- data/lib/makit/mp/command_mp.rb +13 -13
- data/lib/makit/mp/command_request.mp.rb +17 -17
- data/lib/makit/mp/project_mp.rb +199 -199
- data/lib/makit/mp/string_mp.rb +205 -199
- data/lib/makit/nuget.rb +74 -74
- data/lib/makit/podman/podman.rb +458 -0
- data/lib/makit/podman/podman_service_impl.rb +1081 -0
- data/lib/makit/port.rb +32 -32
- data/lib/makit/process.rb +377 -377
- data/lib/makit/protoc.rb +112 -107
- data/lib/makit/rake/cli.rb +196 -196
- data/lib/makit/rake/trace_controller.rb +174 -174
- data/lib/makit/rake.rb +81 -81
- data/lib/makit/ruby/cli.rb +185 -185
- data/lib/makit/ruby.rb +25 -25
- data/lib/makit/secrets.rb +51 -51
- data/lib/makit/serializer.rb +130 -130
- data/lib/makit/services/builder.rb +186 -186
- data/lib/makit/services/error_handler.rb +226 -226
- data/lib/makit/services/repository_manager.rb +367 -231
- data/lib/makit/services/validator.rb +112 -112
- data/lib/makit/setup/classlib.rb +101 -101
- data/lib/makit/setup/gem.rb +268 -268
- data/lib/makit/setup/pages.rb +11 -11
- data/lib/makit/setup/razorclasslib.rb +101 -101
- data/lib/makit/setup/runner.rb +54 -54
- data/lib/makit/setup.rb +5 -5
- data/lib/makit/show.rb +110 -110
- data/lib/makit/storage.rb +126 -126
- data/lib/makit/symbols.rb +175 -170
- data/lib/makit/task_info.rb +130 -130
- data/lib/makit/tasks/at_exit.rb +15 -15
- data/lib/makit/tasks/build.rb +22 -22
- data/lib/makit/tasks/clean.rb +13 -13
- data/lib/makit/tasks/configure.rb +10 -10
- data/lib/makit/tasks/format.rb +10 -10
- data/lib/makit/tasks/hook_manager.rb +443 -443
- data/lib/makit/tasks/init.rb +49 -49
- data/lib/makit/tasks/integrate.rb +29 -29
- data/lib/makit/tasks/pull_incoming.rb +13 -13
- data/lib/makit/tasks/setup.rb +16 -16
- data/lib/makit/tasks/sync.rb +17 -17
- data/lib/makit/tasks/tag.rb +16 -16
- data/lib/makit/tasks/task_monkey_patch.rb +81 -81
- data/lib/makit/tasks/test.rb +22 -22
- data/lib/makit/tasks/update.rb +18 -18
- data/lib/makit/tasks.rb +20 -20
- data/lib/makit/test_cache.rb +239 -239
- data/lib/makit/tree.rb +37 -37
- data/lib/makit/v1/configuration/project_service_impl.rb +371 -0
- data/lib/makit/v1/git/git_repository_service_impl.rb +295 -0
- data/lib/makit/v1/makit.v1_pb.rb +35 -35
- data/lib/makit/v1/makit.v1_services_pb.rb +27 -27
- data/lib/makit/v1/services/repository_manager_service_impl.rb +572 -0
- data/lib/makit/version.rb +100 -100
- data/lib/makit/version_util.rb +21 -21
- data/lib/makit/wix.rb +95 -95
- data/lib/makit/yaml.rb +29 -29
- data/lib/makit/zip.rb +17 -17
- data/lib/makit copy.rb +44 -44
- data/lib/makit.rb +111 -43
- metadata +61 -36
|
@@ -0,0 +1,1536 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "yaml"
|
|
4
|
+
require "json"
|
|
5
|
+
require "time"
|
|
6
|
+
require "securerandom"
|
|
7
|
+
|
|
8
|
+
module Makit
|
|
9
|
+
module Gitlab
|
|
10
|
+
# Implementation of the PipelineService gRPC service
|
|
11
|
+
# Provides methods for parsing, validating, and converting GitLab CI pipelines
|
|
12
|
+
class PipelineServiceImpl
|
|
13
|
+
# Check if gRPC service is available
|
|
14
|
+
def self.grpc_available?
|
|
15
|
+
defined?(Gitlab::Pipeline::PipelineService)
|
|
16
|
+
end
|
|
17
|
+
|
|
18
|
+
# Parse GitLab CI YAML content into Pipeline message
|
|
19
|
+
def parse_yaml(request, _unused_call = nil)
|
|
20
|
+
begin
|
|
21
|
+
# Parse YAML content
|
|
22
|
+
yaml_data = YAML.safe_load(request.yaml_content, permitted_classes: [Symbol], aliases: true)
|
|
23
|
+
|
|
24
|
+
# Convert to protobuf Pipeline message
|
|
25
|
+
pipeline = convert_yaml_to_pipeline(yaml_data)
|
|
26
|
+
|
|
27
|
+
# Validate the pipeline
|
|
28
|
+
errors, warnings = validate_pipeline_data(yaml_data)
|
|
29
|
+
|
|
30
|
+
if grpc_available?
|
|
31
|
+
Gitlab::Pipeline::ParseYamlResponse.new(
|
|
32
|
+
pipeline: pipeline,
|
|
33
|
+
errors: errors,
|
|
34
|
+
warnings: warnings,
|
|
35
|
+
success: errors.empty?
|
|
36
|
+
)
|
|
37
|
+
else
|
|
38
|
+
# Fallback response structure
|
|
39
|
+
{
|
|
40
|
+
pipeline: pipeline,
|
|
41
|
+
errors: errors,
|
|
42
|
+
warnings: warnings,
|
|
43
|
+
success: errors.empty?
|
|
44
|
+
}
|
|
45
|
+
end
|
|
46
|
+
rescue Psych::SyntaxError => e
|
|
47
|
+
if grpc_available?
|
|
48
|
+
Gitlab::Pipeline::ParseYamlResponse.new(
|
|
49
|
+
pipeline: Gitlab::Pipeline::Pipeline.new,
|
|
50
|
+
errors: ["YAML syntax error: #{e.message}"],
|
|
51
|
+
warnings: [],
|
|
52
|
+
success: false
|
|
53
|
+
)
|
|
54
|
+
else
|
|
55
|
+
{
|
|
56
|
+
pipeline: create_empty_pipeline,
|
|
57
|
+
errors: ["YAML syntax error: #{e.message}"],
|
|
58
|
+
warnings: [],
|
|
59
|
+
success: false
|
|
60
|
+
}
|
|
61
|
+
end
|
|
62
|
+
rescue StandardError => e
|
|
63
|
+
if grpc_available?
|
|
64
|
+
Gitlab::Pipeline::ParseYamlResponse.new(
|
|
65
|
+
pipeline: Gitlab::Pipeline::Pipeline.new,
|
|
66
|
+
errors: ["Parse error: #{e.message}"],
|
|
67
|
+
warnings: [],
|
|
68
|
+
success: false
|
|
69
|
+
)
|
|
70
|
+
else
|
|
71
|
+
{
|
|
72
|
+
pipeline: create_empty_pipeline,
|
|
73
|
+
errors: ["Parse error: #{e.message}"],
|
|
74
|
+
warnings: [],
|
|
75
|
+
success: false
|
|
76
|
+
}
|
|
77
|
+
end
|
|
78
|
+
end
|
|
79
|
+
end
|
|
80
|
+
|
|
81
|
+
# Convert Pipeline message back to YAML
|
|
82
|
+
def to_yaml(request, _unused_call = nil)
|
|
83
|
+
begin
|
|
84
|
+
# Convert protobuf Pipeline to YAML
|
|
85
|
+
yaml_content = convert_pipeline_to_yaml(request.pipeline, request.pretty_format, request.indent_size)
|
|
86
|
+
|
|
87
|
+
if grpc_available?
|
|
88
|
+
Gitlab::Pipeline::ToYamlResponse.new(
|
|
89
|
+
yaml_content: yaml_content,
|
|
90
|
+
errors: [],
|
|
91
|
+
success: true
|
|
92
|
+
)
|
|
93
|
+
else
|
|
94
|
+
{
|
|
95
|
+
yaml_content: yaml_content,
|
|
96
|
+
errors: [],
|
|
97
|
+
success: true
|
|
98
|
+
}
|
|
99
|
+
end
|
|
100
|
+
rescue StandardError => e
|
|
101
|
+
if grpc_available?
|
|
102
|
+
Gitlab::Pipeline::ToYamlResponse.new(
|
|
103
|
+
yaml_content: "",
|
|
104
|
+
errors: ["YAML generation error: #{e.message}"],
|
|
105
|
+
success: false
|
|
106
|
+
)
|
|
107
|
+
else
|
|
108
|
+
{
|
|
109
|
+
yaml_content: "",
|
|
110
|
+
errors: ["YAML generation error: #{e.message}"],
|
|
111
|
+
success: false
|
|
112
|
+
}
|
|
113
|
+
end
|
|
114
|
+
end
|
|
115
|
+
end
|
|
116
|
+
|
|
117
|
+
# Validate pipeline structure
|
|
118
|
+
def validate_pipeline(request, _unused_call = nil)
|
|
119
|
+
errors = []
|
|
120
|
+
warnings = []
|
|
121
|
+
suggestions = []
|
|
122
|
+
|
|
123
|
+
pipeline = request.pipeline
|
|
124
|
+
|
|
125
|
+
# Basic validation
|
|
126
|
+
if pipeline.jobs.empty?
|
|
127
|
+
warnings << "Pipeline has no jobs defined"
|
|
128
|
+
end
|
|
129
|
+
|
|
130
|
+
if pipeline.stages.empty?
|
|
131
|
+
warnings << "Pipeline has no stages defined"
|
|
132
|
+
end
|
|
133
|
+
|
|
134
|
+
# Validate jobs
|
|
135
|
+
pipeline.jobs.each do |job_name, job|
|
|
136
|
+
job_errors, job_warnings, job_suggestions = validate_job(job_name, job, pipeline)
|
|
137
|
+
errors.concat(job_errors)
|
|
138
|
+
warnings.concat(job_warnings)
|
|
139
|
+
suggestions.concat(job_suggestions)
|
|
140
|
+
end
|
|
141
|
+
|
|
142
|
+
# Validate stages
|
|
143
|
+
pipeline.stages.each do |stage|
|
|
144
|
+
stage_errors, stage_warnings = validate_stage(stage, pipeline)
|
|
145
|
+
errors.concat(stage_errors)
|
|
146
|
+
warnings.concat(stage_warnings)
|
|
147
|
+
end
|
|
148
|
+
|
|
149
|
+
if grpc_available?
|
|
150
|
+
Gitlab::Pipeline::ValidatePipelineResponse.new(
|
|
151
|
+
is_valid: errors.empty?,
|
|
152
|
+
errors: errors,
|
|
153
|
+
warnings: warnings,
|
|
154
|
+
suggestions: suggestions
|
|
155
|
+
)
|
|
156
|
+
else
|
|
157
|
+
{
|
|
158
|
+
is_valid: errors.empty?,
|
|
159
|
+
errors: errors,
|
|
160
|
+
warnings: warnings,
|
|
161
|
+
suggestions: suggestions
|
|
162
|
+
}
|
|
163
|
+
end
|
|
164
|
+
end
|
|
165
|
+
|
|
166
|
+
# Merge multiple pipelines
|
|
167
|
+
def merge_pipelines(request, _unused_call = nil)
|
|
168
|
+
base = request.base_pipeline
|
|
169
|
+
override = request.override_pipeline
|
|
170
|
+
conflicts = []
|
|
171
|
+
warnings = []
|
|
172
|
+
|
|
173
|
+
# Create merged pipeline
|
|
174
|
+
merged = create_empty_pipeline
|
|
175
|
+
|
|
176
|
+
# Merge basic fields
|
|
177
|
+
merged.image = override.image.empty? ? base.image : override.image
|
|
178
|
+
merged.timeout = override.timeout == 0 ? base.timeout : override.timeout
|
|
179
|
+
|
|
180
|
+
# Merge variables
|
|
181
|
+
merged.variables.merge!(base.variables)
|
|
182
|
+
override.variables.each do |key, value|
|
|
183
|
+
if merged.variables.key?(key) && merged.variables[key] != value
|
|
184
|
+
conflicts << "Variable '#{key}' has different values: '#{merged.variables[key]}' vs '#{value}'"
|
|
185
|
+
end
|
|
186
|
+
merged.variables[key] = value
|
|
187
|
+
end
|
|
188
|
+
|
|
189
|
+
# Merge cache
|
|
190
|
+
if override.cache && !override.cache.key.empty?
|
|
191
|
+
merged.cache = override.cache
|
|
192
|
+
elsif base.cache && !base.cache.key.empty?
|
|
193
|
+
merged.cache = base.cache
|
|
194
|
+
end
|
|
195
|
+
|
|
196
|
+
# Merge stages
|
|
197
|
+
merged.stages.concat(base.stages)
|
|
198
|
+
override.stages.each do |override_stage|
|
|
199
|
+
existing_stage = merged.stages.find { |s| s.name == override_stage.name }
|
|
200
|
+
if existing_stage
|
|
201
|
+
existing_stage.jobs.concat(override_stage.jobs)
|
|
202
|
+
else
|
|
203
|
+
merged.stages << override_stage
|
|
204
|
+
end
|
|
205
|
+
end
|
|
206
|
+
|
|
207
|
+
# Merge jobs
|
|
208
|
+
merged.jobs.merge!(base.jobs)
|
|
209
|
+
override.jobs.each do |job_name, job|
|
|
210
|
+
if merged.jobs.key?(job_name)
|
|
211
|
+
conflicts << "Job '#{job_name}' exists in both pipelines"
|
|
212
|
+
end
|
|
213
|
+
merged.jobs[job_name] = job
|
|
214
|
+
end
|
|
215
|
+
|
|
216
|
+
if grpc_available?
|
|
217
|
+
Gitlab::Pipeline::MergePipelinesResponse.new(
|
|
218
|
+
merged_pipeline: merged,
|
|
219
|
+
conflicts: conflicts,
|
|
220
|
+
warnings: warnings
|
|
221
|
+
)
|
|
222
|
+
else
|
|
223
|
+
{
|
|
224
|
+
merged_pipeline: merged,
|
|
225
|
+
conflicts: conflicts,
|
|
226
|
+
warnings: warnings
|
|
227
|
+
}
|
|
228
|
+
end
|
|
229
|
+
end
|
|
230
|
+
|
|
231
|
+
# Get pipeline statistics
|
|
232
|
+
def get_pipeline_stats(request, _unused_call = nil)
|
|
233
|
+
pipeline = request.pipeline
|
|
234
|
+
|
|
235
|
+
total_artifacts = pipeline.jobs.values.sum { |job| job.artifacts_paths.length }
|
|
236
|
+
total_services = pipeline.jobs.values.sum { |job| job.services.length }
|
|
237
|
+
total_variables = pipeline.variables.length + pipeline.jobs.values.sum { |job| job.variables.length }
|
|
238
|
+
|
|
239
|
+
if grpc_available?
|
|
240
|
+
Gitlab::Pipeline::GetPipelineStatsResponse.new(
|
|
241
|
+
total_jobs: pipeline.jobs.length,
|
|
242
|
+
total_stages: pipeline.stages.length,
|
|
243
|
+
total_artifacts: total_artifacts,
|
|
244
|
+
total_services: total_services,
|
|
245
|
+
total_variables: total_variables,
|
|
246
|
+
stage_names: pipeline.stages.map(&:name),
|
|
247
|
+
job_names: pipeline.jobs.keys
|
|
248
|
+
)
|
|
249
|
+
else
|
|
250
|
+
{
|
|
251
|
+
total_jobs: pipeline.jobs.length,
|
|
252
|
+
total_stages: pipeline.stages.length,
|
|
253
|
+
total_artifacts: total_artifacts,
|
|
254
|
+
total_services: total_services,
|
|
255
|
+
total_variables: total_variables,
|
|
256
|
+
stage_names: pipeline.stages.map(&:name),
|
|
257
|
+
job_names: pipeline.jobs.keys
|
|
258
|
+
}
|
|
259
|
+
end
|
|
260
|
+
end
|
|
261
|
+
|
|
262
|
+
# Execute a pipeline using Podman
|
|
263
|
+
def execute_pipeline(request, _unused_call = nil)
|
|
264
|
+
begin
|
|
265
|
+
execution_id = SecureRandom.uuid
|
|
266
|
+
start_time = Time.now
|
|
267
|
+
|
|
268
|
+
# Create execution result
|
|
269
|
+
result = create_execution_result(execution_id, start_time)
|
|
270
|
+
|
|
271
|
+
# Check if Podman is available (cross-platform)
|
|
272
|
+
podman_executable = if request.respond_to?(:podman_executable)
|
|
273
|
+
request.podman_executable || "podman"
|
|
274
|
+
else
|
|
275
|
+
request[:podman_executable] || "podman"
|
|
276
|
+
end
|
|
277
|
+
|
|
278
|
+
# Find the actual executable path
|
|
279
|
+
found_executable = find_executable(podman_executable)
|
|
280
|
+
unless found_executable
|
|
281
|
+
return create_error_response("Podman executable '#{podman_executable}' not found or not working")
|
|
282
|
+
end
|
|
283
|
+
|
|
284
|
+
podman_executable = found_executable
|
|
285
|
+
|
|
286
|
+
# Get Podman version (cross-platform)
|
|
287
|
+
podman_version = capture_command_output("#{podman_executable} --version")
|
|
288
|
+
result.podman_version = podman_version
|
|
289
|
+
result.execution_host = capture_command_output("hostname")
|
|
290
|
+
|
|
291
|
+
# Set working directory
|
|
292
|
+
working_dir = if request.respond_to?(:working_directory)
|
|
293
|
+
request.working_directory || Dir.pwd
|
|
294
|
+
else
|
|
295
|
+
request[:working_directory] || Dir.pwd
|
|
296
|
+
end
|
|
297
|
+
|
|
298
|
+
# Execute pipeline stages
|
|
299
|
+
pipeline = if request.respond_to?(:pipeline)
|
|
300
|
+
request.pipeline
|
|
301
|
+
else
|
|
302
|
+
request[:pipeline]
|
|
303
|
+
end
|
|
304
|
+
stage_results = []
|
|
305
|
+
|
|
306
|
+
pipeline.stages.each do |stage|
|
|
307
|
+
stage_result = execute_stage(stage, pipeline, working_dir, request, execution_id, podman_executable)
|
|
308
|
+
stage_results << stage_result
|
|
309
|
+
result.job_results.concat(stage_result.job_results)
|
|
310
|
+
end
|
|
311
|
+
|
|
312
|
+
# Determine overall status
|
|
313
|
+
result.status = determine_pipeline_status(result.job_results)
|
|
314
|
+
result.finished_at = Time.now
|
|
315
|
+
result.total_duration_seconds = (result.finished_at - start_time).to_i
|
|
316
|
+
|
|
317
|
+
# Add execution logs
|
|
318
|
+
result.logs.push("Pipeline execution started at #{start_time}")
|
|
319
|
+
result.logs.push("Pipeline execution completed at #{result.finished_at}")
|
|
320
|
+
result.logs.push("Total duration: #{result.total_duration_seconds} seconds")
|
|
321
|
+
result.logs.push("Podman version: #{podman_version}")
|
|
322
|
+
|
|
323
|
+
if grpc_available?
|
|
324
|
+
Gitlab::Pipeline::ExecutePipelineResponse.new(
|
|
325
|
+
result: result,
|
|
326
|
+
success: result.status == Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_SUCCESS,
|
|
327
|
+
errors: result.errors,
|
|
328
|
+
warnings: result.warnings
|
|
329
|
+
)
|
|
330
|
+
else
|
|
331
|
+
{
|
|
332
|
+
result: result,
|
|
333
|
+
success: result.status == :success,
|
|
334
|
+
errors: result.errors,
|
|
335
|
+
warnings: result.warnings
|
|
336
|
+
}
|
|
337
|
+
end
|
|
338
|
+
|
|
339
|
+
rescue StandardError => e
|
|
340
|
+
error_result = create_execution_result(SecureRandom.uuid, Time.now)
|
|
341
|
+
error_result.status = grpc_available? ? Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_FAILED : :failed
|
|
342
|
+
error_result.errors.push("Pipeline execution failed: #{e.message}")
|
|
343
|
+
error_result.logs.push("Error: #{e.message}")
|
|
344
|
+
error_result.logs.push("Backtrace: #{e.backtrace.join("\n")}")
|
|
345
|
+
|
|
346
|
+
if grpc_available?
|
|
347
|
+
Gitlab::Pipeline::ExecutePipelineResponse.new(
|
|
348
|
+
result: error_result,
|
|
349
|
+
success: false,
|
|
350
|
+
errors: error_result.errors,
|
|
351
|
+
warnings: error_result.warnings
|
|
352
|
+
)
|
|
353
|
+
else
|
|
354
|
+
{
|
|
355
|
+
result: error_result,
|
|
356
|
+
success: false,
|
|
357
|
+
errors: error_result.errors,
|
|
358
|
+
warnings: error_result.warnings
|
|
359
|
+
}
|
|
360
|
+
end
|
|
361
|
+
end
|
|
362
|
+
end
|
|
363
|
+
|
|
364
|
+
# Compare two pipelines
|
|
365
|
+
def compare_pipelines(request, _unused_call = nil)
|
|
366
|
+
pipeline1 = request.pipeline1
|
|
367
|
+
pipeline2 = request.pipeline2
|
|
368
|
+
|
|
369
|
+
differences = []
|
|
370
|
+
added_jobs = []
|
|
371
|
+
removed_jobs = []
|
|
372
|
+
modified_jobs = []
|
|
373
|
+
added_stages = []
|
|
374
|
+
removed_stages = []
|
|
375
|
+
|
|
376
|
+
# Compare jobs
|
|
377
|
+
pipeline1_jobs = pipeline1.jobs.keys.to_set
|
|
378
|
+
pipeline2_jobs = pipeline2.jobs.keys.to_set
|
|
379
|
+
|
|
380
|
+
added_jobs = (pipeline2_jobs - pipeline1_jobs).to_a
|
|
381
|
+
removed_jobs = (pipeline1_jobs - pipeline2_jobs).to_a
|
|
382
|
+
|
|
383
|
+
common_jobs = pipeline1_jobs & pipeline2_jobs
|
|
384
|
+
common_jobs.each do |job_name|
|
|
385
|
+
job1 = pipeline1.jobs[job_name]
|
|
386
|
+
job2 = pipeline2.jobs[job_name]
|
|
387
|
+
unless jobs_equal?(job1, job2)
|
|
388
|
+
modified_jobs << job_name
|
|
389
|
+
differences << "Job '#{job_name}' has been modified"
|
|
390
|
+
end
|
|
391
|
+
end
|
|
392
|
+
|
|
393
|
+
# Compare stages
|
|
394
|
+
pipeline1_stages = pipeline1.stages.map(&:name).to_set
|
|
395
|
+
pipeline2_stages = pipeline2.stages.map(&:name).to_set
|
|
396
|
+
|
|
397
|
+
added_stages = (pipeline2_stages - pipeline1_stages).to_a
|
|
398
|
+
removed_stages = (pipeline1_stages - pipeline2_stages).to_a
|
|
399
|
+
|
|
400
|
+
if grpc_available?
|
|
401
|
+
Gitlab::Pipeline::ComparePipelinesResponse.new(
|
|
402
|
+
are_identical: differences.empty? && added_jobs.empty? && removed_jobs.empty? && added_stages.empty? && removed_stages.empty?,
|
|
403
|
+
differences: differences,
|
|
404
|
+
added_jobs: added_jobs,
|
|
405
|
+
removed_jobs: removed_jobs,
|
|
406
|
+
modified_jobs: modified_jobs,
|
|
407
|
+
added_stages: added_stages,
|
|
408
|
+
removed_stages: removed_stages
|
|
409
|
+
)
|
|
410
|
+
else
|
|
411
|
+
{
|
|
412
|
+
are_identical: differences.empty? && added_jobs.empty? && removed_jobs.empty? && added_stages.empty? && removed_stages.empty?,
|
|
413
|
+
differences: differences,
|
|
414
|
+
added_jobs: added_jobs,
|
|
415
|
+
removed_jobs: removed_jobs,
|
|
416
|
+
modified_jobs: modified_jobs,
|
|
417
|
+
added_stages: added_stages,
|
|
418
|
+
removed_stages: removed_stages
|
|
419
|
+
}
|
|
420
|
+
end
|
|
421
|
+
end
|
|
422
|
+
|
|
423
|
+
private
|
|
424
|
+
|
|
425
|
+
def grpc_available?
|
|
426
|
+
self.class.grpc_available?
|
|
427
|
+
end
|
|
428
|
+
|
|
429
|
+
# Cross-platform command execution helper
|
|
430
|
+
def capture_command_output(command)
|
|
431
|
+
if RUBY_PLATFORM =~ /mswin|mingw|cygwin/
|
|
432
|
+
# Windows: Use PowerShell call operator for paths with spaces
|
|
433
|
+
if command.include?('"') && command.include?(' ')
|
|
434
|
+
# Command already has quotes, use PowerShell call operator
|
|
435
|
+
powershell_cmd = "& #{command}"
|
|
436
|
+
`powershell -Command "#{powershell_cmd}" 2>nul`.strip
|
|
437
|
+
else
|
|
438
|
+
# Simple command without spaces
|
|
439
|
+
`#{command} 2>nul`.strip
|
|
440
|
+
end
|
|
441
|
+
else
|
|
442
|
+
# Unix-like systems (Linux, macOS)
|
|
443
|
+
`#{command} 2>/dev/null`.strip
|
|
444
|
+
end
|
|
445
|
+
rescue StandardError
|
|
446
|
+
""
|
|
447
|
+
end
|
|
448
|
+
|
|
449
|
+
# Cross-platform executable detection
|
|
450
|
+
def find_executable(executable_name)
|
|
451
|
+
if RUBY_PLATFORM =~ /mswin|mingw|cygwin/
|
|
452
|
+
# Windows - check common locations and extensions
|
|
453
|
+
extensions = ['.exe', '.bat', '.cmd', '']
|
|
454
|
+
paths = ENV['PATH'].split(File::PATH_SEPARATOR)
|
|
455
|
+
|
|
456
|
+
extensions.each do |ext|
|
|
457
|
+
full_name = executable_name + ext
|
|
458
|
+
paths.each do |path|
|
|
459
|
+
full_path = File.join(path, full_name)
|
|
460
|
+
return full_path if File.executable?(full_path)
|
|
461
|
+
end
|
|
462
|
+
end
|
|
463
|
+
|
|
464
|
+
# Try direct execution
|
|
465
|
+
return executable_name if system("#{executable_name} --version", out: File::NULL, err: File::NULL)
|
|
466
|
+
else
|
|
467
|
+
# Unix-like systems
|
|
468
|
+
return executable_name if system("which #{executable_name} > /dev/null 2>&1")
|
|
469
|
+
end
|
|
470
|
+
|
|
471
|
+
nil
|
|
472
|
+
end
|
|
473
|
+
|
|
474
|
+
# Create execution result object
|
|
475
|
+
def create_execution_result(execution_id, start_time)
|
|
476
|
+
if grpc_available?
|
|
477
|
+
Gitlab::Pipeline::PipelineExecutionResult.new(
|
|
478
|
+
execution_id: execution_id,
|
|
479
|
+
status: Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_RUNNING,
|
|
480
|
+
started_at: start_time.iso8601,
|
|
481
|
+
job_results: [],
|
|
482
|
+
logs: [],
|
|
483
|
+
errors: [],
|
|
484
|
+
warnings: []
|
|
485
|
+
)
|
|
486
|
+
else
|
|
487
|
+
result = Object.new
|
|
488
|
+
|
|
489
|
+
# Initialize instance variables
|
|
490
|
+
result.instance_variable_set(:@execution_id, execution_id)
|
|
491
|
+
result.instance_variable_set(:@status, :running)
|
|
492
|
+
result.instance_variable_set(:@started_at, start_time.iso8601)
|
|
493
|
+
result.instance_variable_set(:@job_results, [])
|
|
494
|
+
result.instance_variable_set(:@logs, [])
|
|
495
|
+
result.instance_variable_set(:@errors, [])
|
|
496
|
+
result.instance_variable_set(:@warnings, [])
|
|
497
|
+
|
|
498
|
+
# Add getter and setter methods for fallback
|
|
499
|
+
def result.execution_id
|
|
500
|
+
@execution_id
|
|
501
|
+
end
|
|
502
|
+
|
|
503
|
+
def result.execution_id=(value)
|
|
504
|
+
@execution_id = value
|
|
505
|
+
end
|
|
506
|
+
|
|
507
|
+
def result.status=(value)
|
|
508
|
+
@status = value
|
|
509
|
+
end
|
|
510
|
+
|
|
511
|
+
def result.status
|
|
512
|
+
@status
|
|
513
|
+
end
|
|
514
|
+
|
|
515
|
+
def result.started_at
|
|
516
|
+
@started_at
|
|
517
|
+
end
|
|
518
|
+
|
|
519
|
+
def result.started_at=(value)
|
|
520
|
+
@started_at = value
|
|
521
|
+
end
|
|
522
|
+
|
|
523
|
+
def result.finished_at=(value)
|
|
524
|
+
@finished_at = value
|
|
525
|
+
end
|
|
526
|
+
|
|
527
|
+
def result.finished_at
|
|
528
|
+
@finished_at
|
|
529
|
+
end
|
|
530
|
+
|
|
531
|
+
def result.total_duration_seconds=(value)
|
|
532
|
+
@total_duration_seconds = value
|
|
533
|
+
end
|
|
534
|
+
|
|
535
|
+
def result.total_duration_seconds
|
|
536
|
+
@total_duration_seconds
|
|
537
|
+
end
|
|
538
|
+
|
|
539
|
+
def result.podman_version=(value)
|
|
540
|
+
@podman_version = value
|
|
541
|
+
end
|
|
542
|
+
|
|
543
|
+
def result.podman_version
|
|
544
|
+
@podman_version
|
|
545
|
+
end
|
|
546
|
+
|
|
547
|
+
def result.execution_host=(value)
|
|
548
|
+
@execution_host = value
|
|
549
|
+
end
|
|
550
|
+
|
|
551
|
+
def result.execution_host
|
|
552
|
+
@execution_host
|
|
553
|
+
end
|
|
554
|
+
|
|
555
|
+
def result.job_results
|
|
556
|
+
@job_results
|
|
557
|
+
end
|
|
558
|
+
|
|
559
|
+
def result.job_results=(value)
|
|
560
|
+
@job_results = value
|
|
561
|
+
end
|
|
562
|
+
|
|
563
|
+
def result.errors
|
|
564
|
+
@errors
|
|
565
|
+
end
|
|
566
|
+
|
|
567
|
+
def result.errors=(value)
|
|
568
|
+
@errors = value
|
|
569
|
+
end
|
|
570
|
+
|
|
571
|
+
def result.warnings
|
|
572
|
+
@warnings
|
|
573
|
+
end
|
|
574
|
+
|
|
575
|
+
def result.warnings=(value)
|
|
576
|
+
@warnings = value
|
|
577
|
+
end
|
|
578
|
+
|
|
579
|
+
def result.logs
|
|
580
|
+
@logs
|
|
581
|
+
end
|
|
582
|
+
|
|
583
|
+
def result.logs=(value)
|
|
584
|
+
@logs = value
|
|
585
|
+
end
|
|
586
|
+
|
|
587
|
+
result
|
|
588
|
+
end
|
|
589
|
+
end
|
|
590
|
+
|
|
591
|
+
# Create error response
|
|
592
|
+
def create_error_response(message)
|
|
593
|
+
error_result = create_execution_result(SecureRandom.uuid, Time.now)
|
|
594
|
+
error_result.status = grpc_available? ? Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_FAILED : :failed
|
|
595
|
+
error_result.errors.push(message)
|
|
596
|
+
error_result.logs.push("Error: #{message}")
|
|
597
|
+
|
|
598
|
+
if grpc_available?
|
|
599
|
+
Gitlab::Pipeline::ExecutePipelineResponse.new(
|
|
600
|
+
result: error_result,
|
|
601
|
+
success: false,
|
|
602
|
+
errors: error_result.errors,
|
|
603
|
+
warnings: error_result.warnings
|
|
604
|
+
)
|
|
605
|
+
else
|
|
606
|
+
{
|
|
607
|
+
result: error_result,
|
|
608
|
+
success: false,
|
|
609
|
+
errors: error_result.errors,
|
|
610
|
+
warnings: error_result.warnings
|
|
611
|
+
}
|
|
612
|
+
end
|
|
613
|
+
end
|
|
614
|
+
|
|
615
|
+
# Execute a single stage
|
|
616
|
+
def execute_stage(stage, pipeline, working_dir, request, execution_id, podman_executable)
|
|
617
|
+
stage_result = if grpc_available?
|
|
618
|
+
Gitlab::Pipeline::StageExecutionResult.new(
|
|
619
|
+
stage_name: stage.name,
|
|
620
|
+
status: Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_RUNNING,
|
|
621
|
+
started_at: Time.now.iso8601,
|
|
622
|
+
job_results: []
|
|
623
|
+
)
|
|
624
|
+
else
|
|
625
|
+
stage_result = Object.new
|
|
626
|
+
|
|
627
|
+
# Initialize instance variables
|
|
628
|
+
stage_result.instance_variable_set(:@stage_name, stage.name)
|
|
629
|
+
stage_result.instance_variable_set(:@status, :running)
|
|
630
|
+
stage_result.instance_variable_set(:@started_at, Time.now.iso8601)
|
|
631
|
+
stage_result.instance_variable_set(:@job_results, [])
|
|
632
|
+
|
|
633
|
+
# Add getter and setter methods for fallback
|
|
634
|
+
def stage_result.stage_name
|
|
635
|
+
@stage_name
|
|
636
|
+
end
|
|
637
|
+
|
|
638
|
+
def stage_result.stage_name=(value)
|
|
639
|
+
@stage_name = value
|
|
640
|
+
end
|
|
641
|
+
|
|
642
|
+
def stage_result.status=(value)
|
|
643
|
+
@status = value
|
|
644
|
+
end
|
|
645
|
+
|
|
646
|
+
def stage_result.status
|
|
647
|
+
@status
|
|
648
|
+
end
|
|
649
|
+
|
|
650
|
+
def stage_result.started_at
|
|
651
|
+
@started_at
|
|
652
|
+
end
|
|
653
|
+
|
|
654
|
+
def stage_result.started_at=(value)
|
|
655
|
+
@started_at = value
|
|
656
|
+
end
|
|
657
|
+
|
|
658
|
+
def stage_result.finished_at=(value)
|
|
659
|
+
@finished_at = value
|
|
660
|
+
end
|
|
661
|
+
|
|
662
|
+
def stage_result.finished_at
|
|
663
|
+
@finished_at
|
|
664
|
+
end
|
|
665
|
+
|
|
666
|
+
def stage_result.job_results
|
|
667
|
+
@job_results
|
|
668
|
+
end
|
|
669
|
+
|
|
670
|
+
def stage_result.job_results=(value)
|
|
671
|
+
@job_results = value
|
|
672
|
+
end
|
|
673
|
+
|
|
674
|
+
stage_result
|
|
675
|
+
end
|
|
676
|
+
|
|
677
|
+
# Find jobs for this stage
|
|
678
|
+
stage_jobs = pipeline.jobs.select { |name, job| job.stage == stage.name }
|
|
679
|
+
|
|
680
|
+
stage_jobs.each do |job_name, job|
|
|
681
|
+
job_result = execute_job(job_name, job, pipeline, working_dir, request, execution_id, podman_executable)
|
|
682
|
+
stage_result.job_results.push(job_result)
|
|
683
|
+
end
|
|
684
|
+
|
|
685
|
+
# Determine stage status
|
|
686
|
+
stage_result.status = determine_stage_status(stage_result.job_results)
|
|
687
|
+
stage_result.finished_at = Time.now.iso8601
|
|
688
|
+
|
|
689
|
+
stage_result
|
|
690
|
+
end
|
|
691
|
+
|
|
692
|
+
# Execute a single job inside a Podman container and return a job-result
# object describing the outcome.
#
# Two result representations are supported: when the protobuf/gRPC classes
# are loadable, a Gitlab::Pipeline::JobExecutionResult message is used;
# otherwise a duck-typed Object with singleton accessors mirroring the same
# fields is built by hand.
#
# @param job_name [String] key of the job within the pipeline
# @param job [Object] job definition; #image is read here, the rest is
#   consumed by build_job_script
# @param pipeline [Object] pipeline responding to #variables and #image
# @param working_dir [String] host directory mounted into the container at
#   /workspace (read-write)
# @param request [Object] execution request; either a message responding to
#   #dry_run / #variables or a plain Hash keyed by :dry_run / :variables
# @param execution_id [String] unique id used to derive the container name
# @param podman_executable [String] path to the podman binary
# @return [Object] job result (protobuf message or fallback object)
def execute_job(job_name, job, pipeline, working_dir, request, execution_id, podman_executable)
  # Build the result holder up-front so the rescue clause at the bottom can
  # always record a failure on it.
  job_result = if grpc_available?
    Gitlab::Pipeline::JobExecutionResult.new(
      job_name: job_name,
      status: Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_RUNNING,
      started_at: Time.now.iso8601,
      logs: [],
      errors: []
    )
  else
    # Fallback mode: hand-rolled object exposing the same accessor surface
    # as the protobuf message. (The inner assignment is local to this
    # branch; its final expression is the value of the whole `if`.)
    job_result = Object.new

    # Initialize instance variables
    job_result.instance_variable_set(:@job_name, job_name)
    job_result.instance_variable_set(:@status, :running)
    job_result.instance_variable_set(:@started_at, Time.now.iso8601)
    job_result.instance_variable_set(:@logs, [])
    job_result.instance_variable_set(:@errors, [])
    job_result.instance_variable_set(:@stdout, "")
    job_result.instance_variable_set(:@stderr, "")

    # Add getter and setter methods for fallback
    def job_result.job_name
      @job_name
    end

    def job_result.job_name=(value)
      @job_name = value
    end

    def job_result.status=(value)
      @status = value
    end

    def job_result.status
      @status
    end

    def job_result.started_at
      @started_at
    end

    def job_result.started_at=(value)
      @started_at = value
    end

    def job_result.finished_at=(value)
      @finished_at = value
    end

    def job_result.finished_at
      @finished_at
    end

    def job_result.exit_code=(value)
      @exit_code = value
    end

    def job_result.exit_code
      @exit_code
    end

    def job_result.logs
      @logs
    end

    def job_result.logs=(value)
      @logs = value
    end

    def job_result.errors
      @errors
    end

    def job_result.errors=(value)
      @errors = value
    end

    def job_result.stdout=(value)
      @stdout = value
    end

    def job_result.stdout
      @stdout
    end

    def job_result.stderr=(value)
      @stderr = value
    end

    def job_result.stderr
      @stderr
    end

    job_result
  end

  begin
    # Check if this is a dry run; the request may be a message or a Hash.
    dry_run = if request.respond_to?(:dry_run)
      request.dry_run
    else
      request[:dry_run]
    end

    if dry_run
      # Dry runs short-circuit before touching Podman and report success.
      job_result.logs.push("DRY RUN: Would execute job: #{job_name}")
      job_result.logs.push("DRY RUN: Skipping actual execution")
      job_result.status = grpc_available? ? Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_SUCCESS : :success
      job_result.exit_code = 0
      job_result.finished_at = Time.now.iso8601
      return job_result
    end

    job_result.logs.push("Executing job: #{job_name}")

    # Use the new Podman service for cleaner execution
    podman_service = Makit::Podman::Podman.new(podman_executable: podman_executable)

    # Build script content
    script_content = build_job_script(job, pipeline)

    # Prepare environment variables: pipeline-level first, then request-level
    # overrides (last write wins on key collisions).
    environment = {}
    pipeline.variables.each { |k, v| environment[k] = v }
    (request.respond_to?(:variables) ? request.variables : request[:variables]).each { |k, v| environment[k] = v }

    # Prepare volume mounts
    volume_mounts = [
      podman_service.create_volume_mount(
        host_path: working_dir,
        container_path: "/workspace",
        mode: "rw"
      )
    ]

    # Determine image to use: job image wins over pipeline image, with a
    # last-resort default of alpine.
    image = (job.image && !job.image.empty?) ? job.image : pipeline.image
    image = "alpine:latest" if image.empty?

    job_result.logs.push("Using image: #{image}")
    job_result.logs.push("Working directory: #{working_dir}")

    # Execute script using Podman service. Container name is sanitized to
    # characters Podman accepts.
    result = podman_service.run_script(
      image,
      script_content,
      environment: environment,
      volume_mounts: volume_mounts,
      working_directory: "/tmp",
      timeout: 300,
      auto_remove: true,
      name: "#{execution_id}-#{job_name}".gsub(/[^a-zA-Z0-9_-]/, '-')
    )

    # Process results. run_script returns either a message responding to
    # #success (gRPC mode) or a Hash (fallback mode).
    if result.respond_to?(:success)
      # gRPC mode
      job_result.exit_code = result.exit_code
      job_result.finished_at = Time.now.iso8601

      if result.success
        job_result.status = grpc_available? ? Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_SUCCESS : :success
        job_result.stdout = result.stdout if result.stdout
        job_result.stderr = result.stderr if result.stderr
      else
        job_result.status = grpc_available? ? Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_FAILED : :failed
        job_result.errors.push("Job failed with exit code #{result.exit_code}")
        job_result.stdout = result.stdout if result.stdout
        job_result.stderr = result.stderr if result.stderr
      end
    else
      # Fallback mode
      job_result.exit_code = result[:exit_code]
      job_result.finished_at = Time.now.iso8601

      if result[:success]
        job_result.status = grpc_available? ? Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_SUCCESS : :success
        job_result.stdout = result[:stdout] if result[:stdout]
        job_result.stderr = result[:stderr] if result[:stderr]
      else
        job_result.status = grpc_available? ? Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_FAILED : :failed
        job_result.errors.push("Job failed with exit code #{result[:exit_code]}")
        job_result.stdout = result[:stdout] if result[:stdout]
        job_result.stderr = result[:stderr] if result[:stderr]
      end
    end

  rescue StandardError => e
    # Any unexpected error is folded into the result rather than propagated,
    # so one failing job cannot abort the surrounding stage loop.
    job_result.status = grpc_available? ? Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_FAILED : :failed
    job_result.errors.push("Job execution error: #{e.message}")
    job_result.logs.push("Error: #{e.message}")
    job_result.finished_at = Time.now.iso8601
  end

  job_result
end
|
|
890
|
+
|
|
891
|
+
# Assemble the POSIX shell script that runs one job inside the container.
#
# The script concatenates: shebang + `set -e`, workspace permission fixes
# (required so Bundler accepts the mounted directories), the pipeline's
# before_script (with any vendor-path Bundler config redirected to a safe
# /tmp/bundle), the job's own script, and the pipeline's after_script.
# Every user-supplied line passes through normalize_script_line to strip
# Windows line endings.
#
# @param job [Object] job responding to #script (Array<String> or nil)
# @param pipeline [Object] pipeline responding to #before_script/#after_script
# @return [String] newline-joined shell script
def build_job_script(job, pipeline)
  script_lines = []

  # Add shebang and environment setup for Unix containers (ensure clean lines)
  script_lines.push("#!/bin/sh")
  script_lines.push("set -e")

  # Fix directory permissions for Bundler security BEFORE any commands run.
  # BUGFIX: the previous version chmod-ed /workspace/vendor/ruby/ruby before
  # any mkdir had created it; under `set -e` that aborted the whole script on
  # a fresh workspace. Create the full vendor tree first, then chmod each
  # level.
  script_lines.push("chmod 755 /workspace")
  script_lines.push("mkdir -p /workspace/vendor/ruby/ruby/3.4.0/gems")
  script_lines.push("chmod 755 /workspace/vendor")
  script_lines.push("chmod 755 /workspace/vendor/ruby")
  script_lines.push("chmod 755 /workspace/vendor/ruby/ruby")
  script_lines.push("chmod 755 /workspace/vendor/ruby/ruby/3.4.0/gems")

  # Add before_script if present, but modify Bundler commands to use a secure
  # path under /tmp instead of the shared vendor directory.
  if pipeline.before_script && !pipeline.before_script.empty?
    pipeline.before_script.each do |line|
      # Normalize line endings and fix command substitutions
      normalized_line = normalize_script_line(line)

      # Replace insecure Bundler path configuration with secure path
      if normalized_line.include?("bundle config set --local path 'vendor/ruby'")
        script_lines.push("bundle config set --local path '/tmp/bundle'")
        script_lines.push("mkdir -p /tmp/bundle")
        script_lines.push("chmod 755 /tmp/bundle")
      else
        script_lines.push(normalized_line)
      end
    end
  end

  # Add job script
  if job.script && !job.script.empty?
    job.script.each do |line|
      script_lines.push(normalize_script_line(line))
    end
  end

  # Add after_script if present. NOTE: with `set -e`, after_script does not
  # run if an earlier command failed.
  if pipeline.after_script && !pipeline.after_script.empty?
    pipeline.after_script.each do |line|
      script_lines.push(normalize_script_line(line))
    end
  end

  script_lines.join("\n")
end
|
|
945
|
+
|
|
946
|
+
# Normalize a script line to handle Windows line endings and wrap simple
# command substitutions whose output may carry line endings.
#
# Fixes over the previous version:
# - The dedicated `$(nproc)` gsub was redundant: the generic command-
#   substitution branch below produces the exact same replacement.
# - The whitelist regex used `^...$`, which match per-LINE in Ruby; a
#   substitution containing an embedded newline (e.g. "$(evil\npwd)") could
#   therefore slip through the whitelist. `\A`/`\z` anchor the whole string.
#
# @param line [String] raw script line (possibly with CR/CRLF endings)
# @return [String] normalized line safe for a POSIX /bin/sh script
def normalize_script_line(line)
  # Convert CRLF/CR to LF and trim surrounding whitespace.
  normalized = line.gsub(/\r\n/, "\n").gsub(/\r/, "\n").strip

  # Wrap whitelisted, argument-free command substitutions so their output is
  # stripped of \r/\n (some images emit CRLF from these commands).
  normalized.gsub(/\$\(([^)]+)\)/) do |match|
    cmd = Regexp.last_match(1)
    if cmd.match?(/\A(nproc|hostname|pwd|whoami)\z/)
      "$(#{cmd} | tr -d \"\\r\\n\")"
    else
      match
    end
  end
end
|
|
968
|
+
|
|
969
|
+
# Determine the overall pipeline status from individual job results.
#
# A pipeline succeeds when no job failed; an empty result set counts as
# success. Any failed job marks the whole pipeline failed.
#
# Improvements: grpc_available? is resolved once instead of per-job, and
# `any?` short-circuits on the first failure instead of materializing a
# full `select`-ed array just to test emptiness.
#
# @param job_results [Array] job results responding to #status
# @return [Object] PIPELINE_STATUS_SUCCESS/FAILED constant in gRPC mode,
#   :success/:failed symbol in fallback mode
def determine_pipeline_status(job_results)
  if grpc_available?
    failed = Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_FAILED
    success = Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_SUCCESS
  else
    failed = :failed
    success = :success
  end

  # An empty result set falls through to success because any? is false.
  job_results.any? { |job| job.status == failed } ? failed : success
end
|
|
987
|
+
|
|
988
|
+
# Determine a stage's status from its job results.
#
# Mirrors determine_pipeline_status: success unless at least one job failed,
# with an empty result set counting as success.
#
# Improvements: grpc_available? is resolved once instead of per-job, and
# `any?` short-circuits instead of building a throwaway `select` array.
#
# @param job_results [Array] job results responding to #status
# @return [Object] PIPELINE_STATUS_SUCCESS/FAILED constant in gRPC mode,
#   :success/:failed symbol in fallback mode
def determine_stage_status(job_results)
  if grpc_available?
    failed = Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_FAILED
    success = Gitlab::Pipeline::PipelineStatus::PIPELINE_STATUS_SUCCESS
  else
    failed = :failed
    success = :success
  end

  # An empty result set falls through to success because any? is false.
  job_results.any? { |job| job.status == failed } ? failed : success
end
|
|
1006
|
+
|
|
1007
|
+
# Build an empty pipeline object appropriate for the current mode:
# a protobuf message when the gRPC classes are loadable, otherwise a
# duck-typed stand-in with the same accessor surface.
def create_empty_pipeline
  return Gitlab::Pipeline::Pipeline.new if grpc_available?

  create_empty_pipeline_fallback
end
|
|
1014
|
+
|
|
1015
|
+
# Build a duck-typed pipeline object for use when the protobuf classes are
# not loadable. The object exposes the subset of the Pipeline message
# interface used by this file: lazily-defaulted collection readers, plus
# read/write scalar accessors.
#
# @return [Object] pipeline stand-in
def create_empty_pipeline_fallback
  pipeline = Object.new

  # Collection readers: memoize an empty container on first access
  # (equivalent to `@ivar ||= {}` / `@ivar ||= []`). No writers, matching
  # the established interface.
  { jobs: {}, variables: {}, stages: [], before_script: [], after_script: [] }.each do |attr, empty|
    ivar = :"@#{attr}"
    pipeline.define_singleton_method(attr) do
      instance_variable_get(ivar) || instance_variable_set(ivar, empty)
    end
  end

  # Scalar accessors: reader memoizes the default, writer overwrites.
  { image: "", timeout: 0 }.each do |attr, default|
    ivar = :"@#{attr}"
    pipeline.define_singleton_method(attr) do
      instance_variable_get(ivar) || instance_variable_set(ivar, default)
    end
    pipeline.define_singleton_method(:"#{attr}=") do |value|
      instance_variable_set(ivar, value)
    end
  end

  # cache defaults to nil and is writable.
  pipeline.define_singleton_method(:cache) { @cache ||= nil }
  pipeline.define_singleton_method(:cache=) { |value| @cache = value }

  pipeline
end
|
|
1064
|
+
|
|
1065
|
+
# Convert parsed .gitlab-ci.yml data into a pipeline object, dispatching on
# whether the protobuf/gRPC classes are available.
def convert_yaml_to_pipeline(yaml_data)
  return convert_yaml_to_pipeline_grpc(yaml_data) if grpc_available?

  convert_yaml_to_pipeline_fallback(yaml_data)
end
|
|
1073
|
+
|
|
1074
|
+
# Convert parsed .gitlab-ci.yml data into a Gitlab::Pipeline::Pipeline
# protobuf message.
#
# Top-level reserved keywords are mapped to dedicated fields; every other
# Hash-valued top-level key is treated as a job definition.
#
# @param yaml_data [Hash] parsed YAML document (string keys)
# @return [Gitlab::Pipeline::Pipeline]
def convert_yaml_to_pipeline_grpc(yaml_data)
  pipeline = Gitlab::Pipeline::Pipeline.new

  pipeline.image = yaml_data["image"] || ""
  pipeline.timeout = yaml_data["timeout"] || 0

  # Convert variables (map field requires string values, hence to_s).
  if yaml_data["variables"]
    yaml_data["variables"].each do |key, value|
      pipeline.variables[key] = value.to_s
    end
  end

  # Convert cache.
  # NOTE(review): expire_in defaults to 0 here, but GitLab CI encodes
  # expire_in as a duration string like "1 week" — confirm the proto field
  # type before relying on numeric YAML input.
  if yaml_data["cache"]
    cache = Gitlab::Pipeline::Cache.new
    cache.key = yaml_data["cache"]["key"] || ""
    cache.policy = yaml_data["cache"]["policy"] || ""
    cache.expire_in = yaml_data["cache"]["expire_in"] || 0
    if yaml_data["cache"]["paths"]
      cache.paths.concat(yaml_data["cache"]["paths"])
    end
    pipeline.cache = cache
  end

  # Convert before_script and after_script (repeated string fields).
  pipeline.before_script.concat(yaml_data["before_script"] || [])
  pipeline.after_script.concat(yaml_data["after_script"] || [])

  # Convert stages: each entry becomes a Stage message carrying only a name.
  if yaml_data["stages"]
    yaml_data["stages"].each do |stage_name|
      stage = Gitlab::Pipeline::Stage.new
      stage.name = stage_name
      pipeline.stages << stage
    end
  end

  # Convert jobs: any non-reserved top-level key whose value is a Hash.
  # Non-Hash values under non-reserved keys are silently ignored.
  yaml_data.each do |key, value|
    next if %w[image variables cache before_script after_script stages include extends services tags timeout].include?(key)

    if value.is_a?(Hash)
      job = convert_yaml_to_job_grpc(value)
      pipeline.jobs[key] = job
    end
  end

  pipeline
end
|
|
1124
|
+
|
|
1125
|
+
# Convert parsed .gitlab-ci.yml data into a duck-typed pipeline object
# (used when the protobuf/gRPC classes are not loadable).
#
# @param yaml_data [Hash] parsed YAML document (string keys)
# @return [Object] pipeline stand-in populated from the YAML
def convert_yaml_to_pipeline_fallback(yaml_data)
  pipeline = create_empty_pipeline_fallback

  # Scalar fields.
  pipeline.image = yaml_data["image"] || ""
  pipeline.timeout = yaml_data["timeout"] || 0

  # Variables and shared scripts.
  vars = yaml_data["variables"]
  pipeline.variables.merge!(vars) if vars
  pipeline.before_script.concat(yaml_data["before_script"] || [])
  pipeline.after_script.concat(yaml_data["after_script"] || [])

  # Stages: each name becomes a minimal object responding to name/name=.
  (yaml_data["stages"] || []).each do |stage_name|
    stage = Object.new
    stage.define_singleton_method(:name) { @name }
    stage.define_singleton_method(:name=) { |value| @name = value }
    stage.name = stage_name
    pipeline.stages << stage
  end

  # Jobs: any non-reserved top-level key whose value is a Hash.
  reserved = %w[image variables cache before_script after_script stages include extends services tags timeout]
  yaml_data.each do |key, value|
    next if reserved.include?(key) || !value.is_a?(Hash)

    pipeline.jobs[key] = convert_yaml_to_job_fallback(value)
  end

  pipeline
end
|
|
1168
|
+
|
|
1169
|
+
# Convert one YAML job entry into a job object, dispatching on whether the
# protobuf/gRPC classes are available.
def convert_yaml_to_job(yaml_data)
  return convert_yaml_to_job_grpc(yaml_data) if grpc_available?

  convert_yaml_to_job_fallback(yaml_data)
end
|
|
1177
|
+
|
|
1178
|
+
# Convert one YAML job entry into a Gitlab::Pipeline::Job protobuf message.
#
# @param yaml_data [Hash] one job hash from the parsed .gitlab-ci.yml
# @return [Gitlab::Pipeline::Job]
def convert_yaml_to_job_grpc(yaml_data)
  job = Gitlab::Pipeline::Job.new

  job.stage = yaml_data["stage"] || ""
  job.image = yaml_data["image"] || ""
  # artifacts_when is set twice: here via dig, and again below inside the
  # artifacts block when an "artifacts" hash is present (same value).
  job.artifacts_when = yaml_data.dig("artifacts", "when") || ""
  job.coverage = yaml_data["coverage"] || ""
  job.allow_failure = yaml_data["allow_failure"] || false
  job.timeout = yaml_data["timeout"] || 0

  # Convert script arrays (repeated fields; concat requires array input).
  # NOTE(review): "when" and "rules" are usually a scalar string / array of
  # hashes in GitLab CI — concat on a non-array YAML value would raise here.
  # Confirm what shapes upstream YAML is expected to carry.
  job.script.concat(yaml_data["script"] || [])
  job.before_script.concat(yaml_data["before_script"] || [])
  job.after_script.concat(yaml_data["after_script"] || [])
  job.dependencies.concat(yaml_data["dependencies"] || [])
  job.services.concat(yaml_data["services"] || [])
  job.tags.concat(yaml_data["tags"] || [])
  job.needs.concat(yaml_data["needs"] || [])
  job.when.concat(yaml_data["when"] || [])
  job.only.concat(yaml_data["only"] || [])
  job.except.concat(yaml_data["except"] || [])
  job.rules.concat(yaml_data["rules"] || [])

  # Convert artifacts.
  # NOTE(review): artifacts_expire_in defaults to 0, but GitLab encodes
  # expire_in as a duration string like "30 days" — confirm the field type.
  if yaml_data["artifacts"]
    artifacts = yaml_data["artifacts"]
    job.artifacts_paths.concat(artifacts["paths"] || [])
    job.artifacts_expire_in = artifacts["expire_in"] || 0
    job.artifacts_when = artifacts["when"] || ""
    job.artifacts_name.concat(artifacts["name"] || [])
    job.artifacts_untracked.concat(artifacts["untracked"] || [])
    job.artifacts_reports.concat(artifacts["reports"] || [])
    job.artifacts_expose_as.concat(artifacts["expose_as"] || [])
  end

  # Convert variables (map field requires string values, hence to_s).
  if yaml_data["variables"]
    yaml_data["variables"].each do |key, value|
      job.variables[key] = value.to_s
    end
  end

  job
end
|
|
1222
|
+
|
|
1223
|
+
# Build a duck-typed job object (used when the protobuf/gRPC classes are not
# loadable) and populate it from one YAML job entry.
#
# The returned object mirrors the protobuf Job accessor surface used in this
# file: lazily-defaulted readers for every field, plus writers only for the
# scalar fields this method assigns.
#
# @param yaml_data [Hash] one job hash from the parsed .gitlab-ci.yml
# @return [Object] job stand-in
def convert_yaml_to_job_fallback(yaml_data)
  job = Object.new

  # Scalar accessors: reader memoizes its default (mirroring `@ivar ||= x`),
  # writer overwrites.
  { stage: "", image: "", coverage: "", artifacts_when: "", allow_failure: false, timeout: 0 }.each do |attr, default|
    ivar = :"@#{attr}"
    job.define_singleton_method(attr) do
      instance_variable_get(ivar) || instance_variable_set(ivar, default)
    end
    job.define_singleton_method(:"#{attr}=") do |value|
      instance_variable_set(ivar, value)
    end
  end

  # List readers (no writers, matching the established interface); each
  # memoizes an empty array on first access.
  %i[script before_script after_script dependencies services tags needs when only except rules artifacts_paths].each do |attr|
    ivar = :"@#{attr}"
    job.define_singleton_method(attr) do
      instance_variable_get(ivar) || instance_variable_set(ivar, [])
    end
  end

  # variables is the lone hash-valued collection.
  job.define_singleton_method(:variables) { @variables ||= {} }

  # Populate scalar fields from the YAML hash.
  job.stage = yaml_data["stage"] || ""
  job.image = yaml_data["image"] || ""
  job.coverage = yaml_data["coverage"] || ""
  job.allow_failure = yaml_data["allow_failure"] || false
  job.timeout = yaml_data["timeout"] || 0

  # Populate list fields.
  job.script.concat(yaml_data["script"] || [])
  job.before_script.concat(yaml_data["before_script"] || [])
  job.after_script.concat(yaml_data["after_script"] || [])
  job.dependencies.concat(yaml_data["dependencies"] || [])
  job.services.concat(yaml_data["services"] || [])
  job.tags.concat(yaml_data["tags"] || [])
  job.needs.concat(yaml_data["needs"] || [])
  job.when.concat(yaml_data["when"] || [])
  job.only.concat(yaml_data["only"] || [])
  job.except.concat(yaml_data["except"] || [])
  job.rules.concat(yaml_data["rules"] || [])

  # Populate artifacts (only paths and when are modeled in fallback mode).
  if yaml_data["artifacts"]
    artifacts = yaml_data["artifacts"]
    job.artifacts_paths.concat(artifacts["paths"] || [])
    job.artifacts_when = artifacts["when"] || ""
  end

  # Populate variables.
  job.variables.merge!(yaml_data["variables"]) if yaml_data["variables"]

  job
end
|
|
1360
|
+
|
|
1361
|
+
# Serialize a pipeline object back into a GitLab CI YAML document.
#
# Fields holding their default values ("" / 0 / empty collections) are
# omitted so the emitted YAML stays minimal.
#
# @param pipeline [Object] pipeline (protobuf message or fallback object)
#   exposing image, timeout, variables, cache, before_script, after_script,
#   stages, jobs, include, extends, services, tags
# @param pretty_format [Boolean] retained for backward compatibility only:
#   both historical branches produced identical output (`YAML.dump(h)` and
#   `h.to_yaml` are the same Psych call), so the flag selects nothing.
# @param indent_size [Integer] retained for backward compatibility; it was
#   never used by the original implementation either.
# @return [String] YAML document
def convert_pipeline_to_yaml(pipeline, pretty_format = true, indent_size = 2)
  yaml_data = {}

  # Add basic fields
  yaml_data["image"] = pipeline.image unless pipeline.image.empty?
  yaml_data["timeout"] = pipeline.timeout unless pipeline.timeout == 0

  # Add variables
  yaml_data["variables"] = pipeline.variables.to_h unless pipeline.variables.empty?

  # Add cache (only when a key is set)
  if pipeline.cache && !pipeline.cache.key.empty?
    cache_data = { "key" => pipeline.cache.key }
    cache_data["policy"] = pipeline.cache.policy unless pipeline.cache.policy.empty?
    cache_data["expire_in"] = pipeline.cache.expire_in unless pipeline.cache.expire_in == 0
    cache_data["paths"] = pipeline.cache.paths.to_a unless pipeline.cache.paths.empty?
    yaml_data["cache"] = cache_data
  end

  # Add scripts
  yaml_data["before_script"] = pipeline.before_script.to_a unless pipeline.before_script.empty?
  yaml_data["after_script"] = pipeline.after_script.to_a unless pipeline.after_script.empty?

  # Add stages
  yaml_data["stages"] = pipeline.stages.map(&:name) unless pipeline.stages.empty?

  # Add jobs
  pipeline.jobs.each do |job_name, job|
    yaml_data[job_name] = convert_job_to_yaml(job)
  end

  # Add other fields.
  # NOTE(review): fallback pipeline objects (create_empty_pipeline_fallback)
  # do not define include/extends/services/tags, so this section appears to
  # assume a gRPC pipeline — confirm before calling with a fallback object.
  yaml_data["include"] = pipeline.include.to_a unless pipeline.include.empty?
  yaml_data["extends"] = pipeline.extends.to_a unless pipeline.extends.empty?
  yaml_data["services"] = pipeline.services.to_a unless pipeline.services.empty?
  yaml_data["tags"] = pipeline.tags.to_a unless pipeline.tags.empty?

  # The original branched on pretty_format between YAML.dump(yaml_data) and
  # yaml_data.to_yaml, which emit identical output; the branch was collapsed.
  YAML.dump(yaml_data)
end
|
|
1410
|
+
|
|
1411
|
+
# Serialize one job object into its GitLab CI YAML hash. Fields at their
# default values ("" / 0 / false / empty collections) are omitted.
#
# @param job [Object] job (protobuf message or fallback object)
# @return [Hash] YAML-ready job hash with string keys
def convert_job_to_yaml(job)
  data = {}

  # Scalar fields, skipped at their defaults.
  data["stage"] = job.stage unless job.stage.empty?
  data["image"] = job.image unless job.image.empty?
  data["coverage"] = job.coverage unless job.coverage.empty?
  data["allow_failure"] = job.allow_failure if job.allow_failure
  data["timeout"] = job.timeout unless job.timeout == 0

  # List-valued fields, emitted only when non-empty; to_a normalizes
  # protobuf repeated fields to plain arrays. Order matches the original
  # insertion order.
  {
    "script" => job.script,
    "before_script" => job.before_script,
    "after_script" => job.after_script,
    "dependencies" => job.dependencies,
    "services" => job.services,
    "tags" => job.tags,
    "needs" => job.needs,
    "when" => job.when,
    "only" => job.only,
    "except" => job.except,
    "rules" => job.rules,
  }.each do |key, list|
    data[key] = list.to_a unless list.empty?
  end

  # Artifacts block, present only when any artifact field is non-default.
  if !job.artifacts_paths.empty? || !job.artifacts_when.empty? || job.artifacts_expire_in != 0
    artifacts = {}
    artifacts["paths"] = job.artifacts_paths.to_a unless job.artifacts_paths.empty?
    artifacts["when"] = job.artifacts_when unless job.artifacts_when.empty?
    artifacts["expire_in"] = job.artifacts_expire_in unless job.artifacts_expire_in == 0
    artifacts["name"] = job.artifacts_name.to_a unless job.artifacts_name.empty?
    artifacts["untracked"] = job.artifacts_untracked.to_a unless job.artifacts_untracked.empty?
    artifacts["reports"] = job.artifacts_reports.to_a unless job.artifacts_reports.empty?
    artifacts["expose_as"] = job.artifacts_expose_as.to_a unless job.artifacts_expose_as.empty?
    data["artifacts"] = artifacts
  end

  # Variables map.
  data["variables"] = job.variables.to_h unless job.variables.empty?

  data
end
|
|
1455
|
+
|
|
1456
|
+
# Validate a parsed pipeline YAML hash.
#
# @param yaml_data [Hash] parsed .gitlab-ci.yml document
# @return [Array<String>] validation errors (currently none are produced)
def validate_pipeline_data(yaml_data)
  errors = []
  warnings = []

  # A top-level key counts as a job definition when it is a string and not
  # one of the reserved GitLab CI keywords.
  reserved = %w[image variables cache before_script after_script include extends services tags timeout]
  job_like = yaml_data.keys.any? { |k| k.is_a?(String) && !reserved.include?(k) }

  # NOTE(review): warnings are collected but never returned — only the
  # errors array is. Confirm whether callers should receive warnings too.
  warnings << "Pipeline has no stages or jobs defined" unless yaml_data["stages"] || job_like

  errors
end
|
|
1468
|
+
|
|
1469
|
+
# Validate one job against its pipeline: stage references, presence of a
# script, and dependency/needs targets.
#
# @param job_name [String] the job's key in the pipeline
# @param job [Object] job responding to #stage, #script, #dependencies, #needs
# @param pipeline [Object] pipeline responding to #stages (each with #name)
#   and #jobs (Hash keyed by job name)
# @return [Array(Array<String>, Array<String>, Array<String>)]
#   errors, warnings, suggestions (suggestions currently always empty)
def validate_job(job_name, job, pipeline)
  errors = []
  warnings = []
  suggestions = []

  # A blank stage is permitted; a non-blank stage must be declared.
  known_stages = pipeline.stages.map(&:name)
  unless job.stage.empty? || known_stages.include?(job.stage)
    errors << "Job '#{job_name}' references non-existent stage '#{job.stage}'"
  end

  # A job without a script is suspicious but not fatal.
  warnings << "Job '#{job_name}' has no script defined" if job.script.empty?

  # Both dependencies and needs must point at declared jobs.
  job.dependencies.each do |dep|
    errors << "Job '#{job_name}' depends on non-existent job '#{dep}'" unless pipeline.jobs.key?(dep)
  end

  job.needs.each do |need|
    errors << "Job '#{job_name}' needs non-existent job '#{need}'" unless pipeline.jobs.key?(need)
  end

  [errors, warnings, suggestions]
end
|
|
1501
|
+
|
|
1502
|
+
# Validate one stage against its pipeline: currently only warns when the
# stage has no jobs assigned to it.
#
# @param stage [Object] stage responding to #name
# @param pipeline [Object] pipeline responding to #jobs (Hash of jobs each
#   responding to #stage)
# @return [Array(Array<String>, Array<String>)] errors (always empty),
#   warnings
def validate_stage(stage, pipeline)
  errors = []
  warnings = []

  jobs_in_stage = pipeline.jobs.values.count { |job| job.stage == stage.name }
  warnings << "Stage '#{stage.name}' has no jobs" if jobs_in_stage.zero?

  [errors, warnings]
end
|
|
1515
|
+
|
|
1516
|
+
# Compare two jobs field-by-field for structural equality.
#
# Each probe extracts one comparable facet; the jobs are equal when every
# facet matches. Repeated/map fields are normalized via to_a/to_h so
# protobuf collections compare equal to plain Ruby ones. Comparison order
# and short-circuiting match the original && chain.
#
# @param job1 [Object] first job
# @param job2 [Object] second job
# @return [Boolean] true when all compared fields match
def jobs_equal?(job1, job2)
  probes = [
    ->(j) { j.stage },
    ->(j) { j.script.to_a },
    ->(j) { j.before_script.to_a },
    ->(j) { j.after_script.to_a },
    ->(j) { j.dependencies.to_a },
    ->(j) { j.artifacts_paths.to_a },
    ->(j) { j.artifacts_when },
    ->(j) { j.coverage },
    ->(j) { j.variables.to_h },
    ->(j) { j.image },
    ->(j) { j.services.to_a },
    ->(j) { j.tags.to_a },
    ->(j) { j.allow_failure },
    ->(j) { j.timeout },
    ->(j) { j.needs.to_a },
  ]

  probes.all? { |probe| probe.call(job1) == probe.call(job2) }
end
|
|
1534
|
+
end
|
|
1535
|
+
end
|
|
1536
|
+
end
|