makit 0.0.144 → 0.0.145
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +41 -41
- data/exe/makit +5 -5
- data/lib/makit/apache.rb +28 -28
- data/lib/makit/auto.rb +48 -48
- data/lib/makit/cli/base.rb +17 -0
- data/lib/makit/cli/build_commands.rb +500 -500
- data/lib/makit/cli/generators/base_generator.rb +74 -74
- data/lib/makit/cli/generators/dotnet_generator.rb +50 -50
- data/lib/makit/cli/generators/generator_factory.rb +49 -49
- data/lib/makit/cli/generators/node_generator.rb +50 -50
- data/lib/makit/cli/generators/ruby_generator.rb +77 -77
- data/lib/makit/cli/generators/rust_generator.rb +50 -50
- data/lib/makit/cli/generators/templates/dotnet_templates.rb +167 -167
- data/lib/makit/cli/generators/templates/node_templates.rb +161 -161
- data/lib/makit/cli/generators/templates/ruby/gemfile.rb +26 -26
- data/lib/makit/cli/generators/templates/ruby/gemspec.rb +41 -40
- data/lib/makit/cli/generators/templates/ruby/main_lib.rb +33 -33
- data/lib/makit/cli/generators/templates/ruby/rakefile.rb +35 -35
- data/lib/makit/cli/generators/templates/ruby/readme.rb +63 -63
- data/lib/makit/cli/generators/templates/ruby/test.rb +39 -39
- data/lib/makit/cli/generators/templates/ruby/test_helper.rb +29 -29
- data/lib/makit/cli/generators/templates/ruby/version.rb +29 -29
- data/lib/makit/cli/generators/templates/rust_templates.rb +128 -128
- data/lib/makit/cli/main.rb +78 -69
- data/lib/makit/cli/pipeline_commands.rb +311 -0
- data/lib/makit/cli/project_commands.rb +868 -868
- data/lib/makit/cli/repository_commands.rb +661 -661
- data/lib/makit/cli/strategy_commands.rb +207 -212
- data/lib/makit/cli/utility_commands.rb +521 -521
- data/lib/makit/commands/factory.rb +359 -359
- data/lib/makit/commands/middleware/base.rb +73 -73
- data/lib/makit/commands/middleware/cache.rb +248 -248
- data/lib/makit/commands/middleware/command_logger.rb +312 -312
- data/lib/makit/commands/middleware/validator.rb +269 -269
- data/lib/makit/commands/request.rb +316 -316
- data/lib/makit/commands/result.rb +323 -323
- data/lib/makit/commands/runner.rb +386 -386
- data/lib/makit/commands/strategies/base.rb +171 -171
- data/lib/makit/commands/strategies/child_process.rb +162 -162
- data/lib/makit/commands/strategies/factory.rb +136 -136
- data/lib/makit/commands/strategies/synchronous.rb +139 -139
- data/lib/makit/commands.rb +50 -50
- data/lib/makit/configuration/dotnet_project.rb +48 -48
- data/lib/makit/configuration/gitlab_helper.rb +61 -58
- data/lib/makit/configuration/project.rb +446 -168
- data/lib/makit/configuration/rakefile_helper.rb +43 -43
- data/lib/makit/configuration/step.rb +34 -34
- data/lib/makit/configuration/timeout.rb +74 -74
- data/lib/makit/configuration.rb +21 -16
- data/lib/makit/content/default_gitignore.rb +7 -7
- data/lib/makit/content/default_gitignore.txt +225 -225
- data/lib/makit/content/default_rakefile.rb +13 -13
- data/lib/makit/content/gem_rakefile.rb +16 -16
- data/lib/makit/context.rb +1 -1
- data/lib/makit/data.rb +49 -49
- data/lib/makit/directories.rb +140 -140
- data/lib/makit/directory.rb +262 -262
- data/lib/makit/docs/files.rb +89 -89
- data/lib/makit/docs/rake.rb +102 -102
- data/lib/makit/dotnet/cli.rb +69 -69
- data/lib/makit/dotnet/project.rb +217 -217
- data/lib/makit/dotnet/solution.rb +38 -38
- data/lib/makit/dotnet/solution_classlib.rb +239 -239
- data/lib/makit/dotnet/solution_console.rb +264 -264
- data/lib/makit/dotnet/solution_maui.rb +354 -354
- data/lib/makit/dotnet/solution_wasm.rb +275 -275
- data/lib/makit/dotnet/solution_wpf.rb +304 -304
- data/lib/makit/dotnet.rb +102 -102
- data/lib/makit/email.rb +90 -90
- data/lib/makit/environment.rb +142 -142
- data/lib/makit/examples/runner.rb +370 -370
- data/lib/makit/exceptions.rb +45 -45
- data/lib/makit/fileinfo.rb +32 -24
- data/lib/makit/files.rb +43 -43
- data/lib/makit/gems.rb +40 -40
- data/lib/makit/git/cli.rb +54 -54
- data/lib/makit/git/repository.rb +266 -90
- data/lib/makit/git.rb +104 -98
- data/lib/makit/gitlab/pipeline.rb +857 -0
- data/lib/makit/gitlab/pipeline_service_impl.rb +1536 -0
- data/lib/makit/gitlab_runner.rb +59 -59
- data/lib/makit/humanize.rb +218 -137
- data/lib/makit/indexer.rb +47 -47
- data/lib/makit/io/filesystem.rb +111 -0
- data/lib/makit/io/filesystem_service_impl.rb +337 -0
- data/lib/makit/logging/configuration.rb +308 -308
- data/lib/makit/logging/format_registry.rb +84 -84
- data/lib/makit/logging/formatters/base.rb +39 -39
- data/lib/makit/logging/formatters/console_formatter.rb +140 -140
- data/lib/makit/logging/formatters/json_formatter.rb +65 -65
- data/lib/makit/logging/formatters/plain_text_formatter.rb +71 -71
- data/lib/makit/logging/formatters/text_formatter.rb +64 -64
- data/lib/makit/logging/log_request.rb +119 -119
- data/lib/makit/logging/logger.rb +199 -199
- data/lib/makit/logging/sinks/base.rb +91 -91
- data/lib/makit/logging/sinks/console.rb +72 -72
- data/lib/makit/logging/sinks/file_sink.rb +92 -92
- data/lib/makit/logging/sinks/structured.rb +123 -123
- data/lib/makit/logging/sinks/unified_file_sink.rb +296 -296
- data/lib/makit/logging.rb +565 -565
- data/lib/makit/markdown.rb +75 -75
- data/lib/makit/mp/basic_object_mp.rb +17 -17
- data/lib/makit/mp/command_mp.rb +13 -13
- data/lib/makit/mp/command_request.mp.rb +17 -17
- data/lib/makit/mp/project_mp.rb +199 -199
- data/lib/makit/mp/string_mp.rb +205 -199
- data/lib/makit/nuget.rb +74 -74
- data/lib/makit/podman/podman.rb +458 -0
- data/lib/makit/podman/podman_service_impl.rb +1081 -0
- data/lib/makit/port.rb +32 -32
- data/lib/makit/process.rb +377 -377
- data/lib/makit/protoc.rb +112 -107
- data/lib/makit/rake/cli.rb +196 -196
- data/lib/makit/rake/trace_controller.rb +174 -174
- data/lib/makit/rake.rb +81 -81
- data/lib/makit/ruby/cli.rb +185 -185
- data/lib/makit/ruby.rb +25 -25
- data/lib/makit/secrets.rb +51 -51
- data/lib/makit/serializer.rb +130 -130
- data/lib/makit/services/builder.rb +186 -186
- data/lib/makit/services/error_handler.rb +226 -226
- data/lib/makit/services/repository_manager.rb +367 -231
- data/lib/makit/services/validator.rb +112 -112
- data/lib/makit/setup/classlib.rb +101 -101
- data/lib/makit/setup/gem.rb +268 -268
- data/lib/makit/setup/pages.rb +11 -11
- data/lib/makit/setup/razorclasslib.rb +101 -101
- data/lib/makit/setup/runner.rb +54 -54
- data/lib/makit/setup.rb +5 -5
- data/lib/makit/show.rb +110 -110
- data/lib/makit/storage.rb +126 -126
- data/lib/makit/symbols.rb +175 -170
- data/lib/makit/task_info.rb +130 -130
- data/lib/makit/tasks/at_exit.rb +15 -15
- data/lib/makit/tasks/build.rb +22 -22
- data/lib/makit/tasks/clean.rb +13 -13
- data/lib/makit/tasks/configure.rb +10 -10
- data/lib/makit/tasks/format.rb +10 -10
- data/lib/makit/tasks/hook_manager.rb +443 -443
- data/lib/makit/tasks/init.rb +49 -49
- data/lib/makit/tasks/integrate.rb +29 -29
- data/lib/makit/tasks/pull_incoming.rb +13 -13
- data/lib/makit/tasks/setup.rb +16 -16
- data/lib/makit/tasks/sync.rb +17 -17
- data/lib/makit/tasks/tag.rb +16 -16
- data/lib/makit/tasks/task_monkey_patch.rb +81 -81
- data/lib/makit/tasks/test.rb +22 -22
- data/lib/makit/tasks/update.rb +18 -18
- data/lib/makit/tasks.rb +20 -20
- data/lib/makit/test_cache.rb +239 -239
- data/lib/makit/tree.rb +37 -37
- data/lib/makit/v1/configuration/project_service_impl.rb +371 -0
- data/lib/makit/v1/git/git_repository_service_impl.rb +295 -0
- data/lib/makit/v1/makit.v1_pb.rb +35 -35
- data/lib/makit/v1/makit.v1_services_pb.rb +27 -27
- data/lib/makit/v1/services/repository_manager_service_impl.rb +572 -0
- data/lib/makit/version.rb +100 -100
- data/lib/makit/version_util.rb +21 -21
- data/lib/makit/wix.rb +95 -95
- data/lib/makit/yaml.rb +29 -29
- data/lib/makit/zip.rb +17 -17
- data/lib/makit copy.rb +44 -44
- data/lib/makit.rb +111 -43
- metadata +61 -36
|
@@ -0,0 +1,857 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "yaml"
|
|
4
|
+
require "json"
|
|
5
|
+
|
|
6
|
+
module Makit
|
|
7
|
+
module Gitlab
|
|
8
|
+
# Ruby wrapper for GitLab Pipeline operations
|
|
9
|
+
# Provides a convenient interface for working with GitLab CI pipelines
|
|
10
|
+
class Pipeline
|
|
11
|
+
attr_accessor :pipeline_data
|
|
12
|
+
|
|
13
|
+
def initialize(yaml_content = nil)
|
|
14
|
+
if yaml_content
|
|
15
|
+
@pipeline_data = parse_yaml(yaml_content)
|
|
16
|
+
else
|
|
17
|
+
@pipeline_data = create_empty_pipeline
|
|
18
|
+
end
|
|
19
|
+
end
|
|
20
|
+
|
|
21
|
+
# Parse GitLab CI YAML content
|
|
22
|
+
def self.parse_yaml(yaml_content)
|
|
23
|
+
new(yaml_content)
|
|
24
|
+
end
|
|
25
|
+
|
|
26
|
+
# Parse GitLab CI YAML from file
|
|
27
|
+
def self.parse_file(file_path)
|
|
28
|
+
content = File.read(file_path)
|
|
29
|
+
new(content)
|
|
30
|
+
end
|
|
31
|
+
|
|
32
|
+
# Convert pipeline to YAML
|
|
33
|
+
def to_yaml(pretty_format = true)
|
|
34
|
+
if defined?(Gitlab::Pipeline::PipelineService) && @grpc_available
|
|
35
|
+
# Use gRPC service if available
|
|
36
|
+
service = PipelineServiceImpl.new
|
|
37
|
+
request = Gitlab::Pipeline::ToYamlRequest.new(
|
|
38
|
+
pipeline: @pipeline_data,
|
|
39
|
+
pretty_format: pretty_format,
|
|
40
|
+
indent_size: 2
|
|
41
|
+
)
|
|
42
|
+
response = service.to_yaml(request, nil)
|
|
43
|
+
response.yaml_content
|
|
44
|
+
else
|
|
45
|
+
# Fallback implementation
|
|
46
|
+
convert_pipeline_to_yaml(@pipeline_data, pretty_format)
|
|
47
|
+
end
|
|
48
|
+
end
|
|
49
|
+
|
|
50
|
+
# Validate pipeline
|
|
51
|
+
def validate
|
|
52
|
+
if defined?(Gitlab::Pipeline::PipelineService) && @grpc_available
|
|
53
|
+
# Use gRPC service if available
|
|
54
|
+
service = PipelineServiceImpl.new
|
|
55
|
+
request = Gitlab::Pipeline::ValidatePipelineRequest.new(
|
|
56
|
+
pipeline: @pipeline_data,
|
|
57
|
+
check_gitlab_compatibility: true
|
|
58
|
+
)
|
|
59
|
+
response = service.validate_pipeline(request, nil)
|
|
60
|
+
{
|
|
61
|
+
is_valid: response.is_valid,
|
|
62
|
+
errors: response.errors.to_a,
|
|
63
|
+
warnings: response.warnings.to_a,
|
|
64
|
+
suggestions: response.suggestions.to_a
|
|
65
|
+
}
|
|
66
|
+
else
|
|
67
|
+
# Fallback implementation
|
|
68
|
+
validate_pipeline_fallback
|
|
69
|
+
end
|
|
70
|
+
end
|
|
71
|
+
|
|
72
|
+
# Get pipeline statistics
|
|
73
|
+
def stats
|
|
74
|
+
if defined?(Gitlab::Pipeline::PipelineService) && @grpc_available
|
|
75
|
+
# Use gRPC service if available
|
|
76
|
+
service = PipelineServiceImpl.new
|
|
77
|
+
request = Gitlab::Pipeline::GetPipelineStatsRequest.new(pipeline: @pipeline_data)
|
|
78
|
+
response = service.get_pipeline_stats(request, nil)
|
|
79
|
+
{
|
|
80
|
+
total_jobs: response.total_jobs,
|
|
81
|
+
total_stages: response.total_stages,
|
|
82
|
+
total_artifacts: response.total_artifacts,
|
|
83
|
+
total_services: response.total_services,
|
|
84
|
+
total_variables: response.total_variables,
|
|
85
|
+
stage_names: response.stage_names.to_a,
|
|
86
|
+
job_names: response.job_names.to_a
|
|
87
|
+
}
|
|
88
|
+
else
|
|
89
|
+
# Fallback implementation
|
|
90
|
+
calculate_stats_fallback
|
|
91
|
+
end
|
|
92
|
+
end
|
|
93
|
+
|
|
94
|
+
# Compare with another pipeline
|
|
95
|
+
def compare(other_pipeline)
|
|
96
|
+
if defined?(Gitlab::Pipeline::PipelineService) && @grpc_available
|
|
97
|
+
# Use gRPC service if available
|
|
98
|
+
service = PipelineServiceImpl.new
|
|
99
|
+
request = Gitlab::Pipeline::ComparePipelinesRequest.new(
|
|
100
|
+
pipeline1: @pipeline_data,
|
|
101
|
+
pipeline2: other_pipeline.pipeline_data
|
|
102
|
+
)
|
|
103
|
+
response = service.compare_pipelines(request, nil)
|
|
104
|
+
{
|
|
105
|
+
are_identical: response.are_identical,
|
|
106
|
+
differences: response.differences.to_a,
|
|
107
|
+
added_jobs: response.added_jobs.to_a,
|
|
108
|
+
removed_jobs: response.removed_jobs.to_a,
|
|
109
|
+
modified_jobs: response.modified_jobs.to_a,
|
|
110
|
+
added_stages: response.added_stages.to_a,
|
|
111
|
+
removed_stages: response.removed_stages.to_a
|
|
112
|
+
}
|
|
113
|
+
else
|
|
114
|
+
# Fallback implementation
|
|
115
|
+
compare_pipelines_fallback(other_pipeline)
|
|
116
|
+
end
|
|
117
|
+
end
|
|
118
|
+
|
|
119
|
+
# Merge with another pipeline
|
|
120
|
+
def merge(other_pipeline, preserve_base = true)
|
|
121
|
+
if defined?(Gitlab::Pipeline::PipelineService) && @grpc_available
|
|
122
|
+
# Use gRPC service if available
|
|
123
|
+
service = PipelineServiceImpl.new
|
|
124
|
+
request = Gitlab::Pipeline::MergePipelinesRequest.new(
|
|
125
|
+
base_pipeline: @pipeline_data,
|
|
126
|
+
override_pipeline: other_pipeline.pipeline_data,
|
|
127
|
+
preserve_base: preserve_base
|
|
128
|
+
)
|
|
129
|
+
response = service.merge_pipelines(request, nil)
|
|
130
|
+
merged_pipeline = Pipeline.new
|
|
131
|
+
merged_pipeline.pipeline_data = response.merged_pipeline
|
|
132
|
+
{
|
|
133
|
+
pipeline: merged_pipeline,
|
|
134
|
+
conflicts: response.conflicts.to_a,
|
|
135
|
+
warnings: response.warnings.to_a
|
|
136
|
+
}
|
|
137
|
+
else
|
|
138
|
+
# Fallback implementation
|
|
139
|
+
merge_pipelines_fallback(other_pipeline, preserve_base)
|
|
140
|
+
end
|
|
141
|
+
end
|
|
142
|
+
|
|
143
|
+
# Add a job to the pipeline
|
|
144
|
+
def add_job(name, job_data)
|
|
145
|
+
if @pipeline_data.jobs.key?(name)
|
|
146
|
+
raise ArgumentError, "Job '#{name}' already exists"
|
|
147
|
+
end
|
|
148
|
+
|
|
149
|
+
job = convert_hash_to_job(job_data)
|
|
150
|
+
@pipeline_data.jobs[name] = job
|
|
151
|
+
end
|
|
152
|
+
|
|
153
|
+
# Remove a job from the pipeline
|
|
154
|
+
def remove_job(name)
|
|
155
|
+
@pipeline_data.jobs.delete(name)
|
|
156
|
+
end
|
|
157
|
+
|
|
158
|
+
# Add a stage to the pipeline
|
|
159
|
+
def add_stage(name)
|
|
160
|
+
existing_stage = @pipeline_data.stages.find { |s| s.name == name }
|
|
161
|
+
unless existing_stage
|
|
162
|
+
if @grpc_available
|
|
163
|
+
stage = Gitlab::Pipeline::Stage.new
|
|
164
|
+
stage.name = name
|
|
165
|
+
@pipeline_data.stages << stage
|
|
166
|
+
else
|
|
167
|
+
stage = Object.new
|
|
168
|
+
def stage.name
|
|
169
|
+
@name
|
|
170
|
+
end
|
|
171
|
+
def stage.name=(value)
|
|
172
|
+
@name = value
|
|
173
|
+
end
|
|
174
|
+
stage.name = name
|
|
175
|
+
@pipeline_data.stages << stage
|
|
176
|
+
end
|
|
177
|
+
end
|
|
178
|
+
end
|
|
179
|
+
|
|
180
|
+
# Remove a stage from the pipeline
|
|
181
|
+
def remove_stage(name)
|
|
182
|
+
@pipeline_data.stages.reject! { |s| s.name == name }
|
|
183
|
+
# Also remove jobs in this stage
|
|
184
|
+
@pipeline_data.jobs.reject! { |_name, job| job.stage == name }
|
|
185
|
+
end
|
|
186
|
+
|
|
187
|
+
# Get all job names
|
|
188
|
+
def job_names
|
|
189
|
+
@pipeline_data.jobs.keys
|
|
190
|
+
end
|
|
191
|
+
|
|
192
|
+
# Get all stage names
|
|
193
|
+
def stage_names
|
|
194
|
+
@pipeline_data.stages.map(&:name)
|
|
195
|
+
end
|
|
196
|
+
|
|
197
|
+
# Check if pipeline is valid
|
|
198
|
+
def valid?
|
|
199
|
+
validation_result = validate
|
|
200
|
+
validation_result[:is_valid]
|
|
201
|
+
end
|
|
202
|
+
|
|
203
|
+
# Get validation errors
|
|
204
|
+
def errors
|
|
205
|
+
validation_result = validate
|
|
206
|
+
validation_result[:errors]
|
|
207
|
+
end
|
|
208
|
+
|
|
209
|
+
# Get validation warnings
|
|
210
|
+
def warnings
|
|
211
|
+
validation_result = validate
|
|
212
|
+
validation_result[:warnings]
|
|
213
|
+
end
|
|
214
|
+
|
|
215
|
+
# Execute pipeline using Podman
|
|
216
|
+
def execute_pipeline(variables: {}, working_directory: nil, podman_executable: "podman", dry_run: false)
|
|
217
|
+
if Makit::Gitlab::PipelineServiceImpl.grpc_available?
|
|
218
|
+
execute_pipeline_grpc(variables, working_directory, podman_executable, dry_run)
|
|
219
|
+
else
|
|
220
|
+
execute_pipeline_fallback(variables, working_directory, podman_executable, dry_run)
|
|
221
|
+
end
|
|
222
|
+
end
|
|
223
|
+
|
|
224
|
+
private
|
|
225
|
+
|
|
226
|
+
def initialize_grpc_availability
|
|
227
|
+
@grpc_available = defined?(Gitlab::Pipeline::PipelineService)
|
|
228
|
+
end
|
|
229
|
+
|
|
230
|
+
def execute_pipeline_grpc(variables, working_directory, podman_executable, dry_run)
|
|
231
|
+
service = Makit::Gitlab::PipelineServiceImpl.new
|
|
232
|
+
|
|
233
|
+
request = Gitlab::Pipeline::ExecutePipelineRequest.new(
|
|
234
|
+
pipeline: @pipeline_data,
|
|
235
|
+
variables: variables,
|
|
236
|
+
working_directory: working_directory,
|
|
237
|
+
podman_executable: podman_executable,
|
|
238
|
+
dry_run: dry_run
|
|
239
|
+
)
|
|
240
|
+
|
|
241
|
+
service.execute_pipeline(request)
|
|
242
|
+
end
|
|
243
|
+
|
|
244
|
+
def execute_pipeline_fallback(variables, working_directory, podman_executable, dry_run)
|
|
245
|
+
service = Makit::Gitlab::PipelineServiceImpl.new
|
|
246
|
+
|
|
247
|
+
request = {
|
|
248
|
+
pipeline: @pipeline_data,
|
|
249
|
+
variables: variables,
|
|
250
|
+
working_directory: working_directory,
|
|
251
|
+
podman_executable: podman_executable,
|
|
252
|
+
dry_run: dry_run
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
service.execute_pipeline(request)
|
|
256
|
+
end
|
|
257
|
+
|
|
258
|
+
def parse_yaml(yaml_content)
|
|
259
|
+
initialize_grpc_availability
|
|
260
|
+
|
|
261
|
+
if @grpc_available
|
|
262
|
+
# Use gRPC service if available
|
|
263
|
+
service = PipelineServiceImpl.new
|
|
264
|
+
request = Gitlab::Pipeline::ParseYamlRequest.new(
|
|
265
|
+
yaml_content: yaml_content,
|
|
266
|
+
strict_mode: true
|
|
267
|
+
)
|
|
268
|
+
response = service.parse_yaml(request, nil)
|
|
269
|
+
response.pipeline
|
|
270
|
+
else
|
|
271
|
+
# Fallback implementation
|
|
272
|
+
parse_yaml_fallback(yaml_content)
|
|
273
|
+
end
|
|
274
|
+
end
|
|
275
|
+
|
|
276
|
+
def create_empty_pipeline
|
|
277
|
+
initialize_grpc_availability
|
|
278
|
+
|
|
279
|
+
if @grpc_available
|
|
280
|
+
Gitlab::Pipeline::Pipeline.new
|
|
281
|
+
else
|
|
282
|
+
create_empty_pipeline_fallback
|
|
283
|
+
end
|
|
284
|
+
end
|
|
285
|
+
|
|
286
|
+
def create_empty_pipeline_fallback
|
|
287
|
+
pipeline = Object.new
|
|
288
|
+
|
|
289
|
+
def pipeline.image
|
|
290
|
+
@image ||= ""
|
|
291
|
+
end
|
|
292
|
+
|
|
293
|
+
def pipeline.image=(value)
|
|
294
|
+
@image = value
|
|
295
|
+
end
|
|
296
|
+
|
|
297
|
+
def pipeline.jobs
|
|
298
|
+
@jobs ||= {}
|
|
299
|
+
end
|
|
300
|
+
|
|
301
|
+
def pipeline.stages
|
|
302
|
+
@stages ||= []
|
|
303
|
+
end
|
|
304
|
+
|
|
305
|
+
def pipeline.variables
|
|
306
|
+
@variables ||= {}
|
|
307
|
+
end
|
|
308
|
+
|
|
309
|
+
def pipeline.cache
|
|
310
|
+
@cache ||= nil
|
|
311
|
+
end
|
|
312
|
+
|
|
313
|
+
def pipeline.cache=(value)
|
|
314
|
+
@cache = value
|
|
315
|
+
end
|
|
316
|
+
|
|
317
|
+
def pipeline.before_script
|
|
318
|
+
@before_script ||= []
|
|
319
|
+
end
|
|
320
|
+
|
|
321
|
+
def pipeline.after_script
|
|
322
|
+
@after_script ||= []
|
|
323
|
+
end
|
|
324
|
+
|
|
325
|
+
def pipeline.timeout
|
|
326
|
+
@timeout ||= 0
|
|
327
|
+
end
|
|
328
|
+
|
|
329
|
+
def pipeline.timeout=(value)
|
|
330
|
+
@timeout = value
|
|
331
|
+
end
|
|
332
|
+
|
|
333
|
+
pipeline
|
|
334
|
+
end
|
|
335
|
+
|
|
336
|
+
def parse_yaml_fallback(yaml_content)
|
|
337
|
+
# Simple fallback implementation
|
|
338
|
+
yaml_data = YAML.safe_load(yaml_content, permitted_classes: [Symbol], aliases: true)
|
|
339
|
+
convert_yaml_to_pipeline_fallback(yaml_data)
|
|
340
|
+
end
|
|
341
|
+
|
|
342
|
+
def convert_yaml_to_pipeline_fallback(yaml_data)
|
|
343
|
+
# Use the fallback pipeline object
|
|
344
|
+
pipeline = create_empty_pipeline_fallback
|
|
345
|
+
|
|
346
|
+
# Set basic properties
|
|
347
|
+
pipeline.image = yaml_data["image"] || ""
|
|
348
|
+
pipeline.timeout = yaml_data["timeout"] || 0
|
|
349
|
+
|
|
350
|
+
# Set variables
|
|
351
|
+
if yaml_data["variables"]
|
|
352
|
+
pipeline.variables.merge!(yaml_data["variables"])
|
|
353
|
+
end
|
|
354
|
+
|
|
355
|
+
# Set scripts
|
|
356
|
+
pipeline.before_script.concat(yaml_data["before_script"] || [])
|
|
357
|
+
pipeline.after_script.concat(yaml_data["after_script"] || [])
|
|
358
|
+
|
|
359
|
+
# Set stages
|
|
360
|
+
if yaml_data["stages"]
|
|
361
|
+
yaml_data["stages"].each do |stage_name|
|
|
362
|
+
stage = Object.new
|
|
363
|
+
def stage.name
|
|
364
|
+
@name
|
|
365
|
+
end
|
|
366
|
+
def stage.name=(value)
|
|
367
|
+
@name = value
|
|
368
|
+
end
|
|
369
|
+
stage.name = stage_name
|
|
370
|
+
pipeline.stages << stage
|
|
371
|
+
end
|
|
372
|
+
end
|
|
373
|
+
|
|
374
|
+
# Set jobs
|
|
375
|
+
yaml_data.each do |key, value|
|
|
376
|
+
next if %w[image variables cache before_script after_script stages include extends services tags timeout].include?(key)
|
|
377
|
+
|
|
378
|
+
if value.is_a?(Hash)
|
|
379
|
+
job = convert_hash_to_job_fallback(value)
|
|
380
|
+
pipeline.jobs[key] = job
|
|
381
|
+
end
|
|
382
|
+
end
|
|
383
|
+
|
|
384
|
+
pipeline
|
|
385
|
+
end
|
|
386
|
+
|
|
387
|
+
def convert_hash_to_job_fallback(job_data)
|
|
388
|
+
job = Object.new
|
|
389
|
+
|
|
390
|
+
def job.stage
|
|
391
|
+
@stage ||= ""
|
|
392
|
+
end
|
|
393
|
+
|
|
394
|
+
def job.stage=(value)
|
|
395
|
+
@stage = value
|
|
396
|
+
end
|
|
397
|
+
|
|
398
|
+
def job.script
|
|
399
|
+
@script ||= []
|
|
400
|
+
end
|
|
401
|
+
|
|
402
|
+
def job.before_script
|
|
403
|
+
@before_script ||= []
|
|
404
|
+
end
|
|
405
|
+
|
|
406
|
+
def job.after_script
|
|
407
|
+
@after_script ||= []
|
|
408
|
+
end
|
|
409
|
+
|
|
410
|
+
def job.image
|
|
411
|
+
@image ||= ""
|
|
412
|
+
end
|
|
413
|
+
|
|
414
|
+
def job.image=(value)
|
|
415
|
+
@image = value
|
|
416
|
+
end
|
|
417
|
+
|
|
418
|
+
def job.variables
|
|
419
|
+
@variables ||= {}
|
|
420
|
+
end
|
|
421
|
+
|
|
422
|
+
def job.services
|
|
423
|
+
@services ||= []
|
|
424
|
+
end
|
|
425
|
+
|
|
426
|
+
def job.tags
|
|
427
|
+
@tags ||= []
|
|
428
|
+
end
|
|
429
|
+
|
|
430
|
+
def job.artifacts_paths
|
|
431
|
+
@artifacts_paths ||= []
|
|
432
|
+
end
|
|
433
|
+
|
|
434
|
+
def job.artifacts_when
|
|
435
|
+
@artifacts_when ||= ""
|
|
436
|
+
end
|
|
437
|
+
|
|
438
|
+
def job.artifacts_when=(value)
|
|
439
|
+
@artifacts_when = value
|
|
440
|
+
end
|
|
441
|
+
|
|
442
|
+
def job.coverage
|
|
443
|
+
@coverage ||= ""
|
|
444
|
+
end
|
|
445
|
+
|
|
446
|
+
def job.coverage=(value)
|
|
447
|
+
@coverage = value
|
|
448
|
+
end
|
|
449
|
+
|
|
450
|
+
def job.allow_failure
|
|
451
|
+
@allow_failure ||= false
|
|
452
|
+
end
|
|
453
|
+
|
|
454
|
+
def job.allow_failure=(value)
|
|
455
|
+
@allow_failure = value
|
|
456
|
+
end
|
|
457
|
+
|
|
458
|
+
def job.timeout
|
|
459
|
+
@timeout ||= 0
|
|
460
|
+
end
|
|
461
|
+
|
|
462
|
+
def job.timeout=(value)
|
|
463
|
+
@timeout = value
|
|
464
|
+
end
|
|
465
|
+
|
|
466
|
+
def job.dependencies
|
|
467
|
+
@dependencies ||= []
|
|
468
|
+
end
|
|
469
|
+
|
|
470
|
+
def job.needs
|
|
471
|
+
@needs ||= []
|
|
472
|
+
end
|
|
473
|
+
|
|
474
|
+
def job.when
|
|
475
|
+
@when ||= []
|
|
476
|
+
end
|
|
477
|
+
|
|
478
|
+
def job.only
|
|
479
|
+
@only ||= []
|
|
480
|
+
end
|
|
481
|
+
|
|
482
|
+
def job.except
|
|
483
|
+
@except ||= []
|
|
484
|
+
end
|
|
485
|
+
|
|
486
|
+
def job.rules
|
|
487
|
+
@rules ||= []
|
|
488
|
+
end
|
|
489
|
+
|
|
490
|
+
def job.artifacts_expire_in
|
|
491
|
+
@artifacts_expire_in ||= 0
|
|
492
|
+
end
|
|
493
|
+
|
|
494
|
+
def job.artifacts_expire_in=(value)
|
|
495
|
+
@artifacts_expire_in = value
|
|
496
|
+
end
|
|
497
|
+
|
|
498
|
+
def job.artifacts_name
|
|
499
|
+
@artifacts_name ||= []
|
|
500
|
+
end
|
|
501
|
+
|
|
502
|
+
def job.artifacts_untracked
|
|
503
|
+
@artifacts_untracked ||= []
|
|
504
|
+
end
|
|
505
|
+
|
|
506
|
+
def job.artifacts_reports
|
|
507
|
+
@artifacts_reports ||= []
|
|
508
|
+
end
|
|
509
|
+
|
|
510
|
+
def job.artifacts_expose_as
|
|
511
|
+
@artifacts_expose_as ||= []
|
|
512
|
+
end
|
|
513
|
+
|
|
514
|
+
# Set job properties
|
|
515
|
+
job.stage = job_data["stage"] || ""
|
|
516
|
+
job.image = job_data["image"] || ""
|
|
517
|
+
job.coverage = job_data["coverage"] || ""
|
|
518
|
+
job.allow_failure = job_data["allow_failure"] || false
|
|
519
|
+
job.timeout = job_data["timeout"] || 0
|
|
520
|
+
|
|
521
|
+
# Set script arrays
|
|
522
|
+
job.script.concat(Array(job_data["script"] || []))
|
|
523
|
+
job.before_script.concat(Array(job_data["before_script"] || []))
|
|
524
|
+
job.after_script.concat(Array(job_data["after_script"] || []))
|
|
525
|
+
job.dependencies.concat(Array(job_data["dependencies"] || []))
|
|
526
|
+
job.services.concat(Array(job_data["services"] || []))
|
|
527
|
+
job.tags.concat(Array(job_data["tags"] || []))
|
|
528
|
+
job.needs.concat(Array(job_data["needs"] || []))
|
|
529
|
+
job.when.concat(Array(job_data["when"] || []))
|
|
530
|
+
job.only.concat(Array(job_data["only"] || []))
|
|
531
|
+
job.except.concat(Array(job_data["except"] || []))
|
|
532
|
+
job.rules.concat(Array(job_data["rules"] || []))
|
|
533
|
+
|
|
534
|
+
# Set artifacts
|
|
535
|
+
if job_data["artifacts"]
|
|
536
|
+
artifacts = job_data["artifacts"]
|
|
537
|
+
job.artifacts_paths.concat(Array(artifacts["paths"] || []))
|
|
538
|
+
job.artifacts_when = artifacts["when"] || ""
|
|
539
|
+
end
|
|
540
|
+
|
|
541
|
+
# Set variables
|
|
542
|
+
if job_data["variables"]
|
|
543
|
+
job.variables.merge!(job_data["variables"])
|
|
544
|
+
end
|
|
545
|
+
|
|
546
|
+
job
|
|
547
|
+
end
|
|
548
|
+
|
|
549
|
+
def convert_hash_to_job(job_data)
|
|
550
|
+
if @grpc_available
|
|
551
|
+
# Use protobuf Job
|
|
552
|
+
job = Gitlab::Pipeline::Job.new
|
|
553
|
+
job.stage = job_data["stage"] || ""
|
|
554
|
+
job.image = job_data["image"] || ""
|
|
555
|
+
job.coverage = job_data["coverage"] || ""
|
|
556
|
+
job.allow_failure = job_data["allow_failure"] || false
|
|
557
|
+
job.timeout = job_data["timeout"] || 0
|
|
558
|
+
|
|
559
|
+
job.script.concat(job_data["script"] || [])
|
|
560
|
+
job.before_script.concat(job_data["before_script"] || [])
|
|
561
|
+
job.after_script.concat(job_data["after_script"] || [])
|
|
562
|
+
job.dependencies.concat(job_data["dependencies"] || [])
|
|
563
|
+
job.services.concat(job_data["services"] || [])
|
|
564
|
+
job.tags.concat(job_data["tags"] || [])
|
|
565
|
+
job.needs.concat(job_data["needs"] || [])
|
|
566
|
+
job.when.concat(job_data["when"] || [])
|
|
567
|
+
job.only.concat(job_data["only"] || [])
|
|
568
|
+
job.except.concat(job_data["except"] || [])
|
|
569
|
+
job.rules.concat(job_data["rules"] || [])
|
|
570
|
+
|
|
571
|
+
if job_data["artifacts"]
|
|
572
|
+
artifacts = job_data["artifacts"]
|
|
573
|
+
job.artifacts_paths.concat(artifacts["paths"] || [])
|
|
574
|
+
job.artifacts_when = artifacts["when"] || ""
|
|
575
|
+
job.artifacts_expire_in = artifacts["expire_in"] || 0
|
|
576
|
+
end
|
|
577
|
+
|
|
578
|
+
if job_data["variables"]
|
|
579
|
+
job_data["variables"].each do |key, value|
|
|
580
|
+
job.variables[key] = value.to_s
|
|
581
|
+
end
|
|
582
|
+
end
|
|
583
|
+
|
|
584
|
+
job
|
|
585
|
+
else
|
|
586
|
+
convert_hash_to_job_fallback(job_data)
|
|
587
|
+
end
|
|
588
|
+
end
|
|
589
|
+
|
|
590
|
+
def convert_pipeline_to_yaml(pipeline, pretty_format = true)
|
|
591
|
+
yaml_data = {}
|
|
592
|
+
|
|
593
|
+
# Add basic fields
|
|
594
|
+
yaml_data["image"] = pipeline.image unless pipeline.image.empty?
|
|
595
|
+
yaml_data["timeout"] = pipeline.timeout unless pipeline.timeout == 0
|
|
596
|
+
|
|
597
|
+
# Add variables
|
|
598
|
+
unless pipeline.variables.empty?
|
|
599
|
+
yaml_data["variables"] = pipeline.variables.to_h
|
|
600
|
+
end
|
|
601
|
+
|
|
602
|
+
# Add scripts
|
|
603
|
+
yaml_data["before_script"] = pipeline.before_script.to_a unless pipeline.before_script.empty?
|
|
604
|
+
yaml_data["after_script"] = pipeline.after_script.to_a unless pipeline.after_script.empty?
|
|
605
|
+
|
|
606
|
+
# Add stages
|
|
607
|
+
unless pipeline.stages.empty?
|
|
608
|
+
yaml_data["stages"] = pipeline.stages.map(&:name)
|
|
609
|
+
end
|
|
610
|
+
|
|
611
|
+
# Add jobs
|
|
612
|
+
pipeline.jobs.each do |job_name, job|
|
|
613
|
+
yaml_data[job_name] = convert_job_to_yaml(job)
|
|
614
|
+
end
|
|
615
|
+
|
|
616
|
+
if pretty_format
|
|
617
|
+
YAML.dump(yaml_data)
|
|
618
|
+
else
|
|
619
|
+
yaml_data.to_yaml
|
|
620
|
+
end
|
|
621
|
+
end
|
|
622
|
+
|
|
623
|
+
def convert_job_to_yaml(job)
|
|
624
|
+
job_data = {}
|
|
625
|
+
|
|
626
|
+
# Add basic fields
|
|
627
|
+
job_data["stage"] = job.stage unless job.stage.empty?
|
|
628
|
+
job_data["image"] = job.image unless job.image.empty?
|
|
629
|
+
job_data["coverage"] = job.coverage unless job.coverage.empty?
|
|
630
|
+
job_data["allow_failure"] = job.allow_failure if job.allow_failure
|
|
631
|
+
job_data["timeout"] = job.timeout unless job.timeout == 0
|
|
632
|
+
|
|
633
|
+
# Add script arrays
|
|
634
|
+
job_data["script"] = job.script.to_a unless job.script.empty?
|
|
635
|
+
job_data["before_script"] = job.before_script.to_a unless job.before_script.empty?
|
|
636
|
+
job_data["after_script"] = job.after_script.to_a unless job.after_script.empty?
|
|
637
|
+
job_data["dependencies"] = job.dependencies.to_a unless job.dependencies.empty?
|
|
638
|
+
job_data["services"] = job.services.to_a unless job.services.empty?
|
|
639
|
+
job_data["tags"] = job.tags.to_a unless job.tags.empty?
|
|
640
|
+
job_data["needs"] = job.needs.to_a unless job.needs.empty?
|
|
641
|
+
job_data["when"] = job.when.to_a unless job.when.empty?
|
|
642
|
+
job_data["only"] = job.only.to_a unless job.only.empty?
|
|
643
|
+
job_data["except"] = job.except.to_a unless job.except.empty?
|
|
644
|
+
job_data["rules"] = job.rules.to_a unless job.rules.empty?
|
|
645
|
+
|
|
646
|
+
# Add artifacts
|
|
647
|
+
if !job.artifacts_paths.empty? || !job.artifacts_when.empty? || job.artifacts_expire_in != 0
|
|
648
|
+
artifacts_data = {}
|
|
649
|
+
artifacts_data["paths"] = job.artifacts_paths.to_a unless job.artifacts_paths.empty?
|
|
650
|
+
artifacts_data["when"] = job.artifacts_when unless job.artifacts_when.empty?
|
|
651
|
+
artifacts_data["expire_in"] = job.artifacts_expire_in unless job.artifacts_expire_in == 0
|
|
652
|
+
job_data["artifacts"] = artifacts_data
|
|
653
|
+
end
|
|
654
|
+
|
|
655
|
+
# Add variables
|
|
656
|
+
unless job.variables.empty?
|
|
657
|
+
job_data["variables"] = job.variables.to_h
|
|
658
|
+
end
|
|
659
|
+
|
|
660
|
+
job_data
|
|
661
|
+
end
|
|
662
|
+
|
|
663
|
+
def validate_pipeline_fallback
|
|
664
|
+
errors = []
|
|
665
|
+
warnings = []
|
|
666
|
+
|
|
667
|
+
# Basic validation
|
|
668
|
+
if @pipeline_data.jobs.empty? && @pipeline_data.stages.empty?
|
|
669
|
+
errors << "Pipeline has no jobs or stages defined"
|
|
670
|
+
elsif @pipeline_data.jobs.empty?
|
|
671
|
+
warnings << "Pipeline has no jobs defined"
|
|
672
|
+
end
|
|
673
|
+
|
|
674
|
+
if @pipeline_data.stages.empty?
|
|
675
|
+
warnings << "Pipeline has no stages defined"
|
|
676
|
+
end
|
|
677
|
+
|
|
678
|
+
# Validate jobs
|
|
679
|
+
@pipeline_data.jobs.each do |job_name, job|
|
|
680
|
+
if job.stage && !@pipeline_data.stages.any? { |s| s.name == job.stage }
|
|
681
|
+
errors << "Job '#{job_name}' references non-existent stage '#{job.stage}'"
|
|
682
|
+
end
|
|
683
|
+
|
|
684
|
+
if job.script.empty?
|
|
685
|
+
warnings << "Job '#{job_name}' has no script defined"
|
|
686
|
+
end
|
|
687
|
+
end
|
|
688
|
+
|
|
689
|
+
{
|
|
690
|
+
is_valid: errors.empty?,
|
|
691
|
+
errors: errors,
|
|
692
|
+
warnings: warnings,
|
|
693
|
+
suggestions: []
|
|
694
|
+
}
|
|
695
|
+
end
|
|
696
|
+
|
|
697
|
+
# Fallback computation of summary statistics for the pipeline.
#
# @return [Hash] counts of jobs, stages, artifacts, services and
#   variables (global + per-job), plus stage and job name lists.
def calculate_stats_fallback
  jobs = @pipeline_data.jobs
  stages = @pipeline_data.stages
  job_list = jobs.values

  # Sum the length of a per-job collection across all jobs.
  sum_lengths = ->(attr) { job_list.sum { |job| job.public_send(attr).length } }

  {
    total_jobs: jobs.length,
    total_stages: stages.length,
    total_artifacts: sum_lengths.call(:artifacts_paths),
    total_services: sum_lengths.call(:services),
    # Global variables plus every job-local variable.
    total_variables: @pipeline_data.variables.length + sum_lengths.call(:variables),
    stage_names: stages.map(&:name),
    job_names: jobs.keys
  }
end
|
|
712
|
+
|
|
713
|
+
# Fallback comparison of this pipeline against +other_pipeline+.
#
# Jobs are compared by name (added/removed) and, for names present in
# both, by content via #jobs_equal? (modified). Stages are compared by
# name only. Every detected change is also described in +differences+.
#
# @param other_pipeline [Pipeline] pipeline to diff against
# @return [Hash] { are_identical:, differences:, added_jobs:,
#   removed_jobs:, modified_jobs:, added_stages:, removed_stages: }
def compare_pipelines_fallback(other_pipeline)
  differences = []
  modified_jobs = []

  base_job_names = @pipeline_data.jobs.keys.to_set
  other_job_names = other_pipeline.pipeline_data.jobs.keys.to_set

  added_jobs = (other_job_names - base_job_names).to_a
  removed_jobs = (base_job_names - other_job_names).to_a
  added_jobs.each { |name| differences << "Job '#{name}' was added" }
  removed_jobs.each { |name| differences << "Job '#{name}' was removed" }

  # Deep-compare jobs present in both pipelines.
  (base_job_names & other_job_names).each do |name|
    next if jobs_equal?(@pipeline_data.jobs[name], other_pipeline.pipeline_data.jobs[name])

    modified_jobs << name
    differences << "Job '#{name}' has been modified"
  end

  base_stage_names = @pipeline_data.stages.map(&:name).to_set
  other_stage_names = other_pipeline.pipeline_data.stages.map(&:name).to_set

  added_stages = (other_stage_names - base_stage_names).to_a
  removed_stages = (base_stage_names - other_stage_names).to_a
  added_stages.each { |name| differences << "Stage '#{name}' was added" }
  removed_stages.each { |name| differences << "Stage '#{name}' was removed" }

  {
    are_identical: differences.empty? && added_jobs.empty? && removed_jobs.empty? && added_stages.empty? && removed_stages.empty?,
    differences: differences,
    added_jobs: added_jobs,
    removed_jobs: removed_jobs,
    modified_jobs: modified_jobs,
    added_stages: added_stages,
    removed_stages: removed_stages
  }
end
|
|
763
|
+
|
|
764
|
+
# Fallback merge of this pipeline with +other_pipeline+.
#
# Builds a fresh Pipeline, seeds it with this pipeline's image, timeout,
# variables, stages and jobs, then layers the other pipeline on top.
# Conflicts (same variable key with a different value, or same job name)
# are always reported; +preserve_base+ decides who wins them:
#   * preserve_base falsy  -> the other pipeline's value overwrites (previous behavior)
#   * preserve_base truthy -> this (base) pipeline's value is kept
# BUGFIX: preserve_base was previously accepted but never consulted, so
# the other pipeline always overwrote the base on every conflict.
#
# @param other_pipeline [Pipeline] pipeline to merge on top of this one
# @param preserve_base [Boolean] keep base values where both pipelines define one
# @return [Hash] { pipeline:, conflicts:, warnings: }
def merge_pipelines_fallback(other_pipeline, preserve_base)
  conflicts = []
  warnings = []

  # Start from an empty pipeline and copy the base pipeline into it.
  merged_pipeline = Pipeline.new
  merged_pipeline.pipeline_data = create_empty_pipeline_fallback
  merged = merged_pipeline.pipeline_data

  merged.image = @pipeline_data.image
  merged.timeout = @pipeline_data.timeout
  @pipeline_data.variables.each { |key, value| merged.variables[key] = value }

  # Copy stages as lightweight name-only objects so the merged pipeline
  # does not share stage instances with the base pipeline.
  @pipeline_data.stages.each do |stage|
    stage_copy = Object.new
    stage_copy.singleton_class.send(:attr_accessor, :name)
    stage_copy.name = stage.name
    merged.stages << stage_copy
  end

  @pipeline_data.jobs.each { |job_name, job| merged.jobs[job_name] = job }

  # Merge scalar fields; the base value wins when preserve_base is set
  # and the base actually defined one.
  unless other_pipeline.pipeline_data.image.empty? || (preserve_base && !merged.image.empty?)
    merged.image = other_pipeline.pipeline_data.image
  end
  unless other_pipeline.pipeline_data.timeout == 0 || (preserve_base && merged.timeout != 0)
    merged.timeout = other_pipeline.pipeline_data.timeout
  end

  # Merge variables, recording value conflicts.
  other_pipeline.pipeline_data.variables.each do |key, value|
    if merged.variables.key?(key) && merged.variables[key] != value
      conflicts << "Variable '#{key}' has different values: '#{merged.variables[key]}' vs '#{value}'"
      next if preserve_base # keep the base value
    end
    merged.variables[key] = value
  end

  # Merge stages by name; existing stages are never duplicated.
  other_pipeline.pipeline_data.stages.each do |override_stage|
    unless merged.stages.any? { |s| s.name == override_stage.name }
      merged.stages << override_stage
    end
  end

  # Merge jobs, recording name conflicts.
  other_pipeline.pipeline_data.jobs.each do |job_name, job|
    if merged.jobs.key?(job_name)
      conflicts << "Job '#{job_name}' exists in both pipelines"
      next if preserve_base # keep the base job
    end
    merged.jobs[job_name] = job
  end

  {
    pipeline: merged_pipeline,
    conflicts: conflicts,
    warnings: warnings
  }
end
|
|
837
|
+
|
|
838
|
+
# Whether two job definitions are equivalent field by field.
#
# List-like attributes are normalized with +to_a+ and the variables hash
# with +to_h+ before comparison, so array-ish/hash-ish containers (e.g.
# protobuf repeated/map fields) compare by content.
#
# @param job1 [Object] first job definition
# @param job2 [Object] second job definition
# @return [Boolean] true when every compared attribute matches
def jobs_equal?(job1, job2)
  scalar_attrs = %i[stage artifacts_when coverage image allow_failure timeout]
  list_attrs = %i[script before_script after_script dependencies
                  artifacts_paths services tags needs]

  scalar_attrs.all? { |attr| job1.public_send(attr) == job2.public_send(attr) } &&
    list_attrs.all? { |attr| job1.public_send(attr).to_a == job2.public_send(attr).to_a } &&
    job1.variables.to_h == job2.variables.to_h
end
|
|
855
|
+
end
|
|
856
|
+
end
|
|
857
|
+
end
|