kdeploy 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,439 @@
1
# frozen_string_literal: true

module Kdeploy
  # Handles deployment statistics collection and analysis.
  #
  # Records three kinds of events — deployments, tasks and commands —
  # plus running global counters, persisting everything as pretty-printed
  # JSON (default: ~/.kdeploy/statistics.json). Each record_* call
  # rewrites the statistics file immediately.
  class Statistics
    # @return [Hash] the full in-memory statistics structure
    attr_reader :data

    # @param stats_file [String, nil] Path to the persistence file
    #   (defaults to ~/.kdeploy/statistics.json)
    def initialize(stats_file: nil)
      @stats_file = stats_file || default_stats_file
      @data = load_statistics
      @session_start_time = Time.now
    end

    # Record a deployment execution
    # @param result [Hash] Deployment result (:success, :duration, and
    #   optional :tasks_count, :success_count, :pipeline_name, :hosts_count)
    def record_deployment(result)
      deployment_data = build_deployment_data(result)
      @data[:deployments] << deployment_data
      update_global_stats(deployment_data)
      save_statistics
    end

    # Record a task execution
    # @param task_name [String] Task name
    # @param result [Hash] Task execution result
    def record_task(task_name, result)
      task_data = build_task_data(task_name, result)
      @data[:tasks] << task_data
      update_task_stats(task_data)
      save_statistics
    end

    # Record a command execution
    # @param command_name [String] Command name
    # @param host [String] Target host
    # @param success [Boolean] Execution success
    # @param duration [Float] Execution duration in seconds
    def record_command(command_name, host, success, duration)
      command_data = build_command_data(command_name, host, success, duration)
      @data[:commands] << command_data
      update_command_stats(command_data)
      save_statistics
    end

    # Get deployment statistics summary
    # @param days [Integer] Number of days to include (default: 30)
    # @return [Hash] Statistics summary
    def deployment_summary(days: 30)
      cutoff_time = calculate_cutoff_time(days)
      recent_deployments = filter_recent_data(@data[:deployments], cutoff_time)

      return empty_summary if recent_deployments.empty?

      build_deployment_summary(recent_deployments, days)
    end

    # Get task statistics summary
    # @param days [Integer] Number of days to include (default: 30)
    # @return [Hash] Task statistics summary
    def task_summary(days: 30)
      cutoff_time = calculate_cutoff_time(days)
      recent_tasks = filter_recent_data(@data[:tasks], cutoff_time)

      return empty_task_summary(days) if recent_tasks.empty?

      build_task_summary(recent_tasks, days)
    end

    # Get global statistics accumulated across all sessions
    # @return [Hash] Global statistics
    def global_summary
      {
        total_deployments: @data[:global][:deployments][:total],
        successful_deployments: @data[:global][:deployments][:successful],
        failed_deployments: @data[:global][:deployments][:failed],
        total_tasks: @data[:global][:tasks][:total],
        successful_tasks: @data[:global][:tasks][:successful],
        failed_tasks: @data[:global][:tasks][:failed],
        total_commands: @data[:global][:commands][:total],
        successful_commands: @data[:global][:commands][:successful],
        failed_commands: @data[:global][:commands][:failed],
        total_execution_time: @data[:global][:total_execution_time].round(2),
        session_start_time: @session_start_time,
        session_duration: (Time.now - @session_start_time).round(2)
      }
    end

    # Get top failed tasks
    # @param limit [Integer] Number of tasks to return
    # @param days [Integer] Number of days to include
    # @return [Array<Hash>] Top failed tasks, most failures first
    def top_failed_tasks(limit: 10, days: 30)
      cutoff_time = calculate_cutoff_time(days)
      recent_tasks = filter_recent_failed_tasks(cutoff_time)

      build_top_failed_tasks(recent_tasks, limit)
    end

    # Get performance trends grouped by calendar day (UTC)
    # @param days [Integer] Number of days to analyze
    # @return [Hash] Performance trends
    def performance_trends(days: 7)
      cutoff_time = calculate_cutoff_time(days)
      recent_deployments = filter_recent_data(@data[:deployments], cutoff_time)

      return { period_days: days, trends: {} } if recent_deployments.empty?

      build_performance_trends(recent_deployments, days)
    end

    # Clear all statistics and persist the empty structure
    def clear_statistics!
      @data = default_statistics_structure
      save_statistics
    end

    # Export statistics to file
    # @param file_path [String] Export file path
    # @param format [Symbol] Export format (:json, :csv)
    # @raise [ArgumentError] for any other format
    def export_statistics(file_path, format: :json)
      case format
      when :json
        export_to_json(file_path)
      when :csv
        export_to_csv(file_path)
      else
        raise ArgumentError, "Unsupported export format: #{format}"
      end
    end

    private

    def default_stats_file
      File.join(Dir.home, '.kdeploy', 'statistics.json')
    end

    # Loads persisted statistics, falling back to a fresh structure when
    # the file is missing or unreadable.
    def load_statistics
      return default_statistics_structure unless File.exist?(@stats_file)

      begin
        JSON.parse(File.read(@stats_file), symbolize_names: true)
      rescue StandardError
        # Covers JSON::ParserError (a StandardError subclass) as well as
        # I/O failures — the previous explicit listing was redundant.
        KdeployLogger.warn('Failed to load statistics file, creating new one')
        default_statistics_structure
      end
    end

    # Persists @data; failures are logged rather than raised so a broken
    # stats file never aborts a deployment.
    def save_statistics
      FileUtils.mkdir_p(File.dirname(@stats_file))
      File.write(@stats_file, JSON.pretty_generate(@data))
    rescue StandardError => e
      KdeployLogger.error("Failed to save statistics: #{e.message}")
    end

    def build_deployment_data(result)
      {
        timestamp: Time.now.to_f,
        success: result[:success],
        duration: result[:duration],
        tasks_count: result[:tasks_count] || 0,
        success_count: result[:success_count] || 0,
        pipeline_name: result[:pipeline_name] || 'unknown',
        hosts_count: result[:hosts_count] || 0
      }
    end

    def build_task_data(task_name, result)
      {
        timestamp: Time.now.to_f,
        name: task_name,
        success: result[:success],
        duration: result[:duration],
        hosts_count: result[:hosts_count] || 0,
        success_count: result[:success_count] || 0
      }
    end

    def build_command_data(command_name, host, success, duration)
      {
        timestamp: Time.now.to_f,
        name: command_name,
        host: host,
        success: success,
        duration: duration
      }
    end

    def calculate_cutoff_time(days)
      Time.now - (days * 24 * 60 * 60)
    end

    def filter_recent_data(data, cutoff_time)
      data.select { |d| d[:timestamp] >= cutoff_time.to_f }
    end

    def filter_recent_failed_tasks(cutoff_time)
      @data[:tasks].select { |t| t[:timestamp] >= cutoff_time.to_f && !t[:success] }
    end

    def build_deployment_summary(deployments, days)
      successful = deployments.count { |d| d[:success] }
      failed = deployments.size - successful
      durations = deployments.map { |d| d[:duration] }

      {
        period_days: days,
        total_deployments: deployments.size,
        successful_deployments: successful,
        failed_deployments: failed,
        success_rate: calculate_success_rate(successful, deployments.size),
        avg_duration: calculate_average(durations),
        min_duration: durations.min&.round(2),
        max_duration: durations.max&.round(2),
        total_duration: durations.sum.round(2)
      }
    end

    def build_task_summary(tasks, days)
      task_groups = tasks.group_by { |t| t[:name] }
      task_stats = build_task_group_stats(task_groups)

      {
        period_days: days,
        total_task_executions: tasks.size,
        unique_tasks: task_groups.size,
        tasks: task_stats
      }
    end

    def build_task_group_stats(task_groups)
      task_groups.transform_values do |tasks|
        successful = tasks.count { |t| t[:success] }
        failed = tasks.size - successful
        durations = tasks.map { |t| t[:duration] }

        {
          total_executions: tasks.size,
          successful: successful,
          failed: failed,
          success_rate: calculate_success_rate(successful, tasks.size),
          avg_duration: calculate_average(durations),
          total_duration: durations.sum.round(2)
        }
      end
    end

    def build_top_failed_tasks(tasks, limit)
      failure_counts = calculate_failure_counts(tasks)
      failure_counts.take(limit).map do |task_name, count|
        last_failure = find_last_failure(tasks, task_name)
        {
          name: task_name,
          failure_count: count,
          last_failure_time: Time.at(last_failure[:timestamp]).utc,
          # Guard against records persisted with a nil duration — a bare
          # nil.round(2) here would raise NoMethodError.
          last_failure_duration: (last_failure[:duration] || 0).round(2)
        }
      end
    end

    def calculate_failure_counts(tasks)
      tasks.group_by { |t| t[:name] }
           .transform_values(&:size)
           .sort_by { |_, count| -count }
    end

    def find_last_failure(tasks, task_name)
      tasks.select { |t| t[:name] == task_name }
           .max_by { |t| t[:timestamp] }
    end

    def build_performance_trends(deployments, days)
      daily_stats = group_by_day(deployments)
      trends = calculate_daily_trends(daily_stats)

      {
        period_days: days,
        trends: trends
      }
    end

    def group_by_day(deployments)
      deployments.group_by do |d|
        Time.at(d[:timestamp]).utc.to_date
      end
    end

    def calculate_daily_trends(daily_stats)
      daily_stats.transform_values do |deployments|
        successful = deployments.count { |d| d[:success] }
        durations = deployments.map { |d| d[:duration] }

        {
          total: deployments.size,
          successful: successful,
          failed: deployments.size - successful,
          success_rate: calculate_success_rate(successful, deployments.size),
          avg_duration: calculate_average(durations),
          total_duration: durations.sum.round(2)
        }
      end
    end

    # Percentage of successes, rounded to 2 decimals.
    # @return [Float] 0.0 when total is zero (avoids NaN)
    def calculate_success_rate(successful, total)
      return 0.0 if total.zero?

      (successful.to_f / total * 100).round(2)
    end

    # Average of values rounded to 2 decimals. Uses float division so
    # Integer durations are not truncated (e.g. [1, 2] => 1.5, not 1).
    # @return [Numeric] 0 when values is empty
    def calculate_average(values)
      values.empty? ? 0 : (values.sum.to_f / values.size).round(2)
    end

    def empty_summary
      {
        period_days: 0,
        total_deployments: 0,
        successful_deployments: 0,
        failed_deployments: 0,
        success_rate: 0,
        avg_duration: 0,
        min_duration: 0,
        max_duration: 0,
        total_duration: 0
      }
    end

    def empty_task_summary(days)
      {
        period_days: days,
        total_task_executions: 0,
        unique_tasks: 0,
        tasks: {}
      }
    end

    def default_statistics_structure
      {
        deployments: [],
        tasks: [],
        commands: [],
        global: {
          deployments: { total: 0, successful: 0, failed: 0 },
          tasks: { total: 0, successful: 0, failed: 0 },
          commands: { total: 0, successful: 0, failed: 0 },
          total_execution_time: 0
        }
      }
    end

    def update_global_stats(deployment_data)
      @data[:global][:deployments][:total] += 1
      if deployment_data[:success]
        @data[:global][:deployments][:successful] += 1
      else
        @data[:global][:deployments][:failed] += 1
      end
      @data[:global][:total_execution_time] += deployment_data[:duration]
    end

    def update_task_stats(task_data)
      @data[:global][:tasks][:total] += 1
      if task_data[:success]
        @data[:global][:tasks][:successful] += 1
      else
        @data[:global][:tasks][:failed] += 1
      end
      @data[:global][:total_execution_time] += task_data[:duration]
    end

    def update_command_stats(command_data)
      @data[:global][:commands][:total] += 1
      if command_data[:success]
        @data[:global][:commands][:successful] += 1
      else
        @data[:global][:commands][:failed] += 1
      end
      @data[:global][:total_execution_time] += command_data[:duration]
    end

    def export_to_json(file_path)
      File.write(file_path, JSON.pretty_generate(@data))
    end

    # Writes three sections (deployments, tasks, commands) into a single
    # CSV, separated by blank rows.
    def export_to_csv(file_path)
      require 'csv'

      CSV.open(file_path, 'w') do |csv|
        export_deployments_to_csv(csv)
        csv << []
        export_tasks_to_csv(csv)
        csv << []
        export_commands_to_csv(csv)
      end
    end

    def export_deployments_to_csv(csv)
      csv << ['Deployments']
      csv << %w[Timestamp Success Duration TasksCount SuccessCount PipelineName HostsCount]
      @data[:deployments].each do |d|
        csv << [
          Time.at(d[:timestamp]).utc,
          d[:success],
          d[:duration],
          d[:tasks_count],
          d[:success_count],
          d[:pipeline_name],
          d[:hosts_count]
        ]
      end
    end

    def export_tasks_to_csv(csv)
      csv << ['Tasks']
      csv << %w[Timestamp Name Success Duration HostsCount SuccessCount]
      @data[:tasks].each do |t|
        csv << [
          Time.at(t[:timestamp]).utc,
          t[:name],
          t[:success],
          t[:duration],
          t[:hosts_count],
          t[:success_count]
        ]
      end
    end

    def export_commands_to_csv(csv)
      csv << ['Commands']
      csv << %w[Timestamp Name Host Success Duration]
      @data[:commands].each do |c|
        csv << [
          Time.at(c[:timestamp]).utc,
          c[:name],
          c[:host],
          c[:success],
          c[:duration]
        ]
      end
    end
  end
end
@@ -0,0 +1,240 @@
1
# frozen_string_literal: true

module Kdeploy
  # Task class for managing command execution on hosts.
  #
  # A task bundles a list of Command objects with a list of target hosts
  # and executes every applicable command on every host, either in
  # parallel (thread pool, the default) or sequentially.
  class Task
    attr_reader :name, :hosts, :commands, :options
    # Shared variables merged into every command's options at add_command
    # time (passed as :global_variables).
    attr_accessor :global_variables

    # @param name [String] Task name (used in logs and statistics)
    # @param hosts [Host, Array<Host>] Target host(s); scalars are wrapped
    # @param options [Hash] Overrides for #default_options
    #   (:parallel, :fail_fast, :max_concurrent)
    def initialize(name, hosts = [], options = {})
      @name = name
      @hosts = Array(hosts)
      @commands = []
      @options = default_options.merge(options)
      @global_variables = {}
    end

    # Add command to task
    # @param name [String] Command name
    # @param command [String] Command to execute
    # @param options [Hash] Command options
    # @return [Command] Created command
    def add_command(name, command, options = {})
      # NOTE: commands added before later mutations of @global_variables
      # share the same hash object, so later additions are visible to them.
      command_options = options.merge(global_variables: @global_variables)
      cmd = Command.new(name, command, command_options)
      @commands << cmd
      cmd
    end

    # Add host to task
    # @param host [Host] Host to add
    # @return [Host] Added host (returned even when already present)
    def add_host(host)
      @hosts << host unless @hosts.include?(host)
      host
    end

    # Remove host from task
    # @param host [Host] Host to remove
    # @return [Host, nil] Removed host or nil if not found
    def remove_host(host)
      @hosts.delete(host)
    end

    # Execute task on all hosts
    # @return [Hash] Execution results with :success, :results, :duration,
    #   :hosts_count and :success_count (or a minimal success hash when
    #   there is nothing to do)
    def execute
      return empty_execution_result if @commands.empty? || @hosts.empty?

      log_task_start
      start_time = Time.now
      results = execute_commands
      duration = Time.now - start_time
      success_count = count_successful_hosts(results)

      log_task_completion(duration, success_count)
      build_task_result(results, duration, success_count)
    end

    private

    # Default execution options; :max_concurrent nil means "use the
    # configured/global limit" (see #determine_max_concurrent).
    def default_options
      {
        parallel: true,
        fail_fast: false,
        max_concurrent: nil
      }
    end

    # Result returned when there are no commands or no hosts. Note it
    # carries no :duration and is not recorded in statistics.
    def empty_execution_result
      { success: true, results: {} }
    end

    def log_task_start
      KdeployLogger.info("Starting task '#{@name}' on #{@hosts.size} host(s)")
    end

    # Dispatches to the parallel or sequential strategy.
    def execute_commands
      @options[:parallel] ? execute_parallel : execute_sequential
    end

    # Runs every host on a bounded thread pool and gathers per-host results.
    def execute_parallel
      max_concurrent = determine_max_concurrent
      pool = create_thread_pool(max_concurrent)
      futures = create_futures(pool)
      results = collect_future_results(futures)

      shutdown_pool(pool)
      results
    end

    # Concurrency limit: explicit option, then global configuration,
    # then a hard-coded fallback of 10.
    def determine_max_concurrent
      @options[:max_concurrent] ||
        Kdeploy.configuration&.max_concurrent_tasks ||
        10
    end

    # Pool is capped at the host count so idle threads are never created.
    def create_thread_pool(max_concurrent)
      Concurrent::ThreadPoolExecutor.new(
        min_threads: 1,
        max_threads: [max_concurrent, @hosts.size].min
      )
    end

    # One future per host, in @hosts order — #collect_future_results
    # relies on this ordering to map futures back to hosts by index.
    def create_futures(pool)
      @hosts.map do |host|
        Concurrent::Future.execute(executor: pool) do
          execute_on_host(host)
        end
      end
    end

    # Blocks on each future (Future#value waits for completion) and keys
    # results by hostname.
    # NOTE(review): if a future is rejected, #value returns nil; that
    # should not happen here because #execute_on_host rescues
    # StandardError itself — verify for non-StandardError failures.
    def collect_future_results(futures)
      results = {}
      futures.each_with_index do |future, index|
        host = @hosts[index]
        results[host.hostname] = future.value
      end
      results
    end

    # Graceful shutdown with a 30s cap on waiting for in-flight work.
    def shutdown_pool(pool)
      pool.shutdown
      pool.wait_for_termination(30)
    end

    # Runs hosts one at a time; honors fail_fast by aborting after the
    # first failed host.
    def execute_sequential
      results = {}

      @hosts.each do |host|
        results[host.hostname] = execute_on_host(host)

        if should_stop_execution?(results[host.hostname])
          log_fail_fast_stop(host)
          break
        end
      end

      results
    end

    # True when fail_fast is enabled and the given host result failed.
    def should_stop_execution?(result)
      @options[:fail_fast] && !result[:success]
    end

    def log_fail_fast_stop(host)
      KdeployLogger.error(
        "Task '#{@name}' failed on #{host}, stopping execution due to fail_fast option"
      )
    end

    # Connects to a single host, runs all applicable commands, and always
    # cleans up the connection — even when connect itself raises.
    # @return [Hash] per-host result (:success, :commands, :error)
    def execute_on_host(host)
      connection = SSHConnection.new(host)
      host_results = initialize_host_results

      begin
        connection.connect
        execute_commands_on_host(host, connection, host_results)
      rescue StandardError => e
        handle_host_execution_error(host, e, host_results)
      ensure
        connection.cleanup
      end

      host_results
    end

    # Fresh per-host result; assumed successful until a command fails.
    def initialize_host_results
      {
        success: true,
        commands: {},
        error: nil
      }
    end

    # Runs each command that applies to this host (Command#should_run_on?
    # decides), recording outcomes; may break early under fail_fast.
    def execute_commands_on_host(host, connection, host_results)
      @commands.each do |command|
        next unless command.should_run_on?(host)

        command_success = command.execute(host, connection)
        record_command_result(command, command_success, host_results)

        break if should_stop_command_execution?(command_success, host_results)
      end
    end

    # Stores a command's outcome. On failure the host is marked failed;
    # :error is only populated under fail_fast, which is what makes
    # #should_stop_command_execution? trigger.
    def record_command_result(command, success, host_results)
      host_results[:commands][command.name] = {
        success: success,
        result: command.result
      }

      return if success

      host_results[:success] = false
      host_results[:error] = "Command '#{command.name}' failed" if @options[:fail_fast]
    end

    # True only when the command failed AND fail_fast set an :error —
    # the host_results[:error] check is the fail_fast marker.
    def should_stop_command_execution?(command_success, host_results)
      !command_success && @options[:fail_fast] && host_results[:error]
    end

    # Converts a connection/command exception into a failed host result.
    def handle_host_execution_error(host, error, host_results)
      KdeployLogger.error("Task '#{@name}' failed on #{host}: #{error.message}")
      host_results[:success] = false
      host_results[:error] = error.message
    end

    def count_successful_hosts(results)
      results.values.count { |r| r[:success] }
    end

    def log_task_completion(duration, success_count)
      KdeployLogger.info(
        "Task '#{@name}' completed in #{duration.round(2)}s: " \
        "#{success_count}/#{@hosts.size} hosts successful"
      )
    end

    # Assembles the task-level result hash and records it in statistics
    # before returning it to the caller.
    def build_task_result(results, duration, success_count)
      task_result = {
        success: calculate_overall_success(results, success_count),
        results: results,
        duration: duration,
        hosts_count: @hosts.size,
        success_count: success_count
      }

      record_task_statistics(task_result)
      task_result
    end

    # Under fail_fast every host must succeed; otherwise a single
    # successful host counts as overall success.
    def calculate_overall_success(results, success_count)
      @options[:fail_fast] ? results.values.all? { |r| r[:success] } : success_count.positive?
    end

    def record_task_statistics(task_result)
      Kdeploy.statistics.record_task(@name, task_result)
    end
  end
end