smart_proxy_reports 0.0.7 → 1.0.0

This diff covers the content of publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: daed1707b3085291e478f53a98ac15c72a0bf8389b2d4843784c4945f1dcace9
- data.tar.gz: d54be2907dcdfadbac97820804e8afb6c9c66b1b507c4199d2b6b65c6daed2e1
+ metadata.gz: ada1a0027fc55bf9192b9f31e6547173c9e491184274f79dc29664dae8b54dc7
+ data.tar.gz: 15a2bd27cd02863d15b3a1352cfdf183e10e02226cb37d90777f735a14b5adce
  SHA512:
- metadata.gz: 744f8177feeb96b3da827a570c66827c49a31ddc1ec33995af7fd48c7ee6c42b54bf5fa7810cae9e871fb27273c44f189c8a628c26d0c5bca34e2f905fdbffd1
- data.tar.gz: 392c1d166303bec98edc9cf138529815415b1e56f4659ab60cb99ec2e47b19eefe7ec50165345460166ad33ca6a1312d3e8eb538f898e8ecfe6803c08d04d972
+ metadata.gz: b3f70e46aa78232547ac0ef5016ba8777be07e1ee3484fd60cde4e3bcb11dcf7bade9ff3fe40615cb944b827a81e104a0331f42e0638c36d7c1ec8b0769309d9
+ data.tar.gz: 488cdacf8e8685b46cd1163f889f22903b27e815baee13d3a139eedc921bdc5a3f4c81fb510d3fe4135c65e1d07745c2458adc7e53d405ab71712a2946dfd6b9
data/README.md CHANGED
@@ -26,15 +26,17 @@ Few words about setting up a dev setup.
  Checkoud foreman-ansible-modules and build it via `make` command. Configure
  Ansible collection path to the build directory:

- [defaults]
- collection_path = /home/lzap/work/foreman-ansible-modules/build
- callback_whitelist = foreman
- [callback_foreman]
- report_type = proxy
- proxy_url = http://localhost:8000/reports
- verify_certs = 0
- client_cert = /home/lzap/DummyX509/client-one.crt
- client_key = /home/lzap/DummyX509/client-one.key
+ ```ini
+ [defaults]
+ collection_path = /home/lzap/work/foreman-ansible-modules/build
+ callback_whitelist = foreman
+ [callback_foreman]
+ report_type = proxy
+ proxy_url = http://localhost:8000/reports
+ verify_certs = 0
+ client_cert = /home/lzap/DummyX509/client-one.crt
+ client_key = /home/lzap/DummyX509/client-one.key
+ ```

  Configure Foreman Ansible callback with the correct Foreman URL:

@@ -46,7 +48,7 @@ Then call Ansible:

  For testing, there are several example data. Before importing them into Foreman, make sure to have `localhost` smart proxy and also a host named `report.example.com`. It is possible to capture example data via `incoming_save_dir` setting. Name generated files correctly and put them into the `contrib/fixtures` directory. There is a utility to use fixtures for development and testing purposes:

- ```
+ ```console
  $ contrib/upload-fixture
  Usage:
  contrib/upload-fixture -h Display this help message
@@ -90,7 +92,7 @@ systemctl enable --now puppetserver
  ```

  If you prefer to use HTTPS, set the different reporturl and configure the CA certificates according to the example below
- ```
+ ```bash
  # use HTTPS, without Katello the port is 8443, with Katello it's 9090
  puppet config set reporturl https://$HOSTNAME:8443/reports/puppet
  # install the Smart Proxy CA certificate to the Puppet's localcacert store
@@ -1,10 +1,10 @@
- require_relative "friendly_message"
-
  # frozen_string_literal: true

+ require_relative "friendly_message"
+
  module Proxy::Reports
  class AnsibleProcessor < Processor
- KEYS_TO_COPY = %w[status check_mode].freeze
+ KEYS_TO_COPY = %w[check_mode].freeze

  def initialize(data, json_body: true)
  super(data, json_body: json_body)
@@ -12,6 +12,9 @@ module Proxy::Reports
  @data = JSON.parse(data)
  end
  @body = {}
+ @failure = 0
+ @change = 0
+ @nochange = 0
  logger.debug "Processing report #{report_id}"
  debug_payload("Input", @data)
  end
@@ -20,36 +23,50 @@ module Proxy::Reports
  @data["uuid"] || generated_report_id
  end

+ def count_summary(result)
+ if result["result"]["changed"]
+ @change += 1
+ else
+ @nochange += 1
+ end
+ if result["failed"]
+ @failure += 1
+ end
+ end
+
  def process_results
  @data["results"]&.each do |result|
  raise("Report do not contain required 'results/result' element") unless result["result"]
  raise("Report do not contain required 'results/task' element") unless result["task"]
- process_facts(result)
  process_level(result)
  friendly_message = FriendlyMessage.new(result)
  result["friendly_message"] = friendly_message.generate_message
  process_keywords(result)
+ count_summary(result)
  end
  @data["results"]
  rescue StandardError => e
- logger.error "Unable to parse results", e
+ log_error("Unable to parse results", e)
  @data["results"]
  end

  def process
- measure :process do
- @body["format"] = "ansible"
- @body["id"] = report_id
- @body["host"] = hostname_from_config || @data["host"]
- @body["proxy"] = Proxy::Reports::Plugin.settings.reported_proxy_hostname
- @body["reported_at"] = @data["reported_at"]
+ @body["format"] = "ansible"
+ @body["id"] = report_id
+ @body["host"] = hostname_from_config || @data["host"]
+ @body["proxy"] = Proxy::Reports::Plugin.settings.reported_proxy_hostname
+ @body["reported_at"] = @data["reported_at"]
+ @body["reported_at_proxy"] = now_utc
+ measure :process_results do
  @body["results"] = process_results
- @body["keywords"] = keywords
- @body["telemetry"] = telemetry
- @body["errors"] = errors if errors?
- KEYS_TO_COPY.each do |key|
- @body[key] = @data[key]
- end
+ end
+ @body["summary"] = build_summary
+ process_root_keywords
+ @body["keywords"] = keywords
+ @body["telemetry"] = telemetry
+ @body["errors"] = errors if errors?
+ KEYS_TO_COPY.each do |key|
+ @body[key] = @data[key]
  end
  end

@@ -62,36 +79,121 @@ module Proxy::Reports
  format: "ansible",
  version: 1,
  host: @body["host"],
- reported_at: @body["reported_at"],
- statuses: process_statuses,
  proxy: @body["proxy"],
- body: @body,
+ change: @body["summary"]["foreman"]["change"],
+ nochange: @body["summary"]["foreman"]["nochange"],
+ failure: @body["summary"]["foreman"]["failure"],
  keywords: @body["keywords"],
+ body: @body,
  )
  end

  def spool_report
+ facts_hash = measure :build_facts do
+ build_facts
+ end
+ if facts_hash
+ debug_payload("Facts output", facts_hash)
+ payload = measure :format_facts do
+ facts_hash.to_json
+ end
+ SpooledHttpClient.instance.spool(:ansible_facts, payload)
+ end
+
  report_hash = build_report
  debug_payload("Output", report_hash)
  payload = measure :format do
  report_hash.to_json
  end
- SpooledHttpClient.instance.spool(report_id, payload)
+ SpooledHttpClient.instance.spool(:report, payload)
+ end
+
+ def find_facts_task
+ @data["results"]&.each do |result|
+ if result["result"] && result["result"]["ansible_facts"]
+ return result["result"]["ansible_facts"]
+ end
+ end
+ false
+ end
+
+ def build_facts
+ facts = find_facts_task
+ return nil unless facts
+ {
+ "name" => hostname_from_config || @data["host"],
+ "facts" => {
+ "ansible_facts" => facts,
+ "_type" => "ansible",
+ "_timestamp" => @data["reported_at"],
+ },
+ }
  end

  private

- def process_facts(result)
- # TODO: add fact processing and sending to the fact endpoint
- result["result"]["ansible_facts"] = {}
+ # foreman-ansible-modules 3.0 does not contain summary field, convert it here
+ # https://github.com/theforeman/foreman-ansible-modules/pull/1325/files
+ def build_summary
+ if @data["summary"]
+ native = @data["summary"]
+ elsif (status = @data["status"])
+ native = {
+ "changed" => status["applied"] || 0,
+ "failures" => status["failed"] || 0,
+ "ignored" => 0,
+ "ok" => 0,
+ "rescued" => 0,
+ "skipped" => status["skipped"] || 0,
+ "unreachable" => 0,
+ }
+ else
+ native = {}
+ end
+ {
+ "foreman" => {
+ "change" => @change, "nochange" => @nochange, "failure" => @failure,
+ },
+ "native" => native,
+ }
+ rescue StandardError => e
+ log_error("Unable to build summary", e)
+ {
+ "foreman" => {
+ "change" => @change, "nochange" => @nochange, "failure" => @failure,
+ },
+ "native" => {},
+ }
+ end
+
+ def process_root_keywords
+ if (summary = @body["summary"])
+ if summary["changed"] && summary["changed"] > 0
+ add_keywords("AnsibleChanged")
+ elsif summary["failures"] && summary["failures"] > 0
+ add_keywords("AnsibleFailures")
+ elsif summary["unreachable"] && summary["unreachable"] > 0
+ add_keywords("AnsibleUnreachable")
+ elsif summary["rescued"] && summary["rescued"] > 0
+ add_keywords("AnsibleRescued")
+ elsif summary["ignored"] && summary["ignored"] > 0
+ add_keywords("AnsibleIgnored")
+ elsif summary["skipped"] && summary["skipped"] > 0
+ add_keywords("AnsibleSkipped")
+ end
+ end
+ rescue StandardError => e
+ log_error("Unable to parse root summary keywords", e)
  end

  def process_keywords(result)
  if result["failed"]
- add_keywords("HasFailure", "AnsibleTaskFailed:#{result["task"]["action"]}")
+ add_keywords("AnsibleFailure", "AnsibleFailure:#{result["task"]["action"]}")
  elsif result["result"]["changed"]
- add_keywords("HasChange")
+ add_keywords("AnsibleChanged")
  end
+ rescue StandardError => e
+ log_error("Unable to parse keywords", e)
  end

  def process_level(result)
@@ -102,18 +204,19 @@ module Proxy::Reports
  else
  result["level"] = "info"
  end
+ rescue StandardError => e
+ log_error("Unable to parse log level", e)
+ result["level"] = "info"
  end
+ end

- def process_statuses
- {
- "applied" => @body["status"]["applied"],
- "failed" => @body["status"]["failed"],
- "pending" => @body["status"]["pending"] || 0, # It's only present in check mode
- "other" => @body["status"]["skipped"],
- }
- rescue StandardError => e
- logger.error "Unable to process statuses", e
- { "applied" => 0, "failed" => 0, "pending" => 0, "other" => 0 }
+ def search_for_facts(result)
+ if result.respond_to?(:key?) && result.key?(:ansible_facts)
+ result[:ansible_facts]
+ elsif result.respond_to?(:each)
+ r = nil
+ result.find { |*a| r = search_for_facts(a.last) }
+ r
  end
  end
  end
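
As noted in the `build_summary` comment above, reports from foreman-ansible-modules 3.0 carry no `summary` field, so the processor falls back to converting the legacy `status` hash. A minimal Ruby sketch of that conversion, using hypothetical counts:

```ruby
# Hypothetical legacy payload that only carries the old "status" hash
# (the counts are made up for illustration).
status = { "applied" => 2, "failed" => 1, "skipped" => 3 }

native = {
  "changed"     => status["applied"] || 0,  # => 2
  "failures"    => status["failed"] || 0,   # => 1
  "ignored"     => 0,
  "ok"          => 0,
  "rescued"     => 0,
  "skipped"     => status["skipped"] || 0,  # => 3
  "unreachable" => 0,
}

# The spooled body then carries both views:
# "summary" => { "foreman" => { "change" => ..., "nochange" => ..., "failure" => ... },
#                "native"  => native }
puts native
```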
@@ -13,8 +13,11 @@ class FriendlyMessage

  case @task_tree["action"]
  when "ansible.builtin.package", "package" then msg = package_message
+ when "ansible.builtin.known_hosts", "known_hosts" then msg = known_hosts_message
+ when "ansible.builtin.pip", "pip" then msg = pip_message
  when "ansible.builtin.template", "template" then msg = template_message
  when "ansible.builtin.service", "service" then msg = service_message
+ when "ansible.builtin.unarchive", "unarchive" then msg = unarchive_message
  when "ansible.builtin.group", "group" then msg = group_message
  when "ansible.builtin.user", "user" then msg = user_message
  when "ansible.builtin.cron", "cron" then msg = cron_message
@@ -40,6 +43,15 @@ class FriendlyMessage
  "Package(s) #{packages} are #{state}"
  end

+ def known_hosts_message
+ "#{@module_args_tree["name"]} is #{@module_args_tree["state"]} in #{@module_args_tree["path"]}"
+ end
+
+ def pip_message
+ packages = human_readable_array(@module_args_tree["name"]) || "contained in #{@module_args_tree["requirements"]}"
+ "Package(s) #{packages} are #{@module_args_tree["state"]}"
+ end
+
  def template_message
  "Render template #{@module_args_tree["_original_basename"]} to #{@result_tree["dest"]}"
  end
@@ -48,6 +60,10 @@ class FriendlyMessage
  "Service #{@result_tree["name"]} is #{@result_tree["state"]} and enabled: #{@result_tree["enabled"]}"
  end

+ def unarchive_message
+ "Archive #{@module_args_tree["src"]} unpacked into #{@module_args_tree["dest"]}"
+ end
+
  def group_message
  "User group #{@result_tree["name"]} is #{@result_tree["state"]} with gid: #{@result_tree["gid"]}"
  end
@@ -31,19 +31,26 @@ module Proxy::Reports
  @hostname_from_config ||= Proxy::Reports::Plugin.settings.override_hostname
  end

- def build_report_root(format:, version:, host:, reported_at:, statuses:, proxy:, body:, keywords:)
+ def now_utc
+ # make sure it contains TZ info: "2022-01-20 14:16:26 UTC"
+ Time.now.utc.to_s
+ end
+
+ def build_report_root(format:, version:, host:, proxy:, change:, nochange:, failure:, body:, keywords:)
  {
  "host_report" => {
  "format" => format,
  "version" => version,
  "host" => host,
- "reported_at" => reported_at,
+ "reported_at" => now_utc,
  "proxy" => proxy,
  "body" => @json_body ? body.to_json : body,
  "keywords" => keywords,
- }.merge(statuses),
+ "change" => change,
+ "nochange" => nochange,
+ "failure" => failure,
+ },
  }
- # TODO add metric with total time
  end

  def debug_payload?
@@ -67,8 +74,14 @@ module Proxy::Reports

  attr_reader :errors

- def log_error(message)
- @errors << message.to_s
+ def log_error(message, exception = nil)
+ msg = if exception
+ "#{message}: #{exception}"
+ else
+ message
+ end
+ logger.error msg, exception
+ @errors << msg.to_s
  end

  def errors?
@@ -34,37 +34,46 @@ module Proxy::Reports
  end
  logs
  rescue StandardError => e
- logger.error "Unable to parse logs", e
+ log_error("Unable to parse logs", e)
  logs
  end

+ def process_root_keywords
+ status = @data["status"]
+ if status == "changed"
+ add_keywords("PuppetStatusChanged")
+ elsif status == "unchanged"
+ add_keywords("PuppetStatusUnchanged")
+ elsif status == "failed"
+ add_keywords("PuppetStatusFailed")
+ end
+ if @data["noop"] == "true"
+ add_keywords("PuppetNoop")
+ end
+ if @data["noop_pending"] == "true"
+ add_keywords("PuppetNoopPending")
+ end
+ rescue StandardError => e
+ log_error("Unable to parse root keywords", e)
+ end
+
  def process_resource_statuses
  statuses = []
  @data["resource_statuses"]&.each_pair do |key, value|
  statuses << key
  @evaluation_times << [key, value["evaluation_time"]]
- # failures
- add_keywords("PuppetResourceFailed:#{key}", "PuppetHasFailure") if value["failed"] || value["failed_to_restart"]
- value["events"]&.each do |event|
- add_keywords("PuppetResourceFailed:#{key}", "PuppetHasFailure") if event["status"] == "failed"
- add_keywords("PuppetHasCorrectiveChange") if event["corrective_change"]
- end
- # changes
- add_keywords("PuppetHasChange") if value["changed"]
- add_keywords("PuppetHasChange") if value["change_count"] && value["change_count"] > 0
- # changes
- add_keywords("PuppetIsOutOfSync") if value["out_of_sync"]
- add_keywords("PuppetIsOutOfSync") if value["out_of_sync_count"] && value["out_of_sync_count"] > 0
- # skips
- add_keywords("PuppetHasSkips") if value["skipped"]
- # corrective changes
- add_keywords("PuppetHasCorrectiveChange") if value["corrective_change"]
- # others
+ add_keywords("PuppetFailed:#{key}", "PuppetFailed") if value["failed"]
+ add_keywords("PuppetFailedToRestart:#{key}", "PuppetFailedToRestart") if value["failed_to_restart"]
+ add_keywords("PuppetCorrectiveChange") if value["corrective_change"]
+ add_keywords("PuppetSkipped") if value["skipped"]
+ add_keywords("PuppetRestarted") if value["restarted"]
+ add_keywords("PuppetScheduled") if value["scheduled"]
+ add_keywords("PuppetOutOfSync") if value["out_of_sync"]
  add_keywords("PuppetEnvironment:#{@data["environment"]}") if @data["environment"]
  end
  statuses
  rescue StandardError => e
- logger.error "Unable to parse resource_statuses", e
+ log_error("Unable to parse resource_statuses", e)
  statuses
  end

@@ -77,27 +86,36 @@ module Proxy::Reports
  end
  @evaluation_times
  rescue StandardError => e
- logger.error "Unable to process evaluation_times", e
+ log_error("Unable to parse evaluation_times", e)
  []
  end

  def process
- measure :process do
- @body["format"] = "puppet"
- @body["id"] = report_id
- @body["host"] = hostname_from_config || @data["host"]
- @body["proxy"] = Proxy::Reports::Plugin.settings.reported_proxy_hostname
- @body["reported_at"] = @data["time"]
- KEYS_TO_COPY.each do |key|
- @body[key] = @data[key]
- end
+ @body["format"] = "puppet"
+ @body["id"] = report_id
+ @body["host"] = hostname_from_config || @data["host"]
+ @body["proxy"] = Proxy::Reports::Plugin.settings.reported_proxy_hostname
+ @body["reported_at"] = @data["time"]
+ @body["reported_at_proxy"] = now_utc
+ KEYS_TO_COPY.each do |key|
+ @body[key] = @data[key]
+ end
+ process_root_keywords
+ measure :process_logs do
  @body["logs"] = process_logs
+ end
+ measure :process_resource_statuses do
  @body["resource_statuses"] = process_resource_statuses
- @body["keywords"] = keywords
+ end
+ measure :process_summary do
+ @body["summary"] = process_summary
+ end
+ measure :process_evaluation_times do
  @body["evaluation_times"] = process_evaluation_times
- @body["telemetry"] = telemetry
- @body["errors"] = errors if errors?
  end
+ @body["telemetry"] = telemetry
+ @body["keywords"] = keywords
+ @body["errors"] = errors if errors?
  end

  def build_report
@@ -109,11 +127,12 @@ module Proxy::Reports
  format: "puppet",
  version: 1,
  host: @body["host"],
- reported_at: @body["reported_at"],
- statuses: process_statuses,
  proxy: @body["proxy"],
- body: @body,
+ change: @body["summary"]["foreman"]["change"],
+ nochange: @body["summary"]["foreman"]["nochange"],
+ failure: @body["summary"]["foreman"]["failure"],
  keywords: @body["keywords"],
+ body: @body,
  )
  end

@@ -123,22 +142,32 @@ module Proxy::Reports
  payload = measure :format do
  report_hash.to_json
  end
- SpooledHttpClient.instance.spool(report_id, payload)
+ SpooledHttpClient.instance.spool(:report, payload)
  end

  private

- def process_statuses
- stats = @body["metrics"]["resources"]["values"].collect { |s| [s[0], s[2]] }.to_h
+ def process_summary
+ events = @body["metrics"]["events"]["values"].collect { |s| [s[0], s[2]] }.to_h
+ resources = @body["metrics"]["resources"]["values"].collect { |s| [s[0], s[2]] }.to_h
  {
- "applied" => stats["changed"] + stats["corrective_change"],
- "failed" => stats["failed"] + stats["failed_to_restart"],
- "pending" => stats["scheduled"],
- "other" => stats["restarted"] + stats["skipped"] + stats["out_of_sync"],
+ "foreman" => {
+ "change" => events["success"],
+ "nochange" => resources["total"] - events["failure"] - events["success"],
+ "failure" => events["failure"],
+ },
+ "native" => resources,
  }
  rescue StandardError => e
- logger.error "Unable to process statuses", e
- { "applied" => 0, "failed" => 0, "pending" => 0, "other" => 0 }
+ log_error("Unable to parse summary counts", e)
+ {
+ "foreman" => {
+ "change" => 0,
+ "nochange" => 0,
+ "failure" => 0,
+ },
+ "native" => {},
+ }
  end
  end
  end
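
For the Puppet side, the new `process_summary` derives the Foreman counters from the flattened `events` and `resources` metrics shown above. A worked example with hypothetical metric values:

```ruby
# Hypothetical flattened Puppet metrics (illustrative values only).
events    = { "success" => 3, "failure" => 1, "total" => 4 }
resources = { "total" => 10, "changed" => 3, "failed" => 1, "out_of_sync" => 3, "skipped" => 0 }

summary = {
  "foreman" => {
    "change"   => events["success"],                                          # 3
    "nochange" => resources["total"] - events["failure"] - events["success"], # 10 - 1 - 3 = 6
    "failure"  => events["failure"],                                          # 1
  },
  "native" => resources,
}
puts summary
```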
@@ -47,6 +47,7 @@ module Proxy::Reports
  log_halt(415, "Missing body") if input.empty?
  json_body = to_bool(params[:json_body], true)
  processor = Processor.new_processor(format, input, json_body: json_body)
+ status 202
  processor.spool_report
  rescue => e
  log_halt 415, e, "Error during report processing: #{e.message}"
@@ -52,6 +52,14 @@ module Proxy::Reports
  @worker.join
  end

+ def get_endpoint(name)
+ if name&.end_with? 'ansible_facts'
+ "/api/v2/hosts/facts"
+ else
+ "/api/v2/host_reports"
+ end
+ end
+
  def process
  processed = 0
  client = ::Proxy::HttpRequest::ForemanRequest.new
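
The Foreman endpoint is chosen purely from the spool file name suffix. A standalone sketch of that routing (the helper name `endpoint_for` and the file names are made up for illustration):

```ruby
# Mirrors the get_endpoint logic above: spool files are named
# "<unique_filename>-<prefix>", and the suffix selects the Foreman API path.
def endpoint_for(spool_file_name)
  if spool_file_name&.end_with?("ansible_facts")
    "/api/v2/hosts/facts"
  else
    "/api/v2/host_reports"
  end
end

puts endpoint_for("r7e24gabc123-report")        # => /api/v2/host_reports
puts endpoint_for("r7e24gabc123-ansible_facts") # => /api/v2/hosts/facts
```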
@@ -59,11 +67,12 @@ module Proxy::Reports
  # send all files via a single persistent HTTP connection
  logger.debug "Opening HTTP connection to Foreman"
  client.http.start do |http|
- Dir.glob(spool_path("todo", "*")) do |filename|
+ Dir.glob(spool_path("todo", "*")).sort.each do |filename|
  basename = File.basename(filename)
+ endpoint = get_endpoint(basename)
  logger.debug "Sending report #{basename}"
  begin
- post = factory.create_post("/api/v2/host_reports", File.read(filename))
+ post = factory.create_post(endpoint, File.read(filename))
  response = http.request(post)
  logger.info "Report #{basename} sent with HTTP response #{response.code}"
  logger.debug { "Response body: #{response.body}" }
@@ -74,7 +83,7 @@ module Proxy::Reports
  FileUtils.rm_f spool_path("todo", basename)
  end
  else
- logger.debug { "Moving failed report to 'fail' spool directory" }
+ logger.debug { "Moving failed report to 'fail' spool directory" }
  spool_move("todo", "done", basename)
  end
  processed += 1
@@ -91,12 +100,20 @@ module Proxy::Reports
  @worker.wakeup if @worker
  end

- def spool(filename, data)
- filename = filename.gsub(/[^0-9a-z]/i, "")
+ def spool(prefix, data)
+ filename = unique_filename + "-" + prefix.to_s
  file = spool_path("temp", filename)
  File.open(file, "w") { |f| f.write(data) }
  spool_move("temp", "todo", filename)
  wakeup
  end
+
+ private
+
+ # Ensure that files are named so they can be sorted and processed in the same order
+ def unique_filename
+ Process.clock_gettime(Process::CLOCK_REALTIME, :second).to_s(36) +
+ Process.clock_gettime(Process::CLOCK_MONOTONIC, :nanosecond).to_s(36)
+ end
  end
  end
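
The `unique_filename` helper used by `spool` encodes two clock readings in base 36 so spooled files sort roughly in creation order. A minimal runnable sketch (the timestamp below is illustrative):

```ruby
# The base-36 epoch-seconds prefix keeps spool files roughly in creation order
# when sorted by name; the monotonic nanosecond reading keeps names unique
# within the same second.
puts 1645000000.to_s(36) # => "r7e24g"

realtime  = Process.clock_gettime(Process::CLOCK_REALTIME, :second).to_s(36)
monotonic = Process.clock_gettime(Process::CLOCK_MONOTONIC, :nanosecond).to_s(36)
puts realtime + monotonic + "-report" # e.g. "r7e24g...-report"
```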
@@ -1,5 +1,5 @@
  module Proxy
  module Reports
- VERSION = "0.0.7"
+ VERSION = "1.0.0"
  end
  end
@@ -16,7 +16,9 @@
  :keep_reports: false

  # Development settings (do not use)
+ #
  # Override hostnames of incoming reports
  #:override_hostname: report.example.com
+ #
  # Store input payloads in a directory
  #:incoming_save_dir: /var/lib/foreman-proxy/reports/incoming
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: smart_proxy_reports
  version: !ruby/object:Gem::Version
- version: 0.0.7
+ version: 1.0.0
  platform: ruby
  authors:
  - Lukas Zapletal
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-12-14 00:00:00.000000000 Z
+ date: 2022-02-16 00:00:00.000000000 Z
  dependencies: []
  description: Transform and upload Foreman host reports via REST API
  email: lukas-x@zapletalovi.com