nucleus 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +3 -0
  3. data/CHANGELOG.md +18 -4
  4. data/README.md +28 -40
  5. data/Rakefile +137 -137
  6. data/config/nucleus_config.rb +0 -4
  7. data/lib/nucleus/adapter_resolver.rb +115 -115
  8. data/lib/nucleus/adapters/buildpack_translator.rb +79 -79
  9. data/lib/nucleus/adapters/v1/cloud_control/application.rb +108 -108
  10. data/lib/nucleus/adapters/v1/cloud_control/authentication.rb +27 -27
  11. data/lib/nucleus/adapters/v1/cloud_control/cloud_control.rb +153 -153
  12. data/lib/nucleus/adapters/v1/cloud_control/domains.rb +68 -68
  13. data/lib/nucleus/adapters/v1/cloud_control/logs.rb +103 -103
  14. data/lib/nucleus/adapters/v1/cloud_control/vars.rb +88 -88
  15. data/lib/nucleus/adapters/v1/cloud_foundry_v2/domains.rb +149 -149
  16. data/lib/nucleus/adapters/v1/cloud_foundry_v2/logs.rb +303 -303
  17. data/lib/nucleus/adapters/v1/cloud_foundry_v2/services.rb +286 -286
  18. data/lib/nucleus/adapters/v1/heroku/heroku.rb +2 -2
  19. data/lib/nucleus/adapters/v1/heroku/logs.rb +108 -108
  20. data/lib/nucleus/core/adapter_authentication_inductor.rb +0 -2
  21. data/lib/nucleus/core/adapter_extensions/auth/http_basic_auth_client.rb +37 -37
  22. data/lib/nucleus/core/adapter_extensions/http_client.rb +177 -177
  23. data/lib/nucleus/core/common/files/archive_extractor.rb +112 -112
  24. data/lib/nucleus/core/common/files/archiver.rb +91 -91
  25. data/lib/nucleus/core/common/logging/request_log_formatter.rb +48 -48
  26. data/lib/nucleus/core/error_messages.rb +127 -127
  27. data/lib/nucleus/core/models/abstract_model.rb +29 -29
  28. data/lib/nucleus/scripts/load_dependencies.rb +0 -1
  29. data/lib/nucleus/scripts/setup_config.rb +28 -28
  30. data/lib/nucleus/version.rb +3 -3
  31. data/nucleus.gemspec +10 -12
  32. data/spec/factories/models.rb +63 -61
  33. data/spec/integration/api/auth_spec.rb +58 -58
  34. data/spec/test_suites.rake +31 -31
  35. data/spec/unit/common/helpers/auth_helper_spec.rb +73 -73
  36. data/spec/unit/common/oauth2_auth_client_spec.rb +1 -1
  37. data/tasks/compatibility.rake +113 -113
  38. data/tasks/evaluation.rake +162 -162
  39. metadata +16 -30
@@ -1,303 +1,303 @@
1
- module Nucleus
2
- module Adapters
3
- module V1
4
- class CloudFoundryV2 < Stub
5
- module Logs
6
- LOGGREGATOR_TYPES = [Enums::ApplicationLogfileType::API,
7
- Enums::ApplicationLogfileType::APPLICATION,
8
- Enums::ApplicationLogfileType::REQUEST,
9
- Enums::ApplicationLogfileType::SYSTEM]
10
- # Carriage return (newline in Mac OS) + line feed (newline in Unix) == CRLF (newline in Windows)
11
- CRLF = "\r\n"
12
- WSP = "\s"
13
-
14
- # @see Stub#logs
15
- def logs(application_name_or_id)
16
- app_guid = app_guid(application_name_or_id)
17
- # retrieve app for timestamps only :/
18
- app_created = get("/v2/apps/#{app_guid}").body[:metadata][:created_at]
19
- logs = []
20
-
21
- begin
22
- log_files_list = download_file(app_guid, 'logs')
23
- # parse raw response to array
24
- log_files_list.split(CRLF).each do |logfile_line|
25
- filename = logfile_line.rpartition(' ').first.strip
26
- if filename == 'staging_task.log'
27
- filename = 'build'
28
- log_type = Enums::ApplicationLogfileType::BUILD
29
- else
30
- log_type = Enums::ApplicationLogfileType::OTHER
31
- end
32
- # TODO: right now, we always assume the log has recently been updated
33
- logs.push(id: filename, name: filename, type: log_type, created_at: app_created,
34
- updated_at: Time.now.utc.iso8601)
35
- end
36
- rescue Errors::AdapterError
37
- log.debug('no logs directory found for cf application')
38
- end
39
-
40
- # add the default logtypes, available according to:
41
- # http://docs.cloudfoundry.org/devguide/deploy-apps/streaming-logs.html#format
42
- LOGGREGATOR_TYPES.each do |type|
43
- logs.push(id: type, name: type, type: type, created_at: app_created, updated_at: Time.now.utc.iso8601)
44
- end
45
- # TODO: 'all' is probably not perfect, since the build log wont be included
46
- logs.push(id: 'all', name: 'all', type: Enums::ApplicationLogfileType::OTHER,
47
- created_at: app_created, updated_at: Time.now.utc.iso8601)
48
- logs
49
- end
50
-
51
- # @see Stub#log?
52
- def log?(application_name_or_id, log_id)
53
- app_guid = app_guid(application_name_or_id)
54
- # test file existence
55
- log_id = 'staging_task.log' if log_id.to_sym == Enums::ApplicationLogfileType::BUILD
56
- # checks also if application is even valid
57
- response = get("/v2/apps/#{app_guid}/instances/0/files/logs/#{log_id}",
58
- follow_redirects: false, expects: [200, 302, 400])
59
- return true if response == 200 || log_stream?(log_id)
60
- return false if response == 400
61
- # if 302 (only remaining option), followup...
62
-
63
- # download log file
64
- download_file(app_guid, "logs/#{log_id}")
65
- # no error, file exists
66
- true
67
- rescue Errors::AdapterResourceNotFoundError, Errors::UnknownAdapterCallError,
68
- Excon::Errors::NotFound, Excon::Errors::BadRequest
69
- false
70
- end
71
-
72
- # @see Stub#tail
73
- def tail(application_name_or_id, log_id, stream)
74
- app_guid = app_guid(application_name_or_id)
75
- return tail_stream(app_guid, log_id, stream) if log_stream?(log_id)
76
- tail_file(app_guid, log_id, stream)
77
- end
78
-
79
- # @see Stub#log_entries
80
- def log_entries(application_name_or_id, log_id)
81
- app_guid = app_guid(application_name_or_id)
82
- # first check if this log is a file or must be fetched from the loggregator
83
- if log_stream?(log_id)
84
- # fetch recent data from loggregator and return an array of log entries
85
- recent_decoded = recent_log_messages(app_guid, loggregator_filter(log_id))
86
- recent_decoded.collect { |log_msg| construct_log_entry(log_msg) }
87
- elsif log_id.to_sym == Enums::ApplicationLogfileType::BUILD
88
- # handle special staging log
89
- build_log_entries(app_guid)
90
- else
91
- download_logfile_entries(app_guid, log_id)
92
- end
93
- end
94
-
95
- private
96
-
97
- def build_log_entries(app_guid)
98
- log_id = 'staging_task.log'
99
- download_logfile_entries(app_guid, log_id)
100
- rescue Errors::AdapterResourceNotFoundError
101
- # if there was no build yet, return no entries instead of the 404 error
102
- []
103
- end
104
-
105
- def loggregator_filter(log_id)
106
- case log_id.to_sym
107
- when Enums::ApplicationLogfileType::API
108
- filter = ['API']
109
- when Enums::ApplicationLogfileType::APPLICATION
110
- filter = ['APP']
111
- when Enums::ApplicationLogfileType::REQUEST
112
- filter = ['RTR']
113
- when Enums::ApplicationLogfileType::SYSTEM
114
- filter = %w(STG LGR DEA)
115
- when :all
116
- # no filter, show all
117
- filter = nil
118
- else
119
- # invalid log requests --> 404
120
- fail Errors::AdapterResourceNotFoundError,
121
- "Invalid log file '#{log_id}', not available for application '#{app_guid}'"
122
- end
123
- filter
124
- end
125
-
126
- def construct_log_entry(decoded_message)
127
- # 2015-03-22T15:28:55.83+0100 [RTR/0] OUT message...
128
- "#{Time.at(decoded_message.timestamp / 1_000_000_000.0).iso8601} "\
129
- "[#{decoded_message.source_name}/#{decoded_message.source_id}] "\
130
- "#{decoded_message.message_type == 1 ? 'OUT' : 'ERR'} #{decoded_message.message}"
131
- end
132
-
133
- def download_logfile_entries(app_guid, log_id, headers_to_use = nil)
134
- # download log file
135
- logfile_contents = download_file(app_guid, "logs/#{log_id}", headers_to_use)
136
- # split file into entries by line breaks and return an array of log entries
137
- logfile_contents.split("\n")
138
- end
139
-
140
- def download_file(app_guid, file_path, headers_to_use = nil)
141
- expected_statuses = [200, 302, 400, 404]
142
- # Hack, do not create fresh headers (which would fail) when in a deferred action
143
- headers_to_use = headers unless headers_to_use
144
-
145
- # log list consists of 2 parts, loggregator and files
146
- log_files = get("/v2/apps/#{app_guid}/instances/0/files/#{file_path}",
147
- follow_redirects: false, expects: expected_statuses, headers: headers_to_use)
148
- if log_files.status == 400 || log_files.status == 404
149
- fail Errors::AdapterResourceNotFoundError,
150
- "Invalid log file: '#{file_path}' not available for application '#{app_guid}'"
151
- end
152
- return log_files.body if log_files.status == 200
153
-
154
- # status must be 302, follow to the Location
155
- download_location = log_files.headers[:Location]
156
- # if IBM f*cked with the download URL, fix the address
157
- download_location.gsub!(/objectstorage.service.networklayer.com/, 'objectstorage.softlayer.net')
158
- Excon.defaults[:ssl_verify_peer] = false unless @check_certificates
159
-
160
- connection_params = { ssl_verify_peer: @check_certificates }
161
- connection = Excon.new(download_location, connection_params)
162
- downloaded_logfile_response = connection.request(method: :get, expects: expected_statuses)
163
-
164
- if downloaded_logfile_response.status == 404
165
- fail Errors::AdapterResourceNotFoundError,
166
- "Invalid log file: '#{file_path}' not available for application '#{app_guid}'"
167
- end
168
- downloaded_logfile_response.body
169
- end
170
-
171
- def recent_log_messages(app_guid, filter = nil)
172
- loggregator_recent_uri = "https://#{loggregator_endpoint}:443/recent?app=#{app_guid}"
173
- # current log state before tailing, multipart message of protobuf objects
174
- current_log_response = get(loggregator_recent_uri)
175
- current_log_boundary = /boundary=(\w+)/.match(current_log_response.headers['Content-Type'])[1]
176
- current_log = current_log_response.body
177
-
178
- boundary_regexp = /--#{Regexp.quote(current_log_boundary)}(--)?#{CRLF}/
179
- parts = current_log.split(boundary_regexp).collect do |chunk|
180
- header_part = chunk.split(/#{CRLF}#{WSP}*#{CRLF}/m, 2)[0]
181
- if header_part
182
- headers = header_part.split(/\r\n/).map { |kv| kv }
183
- headers.length > 1 ? headers[1] : nil
184
- end
185
- end.compact
186
- # decode log messages
187
- decoded_messages = parts.collect do |proto_message|
188
- Message.decode(proto_message)
189
- end.compact
190
- return decoded_messages unless filter
191
- # return filtered messages
192
- decoded_messages.find_all do |msg|
193
- filter.include?(msg.source_name)
194
- end
195
- end
196
-
197
- def log_stream?(log_id)
198
- LOGGREGATOR_TYPES.include?(log_id.to_sym) || log_id.to_sym == :all
199
- end
200
-
201
- def loggregator_endpoint
202
- @endpoint_url.gsub(%r{^(\w*://)?(api)([-\.\w]+)$}i, 'loggregator\3')
203
- end
204
-
205
- def tail_file(app_guid, log_id, stream)
206
- log.debug 'Tailing CF log file'
207
- log_id = 'staging_task.log' if log_id.to_sym == Enums::ApplicationLogfileType::BUILD
208
-
209
- # cache headers as they are bound to a request and could be lost with the next tick
210
- headers_to_use = headers
211
- latest_pushed_line = -1
212
-
213
- # update every 3 seconds
214
- @tail_file_timer = EM.add_periodic_timer(3) do
215
- log.debug('Poll updated file tail...')
216
- begin
217
- latest_pushed_line = push_file_tail(app_guid, log_id, stream, latest_pushed_line, headers_to_use)
218
- rescue Errors::AdapterResourceNotFoundError
219
- log.debug('Logfile not found, finished tailing')
220
- # file lost, close stream
221
- @tail_file_timer.cancel if @tail_file_timer
222
- stream.close
223
- end
224
- end
225
- # listener to stop polling
226
- StopListener.new(@tail_file_timer, :cancel)
227
- end
228
-
229
- def push_file_tail(app_guid, log_id, stream, pushed_line_idx, headers_to_use)
230
- log.debug('Fetching file for tail response...')
231
- entries = download_logfile_entries(app_guid, log_id, headers_to_use)
232
- # file was shortened, close stream since we do not know where to continue
233
- if entries.length < pushed_line_idx
234
- log.debug('File was modified and shortened, stop tailing the file...')
235
- stream.close
236
- else
237
- entries.each_with_index do |entry, index|
238
- next if index <= pushed_line_idx
239
- pushed_line_idx = index
240
- stream.send_message(entry)
241
- end
242
- pushed_line_idx
243
- end
244
- end
245
-
246
- def tail_stream(app_guid, log_id, stream)
247
- filter = loggregator_filter(log_id)
248
-
249
- # push current state
250
- recent_log_messages(app_guid, filter).each { |entry| stream.send_message(construct_log_entry(entry)) }
251
-
252
- # Now register websocket to receive the latest updates
253
- ws = Faye::WebSocket::Client.new("wss://#{loggregator_endpoint}:443/tail/?app=#{app_guid}",
254
- nil, headers: headers.slice('Authorization'))
255
-
256
- ws.on :message do |event|
257
- log.debug "CF loggregator message received: #{event}"
258
- begin
259
- msg = Message.decode(event.data.pack('C*'))
260
- # notify stream to print new log line if msg type matches the applied filter
261
- stream.send_message(construct_log_entry(msg)) if filter.nil? || filter.include?(msg.source_name)
262
- rescue StandardError => e
263
- log.error "Cloud Foundry log message de-serialization failed: #{e}"
264
- end
265
- end
266
-
267
- ws.on :close do |event|
268
- log.debug "Closing CF loggregator websocket: code=#{event.code}, reason=#{event.reason}"
269
- ws = nil
270
- # notify stream that no more update are to arrive and stream shall be closed
271
- stream.close
272
- end
273
- # return listener to stop websocket
274
- TailStopper.new(ws, :close)
275
- end
276
-
277
- # Message class definition, matching the Protocol Buffer definition of the Cloud Foundry loggregator.
278
- # see also: https://github.com/cloudfoundry/loggregatorlib/blob/master/logmessage/log_message.proto
279
- class Message < ::Protobuf::Message
280
- class MessageType < ::Protobuf::Enum
281
- define :OUT, 1
282
- define :ERR, 2
283
- end
284
-
285
- required :bytes, :message, 1
286
- required Logs::Message::MessageType, :message_type, 2
287
- required :sint64, :timestamp, 3
288
- required :string, :app_id, 4
289
- optional :string, :source_id, 6
290
- repeated :string, :drain_urls, 7
291
- optional :string, :source_name, 8
292
- end
293
-
294
- class Envelope < ::Protobuf::Message
295
- required :string, :routing_key, 1
296
- required :bytes, :signature, 2
297
- required Logs::Message, :log_message, 3
298
- end
299
- end
300
- end
301
- end
302
- end
303
- end
1
+ module Nucleus
2
+ module Adapters
3
+ module V1
4
+ class CloudFoundryV2 < Stub
5
+ module Logs
6
+ LOGGREGATOR_TYPES = [Enums::ApplicationLogfileType::API,
7
+ Enums::ApplicationLogfileType::APPLICATION,
8
+ Enums::ApplicationLogfileType::REQUEST,
9
+ Enums::ApplicationLogfileType::SYSTEM].freeze
10
+ # Carriage return (newline in Mac OS) + line feed (newline in Unix) == CRLF (newline in Windows)
11
+ CRLF = "\r\n".freeze
12
+ WSP = ' '.freeze
13
+
14
+ # @see Stub#logs
15
+ def logs(application_name_or_id)
16
+ app_guid = app_guid(application_name_or_id)
17
+ # retrieve app for timestamps only :/
18
+ app_created = get("/v2/apps/#{app_guid}").body[:metadata][:created_at]
19
+ logs = []
20
+
21
+ begin
22
+ log_files_list = download_file(app_guid, 'logs')
23
+ # parse raw response to array
24
+ log_files_list.split(CRLF).each do |logfile_line|
25
+ filename = logfile_line.rpartition(' ').first.strip
26
+ if filename == 'staging_task.log'
27
+ filename = 'build'
28
+ log_type = Enums::ApplicationLogfileType::BUILD
29
+ else
30
+ log_type = Enums::ApplicationLogfileType::OTHER
31
+ end
32
+ # TODO: right now, we always assume the log has recently been updated
33
+ logs.push(id: filename, name: filename, type: log_type, created_at: app_created,
34
+ updated_at: Time.now.utc.iso8601)
35
+ end
36
+ rescue Errors::AdapterError
37
+ log.debug('no logs directory found for cf application')
38
+ end
39
+
40
+ # add the default logtypes, available according to:
41
+ # http://docs.cloudfoundry.org/devguide/deploy-apps/streaming-logs.html#format
42
+ LOGGREGATOR_TYPES.each do |type|
43
+ logs.push(id: type, name: type, type: type, created_at: app_created, updated_at: Time.now.utc.iso8601)
44
+ end
45
+ # TODO: 'all' is probably not perfect, since the build log wont be included
46
+ logs.push(id: 'all', name: 'all', type: Enums::ApplicationLogfileType::OTHER,
47
+ created_at: app_created, updated_at: Time.now.utc.iso8601)
48
+ logs
49
+ end
50
+
51
+ # @see Stub#log?
52
+ def log?(application_name_or_id, log_id)
53
+ app_guid = app_guid(application_name_or_id)
54
+ # test file existence
55
+ log_id = 'staging_task.log' if log_id.to_sym == Enums::ApplicationLogfileType::BUILD
56
+ # checks also if application is even valid
57
+ response = get("/v2/apps/#{app_guid}/instances/0/files/logs/#{log_id}",
58
+ follow_redirects: false, expects: [200, 302, 400])
59
+ return true if response == 200 || log_stream?(log_id)
60
+ return false if response == 400
61
+ # if 302 (only remaining option), followup...
62
+
63
+ # download log file
64
+ download_file(app_guid, "logs/#{log_id}")
65
+ # no error, file exists
66
+ true
67
+ rescue Errors::AdapterResourceNotFoundError, Errors::UnknownAdapterCallError,
68
+ Excon::Errors::NotFound, Excon::Errors::BadRequest
69
+ false
70
+ end
71
+
72
+ # @see Stub#tail
73
+ def tail(application_name_or_id, log_id, stream)
74
+ app_guid = app_guid(application_name_or_id)
75
+ return tail_stream(app_guid, log_id, stream) if log_stream?(log_id)
76
+ tail_file(app_guid, log_id, stream)
77
+ end
78
+
79
+ # @see Stub#log_entries
80
+ def log_entries(application_name_or_id, log_id)
81
+ app_guid = app_guid(application_name_or_id)
82
+ # first check if this log is a file or must be fetched from the loggregator
83
+ if log_stream?(log_id)
84
+ # fetch recent data from loggregator and return an array of log entries
85
+ recent_decoded = recent_log_messages(app_guid, loggregator_filter(log_id))
86
+ recent_decoded.collect { |log_msg| construct_log_entry(log_msg) }
87
+ elsif log_id.to_sym == Enums::ApplicationLogfileType::BUILD
88
+ # handle special staging log
89
+ build_log_entries(app_guid)
90
+ else
91
+ download_logfile_entries(app_guid, log_id)
92
+ end
93
+ end
94
+
95
+ private
96
+
97
+ def build_log_entries(app_guid)
98
+ log_id = 'staging_task.log'
99
+ download_logfile_entries(app_guid, log_id)
100
+ rescue Errors::AdapterResourceNotFoundError
101
+ # if there was no build yet, return no entries instead of the 404 error
102
+ []
103
+ end
104
+
105
+ def loggregator_filter(log_id)
106
+ case log_id.to_sym
107
+ when Enums::ApplicationLogfileType::API
108
+ filter = ['API']
109
+ when Enums::ApplicationLogfileType::APPLICATION
110
+ filter = ['APP']
111
+ when Enums::ApplicationLogfileType::REQUEST
112
+ filter = ['RTR']
113
+ when Enums::ApplicationLogfileType::SYSTEM
114
+ filter = %w(STG LGR DEA)
115
+ when :all
116
+ # no filter, show all
117
+ filter = nil
118
+ else
119
+ # invalid log requests --> 404
120
+ fail Errors::AdapterResourceNotFoundError,
121
+ "Invalid log file '#{log_id}', not available for application '#{app_guid}'"
122
+ end
123
+ filter
124
+ end
125
+
126
+ def construct_log_entry(decoded_message)
127
+ # 2015-03-22T15:28:55.83+0100 [RTR/0] OUT message...
128
+ "#{Time.at(decoded_message.timestamp / 1_000_000_000.0).iso8601} "\
129
+ "[#{decoded_message.source_name}/#{decoded_message.source_id}] "\
130
+ "#{decoded_message.message_type == 1 ? 'OUT' : 'ERR'} #{decoded_message.message}"
131
+ end
132
+
133
+ def download_logfile_entries(app_guid, log_id, headers_to_use = nil)
134
+ # download log file
135
+ logfile_contents = download_file(app_guid, "logs/#{log_id}", headers_to_use)
136
+ # split file into entries by line breaks and return an array of log entries
137
+ logfile_contents.split("\n")
138
+ end
139
+
140
+ def download_file(app_guid, file_path, headers_to_use = nil)
141
+ expected_statuses = [200, 302, 400, 404]
142
+ # Hack, do not create fresh headers (which would fail) when in a deferred action
143
+ headers_to_use = headers unless headers_to_use
144
+
145
+ # log list consists of 2 parts, loggregator and files
146
+ log_files = get("/v2/apps/#{app_guid}/instances/0/files/#{file_path}",
147
+ follow_redirects: false, expects: expected_statuses, headers: headers_to_use)
148
+ if log_files.status == 400 || log_files.status == 404
149
+ fail Errors::AdapterResourceNotFoundError,
150
+ "Invalid log file: '#{file_path}' not available for application '#{app_guid}'"
151
+ end
152
+ return log_files.body if log_files.status == 200
153
+
154
+ # status must be 302, follow to the Location
155
+ download_location = log_files.headers[:Location]
156
+ # if IBM f*cked with the download URL, fix the address
157
+ download_location.gsub!(/objectstorage.service.networklayer.com/, 'objectstorage.softlayer.net')
158
+ Excon.defaults[:ssl_verify_peer] = false unless @check_certificates
159
+
160
+ connection_params = { ssl_verify_peer: @check_certificates }
161
+ connection = Excon.new(download_location, connection_params)
162
+ downloaded_logfile_response = connection.request(method: :get, expects: expected_statuses)
163
+
164
+ if downloaded_logfile_response.status == 404
165
+ fail Errors::AdapterResourceNotFoundError,
166
+ "Invalid log file: '#{file_path}' not available for application '#{app_guid}'"
167
+ end
168
+ downloaded_logfile_response.body
169
+ end
170
+
171
+ def recent_log_messages(app_guid, filter = nil)
172
+ loggregator_recent_uri = "https://#{loggregator_endpoint}:443/recent?app=#{app_guid}"
173
+ # current log state before tailing, multipart message of protobuf objects
174
+ current_log_response = get(loggregator_recent_uri)
175
+ current_log_boundary = /boundary=(\w+)/.match(current_log_response.headers['Content-Type'])[1]
176
+ current_log = current_log_response.body
177
+
178
+ boundary_regexp = /--#{Regexp.quote(current_log_boundary)}(--)?#{CRLF}/
179
+ parts = current_log.split(boundary_regexp).collect do |chunk|
180
+ header_part = chunk.split(/#{CRLF}#{WSP}*#{CRLF}/m, 2)[0]
181
+ if header_part
182
+ headers = header_part.split(/\r\n/).map { |kv| kv }
183
+ headers.length > 1 ? headers[1] : nil
184
+ end
185
+ end.compact
186
+ # decode log messages
187
+ decoded_messages = parts.collect do |proto_message|
188
+ Message.decode(proto_message)
189
+ end.compact
190
+ return decoded_messages unless filter
191
+ # return filtered messages
192
+ decoded_messages.find_all do |msg|
193
+ filter.include?(msg.source_name)
194
+ end
195
+ end
196
+
197
+ def log_stream?(log_id)
198
+ LOGGREGATOR_TYPES.include?(log_id.to_sym) || log_id.to_sym == :all
199
+ end
200
+
201
+ def loggregator_endpoint
202
+ @endpoint_url.gsub(%r{^(\w*://)?(api)([-\.\w]+)$}i, 'loggregator\3')
203
+ end
204
+
205
+ def tail_file(app_guid, log_id, stream)
206
+ log.debug 'Tailing CF log file'
207
+ log_id = 'staging_task.log' if log_id.to_sym == Enums::ApplicationLogfileType::BUILD
208
+
209
+ # cache headers as they are bound to a request and could be lost with the next tick
210
+ headers_to_use = headers
211
+ latest_pushed_line = -1
212
+
213
+ # update every 3 seconds
214
+ @tail_file_timer = EM.add_periodic_timer(3) do
215
+ log.debug('Poll updated file tail...')
216
+ begin
217
+ latest_pushed_line = push_file_tail(app_guid, log_id, stream, latest_pushed_line, headers_to_use)
218
+ rescue Errors::AdapterResourceNotFoundError
219
+ log.debug('Logfile not found, finished tailing')
220
+ # file lost, close stream
221
+ @tail_file_timer.cancel if @tail_file_timer
222
+ stream.close
223
+ end
224
+ end
225
+ # listener to stop polling
226
+ StopListener.new(@tail_file_timer, :cancel)
227
+ end
228
+
229
+ def push_file_tail(app_guid, log_id, stream, pushed_line_idx, headers_to_use)
230
+ log.debug('Fetching file for tail response...')
231
+ entries = download_logfile_entries(app_guid, log_id, headers_to_use)
232
+ # file was shortened, close stream since we do not know where to continue
233
+ if entries.length < pushed_line_idx
234
+ log.debug('File was modified and shortened, stop tailing the file...')
235
+ stream.close
236
+ else
237
+ entries.each_with_index do |entry, index|
238
+ next if index <= pushed_line_idx
239
+ pushed_line_idx = index
240
+ stream.send_message(entry)
241
+ end
242
+ pushed_line_idx
243
+ end
244
+ end
245
+
246
+ def tail_stream(app_guid, log_id, stream)
247
+ filter = loggregator_filter(log_id)
248
+
249
+ # push current state
250
+ recent_log_messages(app_guid, filter).each { |entry| stream.send_message(construct_log_entry(entry)) }
251
+
252
+ # Now register websocket to receive the latest updates
253
+ ws = Faye::WebSocket::Client.new("wss://#{loggregator_endpoint}:443/tail/?app=#{app_guid}",
254
+ nil, headers: headers.slice('Authorization'))
255
+
256
+ ws.on :message do |event|
257
+ log.debug "CF loggregator message received: #{event}"
258
+ begin
259
+ msg = Message.decode(event.data.pack('C*'))
260
+ # notify stream to print new log line if msg type matches the applied filter
261
+ stream.send_message(construct_log_entry(msg)) if filter.nil? || filter.include?(msg.source_name)
262
+ rescue StandardError => e
263
+ log.error "Cloud Foundry log message de-serialization failed: #{e}"
264
+ end
265
+ end
266
+
267
+ ws.on :close do |event|
268
+ log.debug "Closing CF loggregator websocket: code=#{event.code}, reason=#{event.reason}"
269
+ ws = nil
270
+ # notify stream that no more update are to arrive and stream shall be closed
271
+ stream.close
272
+ end
273
+ # return listener to stop websocket
274
+ TailStopper.new(ws, :close)
275
+ end
276
+
277
+ # Message class definition, matching the Protocol Buffer definition of the Cloud Foundry loggregator.
278
+ # see also: https://github.com/cloudfoundry/loggregatorlib/blob/master/logmessage/log_message.proto
279
+ class Message < ::Protobuf::Message
280
+ class MessageType < ::Protobuf::Enum
281
+ define :OUT, 1
282
+ define :ERR, 2
283
+ end
284
+
285
+ required :bytes, :message, 1
286
+ required Logs::Message::MessageType, :message_type, 2
287
+ required :sint64, :timestamp, 3
288
+ required :string, :app_id, 4
289
+ optional :string, :source_id, 6
290
+ repeated :string, :drain_urls, 7
291
+ optional :string, :source_name, 8
292
+ end
293
+
294
+ class Envelope < ::Protobuf::Message
295
+ required :string, :routing_key, 1
296
+ required :bytes, :signature, 2
297
+ required Logs::Message, :log_message, 3
298
+ end
299
+ end
300
+ end
301
+ end
302
+ end
303
+ end