fluent-plugin-vadimberezniker-gcp 0.1.0

@@ -0,0 +1,2680 @@
1
+ # Copyright 2016 Google Inc. All rights reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ # Enable Coveralls for plugin test coverage analysis.
16
+ require 'coveralls'
17
+ Coveralls.wear!
18
+
19
+ require 'google/apis'
20
+ require 'helper'
21
+ require 'mocha/test_unit'
22
+ require 'prometheus/client'
23
+ require 'webmock/test_unit'
24
+ require 'cgi'
25
+
26
+ require_relative 'asserts'
27
+ require_relative 'constants'
28
+ require_relative 'utils'
29
+
30
+ module Monitoring
31
+ # Prevent OpenCensus from writing to the network.
32
+ OpenCensusMonitoringRegistry.class_eval do
33
+ # Suppress redefine warning (https://bugs.ruby-lang.org/issues/17055).
34
+ alias_method :export, :export
35
+ define_method(:export) do
36
+ nil
37
+ end
38
+ end
39
+ end
40
+
41
+ # Unit tests for Google Cloud Logging plugin
42
+ module BaseTest
43
+ include Asserts
44
+ include Constants
45
+ include Utils
46
+
47
+ def setup
48
+ Fluent::Test.setup
49
+ delete_env_vars
50
+
51
+ # Unregister Prometheus metrics.
52
+ registry = Prometheus::Client.registry
53
+ registry.unregister(:stackdriver_successful_requests_count)
54
+ registry.unregister(:stackdriver_failed_requests_count)
55
+ registry.unregister(:stackdriver_ingested_entries_count)
56
+ registry.unregister(:stackdriver_dropped_entries_count)
57
+ registry.unregister(:stackdriver_retried_entries_count)
58
+
59
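+     # Stub both token endpoint URLs so no real OAuth requests go out.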
+ setup_auth_stubs('https://www.googleapis.com/oauth2/v4/token')
60
+ setup_auth_stubs('https://oauth2.googleapis.com/token')
61
+ @logs_sent = []
62
+ end
63
+
64
+ # Shared tests.
65
+
66
+ def test_configure_service_account_application_default
67
+ setup_gce_metadata_stubs
68
+ d = create_driver
69
+ assert_equal HOSTNAME, d.instance.vm_name
70
+ end
71
+
72
+ def test_configure_service_account_private_key
73
+     # Using an out-of-date config method.
74
+ exception_count = 0
75
+ begin
76
+ create_driver(PRIVATE_KEY_CONFIG)
77
+ rescue Fluent::ConfigError => e
78
+ assert e.message.include? 'Please remove configuration parameters'
79
+ exception_count += 1
80
+ end
81
+ assert_equal 1, exception_count
82
+ end
83
+
84
+ def test_configure_logging_api_url
85
+ setup_gce_metadata_stubs
86
+ {
87
+ APPLICATION_DEFAULT_CONFIG => DEFAULT_LOGGING_API_URL,
88
+ CUSTOM_LOGGING_API_URL_CONFIG => CUSTOM_LOGGING_API_URL
89
+ }.each do |(config, url)|
90
+ d = create_driver(config)
91
+ assert_equal url, d.instance.instance_variable_get(:@logging_api_url)
92
+ end
93
+ end
94
+
95
+ def test_configure_custom_metadata
96
+ setup_no_metadata_service_stubs
97
+ d = create_driver(CUSTOM_METADATA_CONFIG)
98
+ assert_equal CUSTOM_PROJECT_ID, d.instance.project_id
99
+ assert_equal CUSTOM_ZONE, d.instance.zone
100
+ assert_equal CUSTOM_VM_ID, d.instance.vm_id
101
+ end
102
+
103
+ def test_configure_metadata_missing_parts_on_other_platforms
104
+ setup_no_metadata_service_stubs
105
+ Common::Utils::CredentialsInfo.stubs(:project_id).returns(nil)
106
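+     # Each entry: [config, metadata parts expected to be reported missing,
+     # whether the config is valid].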
+ [
107
+ [CONFIG_MISSING_METADATA_PROJECT_ID, ['project_id'], false],
108
+ [CONFIG_MISSING_METADATA_ZONE, [], true],
109
+ [CONFIG_MISSING_METADATA_VM_ID, [], true],
110
+ [CONFIG_MISSING_METADATA_ALL, ['project_id'], false]
111
+ ].each_with_index do |(config, missing_parts, is_valid_config), index|
112
+ create_driver(config)
113
+ assert_true is_valid_config, "Invalid config at index #{index} should "\
114
+ 'have raised an error.'
115
+ rescue Fluent::ConfigError => e
116
+ assert_false is_valid_config, "Valid config at index #{index} should "\
117
+ "not have raised an error #{e}."
118
+ assert e.message.include?('Unable to obtain metadata parameters:'),
119
+ "Index #{index} failed."
120
+ missing_parts.each do |part|
121
+ assert e.message.include?(part), "Index #{index} failed."
122
+ end
123
+ end
124
+ end
125
+
126
+ def test_configure_ignores_unknown_monitoring_type
127
+ # Verify that driver creation succeeds when monitoring type is not
128
+ # "prometheus" or "opencensus" (in which case, we simply don't record
129
+ # metrics), and that the counters are set to nil.
130
+ setup_gce_metadata_stubs
131
+ create_driver(CONFIG_UNKNOWN_MONITORING_TYPE)
132
+ assert_nil(Prometheus::Client.registry.get(
133
+ :stackdriver_successful_requests_count
134
+ ))
135
+ assert_nil(Prometheus::Client.registry.get(
136
+ :stackdriver_failed_requests_count
137
+ ))
138
+ assert_nil(Prometheus::Client.registry.get(
139
+ :stackdriver_ingested_entries_count
140
+ ))
141
+ assert_nil(Prometheus::Client.registry.get(
142
+ :stackdriver_dropped_entries_count
143
+ ))
144
+ assert_nil(Prometheus::Client.registry.get(
145
+ :stackdriver_retried_entries_count
146
+ ))
147
+ assert_nil(OpenCensus::Stats::MeasureRegistry.get(
148
+ Monitoring::MetricTranslator.new(
149
+ :stackdriver_successful_requests_count, {}
150
+ )
151
+ ))
152
+ assert_nil(OpenCensus::Stats::MeasureRegistry.get(
153
+ Monitoring::MetricTranslator.new(
154
+ :stackdriver_failed_requests_count, {}
155
+ )
156
+ ))
157
+ assert_nil(OpenCensus::Stats::MeasureRegistry.get(
158
+ Monitoring::MetricTranslator.new(
159
+ :stackdriver_ingested_entries_count, {}
160
+ )
161
+ ))
162
+ assert_nil(OpenCensus::Stats::MeasureRegistry.get(
163
+ Monitoring::MetricTranslator.new(
164
+ :stackdriver_dropped_entries_count, {}
165
+ )
166
+ ))
167
+ assert_nil(OpenCensus::Stats::MeasureRegistry.get(
168
+ Monitoring::MetricTranslator.new(
169
+ :stackdriver_retried_entries_count, {}
170
+ )
171
+ ))
172
+ end
173
+
174
+ def test_configure_uses_metrics_resource
175
+ setup_gce_metadata_stubs
176
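+     # All three config variants should yield the same monitored resource.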
+ [CONFIG_METRICS_RESOURCE_JSON,
177
+ CONFIG_METRICS_RESOURCE_HASH,
178
+ CONFIG_METRICS_RESOURCE_JSON_HASH].each_with_index do |config, index|
179
+ d = create_driver(config)
180
+ assert_equal 'custom_resource', d.instance.monitoring_resource.type, \
181
+ "Index #{index}"
182
+ assert_equal '123', d.instance.monitoring_resource.labels['label1'], \
183
+ "Index #{index}"
184
+ assert_equal 'abc', d.instance.monitoring_resource.labels['label2'], \
185
+ "Index #{index}"
186
+ assert_true d.instance.instance_variable_get(:@enable_monitoring)
187
+ registry = d.instance.instance_variable_get(:@registry)
188
+ assert_not_nil registry
189
+ monitored_resource = registry.instance_variable_get(
190
+ :@metrics_monitored_resource
191
+ )
192
+ assert_equal('custom_resource', monitored_resource.type, "Index #{index}")
193
+ assert_equal({ 'label1' => '123', 'label2' => 'abc' },
194
+ monitored_resource.labels, "Index #{index}")
195
+ end
196
+ end
197
+
198
+ def test_configure_metrics_resource_validation
199
+ setup_gce_metadata_stubs
200
+ {
201
+ CONFIG_METRICS_RESOURCE_JSON_NO_TYPE => /type must be a string/,
202
+ CONFIG_METRICS_RESOURCE_JSON_BAD_LABELS => /labels must be a hash/,
203
+ CONFIG_METRICS_RESOURCE_JSON_BAD_KEYS =>
204
+ /unrecognized keys: \[:random\]/,
205
+ CONFIG_METRICS_RESOURCE_JSON_BAD_KEYS_LABELS =>
206
+ /unrecognized keys: \[:"labels\.random"\]/,
207
+ CONFIG_METRICS_RESOURCE_JSON_BAD_KEYS_NO_LABELS =>
208
+ /unrecognized keys: \[:random\]/
209
+ }.each_with_index do |(config, pattern), index|
210
+ create_driver(config)
211
+ assert false,
212
+ "Invalid config at index #{index} should have raised an error."
213
+ rescue Fluent::ConfigError => e
214
+ assert e.message.match?(pattern), \
215
+ "Index #{index} failed: got #{e.message}."
216
+ end
217
+ end
218
+
219
+ def test_metadata_loading
220
+ setup_gce_metadata_stubs
221
+ d = create_driver
222
+ d.run
223
+ assert_equal PROJECT_ID, d.instance.project_id
224
+ assert_equal ZONE, d.instance.zone
225
+ assert_equal VM_ID, d.instance.vm_id
226
+ assert_equal COMPUTE_CONSTANTS[:resource_type], d.instance.resource.type
227
+ end
228
+
229
+ def test_managed_vm_metadata_loading
230
+ setup_gce_metadata_stubs
231
+ setup_managed_vm_metadata_stubs
232
+ d = create_driver
233
+ d.run
234
+ assert_equal PROJECT_ID, d.instance.project_id
235
+ assert_equal ZONE, d.instance.zone
236
+ assert_equal VM_ID, d.instance.vm_id
237
+ assert_equal APPENGINE_CONSTANTS[:resource_type], d.instance.resource.type
238
+ assert_equal MANAGED_VM_BACKEND_NAME,
239
+ d.instance.resource.labels['module_id']
240
+ assert_equal MANAGED_VM_BACKEND_VERSION,
241
+ d.instance.resource.labels['version_id']
242
+ end
243
+
244
+ def test_gce_metadata_does_not_load_when_use_metadata_service_is_false
245
+ Fluent::GoogleCloudOutput.any_instance.expects(:fetch_metadata).never
246
+ d = create_driver(NO_METADATA_SERVICE_CONFIG + CUSTOM_METADATA_CONFIG)
247
+ d.run
248
+ assert_equal CUSTOM_PROJECT_ID, d.instance.project_id
249
+ assert_equal CUSTOM_ZONE, d.instance.zone
250
+ assert_equal CUSTOM_VM_ID, d.instance.vm_id
251
+ assert_equal COMPUTE_CONSTANTS[:resource_type], d.instance.resource.type
252
+ end
253
+
254
+ def test_gce_used_when_detect_subservice_is_false
255
+ setup_gce_metadata_stubs
256
+ # This would cause the resource type to be container.googleapis.com if not
257
+ # for the detect_subservice=false config.
258
+ setup_k8s_metadata_stubs
259
+ d = create_driver(NO_DETECT_SUBSERVICE_CONFIG)
260
+ d.run
261
+ assert_equal COMPUTE_CONSTANTS[:resource_type], d.instance.resource.type
262
+ end
263
+
264
+ def test_metadata_overrides
265
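+     # Each value is [metadata platform, expected project_id, expected zone,
+     # expected vm_id].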
+ {
266
+ # In this case we are overriding all configured parameters so we should
267
+ # see all "custom" values rather than the ones from the metadata server.
268
+ CUSTOM_METADATA_CONFIG =>
269
+ ['gce', CUSTOM_PROJECT_ID, CUSTOM_ZONE, CUSTOM_VM_ID],
270
+ # Similar to above, but we are not overriding project_id in this config so
271
+ # we should see the metadata value for project_id and "custom" otherwise.
272
+ CONFIG_MISSING_METADATA_PROJECT_ID =>
273
+ ['gce', PROJECT_ID, CUSTOM_ZONE, CUSTOM_VM_ID],
274
+ CONFIG_EC2_PROJECT_ID =>
275
+ ['ec2', EC2_PROJECT_ID, EC2_PREFIXED_ZONE, EC2_VM_ID],
276
+ CONFIG_EC2_PROJECT_ID_AND_CUSTOM_VM_ID =>
277
+ ['ec2', EC2_PROJECT_ID, EC2_PREFIXED_ZONE, CUSTOM_VM_ID],
278
+ CONFIG_EC2_PROJECT_ID_USE_REGION =>
279
+ ['ec2', EC2_PROJECT_ID, EC2_PREFIXED_REGION, EC2_VM_ID]
280
+ }.each_with_index do |(config, parts), index|
281
+ send("setup_#{parts[0]}_metadata_stubs")
282
+ d = create_driver(config)
283
+ d.run
284
+ assert_equal parts[1], d.instance.project_id, "Index #{index} failed."
285
+ assert_equal parts[2], d.instance.zone, "Index #{index} failed."
286
+ assert_equal parts[3], d.instance.vm_id, "Index #{index} failed."
287
+ end
288
+ end
289
+
290
+ def test_ec2_metadata_requires_project_id
291
+ setup_ec2_metadata_stubs
292
+ exception_count = 0
293
+ begin
294
+ create_driver
295
+ rescue Fluent::ConfigError => e
296
+ assert e.message.include? 'Unable to obtain metadata parameters:'
297
+ assert e.message.include? 'project_id'
298
+ exception_count += 1
299
+ end
300
+ assert_equal 1, exception_count
301
+ end
302
+
303
+ def test_project_id_from_credentials
304
+ %w[gce ec2].each do |platform|
305
+ send("setup_#{platform}_metadata_stubs")
306
+ [IAM_CREDENTIALS, NEW_STYLE_CREDENTIALS, LEGACY_CREDENTIALS].each \
307
+ do |creds|
308
+ ENV[CREDENTIALS_PATH_ENV_VAR] = creds[:path]
309
+ d = create_driver
310
+ d.run
311
+ assert_equal creds[:project_id], d.instance.project_id
312
+ end
313
+ end
314
+ end
315
+
316
+ def test_one_log
317
+ setup_gce_metadata_stubs
318
+ setup_logging_stubs do
319
+ d = create_driver
320
+ d.emit('message' => log_entry(0))
321
+ d.run
322
+ end
323
+ verify_log_entries(1, COMPUTE_PARAMS)
324
+ end
325
+
326
+ def test_one_log_with_json_credentials
327
+ setup_gce_metadata_stubs
328
+ ENV[CREDENTIALS_PATH_ENV_VAR] = IAM_CREDENTIALS[:path]
329
+ setup_logging_stubs do
330
+ d = create_driver
331
+ d.emit('message' => log_entry(0))
332
+ d.run
333
+ end
334
+ verify_log_entries(1, COMPUTE_PARAMS.merge(
335
+ project_id: IAM_CREDENTIALS[:project_id]
336
+ ))
337
+ end
338
+
339
+ def test_invalid_json_credentials
340
+ %w[gce_metadata ec2_metadata no_metadata_service].each do |platform|
341
+ send("setup_#{platform}_stubs")
342
+ exception_count = 0
343
+ ENV[CREDENTIALS_PATH_ENV_VAR] = INVALID_CREDENTIALS[:path]
344
+ begin
345
+ create_driver
346
+ rescue RuntimeError => e
347
+ assert e.message.include? 'Unable to read the credential file'
348
+ exception_count += 1
349
+ end
350
+ assert_equal 1, exception_count
351
+ end
352
+ end
353
+
354
+ def test_unset_or_empty_credentials_path_env_var
355
+ # An empty string should be treated as if it's not set.
356
+ [nil, ''].each do |value|
357
+ ENV[CREDENTIALS_PATH_ENV_VAR] = value
358
+ setup_gce_metadata_stubs
359
+ create_driver
360
+ assert_nil ENV[CREDENTIALS_PATH_ENV_VAR]
361
+ end
362
+ end
363
+
364
+ def test_one_log_custom_metadata
365
+     # Don't set up any metadata stubs, so the test will fail if we try to
366
+     # fetch metadata (and explicitly verify that we never do).
367
+ Fluent::GoogleCloudOutput.any_instance.expects(:fetch_metadata).never
368
+ ENV[CREDENTIALS_PATH_ENV_VAR] = IAM_CREDENTIALS[:path]
369
+ setup_logging_stubs do
370
+ d = create_driver(NO_METADATA_SERVICE_CONFIG + CUSTOM_METADATA_CONFIG)
371
+ d.emit('message' => log_entry(0))
372
+ d.run
373
+ end
374
+ verify_log_entries(1, CUSTOM_PARAMS)
375
+ end
376
+
377
+ def test_one_log_ec2
378
+ ENV[CREDENTIALS_PATH_ENV_VAR] = IAM_CREDENTIALS[:path]
379
+ setup_ec2_metadata_stubs
380
+ setup_logging_stubs do
381
+ d = create_driver(CONFIG_EC2_PROJECT_ID)
382
+ d.emit('message' => log_entry(0))
383
+ d.run
384
+ end
385
+ verify_log_entries(1, EC2_ZONE_PARAMS)
386
+ end
387
+
388
+ def test_one_log_ec2_region
389
+ ENV[CREDENTIALS_PATH_ENV_VAR] = IAM_CREDENTIALS[:path]
390
+ setup_ec2_metadata_stubs
391
+ setup_logging_stubs do
392
+ d = create_driver(CONFIG_EC2_PROJECT_ID_USE_REGION)
393
+ d.emit('message' => log_entry(0))
394
+ d.run
395
+ end
396
+ verify_log_entries(1, EC2_REGION_PARAMS)
397
+ end
398
+
399
+ def test_structured_payload_log
400
+ setup_gce_metadata_stubs
401
+ setup_logging_stubs do
402
+ d = create_driver
403
+ d.emit('msg' => log_entry(0), 'tag2' => 'test', 'data' => 5000,
404
+ 'some_null_field' => nil)
405
+ d.run
406
+ end
407
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry, i|
408
+ fields = entry['jsonPayload']
409
+ assert_equal 4, fields.size, entry
410
+ verify_default_log_entry_text(fields['msg'], i, entry)
411
+ assert_equal 'test', fields['tag2'], entry
412
+ assert_equal 5000, fields['data'], entry
413
+ assert_nil fields['some_null_field'], entry
414
+ end
415
+ end
416
+
417
+ def test_autoformat_enabled_with_stackdriver_trace_id_as_trace
418
+ [
419
+ APPLICATION_DEFAULT_CONFIG,
420
+ ENABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG
421
+ ].each do |config|
422
+ new_stub_context do
423
+ setup_gce_metadata_stubs
424
+ setup_logging_stubs do
425
+ d = create_driver(config)
426
+ d.emit(DEFAULT_TRACE_KEY => STACKDRIVER_TRACE_ID)
427
+ d.run
428
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
429
+ assert_equal FULL_STACKDRIVER_TRACE, entry['trace'],
430
+ 'stackdriver trace id should be autoformatted ' \
431
+ 'when autoformat_stackdriver_trace is enabled.'
432
+ end
433
+ end
434
+ end
435
+ end
436
+ end
437
+
438
+ def test_autoformat_disabled_with_stackdriver_trace_id_as_trace
439
+ setup_gce_metadata_stubs
440
+ setup_logging_stubs do
441
+ d = create_driver(DISABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG)
442
+ d.emit(DEFAULT_TRACE_KEY => STACKDRIVER_TRACE_ID)
443
+ d.run
444
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
445
+ assert_equal STACKDRIVER_TRACE_ID, entry['trace'],
446
+ 'trace as stackdriver trace id should not be ' \
447
+ 'autoformatted with config ' \
448
+ "#{DISABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG}."
449
+ end
450
+ end
451
+ end
452
+
453
+ def test_no_trace_when_trace_key_not_exists_with_any_autoformat_config
454
+ [
455
+ APPLICATION_DEFAULT_CONFIG,
456
+ ENABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG,
457
+ DISABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG
458
+ ].each do |config|
459
+ new_stub_context do
460
+ setup_gce_metadata_stubs
461
+ setup_logging_stubs do
462
+ d = create_driver(config)
463
+ d.emit('msg' => log_entry(0))
464
+ d.run
465
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
466
+ assert_false entry.key?('trace'), entry
467
+ end
468
+ end
469
+ end
470
+ end
471
+ end
472
+
473
+ def test_non_stackdriver_trace_id_compliant_trace_with_any_autoformat_config
474
+ configs = [
475
+ APPLICATION_DEFAULT_CONFIG,
476
+ ENABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG,
477
+ DISABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG
478
+ ]
479
+ traces = [
480
+ TRACE, # Full trace won't be modified.
481
+ EMPTY_STRING,
482
+ INVALID_SHORT_STACKDRIVER_TRACE_ID,
483
+ INVALID_LONG_STACKDRIVER_TRACE_ID,
484
+ INVALID_NON_HEX_STACKDRIVER_TRACE_ID,
485
+ INVALID_TRACE_NO_TRACE_ID,
486
+ INVALID_TRACE_NO_PROJECT_ID,
487
+ INVALID_TRACE_WITH_SHORT_TRACE_ID,
488
+ INVALID_TRACE_WITH_LONG_TRACE_ID,
489
+ INVALID_TRACE_WITH_NON_HEX_TRACE_ID
490
+ ]
491
+ configs.product(traces).each do |config, trace|
492
+ new_stub_context do
493
+ setup_gce_metadata_stubs
494
+ setup_logging_stubs do
495
+ d = create_driver(config)
496
+ d.emit(DEFAULT_TRACE_KEY => trace)
497
+ d.run
498
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
499
+ assert_equal_with_default \
500
+ entry['trace'], trace, '',
501
+ 'trace as non stackdriver trace id should not be ' \
502
+ "autoformatted with config #{config}."
503
+ end
504
+ end
505
+ end
506
+ end
507
+ end
508
+
509
+ def test_structured_payload_malformatted_log
510
+ setup_gce_metadata_stubs
511
+ message = 'test message'
512
+ setup_logging_stubs do
513
+ d = create_driver
514
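+       # Emit a record whose keys are not strings; the assertions below
+       # expect the keys to be converted to strings.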
+ d.emit(
515
+ 'int_key' => { 1 => message },
516
+ 'int_array_key' => { [1, 2, 3, 4] => message },
517
+ 'string_array_key' => { %w[a b c] => message },
518
+ 'hash_key' => { { 'some_key' => 'some_value' } => message },
519
+ 'mixed_key' => { { 'some_key' => %w[a b c] } => message },
520
+ 'symbol_key' => { some_symbol: message },
521
+ 'nil_key' => { nil => message }
522
+ )
523
+ d.run
524
+ end
525
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
526
+ fields = entry['jsonPayload']
527
+ assert_equal 7, fields.size, entry
528
+ assert_equal message, fields['int_key']['1'], entry
529
+ assert_equal message, fields['int_array_key']['[1, 2, 3, 4]'], entry
530
+ assert_equal message, fields['string_array_key']['["a", "b", "c"]'], entry
531
+ assert_equal message, fields['hash_key']['{"some_key"=>"some_value"}'],
532
+ entry
533
+ assert_equal message,
534
+ fields['mixed_key']['{"some_key"=>["a", "b", "c"]}'], entry
535
+ assert_equal message, fields['symbol_key']['some_symbol'], entry
536
+ assert_equal message, fields['nil_key'][''], entry
537
+ end
538
+ end
539
+
540
+ def test_structured_payload_json_log_default_not_parsed_text
541
+ setup_gce_metadata_stubs
542
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
543
+ '"data": 5000, "some_null_field": null}'
544
+ setup_logging_stubs do
545
+ d = create_driver(APPLICATION_DEFAULT_CONFIG)
546
+ d.emit('message' => "notJSON #{json_string}")
547
+ d.emit('message' => json_string)
548
+ d.emit('message' => " \r\n \t#{json_string}")
549
+ d.run
550
+ end
551
+ verify_log_entries(3, COMPUTE_PARAMS, 'textPayload') do
552
+ # Only check for the existence of textPayload.
553
+ end
554
+ end
555
+
556
+ def test_structured_payload_json_log_default_not_parsed_json
557
+ setup_gce_metadata_stubs
558
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
559
+ '"data": 5000, "some_null_field": null}'
560
+ setup_logging_stubs do
561
+ d = create_driver(APPLICATION_DEFAULT_CONFIG)
562
+ %w[log msg].each do |field|
563
+ d.emit(field => "notJSON #{json_string}")
564
+ d.emit(field => json_string)
565
+ d.emit(field => " \r\n \t#{json_string}")
566
+ end
567
+ d.run
568
+ end
569
+ verify_log_entries(6, COMPUTE_PARAMS, 'jsonPayload') do |entry|
570
+ fields = entry['jsonPayload']
571
+ assert !fields.key?('tag2'), 'Did not expect tag2'
572
+ assert !fields.key?('data'), 'Did not expect data'
573
+ assert !fields.key?('some_null_field'), 'Did not expect some_null_field'
574
+ end
575
+ end
576
+
577
+ def test_structured_payload_json_log_detect_json_not_parsed_text
578
+ setup_gce_metadata_stubs
579
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
580
+ '"data": 5000, "some_null_field": null}'
581
+ setup_logging_stubs do
582
+ d = create_driver(DETECT_JSON_CONFIG)
583
+ d.emit('message' => "notJSON #{json_string}")
584
+ d.run
585
+ end
586
+ verify_log_entries(1, COMPUTE_PARAMS, 'textPayload') do
587
+ # Only check for the existence of textPayload.
588
+ end
589
+ end
590
+
591
+ def test_structured_payload_json_log_detect_json_not_parsed_json
592
+ setup_gce_metadata_stubs
593
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
594
+ '"data": 5000, "some_null_field": null}'
595
+ setup_logging_stubs do
596
+ d = create_driver(DETECT_JSON_CONFIG)
597
+ %w[log msg].each do |field|
598
+ d.emit(field => "notJSON #{json_string}")
599
+ end
600
+ d.run
601
+ end
602
+ verify_log_entries(2, COMPUTE_PARAMS, 'jsonPayload') do |entry|
603
+ fields = entry['jsonPayload']
604
+ assert !fields.key?('tag2'), 'Did not expect tag2'
605
+ assert !fields.key?('data'), 'Did not expect data'
606
+ assert !fields.key?('some_null_field'), 'Did not expect some_null_field'
607
+ end
608
+ end
609
+
610
+ # TODO(qingling128): Fix the inconsistent behavior of 'message', 'log' and
611
+ # 'msg' in the next major version 1.0.0.
612
+ def test_structured_payload_json_log_detect_json_with_hash_input
613
+ hash_value = {
614
+ 'msg' => 'test log entry 0',
615
+ 'tag2' => 'test',
616
+ 'data' => 5000,
617
+ 'some_null_field' => nil
618
+ }
619
+ [
620
+ {
621
+ config: APPLICATION_DEFAULT_CONFIG,
622
+ field_name: 'log',
623
+ expected_payload: 'jsonPayload'
624
+ },
625
+ {
626
+ config: APPLICATION_DEFAULT_CONFIG,
627
+ field_name: 'msg',
628
+ expected_payload: 'jsonPayload'
629
+ },
630
+ {
631
+ config: APPLICATION_DEFAULT_CONFIG,
632
+ field_name: 'message',
633
+ expected_payload: 'textPayload'
634
+ },
635
+ {
636
+ config: DETECT_JSON_CONFIG,
637
+ field_name: 'log',
638
+ expected_payload: 'jsonPayload'
639
+ },
640
+ {
641
+ config: DETECT_JSON_CONFIG,
642
+ field_name: 'msg',
643
+ expected_payload: 'jsonPayload'
644
+ },
645
+ {
646
+ config: DETECT_JSON_CONFIG,
647
+ field_name: 'message',
648
+ expected_payload: 'textPayload'
649
+ }
650
+ ].each do |test_params|
651
+ new_stub_context do
652
+ setup_gce_metadata_stubs
653
+ setup_logging_stubs do
654
+ d = create_driver(test_params[:config])
655
+ d.emit(test_params[:field_name] => hash_value)
656
+ d.run
657
+ end
658
+ if test_params[:expected_payload] == 'textPayload'
659
+ verify_log_entries(1, COMPUTE_PARAMS, 'textPayload') do |entry|
660
+ text_payload = entry['textPayload']
661
+ assert_equal '{"msg"=>"test log entry 0", "tag2"=>"test", ' \
662
+ '"data"=>5000, "some_null_field"=>nil}',
663
+ text_payload, entry
664
+ end
665
+ else
666
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
667
+ json_payload = entry['jsonPayload']
668
+ assert_equal 1, json_payload.size, entry
669
+ fields = json_payload[test_params[:field_name]]
670
+ assert_equal 4, fields.size, entry
671
+ assert_equal 'test log entry 0', fields['msg'], entry
672
+ assert_equal 'test', fields['tag2'], entry
673
+ assert_equal 5000, fields['data'], entry
674
+ assert_nil fields['some_null_field'], entry
675
+ end
676
+ end
677
+ end
678
+ end
679
+ end
680
+
681
+ def test_structured_payload_json_log_detect_json_parsed
682
+ setup_gce_metadata_stubs
683
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
684
+ '"data": 5000, "some_null_field": null}'
685
+ setup_logging_stubs do
686
+ d = create_driver(DETECT_JSON_CONFIG)
687
+ %w[message log msg].each do |field|
688
+ d.emit(field => json_string)
689
+ d.emit(field => " \r\n \t#{json_string}")
690
+ end
691
+ d.run
692
+ end
693
+ verify_log_entries(6, COMPUTE_PARAMS, 'jsonPayload') do |entry|
694
+ fields = entry['jsonPayload']
695
+ assert_equal 4, fields.size, entry
696
+ assert_equal 'test log entry 0', fields['msg'], entry
697
+ assert_equal 'test', fields['tag2'], entry
698
+ assert_equal 5000, fields['data'], entry
699
+ assert_nil fields['some_null_field'], entry
700
+ end
701
+ end
702
+
703
+ def test_structured_payload_json_log_default_container_not_parsed
704
+ setup_gce_metadata_stubs
705
+ setup_k8s_metadata_stubs
706
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
707
+ '"data": 5000, "some_null_field": null}'
708
+ setup_logging_stubs do
709
+ d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
710
+ d.emit(container_log_entry_with_metadata("notJSON#{json_string}"))
711
+ d.emit(container_log_entry_with_metadata(json_string))
712
+ d.emit(container_log_entry_with_metadata(" \r\n \t#{json_string}"))
713
+ d.run
714
+ end
715
+ verify_log_entries(3, CONTAINER_FROM_METADATA_PARAMS, 'textPayload') do
716
+ # Only check for the existence of textPayload.
717
+ end
718
+ end
719
+
720
+ def test_structured_payload_json_log_detect_json_container_not_parsed
721
+ setup_gce_metadata_stubs
722
+ setup_k8s_metadata_stubs
723
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
724
+ '"data": 5000, "some_null_field": null}'
725
+ setup_logging_stubs do
726
+ d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
727
+ d.emit(container_log_entry_with_metadata("notJSON#{json_string}"))
728
+ d.run
729
+ end
730
+ verify_log_entries(1, CONTAINER_FROM_METADATA_PARAMS, 'textPayload') do
731
+ # Only check for the existence of textPayload.
732
+ end
733
+ end
734
+
735
+ def test_structured_payload_json_log_detect_json_container_parsed
736
+ setup_gce_metadata_stubs
737
+ setup_k8s_metadata_stubs
738
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
739
+ '"data": 5000, "some_null_field": null}'
740
+ setup_logging_stubs do
741
+ d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
742
+ d.emit(container_log_entry_with_metadata(json_string))
743
+ d.emit(container_log_entry_with_metadata(" \r\n \t#{json_string}"))
744
+ d.run
745
+ end
746
+ verify_log_entries(2, CONTAINER_FROM_METADATA_PARAMS, 'jsonPayload') \
747
+ do |entry|
748
+ fields = entry['jsonPayload']
749
+ assert_equal 4, fields.size, entry
750
+ assert_equal 'test log entry 0', fields['msg'], entry
751
+ assert_equal 'test', fields['tag2'], entry
752
+ assert_equal 5000, fields['data'], entry
753
+ assert_nil fields['some_null_field'], entry
754
+ end
755
+ end
756
+
757
+ # Verify that when the log has only one effective field (named 'log',
758
+ # 'message', or 'msg') and the field is in JSON format, the field is parsed as
759
+ # JSON and sent as jsonPayload.
760
+ def test_detect_json_auto_triggered_with_one_field
761
+ setup_gce_metadata_stubs
762
+ json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
763
+ '"data": 5000, "some_null_field": null}'
764
+ PRESERVED_KEYS_TIMESTAMP_FIELDS.each do |timestamp_fields|
765
+ setup_logging_stubs do
766
+ @logs_sent = []
767
+ d = create_driver(DETECT_JSON_CONFIG)
768
+ %w[message log msg].each do |field|
769
+ d.emit(PRESERVED_KEYS_MAP.merge(
770
+ field => json_string
771
+ ).merge(timestamp_fields))
772
+ end
773
+ d.run
774
+ end
775
+ expected_params = COMPUTE_PARAMS.merge(
776
+ labels: COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE)
777
+ )
778
+ verify_log_entries(3, expected_params, 'jsonPayload') do |entry|
779
+ fields = entry['jsonPayload']
780
+ assert_equal 4, fields.size, entry
781
+ assert_equal 'test log entry 0', fields['msg'], entry
782
+ assert_equal 'test', fields['tag2'], entry
783
+ assert_equal 5000, fields['data'], entry
784
+ assert_nil fields['some_null_field'], entry
785
+ end
786
+ end
787
+ end
788
+
789
+ # Verify that we drop the log entries when 'require_valid_tags' is true and
790
+ # any non-string tags or tags with non-utf8 characters are detected.
791
+ def test_reject_invalid_tags_with_require_valid_tags_true
792
+ setup_gce_metadata_stubs
793
+ INVALID_TAGS.each_key do |tag|
794
+ setup_logging_stubs do
795
+ @logs_sent = []
796
+ d = create_driver(REQUIRE_VALID_TAGS_CONFIG, tag)
797
+ d.emit('msg' => log_entry(0))
798
+ d.run
799
+ end
800
+ verify_log_entries(0, COMPUTE_PARAMS, 'jsonPayload')
801
+ end
802
+ end
803
+
804
+   # Verify that an empty container name fails the Kubernetes regex
805
+   # match, so the original tag is used as the log name.
806
+ def test_handle_empty_container_name
807
+ setup_gce_metadata_stubs
808
+ setup_k8s_metadata_stubs
809
+ container_name = ''
810
+ # This tag will not match the kubernetes regex because it requires a
811
+ # non-empty container name.
812
+ tag = container_tag_with_container_name(container_name)
813
+ setup_logging_stubs do
814
+ d = create_driver(REQUIRE_VALID_TAGS_CONFIG, tag)
815
+ d.emit(container_log_entry_with_metadata(log_entry(0), container_name))
816
+ d.run
817
+ end
818
+ params = CONTAINER_FROM_METADATA_PARAMS.merge(
819
+ resource: CONTAINER_FROM_METADATA_PARAMS[:resource].merge(
820
+ labels: CONTAINER_FROM_METADATA_PARAMS[:resource][:labels].merge(
821
+ 'container_name' => container_name
822
+ )
823
+ ),
824
+ log_name: tag
825
+ )
826
+ verify_log_entries(1, params, 'textPayload')
827
+ end
828
+
829
+   # Verify that container names with non-utf8 characters are rejected when
830
+ # 'require_valid_tags' is true.
831
+ def test_reject_non_utf8_container_name_with_require_valid_tags_true
832
+ setup_gce_metadata_stubs
833
+ setup_k8s_metadata_stubs
834
+ non_utf8_tags = INVALID_TAGS.select do |tag, _|
835
+ tag.is_a?(String) && !tag.empty?
836
+ end
837
+ non_utf8_tags.each do |container_name, encoded_name|
838
+ setup_logging_stubs do
839
+ @logs_sent = []
840
+ d = create_driver(REQUIRE_VALID_TAGS_CONFIG,
841
+ container_tag_with_container_name(container_name))
842
+ d.emit(container_log_entry_with_metadata(log_entry(0), container_name))
843
+ d.run
844
+ end
845
+ params = CONTAINER_FROM_METADATA_PARAMS.merge(
846
+ labels: CONTAINER_FROM_METADATA_PARAMS[:labels].merge(
847
+ "#{GKE_CONSTANTS[:service]}/container_name" =>
848
+ CGI.unescape(encoded_name)
849
+ ),
850
+ log_name: encoded_name
851
+ )
852
+ verify_log_entries(0, params, 'textPayload')
853
+ end
854
+ end
855
+
856
+ # Verify that tags are properly encoded. When 'require_valid_tags' is true, we
857
+ # only accept string tags with utf8 characters.
858
+ def test_encode_tags_with_require_valid_tags_true
859
+ setup_gce_metadata_stubs
860
+ VALID_TAGS.each do |tag, encoded_tag|
861
+ setup_logging_stubs do
862
+ @logs_sent = []
863
+ d = create_driver(REQUIRE_VALID_TAGS_CONFIG, tag)
864
+ d.emit('msg' => log_entry(0))
865
+ d.run
866
+ end
867
+ verify_log_entries(1, COMPUTE_PARAMS.merge(log_name: encoded_tag),
868
+ 'jsonPayload')
869
+ end
870
+ end
871
+
872
+ # Verify that tags extracted from container names are properly encoded.
873
+ def test_encode_tags_from_container_name_with_require_valid_tags_true
874
+ setup_gce_metadata_stubs
875
+ setup_k8s_metadata_stubs
876
+ VALID_TAGS.each do |tag, encoded_tag|
877
+ setup_logging_stubs do
878
+ @logs_sent = []
879
+ d = create_driver(REQUIRE_VALID_TAGS_CONFIG,
880
+ container_tag_with_container_name(tag))
881
+ d.emit(container_log_entry_with_metadata(log_entry(0), tag))
882
+ d.run
883
+ end
884
+ params = CONTAINER_FROM_METADATA_PARAMS.merge(
885
+ resource: CONTAINER_FROM_METADATA_PARAMS[:resource].merge(
886
+ labels: CONTAINER_FROM_METADATA_PARAMS[:resource][:labels].merge(
887
+ 'container_name' => tag
888
+ )
889
+ ),
890
+ log_name: encoded_tag
891
+ )
892
+ verify_log_entries(1, params, 'textPayload')
893
+ end
894
+ end
895
+
896
+ # Verify that tags are properly encoded and sanitized. When
897
+ # 'require_valid_tags' is false, we try to convert any non-string tags to
898
+ # strings, and replace non-utf8 characters with a replacement string.
899
+ def test_sanitize_tags_with_require_valid_tags_false
900
+ setup_gce_metadata_stubs
901
+ ALL_TAGS.each do |tag, sanitized_tag|
902
+ setup_logging_stubs do
903
+ @logs_sent = []
904
+ d = create_driver(APPLICATION_DEFAULT_CONFIG, tag)
905
+ d.emit('msg' => log_entry(0))
906
+ d.run
907
+ end
908
+ verify_log_entries(1, COMPUTE_PARAMS.merge(log_name: sanitized_tag),
909
+ 'jsonPayload')
910
+ end
911
+ end
912
+
913
+ # Verify that tags extracted from container names are properly encoded and
914
+ # sanitized.
915
+ def test_sanitize_tags_from_container_name_with_require_valid_tags_false
916
+ setup_gce_metadata_stubs
917
+ setup_k8s_metadata_stubs
918
+     # For containers, log names are derived from container names, which are
919
+     # extracted from the tag via a regex match pattern. As a prerequisite,
920
+     # the tag must already be a string, so we only test non-empty string
921
+     # cases here.
922
+ string_tags = ALL_TAGS.select { |tag, _| tag.is_a?(String) && !tag.empty? }
923
+ string_tags.each do |container_name, encoded_container_name|
924
+ # Container name in the label is sanitized but not encoded, while the log
925
+ # name is encoded.
926
+ setup_logging_stubs do
927
+ @logs_sent = []
928
+ d = create_driver(APPLICATION_DEFAULT_CONFIG,
929
+ container_tag_with_container_name(container_name))
930
+ d.emit(container_log_entry_with_metadata(log_entry(0), container_name))
931
+ d.run
932
+ end
933
+ params = CONTAINER_FROM_METADATA_PARAMS.merge(
934
+ resource: CONTAINER_FROM_METADATA_PARAMS[:resource].merge(
935
+ labels: CONTAINER_FROM_METADATA_PARAMS[:resource][:labels].merge(
936
+ 'container_name' => CGI.unescape(encoded_container_name)
937
+ )
938
+ ),
939
+ log_name: encoded_container_name
940
+ )
941
+ verify_log_entries(1, params, 'textPayload')
942
+ end
943
+ end
944
+
945
+ def test_configure_split_logs_by_tag
946
+ setup_gce_metadata_stubs
947
+ {
948
+ APPLICATION_DEFAULT_CONFIG => false,
949
+ ENABLE_SPLIT_LOGS_BY_TAG_CONFIG => true
950
+ }.each do |(config, split_logs_by_tag)|
951
+ d = create_driver(config)
952
+ assert_equal split_logs_by_tag,
953
+ d.instance.instance_variable_get(:@split_logs_by_tag)
954
+ end
955
+ end
956
+
957
+ def test_split_logs_by_tag
958
+ setup_gce_metadata_stubs
959
+ log_entry_count = 5
960
+ dynamic_log_names = (0..log_entry_count - 1).map do |index|
961
+ "projects/test-project-id/logs/tag#{index}"
962
+ end
963
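+     # Each entry: [config, expected request count, expected per-request log
+     # names, expected per-entry log names].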
+ [
964
+ [APPLICATION_DEFAULT_CONFIG, 1, [''], dynamic_log_names],
965
+ # [] returns nil for any index.
966
+ [ENABLE_SPLIT_LOGS_BY_TAG_CONFIG, log_entry_count, dynamic_log_names, []]
967
+ ].each do |(config, request_count, request_log_names, entry_log_names)|
968
+ clear_metrics
969
+ setup_logging_stubs do
970
+ @logs_sent = []
971
+ d = create_driver(config + ENABLE_PROMETHEUS_CONFIG, 'test', true)
972
+ log_entry_count.times do |i|
973
+ d.emit("tag#{i}", 'message' => log_entry(i))
974
+ end
975
+ d.run
976
+ @logs_sent.zip(request_log_names).each do |request, log_name|
977
+ assert_equal log_name, request['logName']
978
+ end
979
+ verify_log_entries(log_entry_count, COMPUTE_PARAMS_NO_LOG_NAME,
980
+ 'textPayload') do |entry, entry_index|
981
+ verify_default_log_entry_text(entry['textPayload'], entry_index,
982
+ entry)
983
+ assert_equal entry_log_names[entry_index], entry['logName']
984
+ end
985
+ # Verify the number of requests is different based on whether the
986
+ # 'split_logs_by_tag' flag is enabled.
987
+ assert_prometheus_metric_value(
988
+ :stackdriver_successful_requests_count,
989
+ request_count,
990
+ 'agent.googleapis.com/agent',
991
+ OpenCensus::Stats::Aggregation::Sum, d,
992
+ :aggregate
993
+ )
994
+ assert_prometheus_metric_value(
995
+ :stackdriver_ingested_entries_count,
996
+ log_entry_count,
997
+ 'agent.googleapis.com/agent',
998
+ OpenCensus::Stats::Aggregation::Sum, d,
999
+ :aggregate
1000
+ )
1001
+ end
1002
+ end
1003
+ end
1004
+
1005
+ def test_compute_timestamp
1006
+ setup_gce_metadata_stubs
1007
+ d = create_driver(APPLICATION_DEFAULT_CONFIG)
1008
+
1009
+ compute_timestamp = lambda do |driver, record, time|
1010
+ driver.instance.send(:compute_timestamp, record, time)
1011
+ end
1012
+
1013
+ current_time = Time.new(2019, 12, 29, 10, 23, 35, '-08:00')
1014
+ one_day_later = current_time.to_datetime.next_day.to_time
1015
+ just_under_one_day_later = one_day_later - 1
1016
+ next_year = Time.mktime(current_time.year + 1)
1017
+ one_second_before_next_year = next_year - 1
1018
+ one_second_into_next_year = next_year + 1
1019
+ one_day_into_next_year = next_year.to_datetime.next_day.to_time
1020
+
1021
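+     # Each timestamp below is passed through compute_timestamp via every
+     # supported record format.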
+ [
1022
+ Time.at(123_456.789),
1023
+ Time.at(0),
1024
+ current_time,
1025
+ just_under_one_day_later,
1026
+ one_second_before_next_year,
1027
+ next_year,
1028
+ one_second_into_next_year,
1029
+ one_day_into_next_year
1030
+ ].each do |ts|
1031
+ # Use record collection time.
1032
+ ts_secs, ts_nanos, actual_ts = compute_timestamp[d, {
1033
+ 'message' => ''
1034
+ }, ts.to_f]
1035
+ assert_timestamp_matches ts, ts_secs, ts_nanos, actual_ts.iso8601
1036
+
1037
+ # Use the (deprecated) timeNanos key.
1038
+ ts_secs, ts_nanos, actual_ts = compute_timestamp[d, {
1039
+ 'message' => '',
1040
+ 'timeNanos' => ts.tv_sec * 1_000_000_000 + ts.tv_nsec
1041
+ }, 1.0]
1042
+ assert_timestamp_matches ts, ts_secs, ts_nanos, actual_ts.iso8601
1043
+
1044
+ # Use the structured timestamp key.
1045
+ ts_secs, ts_nanos, actual_ts = compute_timestamp[d, {
1046
+ 'message' => '',
1047
+ 'timestamp' => { 'seconds' => ts.tv_sec, 'nanos' => ts.tv_nsec }
1048
+ }, 1.0]
1049
+ assert_timestamp_matches ts, ts_secs, ts_nanos, actual_ts.iso8601
1050
+
1051
+ # Use the timestampSeconds/timestampNanos keys.
1052
+ ts_secs, ts_nanos, actual_ts = compute_timestamp[d, {
1053
+ 'message' => '',
1054
+ 'timestampSeconds' => ts.tv_sec,
1055
+ 'timestampNanos' => ts.tv_nsec
1056
+ }, 1.0]
1057
+ assert_timestamp_matches ts, ts_secs, ts_nanos, actual_ts.iso8601
1058
+
1059
+ # Use the string timestampSeconds/timestampNanos keys.
1060
+ ts_secs, ts_nanos, actual_ts = compute_timestamp[d, {
1061
+ 'message' => '',
1062
+ 'timestampSeconds' => ts.tv_sec.to_s,
1063
+ 'timestampNanos' => ts.tv_nsec.to_s
1064
+ }, 1.0]
1065
+ assert_timestamp_matches ts, ts_secs, ts_nanos, actual_ts.iso8601
1066
+ end
1067
+ end
1068
+
1069
+ def test_adjust_timestamp
1070
+ setup_gce_metadata_stubs
1071
+ d = create_driver(APPLICATION_DEFAULT_CONFIG)
1072
+
1073
+ adjust_timestamp_if_invalid = lambda do |driver, timestamp, current_time|
1074
+ driver.instance.send(:adjust_timestamp_if_invalid, timestamp,
1075
+ current_time)
1076
+ end
1077
+
1078
+ december29 = Time.new(2019, 12, 29, 10, 23, 35, '-08:00')
1079
+ december31 = Time.new(2019, 12, 31, 10, 23, 35, '-08:00')
1080
+ january1 = Time.new(2020, 1, 1, 10, 23, 35, '-08:00')
1081
+
1082
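+     # For each simulated current time, map input timestamps to the expected
+     # (possibly adjusted) timestamps.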
+ {
1083
+ # December 29, 2019 (normal operation).
1084
+ december29 => begin
1085
+ one_day_later = Time.new(2019, 12, 30, 10, 23, 35, '-08:00')
1086
+ one_day_a_year_earlier = Time.new(2018, 12, 30, 10, 23, 35, '-08:00')
1087
+ just_under_one_day_later = Time.new(2019, 12, 30, 10, 23, 34, '-08:00')
1088
+ next_year = Time.new(2020, 1, 1, 0, 0, 0, '-08:00')
1089
+ one_second_before_next_year =
1090
+ Time.new(2019, 12, 31, 11, 59, 59, '-08:00')
1091
+ one_second_before_this_year =
1092
+ Time.new(2018, 12, 31, 11, 59, 59, '-08:00')
1093
+ one_second_into_next_year = Time.new(2020, 1, 1, 0, 0, 1, '-08:00')
1094
+ one_day_into_next_year = Time.new(2020, 1, 2, 0, 0, 0, '-08:00')
1095
+ {
1096
+ Time.at(123_456.789) => Time.at(123_456.789),
1097
+ Time.at(0) => Time.at(0),
1098
+ december29 => december29,
1099
+ one_day_later => one_day_a_year_earlier,
1100
+ just_under_one_day_later => just_under_one_day_later,
1101
+ one_second_before_next_year => one_second_before_this_year,
1102
+ next_year => Time.at(0),
1103
+ one_second_into_next_year => Time.at(0),
1104
+ one_day_into_next_year => Time.at(0)
1105
+ }
1106
+ end,
1107
+ # January 1, 2020 (normal operation).
1108
+ january1 => begin
1109
+ one_day_later = Time.new(2020, 1, 2, 10, 23, 35, '-08:00')
1110
+ one_day_a_year_earlier = Time.new(2019, 1, 2, 10, 23, 35, '-08:00')
1111
+ just_under_one_day_later = Time.new(2020, 1, 2, 10, 23, 34, '-08:00')
1112
+ next_year = Time.new(2021, 1, 1, 0, 0, 0, '-08:00')
1113
+ one_second_before_next_year =
1114
+ Time.new(2020, 12, 31, 11, 59, 59, '-08:00')
1115
+ one_second_before_this_year =
1116
+ Time.new(2019, 12, 31, 11, 59, 59, '-08:00')
1117
+ one_second_into_next_year = Time.new(2021, 1, 1, 0, 0, 1, '-08:00')
1118
+ one_day_into_next_year = Time.new(2021, 1, 2, 0, 0, 0, '-08:00')
1119
+ {
1120
+ Time.at(123_456.789) => Time.at(123_456.789),
1121
+ Time.at(0) => Time.at(0),
1122
+ january1 => january1,
1123
+ one_day_later => one_day_a_year_earlier,
1124
+ just_under_one_day_later => just_under_one_day_later,
1125
+ one_second_before_next_year => one_second_before_this_year,
1126
+ next_year => Time.at(0),
1127
+ one_second_into_next_year => Time.at(0),
1128
+ one_day_into_next_year => Time.at(0)
1129
+ }
1130
+ end,
1131
+ # December 31, 2019 (next day overlaps new year).
1132
+ december31 => begin
1133
+ one_day_later = Time.new(2020, 1, 1, 10, 23, 35, '-08:00')
1134
+ just_under_one_day_later = Time.new(2020, 1, 1, 10, 23, 34, '-08:00')
1135
+ next_year = Time.new(2020, 1, 1, 0, 0, 0, '-08:00')
1136
+ one_second_before_next_year =
1137
+ Time.new(2019, 12, 31, 11, 59, 59, '-08:00')
1138
+ one_second_into_next_year = Time.new(2020, 1, 1, 0, 0, 1, '-08:00')
1139
+ one_day_into_next_year = Time.new(2020, 1, 2, 0, 0, 0, '-08:00')
1140
+ {
1141
+ Time.at(123_456.789) => Time.at(123_456.789),
1142
+ Time.at(0) => Time.at(0),
1143
+ december31 => december31,
1144
+ one_day_later => Time.at(0), # Falls into the next year.
1145
+ just_under_one_day_later => just_under_one_day_later,
1146
+ one_second_before_next_year => one_second_before_next_year,
1147
+ next_year => next_year,
1148
+ one_second_into_next_year => one_second_into_next_year,
1149
+ one_day_into_next_year => Time.at(0)
1150
+ }
1151
+ end
1152
+ }.each do |current_time, timestamps|
1153
+ timestamps.each do |ts, expected_ts|
1154
+ ts_secs, ts_nanos = adjust_timestamp_if_invalid[d, ts, current_time]
1155
+ adjusted_ts = Time.at(ts_secs, ts_nanos / 1_000.0)
1156
+ assert_timestamp_matches expected_ts, ts_secs, ts_nanos,
1157
+ adjusted_ts.iso8601
1158
+ end
1159
+ end
1160
+ end
1161
+
1162
+ def test_log_timestamps
1163
+ setup_gce_metadata_stubs
1164
+ current_time = Time.now
1165
+ {
1166
+ # Verify that timestamps make it through.
1167
+ Time.at(123_456.789) => Time.at(123_456.789),
1168
+ Time.at(0) => Time.at(0),
1169
+ current_time => current_time
1170
+ }.each do |ts, expected_ts|
1171
+ emit_index = 0
1172
+ setup_logging_stubs do
1173
+ @logs_sent = []
1174
+ d = create_driver(APPLICATION_DEFAULT_CONFIG)
1175
+ # Test the "native" fluentd timestamp as well as our nanosecond tags.
1176
+ d.emit({ 'message' => log_entry(emit_index) }, ts.to_f)
1177
+ emit_index += 1
1178
+ d.emit('message' => log_entry(emit_index),
1179
+ 'timeNanos' => ts.tv_sec * 1_000_000_000 + ts.tv_nsec)
1180
+ emit_index += 1
1181
+ d.emit('message' => log_entry(emit_index),
1182
+ 'timestamp' => { 'seconds' => ts.tv_sec,
1183
+ 'nanos' => ts.tv_nsec })
1184
+ emit_index += 1
1185
+ d.emit('message' => log_entry(emit_index),
1186
+ 'timestampSeconds' => ts.tv_sec,
1187
+ 'timestampNanos' => ts.tv_nsec)
1188
+ emit_index += 1
1189
+ d.emit('message' => log_entry(emit_index),
1190
+ 'timestampSeconds' => ts.tv_sec.to_s,
1191
+ 'timestampNanos' => ts.tv_nsec.to_s)
1192
+ emit_index += 1
1193
+ d.run
1194
+ verify_log_entries(emit_index, COMPUTE_PARAMS) do |entry, i|
1195
+ verify_default_log_entry_text(entry['textPayload'], i, entry)
1196
+ actual_timestamp = timestamp_parse(entry['timestamp'])
1197
+ assert_timestamp_matches expected_ts, actual_timestamp['seconds'],
1198
+ actual_timestamp['nanos'], entry
1199
+ end
1200
+ end
1201
+ end
1202
+ end
1203
+
1204
+ def test_malformed_timestamp_field
1205
+ setup_gce_metadata_stubs
1206
+ setup_logging_stubs do
1207
+ d = create_driver
1208
+       # If timestamp is not a hash, it is passed through to the jsonPayload.
1209
+ d.emit('message' => log_entry(0), 'timestamp' => 'not-a-hash')
1210
+ d.run
1211
+ end
1212
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
1213
+ fields = entry['jsonPayload']
1214
+ assert_equal 2, fields.size, entry
1215
+ assert_equal 'not-a-hash', fields['timestamp'], entry
1216
+ end
1217
+ end
1218
+
1219
+ # Make parse_severity public so we can test it.
1220
+ class Fluent::GoogleCloudOutput # rubocop:disable Style/ClassAndModuleChildren
1221
+ public :parse_severity
1222
+ end
1223
+
1224
+ def test_label_map_without_field_present
1225
+ setup_gce_metadata_stubs
1226
+ setup_logging_stubs do
1227
+ config = %(label_map { "label_field": "sent_label" })
1228
+ d = create_driver(config)
1229
+ d.emit('message' => log_entry(0))
1230
+ d.run
1231
+ # No additional labels should be present
1232
+ end
1233
+ verify_log_entries(1, COMPUTE_PARAMS)
1234
+ end
1235
+
1236
+ def test_label_map_with_field_present
1237
+ setup_gce_metadata_stubs
1238
+ setup_logging_stubs do
1239
+ config = %(label_map { "label_field": "sent_label" })
1240
+ d = create_driver(config)
1241
+ d.emit('message' => log_entry(0), 'label_field' => 'label_value')
1242
+ d.run
1243
+ end
1244
+ # make a deep copy of COMPUTE_PARAMS and add the parsed label.
1245
+ params = Marshal.load(Marshal.dump(COMPUTE_PARAMS))
1246
+ params[:labels]['sent_label'] = 'label_value'
1247
+ verify_log_entries(1, params)
1248
+ end
1249
+
1250
+ def test_label_map_with_numeric_field
1251
+ setup_gce_metadata_stubs
1252
+ setup_logging_stubs do
1253
+ config = %(label_map { "label_field": "sent_label" })
1254
+ d = create_driver(config)
1255
+ d.emit('message' => log_entry(0), 'label_field' => 123_456_789)
1256
+ d.run
1257
+ end
1258
+ # make a deep copy of COMPUTE_PARAMS and add the parsed label.
1259
+ params = Marshal.load(Marshal.dump(COMPUTE_PARAMS))
1260
+ params[:labels]['sent_label'] = '123456789'
1261
+ verify_log_entries(1, params)
1262
+ end
1263
+
1264
+ def test_label_map_with_hash_field
1265
+ setup_gce_metadata_stubs
1266
+ setup_logging_stubs do
1267
+ config = %(label_map { "label_field": "sent_label" })
1268
+ d = create_driver(config)
1269
+ # I'm not sure this actually makes sense for a user to do, but make
1270
+ # sure that it works if they try it.
1271
+ d.emit('message' => log_entry(0),
1272
+ 'label_field' => { 'k1' => 10, 'k2' => 'val' })
1273
+ d.run
1274
+ end
1275
+ # make a deep copy of COMPUTE_PARAMS and add the parsed label.
1276
+ params = Marshal.load(Marshal.dump(COMPUTE_PARAMS))
1277
+ params[:labels]['sent_label'] = '{"k1"=>10, "k2"=>"val"}'
1278
+ verify_log_entries(1, params)
1279
+ end
1280
+
1281
+ def test_label_map_with_multiple_fields
1282
+ setup_gce_metadata_stubs
1283
+ setup_logging_stubs do
1284
+ config = %(
1285
+ label_map {
1286
+ "label1": "sent_label_1",
1287
+ "label_number_two": "foo.googleapis.com/bar",
1288
+ "label3": "label3"
1289
+ }
1290
+ )
1291
+ d = create_driver(config)
1292
+ # not_a_label passes through to the json payload
1293
+ d.emit('message' => log_entry(0),
1294
+ 'label1' => 'value1',
1295
+ 'label_number_two' => 'value2',
1296
+ 'not_a_label' => 'value4',
1297
+ 'label3' => 'value3')
1298
+ d.run
1299
+ end
1300
+ # make a deep copy of COMPUTE_PARAMS and add the parsed labels.
1301
+ params = Marshal.load(Marshal.dump(COMPUTE_PARAMS))
1302
+ params[:labels]['sent_label_1'] = 'value1'
1303
+ params[:labels]['foo.googleapis.com/bar'] = 'value2'
1304
+ params[:labels]['label3'] = 'value3'
1305
+ verify_log_entries(1, params, 'jsonPayload') do |entry, i|
1306
+ fields = entry['jsonPayload']
1307
+ assert_equal 2, fields.size, entry
1308
+ verify_default_log_entry_text(fields['message'], i, entry)
1309
+ assert_equal 'value4', fields['not_a_label'], entry
1310
+ end
1311
+ end
1312
+
1313
+ def test_multiple_logs
1314
+ setup_gce_metadata_stubs
1315
+ # Only test a few values because otherwise the test can take minutes.
1316
+ [2, 3, 5, 11, 50].each do |n|
1317
+ setup_logging_stubs do
1318
+ d = create_driver
1319
+ # The test driver doesn't clear its buffer of entries after running, so
1320
+ # do it manually here.
1321
+ d.instance_variable_get('@entries').clear
1322
+ @logs_sent = []
1323
+ n.times { |i| d.emit('message' => log_entry(i)) }
1324
+ d.run
1325
+ end
1326
+ verify_log_entries(n, COMPUTE_PARAMS)
1327
+ end
1328
+ end
1329
+
1330
+ def test_malformed_log
1331
+ setup_gce_metadata_stubs
1332
+ setup_logging_stubs do
1333
+ d = create_driver
1334
+       # If the entry is not a hash, the plugin should silently drop it.
1335
+ d.emit('a string is not a valid message')
1336
+ d.run
1337
+ end
1338
+ assert @logs_sent.empty?
1339
+ end
1340
+
1341
+ def test_one_managed_vm_log
1342
+ setup_gce_metadata_stubs
1343
+ setup_managed_vm_metadata_stubs
1344
+ setup_logging_stubs do
1345
+ d = create_driver
1346
+ d.emit('message' => log_entry(0))
1347
+ d.run
1348
+ end
1349
+ verify_log_entries(1, VMENGINE_PARAMS)
1350
+ end
1351
+
1352
+ def test_multiple_managed_vm_logs
1353
+ setup_gce_metadata_stubs
1354
+ setup_managed_vm_metadata_stubs
1355
+ [2, 3, 5, 11, 50].each do |n|
1356
+ setup_logging_stubs do
1357
+ d = create_driver
1358
+ # The test driver doesn't clear its buffer of entries after running, so
1359
+ # do it manually here.
1360
+ d.instance_variable_get('@entries').clear
1361
+ @logs_sent = []
1362
+ n.times { |i| d.emit('message' => log_entry(i)) }
1363
+ d.run
1364
+ end
1365
+ verify_log_entries(n, VMENGINE_PARAMS)
1366
+ end
1367
+ end
1368
+
1369
+ # Test container logs when metadata is extracted from the 'kubernetes' field
1370
+ # in the log record.
1371
+ def test_container_logs_metadata_from_record
1372
+ verify_container_logs(method(:container_log_entry_with_metadata),
1373
+ CONTAINER_FROM_METADATA_PARAMS)
1374
+ end
1375
+
1376
+ # Test container logs when metadata is extracted from the tag.
1377
+ def test_container_logs_metadata_from_tag
1378
+ verify_container_logs(method(:container_log_entry),
1379
+ CONTAINER_FROM_TAG_PARAMS)
1380
+ end
1381
+
1382
+ def test_one_container_log_from_tag_stderr
1383
+ setup_gce_metadata_stubs
1384
+ setup_k8s_metadata_stubs
1385
+ setup_logging_stubs do
1386
+ d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
1387
+ d.emit(container_log_entry(log_entry(0), 'stderr'))
1388
+ d.run
1389
+ end
1390
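+     # The merge block merges the nested labels hashes instead of replacing
+     # the existing labels.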
+ expected_params = CONTAINER_FROM_TAG_PARAMS.merge(
1391
+ labels: { "#{GKE_CONSTANTS[:service]}/stream" => 'stderr' }
1392
+ ) { |_, oldval, newval| oldval.merge(newval) }
1393
+ verify_log_entries(1, expected_params) do |entry, i|
1394
+ verify_default_log_entry_text(entry['textPayload'], i, entry)
1395
+ actual_timestamp = timestamp_parse(entry['timestamp'])
1396
+ assert_equal K8S_SECONDS_EPOCH, actual_timestamp['seconds'], entry
1397
+ assert_equal K8S_NANOS, actual_timestamp['nanos'], entry
1398
+ assert_equal 'ERROR', entry['severity'], entry
1399
+ end
1400
+ end
1401
+
1402
+ def test_json_container_log_metadata_from_plugin
1403
+ setup_gce_metadata_stubs
1404
+ setup_k8s_metadata_stubs
1405
+ setup_logging_stubs do
1406
+ d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
1407
+ d.emit(container_log_entry_with_metadata('{"msg": "test log entry 0", ' \
1408
+ '"tag2": "test", "data": ' \
1409
+ '5000, "severity": "WARNING"}'))
1410
+ d.run
1411
+ end
1412
+ verify_log_entries(1, CONTAINER_FROM_METADATA_PARAMS,
1413
+ 'jsonPayload') do |entry|
1414
+ fields = entry['jsonPayload']
1415
+ assert_equal 3, fields.size, entry
1416
+ assert_equal 'test log entry 0', fields['msg'], entry
1417
+ assert_equal 'test', fields['tag2'], entry
1418
+ assert_equal 5000, fields['data'], entry
1419
+ actual_timestamp = timestamp_parse(entry['timestamp'])
1420
+ assert_equal K8S_SECONDS_EPOCH, actual_timestamp['seconds'], entry
1421
+ assert_equal K8S_NANOS, actual_timestamp['nanos'], entry
1422
+ assert_equal 'WARNING', entry['severity'], entry
1423
+ end
1424
+ end
1425
+
1426
+ def test_json_container_log_metadata_from_tag
1427
+ setup_gce_metadata_stubs
1428
+ setup_k8s_metadata_stubs
1429
+ setup_logging_stubs do
1430
+ d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
1431
+ d.emit(container_log_entry('{"msg": "test log entry 0", ' \
1432
+ '"tag2": "test", "data": 5000, ' \
1433
+ '"severity": "W"}'))
1434
+ d.run
1435
+ end
1436
+ verify_log_entries(1, CONTAINER_FROM_TAG_PARAMS,
1437
+ 'jsonPayload') do |entry|
1438
+ fields = entry['jsonPayload']
1439
+ assert_equal 3, fields.size, entry
1440
+ assert_equal 'test log entry 0', fields['msg'], entry
1441
+ assert_equal 'test', fields['tag2'], entry
1442
+ assert_equal 5000, fields['data'], entry
1443
+ actual_timestamp = timestamp_parse(entry['timestamp'])
1444
+ assert_equal K8S_SECONDS_EPOCH, actual_timestamp['seconds'], entry
1445
+ assert_equal K8S_NANOS, actual_timestamp['nanos'], entry
1446
+ assert_equal 'WARNING', entry['severity'], entry
1447
+ end
1448
+ end
1449
+
1450
+ def test_dataproc_log
1451
+ setup_gce_metadata_stubs
1452
+ setup_dataproc_metadata_stubs
1453
+ setup_logging_stubs do
1454
+ d = create_driver
1455
+ d.emit(dataproc_log_entry('test message'))
1456
+ d.run
1457
+ end
1458
+ verify_log_entries(1, DATAPROC_PARAMS, 'jsonPayload')
1459
+ end
1460
+
1461
+ def test_cloud_ml_log
1462
+ setup_gce_metadata_stubs
1463
+ setup_logging_stubs do
1464
+ d = create_driver(CONFIG_ML, ML_TAG)
1465
+ d.emit(ml_log_entry(0))
1466
+ d.run
1467
+ end
1468
+ verify_log_entries(1, ML_PARAMS)
1469
+ end
1470
+
1471
+ def test_cloud_dataflow_log
1472
+ setup_gce_metadata_stubs
1473
+ setup_logging_stubs do
1474
+ d = create_driver(CONFIG_DATAFLOW, DATAFLOW_TAG)
1475
+ d.emit(dataflow_log_entry(0))
1476
+ d.run
1477
+ end
1478
+ verify_log_entries(1, DATAFLOW_PARAMS)
1479
+ end
1480
+
1481
+   # Verify the extraction of LogEntry subfields.
1482
+
1483
+ def test_log_entry_http_request_field_from_record
1484
+ verify_subfields_from_record(DEFAULT_HTTP_REQUEST_KEY)
1485
+ end
1486
+
1487
+ def test_log_entry_labels_field_from_record
1488
+ verify_subfields_from_record(DEFAULT_LABELS_KEY, false)
1489
+ end
1490
+
1491
+ def test_log_entry_operation_field_from_record
1492
+ verify_subfields_from_record(DEFAULT_OPERATION_KEY)
1493
+ end
1494
+
1495
+ def test_log_entry_source_location_field_from_record
1496
+ verify_subfields_from_record(DEFAULT_SOURCE_LOCATION_KEY)
1497
+ end
1498
+
1499
+   # Verify the extraction of LogEntry subfields when other fields are
1500
+   # present.
1501
+
1502
+ def test_log_entry_http_request_field_partial_from_record
1503
+ verify_subfields_partial_from_record(DEFAULT_HTTP_REQUEST_KEY)
1504
+ end
1505
+
1506
+   # We don't need a test like 'test_log_entry_labels_field_partial_from_record'
1507
+   # because labels are free-form strings. Everything in the labels field should
1508
+   # end up in the resulting logEntry->labels field. There is no need to check
1509
+   # partial transformation (i.e., some 'labels' fields extracted while
1510
+   # others are left as is).
1511
+
1512
+ def test_log_entry_operation_field_partial_from_record
1513
+ verify_subfields_partial_from_record(DEFAULT_OPERATION_KEY)
1514
+ end
1515
+
1516
+ def test_log_entry_source_location_field_partial_from_record
1517
+ verify_subfields_partial_from_record(DEFAULT_SOURCE_LOCATION_KEY)
1518
+ end
1519
+
1520
+   # Verify the extraction of LogEntry subfields when they are not hashes.
1521
+
1522
+ def test_log_entry_http_request_field_when_not_hash
1523
+ # TODO(qingling128) On the next major after 0.7.4, make all logEntry
1524
+ # subfields behave the same way: if the field is not in the correct format,
1525
+ # log an error in the Fluentd log and remove this field from payload. This
1526
+ # is the preferred behavior per PM decision.
1527
+ verify_subfields_untouched_when_not_hash(DEFAULT_HTTP_REQUEST_KEY)
1528
+ end
1529
+
1530
+ def test_log_entry_labels_field_when_not_hash
1531
+ verify_subfields_removed_when_not_hash(DEFAULT_LABELS_KEY)
1532
+ end
1533
+
1534
+ def test_log_entry_operation_field_when_not_hash
1535
+ # TODO(qingling128) On the next major version after 0.7.4, make all logEntry
1536
+ # subfields behave the same way: if the field is not in the correct format,
1537
+ # log an error in the Fluentd log and remove the field from the payload. This
1538
+ # is the preferred behavior per PM decision.
1539
+ verify_subfields_untouched_when_not_hash(DEFAULT_OPERATION_KEY)
1540
+ end
1541
+
1542
+ def test_log_entry_source_location_field_when_not_hash
1543
+ # TODO(qingling128) On the next major version after 0.7.4, make all logEntry
1544
+ # subfields behave the same way: if the field is not in the correct format,
1545
+ # log an error in the Fluentd log and remove the field from the payload. This
1546
+ # is the preferred behavior per PM decision.
1547
+ verify_subfields_untouched_when_not_hash(DEFAULT_SOURCE_LOCATION_KEY)
1548
+ end
1549
+
1550
+ # Verify the handling of LogEntry subfields when they are nil.
1551
+
1552
+ def test_log_entry_http_request_field_when_nil
1553
+ verify_subfields_when_nil(DEFAULT_HTTP_REQUEST_KEY)
1554
+ end
1555
+
1556
+ def test_log_entry_labels_field_when_nil
1557
+ verify_subfields_when_nil(DEFAULT_LABELS_KEY)
1558
+ end
1559
+
1560
+ def test_log_entry_operation_field_when_nil
1561
+ verify_subfields_when_nil(DEFAULT_OPERATION_KEY)
1562
+ end
1563
+
1564
+ def test_log_entry_source_location_field_when_nil
1565
+ verify_subfields_when_nil(DEFAULT_SOURCE_LOCATION_KEY)
1566
+ end
1567
+
1568
+ def test_http_request_from_record_with_referer_nil_or_absent
1569
+ setup_gce_metadata_stubs
1570
+ [
1571
+ http_request_message_with_nil_referer,
1572
+ http_request_message_with_absent_referer
1573
+ ].each do |input|
1574
+ setup_logging_stubs do
1575
+ @logs_sent = []
1576
+ d = create_driver
1577
+ d.emit('httpRequest' => input)
1578
+ d.run
1579
+ end
1580
+ verify_log_entries(1, COMPUTE_PARAMS, 'httpRequest') do |entry|
1581
+ assert_equal http_request_message_with_absent_referer,
1582
+ entry['httpRequest'], entry
1583
+ assert_nil entry['jsonPayload']['httpRequest'], entry
1584
+ end
1585
+ end
1586
+ end
1587
+
1588
+ def test_http_request_with_latency
1589
+ setup_gce_metadata_stubs
1590
+ latency_conversion.each do |input, expected|
1591
+ setup_logging_stubs do
1592
+ d = create_driver
1593
+ @logs_sent = []
1594
+ d.emit('httpRequest' => HTTP_REQUEST_MESSAGE.merge('latency' => input))
1595
+ d.run
1596
+ end
1597
+ verify_log_entries(1, COMPUTE_PARAMS, 'httpRequest') do |entry|
1598
+ assert_equal HTTP_REQUEST_MESSAGE.merge('latency' => expected),
1599
+ entry['httpRequest'], entry
1600
+ assert_nil entry['jsonPayload']['httpRequest'], entry
1601
+ end
1602
+ end
1603
+ end
1604
+
1605
+ # Skip setting latency when the field is nil, empty, or in an invalid format.
1606
+ def test_http_request_skip_setting_latency
1607
+ setup_gce_metadata_stubs
1608
+ [
1609
+ '', ' ', nil, 'null', '123', '1.23 seconds',
1610
+ ' 123 s econds ', '1min', 'abc&^!$*('
1611
+ ].each do |input|
1612
+ setup_logging_stubs do
1613
+ d = create_driver
1614
+ @logs_sent = []
1615
+ d.emit('httpRequest' => HTTP_REQUEST_MESSAGE.merge('latency' => input))
1616
+ d.run
1617
+ end
1618
+ verify_log_entries(1, COMPUTE_PARAMS, 'httpRequest') do |entry|
1619
+ assert_equal HTTP_REQUEST_MESSAGE, entry['httpRequest'], entry
1620
+ assert_nil entry['jsonPayload']['httpRequest'], entry
1621
+ end
1622
+ end
1623
+ end
1624
+
1625
+ # Verify the default and custom LogEntry field extraction keys.
1626
+
1627
+ def test_log_entry_insert_id_field
1628
+ verify_field_key('insertId',
1629
+ default_key: DEFAULT_INSERT_ID_KEY,
1630
+ custom_key: 'custom_insert_id_key',
1631
+ custom_key_config: CONFIG_CUSTOM_INSERT_ID_KEY_SPECIFIED,
1632
+ sample_value: INSERT_ID)
1633
+ end
1634
+
1635
+ def test_log_entry_labels_field
1636
+ verify_field_key('labels',
1637
+ default_key: DEFAULT_LABELS_KEY,
1638
+ custom_key: 'custom_labels_key',
1639
+ custom_key_config: CONFIG_CUSTOM_LABELS_KEY_SPECIFIED,
1640
+ sample_value: COMPUTE_PARAMS[:labels].merge(
1641
+ LABELS_MESSAGE
1642
+ ),
1643
+ default_value: COMPUTE_PARAMS[:labels])
1644
+ end
1645
+
1646
+ def test_log_entry_operation_field
1647
+ verify_field_key('operation',
1648
+ default_key: DEFAULT_OPERATION_KEY,
1649
+ custom_key: 'custom_operation_key',
1650
+ custom_key_config: CONFIG_CUSTOM_OPERATION_KEY_SPECIFIED,
1651
+ sample_value: OPERATION_MESSAGE)
1652
+ end
1653
+
1654
+ def test_log_entry_source_location_field
1655
+ verify_field_key('sourceLocation',
1656
+ default_key: DEFAULT_SOURCE_LOCATION_KEY,
1657
+ custom_key: 'custom_source_location_key',
1658
+ custom_key_config: \
1659
+ CONFIG_CUSTOM_SOURCE_LOCATION_KEY_SPECIFIED,
1660
+ sample_value: SOURCE_LOCATION_MESSAGE)
1661
+ end
1662
+
1663
+ def test_log_entry_span_id_field
1664
+ verify_field_key('spanId',
1665
+ default_key: DEFAULT_SPAN_ID_KEY,
1666
+ custom_key: 'custom_span_id_key',
1667
+ custom_key_config: CONFIG_CUSTOM_SPAN_ID_KEY_SPECIFIED,
1668
+ sample_value: SPAN_ID)
1669
+ end
1670
+
1671
+ def test_log_entry_trace_field
1672
+ verify_field_key('trace',
1673
+ default_key: DEFAULT_TRACE_KEY,
1674
+ custom_key: 'custom_trace_key',
1675
+ custom_key_config: CONFIG_CUSTOM_TRACE_KEY_SPECIFIED,
1676
+ sample_value: TRACE)
1677
+ end
1678
+
1679
+ def test_log_entry_trace_sampled_field
1680
+ verify_field_key('traceSampled',
1681
+ default_key: DEFAULT_TRACE_SAMPLED_KEY,
1682
+ custom_key: 'custom_trace_sampled_key',
1683
+ custom_key_config:
1684
+ CONFIG_CUSTOM_TRACE_SAMPLED_KEY_SPECIFIED,
1685
+ sample_value: TRACE_SAMPLED)
1686
+ end
1687
+
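+ # A hedged illustration of the pattern exercised by the field-key tests
+ # above. The real CONFIG_CUSTOM_*_KEY_SPECIFIED constants live in
+ # constants.rb; the parameter name and values below are assumptions made
+ # for illustration only.
+ #
+ #   # e.g. CONFIG_CUSTOM_INSERT_ID_KEY_SPECIFIED might resemble:
+ #   #   insert_id_key custom_insert_id_key
+ #   d = create_driver(CONFIG_CUSTOM_INSERT_ID_KEY_SPECIFIED)
+ #   d.emit('msg' => 'test log entry 0',
+ #          'custom_insert_id_key' => 'some-insert-id')
+ #   d.run
+ #   # The emitted LogEntry would carry insertId 'some-insert-id', and the
+ #   # custom key would be stripped from jsonPayload.
+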
1688
+ # Verify the cascading JSON detection of LogEntry fields.
1689
+
1690
+ def test_cascading_json_detection_with_log_entry_insert_id_field
1691
+ verify_cascading_json_detection_with_log_entry_fields(
1692
+ 'insertId', DEFAULT_INSERT_ID_KEY,
1693
+ root_level_value: INSERT_ID,
1694
+ nested_level_value: INSERT_ID2
1695
+ )
1696
+ end
1697
+
1698
+ def test_cascading_json_detection_with_log_entry_labels_field
1699
+ verify_cascading_json_detection_with_log_entry_fields(
1700
+ 'labels', DEFAULT_LABELS_KEY,
1701
+ root_level_value: LABELS_MESSAGE,
1702
+ nested_level_value: LABELS_MESSAGE2,
1703
+ expected_value_from_root: COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE),
1704
+ expected_value_from_nested: COMPUTE_PARAMS[:labels].merge(
1705
+ LABELS_MESSAGE2
1706
+ )
1707
+ )
1708
+ end
1709
+
1710
+ def test_cascading_json_detection_with_log_entry_operation_field
1711
+ verify_cascading_json_detection_with_log_entry_fields(
1712
+ 'operation', DEFAULT_OPERATION_KEY,
1713
+ root_level_value: OPERATION_MESSAGE,
1714
+ nested_level_value: OPERATION_MESSAGE2,
1715
+ expected_value_from_nested: expected_operation_message2
1716
+ )
1717
+ end
1718
+
1719
+ def test_cascading_json_detection_with_log_entry_source_location_field
1720
+ verify_cascading_json_detection_with_log_entry_fields(
1721
+ 'sourceLocation', DEFAULT_SOURCE_LOCATION_KEY,
1722
+ root_level_value: SOURCE_LOCATION_MESSAGE,
1723
+ nested_level_value: SOURCE_LOCATION_MESSAGE2
1724
+ )
1725
+ end
1726
+
1727
+ def test_cascading_json_detection_with_log_entry_span_id_field
1728
+ verify_cascading_json_detection_with_log_entry_fields(
1729
+ 'spanId', DEFAULT_SPAN_ID_KEY,
1730
+ root_level_value: SPAN_ID,
1731
+ nested_level_value: SPAN_ID2
1732
+ )
1733
+ end
1734
+
1735
+ def test_cascading_json_detection_with_log_entry_trace_field
1736
+ verify_cascading_json_detection_with_log_entry_fields(
1737
+ 'trace', DEFAULT_TRACE_KEY,
1738
+ root_level_value: TRACE,
1739
+ nested_level_value: TRACE2
1740
+ )
1741
+ end
1742
+
1743
+ def test_cascading_json_detection_with_log_entry_trace_sampled_field
1744
+ verify_cascading_json_detection_with_log_entry_fields(
1745
+ 'traceSampled', DEFAULT_TRACE_SAMPLED_KEY,
1746
+ root_level_value: TRACE_SAMPLED,
1747
+ nested_level_value: TRACE_SAMPLED2,
1748
+ default_value_from_root: false,
1749
+ default_value_from_nested: false
1750
+ )
1751
+ end
1752
+
1753
+ # Verify that labels present in multiple inputs respect the expected priority
1754
+ # order:
1755
+ # 1. Labels from the field "logging.googleapis.com/labels" in payload.
1756
+ # 2. Labels from the config "label_map".
1757
+ # 3. Labels from the config "labels".
1758
+ def test_labels_order
1759
+ [
1760
+ # Labels from the config "labels".
1761
+ {
1762
+ config: CONFIG_LABELS,
1763
+ emitted_log: {},
1764
+ expected_labels: LABELS_FROM_LABELS_CONFIG
1765
+ },
1766
+ # Labels from the config "label_map".
1767
+ {
1768
+ config: CONFIG_LABEL_MAP,
1769
+ emitted_log: PAYLOAD_FOR_LABEL_MAP,
1770
+ expected_labels: LABELS_FROM_LABEL_MAP_CONFIG
1771
+ },
1772
+ # Labels from the field "logging.googleapis.com/labels" in payload.
1773
+ {
1774
+ config: APPLICATION_DEFAULT_CONFIG,
1775
+ emitted_log: { DEFAULT_LABELS_KEY => LABELS_MESSAGE },
1776
+ expected_labels: LABELS_MESSAGE
1777
+ },
1778
+ # All three types of labels that do not conflict.
1779
+ {
1780
+ config: CONFIG_LABLES_AND_LABLE_MAP,
1781
+ emitted_log: PAYLOAD_FOR_LABEL_MAP.merge(
1782
+ DEFAULT_LABELS_KEY => LABELS_MESSAGE
1783
+ ),
1784
+ expected_labels: LABELS_MESSAGE.merge(LABELS_FROM_LABELS_CONFIG).merge(
1785
+ LABELS_FROM_LABEL_MAP_CONFIG
1786
+ )
1787
+ },
1788
+ # Labels from the config "labels" and "label_map" conflict.
1789
+ {
1790
+ config: CONFIG_LABLES_AND_LABLE_MAP_CONFLICTING,
1791
+ emitted_log: PAYLOAD_FOR_LABEL_MAP_CONFLICTING,
1792
+ expected_labels: LABELS_FROM_LABEL_MAP_CONFIG_CONFLICTING
1793
+ },
1794
+ # Labels from the config "labels" and labels from the field
1795
+ # "logging.googleapis.com/labels" in payload conflict.
1796
+ {
1797
+ config: CONFIG_LABELS_CONFLICTING,
1798
+ emitted_log: { DEFAULT_LABELS_KEY => LABELS_FROM_PAYLOAD_CONFLICTING },
1799
+ expected_labels: LABELS_FROM_PAYLOAD_CONFLICTING
1800
+ },
1801
+ # Labels from the config "label_map" and labels from the field
1802
+ # "logging.googleapis.com/labels" in payload conflict.
1803
+ {
1804
+ config: CONFIG_LABEL_MAP_CONFLICTING,
1805
+ emitted_log: PAYLOAD_FOR_LABEL_MAP_CONFLICTING.merge(
1806
+ DEFAULT_LABELS_KEY => LABELS_FROM_PAYLOAD_CONFLICTING
1807
+ ),
1808
+ expected_labels: LABELS_FROM_PAYLOAD_CONFLICTING
1809
+ },
1810
+ # All three types of labels conflict.
1811
+ {
1812
+ config: CONFIG_LABLES_AND_LABLE_MAP_CONFLICTING,
1813
+ emitted_log: PAYLOAD_FOR_LABEL_MAP_CONFLICTING.merge(
1814
+ DEFAULT_LABELS_KEY => LABELS_FROM_PAYLOAD_CONFLICTING
1815
+ ),
1816
+ expected_labels: LABELS_FROM_PAYLOAD_CONFLICTING
1817
+ }
1818
+ ].each do |test_params|
1819
+ new_stub_context do
1820
+ setup_gce_metadata_stubs
1821
+ setup_logging_stubs do
1822
+ d = create_driver(test_params[:config])
1823
+ d.emit({ 'message' => log_entry(0) }.merge(test_params[:emitted_log]))
1824
+ d.run
1825
+ end
1826
+ expected_params = COMPUTE_PARAMS.merge(
1827
+ labels: COMPUTE_PARAMS[:labels].merge(test_params[:expected_labels])
1828
+ )
1829
+ verify_log_entries(1, expected_params)
1830
+ end
1831
+ end
1832
+ end
1833
+
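+ # A minimal sketch (illustration only, not used by the tests) of the
+ # precedence verified above, assuming each label source is a plain
+ # string-to-string hash; later merges win, matching the priority order
+ # documented before test_labels_order.
+ #
+ #   def effective_labels(static_labels, label_map_labels, payload_labels)
+ #     static_labels.merge(label_map_labels).merge(payload_labels)
+ #   end
+ #
+ #   effective_labels({ 'k' => 'from labels config' },
+ #                    { 'k' => 'from label_map' },
+ #                    { 'k' => 'from payload' })
+ #   # => { 'k' => 'from payload' }
+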
1834
+ # Test k8s_container monitored resource.
1835
+ def test_k8s_container_monitored_resource
1836
+ [
1837
+ {
1838
+ config: APPLICATION_DEFAULT_CONFIG,
1839
+ setup_k8s_stub: false,
1840
+ log_entry: k8s_container_log_entry(log_entry(0)),
1841
+ expected_params: K8S_CONTAINER_PARAMS_FROM_FALLBACK
1842
+ },
1843
+ {
1844
+ config: APPLICATION_DEFAULT_CONFIG,
1845
+ setup_k8s_stub: false,
1846
+ log_entry: k8s_container_log_entry(log_entry(0)),
1847
+ expected_params: K8S_CONTAINER_PARAMS_FROM_FALLBACK
1848
+ },
1849
+ {
1850
+ config: APPLICATION_DEFAULT_CONFIG,
1851
+ setup_k8s_stub: true,
1852
+ log_entry: k8s_container_log_entry(log_entry(0)),
1853
+ expected_params: K8S_CONTAINER_PARAMS_FROM_LOCAL
1854
+ },
1855
+ {
1856
+ config: APPLICATION_DEFAULT_CONFIG,
1857
+ setup_k8s_stub: true,
1858
+ log_entry: k8s_container_log_entry(log_entry(0)),
1859
+ expected_params: K8S_CONTAINER_PARAMS_FROM_LOCAL
1860
+ },
1861
+ {
1862
+ config: CUSTOM_K8S_CONFIG,
1863
+ setup_k8s_stub: false,
1864
+ log_entry: k8s_container_log_entry(log_entry(0)),
1865
+ expected_params: K8S_CONTAINER_PARAMS_CUSTOM
1866
+ },
1867
+ {
1868
+ config: EMPTY_K8S_CONFIG,
1869
+ setup_k8s_stub: true,
1870
+ log_entry: k8s_container_log_entry(log_entry(0)),
1871
+ expected_params: K8S_CONTAINER_PARAMS_FROM_LOCAL
1872
+ }
1873
+ ].each do |test_params|
1874
+ new_stub_context do
1875
+ setup_gce_metadata_stubs
1876
+ setup_k8s_metadata_stubs(test_params[:setup_k8s_stub])
1877
+ setup_logging_stubs do
1878
+ d = create_driver(test_params[:config], CONTAINER_TAG)
1879
+ d.emit(test_params[:log_entry])
1880
+ d.run
1881
+ end
1882
+ verify_log_entries(1, test_params[:expected_params],
1883
+ 'jsonPayload') do |entry|
1884
+ fields = entry['jsonPayload']
1885
+ assert_equal 2, fields.size, entry
1886
+ assert_equal 'test log entry 0', fields['log'], entry
1887
+ assert_equal K8S_STREAM, fields['stream'], entry
1888
+ end
1889
+ end
1890
+ end
1891
+ end
1892
+
1893
+ def test_k8s_container_monitored_resource_invalid_local_resource_id
1894
+ [
1895
+ # When local_resource_id is not present or does not match k8s regexes.
1896
+ {
1897
+ config: APPLICATION_DEFAULT_CONFIG,
1898
+ setup_k8s_stub: true,
1899
+ log_entry: k8s_container_log_entry(
1900
+ log_entry(0)
1901
+ ).reject { |k, _| k == LOCAL_RESOURCE_ID_KEY },
1902
+ expected_params: CONTAINER_FROM_TAG_PARAMS
1903
+ },
1904
+ {
1905
+ config: APPLICATION_DEFAULT_CONFIG,
1906
+ setup_k8s_stub: true,
1907
+ log_entry: k8s_container_log_entry(
1908
+ log_entry(0),
1909
+ local_resource_id: RANDOM_LOCAL_RESOURCE_ID
1910
+ ),
1911
+ expected_params: CONTAINER_FROM_TAG_PARAMS
1912
+ }
1913
+ ].each do |test_params|
1914
+ new_stub_context do
1915
+ setup_gce_metadata_stubs
1916
+ setup_k8s_metadata_stubs(test_params[:setup_k8s_stub])
1917
+ setup_logging_stubs do
1918
+ d = create_driver(test_params[:config], CONTAINER_TAG)
1919
+ d.emit(test_params[:log_entry])
1920
+ d.run
1921
+ end
1922
+ verify_log_entries(1, test_params[:expected_params]) do |entry|
1923
+ assert_equal 'test log entry 0', entry['textPayload'], entry
1924
+ end
1925
+ end
1926
+ end
1927
+ end
1928
+
1929
+ # Test k8s_pod monitored resource.
1930
+ def test_k8s_pod_monitored_resource
1931
+ [
1932
+ {
1933
+ config: APPLICATION_DEFAULT_CONFIG,
1934
+ setup_k8s_stub: true,
1935
+ log_entry: k8s_pod_log_entry(log_entry(0)),
1936
+ expected_params: K8S_POD_PARAMS_FROM_LOCAL
1937
+ },
1938
+ {
1939
+ config: CUSTOM_K8S_CONFIG,
1940
+ setup_k8s_stub: false,
1941
+ log_entry: k8s_pod_log_entry(log_entry(0)),
1942
+ expected_params: K8S_POD_PARAMS_CUSTOM
1943
+ },
1944
+ {
1945
+ config: EMPTY_K8S_CONFIG,
1946
+ setup_k8s_stub: true,
1947
+ log_entry: k8s_pod_log_entry(log_entry(0)),
1948
+ expected_params: K8S_POD_PARAMS_FROM_LOCAL
1949
+ }
1950
+ ].each do |test_params|
1951
+ new_stub_context do
1952
+ setup_gce_metadata_stubs
1953
+ setup_k8s_metadata_stubs(test_params[:setup_k8s_stub])
1954
+ setup_logging_stubs do
1955
+ d = create_driver(test_params[:config])
1956
+ d.emit(test_params[:log_entry])
1957
+ d.run
1958
+ end
1959
+ verify_log_entries(1, test_params[:expected_params],
1960
+ 'jsonPayload') do |entry|
1961
+ fields = entry['jsonPayload']
1962
+ assert_equal 2, fields.size, entry
1963
+ assert_equal 'test log entry 0', fields['log'], entry
1964
+ assert_equal K8S_STREAM, fields['stream'], entry
1965
+ end
1966
+ end
1967
+ end
1968
+ end
1969
+
1970
+ # Test k8s_node monitored resource.
1971
+ def test_k8s_node_monitored_resource
1972
+ [
1973
+ {
1974
+ config: APPLICATION_DEFAULT_CONFIG,
1975
+ setup_k8s_stub: true,
1976
+ log_entry: k8s_node_log_entry(log_entry(0)),
1977
+ expected_params: K8S_NODE_PARAMS_FROM_LOCAL
1978
+ },
1979
+ {
1980
+ config: CUSTOM_K8S_CONFIG,
1981
+ setup_k8s_stub: false,
1982
+ log_entry: k8s_node_log_entry(log_entry(0)),
1983
+ expected_params: K8S_NODE_PARAMS_CUSTOM
1984
+ },
1985
+ {
1986
+ config: EMPTY_K8S_CONFIG,
1987
+ setup_k8s_stub: true,
1988
+ log_entry: k8s_node_log_entry(log_entry(0)),
1989
+ expected_params: K8S_NODE_PARAMS_FROM_LOCAL
1990
+ }
1991
+ ].each do |test_params|
1992
+ new_stub_context do
1993
+ setup_gce_metadata_stubs
1994
+ setup_k8s_metadata_stubs(test_params[:setup_k8s_stub])
1995
+ setup_logging_stubs do
1996
+ d = create_driver(test_params[:config])
1997
+ d.emit(test_params[:log_entry])
1998
+ d.run
1999
+ end
2000
+ verify_log_entries(1, test_params[:expected_params],
2001
+ 'jsonPayload') do |entry|
2002
+ fields = entry['jsonPayload']
2003
+ assert_equal 2, fields.size, entry
2004
+ assert_equal 'test log entry 0', fields['log'], entry
2005
+ assert_equal K8S_STREAM, fields['stream'], entry
2006
+ end
2007
+ end
2008
+ end
2009
+ end
2010
+
2011
+ def test_uptime_metric
2012
+ setup_gce_metadata_stubs
2013
+ [
2014
+ [ENABLE_PROMETHEUS_CONFIG, method(:assert_prometheus_metric_value)],
2015
+ [ENABLE_OPENCENSUS_CONFIG, method(:assert_opencensus_metric_value)]
2016
+ ].each do |config, assert_metric_value|
2017
+ clear_metrics
2018
+ start_time = Time.now.to_i
2019
+ d = create_driver(config)
2020
+ d.run
2021
+ begin
2022
+ # Retry to protect from time races.
2023
+ retries ||= 0
2024
+ expected = Time.now.to_i - start_time
2025
+ d.instance.update_uptime
2026
+ assert_metric_value.call(
2027
+ :uptime, expected, 'agent.googleapis.com/agent',
2028
+ OpenCensus::Stats::Aggregation::Sum, d,
2029
+ version: Fluent::GoogleCloudOutput.version_string
2030
+ )
2031
+ rescue Test::Unit::AssertionFailedError
2032
+ retry if (retries += 1) < 3
2033
+ end
2034
+ assert_not_equal 3, retries
2035
+ end
2036
+ end
2037
+
2038
+ def test_metrics
2039
+ setup_gce_metadata_stubs
2040
+ [
2041
+ [ENABLE_PROMETHEUS_CONFIG, method(:assert_prometheus_metric_value)],
2042
+ [ENABLE_OPENCENSUS_CONFIG, method(:assert_opencensus_metric_value)]
2043
+ ].each do |config, assert_metric_value|
2044
+ [
2045
+ # Single successful request.
2046
+ [ok_status_code, 1, 1, [0, 0, 0]],
2047
+ # Several successful requests.
2048
+ [ok_status_code, 2, 1, [0, 0, 0]],
2049
+ # Single successful request with several entries.
2050
+ [ok_status_code, 1, 2, [0, 0, 0]],
2051
+ # Single failed request that causes logs to be dropped.
2052
+ [client_error_status_code, 1, 1, [1, 1, 0]],
2053
+ # Single failed request with several entries; the error escalates, so the
2054
+ # entries are retried rather than dropped.
2055
+ [server_error_status_code, 1, 2, [0, 0, 2]]
2056
+ ].each do |code, request_count, entry_count, metric_values|
2057
+ clear_metrics
2058
+ setup_logging_stubs(nil, code, 'SomeMessage') do
2059
+ (1..request_count).each do |request_index|
2060
+ d = create_driver(config)
2061
+ (1..entry_count).each do |entry_index|
2062
+ d.emit('message' => log_entry(entry_index.to_s))
2063
+ end
2064
+ # rubocop:disable Lint/SuppressedException
2065
+ begin
2066
+ d.run
2067
+ rescue mock_error_type
2068
+ end
2069
+ # rubocop:enable Lint/SuppressedException
2070
+ failed_requests_count, dropped_entries_count,
2071
+ retried_entries_count = metric_values
2072
+
2073
+ successful_requests_count = \
2074
+ if code != ok_status_code
2075
+ 0
2076
+ elsif config == ENABLE_OPENCENSUS_CONFIG
2077
+ # TODO(b/173215689) Improve the OpenCensus side of testing.
2078
+ # The test driver instance variables cannot survive between
2079
+ # test driver runs, so the OpenCensus-side counter gets
2080
+ # reset between runs.
2081
+ 1
2082
+ else
2083
+ request_index
2084
+ end
2085
+
2086
+ ingested_entries_count = \
2087
+ if code != ok_status_code
2088
+ 0
2089
+ elsif config == ENABLE_OPENCENSUS_CONFIG
2090
+ # TODO(b/173215689) Improve the OpenCensus side of testing.
2091
+ # The test driver instance variables cannot survive between
2092
+ # test driver runs, so the OpenCensus-side counter gets
2093
+ # reset between runs.
2094
+ entry_count
2095
+ else
2096
+ request_index * entry_count
2097
+ end
2098
+
2099
+ assert_metric_value.call(:stackdriver_successful_requests_count,
2100
+ successful_requests_count,
2101
+ 'agent.googleapis.com/agent',
2102
+ OpenCensus::Stats::Aggregation::Sum, d,
2103
+ grpc: use_grpc, code: ok_status_code)
2104
+ assert_metric_value.call(:stackdriver_ingested_entries_count,
2105
+ ingested_entries_count,
2106
+ 'agent.googleapis.com/agent',
2107
+ OpenCensus::Stats::Aggregation::Sum, d,
2108
+ grpc: use_grpc, code: ok_status_code)
2109
+ assert_metric_value.call(:stackdriver_retried_entries_count,
2110
+ retried_entries_count,
2111
+ 'agent.googleapis.com/agent',
2112
+ OpenCensus::Stats::Aggregation::Sum, d,
2113
+ grpc: use_grpc, code: code)
2114
+ # Skip the failure assertions when the code indicates success, because they
2115
+ # would fail whenever a single metric contains time series for both success
2116
+ # and failure events.
2117
+ next if code == ok_status_code
2118
+
2119
+ assert_metric_value.call(:stackdriver_failed_requests_count,
2120
+ failed_requests_count,
2121
+ 'agent.googleapis.com/agent',
2122
+ OpenCensus::Stats::Aggregation::Sum, d,
2123
+ grpc: use_grpc, code: code)
2124
+ assert_metric_value.call(:stackdriver_dropped_entries_count,
2125
+ dropped_entries_count,
2126
+ 'agent.googleapis.com/agent',
2127
+ OpenCensus::Stats::Aggregation::Sum, d,
2128
+ grpc: use_grpc, code: code)
2129
+ end
2130
+ end
2131
+ end
2132
+ end
2133
+ end
2134
+
2135
+ private
2136
+
2137
+ # Provide a stub context that initializes @logs_sent, executes the block and
2138
+ # resets WebMock at the end.
2139
+ def new_stub_context
2140
+ @logs_sent = []
2141
+ yield
2142
+ WebMock.reset!
2143
+ end
2144
+
2145
+ # GKE Container.
2146
+
2147
+ def container_tag_with_container_name(container_name)
2148
+ "kubernetes.#{K8S_POD_NAME}_#{K8S_NAMESPACE_NAME}_#{container_name}"
2149
+ end
2150
+
2151
+ def container_log_entry_with_metadata(
2152
+ log, container_name = K8S_CONTAINER_NAME
2153
+ )
2154
+ {
2155
+ log: log,
2156
+ stream: K8S_STREAM,
2157
+ time: K8S_TIMESTAMP,
2158
+ kubernetes: {
2159
+ namespace_id: CONTAINER_NAMESPACE_ID,
2160
+ namespace_name: K8S_NAMESPACE_NAME,
2161
+ pod_id: CONTAINER_POD_ID,
2162
+ pod_name: K8S_POD_NAME,
2163
+ container_name: container_name,
2164
+ labels: {
2165
+ CONTAINER_LABEL_KEY => CONTAINER_LABEL_VALUE
2166
+ }
2167
+ }
2168
+ }
2169
+ end
2170
+
2171
+ def container_log_entry(log, stream = K8S_STREAM)
2172
+ {
2173
+ log: log,
2174
+ stream: stream,
2175
+ time: K8S_TIMESTAMP
2176
+ }
2177
+ end
2178
+
2179
+ def k8s_container_log_entry(log,
2180
+ local_resource_id: K8S_LOCAL_RESOURCE_ID)
2181
+ {
2182
+ log: log,
2183
+ stream: K8S_STREAM,
2184
+ time: K8S_TIMESTAMP,
2185
+ LOCAL_RESOURCE_ID_KEY => local_resource_id
2186
+ }
2187
+ end
2188
+
2189
+ def k8s_pod_log_entry(log)
2190
+ {
2191
+ log: log,
2192
+ stream: K8S_STREAM,
2193
+ time: K8S_TIMESTAMP,
2194
+ LOCAL_RESOURCE_ID_KEY =>
2195
+ "#{K8S_POD_LOCAL_RESOURCE_ID_PREFIX}" \
2196
+ ".#{K8S_NAMESPACE_NAME}" \
2197
+ ".#{K8S_POD_NAME}"
2198
+ }
2199
+ end
2200
+
2201
+ def k8s_node_log_entry(log)
2202
+ {
2203
+ log: log,
2204
+ stream: K8S_STREAM,
2205
+ time: K8S_TIMESTAMP,
2206
+ LOCAL_RESOURCE_ID_KEY =>
2207
+ "#{K8S_NODE_LOCAL_RESOURCE_ID_PREFIX}" \
2208
+ ".#{K8S_NODE_NAME}"
2209
+ }
2210
+ end
2211
+
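+ # For reference, the local_resource_id values used by the helpers above
+ # follow a dot-separated "<resource type>.<identifiers>" convention. The
+ # concrete constants come from constants.rb; the values below are only
+ # illustrative:
+ #
+ #   'k8s_container.some-namespace.some-pod.some-container'
+ #   'k8s_pod.some-namespace.some-pod'
+ #   'k8s_node.some-node'
+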
2212
+ def dataflow_log_entry(index)
2213
+ {
2214
+ step: DATAFLOW_STEP_ID,
2215
+ message: log_entry(index)
2216
+ }
2217
+ end
2218
+
2219
+ def dataproc_log_entry(message, source_class = 'com.example.Example',
2220
+ filename = 'test.log')
2221
+ {
2222
+ filename: filename,
2223
+ class: source_class,
2224
+ message: log_entry(message)
2225
+ }
2226
+ end
2227
+
2228
+ def ml_log_entry(index)
2229
+ {
2230
+ name: ML_LOG_AREA,
2231
+ message: log_entry(index)
2232
+ }
2233
+ end
2234
+
2235
+ def structured_log_entry
2236
+ {
2237
+ 'name' => 'test name',
2238
+ 'code' => 'test code'
2239
+ }
2240
+ end
2241
+
2242
+ def log_entry(index)
2243
+ "test log entry #{index}"
2244
+ end
2245
+
2246
+ # If check_exact_labels is true, assert 'labels' and 'expected_labels' match
2247
+ # exactly. If check_exact_labels is false, assert 'labels' is a subset of
2248
+ # 'expected_labels'.
2249
+ def check_labels(expected_labels, labels, check_exact_labels = true)
2250
+ return if expected_labels.empty? && labels.empty?
2251
+
2252
+ expected_labels.each do |expected_key, expected_value|
2253
+ assert labels.key?(expected_key), "Expected label #{expected_key} not" \
2254
+ " found. Got labels: #{labels}."
2255
+ actual_value = labels[expected_key]
2256
+ assert actual_value.is_a?(String), 'Value for label' \
2257
+ " #{expected_key} is not a string: #{actual_value}."
2258
+ assert_equal expected_value, actual_value, "Value for #{expected_key}" \
2259
+ " mismatch. Expected #{expected_value}. Got #{actual_value}"
2260
+ end
2261
+ return unless check_exact_labels
2262
+
2263
+ assert_equal expected_labels.length, labels.length, 'Expected ' \
2264
+ "#{expected_labels.length} labels: #{expected_labels}, got " \
2265
+ "#{labels.length} labels: #{labels}"
2266
+ end
2267
+
2268
+ def verify_default_log_entry_text(text, index, entry)
2269
+ assert_equal "test log entry #{index}", text,
2270
+ "Entry ##{index} had unexpected text: #{entry}"
2271
+ end
2272
+
2273
+ # The caller can optionally provide a block which is called for each entry.
2274
+ def verify_json_log_entries(expected_count, params,
2275
+ payload_type = 'textPayload',
2276
+ check_exact_entry_labels = true)
2277
+ entry_count = 0
2278
+ @logs_sent.each do |request|
2279
+ request['entries'].each do |entry|
2280
+ unless payload_type.empty?
2281
+ assert entry.key?(payload_type),
2282
+ "Entry ##{entry_count} did not contain expected" \
2283
+ " #{payload_type} key: #{entry}."
2284
+ end
2285
+
2286
+ # A per-entry resource or logName overrides the corresponding field
2287
+ # from the request. Labels are merged, with the per-entry label
2288
+ # taking precedence in case of overlap.
2289
+ resource = entry['resource'] || request['resource']
2290
+ log_name = entry['logName'] || request['logName']
2291
+
2292
+ labels = request['labels'] || {}
2293
+ labels = labels.merge(entry['labels'] || {})
2294
+
2295
+ if params[:log_name]
2296
+ assert_equal \
2297
+ "projects/#{params[:project_id]}/logs/#{params[:log_name]}",
2298
+ log_name
2299
+ end
2300
+ assert_equal params[:resource][:type], resource['type']
2301
+ check_labels params[:resource][:labels], resource['labels']
2302
+
2303
+ check_labels params[:labels], labels, check_exact_entry_labels
2304
+
2305
+ if block_given?
2306
+ yield(entry, entry_count)
2307
+ elsif payload_type == 'textPayload'
2308
+ # Check the payload for textPayload, otherwise it's up to the caller.
2309
+ verify_default_log_entry_text(entry['textPayload'], entry_count,
2310
+ entry)
2311
+ end
2312
+ entry_count += 1
2313
+ assert entry_count <= expected_count,
2314
+ "Number of entries #{entry_count} exceeds expected number #{expected_count}."
2315
+ end
2316
+ end
2317
+ assert_equal expected_count, entry_count
2318
+ end
2319
+
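+ # For reference, each element of @logs_sent is expected to look roughly
+ # like a WriteLogEntries request body. The shape below is illustrative;
+ # concrete values come from the stubs and constants used by each test:
+ #
+ #   {
+ #     'logName' => 'projects/<project_id>/logs/<log_name>',
+ #     'resource' => { 'type' => '<resource type>', 'labels' => { ... } },
+ #     'labels' => { ... },
+ #     'entries' => [
+ #       { 'textPayload' => 'test log entry 0', ... }
+ #     ]
+ #   }
+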
2320
+ def verify_container_logs(log_entry_factory, expected_params)
2321
+ setup_gce_metadata_stubs
2322
+ setup_k8s_metadata_stubs
2323
+ [1, 2, 3, 5, 11, 50].each do |n|
2324
+ @logs_sent = []
2325
+ setup_logging_stubs do
2326
+ d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
2327
+ n.times { |i| d.emit(log_entry_factory.call(log_entry(i))) }
2328
+ d.run
2329
+ end
2330
+ verify_log_entries(n, expected_params) do |entry, i|
2331
+ verify_default_log_entry_text(entry['textPayload'], i, entry)
2332
+ actual_timestamp = timestamp_parse(entry['timestamp'])
2333
+ assert_equal K8S_SECONDS_EPOCH, actual_timestamp['seconds'], entry
2334
+ assert_equal K8S_NANOS, actual_timestamp['nanos'], entry
2335
+ assert_equal CONTAINER_SEVERITY, entry['severity'], entry
2336
+ end
2337
+ end
2338
+ end
2339
+
2340
+ def log_entry_subfields_params
2341
+ {
2342
+ # The keys are the names of the payload fields that LogEntry info is
2343
+ # extracted from. The values are two-element lists: the name of the
2344
+ # corresponding LogEntry subfield and the expected value of that field.
2345
+ DEFAULT_HTTP_REQUEST_KEY => [
2346
+ 'httpRequest', HTTP_REQUEST_MESSAGE
2347
+ ],
2348
+ DEFAULT_LABELS_KEY => [
2349
+ 'labels', COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE)
2350
+ ],
2351
+ DEFAULT_OPERATION_KEY => [
2352
+ 'operation', OPERATION_MESSAGE
2353
+ ],
2354
+ DEFAULT_SOURCE_LOCATION_KEY => [
2355
+ 'sourceLocation', SOURCE_LOCATION_MESSAGE
2356
+ ]
2357
+ }
2358
+ end
2359
+
2360
+ def verify_subfields_from_record(payload_key, check_exact_entry_labels = true)
2361
+ destination_key, payload_value = log_entry_subfields_params[payload_key]
2362
+ @logs_sent = []
2363
+ setup_gce_metadata_stubs
2364
+ setup_logging_stubs do
2365
+ d = create_driver
2366
+ d.emit(payload_key => payload_value)
2367
+ d.run
2368
+ end
2369
+ verify_log_entries(1, COMPUTE_PARAMS, destination_key,
2370
+ check_exact_entry_labels) do |entry|
2371
+ assert_equal payload_value, entry[destination_key], entry
2372
+ fields = entry['jsonPayload']
2373
+ assert_nil fields[payload_key], entry
2374
+ end
2375
+ end
2376
+
2377
+ def verify_subfields_partial_from_record(payload_key)
2378
+ destination_key, payload_value = log_entry_subfields_params[payload_key]
2379
+ @logs_sent = []
2380
+ setup_gce_metadata_stubs
2381
+ setup_logging_stubs do
2382
+ d = create_driver
2383
+ d.emit(payload_key => payload_value.merge('otherKey' => 'value'))
2384
+ d.run
2385
+ end
2386
+ verify_log_entries(1, COMPUTE_PARAMS, destination_key) do |entry|
2387
+ assert_equal payload_value, entry[destination_key], entry
2388
+ fields = entry['jsonPayload']
2389
+ request = fields[payload_key]
2390
+ assert_equal 'value', request['otherKey'], entry
2391
+ end
2392
+ end
2393
+
2394
+ def verify_subfields_removed_when_not_hash(payload_key)
2395
+ destination_key = log_entry_subfields_params[payload_key][0]
2396
+ @logs_sent = []
2397
+ setup_gce_metadata_stubs
2398
+ setup_logging_stubs do
2399
+ d = create_driver
2400
+ d.emit(payload_key => 'a_string')
2401
+ d.run
2402
+ end
2403
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
2404
+ # The malformed field has been removed from the payload.
2405
+ assert_true entry['jsonPayload'].empty?, entry
2406
+ # No additional labels.
2407
+ assert_equal COMPUTE_PARAMS[:labels].size,
2408
+ entry[destination_key].size, entry
2409
+ end
2410
+ end
2411
+
2412
+ def verify_subfields_untouched_when_not_hash(payload_key)
2413
+ destination_key = log_entry_subfields_params[payload_key][0]
2414
+ @logs_sent = []
2415
+ setup_gce_metadata_stubs
2416
+ setup_logging_stubs do
2417
+ d = create_driver
2418
+ d.emit(payload_key => 'a_string')
2419
+ d.run
2420
+ end
2421
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
2422
+ # Verify that we leave the malformed field as it is.
2423
+ field = entry['jsonPayload'][payload_key]
2424
+ assert_equal 'a_string', field, entry
2425
+ assert_false entry.key?(destination_key), entry
2426
+ end
2427
+ end
2428
+
2429
+ def verify_subfields_when_nil(payload_key)
2430
+ destination_key = log_entry_subfields_params[payload_key][0]
2431
+ @logs_sent = []
2432
+ setup_gce_metadata_stubs
2433
+ setup_logging_stubs do
2434
+ d = create_driver
2435
+ d.emit(payload_key => nil)
2436
+ d.run
2437
+ end
2438
+
2439
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
2440
+ fields = entry['jsonPayload']
2441
+ assert_false fields.key?(payload_key), entry
2442
+ if payload_key == DEFAULT_LABELS_KEY
2443
+ # No additional labels.
2444
+ assert_equal COMPUTE_PARAMS[:labels].size,
2445
+ entry[destination_key].size, entry
2446
+ else
2447
+ assert_false entry.key?(destination_key), entry
2448
+ end
2449
+ end
2450
+ end
2451
+
2452
+ # Cascading JSON detection is only triggered when the record has exactly one
2453
+ # field left, named "log", "message" or "msg". This test verifies that
2454
+ # additional LogEntry fields like spanId and trace do not disable it by accident.
2455
+ def verify_cascading_json_detection_with_log_entry_fields(
2456
+ log_entry_field, default_key, expectation
2457
+ )
2458
+ root_level_value = expectation[:root_level_value]
2459
+ nested_level_value = expectation[:nested_level_value]
2460
+ expected_value_from_root = expectation.fetch(
2461
+ :expected_value_from_root, root_level_value
2462
+ )
2463
+ expected_value_from_nested = expectation.fetch(
2464
+ :expected_value_from_nested, nested_level_value
2465
+ )
2466
+ default_value_from_root = expectation.fetch(
2467
+ :default_value_from_root, nil
2468
+ )
2469
+ default_value_from_nested = expectation.fetch(
2470
+ :default_value_from_nested, nil
2471
+ )
2472
+
2473
+ setup_gce_metadata_stubs
2474
+
2475
+ # {
2476
+ # "logging.googleapis.com/XXX' => 'sample value'
2477
+ # "msg": {
2478
+ # "name": "test name",
2479
+ # "code": "test code"
2480
+ # }
2481
+ # }
2482
+ log_entry_with_root_level_field = {
2483
+ default_key => root_level_value,
2484
+ 'msg' => structured_log_entry.to_json
2485
+ }
2486
+ # {
2487
+ # "msg": {
2488
+ # "logging.googleapis.com/XXX' => 'another value',
2489
+ # "name": "test name",
2490
+ # "code": "test code"
2491
+ # }
2492
+ # }
2493
+ log_entry_with_nested_level_field = {
2494
+ 'msg' => {
2495
+ default_key => nested_level_value
2496
+ }.merge(structured_log_entry).to_json
2497
+ }
2498
+ # {
2499
+ # "logging.googleapis.com/XXX' => 'sample value'
2500
+ # "msg": {
2501
+ # "logging.googleapis.com/XXX' => 'another value',
2502
+ # "name": "test name",
2503
+ # "code": "test code"
2504
+ # }
2505
+ # }
2506
+ log_entry_with_both_level_fields = log_entry_with_nested_level_field.merge(
2507
+ default_key => root_level_value
2508
+ )
2509
+
2510
+ [
2511
+ [
2512
+ log_entry_with_root_level_field,
2513
+ expected_value_from_root,
2514
+ default_value_from_root
2515
+ ],
2516
+ [
2517
+ log_entry_with_nested_level_field,
2518
+ expected_value_from_nested,
2519
+ default_value_from_nested
2520
+ ],
2521
+ [
2522
+ log_entry_with_both_level_fields,
2523
+ expected_value_from_nested,
2524
+ default_value_from_nested
2525
+ ]
2526
+ ].each_with_index do |(log_entry, expected_value, default_value), index|
2527
+ setup_logging_stubs do
2528
+ @logs_sent = []
2529
+ d = create_driver(DETECT_JSON_CONFIG)
2530
+ d.emit(log_entry)
2531
+ d.run
2532
+ end
2533
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload', false) do |entry|
2534
+ assert_equal_with_default \
2535
+ entry[log_entry_field], expected_value, default_value,
2536
+ "Index #{index} failed. #{expected_value} is expected for " \
2537
+ "#{log_entry_field} field."
2538
+ payload_fields = entry['jsonPayload']
2539
+ assert_equal structured_log_entry.size, payload_fields.size
2540
+ payload_fields.each do |key, value|
2541
+ assert_equal structured_log_entry[key], value
2542
+ end
2543
+ end
2544
+ end
2545
+ end
2546
+
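+ # A hedged sketch of the cascading detection exercised above, assuming the
+ # detect_json behavior and the documented "logging.googleapis.com/..."
+ # special keys; values are illustrative:
+ #
+ #   d = create_driver(DETECT_JSON_CONFIG)
+ #   d.emit(
+ #     'logging.googleapis.com/spanId' => '000000000000004a',
+ #     'msg' => { 'name' => 'test name', 'code' => 'test code' }.to_json
+ #   )
+ #   d.run
+ #   # Expected result: jsonPayload { "name" => "test name",
+ #   # "code" => "test code" } with spanId "000000000000004a" promoted to
+ #   # the LogEntry, since "msg" was the only payload field left.
+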
2547
+ def verify_field_key(log_entry_field, test_params)
2548
+ default_key = test_params[:default_key]
2549
+ custom_key = test_params[:custom_key]
2550
+ custom_key_config = test_params[:custom_key_config]
2551
+ sample_value = test_params[:sample_value]
2552
+ default_value = test_params.fetch(:default_value, nil)
2553
+
2554
+ setup_gce_metadata_stubs
2555
+ message = log_entry(0)
2556
+ [
2557
+ {
2558
+ # It leaves the log entry field nil if no keyed value is sent.
2559
+ driver_config: APPLICATION_DEFAULT_CONFIG,
2560
+ emitted_log: { 'msg' => message },
2561
+ expected_payload: { 'msg' => message },
2562
+ expected_field_value: default_value
2563
+ },
2564
+ {
2565
+ # By default, it sets the log entry field via the default key.
2566
+ driver_config: APPLICATION_DEFAULT_CONFIG,
2567
+ emitted_log: { 'msg' => message, default_key => sample_value },
2568
+ expected_payload: { 'msg' => message },
2569
+ expected_field_value: sample_value
2570
+ },
2571
+ {
2572
+ # It allows setting the log entry field via a custom configured key.
2573
+ driver_config: custom_key_config,
2574
+ emitted_log: { 'msg' => message, custom_key => sample_value },
2575
+ expected_payload: { 'msg' => message },
2576
+ expected_field_value: sample_value
2577
+ },
2578
+ {
2579
+ # It ignores the default key if a custom key is specified.
2580
+ driver_config: custom_key_config,
2581
+ emitted_log: { 'msg' => message, default_key => sample_value },
2582
+ expected_payload: { 'msg' => message, default_key => sample_value },
2583
+ expected_field_value: default_value
2584
+ }
2585
+ ].each do |input|
2586
+ setup_logging_stubs do
2587
+ @logs_sent = []
2588
+ d = create_driver(input[:driver_config])
2589
+ d.emit(input[:emitted_log])
2590
+ d.run
2591
+ end
2592
+ verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload', false) do |entry|
2593
+ assert_equal input[:expected_field_value], entry[log_entry_field], input
2594
+ payload_fields = entry['jsonPayload']
2595
+ assert_equal input[:expected_payload].size, payload_fields.size, input
2596
+ payload_fields.each do |key, value|
2597
+ assert_equal input[:expected_payload][key], value
2598
+ end
2599
+ end
2600
+ end
2601
+ end
2602
+
2603
+ # Replace the 'referer' field with nil.
2604
+ def http_request_message_with_nil_referer
2605
+ HTTP_REQUEST_MESSAGE.merge('referer' => nil)
2606
+ end
2607
+
2608
+ # Unset the 'referer' field.
2609
+ def http_request_message_with_absent_referer
2610
+ HTTP_REQUEST_MESSAGE.reject do |k, _|
2611
+ k == 'referer'
2612
+ end
2613
+ end
2614
+
2615
+ # The expected conversions from user-provided latency values to output values.
2616
+ def latency_conversion
2617
+ _undefined
2618
+ end
2619
+
2620
+ # This module expects the methods below to be overridden.
2621
+
2622
+ # Create a Fluentd output test driver with the Google Cloud Output plugin.
2623
+ def create_driver(_conf = APPLICATION_DEFAULT_CONFIG, _tag = 'test')
2624
+ _undefined
2625
+ end
2626
+
2627
+ # Set up HTTP or gRPC stubs to mock the external calls.
2628
+ def setup_logging_stubs(_error = nil, _code = nil, _message = nil)
2629
+ _undefined
2630
+ end
2631
+
2632
+ # Whether this is the gRPC path.
2633
+ def use_grpc
2634
+ _undefined
2635
+ end
2636
+
2637
+ # The OK status code.
2638
+ def ok_status_code
2639
+ _undefined
2640
+ end
2641
+
2642
+ # A client side error status code.
2643
+ def client_error_status_code
2644
+ _undefined
2645
+ end
2646
+
2647
+ # A server side error status code.
2648
+ def server_error_status_code
2649
+ _undefined
2650
+ end
2651
+
2652
+ # The parent error type to expect in the mock
2653
+ def mock_error_type
2654
+ _undefined
2655
+ end
2656
+
2657
+ # Verify the number and the content of the log entries match the expectation.
2658
+ # The caller can optionally provide a block which is called for each entry.
2659
+ def verify_log_entries(_expected_count, _params,
2660
+ _payload_type = 'textPayload',
2661
+ _check_exact_entry_labels = true, &_block)
2662
+ _undefined
2663
+ end
2664
+
2665
+ # Defined in specific gRPC or REST files.
2666
+ def expected_operation_message2
2667
+ _undefined
2668
+ end
2669
+
2670
+ # Parse timestamp and convert it to a hash with the "seconds" and "nanos" keys
2671
+ # if necessary.
2672
+ # Defined in specific gRPC or REST files.
2673
+ def timestamp_parse(_timestamp)
2674
+ _undefined
2675
+ end
2676
+
2677
+ def _undefined
2678
+ raise "Method #{__callee__} is unimplemented and needs to be overridden."
2679
+ end
2680
+ end
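+ # A hedged sketch (kept as a comment so it does not run here) of how a
+ # concrete test file might mix in this module and override the abstract
+ # hooks above. The class name, status codes and the subset of overrides
+ # shown are illustrative; the real implementations live in the gRPC- and
+ # REST-specific test files.
+ #
+ #   class GoogleCloudOutputTest < Test::Unit::TestCase
+ #     include BaseTest
+ #
+ #     private
+ #
+ #     def use_grpc
+ #       false
+ #     end
+ #
+ #     def ok_status_code
+ #       200
+ #     end
+ #
+ #     def client_error_status_code
+ #       401
+ #     end
+ #
+ #     def server_error_status_code
+ #       500
+ #     end
+ #
+ #     # ...plus create_driver, setup_logging_stubs, mock_error_type,
+ #     # verify_log_entries, latency_conversion, timestamp_parse, etc.
+ #   end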