datadog 2.13.0 → 2.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +39 -2
  3. data/ext/datadog_profiling_native_extension/collectors_thread_context.c +7 -6
  4. data/ext/datadog_profiling_native_extension/datadog_ruby_common.h +3 -0
  5. data/ext/datadog_profiling_native_extension/encoded_profile.c +69 -0
  6. data/ext/datadog_profiling_native_extension/encoded_profile.h +7 -0
  7. data/ext/datadog_profiling_native_extension/http_transport.c +25 -32
  8. data/ext/datadog_profiling_native_extension/profiling.c +2 -0
  9. data/ext/datadog_profiling_native_extension/stack_recorder.c +22 -21
  10. data/ext/libdatadog_api/datadog_ruby_common.h +3 -0
  11. data/lib/datadog/appsec/assets/waf_rules/README.md +50 -5
  12. data/lib/datadog/appsec/assets/waf_rules/processors.json +239 -10
  13. data/lib/datadog/appsec/assets/waf_rules/recommended.json +0 -1344
  14. data/lib/datadog/appsec/assets/waf_rules/scanners.json +926 -17
  15. data/lib/datadog/appsec/assets/waf_rules/strict.json +0 -1344
  16. data/lib/datadog/appsec/component.rb +19 -17
  17. data/lib/datadog/appsec/compressed_json.rb +40 -0
  18. data/lib/datadog/appsec/contrib/active_record/integration.rb +1 -1
  19. data/lib/datadog/appsec/event.rb +21 -50
  20. data/lib/datadog/appsec/remote.rb +4 -0
  21. data/lib/datadog/core/diagnostics/environment_logger.rb +1 -1
  22. data/lib/datadog/core/environment/agent_info.rb +1 -1
  23. data/lib/datadog/core/metrics/client.rb +1 -1
  24. data/lib/datadog/core/remote/client.rb +1 -1
  25. data/lib/datadog/core/remote/negotiation.rb +1 -1
  26. data/lib/datadog/core/remote/transport/config.rb +2 -2
  27. data/lib/datadog/core/remote/transport/http/client.rb +1 -1
  28. data/lib/datadog/core/remote/transport/http.rb +2 -2
  29. data/lib/datadog/core/remote/transport/negotiation.rb +2 -2
  30. data/lib/datadog/core/telemetry/metric.rb +5 -5
  31. data/lib/datadog/core/telemetry/request.rb +1 -1
  32. data/lib/datadog/core/transport/http/builder.rb +2 -2
  33. data/lib/datadog/core/transport/http.rb +1 -1
  34. data/lib/datadog/di/probe_notification_builder.rb +1 -1
  35. data/lib/datadog/di/transport/diagnostics.rb +2 -2
  36. data/lib/datadog/di/transport/http/client.rb +1 -1
  37. data/lib/datadog/di/transport/http/diagnostics.rb +0 -1
  38. data/lib/datadog/di/transport/http/input.rb +0 -1
  39. data/lib/datadog/di/transport/http.rb +0 -6
  40. data/lib/datadog/di/transport/input.rb +2 -2
  41. data/lib/datadog/kit/appsec/events.rb +6 -3
  42. data/lib/datadog/profiling/collectors/info.rb +3 -0
  43. data/lib/datadog/profiling/encoded_profile.rb +11 -0
  44. data/lib/datadog/profiling/exporter.rb +2 -3
  45. data/lib/datadog/profiling/ext.rb +0 -1
  46. data/lib/datadog/profiling/flush.rb +4 -7
  47. data/lib/datadog/profiling/http_transport.rb +10 -59
  48. data/lib/datadog/profiling/stack_recorder.rb +4 -4
  49. data/lib/datadog/profiling.rb +1 -0
  50. data/lib/datadog/tracing/contrib/active_record/integration.rb +1 -1
  51. data/lib/datadog/tracing/contrib/ext.rb +1 -0
  52. data/lib/datadog/tracing/contrib/karafka/configuration/settings.rb +27 -0
  53. data/lib/datadog/tracing/contrib/karafka/distributed/propagation.rb +46 -0
  54. data/lib/datadog/tracing/contrib/karafka/ext.rb +27 -0
  55. data/lib/datadog/tracing/contrib/karafka/integration.rb +45 -0
  56. data/lib/datadog/tracing/contrib/karafka/monitor.rb +66 -0
  57. data/lib/datadog/tracing/contrib/karafka/patcher.rb +71 -0
  58. data/lib/datadog/tracing/contrib/karafka.rb +37 -0
  59. data/lib/datadog/tracing/contrib/opensearch/configuration/settings.rb +17 -0
  60. data/lib/datadog/tracing/contrib/opensearch/ext.rb +9 -0
  61. data/lib/datadog/tracing/contrib/opensearch/patcher.rb +5 -1
  62. data/lib/datadog/tracing/contrib/rack/request_queue.rb +1 -1
  63. data/lib/datadog/tracing/contrib/sidekiq/server_tracer.rb +1 -1
  64. data/lib/datadog/tracing/contrib.rb +1 -0
  65. data/lib/datadog/tracing/span_event.rb +1 -1
  66. data/lib/datadog/tracing/transport/http/client.rb +1 -1
  67. data/lib/datadog/tracing/transport/http.rb +1 -1
  68. data/lib/datadog/tracing/transport/traces.rb +11 -6
  69. data/lib/datadog/version.rb +1 -1
  70. data/lib/datadog.rb +1 -1
  71. metadata +17 -6
@@ -20,34 +20,21 @@ module Datadog
20
20
  [:agent, agent_settings.url].freeze
21
21
  end
22
22
 
23
- status, result = validate_exporter(exporter_configuration)
23
+ status, result = self.class._native_validate_exporter(exporter_configuration)
24
24
 
25
25
  raise(ArgumentError, "Failed to initialize transport: #{result}") if status == :error
26
26
  end
27
27
 
28
28
  def export(flush)
29
- status, result = do_export(
30
- exporter_configuration: exporter_configuration,
31
- upload_timeout_milliseconds: @upload_timeout_milliseconds,
32
-
33
- # why "timespec"?
34
- # libdatadog represents time using POSIX's struct timespec, see
35
- # https://www.gnu.org/software/libc/manual/html_node/Time-Types.html
36
- # aka it represents the seconds part separate from the nanoseconds part
37
- start_timespec_seconds: flush.start.tv_sec,
38
- start_timespec_nanoseconds: flush.start.tv_nsec,
39
- finish_timespec_seconds: flush.finish.tv_sec,
40
- finish_timespec_nanoseconds: flush.finish.tv_nsec,
41
-
42
- pprof_file_name: flush.pprof_file_name,
43
- pprof_data: flush.pprof_data,
44
- code_provenance_file_name: flush.code_provenance_file_name,
45
- code_provenance_data: flush.code_provenance_data,
46
-
47
- tags_as_array: flush.tags_as_array,
48
- internal_metadata_json: flush.internal_metadata_json,
49
-
50
- info_json: flush.info_json
29
+ status, result = self.class._native_do_export(
30
+ exporter_configuration,
31
+ @upload_timeout_milliseconds,
32
+ flush,
33
+ # TODO: This is going to be removed once we move to libdatadog 17
34
+ flush.start.tv_sec,
35
+ flush.start.tv_nsec,
36
+ flush.finish.tv_sec,
37
+ flush.finish.tv_nsec,
51
38
  )
52
39
 
53
40
  if status == :ok
@@ -77,42 +64,6 @@ module Datadog
77
64
  site && api_key && Core::Environment::VariableHelpers.env_to_bool(Profiling::Ext::ENV_AGENTLESS, false)
78
65
  end
79
66
 
80
- def validate_exporter(exporter_configuration)
81
- self.class._native_validate_exporter(exporter_configuration)
82
- end
83
-
84
- def do_export(
85
- exporter_configuration:,
86
- upload_timeout_milliseconds:,
87
- start_timespec_seconds:,
88
- start_timespec_nanoseconds:,
89
- finish_timespec_seconds:,
90
- finish_timespec_nanoseconds:,
91
- pprof_file_name:,
92
- pprof_data:,
93
- code_provenance_file_name:,
94
- code_provenance_data:,
95
- tags_as_array:,
96
- internal_metadata_json:,
97
- info_json:
98
- )
99
- self.class._native_do_export(
100
- exporter_configuration,
101
- upload_timeout_milliseconds,
102
- start_timespec_seconds,
103
- start_timespec_nanoseconds,
104
- finish_timespec_seconds,
105
- finish_timespec_nanoseconds,
106
- pprof_file_name,
107
- pprof_data,
108
- code_provenance_file_name,
109
- code_provenance_data,
110
- tags_as_array,
111
- internal_metadata_json,
112
- info_json,
113
- )
114
- end
115
-
116
67
  def config_without_api_key
117
68
  "#{exporter_configuration[0]}: #{exporter_configuration[1]}"
118
69
  end
@@ -63,11 +63,11 @@ module Datadog
63
63
  status, result = @no_concurrent_synchronize_mutex.synchronize { self.class._native_serialize(self) }
64
64
 
65
65
  if status == :ok
66
- start, finish, encoded_pprof, profile_stats = result
66
+ start, finish, encoded_profile, profile_stats = result
67
67
 
68
68
  Datadog.logger.debug { "Encoded profile covering #{start.iso8601} to #{finish.iso8601}" }
69
69
 
70
- [start, finish, encoded_pprof, profile_stats]
70
+ [start, finish, encoded_profile, profile_stats]
71
71
  else
72
72
  error_message = result
73
73
 
@@ -82,9 +82,9 @@ module Datadog
82
82
  status, result = @no_concurrent_synchronize_mutex.synchronize { self.class._native_serialize(self) }
83
83
 
84
84
  if status == :ok
85
- _start, _finish, encoded_pprof = result
85
+ _start, _finish, encoded_profile = result
86
86
 
87
- encoded_pprof
87
+ encoded_profile
88
88
  else
89
89
  error_message = result
90
90
 
@@ -149,6 +149,7 @@ module Datadog
149
149
  require_relative 'profiling/collectors/thread_context'
150
150
  require_relative 'profiling/stack_recorder'
151
151
  require_relative 'profiling/exporter'
152
+ require_relative 'profiling/encoded_profile'
152
153
  require_relative 'profiling/flush'
153
154
  require_relative 'profiling/scheduler'
154
155
  require_relative 'profiling/tasks/setup'
@@ -57,7 +57,7 @@ module Datadog
57
57
  end
58
58
 
59
59
  def reset_resolver_cache
60
- @resolver&.reset_cache
60
+ @resolver&.reset_cache if defined?(@resolver)
61
61
  end
62
62
 
63
63
  Contrib::Component.register('activerecord') do |_config|
@@ -48,6 +48,7 @@ module Datadog
48
48
 
49
49
  module Messaging
50
50
  TAG_SYSTEM = 'messaging.system'
51
+ TAG_DESTINATION = 'messaging.destination'
51
52
  PEER_SERVICE_SOURCES = Array[Tracing::Metadata::Ext::NET::TAG_DESTINATION_NAME,
52
53
  Tracing::Metadata::Ext::TAG_PEER_HOSTNAME,
53
54
  Tracing::Metadata::Ext::NET::TAG_TARGET_HOST,].freeze
@@ -0,0 +1,27 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative '../../configuration/settings'
4
+ require_relative '../ext'
5
+
6
+ module Datadog
7
+ module Tracing
8
+ module Contrib
9
+ module Karafka
10
+ module Configuration
11
+ # @public_api
12
+ class Settings < Contrib::Configuration::Settings
13
+ option :enabled do |o|
14
+ o.type :bool
15
+ o.env Ext::ENV_ENABLED
16
+ o.default true
17
+ end
18
+
19
+ option :service_name
20
+
21
+ option :distributed_tracing, default: false, type: :bool
22
+ end
23
+ end
24
+ end
25
+ end
26
+ end
27
+ end
@@ -0,0 +1,46 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative '../../../distributed/fetcher'
4
+ require_relative '../../../distributed/propagation'
5
+ require_relative '../../../distributed/b3_multi'
6
+ require_relative '../../../distributed/b3_single'
7
+ require_relative '../../../distributed/datadog'
8
+ require_relative '../../../distributed/none'
9
+ require_relative '../../../distributed/trace_context'
10
+ require_relative '../../../configuration/ext'
11
+
12
+ module Datadog
13
+ module Tracing
14
+ module Contrib
15
+ module Karafka
16
+ module Distributed
17
+ # Extracts and injects propagation through Kafka message headers.
18
+ class Propagation < Tracing::Distributed::Propagation
19
+ def initialize(
20
+ propagation_style_inject:,
21
+ propagation_style_extract:,
22
+ propagation_extract_first:
23
+ )
24
+ super(
25
+ propagation_styles: {
26
+ Tracing::Configuration::Ext::Distributed::PROPAGATION_STYLE_B3_MULTI_HEADER =>
27
+ Tracing::Distributed::B3Multi.new(fetcher: Tracing::Distributed::Fetcher),
28
+ Tracing::Configuration::Ext::Distributed::PROPAGATION_STYLE_B3_SINGLE_HEADER =>
29
+ Tracing::Distributed::B3Single.new(fetcher: Tracing::Distributed::Fetcher),
30
+ Tracing::Configuration::Ext::Distributed::PROPAGATION_STYLE_DATADOG =>
31
+ Tracing::Distributed::Datadog.new(fetcher: Tracing::Distributed::Fetcher),
32
+ Tracing::Configuration::Ext::Distributed::PROPAGATION_STYLE_TRACE_CONTEXT =>
33
+ Tracing::Distributed::TraceContext.new(fetcher: Tracing::Distributed::Fetcher),
34
+ Tracing::Configuration::Ext::Distributed::PROPAGATION_STYLE_NONE => Tracing::Distributed::None.new
35
+ },
36
+ propagation_style_inject: propagation_style_inject,
37
+ propagation_style_extract: propagation_style_extract,
38
+ propagation_extract_first: propagation_extract_first
39
+ )
40
+ end
41
+ end
42
+ end
43
+ end
44
+ end
45
+ end
46
+ end
@@ -0,0 +1,27 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Datadog
4
+ module Tracing
5
+ module Contrib
6
+ module Karafka
7
+ module Ext
8
+ ENV_ENABLED = 'DD_TRACE_KARAFKA_ENABLED'
9
+
10
+ SPAN_MESSAGE_CONSUME = 'karafka.consume'
11
+ SPAN_WORKER_PROCESS = 'worker.process'
12
+
13
+ TAG_CONSUMER = 'kafka.consumer'
14
+ TAG_TOPIC = 'kafka.topic'
15
+ TAG_PARTITION = 'kafka.partition'
16
+ TAG_OFFSET = 'kafka.offset'
17
+ TAG_OFFSET_LAG = 'kafka.offset_lag'
18
+ TAG_MESSAGE_COUNT = 'kafka.message_count'
19
+ TAG_MESSAGE_KEY = 'kafka.message_key'
20
+ TAG_SYSTEM = 'kafka'
21
+
22
+ TAG_OPERATION_PROCESS_BATCH = 'consumer.process_batch'
23
+ end
24
+ end
25
+ end
26
+ end
27
+ end
@@ -0,0 +1,45 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative '../integration'
4
+ require_relative 'configuration/settings'
5
+ require_relative 'patcher'
6
+
7
+ module Datadog
8
+ module Tracing
9
+ module Contrib
10
+ module Karafka
11
+ # Description of the Karafka integration
12
+ class Integration
13
+ include Contrib::Integration
14
+
15
+ # Minimum version of the Karafka library that we support
16
+ # https://karafka.io/docs/Versions-Lifecycle-and-EOL/#versioning-strategy
17
+ MINIMUM_VERSION = Gem::Version.new('2.3.0')
18
+
19
+ # @public_api Changing the integration name or integration options can cause breaking changes
20
+ register_as :karafka, auto_patch: false
21
+
22
+ def self.version
23
+ Gem.loaded_specs['karafka']&.version
24
+ end
25
+
26
+ def self.loaded?
27
+ !defined?(::Karafka).nil?
28
+ end
29
+
30
+ def self.compatible?
31
+ super && version >= MINIMUM_VERSION
32
+ end
33
+
34
+ def new_configuration
35
+ Configuration::Settings.new
36
+ end
37
+
38
+ def patcher
39
+ Patcher
40
+ end
41
+ end
42
+ end
43
+ end
44
+ end
45
+ end
@@ -0,0 +1,66 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative 'ext'
4
+
5
+ module Datadog
6
+ module Tracing
7
+ module Contrib
8
+ module Karafka
9
+ # Custom monitor for Karafka.
10
+ # Creating a custom monitor, instead of subscribing to an event
11
+ # (e.g. `Karafka.monitor.subscribe 'worker.processed'`),
12
+ # is required because event subscriptions cannot wrap the event execution (`yield`).
13
+ module Monitor
14
+ TRACEABLE_EVENTS = %w[
15
+ worker.processed
16
+ ].freeze
17
+
18
+ def instrument(event_id, payload = EMPTY_HASH, &block)
19
+ return super unless TRACEABLE_EVENTS.include?(event_id)
20
+
21
+ Datadog::Tracing.trace(Ext::SPAN_WORKER_PROCESS) do |span|
22
+ job = payload[:job]
23
+ job_type = fetch_job_type(job.class)
24
+ consumer = job.executor.topic.consumer
25
+
26
+ action = case job_type
27
+ when 'Periodic', 'PeriodicNonBlocking'
28
+ 'tick'
29
+ when 'Shutdown'
30
+ 'shutdown'
31
+ when 'Revoked', 'RevokedNonBlocking'
32
+ 'revoked'
33
+ when 'Idle'
34
+ 'idle'
35
+ when 'Eofed', 'EofedNonBlocking'
36
+ 'eofed'
37
+ else
38
+ 'consume'
39
+ end
40
+
41
+ span.resource = "#{consumer}##{action}"
42
+
43
+ if action == 'consume'
44
+ span.set_tag(Ext::TAG_MESSAGE_COUNT, job.messages.count)
45
+ span.set_tag(Ext::TAG_PARTITION, job.executor.partition)
46
+ span.set_tag(Ext::TAG_OFFSET, job.messages.first.metadata.offset)
47
+ span.set_tag(Ext::TAG_CONSUMER, consumer)
48
+ span.set_tag(Contrib::Ext::Messaging::TAG_DESTINATION, job.executor.topic.name)
49
+ span.set_tag(Contrib::Ext::Messaging::TAG_SYSTEM, Ext::TAG_SYSTEM)
50
+ end
51
+
52
+ super
53
+ end
54
+ end
55
+
56
+ private
57
+
58
+ def fetch_job_type(job_class)
59
+ @job_types_cache ||= {}
60
+ @job_types_cache[job_class] ||= job_class.to_s.split('::').last
61
+ end
62
+ end
63
+ end
64
+ end
65
+ end
66
+ end
@@ -0,0 +1,71 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative '../patcher'
4
+ require_relative 'ext'
5
+ require_relative 'distributed/propagation'
6
+
7
+ module Datadog
8
+ module Tracing
9
+ module Contrib
10
+ module Karafka
11
+ # Patch to add tracing to Karafka::Messages::Messages
12
+ module MessagesPatch
13
+ def configuration
14
+ Datadog.configuration.tracing[:karafka]
15
+ end
16
+
17
+ def propagation
18
+ @propagation ||= Contrib::Karafka::Distributed::Propagation.new
19
+ end
20
+
21
+ # `each` is the most popular access point to Karafka messages,
22
+ # but not the only one
23
+ # Other access patterns do not have a straightforward tracing avenue
24
+ # (e.g. `my_batch_operation messages.payloads`)
25
+ # @see https://github.com/karafka/karafka/blob/b06d1f7c17818e1605f80c2bb573454a33376b40/README.md?plain=1#L29-L35
26
+ def each(&block)
27
+ @messages_array.each do |message|
28
+ if configuration[:distributed_tracing]
29
+ headers = if message.metadata.respond_to?(:raw_headers)
30
+ message.metadata.raw_headers
31
+ else
32
+ message.metadata.headers
33
+ end
34
+ trace_digest = Karafka.extract(headers)
35
+ Datadog::Tracing.continue_trace!(trace_digest) if trace_digest
36
+ end
37
+
38
+ Tracing.trace(Ext::SPAN_MESSAGE_CONSUME) do |span|
39
+ span.set_tag(Ext::TAG_OFFSET, message.metadata.offset)
40
+ span.set_tag(Contrib::Ext::Messaging::TAG_DESTINATION, message.topic)
41
+ span.set_tag(Contrib::Ext::Messaging::TAG_SYSTEM, Ext::TAG_SYSTEM)
42
+
43
+ span.resource = message.topic
44
+
45
+ yield message
46
+ end
47
+ end
48
+ end
49
+ end
50
+
51
+ # Patcher enables patching of 'karafka' module.
52
+ module Patcher
53
+ include Contrib::Patcher
54
+
55
+ module_function
56
+
57
+ def target_version
58
+ Integration.version
59
+ end
60
+
61
+ def patch
62
+ require_relative 'monitor'
63
+
64
+ ::Karafka::Instrumentation::Monitor.prepend(Monitor)
65
+ ::Karafka::Messages::Messages.prepend(MessagesPatch)
66
+ end
67
+ end
68
+ end
69
+ end
70
+ end
71
+ end
@@ -0,0 +1,37 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative 'component'
4
+ require_relative 'karafka/integration'
5
+ require_relative 'karafka/distributed/propagation'
6
+
7
+ module Datadog
8
+ module Tracing
9
+ module Contrib
10
+ # `Karafka` integration public API
11
+ module Karafka
12
+ def self.inject(digest, data)
13
+ raise 'Please invoke Datadog.configure at least once before calling this method' unless @propagation
14
+
15
+ @propagation.inject!(digest, data)
16
+ end
17
+
18
+ def self.extract(data)
19
+ raise 'Please invoke Datadog.configure at least once before calling this method' unless @propagation
20
+
21
+ @propagation.extract(data)
22
+ end
23
+
24
+ Contrib::Component.register('karafka') do |config|
25
+ tracing = config.tracing
26
+ tracing.propagation_style
27
+
28
+ @propagation = Karafka::Distributed::Propagation.new(
29
+ propagation_style_inject: tracing.propagation_style_inject,
30
+ propagation_style_extract: tracing.propagation_style_extract,
31
+ propagation_extract_first: tracing.propagation_extract_first
32
+ )
33
+ end
34
+ end
35
+ end
36
+ end
37
+ end
@@ -46,6 +46,23 @@ module Datadog
46
46
  o.type :string, nilable: true
47
47
  o.env Ext::ENV_PEER_SERVICE
48
48
  end
49
+
50
+ option :resource_pattern do |o|
51
+ o.type :string
52
+ o.env Ext::ENV_RESOURCE_PATTERN
53
+ o.default Ext::DEFAULT_RESOURCE_PATTERN
54
+ o.setter do |value|
55
+ next value if Ext::VALID_RESOURCE_PATTERNS.include?(value)
56
+
57
+ Datadog.logger.warn(
58
+ "Invalid resource pattern: #{value}. " \
59
+ "Supported values are: #{Ext::VALID_RESOURCE_PATTERNS.join(' | ')}. " \
60
+ "Using default value: #{Ext::DEFAULT_RESOURCE_PATTERN}."
61
+ )
62
+
63
+ Ext::DEFAULT_RESOURCE_PATTERN
64
+ end
65
+ end
49
66
  end
50
67
  end
51
68
  end
@@ -13,6 +13,15 @@ module Datadog
13
13
  # @!visibility private
14
14
  ENV_ANALYTICS_ENABLED = 'DD_TRACE_OPENSEARCH_ANALYTICS_ENABLED'
15
15
  ENV_ANALYTICS_SAMPLE_RATE = 'DD_TRACE_OPENSEARCH_ANALYTICS_SAMPLE_RATE'
16
+ ENV_RESOURCE_PATTERN = 'DD_TRACE_OPENSEARCH_RESOURCE_PATTERN'
17
+ ABSOLUTE_RESOURCE_PATTERN = 'absolute'
18
+ RELATIVE_RESOURCE_PATTERN = 'relative'
19
+ VALID_RESOURCE_PATTERNS = [
20
+ ABSOLUTE_RESOURCE_PATTERN,
21
+ RELATIVE_RESOURCE_PATTERN
22
+ ].freeze
23
+ # Default should be changed to RELATIVE in 3.0 to match the Elasticsearch integration
24
+ DEFAULT_RESOURCE_PATTERN = ABSOLUTE_RESOURCE_PATTERN
16
25
  DEFAULT_PEER_SERVICE_NAME = 'opensearch'
17
26
  SPAN_QUERY = 'opensearch.query'
18
27
  SPAN_TYPE_QUERY = 'opensearch'
@@ -77,7 +77,11 @@ module Datadog
77
77
  span.set_tag(Tracing::Metadata::Ext::TAG_PEER_HOSTNAME, host) if host
78
78
 
79
79
  # Define span resource
80
- quantized_url = OpenSearch::Quantize.format_url(url)
80
+ quantized_url = if datadog_configuration[:resource_pattern] == Ext::RELATIVE_RESOURCE_PATTERN
81
+ OpenSearch::Quantize.format_url(url.path)
82
+ else # Default to Ext::ABSOLUTE_RESOURCE_PATTERN
83
+ OpenSearch::Quantize.format_url(url)
84
+ end
81
85
  span.resource = "#{method} #{quantized_url}"
82
86
  Contrib::SpanAttributeSchema.set_peer_service!(span, Ext::PEER_SERVICE_SOURCES)
83
87
  rescue StandardError => e
@@ -17,7 +17,7 @@ module Datadog
17
17
 
18
18
  module_function
19
19
 
20
- def get_request_start(env, now = Time.now.utc)
20
+ def get_request_start(env, now = Core::Utils::Time.now.utc)
21
21
  header = env[REQUEST_START] || env[QUEUE_START]
22
22
  return unless header
23
23
 
@@ -61,7 +61,7 @@ module Datadog
61
61
  span.set_tag(Ext::TAG_JOB_RETRY_COUNT, job['retry_count'])
62
62
  span.set_tag(Ext::TAG_JOB_QUEUE, job['queue'])
63
63
  span.set_tag(Ext::TAG_JOB_WRAPPER, job['class']) if job['wrapped']
64
- span.set_tag(Ext::TAG_JOB_DELAY, 1000.0 * (Time.now.utc.to_f - job['enqueued_at'].to_f))
64
+ span.set_tag(Ext::TAG_JOB_DELAY, 1000.0 * (Core::Utils::Time.now.utc.to_f - job['enqueued_at'].to_f))
65
65
 
66
66
  args = job['args']
67
67
  if args && !args.empty?
@@ -55,6 +55,7 @@ require_relative 'contrib/httpclient/integration'
55
55
  require_relative 'contrib/httprb/integration'
56
56
  require_relative 'contrib/integration'
57
57
  require_relative 'contrib/kafka/integration'
58
+ require_relative 'contrib/karafka'
58
59
  require_relative 'contrib/lograge/integration'
59
60
  require_relative 'contrib/mongodb/integration'
60
61
  require_relative 'contrib/mysql2/integration'
@@ -33,7 +33,7 @@ module Datadog
33
33
 
34
34
  # OpenTelemetry SDK stores span event timestamps in nanoseconds (not seconds).
35
35
  # We will do the same here to avoid unnecessary conversions and inconsistencies.
36
- @time_unix_nano = time_unix_nano || (Time.now.to_r * 1_000_000_000).to_i
36
+ @time_unix_nano = time_unix_nano || (Core::Utils::Time.now.to_r * 1_000_000_000).to_i
37
37
  end
38
38
 
39
39
  # Converts the span event into a hash to be used by with the span tag serialization
@@ -14,7 +14,7 @@ module Datadog
14
14
 
15
15
  attr_reader :api, :logger
16
16
 
17
- def initialize(api, logger)
17
+ def initialize(api, logger: Datadog.logger)
18
18
  @api = api
19
19
  @logger = logger
20
20
  end
@@ -18,7 +18,7 @@ module Datadog
18
18
  # Pass a block to override any settings.
19
19
  def default(
20
20
  agent_settings:,
21
- logger:,
21
+ logger: Datadog.logger,
22
22
  api_version: nil,
23
23
  headers: nil
24
24
  )
@@ -51,7 +51,7 @@ module Datadog
51
51
  # @param encoder [Datadog::Core::Encoding::Encoder]
52
52
  # @param logger [Datadog::Core::Logger]
53
53
  # @param max_size [String] maximum acceptable payload size
54
- def initialize(encoder, logger, native_events_supported:, max_size: DEFAULT_MAX_PAYLOAD_SIZE)
54
+ def initialize(encoder, logger:, native_events_supported:, max_size: DEFAULT_MAX_PAYLOAD_SIZE)
55
55
  @encoder = encoder
56
56
  @logger = logger
57
57
  @native_events_supported = native_events_supported
@@ -80,7 +80,12 @@ module Datadog
80
80
  private
81
81
 
82
82
  def encode_one(trace)
83
- encoded = Encoder.encode_trace(encoder, trace, logger, native_events_supported: @native_events_supported)
83
+ encoded = Encoder.encode_trace(
84
+ encoder,
85
+ trace,
86
+ logger: logger,
87
+ native_events_supported: @native_events_supported
88
+ )
84
89
 
85
90
  if encoded.size > max_size
86
91
  # This single trace is too large, we can't flush it
@@ -98,7 +103,7 @@ module Datadog
98
103
  module Encoder
99
104
  module_function
100
105
 
101
- def encode_trace(encoder, trace, logger, native_events_supported:)
106
+ def encode_trace(encoder, trace, logger:, native_events_supported:)
102
107
  # Format the trace for transport
103
108
  TraceFormatter.format!(trace)
104
109
 
@@ -121,7 +126,7 @@ module Datadog
121
126
  class Transport
122
127
  attr_reader :client, :apis, :default_api, :current_api_id, :logger
123
128
 
124
- def initialize(apis, default_api, logger)
129
+ def initialize(apis, default_api, logger: Datadog.logger)
125
130
  @apis = apis
126
131
  @default_api = default_api
127
132
  @logger = logger
@@ -133,7 +138,7 @@ module Datadog
133
138
  encoder = current_api.encoder
134
139
  chunker = Datadog::Tracing::Transport::Traces::Chunker.new(
135
140
  encoder,
136
- logger,
141
+ logger: logger,
137
142
  native_events_supported: native_events_supported?
138
143
  )
139
144
 
@@ -194,7 +199,7 @@ module Datadog
194
199
  raise UnknownApiVersionError, api_id unless apis.key?(api_id)
195
200
 
196
201
  @current_api_id = api_id
197
- @client = HTTP::Client.new(current_api, logger)
202
+ @client = HTTP::Client.new(current_api, logger: logger)
198
203
  end
199
204
 
200
205
  # Queries the agent for native span events serialization support.
@@ -3,7 +3,7 @@
3
3
  module Datadog
4
4
  module VERSION
5
5
  MAJOR = 2
6
- MINOR = 13
6
+ MINOR = 15
7
7
  PATCH = 0
8
8
  PRE = nil
9
9
  BUILD = nil
data/lib/datadog.rb CHANGED
@@ -9,5 +9,5 @@ require_relative 'datadog/profiling'
9
9
  require_relative 'datadog/appsec'
10
10
  # Line probes will not work on Ruby < 2.6 because of lack of :script_compiled
11
11
  # trace point. Only load DI on supported Ruby versions.
12
- require_relative 'datadog/di' if RUBY_VERSION >= '2.6'
12
+ require_relative 'datadog/di' if RUBY_VERSION >= '2.6' && RUBY_ENGINE != 'jruby'
13
13
  require_relative 'datadog/kit'