rudder-sdk-ruby 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 0e6ef6e7b5604161a3c1720d185dfb7be510c5ba5a264ccf3ab12ce8b03e7d45
4
+ data.tar.gz: a4bf7a438fb923c4fdb131a4e0248a605da49a72e9126d5598f80c314ba16524
5
+ SHA512:
6
+ metadata.gz: f5effd4ca17a51f69cae929507d157fbfa769d7a2a36bead5bd1e16149e2bd2b7505c3f8c4085742345aaa686447b5fbbb061f9ff8cfd10597b43397f83a252a
7
+ data.tar.gz: 757f320ec99b316ca04982772e8b80a41d44895ad2f483e5b33f0ff5f8bcec2c2741185c68afd9a887ac8448fe812fbb9f83f898b436172840a6779d4fcda5df
data/bin/analytics ADDED
@@ -0,0 +1,111 @@
1
#!/usr/bin/env ruby
# frozen_string_literal: true

# Command-line simulator for the Rudder analytics SDK.
#
# Usage:
#   analytics --type=<type> --writeKey=<writeKey> --dataPlaneUrl=<url> [options]
# Run in dev with: ruby -Ilib bin/analytics --type=<type> ...

require 'rudder/analytics'
require 'rubygems'
require 'commander/import'
require 'time'
require 'json'

program :name, 'simulator.rb'
program :version, '0.0.1'
program :description, 'scripting simulator'

# Parses a JSON string into a Hash; returns nil when str is nil so optional
# JSON flags can be passed straight through.
def json_hash(str)
  JSON.parse(str) if str
end

default_command :send

command :send do |c|
  c.description = 'send a Rudder message'

  c.option '--writeKey=<writeKey>', String, 'the Rudder writeKey'
  c.option '--dataPlaneUrl=<dataPlaneUrl>', String, 'the Rudder data plane URL'
  c.option '--type=<type>', String, 'the message type'

  c.option '--userId=<userId>', String, 'the user id to send the event as'
  c.option '--anonymousId=<anonymousId>', String, 'the anonymous user id to send the event as'
  c.option '--context=<context>', 'additional context for the event (JSON-encoded)'
  c.option '--integrations=<integrations>', 'additional integrations for the event (JSON-encoded)'

  c.option '--event=<event>', String, 'the event name to send with the event'
  c.option '--properties=<properties>', 'the event properties to send (JSON-encoded)'

  c.option '--name=<name>', 'name of the screen or page to send with the message'

  c.option '--traits=<traits>', 'the identify/group traits to send (JSON-encoded)'

  c.option '--groupId=<groupId>', String, 'the group id'
  c.option '--previousId=<previousId>', String, 'the previous id'

  c.action do |_args, options|
    # NOTE: use a local variable, not a constant — the original assigned to
    # `Analytics` here, which emits "already initialized constant" warnings
    # when the action runs more than once and shadows Rudder::Analytics.
    analytics = Rudder::Analytics.new(
      write_key: options.writeKey,
      data_plane_url: options.dataPlaneUrl,
      on_error: proc { |_status, msg| print msg }
    )

    # Fields shared by every message type.
    common = {
      user_id: options.userId,
      anonymous_id: options.anonymousId,
      context: json_hash(options.context),
      integrations: json_hash(options.integrations)
    }

    case options.type
    when 'track'
      analytics.track(common.merge(event: options.event,
                                   properties: json_hash(options.properties)))
    when 'page'
      analytics.page(common.merge(name: options.name,
                                  properties: json_hash(options.properties)))
    when 'screen'
      analytics.screen(common.merge(name: options.name,
                                    properties: json_hash(options.properties)))
    when 'identify'
      analytics.identify(common.merge(traits: json_hash(options.traits)))
    when 'group'
      analytics.group(common.merge(group_id: options.groupId,
                                   traits: json_hash(options.traits)))
    when 'alias'
      analytics.alias(common.merge(previous_id: options.previousId))
    else
      raise "Invalid Message Type #{options.type}"
    end
    analytics.flush
  end
end
@@ -0,0 +1,51 @@
1
# frozen_string_literal: true

require 'rudder/analytics/defaults'

module Rudder
  class Analytics
    # Computes exponentially growing, jittered retry intervals.
    class BackoffPolicy
      include Rudder::Analytics::Defaults::BackoffPolicy

      # @param [Hash] opts
      # @option opts [Numeric] :min_timeout_ms minimum backoff timeout
      # @option opts [Numeric] :max_timeout_ms maximum backoff timeout
      # @option opts [Numeric] :multiplier growth factor applied per attempt
      # @option opts [Numeric] :randomization_factor size of the random jitter
      #   range around each interval
      def initialize(opts = {})
        @min_timeout_ms       = opts[:min_timeout_ms]       || MIN_TIMEOUT_MS
        @max_timeout_ms       = opts[:max_timeout_ms]       || MAX_TIMEOUT_MS
        @multiplier           = opts[:multiplier]           || MULTIPLIER
        @randomization_factor = opts[:randomization_factor] || RANDOMIZATION_FACTOR
        @attempts             = 0
      end

      # @return [Numeric] the next backoff interval in milliseconds, capped at
      #   :max_timeout_ms. Each call advances the attempt counter.
      def next_interval
        base = @min_timeout_ms * (@multiplier**@attempts)
        jittered = add_jitter(base, @randomization_factor)
        @attempts += 1
        [jittered, @max_timeout_ms].min
      end

      private

      # Randomly shifts +base+ up or down by at most
      # +base * randomization_factor+.
      def add_jitter(base, randomization_factor)
        roll = rand
        offset = roll * base * randomization_factor
        roll < 0.5 ? base - offset : base + offset
      end
    end
  end
end
@@ -0,0 +1,205 @@
1
# frozen_string_literal: true

require 'time'
require 'uri'
require 'net/http'

# NOTE: the original required 'rudder/analytics/defaults' twice; once is enough.
require 'rudder/analytics/defaults'
require 'rudder/analytics/logging'
require 'rudder/analytics/utils'
require 'rudder/analytics/worker'

module Rudder
  class Analytics
    # Public API of the SDK: queues messages and hands them to a background
    # worker thread that batches and posts them to the data plane.
    class Client
      include Rudder::Analytics::Utils
      include Rudder::Analytics::Logging

      # @param [Hash] opts
      # @option opts [String] :write_key Your project's write_key
      # @option opts [String] :data_plane_url Your data plane URL
      # @option opts [FixNum] :max_queue_size Maximum number of calls that may
      #   remain queued before new events are dropped.
      # @option opts [Proc] :on_error Handles error calls from the API.
      def initialize(opts = {})
        symbolize_keys!(opts)

        @queue = Queue.new
        @write_key = opts[:write_key]
        @data_plane_url = opts[:data_plane_url]
        @max_queue_size = opts[:max_queue_size] || Defaults::Queue::MAX_SIZE
        @worker_mutex = Mutex.new
        @worker = Worker.new(@queue, @data_plane_url, @write_key, opts)
        @worker_thread = nil

        uri = URI(opts[:data_plane_url])
        @host = uri.host
        @port = uri.port

        check_write_key!

        # Ask the worker thread to wind down when the process exits.
        at_exit { @worker_thread && @worker_thread[:should_exit] = true }
      end

      # Synchronously waits until the worker has flushed the queue.
      #
      # Use only for scripts which are not long-running, and will specifically
      # exit
      def flush
        while !@queue.empty? || @worker.is_requesting?
          ensure_worker_running
          sleep(0.1)
        end
      end

      # @!macro common_attrs
      #   @option attrs [String] :anonymous_id ID for a user when you don't know
      #     who they are yet. (optional but you must provide either an
      #     `anonymous_id` or `user_id`)
      #   @option attrs [Hash] :context ({})
      #   @option attrs [Hash] :integrations What integrations this event
      #     goes to (optional)
      #   @option attrs [String] :message_id ID that uniquely
      #     identifies a message across the API. (optional)
      #   @option attrs [Time] :timestamp When the event occurred (optional)
      #   @option attrs [String] :user_id The ID for this user in your database
      #     (optional but you must provide either an `anonymous_id` or `user_id`)
      #   @option attrs [Hash] :options Options such as user traits (optional)

      # Tracks an event
      #
      # @see https://segment.com/docs/sources/server/ruby/#track
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :event Event name
      # @option attrs [Hash] :properties Event properties (optional)
      # @macro common_attrs
      def track(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_track(attrs))
      end

      # Identifies a user
      #
      # @see https://segment.com/docs/sources/server/ruby/#identify
      #
      # @param [Hash] attrs
      #
      # @option attrs [Hash] :traits User traits (optional)
      # @macro common_attrs
      def identify(attrs)
        # Debug printf ("Inside Identifyu") removed — it wrote to STDOUT on
        # every identify call.
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_identify(attrs))
      end

      # Aliases a user from one id to another
      #
      # @see https://segment.com/docs/sources/server/ruby/#alias
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :previous_id The ID to alias from
      # @macro common_attrs
      def alias(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_alias(attrs))
      end

      # Associates a user identity with a group.
      #
      # @see https://segment.com/docs/sources/server/ruby/#group
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :group_id The ID of the group
      # @option attrs [Hash] :traits User traits (optional)
      # @macro common_attrs
      def group(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_group(attrs))
      end

      # Records a page view
      #
      # @see https://segment.com/docs/sources/server/ruby/#page
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :name Name of the page
      # @option attrs [Hash] :properties Page properties (optional)
      # @macro common_attrs
      def page(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_page(attrs))
      end

      # Records a screen view (for a mobile app)
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :name Name of the screen
      # @option attrs [Hash] :properties Screen properties (optional)
      # @option attrs [String] :category The screen category (optional)
      # @macro common_attrs
      def screen(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_screen(attrs))
      end

      # @return [Fixnum] number of messages in the queue
      def queued_messages
        @queue.length
      end

      private

      # private: Enqueues the action.
      #
      # returns Boolean of whether the item was added to the queue.
      def enqueue(action)
        # add our request id for tracing purposes
        action[:messageId] ||= uid

        if @queue.length < @max_queue_size
          @queue << action
          ensure_worker_running

          true
        else
          logger.warn(
            'Queue is full, dropping events. The :max_queue_size ' \
            'configuration parameter can be increased to prevent this from ' \
            'happening.'
          )
          false
        end
      end

      # private: Checks that the write_key is properly initialized
      def check_write_key!
        raise ArgumentError, 'Write key must be initialized' if @write_key.nil?
      end

      # private: Starts the background worker thread exactly once (double-checked
      # under @worker_mutex so concurrent enqueues don't spawn two workers).
      def ensure_worker_running
        return if worker_running?

        @worker_mutex.synchronize do
          return if worker_running?

          @worker_thread = Thread.new do
            @worker.run
          end
        end
      end

      def worker_running?
        @worker_thread&.alive?
      end
    end
  end
end
@@ -0,0 +1,39 @@
1
# frozen_string_literal: true

module Rudder
  class Analytics
    # Default configuration values shared across the SDK.
    module Defaults
      # HTTP transport defaults used by Request.
      module Request
        HOST = 'localhost'
        PORT = 8080
        PATH = '/v1/batch'
        DATA_PLANE_URL = 'http://localhost:8080/v1/batch'
        SSL = false
        HEADERS = {
          'Accept' => 'application/json',
          'Content-Type' => 'application/json',
          'User-Agent' => "rudderanalytics-ruby/#{Analytics::VERSION}"
        }
        RETRIES = 10
      end

      # Client-side message queue limits.
      module Queue
        MAX_SIZE = 10_000
      end

      # Per-message size limit.
      module Message
        MAX_BYTES = 32_768 # 32 KB
      end

      # Per-batch limits.
      module MessageBatch
        MAX_BYTES = 512_000 # 500 KB
        MAX_SIZE = 100
      end

      # Retry backoff tuning (see BackoffPolicy).
      module BackoffPolicy
        MIN_TIMEOUT_MS = 100
        MAX_TIMEOUT_MS = 10_000
        MULTIPLIER = 1.5
        RANDOMIZATION_FACTOR = 0.5
      end
    end
  end
end
@@ -0,0 +1,190 @@
1
# frozen_string_literal: true

# FIX: this file uses Rudder::Analytics::Utils (via the include below) but
# never required it, so loading field_parser before utils raised NameError.
require 'rudder/analytics/utils'

module Rudder
  class Analytics
    # Handles parsing fields according to the Segment Spec
    #
    # @see https://segment.com/docs/spec/
    class FieldParser
      class << self
        include Rudder::Analytics::Utils

        # In addition to the common fields, track accepts:
        #
        # - "event"
        # - "properties"
        def parse_for_track(fields)
          common = parse_common_fields(fields)

          event = fields[:event]
          properties = fields[:properties] || {}

          check_presence!(event, 'event')
          check_is_hash!(properties, 'properties')

          isoify_dates! properties

          common.merge({
            :type => 'track',
            :event => event.to_s,
            :properties => properties
          })
        end

        # In addition to the common fields, identify accepts:
        #
        # - "traits"
        def parse_for_identify(fields)
          common = parse_common_fields(fields)

          traits = fields[:traits] || {}
          check_is_hash!(traits, 'traits')
          isoify_dates! traits

          common.merge({
            :type => 'identify',
            :traits => traits
          })
        end

        # In addition to the common fields, alias accepts:
        #
        # - "previous_id"
        def parse_for_alias(fields)
          common = parse_common_fields(fields)

          previous_id = fields[:previous_id]
          check_presence!(previous_id, 'previous_id')

          common.merge({
            :type => 'alias',
            :previousId => previous_id
          })
        end

        # In addition to the common fields, group accepts:
        #
        # - "group_id"
        # - "traits"
        def parse_for_group(fields)
          common = parse_common_fields(fields)

          group_id = fields[:group_id]
          traits = fields[:traits] || {}

          check_presence!(group_id, 'group_id')
          check_is_hash!(traits, 'traits')

          isoify_dates! traits

          common.merge({
            :type => 'group',
            :groupId => group_id,
            :traits => traits
          })
        end

        # In addition to the common fields, page accepts:
        #
        # - "name"
        # - "properties"
        def parse_for_page(fields)
          common = parse_common_fields(fields)

          name = fields[:name] || ''
          properties = fields[:properties] || {}

          check_is_hash!(properties, 'properties')

          isoify_dates! properties

          common.merge({
            :type => 'page',
            :name => name.to_s,
            :properties => properties
          })
        end

        # In addition to the common fields, screen accepts:
        #
        # - "name"
        # - "properties"
        # - "category" (Not in spec, retained for backward compatibility)
        def parse_for_screen(fields)
          common = parse_common_fields(fields)

          name = fields[:name]
          properties = fields[:properties] || {}
          category = fields[:category]

          check_presence!(name, 'name')
          check_is_hash!(properties, 'properties')

          isoify_dates! properties

          parsed = common.merge({
            :type => 'screen',
            :name => name,
            :properties => properties
          })

          parsed[:category] = category if category

          parsed
        end

        private

        # Validates and normalizes the fields every message type shares:
        # context (with library info injected), messageId, timestamp, and the
        # optional userId/anonymousId/integrations/options passthroughs.
        def parse_common_fields(fields)
          timestamp = fields[:timestamp] || Time.new
          message_id = fields[:message_id].to_s if fields[:message_id]
          context = fields[:context] || {}

          check_user_id! fields
          check_timestamp! timestamp

          add_context! context

          parsed = {
            :context => context,
            :messageId => message_id,
            :timestamp => datetime_in_iso8601(timestamp)
          }

          parsed[:userId] = fields[:user_id] if fields[:user_id]
          parsed[:anonymousId] = fields[:anonymous_id] if fields[:anonymous_id]
          parsed[:integrations] = fields[:integrations] if fields[:integrations]

          # Not in spec, retained for backward compatibility
          parsed[:options] = fields[:options] if fields[:options]

          parsed
        end

        def check_user_id!(fields)
          raise ArgumentError, 'Must supply either user_id or anonymous_id' unless fields[:user_id] || fields[:anonymous_id]
        end

        def check_timestamp!(timestamp)
          raise ArgumentError, 'Timestamp must be a Time' unless timestamp.is_a? Time
        end

        # Stamps the sending library's name/version into the message context.
        def add_context!(context)
          context[:library] = { :name => 'rudderanalytics-ruby', :version => Rudder::Analytics::VERSION.to_s }
        end

        # private: Ensures that a string is non-empty
        #
        # obj - String|Number that must be non-blank
        # name - Name of the validated value
        def check_presence!(obj, name)
          raise ArgumentError, "#{name} must be given" if obj.nil? || (obj.is_a?(String) && obj.empty?)
        end

        def check_is_hash!(obj, name)
          raise ArgumentError, "#{name} must be a Hash" unless obj.is_a? Hash
        end
      end
    end
  end
end
@@ -0,0 +1,62 @@
1
# frozen_string_literal: true

require 'logger'

module Rudder
  class Analytics
    # Decorates another logger so every message carries a fixed prefix.
    class PrefixedLogger
      def initialize(logger, prefix)
        @logger = logger
        @prefix = prefix
      end

      # Generate debug/info/warn/error, each forwarding the prefixed message
      # to the wrapped logger at the same severity.
      %i[debug info warn error].each do |severity|
        define_method(severity) do |msg|
          @logger.public_send(severity, "#{@prefix} #{msg}")
        end
      end
    end

    # Mix-in that gives classes (and their instances) a shared, lazily-built
    # `logger` method.
    module Logging
      class << self
        attr_writer :logger

        # Rails' logger when Rails is loaded, otherwise a STDOUT logger;
        # either way wrapped with the gem's prefix. Built once and cached.
        def logger
          @logger ||= begin
            base_logger =
              if defined?(Rails)
                Rails.logger
              else
                stdout_logger = Logger.new STDOUT
                stdout_logger.progname = 'Rudder::Analytics'
                stdout_logger
              end
            PrefixedLogger.new(base_logger, '[rudderanalytics-ruby]')
          end
        end
      end

      # Including this module also exposes `.logger` on the including class.
      def self.included(base)
        class << base
          def logger
            Logging.logger
          end
        end
      end

      def logger
        Logging.logger
      end
    end
  end
end
@@ -0,0 +1,75 @@
1
# frozen_string_literal: true

require 'forwardable'
require 'rudder/analytics/logging'

module Rudder
  class Analytics
    # A batch of `Message`s to be sent to the API. Tracks its own serialized
    # size so the worker knows when the batch is full.
    class MessageBatch
      class JSONGenerationError < StandardError; end

      extend Forwardable
      include Rudder::Analytics::Logging
      include Rudder::Analytics::Defaults::MessageBatch

      def_delegators :@messages, :to_json, :empty?, :length

      def initialize(max_message_count)
        @messages = []
        @max_message_count = max_message_count
        @json_size = 0
      end

      # Appends a message, dropping (with a logged error) any message whose
      # JSON form exceeds the per-message limit. Raises JSONGenerationError
      # when the message cannot be serialized at all.
      def <<(message)
        serialized =
          begin
            message.to_json
          rescue StandardError => e
            raise JSONGenerationError, "Serialization error: #{e}"
          end

        serialized_size = serialized.bytesize
        if message_too_big?(serialized_size)
          logger.error('a message exceeded the maximum allowed size')
        else
          @messages << message
          @json_size += serialized_size + 1 # One byte for the comma
        end
      end

      def full?
        item_count_exhausted? || size_exhausted?
      end

      # Empties the batch and resets the running size counter.
      def clear
        @messages.clear
        @json_size = 0
      end

      private

      def item_count_exhausted?
        @messages.length >= @max_message_count
      end

      def message_too_big?(message_json_size)
        message_json_size > Defaults::Message::MAX_BYTES
      end

      # We consider the max size here as just enough to leave room for one more
      # message of the largest size possible. This is a shortcut that allows us
      # to use a native Ruby `Queue` that doesn't allow peeking. The tradeoff
      # here is that we might fit in less messages than possible into a batch.
      #
      # The alternative is to use our own `Queue` implementation that allows
      # peeking, and to consider the next message size when calculating whether
      # the message can be accomodated in this batch.
      def size_exhausted?
        @json_size >= (MAX_BYTES - Defaults::Message::MAX_BYTES)
      end
    end
  end
end
@@ -0,0 +1,157 @@
1
# frozen_string_literal: true

require 'rudder/analytics/defaults'
require 'rudder/analytics/utils'
require 'rudder/analytics/response'
require 'rudder/analytics/logging'
require 'rudder/analytics/backoff_policy'
require 'net/http'
require 'net/https'
require 'json'
require 'uri'
# FIX: removed `require 'pry'` — a debugging console is not a runtime
# dependency of the gem and raised LoadError wherever pry wasn't installed.

module Rudder
  class Analytics
    # Posts serialized message batches to the data plane with bounded,
    # jittered retries.
    class Request
      include Rudder::Analytics::Defaults::Request
      include Rudder::Analytics::Utils
      include Rudder::Analytics::Logging

      # public: Creates a new request object to send analytics batch
      #
      # @option options [String] :data_plane_url full batch endpoint URL
      # @option options [Hash] :headers HTTP headers (defaults to HEADERS)
      # @option options [String] :path request path (defaults to PATH)
      # @option options [Integer] :retries retry attempts (defaults to RETRIES)
      # @option options [BackoffPolicy] :backoff_policy retry interval source
      def initialize(options = {})
        options[:host] ||= HOST
        options[:port] ||= PORT
        options[:ssl] ||= SSL
        @headers = options[:headers] || HEADERS
        @path = options[:path] || PATH
        @retries = options[:retries] || RETRIES
        @backoff_policy =
          options[:backoff_policy] || Rudder::Analytics::BackoffPolicy.new

        # FIX: removed the block of debug printf banners that dumped the data
        # plane URL, host and port to STDOUT on every Request construction.
        uri = URI(options[:data_plane_url] || DATA_PLANE_URL)

        http = Net::HTTP.new(uri.host, uri.port)
        http.use_ssl = options[:ssl]
        http.read_timeout = 8
        http.open_timeout = 4

        @http = http
      end

      # public: Posts the write key and batch of messages to the API.
      #
      # returns - Response of the status and error if it exists
      def post(write_key, batch)
        logger.debug("Sending request for #{batch.length} items")

        last_response, exception = retry_with_backoff(@retries) do
          status_code, body = send_request(write_key, batch)
          error = nil
          # rudder server now return 'OK'
          begin
            error = JSON.parse(body)['error']
          rescue StandardError
            # Body was not a JSON object (e.g. plain 'OK'); surface it as-is.
            error = JSON.parse(body.to_json)
          end

          should_retry = should_retry_request?(status_code, body)
          logger.debug("Response status code: #{status_code}")
          logger.debug("Response error: #{error}") if error

          [Response.new(status_code, error), should_retry]
        end

        if exception
          logger.error(exception.message)
          exception.backtrace.each { |line| logger.error(line) }
          Response.new(-1, exception.to_s)
        else
          last_response
        end
      end

      private

      # 5xx and 429 are retryable; other 4xx are logged and not retried.
      def should_retry_request?(status_code, body)
        if status_code >= 500
          true # Server error
        elsif status_code == 429
          true # Rate limited
        elsif status_code >= 400
          logger.error(body)
          false # Client error. Do not retry, but log
        else
          false
        end
      end

      # Takes a block that returns [result, should_retry].
      #
      # Retries upto `retries_remaining` times, if `should_retry` is false or
      # an exception is raised. `@backoff_policy` is used to determine the
      # duration to sleep between attempts
      #
      # Returns [last_result, raised_exception]
      def retry_with_backoff(retries_remaining, &block)
        result, caught_exception = nil
        should_retry = false

        begin
          result, should_retry = yield
          return [result, nil] unless should_retry
        rescue StandardError => e
          should_retry = true
          caught_exception = e
        end

        if should_retry && (retries_remaining > 1)
          logger.debug("Retrying request, #{retries_remaining} retries left")
          sleep(@backoff_policy.next_interval.to_f / 1000)
          retry_with_backoff(retries_remaining - 1, &block)
        else
          [result, caught_exception]
        end
      end

      # Sends a request for the batch, returns [status_code, body]
      def send_request(write_key, batch)
        payload = JSON.generate(
          :sentAt => datetime_in_iso8601(Time.now),
          :batch => batch
        )
        request = Net::HTTP::Post.new(@path, @headers)
        request.basic_auth(write_key, nil)

        if self.class.stub
          logger.debug "stubbed request to #{@path}: " \
            "write key = #{write_key}, batch = #{JSON.generate(batch)}"

          [200, '{}']
        else
          # FIX: removed `puts payload` — it dumped every event payload
          # (potentially containing PII) to STDOUT in production.
          response = @http.request(request, payload)
          [response.code.to_i, response.body]
        end
      end

      class << self
        attr_writer :stub

        def stub
          @stub || ENV['STUB']
        end
      end
    end
  end
end
@@ -0,0 +1,17 @@
1
# frozen_string_literal: true

module Rudder
  class Analytics
    # public: Simple value object wrapping a status/error pair returned
    # from the API.
    class Response
      attr_reader :status, :error

      # status - Integer status code (defaults to 200)
      # error  - error payload, if any (defaults to nil)
      def initialize(status = 200, error = nil)
        @status = status
        @error = error
      end
    end
  end
end
@@ -0,0 +1,8 @@
1
+
2
+ psql --host=[REDACTED] --port=5432 --username=[REDACTED] --password --dbname=jobsdb
3
+
4
+ [SECURITY: a live production RDS hostname, username and plaintext database password were committed in this file. The credentials have been redacted here — rotate them immediately and remove this file from the package; secrets must never be committed or published.]
5
+
6
+
7
+
8
+
@@ -0,0 +1,42 @@
1
+ {
2
+ "sentAt": "2020-01-03T22:35:08.605+05:30",
3
+ "batch": [
4
+ {
5
+ "context": {
6
+ "ip": "8.8.8.8",
7
+ "library": { "name": "rudderanalytics-ruby", "version": "0.0.1" }
8
+ },
9
+ "messageId": "49d690ae-bae1-4995-a172-4cf5fbcd101b",
10
+ "timestamp": "2020-01-03T22:35:08.605+05:30",
11
+ "userId": "019mr8mf4r",
12
+ "type": "identify",
13
+ "traits": { "email": "sumanth", "friends": 872 }
14
+ },
15
+ {
16
+ "context": {
17
+ "library": { "name": "rudderanalytics-ruby", "version": "0.0.1" }
18
+ },
19
+ "messageId": "f9b52a95-c2da-4326-8cc8-4aa05780e0c2",
20
+ "timestamp": "2020-01-03T22:35:08.605+05:30",
21
+ "userId": "f4ca124298",
22
+ "type": "track",
23
+ "event": "Article Bookmarked",
24
+ "properties": {
25
+ "title": "Snow Fall",
26
+ "subtitle": "The Avalance at Tunnel Creek",
27
+ "author": "John Branch"
28
+ }
29
+ },
30
+ {
31
+ "context": {
32
+ "library": { "name": "rudderanalytics-ruby", "version": "0.0.1" }
33
+ },
34
+ "messageId": "79ce9bda-427d-4d42-a7db-4c4d5f7317f5",
35
+ "timestamp": "2020-01-03T22:35:08.605+05:30",
36
+ "userId": "user_id",
37
+ "type": "page",
38
+ "name": "Ruby library",
39
+ "properties": { "url": "https://test_page.in" }
40
+ }
41
+ ]
42
+ }
@@ -0,0 +1,91 @@
1
# frozen_string_literal: true

require 'securerandom'

module Rudder
  class Analytics
    # Assorted hash, date and id helpers shared across the SDK.
    module Utils
      extend self

      UTC_OFFSET_WITH_COLON = '%s%02d:%02d'
      UTC_OFFSET_WITHOUT_COLON = UTC_OFFSET_WITH_COLON.sub(':', '')

      # public: Return a new hash with keys converted from strings to symbols
      #
      def symbolize_keys(hash)
        hash.each_with_object({}) do |(key, value), acc|
          acc[key.to_sym] = value
        end
      end

      # public: Convert hash keys from strings to symbols in place
      #
      def symbolize_keys!(hash)
        hash.replace symbolize_keys(hash)
      end

      # public: Return a new hash with keys as strings
      #
      def stringify_keys(hash)
        hash.each_with_object({}) do |(key, value), acc|
          acc[key.to_s] = value
        end
      end

      # public: Returns a new hash with all the date values converted into
      # iso8601 strings
      #
      def isoify_dates(hash)
        hash.each_with_object({}) do |(key, value), acc|
          acc[key] = datetime_in_iso8601(value)
        end
      end

      # public: Converts all the date values into iso8601 strings in place
      #
      def isoify_dates!(hash)
        hash.replace isoify_dates(hash)
      end

      # public: Returns an RFC 4122 version-4-shaped uid string
      #
      def uid
        parts = SecureRandom.random_bytes(16).unpack('NnnnnN')
        parts[2] = (parts[2] & 0x0fff) | 0x4000 # version nibble = 4
        parts[3] = (parts[3] & 0x3fff) | 0x8000 # IETF variant bits
        format('%08x-%04x-%04x-%04x-%04x%08x', *parts)
      end

      # Converts Time/DateTime/Date values to iso8601 strings; anything else
      # is passed through unchanged.
      def datetime_in_iso8601(datetime)
        case datetime
        when Time     then time_in_iso8601(datetime)
        when DateTime then time_in_iso8601(datetime.to_time)
        when Date     then date_in_iso8601(datetime)
        else datetime
        end
      end

      def time_in_iso8601(time, fraction_digits = 3)
        fraction = ('.%06i' % time.usec)[0, fraction_digits + 1] if fraction_digits > 0

        "#{time.strftime('%Y-%m-%dT%H:%M:%S')}#{fraction}#{formatted_offset(time, true, 'Z')}"
      end

      def date_in_iso8601(date)
        date.strftime('%F')
      end

      # Returns +alternate_utc_string+ for UTC times, otherwise the numeric
      # utc offset formatted per +colon+.
      def formatted_offset(time, colon = true, alternate_utc_string = nil)
        time.utc? && alternate_utc_string || seconds_to_utc_offset(time.utc_offset, colon)
      end

      def seconds_to_utc_offset(seconds, colon = true)
        template = colon ? UTC_OFFSET_WITH_COLON : UTC_OFFSET_WITHOUT_COLON
        sign = seconds < 0 ? '-' : '+'
        template % [sign, seconds.abs / 3600, (seconds.abs % 3600) / 60]
      end
    end
  end
end
@@ -0,0 +1,7 @@
1
# frozen_string_literal: true

module Rudder
  class Analytics
    # Released version of the rudder-sdk-ruby gem.
    VERSION = '0.0.2'
  end
end
@@ -0,0 +1,69 @@
1
# frozen_string_literal: true

require 'rudder/analytics/defaults'
require 'rudder/analytics/message_batch'
require 'rudder/analytics/request'
require 'rudder/analytics/utils'

module Rudder
  class Analytics
    # Background consumer: drains the shared queue into batches and posts
    # each batch to the data plane.
    class Worker
      include Rudder::Analytics::Utils
      include Rudder::Analytics::Defaults
      include Rudder::Analytics::Logging

      # public: Creates a new worker
      #
      # The worker continuously takes messages off the queue and posts them
      # to the data plane.
      #
      # queue          - Queue synchronized between client and worker
      # data_plane_url - String URL of the Rudder data plane
      # write_key      - String of the project's Write key
      # options        - Hash of worker options
      #   batch_size - Fixnum of how many items to send in a batch
      #   on_error   - Proc invoked with (status, error) on failed posts
      #
      def initialize(queue, data_plane_url, write_key, options = {})
        symbolize_keys! options
        @queue = queue
        @data_plane_url = data_plane_url
        @write_key = write_key
        @on_error = options[:on_error] || proc { |status, error| }
        @batch = MessageBatch.new(options[:batch_size] || Defaults::MessageBatch::MAX_SIZE)
        @lock = Mutex.new
      end

      # public: Continuously runs the loop to check for new events. Returns
      # (letting the thread die) once the queue is drained; the client
      # restarts a worker thread when new events arrive.
      #
      def run
        until Thread.current[:should_exit]
          return if @queue.empty?

          # Fill the batch under the lock so is_requesting? stays accurate.
          @lock.synchronize do
            consume_message_from_queue! until @batch.full? || @queue.empty?
          end

          response = Request.new(:data_plane_url => @data_plane_url).post @write_key, @batch
          @on_error.call(response.status, response.error) unless response.status == 200

          @lock.synchronize { @batch.clear }
        end
      end

      # public: Check whether we have outstanding requests.
      #
      def is_requesting?
        @lock.synchronize { !@batch.empty? }
      end

      private

      # Moves one message from the queue into the batch, reporting (not
      # raising) serialization failures via the on_error callback.
      def consume_message_from_queue!
        @batch << @queue.pop
      rescue MessageBatch::JSONGenerationError => e
        @on_error.call(-1, e.to_s)
      end
    end
  end
end
@@ -0,0 +1,41 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'rudder/analytics/version'
4
+ require 'rudder/analytics/defaults'
5
+ require 'rudder/analytics/utils'
6
+ require 'rudder/analytics/field_parser'
7
+ require 'rudder/analytics/client'
8
+ require 'rudder/analytics/worker'
9
+ require 'rudder/analytics/request'
10
+ require 'rudder/analytics/response'
11
+ require 'rudder/analytics/logging'
12
+
13
module Rudder
  class Analytics
    # Initializes a new instance of {Rudder::Analytics::Client}, to which all
    # method calls are proxied.
    #
    # @param options includes options that are passed down to
    #   {Rudder::Analytics::Client#initialize}
    # @option options [Boolean] :stub (false) If true, requests don't hit the
    #   server and are stubbed to be successful.
    def initialize(options = {})
      # key? is the style-guide-preferred spelling of has_key?
      Request.stub = options[:stub] if options.key?(:stub)
      @client = Rudder::Analytics::Client.new(options)
    end

    # Forwards any method the underlying client responds to, so this class
    # presents the full Client API without re-declaring it.
    # NOTE(review): *args delegation loses keyword-argument semantics on
    # Ruby 3 — confirm Client methods only take positional/hash args.
    def method_missing(message, *args, &block)
      if @client.respond_to? message
        @client.send(message, *args, &block)
      else
        super
      end
    end

    # Keeps respond_to? in sync with method_missing, as required when
    # overriding method_missing.
    def respond_to_missing?(method_name, include_private = false)
      @client.respond_to?(method_name) || super
    end

    include Logging
  end
end
@@ -0,0 +1,3 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'rudder'
data/lib/rudder.rb ADDED
@@ -0,0 +1,3 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'rudder/analytics'
metadata ADDED
@@ -0,0 +1,172 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: rudder-sdk-ruby
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.2
5
+ platform: ruby
6
+ authors:
7
+ - Rudder
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2020-01-08 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: commander
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - "~>"
18
+ - !ruby/object:Gem::Version
19
+ version: '4.4'
20
+ type: :development
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - "~>"
25
+ - !ruby/object:Gem::Version
26
+ version: '4.4'
27
+ - !ruby/object:Gem::Dependency
28
+ name: rake
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: '10.3'
34
+ type: :development
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: '10.3'
41
+ - !ruby/object:Gem::Dependency
42
+ name: rspec
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - "~>"
46
+ - !ruby/object:Gem::Version
47
+ version: '3.0'
48
+ type: :development
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "~>"
53
+ - !ruby/object:Gem::Version
54
+ version: '3.0'
55
+ - !ruby/object:Gem::Dependency
56
+ name: tzinfo
57
+ requirement: !ruby/object:Gem::Requirement
58
+ requirements:
59
+ - - '='
60
+ - !ruby/object:Gem::Version
61
+ version: 1.2.1
62
+ type: :development
63
+ prerelease: false
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - '='
67
+ - !ruby/object:Gem::Version
68
+ version: 1.2.1
69
+ - !ruby/object:Gem::Dependency
70
+ name: activesupport
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - "~>"
74
+ - !ruby/object:Gem::Version
75
+ version: 6.0.2
76
+ type: :development
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - "~>"
81
+ - !ruby/object:Gem::Version
82
+ version: 6.0.2
83
+ - !ruby/object:Gem::Dependency
84
+ name: oj
85
+ requirement: !ruby/object:Gem::Requirement
86
+ requirements:
87
+ - - "~>"
88
+ - !ruby/object:Gem::Version
89
+ version: 3.6.2
90
+ type: :development
91
+ prerelease: false
92
+ version_requirements: !ruby/object:Gem::Requirement
93
+ requirements:
94
+ - - "~>"
95
+ - !ruby/object:Gem::Version
96
+ version: 3.6.2
97
+ - !ruby/object:Gem::Dependency
98
+ name: rubocop
99
+ requirement: !ruby/object:Gem::Requirement
100
+ requirements:
101
+ - - "~>"
102
+ - !ruby/object:Gem::Version
103
+ version: 0.78.0
104
+ type: :development
105
+ prerelease: false
106
+ version_requirements: !ruby/object:Gem::Requirement
107
+ requirements:
108
+ - - "~>"
109
+ - !ruby/object:Gem::Version
110
+ version: 0.78.0
111
+ - !ruby/object:Gem::Dependency
112
+ name: codecov
113
+ requirement: !ruby/object:Gem::Requirement
114
+ requirements:
115
+ - - "~>"
116
+ - !ruby/object:Gem::Version
117
+ version: 0.1.4
118
+ type: :development
119
+ prerelease: false
120
+ version_requirements: !ruby/object:Gem::Requirement
121
+ requirements:
122
+ - - "~>"
123
+ - !ruby/object:Gem::Version
124
+ version: 0.1.4
125
+ description: The Rudder ruby analytics library
126
+ email: soumya@rudderlabs.com
127
+ executables:
128
+ - analytics
129
+ extensions: []
130
+ extra_rdoc_files: []
131
+ files:
132
+ - bin/analytics
133
+ - lib/rudder-sdk-ruby.rb
134
+ - lib/rudder.rb
135
+ - lib/rudder/analytics.rb
136
+ - lib/rudder/analytics/backoff_policy.rb
137
+ - lib/rudder/analytics/client.rb
138
+ - lib/rudder/analytics/defaults.rb
139
+ - lib/rudder/analytics/field_parser.rb
140
+ - lib/rudder/analytics/logging.rb
141
+ - lib/rudder/analytics/message_batch.rb
142
+ - lib/rudder/analytics/request.rb
143
+ - lib/rudder/analytics/response.rb
144
+ - lib/rudder/analytics/test
145
+ - lib/rudder/analytics/test-am.json
146
+ - lib/rudder/analytics/utils.rb
147
+ - lib/rudder/analytics/version.rb
148
+ - lib/rudder/analytics/worker.rb
149
+ homepage: https://github.com/rudderlabs/rudder-sdk-ruby
150
+ licenses:
151
+ - MIT
152
+ metadata: {}
153
+ post_install_message:
154
+ rdoc_options: []
155
+ require_paths:
156
+ - lib
157
+ required_ruby_version: !ruby/object:Gem::Requirement
158
+ requirements:
159
+ - - ">="
160
+ - !ruby/object:Gem::Version
161
+ version: '2.0'
162
+ required_rubygems_version: !ruby/object:Gem::Requirement
163
+ requirements:
164
+ - - ">="
165
+ - !ruby/object:Gem::Version
166
+ version: '0'
167
+ requirements: []
168
+ rubygems_version: 3.1.2
169
+ signing_key:
170
+ specification_version: 4
171
+ summary: Rudder analytics library
172
+ test_files: []