analytics-ruby-rudder 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 4d2c29f49d1dd6478f4837712a4cae08930e7c7563293004e486f7c4f2d9ed26
+   data.tar.gz: 6e0bb5c66275f29c350755ed7b496da6e09154d715c11137b7feb4a02b29aa8e
+ SHA512:
+   metadata.gz: 19ca45ff753e76ee997d6c40e3a64287ee7cce7dbc966a1c628618f0ac732a5fe5264787bea8a40852020ff4e4b726e6e63a5fe75043edc430a06a898fb80aed
+   data.tar.gz: ddb877127e67b45dc060f3e6ba4416f976f4cf6a39bdac0e71c8d15f57b883c42cbe8668cb1bcc5bda377c8daea41e3c50ccfa721bcd1f7397220f73aa1ece85
@@ -0,0 +1,110 @@
+ #!/usr/bin/env ruby
+
+ require 'rudder/analytics'
+ require 'rubygems'
+ require 'commander/import'
+ require 'time'
+ require 'json'
+
+ program :name, 'simulator.rb'
+ program :version, '0.0.1'
+ program :description, 'scripting simulator'
+
+ def json_hash(str)
+   if str
+     return JSON.parse(str)
+   end
+ end
+
+ # analytics --type=<type> --writeKey=<writeKey> --dataPlaneUrl=<dataPlaneUrl> [options]
+
+ default_command :send
+
+ command :send do |c|
+   c.description = 'send a segment message'
+
+   c.option '--writeKey=<writeKey>', String, 'the Rudder writeKey'
+   c.option '--dataPlaneUrl=<dataPlaneUrl>', String, 'the Rudder data plane URL'
+   c.option '--type=<type>', String, 'The Segment message type'
+
+   c.option '--userId=<userId>', String, 'the user id to send the event as'
+   c.option '--anonymousId=<anonymousId>', String, 'the anonymous user id to send the event as'
+   c.option '--context=<context>', 'additional context for the event (JSON-encoded)'
+   c.option '--integrations=<integrations>', 'additional integrations for the event (JSON-encoded)'
+
+   c.option '--event=<event>', String, 'the event name to send with the event'
+   c.option '--properties=<properties>', 'the event properties to send (JSON-encoded)'
+
+   c.option '--name=<name>', 'name of the screen or page to send with the message'
+
+   c.option '--traits=<traits>', 'the identify/group traits to send (JSON-encoded)'
+
+   c.option '--groupId=<groupId>', String, 'the group id'
+   c.option '--previousId=<previousId>', String, 'the previous id'
+
+   c.action do |args, options|
+     Analytics = Rudder::Analytics.new({
+       write_key: options.writeKey,
+       data_plane_url: options.dataPlaneUrl,
+       on_error: Proc.new { |status, msg| print msg }
+     })
+
+     case options.type
+     when "track"
+       Analytics.track({
+         user_id: options.userId,
+         event: options.event,
+         anonymous_id: options.anonymousId,
+         properties: json_hash(options.properties),
+         context: json_hash(options.context),
+         integrations: json_hash(options.integrations)
+       })
+     when "page"
+       Analytics.page({
+         user_id: options.userId,
+         anonymous_id: options.anonymousId,
+         name: options.name,
+         properties: json_hash(options.properties),
+         context: json_hash(options.context),
+         integrations: json_hash(options.integrations)
+       })
+     when "screen"
+       Analytics.screen({
+         user_id: options.userId,
+         anonymous_id: options.anonymousId,
+         name: options.name,
+         properties: json_hash(options.properties),
+         context: json_hash(options.context),
+         integrations: json_hash(options.integrations)
+       })
+     when "identify"
+       Analytics.identify({
+         user_id: options.userId,
+         anonymous_id: options.anonymousId,
+         traits: json_hash(options.traits),
+         context: json_hash(options.context),
+         integrations: json_hash(options.integrations)
+       })
+     when "group"
+       Analytics.group({
+         user_id: options.userId,
+         anonymous_id: options.anonymousId,
+         group_id: options.groupId,
+         traits: json_hash(options.traits),
+         context: json_hash(options.context),
+         integrations: json_hash(options.integrations)
+       })
+     when "alias"
+       Analytics.alias({
+         previous_id: options.previousId,
+         user_id: options.userId,
+         anonymous_id: options.anonymousId,
+         context: json_hash(options.context),
+         integrations: json_hash(options.integrations)
+       })
+     else
+       raise "Invalid Message Type #{options.type}"
+     end
+     Analytics.flush
+   end
+ end
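
For context, a track call through this simulator might be issued as follows; the write key and data plane URL here are placeholders rather than values taken from the package:

  analytics --type=track --writeKey=WRITE_KEY --dataPlaneUrl=http://localhost:8080/v1/batch --userId=019mr8mf4r --event="Article Bookmarked" --properties='{"title": "Snow Fall"}'

The script then builds the corresponding Rudder::Analytics call shown above and flushes the queue before exiting.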
@@ -0,0 +1 @@
+ require 'rudder'
@@ -0,0 +1 @@
+ require 'rudder/analytics'
@@ -0,0 +1,39 @@
+ require 'rudder/analytics/version'
+ require 'rudder/analytics/defaults'
+ require 'rudder/analytics/utils'
+ require 'rudder/analytics/field_parser'
+ require 'rudder/analytics/client'
+ require 'rudder/analytics/worker'
+ require 'rudder/analytics/request'
+ require 'rudder/analytics/response'
+ require 'rudder/analytics/logging'
+
+ module Rudder
+   class Analytics
+     # Initializes a new instance of {Rudder::Analytics::Client}, to which all
+     # method calls are proxied.
+     #
+     # @param options includes options that are passed down to
+     #   {Rudder::Analytics::Client#initialize}
+     # @option options [Boolean] :stub (false) If true, requests don't hit the
+     #   server and are stubbed to be successful.
+     def initialize(options = {})
+       Request.stub = options[:stub] if options.has_key?(:stub)
+       @client = Rudder::Analytics::Client.new options
+     end
+
+     def method_missing(message, *args, &block)
+       if @client.respond_to? message
+         @client.send message, *args, &block
+       else
+         super
+       end
+     end
+
+     def respond_to_missing?(method_name, include_private = false)
+       @client.respond_to?(method_name) || super
+     end
+
+     include Logging
+   end
+ end
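
A rough usage sketch of this facade (the write key and URL below are placeholders): calls on the wrapper are forwarded to the underlying client via method_missing, and the :stub option routes delivery through the stubbed code path instead of the network:

  analytics = Rudder::Analytics.new(
    write_key: 'WRITE_KEY',                            # placeholder
    data_plane_url: 'http://localhost:8080/v1/batch',  # placeholder (the library default)
    stub: true                                         # requests are stubbed, not sent
  )
  analytics.track(user_id: '019mr8mf4r', event: 'Article Bookmarked')
  analytics.flush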
@@ -0,0 +1,49 @@
+ require 'rudder/analytics/defaults'
+
+ module Rudder
+   class Analytics
+     class BackoffPolicy
+       include Rudder::Analytics::Defaults::BackoffPolicy
+
+       # @param [Hash] opts
+       # @option opts [Numeric] :min_timeout_ms The minimum backoff timeout
+       # @option opts [Numeric] :max_timeout_ms The maximum backoff timeout
+       # @option opts [Numeric] :multiplier The value to multiply the current
+       #   interval with for each retry attempt
+       # @option opts [Numeric] :randomization_factor The randomization factor
+       #   to use to create a range around the retry interval
+       def initialize(opts = {})
+         @min_timeout_ms = opts[:min_timeout_ms] || MIN_TIMEOUT_MS
+         @max_timeout_ms = opts[:max_timeout_ms] || MAX_TIMEOUT_MS
+         @multiplier = opts[:multiplier] || MULTIPLIER
+         @randomization_factor = opts[:randomization_factor] || RANDOMIZATION_FACTOR
+
+         @attempts = 0
+       end
+
+       # @return [Numeric] the next backoff interval, in milliseconds.
+       def next_interval
+         interval = @min_timeout_ms * (@multiplier**@attempts)
+         interval = add_jitter(interval, @randomization_factor)
+
+         @attempts += 1
+
+         [interval, @max_timeout_ms].min
+       end
+
+       private
+
+       def add_jitter(base, randomization_factor)
+         random_number = rand
+         max_deviation = base * randomization_factor
+         deviation = random_number * max_deviation
+
+         if random_number < 0.5
+           base - deviation
+         else
+           base + deviation
+         end
+       end
+     end
+   end
+ end
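
To make the schedule concrete: with the defaults from rudder/analytics/defaults (100 ms minimum, multiplier 1.5, randomization factor 0.5, 10 s cap), the successive base intervals are 100, 150, 225, 337.5, ... ms, each then jittered by up to ±50% and capped at 10 000 ms. A quick sketch:

  policy = Rudder::Analytics::BackoffPolicy.new
  intervals = 4.times.map { policy.next_interval }
  # => four values drawn around 100, 150, 225 and 337.5 ms respectively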
@@ -0,0 +1,202 @@
+ require 'thread'
+ require 'time'
+ require 'uri'
+
+ require 'rudder/analytics/defaults'
+ require 'rudder/analytics/logging'
+ require 'rudder/analytics/utils'
+ require 'rudder/analytics/worker'
+ require 'net/http'
+
+ module Rudder
+   class Analytics
+     class Client
+       include Rudder::Analytics::Utils
+       include Rudder::Analytics::Logging
+
+       # @param [Hash] opts
+       # @option opts [String] :write_key Your project's write_key
+       # @option opts [String] :data_plane_url Your data plane URL
+       # @option opts [Fixnum] :max_queue_size Maximum number of calls allowed
+       #   to remain queued.
+       # @option opts [Proc] :on_error Handles error calls from the API.
+       def initialize(opts = {})
+         symbolize_keys!(opts)
+
+         @queue = Queue.new
+         @write_key = opts[:write_key]
+         @data_plane_url = opts[:data_plane_url]
+         @max_queue_size = opts[:max_queue_size] || Defaults::Queue::MAX_SIZE
+         @worker_mutex = Mutex.new
+         @worker = Worker.new(@queue, @data_plane_url, @write_key, opts)
+         @worker_thread = nil
+
+         uri = URI(opts[:data_plane_url])
+
+         @host = uri.host
+         @port = uri.port
+
+         check_write_key!
+
+         at_exit { @worker_thread && @worker_thread[:should_exit] = true }
+       end
+
+       # Synchronously waits until the worker has flushed the queue.
+       #
+       # Use only for scripts which are not long-running, and will specifically
+       # exit
+       def flush
+         while !@queue.empty? || @worker.is_requesting?
+           ensure_worker_running
+           sleep(0.1)
+         end
+       end
+
+       # @!macro common_attrs
+       #   @option attrs [String] :anonymous_id ID for a user when you don't know
+       #     who they are yet. (optional but you must provide either an
+       #     `anonymous_id` or `user_id`)
+       #   @option attrs [Hash] :context ({})
+       #   @option attrs [Hash] :integrations What integrations this event
+       #     goes to (optional)
+       #   @option attrs [String] :message_id ID that uniquely
+       #     identifies a message across the API. (optional)
+       #   @option attrs [Time] :timestamp When the event occurred (optional)
+       #   @option attrs [String] :user_id The ID for this user in your database
+       #     (optional but you must provide either an `anonymous_id` or `user_id`)
+       #   @option attrs [Hash] :options Options such as user traits (optional)
+
+       # Tracks an event
+       #
+       # @see https://segment.com/docs/sources/server/ruby/#track
+       #
+       # @param [Hash] attrs
+       #
+       # @option attrs [String] :event Event name
+       # @option attrs [Hash] :properties Event properties (optional)
+       # @macro common_attrs
+       def track(attrs)
+         symbolize_keys! attrs
+         enqueue(FieldParser.parse_for_track(attrs))
+       end
+
+       # Identifies a user
+       #
+       # @see https://segment.com/docs/sources/server/ruby/#identify
+       #
+       # @param [Hash] attrs
+       #
+       # @option attrs [Hash] :traits User traits (optional)
+       # @macro common_attrs
+       def identify(attrs)
+         printf("\nInside Identify \n")
+         symbolize_keys! attrs
+         enqueue(FieldParser.parse_for_identify(attrs))
+       end
+
+       # Aliases a user from one id to another
+       #
+       # @see https://segment.com/docs/sources/server/ruby/#alias
+       #
+       # @param [Hash] attrs
+       #
+       # @option attrs [String] :previous_id The ID to alias from
+       # @macro common_attrs
+       def alias(attrs)
+         symbolize_keys! attrs
+         enqueue(FieldParser.parse_for_alias(attrs))
+       end
+
+       # Associates a user identity with a group.
+       #
+       # @see https://segment.com/docs/sources/server/ruby/#group
+       #
+       # @param [Hash] attrs
+       #
+       # @option attrs [String] :group_id The ID of the group
+       # @option attrs [Hash] :traits User traits (optional)
+       # @macro common_attrs
+       def group(attrs)
+         symbolize_keys! attrs
+         enqueue(FieldParser.parse_for_group(attrs))
+       end
+
+       # Records a page view
+       #
+       # @see https://segment.com/docs/sources/server/ruby/#page
+       #
+       # @param [Hash] attrs
+       #
+       # @option attrs [String] :name Name of the page
+       # @option attrs [Hash] :properties Page properties (optional)
+       # @macro common_attrs
+       def page(attrs)
+         symbolize_keys! attrs
+         enqueue(FieldParser.parse_for_page(attrs))
+       end
+
+       # Records a screen view (for a mobile app)
+       #
+       # @param [Hash] attrs
+       #
+       # @option attrs [String] :name Name of the screen
+       # @option attrs [Hash] :properties Screen properties (optional)
+       # @option attrs [String] :category The screen category (optional)
+       # @macro common_attrs
+       def screen(attrs)
+         symbolize_keys! attrs
+         enqueue(FieldParser.parse_for_screen(attrs))
+       end
+
+       # @return [Fixnum] number of messages in the queue
+       def queued_messages
+         @queue.length
+       end
+
+       private
+
+       # private: Enqueues the action.
+       #
+       # returns Boolean of whether the item was added to the queue.
+       def enqueue(action)
+         puts action
+         # add our request id for tracing purposes
+         action[:messageId] ||= uid
+
+         if @queue.length < @max_queue_size
+           @queue << action
+           ensure_worker_running
+
+           true
+         else
+           logger.warn(
+             'Queue is full, dropping events. The :max_queue_size ' \
+             'configuration parameter can be increased to prevent this from ' \
+             'happening.'
+           )
+           false
+         end
+       end
+
+       # private: Checks that the write_key is properly initialized
+       def check_write_key!
+         raise ArgumentError, 'Write key must be initialized' if @write_key.nil?
+       end
+
+       def ensure_worker_running
+         return if worker_running?
+         @worker_mutex.synchronize do
+           return if worker_running?
+           @worker_thread = Thread.new do
+             @worker.run
+           end
+         end
+       end
+
+       def worker_running?
+         @worker_thread && @worker_thread.alive?
+       end
+     end
+   end
+ end
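
A minimal sketch of driving the client directly (all values below are illustrative placeholders); note that flush blocks until the background worker has drained the queue, so it is intended for short-lived scripts:

  client = Rudder::Analytics::Client.new(
    write_key: 'WRITE_KEY',                            # placeholder
    data_plane_url: 'http://localhost:8080/v1/batch',  # placeholder (the library default)
    on_error: proc { |status, msg| warn "#{status}: #{msg}" }
  )
  client.identify(user_id: '019mr8mf4r', traits: { email: 'test@example.com' })
  client.queued_messages   # => typically 1 until the worker picks the message up
  client.flush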
@@ -0,0 +1,37 @@
+ module Rudder
+   class Analytics
+     module Defaults
+       module Request
+         HOST = 'localhost'
+         PORT = 8080
+         PATH = '/v1/batch'
+         DATA_PLANE_URL = 'http://localhost:8080/v1/batch'
+         SSL = false
+         HEADERS = { 'Accept' => 'application/json',
+                     'Content-Type' => 'application/json',
+                     'User-Agent' => "rudderanalytics-ruby/#{Analytics::VERSION}" }
+         RETRIES = 10
+       end
+
+       module Queue
+         MAX_SIZE = 10000
+       end
+
+       module Message
+         MAX_BYTES = 32768 # 32 KB
+       end
+
+       module MessageBatch
+         MAX_BYTES = 512_000 # 500 KB
+         MAX_SIZE = 100
+       end
+
+       module BackoffPolicy
+         MIN_TIMEOUT_MS = 100
+         MAX_TIMEOUT_MS = 10000
+         MULTIPLIER = 1.5
+         RANDOMIZATION_FACTOR = 0.5
+       end
+     end
+   end
+ end
@@ -0,0 +1,192 @@
+ module Rudder
+   class Analytics
+     # Handles parsing fields according to the Segment Spec
+     #
+     # @see https://segment.com/docs/spec/
+     class FieldParser
+       class << self
+         include Rudder::Analytics::Utils
+
+         # In addition to the common fields, track accepts:
+         #
+         # - "event"
+         # - "properties"
+         def parse_for_track(fields)
+           common = parse_common_fields(fields)
+
+           event = fields[:event]
+           properties = fields[:properties] || {}
+
+           check_presence!(event, 'event')
+           check_is_hash!(properties, 'properties')
+
+           isoify_dates! properties
+
+           common.merge({
+             :type => 'track',
+             :event => event.to_s,
+             :properties => properties
+           })
+         end
+
+         # In addition to the common fields, identify accepts:
+         #
+         # - "traits"
+         def parse_for_identify(fields)
+           common = parse_common_fields(fields)
+
+           traits = fields[:traits] || {}
+           check_is_hash!(traits, 'traits')
+           isoify_dates! traits
+
+           common.merge({
+             :type => 'identify',
+             :traits => traits
+           })
+         end
+
+         # In addition to the common fields, alias accepts:
+         #
+         # - "previous_id"
+         def parse_for_alias(fields)
+           common = parse_common_fields(fields)
+
+           previous_id = fields[:previous_id]
+           check_presence!(previous_id, 'previous_id')
+
+           common.merge({
+             :type => 'alias',
+             :previousId => previous_id
+           })
+         end
+
+         # In addition to the common fields, group accepts:
+         #
+         # - "group_id"
+         # - "traits"
+         def parse_for_group(fields)
+           common = parse_common_fields(fields)
+
+           group_id = fields[:group_id]
+           traits = fields[:traits] || {}
+
+           check_presence!(group_id, 'group_id')
+           check_is_hash!(traits, 'traits')
+
+           isoify_dates! traits
+
+           common.merge({
+             :type => 'group',
+             :groupId => group_id,
+             :traits => traits
+           })
+         end
+
+         # In addition to the common fields, page accepts:
+         #
+         # - "name"
+         # - "properties"
+         def parse_for_page(fields)
+           common = parse_common_fields(fields)
+
+           name = fields[:name] || ''
+           properties = fields[:properties] || {}
+
+           check_is_hash!(properties, 'properties')
+
+           isoify_dates! properties
+
+           common.merge({
+             :type => 'page',
+             :name => name.to_s,
+             :properties => properties
+           })
+         end
+
+         # In addition to the common fields, screen accepts:
+         #
+         # - "name"
+         # - "properties"
+         # - "category" (Not in spec, retained for backward compatibility)
+         def parse_for_screen(fields)
+           common = parse_common_fields(fields)
+
+           name = fields[:name]
+           properties = fields[:properties] || {}
+           category = fields[:category]
+
+           check_presence!(name, 'name')
+           check_is_hash!(properties, 'properties')
+
+           isoify_dates! properties
+
+           parsed = common.merge({
+             :type => 'screen',
+             :name => name,
+             :properties => properties
+           })
+
+           parsed[:category] = category if category
+
+           parsed
+         end
+
+         private
+
+         def parse_common_fields(fields)
+           timestamp = fields[:timestamp] || Time.new
+           message_id = fields[:message_id].to_s if fields[:message_id]
+           context = fields[:context] || {}
+
+           check_user_id! fields
+           check_timestamp! timestamp
+
+           add_context! context
+
+           parsed = {
+             :context => context,
+             :messageId => message_id,
+             :timestamp => datetime_in_iso8601(timestamp)
+           }
+
+           parsed[:userId] = fields[:user_id] if fields[:user_id]
+           parsed[:anonymousId] = fields[:anonymous_id] if fields[:anonymous_id]
+           parsed[:integrations] = fields[:integrations] if fields[:integrations]
+
+           # Not in spec, retained for backward compatibility
+           parsed[:options] = fields[:options] if fields[:options]
+
+           parsed
+         end
+
+         def check_user_id!(fields)
+           unless fields[:user_id] || fields[:anonymous_id]
+             raise ArgumentError, 'Must supply either user_id or anonymous_id'
+           end
+         end
+
+         def check_timestamp!(timestamp)
+           raise ArgumentError, 'Timestamp must be a Time' unless timestamp.is_a? Time
+         end
+
+         def add_context!(context)
+           context[:library] = { :name => 'rudderanalytics-ruby', :version => Rudder::Analytics::VERSION.to_s }
+         end
+
+         # private: Ensures that a string is non-empty
+         #
+         # obj - String|Number that must be non-blank
+         # name - Name of the validated value
+         def check_presence!(obj, name)
+           if obj.nil? || (obj.is_a?(String) && obj.empty?)
+             raise ArgumentError, "#{name} must be given"
+           end
+         end
+
+         def check_is_hash!(obj, name)
+           raise ArgumentError, "#{name} must be a Hash" unless obj.is_a? Hash
+         end
+       end
+     end
+   end
+ end
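
For illustration, a track payload produced by this parser has roughly the following shape (the timestamp is an example value; messageId is left nil here and filled in later by Client#enqueue):

  Rudder::Analytics::FieldParser.parse_for_track(
    user_id: '019mr8mf4r',
    event: 'Article Bookmarked',
    properties: { title: 'Snow Fall' }
  )
  # => { :context    => { :library => { :name => 'rudderanalytics-ruby', :version => '0.0.1' } },
  #      :messageId  => nil,
  #      :timestamp  => '2020-01-02T16:53:37.644+05:30',
  #      :userId     => '019mr8mf4r',
  #      :type       => 'track',
  #      :event      => 'Article Bookmarked',
  #      :properties => { :title => 'Snow Fall' } }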
@@ -0,0 +1,60 @@
+ require 'logger'
+
+ module Rudder
+   class Analytics
+     # Wraps an existing logger and adds a prefix to all messages
+     class PrefixedLogger
+       def initialize(logger, prefix)
+         @logger = logger
+         @prefix = prefix
+       end
+
+       def debug(msg)
+         # @logger.debug("#{@prefix} #{msg}")
+       end
+
+       def info(msg)
+         # @logger.info("#{@prefix} #{msg}")
+       end
+
+       def warn(msg)
+         # @logger.warn("#{@prefix} #{msg}")
+       end
+
+       def error(msg)
+         # @logger.error("#{@prefix} #{msg}")
+       end
+     end
+
+     module Logging
+       class << self
+         def logger
+           return @logger if @logger
+
+           base_logger = if defined?(Rails)
+                           Rails.logger
+                         else
+                           logger = Logger.new STDOUT
+                           logger.progname = 'Rudder::Analytics'
+                           logger
+                         end
+           @logger = PrefixedLogger.new(base_logger, '[rudderanalytics-ruby]')
+         end
+
+         attr_writer :logger
+       end
+
+       def self.included(base)
+         class << base
+           def logger
+             Logging.logger
+           end
+         end
+       end
+
+       def logger
+         Logging.logger
+       end
+     end
+   end
+ end
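
Because the PrefixedLogger method bodies above are commented out in this release, library log output is effectively suppressed. A minimal sketch of substituting your own logger through the writer exposed above:

  require 'logger'
  Rudder::Analytics::Logging.logger = Logger.new($stderr)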
@@ -0,0 +1,73 @@
+ require 'forwardable'
+ require 'rudder/analytics/logging'
+
+ module Rudder
+   class Analytics
+     # A batch of `Message`s to be sent to the API
+     class MessageBatch
+       class JSONGenerationError < StandardError; end
+
+       extend Forwardable
+       include Rudder::Analytics::Logging
+       include Rudder::Analytics::Defaults::MessageBatch
+
+       def initialize(max_message_count)
+         @messages = []
+         @max_message_count = max_message_count
+         @json_size = 0
+       end
+
+       def <<(message)
+         begin
+           message_json = message.to_json
+           # puts message_json
+         rescue StandardError => e
+           raise JSONGenerationError, "Serialization error: #{e}"
+         end
+
+         message_json_size = message_json.bytesize
+         if message_too_big?(message_json_size)
+           logger.error('a message exceeded the maximum allowed size')
+         else
+           @messages << message
+           @json_size += message_json_size + 1 # One byte for the comma
+         end
+       end
+
+       def full?
+         item_count_exhausted? || size_exhausted?
+       end
+
+       def clear
+         @messages.clear
+         @json_size = 0
+       end
+
+       def_delegators :@messages, :to_json
+       def_delegators :@messages, :empty?
+       def_delegators :@messages, :length
+
+       private
+
+       def item_count_exhausted?
+         @messages.length >= @max_message_count
+       end
+
+       def message_too_big?(message_json_size)
+         message_json_size > Defaults::Message::MAX_BYTES
+       end
+
+       # We consider the max size here as just enough to leave room for one more
+       # message of the largest size possible. This is a shortcut that allows us
+       # to use a native Ruby `Queue` that doesn't allow peeking. The tradeoff
+       # here is that we might fit fewer messages than possible into a batch.
+       #
+       # The alternative is to use our own `Queue` implementation that allows
+       # peeking, and to consider the next message size when calculating whether
+       # the message can be accommodated in this batch.
+       def size_exhausted?
+         @json_size >= (MAX_BYTES - Defaults::Message::MAX_BYTES)
+       end
+     end
+   end
+ end
@@ -0,0 +1,151 @@
+ require 'rudder/analytics/defaults'
+ require 'rudder/analytics/utils'
+ require 'rudder/analytics/response'
+ require 'rudder/analytics/logging'
+ require 'rudder/analytics/backoff_policy'
+ require 'net/http'
+ require 'net/https'
+ require 'json'
+ require 'pry'
+ require 'uri'
+
+
+ module Rudder
+   class Analytics
+     class Request
+       include Rudder::Analytics::Defaults::Request
+       include Rudder::Analytics::Utils
+       include Rudder::Analytics::Logging
+
+       # public: Creates a new request object to send analytics batch
+       #
+       def initialize(options = {})
+         options[:host] ||= HOST
+         options[:port] ||= PORT
+         options[:ssl] ||= SSL
+         @headers = options[:headers] || HEADERS
+         @path = options[:path] || PATH
+         @retries = options[:retries] || RETRIES
+         @backoff_policy =
+           options[:backoff_policy] || Rudder::Analytics::BackoffPolicy.new
+
+         uri = URI(options[:data_plane_url] || DATA_PLANE_URL)
+         printf("************\n")
+         printf("************\n")
+         printf(options[:data_plane_url] || DATA_PLANE_URL)
+         printf("\n************\n")
+         printf(uri.host)
+         printf("\n************\n")
+         printf(uri.port.to_s)
+         printf("************\n")
+
+         http = Net::HTTP.new(uri.host, uri.port)
+         http.use_ssl = options[:ssl]
+         http.read_timeout = 8
+         http.open_timeout = 4
+
+         @http = http
+       end
+
+       # public: Posts the write key and batch of messages to the API.
+       #
+       # returns - Response of the status and error if it exists
+       def post(write_key, batch)
+         logger.debug("Sending request for #{batch.length} items")
+
+         last_response, exception = retry_with_backoff(@retries) do
+           status_code, body = send_request(write_key, batch)
+
+           error = JSON.parse(body)['error'] rescue error = JSON.parse(body.to_json)['error'] # the Rudder server now returns 'OK'
+
+           should_retry = should_retry_request?(status_code, body)
+           logger.debug("Response status code: #{status_code}")
+           logger.debug("Response error: #{error}") if error
+
+           [Response.new(status_code, error), should_retry]
+         end
+
+         if exception
+           logger.error(exception.message)
+           exception.backtrace.each { |line| logger.error(line) }
+           Response.new(-1, exception.to_s)
+         else
+           last_response
+         end
+       end
+
+       private
+
+       def should_retry_request?(status_code, body)
+         if status_code >= 500
+           true # Server error
+         elsif status_code == 429
+           true # Rate limited
+         elsif status_code >= 400
+           logger.error(body)
+           false # Client error. Do not retry, but log
+         else
+           false
+         end
+       end
+
+       # Takes a block that returns [result, should_retry].
+       #
+       # Retries up to `retries_remaining` times if `should_retry` is true or
+       # an exception is raised. `@backoff_policy` is used to determine the
+       # duration to sleep between attempts
+       #
+       # Returns [last_result, raised_exception]
+       def retry_with_backoff(retries_remaining, &block)
+         result, caught_exception = nil
+         should_retry = false
+
+         begin
+           result, should_retry = yield
+           return [result, nil] unless should_retry
+         rescue StandardError => e
+           should_retry = true
+           caught_exception = e
+         end
+
+         if should_retry && (retries_remaining > 1)
+           logger.debug("Retrying request, #{retries_remaining} retries left")
+           sleep(@backoff_policy.next_interval.to_f / 1000)
+           retry_with_backoff(retries_remaining - 1, &block)
+         else
+           [result, caught_exception]
+         end
+       end
+
+       # Sends a request for the batch, returns [status_code, body]
+       def send_request(write_key, batch)
+         payload = JSON.generate(
+           :sentAt => datetime_in_iso8601(Time.now),
+           :batch => batch
+         )
+         request = Net::HTTP::Post.new(@path, @headers)
+         request.basic_auth(write_key, nil)
+
+         if self.class.stub
+           logger.debug "stubbed request to #{@path}: " \
+             "write key = #{write_key}, batch = #{JSON.generate(batch)}"
+
+           [200, '{}']
+         else
+           puts payload
+           response = @http.request(request, payload)
+           [response.code.to_i, response.body]
+         end
+       end
+
+       class << self
+         attr_writer :stub
+
+         def stub
+           @stub || ENV['STUB']
+         end
+       end
+     end
+   end
+ end
@@ -0,0 +1,15 @@
+ module Rudder
+   class Analytics
+     class Response
+       attr_reader :status, :error
+
+       # public: Simple class to wrap responses from the API
+       #
+       #
+       def initialize(status = 200, error = nil)
+         @status = status
+         @error = error
+       end
+     end
+   end
+ end
@@ -0,0 +1,8 @@
+
+ psql --host=torpedo-prod-data-plane-infra.cu6wlvcnqwq4.us-east-1.rds.amazonaws.com --port=5432 --username=rudder_backend --password --dbname=jobsdb
+
+ ^JYGDvq:9GA?}.%
+
+
+
+
@@ -0,0 +1,42 @@
+ {
+   "sentAt": "2020-01-02T16:53:37.645+05:30",
+   "batch": [
+     {
+       "context": {
+         "ip": "8.8.8.8",
+         "library": { "name": "rudderanalytics-ruby", "version": "0.0.0" }
+       },
+       "messageId": "80332b3e-f71e-432f-a328-389714ffe093",
+       "timestamp": "2020-01-02T16:53:37.644+05:30",
+       "userId": "019mr8mf4r",
+       "type": "identify",
+       "traits": { "email": "sumanth", "friends": 872 }
+     },
+     {
+       "context": {
+         "library": { "name": "rudderanalytics-ruby", "version": "0.0.0" }
+       },
+       "messageId": "7fb2ea58-5953-49d9-a32b-c38315ece47d",
+       "timestamp": "2020-01-02T16:53:37.644+05:30",
+       "userId": "f4ca124298",
+       "type": "track",
+       "event": "Article Bookmarked",
+       "properties": {
+         "title": "Snow Fall",
+         "subtitle": "The Avalance at Tunnel Creek",
+         "author": "John Branch"
+       }
+     },
+     {
+       "context": {
+         "library": { "name": "rudderanalytics-ruby", "version": "0.0.0" }
+       },
+       "messageId": "4cd237b3-bfb1-4252-9abd-c85685fa06bf",
+       "timestamp": "2020-01-02T16:53:37.644+05:30",
+       "userId": "user_id",
+       "type": "page",
+       "name": "Ruby library",
+       "properties": { "url": "https://test_page.in" }
+     }
+   ]
+ }
@@ -0,0 +1,91 @@
+ require 'securerandom'
+
+ module Rudder
+   class Analytics
+     module Utils
+       extend self
+
+       # public: Return a new hash with keys converted from strings to symbols
+       #
+       def symbolize_keys(hash)
+         hash.each_with_object({}) do |(k, v), memo|
+           memo[k.to_sym] = v
+         end
+       end
+
+       # public: Convert hash keys from strings to symbols in place
+       #
+       def symbolize_keys!(hash)
+         hash.replace symbolize_keys hash
+       end
+
+       # public: Return a new hash with keys as strings
+       #
+       def stringify_keys(hash)
+         hash.each_with_object({}) do |(k, v), memo|
+           memo[k.to_s] = v
+         end
+       end
+
+       # public: Returns a new hash with all the date values converted into
+       # iso8601 strings
+       #
+       def isoify_dates(hash)
+         hash.each_with_object({}) do |(k, v), memo|
+           memo[k] = datetime_in_iso8601(v)
+         end
+       end
+
+       # public: Converts all the date values into iso8601 strings in place
+       #
+       def isoify_dates!(hash)
+         hash.replace isoify_dates hash
+       end
+
+       # public: Returns a uid string
+       #
+       def uid
+         arr = SecureRandom.random_bytes(16).unpack('NnnnnN')
+         arr[2] = (arr[2] & 0x0fff) | 0x4000
+         arr[3] = (arr[3] & 0x3fff) | 0x8000
+         '%08x-%04x-%04x-%04x-%04x%08x' % arr
+       end
+
+       def datetime_in_iso8601(datetime)
+         case datetime
+         when Time
+           time_in_iso8601 datetime
+         when DateTime
+           time_in_iso8601 datetime.to_time
+         when Date
+           date_in_iso8601 datetime
+         else
+           datetime
+         end
+       end
+
+       def time_in_iso8601(time, fraction_digits = 3)
+         fraction = if fraction_digits > 0
+                      ('.%06i' % time.usec)[0, fraction_digits + 1]
+                    end
+
+         "#{time.strftime('%Y-%m-%dT%H:%M:%S')}#{fraction}#{formatted_offset(time, true, 'Z')}"
+       end
+
+       def date_in_iso8601(date)
+         date.strftime('%F')
+       end
+
+       def formatted_offset(time, colon = true, alternate_utc_string = nil)
+         time.utc? && alternate_utc_string || seconds_to_utc_offset(time.utc_offset, colon)
+       end
+
+       def seconds_to_utc_offset(seconds, colon = true)
+         (colon ? UTC_OFFSET_WITH_COLON : UTC_OFFSET_WITHOUT_COLON) % [(seconds < 0 ? '-' : '+'), (seconds.abs / 3600), ((seconds.abs % 3600) / 60)]
+       end
+
+       UTC_OFFSET_WITH_COLON = '%s%02d:%02d'
+       UTC_OFFSET_WITHOUT_COLON = UTC_OFFSET_WITH_COLON.sub(':', '')
+     end
+   end
+ end
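
As a concrete example of the formatting helpers above, a UTC Time is rendered with millisecond precision and a Z suffix, Date values use %F, and uid produces a UUID-shaped string:

  require 'date'
  Rudder::Analytics::Utils.datetime_in_iso8601(Time.utc(2020, 1, 2, 11, 23, 37)) # => "2020-01-02T11:23:37.000Z"
  Rudder::Analytics::Utils.datetime_in_iso8601(Date.new(2020, 1, 2))             # => "2020-01-02"
  Rudder::Analytics::Utils.uid   # => e.g. "80332b3e-f71e-432f-a328-389714ffe093"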
@@ -0,0 +1,5 @@
+ module Rudder
+   class Analytics
+     VERSION = '0.0.1'
+   end
+ end
@@ -0,0 +1,67 @@
+ require 'rudder/analytics/defaults'
+ require 'rudder/analytics/message_batch'
+ require 'rudder/analytics/request'
+ require 'rudder/analytics/utils'
+
+ module Rudder
+   class Analytics
+     class Worker
+       include Rudder::Analytics::Utils
+       include Rudder::Analytics::Defaults
+       include Rudder::Analytics::Logging
+
+       # public: Creates a new worker
+       #
+       # The worker continuously takes messages off the queue
+       # and makes requests to the Rudder data plane API
+       #
+       # queue          - Queue synchronized between client and worker
+       # data_plane_url - String of the data plane URL to post batches to
+       # write_key      - String of the project's Write key
+       # options        - Hash of worker options
+       #                  batch_size - Fixnum of how many items to send in a batch
+       #                  on_error   - Proc of what to do on an error
+       #
+       def initialize(queue, data_plane_url, write_key, options = {})
+         symbolize_keys! options
+         @queue = queue
+         @data_plane_url = data_plane_url
+         @write_key = write_key
+         @on_error = options[:on_error] || proc { |status, error| }
+         batch_size = options[:batch_size] || Defaults::MessageBatch::MAX_SIZE
+         @batch = MessageBatch.new(batch_size)
+         @lock = Mutex.new
+       end
+
+       # public: Continuously runs the loop to check for new events
+       #
+       def run
+         until Thread.current[:should_exit]
+           return if @queue.empty?
+
+           @lock.synchronize do
+             consume_message_from_queue! until @batch.full? || @queue.empty?
+           end
+
+           res = Request.new(data_plane_url: @data_plane_url).post @write_key, @batch
+           @on_error.call(res.status, res.error) unless res.status == 200
+
+           @lock.synchronize { @batch.clear }
+         end
+       end
+
+       # public: Check whether we have outstanding requests.
+       #
+       def is_requesting?
+         @lock.synchronize { !@batch.empty? }
+       end
+
+       private
+
+       def consume_message_from_queue!
+         @batch << @queue.pop
+       rescue MessageBatch::JSONGenerationError => e
+         @on_error.call(-1, e.to_s)
+       end
+     end
+   end
+ end
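
Since the client forwards its options hash to this worker, batch size and the error callback can be tuned from the top-level constructor; a sketch with placeholder values:

  analytics = Rudder::Analytics.new(
    write_key: 'WRITE_KEY',                            # placeholder
    data_plane_url: 'http://localhost:8080/v1/batch',  # placeholder (the library default)
    batch_size: 50,                                    # messages per request; defaults to 100
    on_error: proc { |status, error| warn "rudder error #{status}: #{error}" }
  )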
metadata ADDED
@@ -0,0 +1,172 @@
+ --- !ruby/object:Gem::Specification
+ name: analytics-ruby-rudder
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - Rudder
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2020-01-02 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: commander
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '4.4'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '4.4'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.3'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.3'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+ - !ruby/object:Gem::Dependency
+   name: tzinfo
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 1.2.1
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 1.2.1
+ - !ruby/object:Gem::Dependency
+   name: activesupport
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 6.0.2
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 6.0.2
+ - !ruby/object:Gem::Dependency
+   name: oj
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 3.6.2
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 3.6.2
+ - !ruby/object:Gem::Dependency
+   name: rubocop
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.78.0
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.78.0
+ - !ruby/object:Gem::Dependency
+   name: codecov
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.1.4
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.1.4
+ description: The Rudder ruby analytics library
+ email: sayan@rudderlabs.com
+ executables:
+ - analytics
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - bin/analytics
+ - lib/analytics-ruby-rudder.rb
+ - lib/rudder.rb
+ - lib/rudder/analytics.rb
+ - lib/rudder/analytics/backoff_policy.rb
+ - lib/rudder/analytics/client.rb
+ - lib/rudder/analytics/defaults.rb
+ - lib/rudder/analytics/field_parser.rb
+ - lib/rudder/analytics/logging.rb
+ - lib/rudder/analytics/message_batch.rb
+ - lib/rudder/analytics/request.rb
+ - lib/rudder/analytics/response.rb
+ - lib/rudder/analytics/test
+ - lib/rudder/analytics/test-am.json
+ - lib/rudder/analytics/utils.rb
+ - lib/rudder/analytics/version.rb
+ - lib/rudder/analytics/worker.rb
+ homepage: https://github.com/rudderlabs/rudderanalytics-ruby
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '2.0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.1.2
+ signing_key:
+ specification_version: 4
+ summary: Rudder analytics library
+ test_files: []