ruddertest 0.0.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: 53dcdb06e2e9577a5b366836dcb2bde0feb01b00
4
+ data.tar.gz: 7d64fadf5adfb060f0a8e63ed08a5165d41c3ddb
5
+ SHA512:
6
+ metadata.gz: 80f085f8d74998a3ffd739e75dc1d86fe6e1d890fbbed24437e7cb27fed5594b963dccd45be26ea4ef47f6011bad68ed35ba24c07c7e35b11e2142d42c3ca411
7
+ data.tar.gz: b58b5baad9de35c590aebd8232bc98a86938a9fcb5649f94005f03a0a9c59f48035527f395019ebf8004847d5c5c31944591867e721cad4d05cfdd0ebb2d05a5
data/bin/analytics ADDED
@@ -0,0 +1,110 @@
1
#!/usr/bin/env ruby

# Command-line simulator: builds one Rudder/Segment message of the requested
# --type from the CLI options, sends it, and flushes the client before exit.

require 'segment/analytics'
require 'rubygems'
require 'commander/import'
require 'time'
require 'json'

program :name, 'simulator.rb'
program :version, '0.0.1'
program :description, 'scripting simulator'

# Parses a JSON-encoded option value into a Hash.
# Returns nil when no value was supplied on the command line.
def json_hash(str)
  JSON.parse(str) if str
end

# analytics -method=<method> -segment-write-key=<segmentWriteKey> [options]

default_command :send

command :send do |c|
  c.description = 'send a segment message'

  c.option '--writeKey=<writeKey>', String, 'the Rudder writeKey'
  c.option '--dataPlaneUrl=<dataPlaneUrl>', String, 'the Rudder data plane URL'
  c.option '--type=<type>', String, 'The Segment message type'

  c.option '--userId=<userId>', String, 'the user id to send the event as'
  c.option '--anonymousId=<anonymousId>', String, 'the anonymous user id to send the event as'
  c.option '--context=<context>', 'additional context for the event (JSON-encoded)'
  c.option '--integrations=<integrations>', 'additional integrations for the event (JSON-encoded)'

  c.option '--event=<event>', String, 'the event name to send with the event'
  c.option '--properties=<properties>', 'the event properties to send (JSON-encoded)'

  c.option '--name=<name>', 'name of the screen or page to send with the message'

  c.option '--traits=<traits>', 'the identify/group traits to send (JSON-encoded)'

  c.option '--groupId=<groupId>', String, 'the group id'
  c.option '--previousId=<previousId>', String, 'the previous id'

  c.action do |args, options|
    # BUG FIX: previously this assigned to the constant `Analytics`, which
    # emits "already initialized constant" warnings when the action runs
    # again in the same process. A local variable is correct here.
    analytics = Segment::Analytics.new({
      write_key: options.writeKey,
      data_plane_url: options.dataPlaneUrl,
      on_error: proc { |status, msg| print msg }
    })

    # Fields shared by every message type (identical to what each branch
    # previously passed individually).
    common = {
      user_id: options.userId,
      anonymous_id: options.anonymousId,
      context: json_hash(options.context),
      integrations: json_hash(options.integrations)
    }

    case options.type
    when 'track'
      analytics.track(common.merge(
        event: options.event,
        properties: json_hash(options.properties)
      ))
    when 'page'
      analytics.page(common.merge(
        name: options.name,
        properties: json_hash(options.properties)
      ))
    when 'screen'
      analytics.screen(common.merge(
        name: options.name,
        properties: json_hash(options.properties)
      ))
    when 'identify'
      analytics.identify(common.merge(
        traits: json_hash(options.traits)
      ))
    when 'group'
      analytics.group(common.merge(
        group_id: options.groupId,
        traits: json_hash(options.traits)
      ))
    when 'alias'
      analytics.alias(common.merge(
        previous_id: options.previousId
      ))
    else
      raise "Invalid Message Type #{options.type}"
    end
    analytics.flush
  end
end
@@ -0,0 +1 @@
1
+ require 'segment'
@@ -0,0 +1,49 @@
1
require 'segment/analytics/defaults'

module Segment
  class Analytics
    # Produces exponentially growing retry intervals with random jitter,
    # capped at a configurable maximum.
    class BackoffPolicy
      include Segment::Analytics::Defaults::BackoffPolicy

      # @param [Hash] opts
      # @option opts [Numeric] :min_timeout_ms lower bound for the interval
      # @option opts [Numeric] :max_timeout_ms upper bound for the interval
      # @option opts [Numeric] :multiplier growth factor applied per attempt
      # @option opts [Numeric] :randomization_factor width of the jitter
      #   range around the computed interval
      def initialize(opts = {})
        @min_timeout_ms = opts[:min_timeout_ms] || MIN_TIMEOUT_MS
        @max_timeout_ms = opts[:max_timeout_ms] || MAX_TIMEOUT_MS
        @multiplier = opts[:multiplier] || MULTIPLIER
        @randomization_factor = opts[:randomization_factor] || RANDOMIZATION_FACTOR
        @attempts = 0
      end

      # @return [Numeric] the next backoff interval, in milliseconds.
      def next_interval
        base = @min_timeout_ms * (@multiplier**@attempts)
        @attempts += 1
        [add_jitter(base, @randomization_factor), @max_timeout_ms].min
      end

      private

      # Randomly shifts +base+ up or down by at most
      # +base * randomization_factor+.
      def add_jitter(base, randomization_factor)
        roll = rand
        offset = roll * (base * randomization_factor)
        roll < 0.5 ? base - offset : base + offset
      end
    end
  end
end
@@ -0,0 +1,193 @@
1
require 'thread'
require 'time'

# FIX: 'segment/analytics/defaults' was required twice; duplicate removed.
require 'segment/analytics/defaults'
require 'segment/analytics/logging'
require 'segment/analytics/utils'
require 'segment/analytics/worker'

module Segment
  class Analytics
    # Public client: validates and queues messages, delegating delivery to a
    # background Worker thread that batches and posts them.
    class Client
      include Segment::Analytics::Utils
      include Segment::Analytics::Logging

      # @param [Hash] opts
      # @option opts [String] :write_key Your project's write_key (required)
      # @option opts [String] :data_plane_url Your data plane URL
      # @option opts [FixNum] :max_queue_size Maximum number of calls to be
      #   remain queued.
      # @option opts [Proc] :on_error Handles error calls from the API.
      # @raise [ArgumentError] when no write key is supplied
      def initialize(opts = {})
        symbolize_keys!(opts)

        @queue = Queue.new
        @write_key = opts[:write_key]
        @data_plane_url = opts[:data_plane_url]
        @max_queue_size = opts[:max_queue_size] || Defaults::Queue::MAX_SIZE
        @worker_mutex = Mutex.new
        @worker = Worker.new(@queue, @data_plane_url, @write_key, opts)
        @worker_thread = nil

        check_write_key!

        # Signal the worker thread to wind down when the process exits.
        at_exit { @worker_thread && @worker_thread[:should_exit] = true }
      end

      # Synchronously waits until the worker has flushed the queue.
      #
      # Use only for scripts which are not long-running, and will specifically
      # exit
      def flush
        while !@queue.empty? || @worker.is_requesting?
          ensure_worker_running
          sleep(0.1)
        end
      end

      # @!macro common_attrs
      #   @option attrs [String] :anonymous_id ID for a user when you don't know
      #     who they are yet. (optional but you must provide either an
      #     `anonymous_id` or `user_id`)
      #   @option attrs [Hash] :context ({})
      #   @option attrs [Hash] :integrations What integrations this event
      #     goes to (optional)
      #   @option attrs [String] :message_id ID that uniquely
      #     identifies a message across the API. (optional)
      #   @option attrs [Time] :timestamp When the event occurred (optional)
      #   @option attrs [String] :user_id The ID for this user in your database
      #     (optional but you must provide either an `anonymous_id` or `user_id`)
      #   @option attrs [Hash] :options Options such as user traits (optional)

      # Tracks an event
      #
      # @see https://segment.com/docs/sources/server/ruby/#track
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :event Event name
      # @option attrs [Hash] :properties Event properties (optional)
      # @macro common_attrs
      def track(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_track(attrs))
      end

      # Identifies a user
      #
      # @see https://segment.com/docs/sources/server/ruby/#identify
      #
      # @param [Hash] attrs
      #
      # @option attrs [Hash] :traits User traits (optional)
      # @macro common_attrs
      def identify(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_identify(attrs))
      end

      # Aliases a user from one id to another
      #
      # @see https://segment.com/docs/sources/server/ruby/#alias
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :previous_id The ID to alias from
      # @macro common_attrs
      def alias(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_alias(attrs))
      end

      # Associates a user identity with a group.
      #
      # @see https://segment.com/docs/sources/server/ruby/#group
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :group_id The ID of the group
      # @option attrs [Hash] :traits User traits (optional)
      # @macro common_attrs
      def group(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_group(attrs))
      end

      # Records a page view
      #
      # @see https://segment.com/docs/sources/server/ruby/#page
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :name Name of the page
      # @option attrs [Hash] :properties Page properties (optional)
      # @macro common_attrs
      def page(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_page(attrs))
      end

      # Records a screen view (for a mobile app)
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :name Name of the screen
      # @option attrs [Hash] :properties Screen properties (optional)
      # @option attrs [String] :category The screen category (optional)
      # @macro common_attrs
      def screen(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_screen(attrs))
      end

      # @return [Fixnum] number of messages in the queue
      def queued_messages
        @queue.length
      end

      private

      # private: Enqueues the action.
      #
      # returns Boolean of whether the item was added to the queue.
      def enqueue(action)
        # add our request id for tracing purposes
        action[:messageId] ||= uid

        if @queue.length < @max_queue_size
          @queue << action
          ensure_worker_running

          true
        else
          logger.warn(
            'Queue is full, dropping events. The :max_queue_size ' \
            'configuration parameter can be increased to prevent this from ' \
            'happening.'
          )
          false
        end
      end

      # private: Checks that the write_key is properly initialized
      def check_write_key!
        raise ArgumentError, 'Write key must be initialized' if @write_key.nil?
      end

      # Starts the worker thread if it isn't already running. Double-checked
      # under @worker_mutex so concurrent callers don't spawn duplicates.
      def ensure_worker_running
        return if worker_running?
        @worker_mutex.synchronize do
          return if worker_running?
          @worker_thread = Thread.new do
            @worker.run
          end
        end
      end

      def worker_running?
        @worker_thread && @worker_thread.alive?
      end
    end
  end
end
@@ -0,0 +1,36 @@
1
module Segment
  class Analytics
    # Library-wide default configuration values.
    #
    # FIX: string and hash constants are now frozen so shared defaults
    # cannot be mutated at runtime (mutable unfrozen constants are a
    # well-known Ruby footgun).
    module Defaults
      # HTTP connection defaults used by Request.
      module Request
        HOST = 'api.segment.io'.freeze
        PORT = 443
        PATH = '/v1/import'.freeze
        SSL = true
        HEADERS = { 'Accept' => 'application/json',
                    'Content-Type' => 'application/json',
                    'User-Agent' => "analytics-ruby/#{Analytics::VERSION}" }.freeze
        RETRIES = 10
      end

      # Client-side message queue bounds.
      module Queue
        MAX_SIZE = 10000
      end

      # Per-message size limit.
      module Message
        MAX_BYTES = 32768 # 32 KB
      end

      # Per-batch limits (size in bytes and message count).
      module MessageBatch
        MAX_BYTES = 512_000 # 500 KB
        MAX_SIZE = 100
      end

      # Retry/backoff tuning used by BackoffPolicy.
      module BackoffPolicy
        MIN_TIMEOUT_MS = 100
        MAX_TIMEOUT_MS = 10000
        MULTIPLIER = 1.5
        RANDOMIZATION_FACTOR = 0.5
      end
    end
  end
end
@@ -0,0 +1,192 @@
1
module Segment
  class Analytics
    # Handles parsing fields according to the Segment Spec
    #
    # @see https://segment.com/docs/spec/
    class FieldParser
      class << self
        include Segment::Analytics::Utils

        # In addition to the common fields, track accepts:
        #
        # - "event"
        # - "properties"
        def parse_for_track(fields)
          parsed = parse_common_fields(fields)

          event = fields[:event]
          properties = fields[:properties] || {}

          check_presence!(event, 'event')
          check_is_hash!(properties, 'properties')
          isoify_dates!(properties)

          parsed.merge(
            type: 'track',
            event: event.to_s,
            properties: properties
          )
        end

        # In addition to the common fields, identify accepts:
        #
        # - "traits"
        def parse_for_identify(fields)
          parsed = parse_common_fields(fields)

          traits = fields[:traits] || {}
          check_is_hash!(traits, 'traits')
          isoify_dates!(traits)

          parsed.merge(type: 'identify', traits: traits)
        end

        # In addition to the common fields, alias accepts:
        #
        # - "previous_id"
        def parse_for_alias(fields)
          parsed = parse_common_fields(fields)

          previous_id = fields[:previous_id]
          check_presence!(previous_id, 'previous_id')

          parsed.merge(type: 'alias', previousId: previous_id)
        end

        # In addition to the common fields, group accepts:
        #
        # - "group_id"
        # - "traits"
        def parse_for_group(fields)
          parsed = parse_common_fields(fields)

          group_id = fields[:group_id]
          traits = fields[:traits] || {}

          check_presence!(group_id, 'group_id')
          check_is_hash!(traits, 'traits')
          isoify_dates!(traits)

          parsed.merge(type: 'group', groupId: group_id, traits: traits)
        end

        # In addition to the common fields, page accepts:
        #
        # - "name"
        # - "properties"
        def parse_for_page(fields)
          parsed = parse_common_fields(fields)

          name = fields[:name] || ''
          properties = fields[:properties] || {}

          check_is_hash!(properties, 'properties')
          isoify_dates!(properties)

          parsed.merge(type: 'page', name: name.to_s, properties: properties)
        end

        # In addition to the common fields, screen accepts:
        #
        # - "name"
        # - "properties"
        # - "category" (not in the spec, retained for backward compatibility)
        def parse_for_screen(fields)
          parsed = parse_common_fields(fields)

          name = fields[:name]
          properties = fields[:properties] || {}
          category = fields[:category]

          check_presence!(name, 'name')
          check_is_hash!(properties, 'properties')
          isoify_dates!(properties)

          message = parsed.merge(type: 'screen', name: name, properties: properties)
          message[:category] = category if category
          message
        end

        private

        # Extracts and validates the fields shared by every message type.
        # Note: :messageId is always present in the result (nil when the
        # caller supplied no :message_id), matching the wire format.
        def parse_common_fields(fields)
          timestamp = fields[:timestamp] || Time.new
          message_id = fields[:message_id].to_s if fields[:message_id]
          context = fields[:context] || {}

          check_user_id! fields
          check_timestamp! timestamp
          add_context! context

          parsed = {
            context: context,
            messageId: message_id,
            timestamp: datetime_in_iso8601(timestamp)
          }

          parsed[:userId] = fields[:user_id] if fields[:user_id]
          parsed[:anonymousId] = fields[:anonymous_id] if fields[:anonymous_id]
          parsed[:integrations] = fields[:integrations] if fields[:integrations]

          # Not in spec, retained for backward compatibility
          parsed[:options] = fields[:options] if fields[:options]

          parsed
        end

        # Every message needs at least one of user_id / anonymous_id.
        def check_user_id!(fields)
          return if fields[:user_id] || fields[:anonymous_id]

          raise ArgumentError, 'Must supply either user_id or anonymous_id'
        end

        def check_timestamp!(timestamp)
          raise ArgumentError, 'Timestamp must be a Time' unless timestamp.is_a? Time
        end

        # Stamps the sending library's identity onto the message context.
        def add_context!(context)
          context[:library] = { name: 'analytics-ruby', version: Segment::Analytics::VERSION.to_s }
        end

        # private: Ensures that a string is non-empty
        #
        # obj - String|Number that must be non-blank
        # name - Name of the validated value
        def check_presence!(obj, name)
          raise ArgumentError, "#{name} must be given" if obj.nil? || (obj.is_a?(String) && obj.empty?)
        end

        def check_is_hash!(obj, name)
          raise ArgumentError, "#{name} must be a Hash" unless obj.is_a? Hash
        end
      end
    end
  end
end
@@ -0,0 +1,60 @@
1
require 'logger'

module Segment
  class Analytics
    # Wraps an existing logger, prepending a fixed prefix to every message.
    class PrefixedLogger
      def initialize(logger, prefix)
        @logger = logger
        @prefix = prefix
      end

      # Define debug/info/warn/error, each forwarding "#{prefix} #{msg}" to
      # the wrapped logger at the same severity.
      %i[debug info warn error].each do |severity|
        define_method(severity) do |msg|
          @logger.public_send(severity, "#{@prefix} #{msg}")
        end
      end
    end

    # Mixin giving classes (and their singleton classes) access to the
    # shared, lazily built library logger.
    module Logging
      class << self
        attr_writer :logger

        # Memoized prefixed logger; wraps Rails.logger when Rails is loaded,
        # otherwise a STDOUT Logger.
        def logger
          @logger ||= PrefixedLogger.new(default_logger, '[analytics-ruby]')
        end

        private

        def default_logger
          return Rails.logger if defined?(Rails)

          Logger.new(STDOUT).tap { |l| l.progname = 'Segment::Analytics' }
        end
      end

      # Including this module also exposes .logger on the host class itself.
      def self.included(base)
        class << base
          def logger
            Logging.logger
          end
        end
      end

      def logger
        Logging.logger
      end
    end
  end
end
@@ -0,0 +1,72 @@
1
require 'forwardable'
require 'segment/analytics/logging'

module Segment
  class Analytics
    # Accumulates messages bound for the API, limited both by message count
    # and by total serialized JSON size.
    class MessageBatch
      # Raised when a message cannot be serialized to JSON.
      class JSONGenerationError < StandardError; end

      extend Forwardable
      include Segment::Analytics::Logging
      include Segment::Analytics::Defaults::MessageBatch

      def_delegators :@messages, :to_json, :empty?, :length

      def initialize(max_message_count)
        @max_message_count = max_message_count
        @messages = []
        @json_size = 0
      end

      # Appends +message+ unless its JSON form exceeds the per-message limit
      # (oversized messages are dropped with an error log).
      #
      # @raise [JSONGenerationError] when the message cannot be serialized
      def <<(message)
        json =
          begin
            message.to_json
          rescue StandardError => e
            raise JSONGenerationError, "Serialization error: #{e}"
          end

        size = json.bytesize
        if message_too_big?(size)
          logger.error('a message exceeded the maximum allowed size')
        else
          @messages << message
          @json_size += size + 1 # One byte for the comma
        end
      end

      def full?
        item_count_exhausted? || size_exhausted?
      end

      def clear
        @messages.clear
        @json_size = 0
      end

      private

      def item_count_exhausted?
        @messages.length >= @max_message_count
      end

      def message_too_big?(message_json_size)
        message_json_size > Defaults::Message::MAX_BYTES
      end

      # We consider the max size here as just enough to leave room for one
      # more message of the largest size possible: a shortcut that lets us
      # use the native Ruby `Queue`, which cannot peek at the next item's
      # size before popping it. The tradeoff is batches may pack slightly
      # fewer messages than theoretically possible.
      def size_exhausted?
        @json_size >= (MAX_BYTES - Defaults::Message::MAX_BYTES)
      end
    end
  end
end
@@ -0,0 +1,135 @@
1
require 'segment/analytics/defaults'
require 'segment/analytics/utils'
require 'segment/analytics/response'
require 'segment/analytics/logging'
require 'segment/analytics/backoff_policy'
require 'net/http'
require 'net/https'
require 'json'

module Segment
  class Analytics
    # Performs the HTTP POST of a message batch, with bounded retries and
    # exponential backoff.
    class Request
      include Segment::Analytics::Defaults::Request
      include Segment::Analytics::Utils
      include Segment::Analytics::Logging

      # public: Creates a new request object to send analytics batch
      #
      # @param [Hash] options
      # @option options [String] :data_plane_url full URL batches are posted to
      # @option options [Hash] :headers request headers (defaults to HEADERS)
      # @option options [String] :path request path (defaults to PATH)
      # @option options [Numeric] :retries max attempts (defaults to RETRIES)
      # @option options [Boolean] :ssl whether to use TLS (defaults to SSL)
      # @option options [BackoffPolicy] :backoff_policy retry pacing policy
      def initialize(options = {})
        options[:host] ||= HOST
        options[:port] ||= PORT
        # FIX: was `options[:ssl] ||= SSL`, which silently overrode an
        # explicit `ssl: false`; only apply the default when unset.
        options[:ssl] = SSL if options[:ssl].nil?
        @headers = options[:headers] || HEADERS
        @path = options[:path] || PATH
        @retries = options[:retries] || RETRIES
        @backoff_policy =
          options[:backoff_policy] || Segment::Analytics::BackoffPolicy.new

        uri = URI(options[:data_plane_url])
        # FIX: Net::HTTP.new expects a host string (and port), not a URI
        # object; passing the URI produced a bogus connection address and
        # ignored the URL's port.
        http = Net::HTTP.new(uri.host, uri.port)
        http.use_ssl = options[:ssl]
        http.read_timeout = 8
        http.open_timeout = 4

        @http = http
      end

      # public: Posts the write key and batch of messages to the API.
      #
      # returns - Response of the status and error if it exists
      def post(write_key, batch)
        logger.debug("Sending request for #{batch.length} items")

        last_response, exception = retry_with_backoff(@retries) do
          status_code, body = send_request(write_key, batch)
          error = JSON.parse(body)['error']
          should_retry = should_retry_request?(status_code, body)
          logger.debug("Response status code: #{status_code}")
          logger.debug("Response error: #{error}") if error

          [Response.new(status_code, error), should_retry]
        end

        if exception
          logger.error(exception.message)
          exception.backtrace.each { |line| logger.error(line) }
          Response.new(-1, exception.to_s)
        else
          last_response
        end
      end

      private

      # 5xx and 429 are transient and worth retrying; other 4xx are client
      # errors — log and give up.
      def should_retry_request?(status_code, body)
        if status_code >= 500
          true # Server error
        elsif status_code == 429
          true # Rate limited
        elsif status_code >= 400
          logger.error(body)
          false # Client error. Do not retry, but log
        else
          false
        end
      end

      # Takes a block that returns [result, should_retry].
      #
      # Retries up to `retries_remaining` times, if `should_retry` is true or
      # an exception is raised. `@backoff_policy` is used to determine the
      # duration to sleep between attempts
      #
      # Returns [last_result, raised_exception]
      def retry_with_backoff(retries_remaining, &block)
        result = nil
        caught_exception = nil
        should_retry = false

        begin
          result, should_retry = yield
          return [result, nil] unless should_retry
        rescue StandardError => e
          should_retry = true
          caught_exception = e
        end

        if should_retry && (retries_remaining > 1)
          logger.debug("Retrying request, #{retries_remaining} retries left")
          sleep(@backoff_policy.next_interval.to_f / 1000)
          retry_with_backoff(retries_remaining - 1, &block)
        else
          [result, caught_exception]
        end
      end

      # Sends a request for the batch, returns [status_code, body]
      def send_request(write_key, batch)
        payload = JSON.generate(
          :sentAt => datetime_in_iso8601(Time.now),
          :batch => batch
        )
        request = Net::HTTP::Post.new(@path, @headers)
        # The write key is the basic-auth username; there is no password.
        request.basic_auth(write_key, nil)

        if self.class.stub
          logger.debug "stubbed request to #{@path}: " \
            "write key = #{write_key}, batch = #{JSON.generate(batch)}"

          [200, '{}']
        else
          response = @http.request(request, payload)
          [response.code.to_i, response.body]
        end
      end

      class << self
        attr_writer :stub

        # Stub mode (no network I/O) via attribute or the STUB env var.
        def stub
          @stub || ENV['STUB']
        end
      end
    end
  end
end
@@ -0,0 +1,15 @@
1
module Segment
  class Analytics
    # public: Simple value object wrapping an API response's status code and
    # error message.
    class Response
      attr_reader :status, :error

      # @param status [Integer] HTTP-style status code (defaults to 200)
      # @param error [Object, nil] error payload; nil when successful
      def initialize(status = 200, error = nil)
        @status, @error = status, error
      end
    end
  end
end
@@ -0,0 +1,91 @@
1
require 'securerandom'

module Segment
  class Analytics
    # Assorted hash, date-formatting, and uid helpers shared by the library.
    module Utils
      extend self

      UTC_OFFSET_WITH_COLON = '%s%02d:%02d'
      UTC_OFFSET_WITHOUT_COLON = UTC_OFFSET_WITH_COLON.sub(':', '')

      # public: Return a new hash with keys converted from strings to symbols
      #
      def symbolize_keys(hash)
        hash.map { |key, value| [key.to_sym, value] }.to_h
      end

      # public: Convert hash keys from strings to symbols in place
      #
      def symbolize_keys!(hash)
        hash.replace(symbolize_keys(hash))
      end

      # public: Return a new hash with keys as strings
      #
      def stringify_keys(hash)
        hash.map { |key, value| [key.to_s, value] }.to_h
      end

      # public: Returns a new hash with all date values converted to iso8601
      # strings
      #
      def isoify_dates(hash)
        hash.map { |key, value| [key, datetime_in_iso8601(value)] }.to_h
      end

      # public: Converts all date values to iso8601 strings in place
      #
      def isoify_dates!(hash)
        hash.replace(isoify_dates(hash))
      end

      # public: Returns a random v4-style uid string
      #
      def uid
        parts = SecureRandom.random_bytes(16).unpack('NnnnnN')
        parts[2] = (parts[2] & 0x0fff) | 0x4000 # version 4 nibble
        parts[3] = (parts[3] & 0x3fff) | 0x8000 # IETF variant bits
        format('%08x-%04x-%04x-%04x-%04x%08x', *parts)
      end

      # Converts Time/DateTime/Date to an iso8601 string; any other value is
      # returned untouched.
      def datetime_in_iso8601(datetime)
        if datetime.is_a?(Time)
          time_in_iso8601(datetime)
        elsif datetime.is_a?(DateTime)
          time_in_iso8601(datetime.to_time)
        elsif datetime.is_a?(Date)
          date_in_iso8601(datetime)
        else
          datetime
        end
      end

      def time_in_iso8601(time, fraction_digits = 3)
        fraction = ('.%06i' % time.usec)[0, fraction_digits + 1] if fraction_digits > 0

        "#{time.strftime('%Y-%m-%dT%H:%M:%S')}#{fraction}#{formatted_offset(time, true, 'Z')}"
      end

      def date_in_iso8601(date)
        date.strftime('%F')
      end

      # Returns the alternate string (e.g. 'Z') for UTC times when given,
      # otherwise a numeric "+HH:MM"-style offset.
      def formatted_offset(time, colon = true, alternate_utc_string = nil)
        if time.utc? && alternate_utc_string
          alternate_utc_string
        else
          seconds_to_utc_offset(time.utc_offset, colon)
        end
      end

      def seconds_to_utc_offset(seconds, colon = true)
        template = colon ? UTC_OFFSET_WITH_COLON : UTC_OFFSET_WITHOUT_COLON
        sign = seconds < 0 ? '-' : '+'
        template % [sign, seconds.abs / 3600, (seconds.abs % 3600) / 60]
      end
    end
  end
end
@@ -0,0 +1,5 @@
1
module Segment
  class Analytics
    # Current gem version string.
    VERSION = '0.0.1'
  end
end
@@ -0,0 +1,67 @@
1
require 'segment/analytics/defaults'
require 'segment/analytics/message_batch'
require 'segment/analytics/request'
require 'segment/analytics/utils'

module Segment
  class Analytics
    # Background consumer: drains the shared queue into batches and posts
    # each batch to the API.
    class Worker
      include Segment::Analytics::Utils
      include Segment::Analytics::Defaults
      include Segment::Analytics::Logging

      # public: Creates a new worker
      #
      # The worker continuously takes messages off the queue
      # and makes requests to the segment.io api
      #
      # queue - Queue synchronized between client and worker
      # data_plane_url - String URL batches are posted to
      # write_key - String of the project's Write key
      # options - Hash of worker options
      #   batch_size - Fixnum of how many items to send in a batch
      #   on_error - Proc of what to do on an error
      #
      def initialize(queue, data_plane_url, write_key, options = {})
        symbolize_keys! options
        @queue = queue
        @data_plane_url = data_plane_url
        @write_key = write_key
        @on_error = options[:on_error] || proc { |status, error| }
        @batch = MessageBatch.new(options[:batch_size] || Defaults::MessageBatch::MAX_SIZE)
        @lock = Mutex.new
      end

      # public: Pulls batches off the queue and posts them, returning once
      # the queue is drained (or the thread is told to exit).
      #
      def run
        until Thread.current[:should_exit]
          return if @queue.empty?

          @lock.synchronize do
            consume_message_from_queue! until @batch.full? || @queue.empty?
          end

          response = Request.new(data_plane_url: @data_plane_url).post(@write_key, @batch)
          @on_error.call(response.status, response.error) unless response.status == 200

          @lock.synchronize { @batch.clear }
        end
      end

      # public: True while a partially-sent batch is still outstanding.
      #
      def is_requesting?
        @lock.synchronize { !@batch.empty? }
      end

      private

      # Pops one message into the batch; serialization failures are reported
      # through the on_error callback instead of crashing the worker.
      def consume_message_from_queue!
        @batch << @queue.pop
      rescue MessageBatch::JSONGenerationError => e
        @on_error.call(-1, e.to_s)
      end
    end
  end
end
@@ -0,0 +1,39 @@
1
require 'segment/analytics/version'
require 'segment/analytics/defaults'
require 'segment/analytics/utils'
require 'segment/analytics/field_parser'
require 'segment/analytics/client'
require 'segment/analytics/worker'
require 'segment/analytics/request'
require 'segment/analytics/response'
require 'segment/analytics/logging'

module Segment
  class Analytics
    # Thin facade over {Segment::Analytics::Client}: every method call it
    # understands is proxied to an underlying client instance.
    #
    # @param options passed through to
    #   {Segment::Analytics::Client#initialize}
    # @option options [Boolean] :stub (false) If true, requests don't hit the
    #   server and are stubbed to be successful.
    def initialize(options = {})
      Request.stub = options[:stub] if options.has_key?(:stub)
      @client = Segment::Analytics::Client.new(options)
    end

    # Forward any message the client responds to; otherwise defer to super.
    def method_missing(message, *args, &block)
      return super unless @client.respond_to?(message)

      @client.send(message, *args, &block)
    end

    def respond_to_missing?(method_name, include_private = false)
      @client.respond_to?(method_name) || super
    end

    include Logging
  end
end
data/lib/segment.rb ADDED
@@ -0,0 +1 @@
1
+ require 'segment/analytics'
metadata ADDED
@@ -0,0 +1,171 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: ruddertest
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.1
5
+ platform: ruby
6
+ authors:
7
+ - Rudder
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2019-12-15 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: commander
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - "~>"
18
+ - !ruby/object:Gem::Version
19
+ version: '4.4'
20
+ type: :development
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - "~>"
25
+ - !ruby/object:Gem::Version
26
+ version: '4.4'
27
+ - !ruby/object:Gem::Dependency
28
+ name: rake
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: '10.3'
34
+ type: :development
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: '10.3'
41
+ - !ruby/object:Gem::Dependency
42
+ name: rspec
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - "~>"
46
+ - !ruby/object:Gem::Version
47
+ version: '3.0'
48
+ type: :development
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "~>"
53
+ - !ruby/object:Gem::Version
54
+ version: '3.0'
55
+ - !ruby/object:Gem::Dependency
56
+ name: tzinfo
57
+ requirement: !ruby/object:Gem::Requirement
58
+ requirements:
59
+ - - '='
60
+ - !ruby/object:Gem::Version
61
+ version: 1.2.1
62
+ type: :development
63
+ prerelease: false
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - '='
67
+ - !ruby/object:Gem::Version
68
+ version: 1.2.1
69
+ - !ruby/object:Gem::Dependency
70
+ name: activesupport
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - "~>"
74
+ - !ruby/object:Gem::Version
75
+ version: 4.1.11
76
+ type: :development
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - "~>"
81
+ - !ruby/object:Gem::Version
82
+ version: 4.1.11
83
+ - !ruby/object:Gem::Dependency
84
+ name: oj
85
+ requirement: !ruby/object:Gem::Requirement
86
+ requirements:
87
+ - - "~>"
88
+ - !ruby/object:Gem::Version
89
+ version: 3.6.2
90
+ type: :development
91
+ prerelease: false
92
+ version_requirements: !ruby/object:Gem::Requirement
93
+ requirements:
94
+ - - "~>"
95
+ - !ruby/object:Gem::Version
96
+ version: 3.6.2
97
+ - !ruby/object:Gem::Dependency
98
+ name: rubocop
99
+ requirement: !ruby/object:Gem::Requirement
100
+ requirements:
101
+ - - "~>"
102
+ - !ruby/object:Gem::Version
103
+ version: 0.51.0
104
+ type: :development
105
+ prerelease: false
106
+ version_requirements: !ruby/object:Gem::Requirement
107
+ requirements:
108
+ - - "~>"
109
+ - !ruby/object:Gem::Version
110
+ version: 0.51.0
111
+ - !ruby/object:Gem::Dependency
112
+ name: codecov
113
+ requirement: !ruby/object:Gem::Requirement
114
+ requirements:
115
+ - - "~>"
116
+ - !ruby/object:Gem::Version
117
+ version: 0.1.4
118
+ type: :development
119
+ prerelease: false
120
+ version_requirements: !ruby/object:Gem::Requirement
121
+ requirements:
122
+ - - "~>"
123
+ - !ruby/object:Gem::Version
124
+ version: 0.1.4
125
+ description: The Rudder ruby analytics library
126
+ email: sumanth@rudderlabs.com
127
+ executables:
128
+ - analytics
129
+ extensions: []
130
+ extra_rdoc_files: []
131
+ files:
132
+ - bin/analytics
133
+ - lib/analytics-ruby.rb
134
+ - lib/segment.rb
135
+ - lib/segment/analytics.rb
136
+ - lib/segment/analytics/backoff_policy.rb
137
+ - lib/segment/analytics/client.rb
138
+ - lib/segment/analytics/defaults.rb
139
+ - lib/segment/analytics/field_parser.rb
140
+ - lib/segment/analytics/logging.rb
141
+ - lib/segment/analytics/message_batch.rb
142
+ - lib/segment/analytics/request.rb
143
+ - lib/segment/analytics/response.rb
144
+ - lib/segment/analytics/utils.rb
145
+ - lib/segment/analytics/version.rb
146
+ - lib/segment/analytics/worker.rb
147
+ homepage: https://github.com/rudderlabs/analytics-ruby
148
+ licenses:
149
+ - MIT
150
+ metadata: {}
151
+ post_install_message:
152
+ rdoc_options: []
153
+ require_paths:
154
+ - lib
155
+ required_ruby_version: !ruby/object:Gem::Requirement
156
+ requirements:
157
+ - - ">="
158
+ - !ruby/object:Gem::Version
159
+ version: '2.0'
160
+ required_rubygems_version: !ruby/object:Gem::Requirement
161
+ requirements:
162
+ - - ">="
163
+ - !ruby/object:Gem::Version
164
+ version: '0'
165
+ requirements: []
166
+ rubyforge_project:
167
+ rubygems_version: 2.5.2.3
168
+ signing_key:
169
+ specification_version: 4
170
+ summary: Rudder analytics library
171
+ test_files: []