analytics-ruby 2.0.5 → 2.4.0
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +7 -0
- data/bin/analytics +108 -0
- data/lib/analytics-ruby.rb +1 -0
- data/lib/segment/analytics/backoff_policy.rb +49 -0
- data/lib/segment/analytics/client.rb +121 -256
- data/lib/segment/analytics/defaults.rb +20 -4
- data/lib/segment/analytics/field_parser.rb +192 -0
- data/lib/segment/analytics/logging.rb +36 -11
- data/lib/segment/analytics/message_batch.rb +72 -0
- data/lib/segment/analytics/response.rb +0 -1
- data/lib/segment/analytics/test_queue.rb +56 -0
- data/lib/segment/analytics/transport.rb +138 -0
- data/lib/segment/analytics/utils.rb +20 -19
- data/lib/segment/analytics/version.rb +1 -1
- data/lib/segment/analytics/worker.rb +20 -10
- data/lib/segment/analytics.rb +15 -6
- metadata +101 -55
- data/Gemfile +0 -2
- data/Gemfile.lock +0 -60
- data/History.md +0 -124
- data/Makefile +0 -8
- data/README.md +0 -39
- data/Rakefile +0 -7
- data/analytics-ruby.gemspec +0 -23
- data/lib/segment/analytics/request.rb +0 -84
- data/spec/segment/analytics/client_spec.rb +0 -299
- data/spec/segment/analytics/worker_spec.rb +0 -96
- data/spec/segment/analytics_spec.rb +0 -103
- data/spec/spec_helper.rb +0 -81
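At a high level, 2.4.0 replaces the per-method validation in `client.rb` with a shared `FieldParser`, moves HTTP handling from the removed `request.rb` into the new `transport.rb` backed by a jittered `BackoffPolicy`, and adds an opt-in test queue. A minimal usage sketch of the 2.4.0 public API follows; the write key and event values are illustrative placeholders, not taken from this diff.

```ruby
require 'segment/analytics'

analytics = Segment::Analytics.new(
  write_key: 'YOUR_WRITE_KEY', # placeholder
  on_error: proc { |status, msg| warn "segment error #{status}: #{msg}" }
)

analytics.track(
  user_id: 'user-123',
  event: 'Order Completed',
  properties: { total: 42.0, currency: 'USD' }
)

# Synchronously drain the queue; intended for short-lived scripts only.
analytics.flush
```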
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: 51f3f02afed05c9c8461b454e0a3067962a75b8efde0930db0098eebf4a62829
+  data.tar.gz: e7d486d6c6186535e9a005c4d950e6e29d2dd9937f9b6da625191d786f76846a
+SHA512:
+  metadata.gz: c2ce09d450de65620eb99f9fae86aa5ce57709d53aa4a93c73c2d3db0b173afad532383a50fe4dcb953df72171b731a263ff7308aed79b646a25bc202c555e4d
+  data.tar.gz: b3625484ebca6117ae79fc42f03a595dff861b8c5ca02a6ef12ea9e933229c1479dad40b4db1d85e61ddf585c7c5a072d68e9cd21ced6b2482db338f45073707
data/bin/analytics
ADDED
@@ -0,0 +1,108 @@
+#!/usr/bin/env ruby
+
+require 'segment/analytics'
+require 'rubygems'
+require 'commander/import'
+require 'time'
+require 'json'
+
+program :name, 'simulator.rb'
+program :version, '0.0.1'
+program :description, 'scripting simulator'
+
+def json_hash(str)
+  if str
+    return JSON.parse(str)
+  end
+end
+
+# analytics -method=<method> -segment-write-key=<segmentWriteKey> [options]
+
+default_command :send
+
+command :send do |c|
+  c.description = 'send a segment message'
+
+  c.option '--writeKey=<writeKey>', String, 'the Segment writeKey'
+  c.option '--type=<type>', String, 'The Segment message type'
+
+  c.option '--userId=<userId>', String, 'the user id to send the event as'
+  c.option '--anonymousId=<anonymousId>', String, 'the anonymous user id to send the event as'
+  c.option '--context=<context>', 'additional context for the event (JSON-encoded)'
+  c.option '--integrations=<integrations>', 'additional integrations for the event (JSON-encoded)'
+
+  c.option '--event=<event>', String, 'the event name to send with the event'
+  c.option '--properties=<properties>', 'the event properties to send (JSON-encoded)'
+
+  c.option '--name=<name>', 'name of the screen or page to send with the message'
+
+  c.option '--traits=<traits>', 'the identify/group traits to send (JSON-encoded)'
+
+  c.option '--groupId=<groupId>', String, 'the group id'
+  c.option '--previousId=<previousId>', String, 'the previous id'
+
+  c.action do |args, options|
+    Analytics = Segment::Analytics.new({
+      write_key: options.writeKey,
+      on_error: Proc.new { |status, msg| print msg }
+    })
+
+    case options.type
+    when "track"
+      Analytics.track({
+        user_id: options.userId,
+        event: options.event,
+        anonymous_id: options.anonymousId,
+        properties: json_hash(options.properties),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "page"
+      Analytics.page({
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        name: options.name,
+        properties: json_hash(options.properties),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "screen"
+      Analytics.screen({
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        name: options.name,
+        properties: json_hash(options.properties),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "identify"
+      Analytics.identify({
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        traits: json_hash(options.traits),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "group"
+      Analytics.group({
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        group_id: options.groupId,
+        traits: json_hash(options.traits),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "alias"
+      Analytics.alias({
+        previous_id: options.previousId,
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    else
+      raise "Invalid Message Type #{options.type}"
+    end
+    Analytics.flush
+  end
+end
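The `json_hash` helper above is what lets the CLI accept JSON-encoded strings for `--properties`, `--traits`, `--context`, and `--integrations`. A small sketch of the equivalent programmatic call the `send` command ends up making for `--type=track`; the option values and the environment variable are hypothetical.

```ruby
require 'json'
require 'segment/analytics'

# What json_hash does with a JSON-encoded CLI option such as --properties.
properties = JSON.parse('{"plan":"pro","seats":3}') #=> {"plan"=>"pro", "seats"=>3}

analytics = Segment::Analytics.new(
  write_key: ENV['SEGMENT_WRITE_KEY'],          # hypothetical env var
  on_error: proc { |_status, msg| print msg }
)
analytics.track(user_id: 'user-123', event: 'Signed Up', properties: properties)
analytics.flush
```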
data/lib/analytics-ruby.rb
ADDED
@@ -0,0 +1 @@
+require 'segment'
data/lib/segment/analytics/backoff_policy.rb
ADDED
@@ -0,0 +1,49 @@
+require 'segment/analytics/defaults'
+
+module Segment
+  class Analytics
+    class BackoffPolicy
+      include Segment::Analytics::Defaults::BackoffPolicy
+
+      # @param [Hash] opts
+      # @option opts [Numeric] :min_timeout_ms The minimum backoff timeout
+      # @option opts [Numeric] :max_timeout_ms The maximum backoff timeout
+      # @option opts [Numeric] :multiplier The value to multiply the current
+      #   interval with for each retry attempt
+      # @option opts [Numeric] :randomization_factor The randomization factor
+      #   to use to create a range around the retry interval
+      def initialize(opts = {})
+        @min_timeout_ms = opts[:min_timeout_ms] || MIN_TIMEOUT_MS
+        @max_timeout_ms = opts[:max_timeout_ms] || MAX_TIMEOUT_MS
+        @multiplier = opts[:multiplier] || MULTIPLIER
+        @randomization_factor = opts[:randomization_factor] || RANDOMIZATION_FACTOR
+
+        @attempts = 0
+      end
+
+      # @return [Numeric] the next backoff interval, in milliseconds.
+      def next_interval
+        interval = @min_timeout_ms * (@multiplier**@attempts)
+        interval = add_jitter(interval, @randomization_factor)
+
+        @attempts += 1
+
+        [interval, @max_timeout_ms].min
+      end
+
+      private
+
+      def add_jitter(base, randomization_factor)
+        random_number = rand
+        max_deviation = base * randomization_factor
+        deviation = random_number * max_deviation
+
+        if random_number < 0.5
+          base - deviation
+        else
+          base + deviation
+        end
+      end
+    end
+  end
+end
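The policy above yields an exponentially growing interval (multiplier 1.5 by default) with up to ±50% jitter, capped at `MAX_TIMEOUT_MS`. A small sketch of how a caller might drive it; the retry loop is illustrative and not the gem's actual transport code.

```ruby
require 'segment/analytics'

policy = Segment::Analytics::BackoffPolicy.new(
  min_timeout_ms: 100,
  max_timeout_ms: 10_000,
  multiplier: 1.5,
  randomization_factor: 0.5
)

5.times do |attempt|
  wait_ms = policy.next_interval # ~100, ~150, ~225, ... ms, each with +/-50% jitter
  puts "attempt #{attempt + 1}: backing off #{wait_ms.round}ms"
  sleep(wait_ms / 1000.0)
end
```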
data/lib/segment/analytics/client.rb
@@ -1,39 +1,42 @@
 require 'thread'
 require 'time'
+
+require 'segment/analytics/defaults'
+require 'segment/analytics/logging'
 require 'segment/analytics/utils'
 require 'segment/analytics/worker'
-require 'segment/analytics/defaults'
 
 module Segment
   class Analytics
     class Client
       include Segment::Analytics::Utils
+      include Segment::Analytics::Logging
 
-      #
-      #
-      #
-      #
-      #
-
-
-        symbolize_keys! options
+      # @param [Hash] opts
+      # @option opts [String] :write_key Your project's write_key
+      # @option opts [FixNum] :max_queue_size Maximum number of calls to be
+      #   remain queued.
+      # @option opts [Proc] :on_error Handles error calls from the API.
+      def initialize(opts = {})
+        symbolize_keys!(opts)
 
         @queue = Queue.new
-        @
-        @
-        @
+        @test = opts[:test]
+        @write_key = opts[:write_key]
+        @max_queue_size = opts[:max_queue_size] || Defaults::Queue::MAX_SIZE
         @worker_mutex = Mutex.new
-        @worker = Worker.new
+        @worker = Worker.new(@queue, @write_key, opts)
+        @worker_thread = nil
 
         check_write_key!
 
         at_exit { @worker_thread && @worker_thread[:should_exit] = true }
       end
 
-      #
-      # Use only for scripts which are not long-running, and will
-      # specifically exit
+      # Synchronously waits until the worker has flushed the queue.
       #
+      # Use only for scripts which are not long-running, and will specifically
+      # exit
       def flush
         while !@queue.empty? || @worker.is_requesting?
           ensure_worker_running
@@ -41,222 +44,114 @@ module Segment
         end
       end
 
-      #
+      # @!macro common_attrs
+      #   @option attrs [String] :anonymous_id ID for a user when you don't know
+      #     who they are yet. (optional but you must provide either an
+      #     `anonymous_id` or `user_id`)
+      #   @option attrs [Hash] :context ({})
+      #   @option attrs [Hash] :integrations What integrations this event
+      #     goes to (optional)
+      #   @option attrs [String] :message_id ID that uniquely
+      #     identifies a message across the API. (optional)
+      #   @option attrs [Time] :timestamp When the event occurred (optional)
+      #   @option attrs [String] :user_id The ID for this user in your database
+      #     (optional but you must provide either an `anonymous_id` or `user_id`)
+      #   @option attrs [Hash] :options Options such as user traits (optional)
+
+      # Tracks an event
       #
-      #
-      #
-      #
-      #
-      #
-      #
-
-
-
-
-        event = options[:event]
-        properties = options[:properties] || {}
-        timestamp = options[:timestamp] || Time.new
-        context = options[:context] || {}
-
-        check_timestamp! timestamp
-
-        if event.nil? || event.empty?
-          fail ArgumentError, 'Must supply event as a non-empty string'
-        end
-
-        fail ArgumentError, 'Properties must be a Hash' unless properties.is_a? Hash
-        isoify_dates! properties
-
-        add_context context
-
-        enqueue({
-          :event => event,
-          :userId => options[:user_id],
-          :anonymousId => options[:anonymous_id],
-          :context => context,
-          :integrations => options[:integrations],
-          :properties => properties,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'track'
-        })
+      # @see https://segment.com/docs/sources/server/ruby/#track
+      #
+      # @param [Hash] attrs
+      #
+      # @option attrs [String] :event Event name
+      # @option attrs [Hash] :properties Event properties (optional)
+      # @macro common_attrs
+      def track(attrs)
+        symbolize_keys! attrs
+        enqueue(FieldParser.parse_for_track(attrs))
       end
 
-      #
+      # Identifies a user
       #
-      #
-      #
-      #
-      #
-
-
-
-
-        traits = options[:traits] || {}
-        timestamp = options[:timestamp] || Time.new
-        context = options[:context] || {}
-
-        check_timestamp! timestamp
-
-        fail ArgumentError, 'Must supply traits as a hash' unless traits.is_a? Hash
-        isoify_dates! traits
-
-        add_context context
-
-        enqueue({
-          :userId => options[:user_id],
-          :anonymousId => options[:anonymous_id],
-          :integrations => options[:integrations],
-          :context => context,
-          :traits => traits,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'identify'
-        })
+      # @see https://segment.com/docs/sources/server/ruby/#identify
+      #
+      # @param [Hash] attrs
+      #
+      # @option attrs [Hash] :traits User traits (optional)
+      # @macro common_attrs
+      def identify(attrs)
+        symbolize_keys! attrs
+        enqueue(FieldParser.parse_for_identify(attrs))
      end
 
-      #
+      # Aliases a user from one id to another
       #
-      #
-      #
-      #
-      #
-
-
-
-
-        to = options[:user_id]
-        timestamp = options[:timestamp] || Time.new
-        context = options[:context] || {}
-
-        check_presence! from, 'previous_id'
-        check_presence! to, 'user_id'
-        check_timestamp! timestamp
-        add_context context
-
-        enqueue({
-          :previousId => from,
-          :userId => to,
-          :integrations => options[:integrations],
-          :context => context,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'alias'
-        })
+      # @see https://segment.com/docs/sources/server/ruby/#alias
+      #
+      # @param [Hash] attrs
+      #
+      # @option attrs [String] :previous_id The ID to alias from
+      # @macro common_attrs
+      def alias(attrs)
+        symbolize_keys! attrs
+        enqueue(FieldParser.parse_for_alias(attrs))
       end
 
-      #
+      # Associates a user identity with a group.
       #
-      #
-      #
-      #
-      #
-
-
-
-
-
-        user_id = options[:user_id]
-        traits = options[:traits] || {}
-        timestamp = options[:timestamp] || Time.new
-        context = options[:context] || {}
-
-        fail ArgumentError, '.traits must be a hash' unless traits.is_a? Hash
-        isoify_dates! traits
-
-        check_presence! group_id, 'group_id'
-        check_timestamp! timestamp
-        add_context context
-
-        enqueue({
-          :groupId => group_id,
-          :userId => user_id,
-          :traits => traits,
-          :integrations => options[:integrations],
-          :context => context,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'group'
-        })
+      # @see https://segment.com/docs/sources/server/ruby/#group
+      #
+      # @param [Hash] attrs
+      #
+      # @option attrs [String] :group_id The ID of the group
+      # @option attrs [Hash] :traits User traits (optional)
+      # @macro common_attrs
+      def group(attrs)
+        symbolize_keys! attrs
+        enqueue(FieldParser.parse_for_group(attrs))
      end
 
-      #
+      # Records a page view
       #
-      #
-      # :user_id - String of the id to alias from
-      # :name - String name of the page
-      # :properties - Hash of page properties (optional)
-      # :timestamp - Time of when the pageview occured (optional)
-      # :context - Hash of context (optional)
-      def page(options)
-        symbolize_keys! options
-        check_user_id! options
-
-        name = options[:name].to_s
-        properties = options[:properties] || {}
-        timestamp = options[:timestamp] || Time.new
-        context = options[:context] || {}
-
-        fail ArgumentError, '.name must be a string' unless !name.empty?
-        fail ArgumentError, '.properties must be a hash' unless properties.is_a? Hash
-        isoify_dates! properties
-
-        check_timestamp! timestamp
-        add_context context
-
-        enqueue({
-          :userId => options[:user_id],
-          :anonymousId => options[:anonymous_id],
-          :name => name,
-          :properties => properties,
-          :integrations => options[:integrations],
-          :context => context,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'page'
-        })
-      end
-      # public: Records a screen view (for a mobile app)
+      # @see https://segment.com/docs/sources/server/ruby/#page
       #
-      #
-      #
-      #
-      #
-      #
-
-
-
-        check_user_id! options
-
-        name = options[:name].to_s
-        properties = options[:properties] || {}
-        timestamp = options[:timestamp] || Time.new
-        context = options[:context] || {}
-
-        fail ArgumentError, '.name must be a string' if name.empty?
-        fail ArgumentError, '.properties must be a hash' unless properties.is_a? Hash
-        isoify_dates! properties
-
-        check_timestamp! timestamp
-        add_context context
-
-        enqueue({
-          :userId => options[:user_id],
-          :anonymousId => options[:anonymous_id],
-          :name => name,
-          :properties => properties,
-          :integrations => options[:integrations],
-          :context => context,
-          :timestamp => timestamp.iso8601,
-          :type => 'screen'
-        })
+      # @param [Hash] attrs
+      #
+      # @option attrs [String] :name Name of the page
+      # @option attrs [Hash] :properties Page properties (optional)
+      # @macro common_attrs
+      def page(attrs)
+        symbolize_keys! attrs
+        enqueue(FieldParser.parse_for_page(attrs))
      end
 
-      #
+      # Records a screen view (for a mobile app)
+      #
+      # @param [Hash] attrs
       #
-      #
+      # @option attrs [String] :name Name of the screen
+      # @option attrs [Hash] :properties Screen properties (optional)
+      # @option attrs [String] :category The screen category (optional)
+      # @macro common_attrs
+      def screen(attrs)
+        symbolize_keys! attrs
+        enqueue(FieldParser.parse_for_screen(attrs))
+      end
+
+      # @return [Fixnum] number of messages in the queue
      def queued_messages
        @queue.length
      end
 
+      def test_queue
+        unless @test
+          raise 'Test queue only available when setting :test to true.'
+        end
+
+        @test_queue ||= TestQueue.new
+      end
+
      private
 
      # private: Enqueues the action.
@@ -264,57 +159,28 @@ module Segment
      # returns Boolean of whether the item was added to the queue.
      def enqueue(action)
        # add our request id for tracing purposes
-        action[:messageId]
-
-
+        action[:messageId] ||= uid
+
+        test_queue << action if @test
+
+        if @queue.length < @max_queue_size
          @queue << action
-
-          !queue_full
-        end
+          ensure_worker_running
 
-
-
-
-
-
-
-
-
+          true
+        else
+          logger.warn(
+            'Queue is full, dropping events. The :max_queue_size ' \
+            'configuration parameter can be increased to prevent this from ' \
+            'happening.'
+          )
+          false
        end
      end
 
-      # private: Adds contextual information to the call
-      #
-      # context - Hash of call context
-      def add_context(context)
-        context[:library] = { :name => "analytics-ruby", :version => Segment::Analytics::VERSION.to_s }
-      end
-
      # private: Checks that the write_key is properly initialized
      def check_write_key!
-
-      end
-
-      # private: Checks the timstamp option to make sure it is a Time.
-      def check_timestamp!(timestamp)
-        fail ArgumentError, 'Timestamp must be a Time' unless timestamp.is_a? Time
-      end
-
-      def event attrs
-        symbolize_keys! attrs
-
-        {
-          :userId => user_id,
-          :name => name,
-          :properties => properties,
-          :context => context,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'screen'
-        }
-      end
-
-      def check_user_id! options
-        fail ArgumentError, 'Must supply either user_id or anonymous_id' unless options[:user_id] || options[:anonymous_id]
+        raise ArgumentError, 'Write key must be initialized' if @write_key.nil?
      end
 
      def ensure_worker_running
@@ -333,4 +199,3 @@ module Segment
    end
  end
end
-
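Beyond the `FieldParser` refactor, the client gains an opt-in `:test` mode: every enqueued message is also copied into an in-memory `TestQueue` (see `test_queue.rb`, added in this release), so tests can assert on messages without stubbing HTTP. A hedged sketch, with placeholder values:

```ruby
require 'segment/analytics'

# Messages still go onto the normal queue; :test => true additionally copies
# them into client.test_queue. The write key is a placeholder.
client = Segment::Analytics::Client.new(write_key: 'WRITE_KEY', test: true)

client.track(user_id: 'user-123', event: 'Plan Upgraded')
client.identify(user_id: 'user-123', traits: { plan: 'pro' })

captured = client.test_queue # TestQueue instance holding the two messages above
```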
data/lib/segment/analytics/defaults.rb
@@ -6,15 +6,31 @@ module Segment
        PORT = 443
        PATH = '/v1/import'
        SSL = true
-        HEADERS = {
-
-
+        HEADERS = { 'Accept' => 'application/json',
+                    'Content-Type' => 'application/json',
+                    'User-Agent' => "analytics-ruby/#{Analytics::VERSION}" }
+        RETRIES = 10
      end
 
      module Queue
-        BATCH_SIZE = 100
        MAX_SIZE = 10000
      end
+
+      module Message
+        MAX_BYTES = 32768 # 32Kb
+      end
+
+      module MessageBatch
+        MAX_BYTES = 512_000 # 500Kb
+        MAX_SIZE = 100
+      end
+
+      module BackoffPolicy
+        MIN_TIMEOUT_MS = 100
+        MAX_TIMEOUT_MS = 10000
+        MULTIPLIER = 1.5
+        RANDOMIZATION_FACTOR = 0.5
+      end
    end
  end
end
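The new `Message` and `MessageBatch` constants encode the payload limits the worker batches against: a single message may be at most 32 KB of JSON, and a batch at most 100 messages or 500 KB (the actual enforcement lives in the new `message_batch.rb`, which is not shown in this section). A rough illustration of the per-message check these constants imply:

```ruby
require 'json'
require 'segment/analytics'

message = { userId: 'user-123', event: 'Order Completed', type: 'track' }

# Illustrative only: message_batch.rb applies the real checks when batching.
max_bytes = Segment::Analytics::Defaults::Message::MAX_BYTES
puts "within per-message limit? #{message.to_json.bytesize <= max_bytes}"
```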