analytics-ruby 2.0.13 → 2.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/bin/analytics +108 -0
- data/lib/analytics-ruby.rb +1 -0
- data/lib/segment/analytics/backoff_policy.rb +49 -0
- data/lib/segment/analytics/client.rb +111 -269
- data/lib/segment/analytics/defaults.rb +20 -4
- data/lib/segment/analytics/field_parser.rb +192 -0
- data/lib/segment/analytics/logging.rb +36 -11
- data/lib/segment/analytics/message_batch.rb +72 -0
- data/lib/segment/analytics/response.rb +0 -1
- data/lib/segment/analytics/test_queue.rb +56 -0
- data/lib/segment/analytics/transport.rb +138 -0
- data/lib/segment/analytics/utils.rb +18 -19
- data/lib/segment/analytics/version.rb +1 -1
- data/lib/segment/analytics/worker.rb +20 -11
- data/lib/segment/analytics.rb +15 -6
- metadata +65 -34
- data/Gemfile +0 -2
- data/Gemfile.lock +0 -53
- data/History.md +0 -160
- data/Makefile +0 -8
- data/README.md +0 -91
- data/Rakefile +0 -7
- data/analytics-ruby.gemspec +0 -23
- data/lib/segment/analytics/request.rb +0 -82
- data/spec/segment/analytics/client_spec.rb +0 -291
- data/spec/segment/analytics/worker_spec.rb +0 -96
- data/spec/segment/analytics_spec.rb +0 -95
- data/spec/spec_helper.rb +0 -81
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
-SHA1:
-  metadata.gz:
-  data.tar.gz:
+SHA256:
+  metadata.gz: 51f3f02afed05c9c8461b454e0a3067962a75b8efde0930db0098eebf4a62829
+  data.tar.gz: e7d486d6c6186535e9a005c4d950e6e29d2dd9937f9b6da625191d786f76846a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c2ce09d450de65620eb99f9fae86aa5ce57709d53aa4a93c73c2d3db0b173afad532383a50fe4dcb953df72171b731a263ff7308aed79b646a25bc202c555e4d
+  data.tar.gz: b3625484ebca6117ae79fc42f03a595dff861b8c5ca02a6ef12ea9e933229c1479dad40b4db1d85e61ddf585c7c5a072d68e9cd21ced6b2482db338f45073707
data/bin/analytics
ADDED
@@ -0,0 +1,108 @@
+#!/usr/bin/env ruby
+
+require 'segment/analytics'
+require 'rubygems'
+require 'commander/import'
+require 'time'
+require 'json'
+
+program :name, 'simulator.rb'
+program :version, '0.0.1'
+program :description, 'scripting simulator'
+
+def json_hash(str)
+  if str
+    return JSON.parse(str)
+  end
+end
+
+# analytics -method=<method> -segment-write-key=<segmentWriteKey> [options]
+
+default_command :send
+
+command :send do |c|
+  c.description = 'send a segment message'
+
+  c.option '--writeKey=<writeKey>', String, 'the Segment writeKey'
+  c.option '--type=<type>', String, 'The Segment message type'
+
+  c.option '--userId=<userId>', String, 'the user id to send the event as'
+  c.option '--anonymousId=<anonymousId>', String, 'the anonymous user id to send the event as'
+  c.option '--context=<context>', 'additional context for the event (JSON-encoded)'
+  c.option '--integrations=<integrations>', 'additional integrations for the event (JSON-encoded)'
+
+  c.option '--event=<event>', String, 'the event name to send with the event'
+  c.option '--properties=<properties>', 'the event properties to send (JSON-encoded)'
+
+  c.option '--name=<name>', 'name of the screen or page to send with the message'
+
+  c.option '--traits=<traits>', 'the identify/group traits to send (JSON-encoded)'
+
+  c.option '--groupId=<groupId>', String, 'the group id'
+  c.option '--previousId=<previousId>', String, 'the previous id'
+
+  c.action do |args, options|
+    Analytics = Segment::Analytics.new({
+      write_key: options.writeKey,
+      on_error: Proc.new { |status, msg| print msg }
+    })
+
+    case options.type
+    when "track"
+      Analytics.track({
+        user_id: options.userId,
+        event: options.event,
+        anonymous_id: options.anonymousId,
+        properties: json_hash(options.properties),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "page"
+      Analytics.page({
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        name: options.name,
+        properties: json_hash(options.properties),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "screen"
+      Analytics.screen({
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        name: options.name,
+        properties: json_hash(options.properties),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "identify"
+      Analytics.identify({
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        traits: json_hash(options.traits),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "group"
+      Analytics.group({
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        group_id: options.groupId,
+        traits: json_hash(options.traits),
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    when "alias"
+      Analytics.alias({
+        previous_id: options.previousId,
+        user_id: options.userId,
+        anonymous_id: options.anonymousId,
+        context: json_hash(options.context),
+        integrations: json_hash(options.integrations)
+      })
+    else
+      raise "Invalid Message Type #{options.type}"
+    end
+    Analytics.flush
+  end
+end
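The new executable wraps the library's public calls behind commander options. As an illustration only (the write key, user id, event name and properties below are placeholders, not taken from the diff), a track message could be sent with:

    analytics send --writeKey=YOUR_WRITE_KEY --type=track --userId=user-1234 --event='Order Completed' --properties='{"revenue": 19.99}'

The --properties, --traits, --context and --integrations flags take JSON strings, which json_hash parses before the values are handed to the client, and Analytics.flush ensures the message is delivered before the process exits.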
data/lib/analytics-ruby.rb
ADDED
@@ -0,0 +1 @@
+require 'segment'
data/lib/segment/analytics/backoff_policy.rb
ADDED
@@ -0,0 +1,49 @@
+require 'segment/analytics/defaults'
+
+module Segment
+  class Analytics
+    class BackoffPolicy
+      include Segment::Analytics::Defaults::BackoffPolicy
+
+      # @param [Hash] opts
+      # @option opts [Numeric] :min_timeout_ms The minimum backoff timeout
+      # @option opts [Numeric] :max_timeout_ms The maximum backoff timeout
+      # @option opts [Numeric] :multiplier The value to multiply the current
+      #   interval with for each retry attempt
+      # @option opts [Numeric] :randomization_factor The randomization factor
+      #   to use to create a range around the retry interval
+      def initialize(opts = {})
+        @min_timeout_ms = opts[:min_timeout_ms] || MIN_TIMEOUT_MS
+        @max_timeout_ms = opts[:max_timeout_ms] || MAX_TIMEOUT_MS
+        @multiplier = opts[:multiplier] || MULTIPLIER
+        @randomization_factor = opts[:randomization_factor] || RANDOMIZATION_FACTOR
+
+        @attempts = 0
+      end
+
+      # @return [Numeric] the next backoff interval, in milliseconds.
+      def next_interval
+        interval = @min_timeout_ms * (@multiplier**@attempts)
+        interval = add_jitter(interval, @randomization_factor)
+
+        @attempts += 1
+
+        [interval, @max_timeout_ms].min
+      end
+
+      private
+
+      def add_jitter(base, randomization_factor)
+        random_number = rand
+        max_deviation = base * randomization_factor
+        deviation = random_number * max_deviation
+
+        if random_number < 0.5
+          base - deviation
+        else
+          base + deviation
+        end
+      end
+    end
+  end
+end
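Not part of the diff: a minimal sketch of how the added class behaves, assuming the gem is installed and that require 'segment/analytics' loads BackoffPolicy transitively. With the defaults introduced in defaults.rb further down (min 100 ms, multiplier 1.5, randomization factor 0.5, max 10,000 ms), each interval is roughly 1.5x the previous one, jittered by up to ±50% and capped at the maximum.

    require 'segment/analytics'

    # Build a policy with the same values as Defaults::BackoffPolicy and
    # sample the first few retry intervals.
    policy = Segment::Analytics::BackoffPolicy.new(
      min_timeout_ms: 100,
      max_timeout_ms: 10_000,
      multiplier: 1.5,
      randomization_factor: 0.5
    )

    5.times do |attempt|
      puts format('attempt %d: wait ~%.0f ms', attempt + 1, policy.next_interval)
    end

Because @attempts only ever increments, one instance covers one sequence of retries; a caller would presumably build a fresh policy per request cycle.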
data/lib/segment/analytics/client.rb
CHANGED
@@ -1,39 +1,42 @@
 require 'thread'
 require 'time'
+
+require 'segment/analytics/defaults'
+require 'segment/analytics/logging'
 require 'segment/analytics/utils'
 require 'segment/analytics/worker'
-require 'segment/analytics/defaults'
 
 module Segment
   class Analytics
     class Client
       include Segment::Analytics::Utils
+      include Segment::Analytics::Logging
 
-      #
-      #
-      #
-      #
-      #
-
-
-        symbolize_keys! attrs
+      # @param [Hash] opts
+      # @option opts [String] :write_key Your project's write_key
+      # @option opts [FixNum] :max_queue_size Maximum number of calls to be
+      #   remain queued.
+      # @option opts [Proc] :on_error Handles error calls from the API.
+      def initialize(opts = {})
+        symbolize_keys!(opts)
 
         @queue = Queue.new
-        @
-        @
-        @
+        @test = opts[:test]
+        @write_key = opts[:write_key]
+        @max_queue_size = opts[:max_queue_size] || Defaults::Queue::MAX_SIZE
         @worker_mutex = Mutex.new
-        @worker = Worker.new
+        @worker = Worker.new(@queue, @write_key, opts)
+        @worker_thread = nil
 
         check_write_key!
 
         at_exit { @worker_thread && @worker_thread[:should_exit] = true }
       end
 
-      #
-      # Use only for scripts which are not long-running, and will
-      # specifically exit
+      # Synchronously waits until the worker has flushed the queue.
       #
+      # Use only for scripts which are not long-running, and will specifically
+      # exit
       def flush
         while !@queue.empty? || @worker.is_requesting?
           ensure_worker_running
@@ -41,246 +44,114 @@ module Segment
         end
       end
 
-      #
+      # @!macro common_attrs
+      #   @option attrs [String] :anonymous_id ID for a user when you don't know
+      #     who they are yet. (optional but you must provide either an
+      #     `anonymous_id` or `user_id`)
+      #   @option attrs [Hash] :context ({})
+      #   @option attrs [Hash] :integrations What integrations this event
+      #     goes to (optional)
+      #   @option attrs [String] :message_id ID that uniquely
+      #     identifies a message across the API. (optional)
+      #   @option attrs [Time] :timestamp When the event occurred (optional)
+      #   @option attrs [String] :user_id The ID for this user in your database
+      #     (optional but you must provide either an `anonymous_id` or `user_id`)
+      #   @option attrs [Hash] :options Options such as user traits (optional)
+
+      # Tracks an event
       #
-      #
-      #
-      #
-      #
-      #
-      #
-
-      # :user_id - String of the user id.
-      def track attrs
+      # @see https://segment.com/docs/sources/server/ruby/#track
+      #
+      # @param [Hash] attrs
+      #
+      # @option attrs [String] :event Event name
+      # @option attrs [Hash] :properties Event properties (optional)
+      # @macro common_attrs
+      def track(attrs)
         symbolize_keys! attrs
-
-
-        event = attrs[:event]
-        properties = attrs[:properties] || {}
-        timestamp = attrs[:timestamp] || Time.new
-        context = attrs[:context] || {}
-
-        check_timestamp! timestamp
-
-        if event.nil? || event.empty?
-          fail ArgumentError, 'Must supply event as a non-empty string'
-        end
-
-        fail ArgumentError, 'Properties must be a Hash' unless properties.is_a? Hash
-        isoify_dates! properties
-
-        add_context context
-
-        enqueue({
-          :event => event,
-          :userId => attrs[:user_id],
-          :anonymousId => attrs[:anonymous_id],
-          :context => context,
-          :options => attrs[:options],
-          :integrations => attrs[:integrations],
-          :properties => properties,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'track'
-        })
+        enqueue(FieldParser.parse_for_track(attrs))
       end
 
-      #
+      # Identifies a user
+      #
+      # @see https://segment.com/docs/sources/server/ruby/#identify
       #
-      #
-      #
-      #
-      #
-
-      # :timestamp - Time of when the event occurred. (optional)
-      # :traits - Hash of user traits. (optional)
-      # :user_id - String of the user id
-      def identify attrs
+      # @param [Hash] attrs
+      #
+      # @option attrs [Hash] :traits User traits (optional)
+      # @macro common_attrs
+      def identify(attrs)
         symbolize_keys! attrs
-
-
-        traits = attrs[:traits] || {}
-        timestamp = attrs[:timestamp] || Time.new
-        context = attrs[:context] || {}
-
-        check_timestamp! timestamp
-
-        fail ArgumentError, 'Must supply traits as a hash' unless traits.is_a? Hash
-        isoify_dates! traits
-
-        add_context context
-
-        enqueue({
-          :userId => attrs[:user_id],
-          :anonymousId => attrs[:anonymous_id],
-          :integrations => attrs[:integrations],
-          :context => context,
-          :traits => traits,
-          :options => attrs[:options],
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'identify'
-        })
+        enqueue(FieldParser.parse_for_identify(attrs))
      end
 
-      #
+      # Aliases a user from one id to another
+      #
+      # @see https://segment.com/docs/sources/server/ruby/#alias
       #
-      #
-      #
-      #
-      #
-      # :previous_id - String of the id to alias from
-      # :timestamp - Time of when the alias occured (optional)
-      # :user_id - String of the id to alias to
+      # @param [Hash] attrs
+      #
+      # @option attrs [String] :previous_id The ID to alias from
+      # @macro common_attrs
       def alias(attrs)
         symbolize_keys! attrs
-
-        from = attrs[:previous_id]
-        to = attrs[:user_id]
-        timestamp = attrs[:timestamp] || Time.new
-        context = attrs[:context] || {}
-
-        check_presence! from, 'previous_id'
-        check_presence! to, 'user_id'
-        check_timestamp! timestamp
-        add_context context
-
-        enqueue({
-          :previousId => from,
-          :userId => to,
-          :integrations => attrs[:integrations],
-          :context => context,
-          :options => attrs[:options],
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'alias'
-        })
+        enqueue(FieldParser.parse_for_alias(attrs))
      end
 
-      #
+      # Associates a user identity with a group.
       #
-      #
-      #
-      #
-      #
-      #
-      #
+      # @see https://segment.com/docs/sources/server/ruby/#group
+      #
+      # @param [Hash] attrs
+      #
+      # @option attrs [String] :group_id The ID of the group
+      # @option attrs [Hash] :traits User traits (optional)
+      # @macro common_attrs
      def group(attrs)
        symbolize_keys! attrs
-
-
-        group_id = attrs[:group_id]
-        user_id = attrs[:user_id]
-        traits = attrs[:traits] || {}
-        timestamp = attrs[:timestamp] || Time.new
-        context = attrs[:context] || {}
-
-        fail ArgumentError, '.traits must be a hash' unless traits.is_a? Hash
-        isoify_dates! traits
-
-        check_presence! group_id, 'group_id'
-        check_timestamp! timestamp
-        add_context context
-
-        enqueue({
-          :groupId => group_id,
-          :userId => user_id,
-          :traits => traits,
-          :integrations => attrs[:integrations],
-          :options => attrs[:options],
-          :context => context,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'group'
-        })
+        enqueue(FieldParser.parse_for_group(attrs))
      end
 
-      #
+      # Records a page view
+      #
+      # @see https://segment.com/docs/sources/server/ruby/#page
+      #
+      # @param [Hash] attrs
       #
-      # attrs
-      #
-      #
-      # :context - Hash of context (optional)
-      # :integrations - Hash specifying what integrations this event goes to. (optional)
-      # :name - String name of the page
-      # :options - Hash specifying options such as user traits. (optional)
-      # :properties - Hash of page properties (optional)
-      # :timestamp - Time of when the pageview occured (optional)
-      # :user_id - String of the id to alias from
+      # @option attrs [String] :name Name of the page
+      # @option attrs [Hash] :properties Page properties (optional)
+      # @macro common_attrs
      def page(attrs)
        symbolize_keys! attrs
-
-
-        name = attrs[:name].to_s
-        properties = attrs[:properties] || {}
-        timestamp = attrs[:timestamp] || Time.new
-        context = attrs[:context] || {}
-
-        fail ArgumentError, '.properties must be a hash' unless properties.is_a? Hash
-        isoify_dates! properties
-
-        check_timestamp! timestamp
-        add_context context
-
-        enqueue({
-          :userId => attrs[:user_id],
-          :anonymousId => attrs[:anonymous_id],
-          :name => name,
-          :category => attrs[:category],
-          :properties => properties,
-          :integrations => attrs[:integrations],
-          :options => attrs[:options],
-          :context => context,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'page'
-        })
+        enqueue(FieldParser.parse_for_page(attrs))
      end
-
+
+      # Records a screen view (for a mobile app)
       #
-      #
-      #
-      #
-      #
-      #
-      #
-      # :options - Hash specifying options such as user traits. (optional)
-      # :properties - Hash of screen properties (optional)
-      # :timestamp - Time of when the screen occured (optional)
-      # :user_id - String of the id to alias from
+      # @param [Hash] attrs
+      #
+      # @option attrs [String] :name Name of the screen
+      # @option attrs [Hash] :properties Screen properties (optional)
+      # @option attrs [String] :category The screen category (optional)
+      # @macro common_attrs
      def screen(attrs)
        symbolize_keys! attrs
-
-
-        name = attrs[:name].to_s
-        properties = attrs[:properties] || {}
-        timestamp = attrs[:timestamp] || Time.new
-        context = attrs[:context] || {}
-
-        fail ArgumentError, '.properties must be a hash' unless properties.is_a? Hash
-        isoify_dates! properties
-
-        check_timestamp! timestamp
-        add_context context
-
-        enqueue({
-          :userId => attrs[:user_id],
-          :anonymousId => attrs[:anonymous_id],
-          :name => name,
-          :properties => properties,
-          :category => attrs[:category],
-          :options => attrs[:options],
-          :integrations => attrs[:integrations],
-          :context => context,
-          :timestamp => timestamp.iso8601,
-          :type => 'screen'
-        })
+        enqueue(FieldParser.parse_for_screen(attrs))
      end
 
-      #
-      #
-      # returns Fixnum of messages in the queue
+      # @return [Fixnum] number of messages in the queue
      def queued_messages
        @queue.length
      end
 
+      def test_queue
+        unless @test
+          raise 'Test queue only available when setting :test to true.'
+        end
+
+        @test_queue ||= TestQueue.new
+      end
+
      private
 
      # private: Enqueues the action.
@@ -288,57 +159,28 @@ module Segment
      # returns Boolean of whether the item was added to the queue.
      def enqueue(action)
        # add our request id for tracing purposes
-        action[:messageId]
-
-
+        action[:messageId] ||= uid
+
+        test_queue << action if @test
+
+        if @queue.length < @max_queue_size
          @queue << action
-
-        !queue_full
-      end
+          ensure_worker_running
 
-
-
-
-
-
-
-
-
+          true
+        else
+          logger.warn(
+            'Queue is full, dropping events. The :max_queue_size ' \
+            'configuration parameter can be increased to prevent this from ' \
+            'happening.'
+          )
+          false
        end
      end
 
-      # private: Adds contextual information to the call
-      #
-      # context - Hash of call context
-      def add_context(context)
-        context[:library] = { :name => "analytics-ruby", :version => Segment::Analytics::VERSION.to_s }
-      end
-
      # private: Checks that the write_key is properly initialized
      def check_write_key!
-
-      end
-
-      # private: Checks the timstamp option to make sure it is a Time.
-      def check_timestamp!(timestamp)
-        fail ArgumentError, 'Timestamp must be a Time' unless timestamp.is_a? Time
-      end
-
-      def event attrs
-        symbolize_keys! attrs
-
-        {
-          :userId => user_id,
-          :name => name,
-          :properties => properties,
-          :context => context,
-          :timestamp => datetime_in_iso8601(timestamp),
-          :type => 'screen'
-        }
-      end
-
-      def check_user_id! attrs
-        fail ArgumentError, 'Must supply either user_id or anonymous_id' unless attrs[:user_id] || attrs[:anonymous_id]
+        raise ArgumentError, 'Write key must be initialized' if @write_key.nil?
      end
 
      def ensure_worker_running
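Not part of the diff, but a hedged sketch of the reworked client surface. The write key, ids and event below are placeholders, and the exact TestQueue API comes from the added test_queue.rb, which is not reproduced here:

    require 'segment/analytics'

    client = Segment::Analytics::Client.new(
      write_key: 'YOUR_WRITE_KEY',   # checked by check_write_key!; nil raises ArgumentError
      max_queue_size: 10_000,        # falls back to Defaults::Queue::MAX_SIZE when omitted
      on_error: proc { |status, msg| warn "#{status}: #{msg}" },
      test: true                     # also copies every enqueued message into client.test_queue
    )

    client.track(
      user_id: 'user-1234',
      event: 'Order Completed',
      properties: { revenue: 19.99 }
    )

    puts client.queued_messages   # messages still waiting in the in-memory queue
    client.test_queue             # raises unless :test was set to true
    client.flush                  # drain before a short-lived script exits; with a placeholder
                                  # key the upload fails and on_error is invoked

The per-call validation and payload shaping the old methods did inline (timestamp checks, add_context, key renaming) is now delegated to the new FieldParser (field_parser.rb, +192 lines in the file list), so track, identify, alias, group, page and screen all reduce to enqueue(FieldParser.parse_for_*(attrs)).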
data/lib/segment/analytics/defaults.rb
CHANGED
@@ -6,15 +6,31 @@ module Segment
        PORT = 443
        PATH = '/v1/import'
        SSL = true
-        HEADERS = {
-
-
+        HEADERS = { 'Accept' => 'application/json',
+                    'Content-Type' => 'application/json',
+                    'User-Agent' => "analytics-ruby/#{Analytics::VERSION}" }
+        RETRIES = 10
      end
 
      module Queue
-        BATCH_SIZE = 100
        MAX_SIZE = 10000
      end
+
+      module Message
+        MAX_BYTES = 32768 # 32Kb
+      end
+
+      module MessageBatch
+        MAX_BYTES = 512_000 # 500Kb
+        MAX_SIZE = 100
+      end
+
+      module BackoffPolicy
+        MIN_TIMEOUT_MS = 100
+        MAX_TIMEOUT_MS = 10000
+        MULTIPLIER = 1.5
+        RANDOMIZATION_FACTOR = 0.5
+      end
    end
  end
 end
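These constants drive the new batching and retry behaviour: a single message is capped at 32 KB of JSON, a batch at 100 messages or 500 KB, and the request layer gets RETRIES = 10 attempts paired with the backoff policy shown earlier. A rough, illustrative sketch of reading the limits (the message hash is a placeholder; the actual enforcement lives in the added message_batch.rb and transport.rb, which this diff summary does not reproduce):

    require 'json'
    require 'segment/analytics'

    message = { userId: 'user-1234', event: 'Order Completed', type: 'track' }

    # An individual message must serialize to no more than Message::MAX_BYTES.
    puts message.to_json.bytesize <= Segment::Analytics::Defaults::Message::MAX_BYTES  # => true

    puts Segment::Analytics::Defaults::MessageBatch::MAX_SIZE          # => 100 messages per batch
    puts Segment::Analytics::Defaults::MessageBatch::MAX_BYTES         # => 512000 bytes per batch
    puts Segment::Analytics::Defaults::BackoffPolicy::MIN_TIMEOUT_MS   # => 100 ms before the first retry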