posthog-ruby 1.0.0.pre
- checksums.yaml +7 -0
- data/bin/posthog +91 -0
- data/lib/posthog/backoff_policy.rb +47 -0
- data/lib/posthog/client.rb +134 -0
- data/lib/posthog/defaults.rb +34 -0
- data/lib/posthog/field_parser.rb +112 -0
- data/lib/posthog/logging.rb +59 -0
- data/lib/posthog/message_batch.rb +70 -0
- data/lib/posthog/response.rb +14 -0
- data/lib/posthog/transport.rb +143 -0
- data/lib/posthog/utils.rb +90 -0
- data/lib/posthog/version.rb +3 -0
- data/lib/posthog/worker.rb +68 -0
- data/lib/posthog-ruby.rb +1 -0
- data/lib/posthog.rb +9 -0
- metadata +170 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: 88d2c921b1cdccea3548414fd9d7f95fb0a4d99f
  data.tar.gz: 8167ab967256d9f487cc29ba9b937f7c96839793
SHA512:
  metadata.gz: 8f4acbdd445e76ca520a1b821645ead4c1be6ddc3bc0f3e385b75cadbf80162911daa9b02887a6f08020c07e84edc095d94ce0ae1c670b3de8dc11c2c83f76e7
  data.tar.gz: 07fe6c72ca3be46ed0b85cb32b7b740ddbed80dc65407ddf3e2e49606e03a90290db3800f491b79bc776fbd0cdf64f5ef21b1b49149774f99d28d7f03f14a3de
data/bin/posthog
ADDED
@@ -0,0 +1,91 @@
#!/usr/bin/env ruby

require 'posthog'
require 'rubygems'
require 'commander/import'
require 'time'
require 'json'

program :name, 'posthog'
program :version, '1.0.0'
program :description, 'PostHog API'

def json_hash(str)
  if str
    return JSON.parse(str)
  end
end

command :capture do |c|
  c.description = 'capture an event'

  c.option '--api-key=<string>', String, 'The PostHog API Key'
  c.option '--api-host=<url>', String, 'The PostHog API URL host part (scheme+domain)'
  c.option '--distinct-id=<distinct_id>', String, 'The distinct id to send the event as'
  c.option '--event=<event>', String, 'The event name to send with the event'
  c.option '--properties=<properties>', 'The properties to send (JSON-encoded)'

  c.action do |args, options|
    posthog = PostHog::Client.new({
      api_key: options.api_key,
      api_host: options.api_host,
      on_error: Proc.new { |status, msg| print msg }
    })

    posthog.capture({
      distinct_id: options.distinct_id,
      event: options.event,
      properties: json_hash(options.properties)
    })

    posthog.flush
  end
end

command :identify do |c|
  c.description = 'identify the user'

  c.option '--api-key=<api_key>', String, 'The PostHog API Key'
  c.option '--api-host=<url>', String, 'The PostHog API URL host part (scheme+domain)'
  c.option '--distinct-id=<distinct_id>', String, 'The distinct id to send the event as'
  c.option '--properties=<properties>', 'The properties to send (JSON-encoded)'

  c.action do |args, options|
    posthog = PostHog::Client.new({
      api_key: options.api_key,
      api_host: options.api_host,
      on_error: Proc.new { |status, msg| print msg }
    })

    posthog.identify({
      distinct_id: options.distinct_id,
      properties: json_hash(options.properties)
    })

    posthog.flush
  end
end

command :alias do |c|
  c.description = 'set an alias for a distinct id'

  c.option '--api-key=<api_key>', String, 'The PostHog API Key'
  c.option '--api-host=<url>', String, 'The PostHog API URL host part (scheme+domain)'
  c.option '--distinct-id=<distinct_id>', String, 'The distinct id'
  c.option '--alias=<alias>', 'The alias to give to the distinct id'

  c.action do |args, options|
    posthog = PostHog::Client.new({
      api_key: options.api_key,
      api_host: options.api_host,
      on_error: Proc.new { |status, msg| print msg }
    })

    posthog.alias({
      distinct_id: options.distinct_id,
      alias: options.alias,
    })

    posthog.flush
  end
end
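For orientation, an invocation of this executable might look like `posthog capture --api-key=<key> --distinct-id=user_123 --event=purchase --properties='{"plan": "pro"}'` (placeholder values); each subcommand simply builds a PostHog::Client, sends a single message, and flushes it.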
data/lib/posthog/backoff_policy.rb
ADDED
@@ -0,0 +1,47 @@
require 'posthog/defaults'

class PostHog
  class BackoffPolicy
    include PostHog::Defaults::BackoffPolicy

    # @param [Hash] opts
    # @option opts [Numeric] :min_timeout_ms The minimum backoff timeout
    # @option opts [Numeric] :max_timeout_ms The maximum backoff timeout
    # @option opts [Numeric] :multiplier The value to multiply the current
    #   interval with for each retry attempt
    # @option opts [Numeric] :randomization_factor The randomization factor
    #   to use to create a range around the retry interval
    def initialize(opts = {})
      @min_timeout_ms = opts[:min_timeout_ms] || MIN_TIMEOUT_MS
      @max_timeout_ms = opts[:max_timeout_ms] || MAX_TIMEOUT_MS
      @multiplier = opts[:multiplier] || MULTIPLIER
      @randomization_factor = opts[:randomization_factor] || RANDOMIZATION_FACTOR

      @attempts = 0
    end

    # @return [Numeric] the next backoff interval, in milliseconds.
    def next_interval
      interval = @min_timeout_ms * (@multiplier**@attempts)
      interval = add_jitter(interval, @randomization_factor)

      @attempts += 1

      [interval, @max_timeout_ms].min
    end

    private

    def add_jitter(base, randomization_factor)
      random_number = rand
      max_deviation = base * randomization_factor
      deviation = random_number * max_deviation

      if random_number < 0.5
        base - deviation
      else
        base + deviation
      end
    end
  end
end
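As a rough sketch of how this policy behaves (not part of the gem; the option values are chosen purely for illustration), disabling the jitter makes the exponential growth easy to see:

  # Sketch: deterministic intervals with jitter disabled.
  policy = PostHog::BackoffPolicy.new(min_timeout_ms: 100, multiplier: 2, randomization_factor: 0)
  policy.next_interval  # => 100
  policy.next_interval  # => 200
  policy.next_interval  # => 400, and so on, capped at :max_timeout_ms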
data/lib/posthog/client.rb
ADDED
@@ -0,0 +1,134 @@
require 'thread'
require 'time'

require 'posthog/defaults'
require 'posthog/logging'
require 'posthog/utils'
require 'posthog/worker'

class PostHog
  class Client
    include PostHog::Utils
    include PostHog::Logging

    # @param [Hash] opts
    # @option opts [String] :api_key Your project's api_key
    # @option opts [FixNum] :max_queue_size Maximum number of calls to be
    #   remain queued.
    # @option opts [Proc] :on_error Handles error calls from the API.
    def initialize(opts = {})
      symbolize_keys!(opts)

      @queue = Queue.new
      @api_key = opts[:api_key]
      @max_queue_size = opts[:max_queue_size] || Defaults::Queue::MAX_SIZE
      @worker_mutex = Mutex.new
      @worker = Worker.new(@queue, @api_key, opts)
      @worker_thread = nil

      check_api_key!

      at_exit { @worker_thread && @worker_thread[:should_exit] = true }
    end

    # Synchronously waits until the worker has flushed the queue.
    #
    # Use only for scripts which are not long-running, and will specifically
    # exit
    def flush
      while !@queue.empty? || @worker.is_requesting?
        ensure_worker_running
        sleep(0.1)
      end
    end

    # @!macro common_attrs
    #   @option attrs [String] :message_id ID that uniquely
    #     identifies a message across the API. (optional)
    #   @option attrs [Time] :timestamp When the event occurred (optional)
    #   @option attrs [String] :distinct_id The ID for this user in your database

    # Captures an event
    #
    # @param [Hash] attrs
    #
    # @option attrs [String] :event Event name
    # @option attrs [Hash] :properties Event properties (optional)
    # @macro common_attrs
    def capture(attrs)
      symbolize_keys! attrs
      enqueue(FieldParser.parse_for_capture(attrs))
    end

    # Identifies a user
    #
    # @param [Hash] attrs
    #
    # @option attrs [Hash] :properties User properties (optional)
    # @macro common_attrs
    def identify(attrs)
      symbolize_keys! attrs
      enqueue(FieldParser.parse_for_identify(attrs))
    end

    # Aliases a user from one id to another
    #
    # @param [Hash] attrs
    #
    # @option attrs [String] :alias The alias to give the distinct id
    # @macro common_attrs
    def alias(attrs)
      symbolize_keys! attrs
      enqueue(FieldParser.parse_for_alias(attrs))
    end

    # @return [Fixnum] number of messages in the queue
    def queued_messages
      @queue.length
    end

    private

    # private: Enqueues the action.
    #
    # returns Boolean of whether the item was added to the queue.
    def enqueue(action)
      # add our request id for tracing purposes
      action[:messageId] ||= uid

      if @queue.length < @max_queue_size
        @queue << action
        ensure_worker_running

        true
      else
        logger.warn(
          'Queue is full, dropping events. The :max_queue_size ' \
          'configuration parameter can be increased to prevent this from ' \
          'happening.'
        )
        false
      end
    end

    # private: Checks that the api_key is properly initialized
    def check_api_key!
      raise ArgumentError, 'API key must be initialized' if @api_key.nil?
    end

    def ensure_worker_running
      return if worker_running?
      @worker_mutex.synchronize do
        return if worker_running?
        @worker_thread = Thread.new do
          @worker.run
        end
      end
    end

    def worker_running?
      @worker_thread && @worker_thread.alive?
    end
  end
end
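For reference, a minimal usage sketch of the client defined above (the API key, ids, and event names are placeholders):

  posthog = PostHog::Client.new(api_key: 'phc_example', on_error: proc { |status, msg| puts msg })

  posthog.capture(distinct_id: 'user_123', event: 'purchase', properties: { plan: 'pro' })
  posthog.identify(distinct_id: 'user_123', properties: { email: 'user@example.com' })
  posthog.alias(distinct_id: 'user_123', alias: 'anonymous_456')

  posthog.flush  # block until the background worker has drained the queue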
data/lib/posthog/defaults.rb
ADDED
@@ -0,0 +1,34 @@
class PostHog
  module Defaults
    module Request
      HOST = 't.posthog.com'
      PORT = 443
      PATH = '/batch/'
      SSL = true
      HEADERS = { 'Accept' => 'application/json',
                  'Content-Type' => 'application/json',
                  'User-Agent' => "posthog-ruby/#{PostHog::VERSION}" }
      RETRIES = 10
    end

    module Queue
      MAX_SIZE = 10000
    end

    module Message
      MAX_BYTES = 32768 # 32Kb
    end

    module MessageBatch
      MAX_BYTES = 512_000 # 500Kb
      MAX_SIZE = 100
    end

    module BackoffPolicy
      MIN_TIMEOUT_MS = 100
      MAX_TIMEOUT_MS = 10000
      MULTIPLIER = 1.5
      RANDOMIZATION_FACTOR = 0.5
    end
  end
end
data/lib/posthog/field_parser.rb
ADDED
@@ -0,0 +1,112 @@
class PostHog
  class FieldParser
    class << self
      include PostHog::Utils

      # In addition to the common fields, capture accepts:
      #
      # - "event"
      # - "properties"
      def parse_for_capture(fields)
        common = parse_common_fields(fields)

        event = fields[:event]
        properties = fields[:properties] || {}

        check_presence!(event, 'event')
        check_is_hash!(properties, 'properties')

        isoify_dates! properties

        common.merge({
          :type => 'capture',
          :event => event.to_s,
          :properties => properties
        })
      end

      # In addition to the common fields, identify accepts:
      #
      # - "properties"
      def parse_for_identify(fields)
        common = parse_common_fields(fields)

        properties = fields[:properties] || {}
        check_is_hash!(properties, 'properties')

        isoify_dates! properties

        common.merge({
          :type => 'identify',
          :event => '$identify',
          :'$set' => properties
        })
      end

      # In addition to the common fields, alias accepts:
      #
      # - "alias"
      def parse_for_alias(fields)
        common = parse_common_fields(fields)

        distinct_id = common[:distinct_id] # must move to properties...

        alias_field = fields[:alias]
        check_presence! alias_field, 'alias'

        common.merge({
          :type => 'alias',
          :event => '$create_alias',
          :distinct_id => nil,
          :properties => {
            :distinct_id => distinct_id,
            :alias => alias_field,
          }
        })
      end

      private

      # Common fields are:
      #
      # - "timestamp"
      # - "distinct_id"
      # - "message_id"
      def parse_common_fields(fields)
        timestamp = fields[:timestamp] || Time.new
        distinct_id = fields[:distinct_id]
        message_id = fields[:message_id].to_s if fields[:message_id]

        check_timestamp! timestamp
        check_presence! distinct_id, 'distinct_id'

        parsed = {
          :timestamp => datetime_in_iso8601(timestamp),
          :library => 'posthog-ruby',
          :library_version => PostHog::VERSION.to_s,
          :messageId => message_id,
          :distinct_id => distinct_id
        }
        parsed
      end

      def check_timestamp!(timestamp)
        raise ArgumentError, 'Timestamp must be a Time' unless timestamp.is_a? Time
      end

      # private: Ensures that a string is non-empty
      #
      # obj - String|Number that must be non-blank
      # name - Name of the validated value
      def check_presence!(obj, name)
        if obj.nil? || (obj.is_a?(String) && obj.empty?)
          raise ArgumentError, "#{name} must be given"
        end
      end

      def check_is_hash!(obj, name)
        raise ArgumentError, "#{name} must be a Hash" unless obj.is_a? Hash
      end
    end
  end
end
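To make the resulting payload shape concrete, here is a sketch of what parse_for_capture returns for a simple input (the timestamp and library version are placeholder values; messageId is filled in later by Client#enqueue):

  PostHog::FieldParser.parse_for_capture(
    distinct_id: 'user_123',
    event: 'purchase',
    properties: { plan: 'pro' }
  )
  # => { :timestamp => "2020-02-20T12:00:00.000Z",  # placeholder
  #      :library => "posthog-ruby",
  #      :library_version => "1.0.0.pre",           # PostHog::VERSION
  #      :messageId => nil,
  #      :distinct_id => "user_123",
  #      :type => "capture",
  #      :event => "purchase",
  #      :properties => { :plan => "pro" } }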
data/lib/posthog/logging.rb
ADDED
@@ -0,0 +1,59 @@
require 'logger'

class PostHog
  # Wraps an existing logger and adds a prefix to all messages
  class PrefixedLogger
    def initialize(logger, prefix)
      @logger = logger
      @prefix = prefix
    end

    def debug(msg)
      @logger.debug("#{@prefix} #{msg}")
    end

    def info(msg)
      @logger.info("#{@prefix} #{msg}")
    end

    def warn(msg)
      @logger.warn("#{@prefix} #{msg}")
    end

    def error(msg)
      @logger.error("#{@prefix} #{msg}")
    end
  end

  module Logging
    class << self
      def logger
        return @logger if @logger

        base_logger = if defined?(Rails)
                        Rails.logger
                      else
                        logger = Logger.new STDOUT
                        logger.progname = 'PostHog'
                        logger
                      end
        @logger = PrefixedLogger.new(base_logger, '[posthog-ruby]')
      end

      attr_writer :logger
    end

    def self.included(base)
      class << base
        def logger
          Logging.logger
        end
      end
    end

    def logger
      Logging.logger
    end
  end
end
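Because the Logging module exposes a writer for its singleton logger, a host application can route the library's output wherever it likes; a minimal sketch (the log file path is illustrative):

  custom = Logger.new('log/posthog.log')
  PostHog::Logging.logger = PostHog::PrefixedLogger.new(custom, '[posthog-ruby]')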
data/lib/posthog/message_batch.rb
ADDED
@@ -0,0 +1,70 @@
require 'forwardable'
require 'posthog/logging'

class PostHog
  # A batch of `Message`s to be sent to the API
  class MessageBatch
    class JSONGenerationError < StandardError; end

    extend Forwardable
    include PostHog::Logging
    include PostHog::Defaults::MessageBatch

    def initialize(max_message_count)
      @messages = []
      @max_message_count = max_message_count
      @json_size = 0
    end

    def <<(message)
      begin
        message_json = message.to_json
      rescue StandardError => e
        raise JSONGenerationError, "Serialization error: #{e}"
      end

      message_json_size = message_json.bytesize
      if message_too_big?(message_json_size)
        logger.error('a message exceeded the maximum allowed size')
      else
        @messages << message
        @json_size += message_json_size + 1 # One byte for the comma
      end
    end

    def full?
      item_count_exhausted? || size_exhausted?
    end

    def clear
      @messages.clear
      @json_size = 0
    end

    def_delegators :@messages, :to_json
    def_delegators :@messages, :empty?
    def_delegators :@messages, :length

    private

    def item_count_exhausted?
      @messages.length >= @max_message_count
    end

    def message_too_big?(message_json_size)
      message_json_size > Defaults::Message::MAX_BYTES
    end

    # We consider the max size here as just enough to leave room for one more
    # message of the largest size possible. This is a shortcut that allows us
    # to use a native Ruby `Queue` that doesn't allow peeking. The tradeoff
    # here is that we might fit in less messages than possible into a batch.
    #
    # The alternative is to use our own `Queue` implementation that allows
    # peeking, and to consider the next message size when calculating whether
    # the message can be accomodated in this batch.
    def size_exhausted?
      @json_size >= (MAX_BYTES - Defaults::Message::MAX_BYTES)
    end
  end
end
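A short sketch of the batch in use (values illustrative): messages are appended until either the item count or the byte budget is exhausted, and a single over-sized message is dropped with an error log rather than raising.

  batch = PostHog::MessageBatch.new(100)  # 100 is Defaults::MessageBatch::MAX_SIZE
  batch << { 'event' => 'purchase', 'distinct_id' => 'user_123' }
  batch.length  # => 1
  batch.full?   # => false until 100 items or roughly 480KB of JSON accumulate
  batch.clear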
data/lib/posthog/transport.rb
ADDED
@@ -0,0 +1,143 @@
require 'posthog/defaults'
require 'posthog/utils'
require 'posthog/response'
require 'posthog/logging'
require 'posthog/backoff_policy'
require 'net/http'
require 'net/https'
require 'json'

class PostHog
  class Transport
    include PostHog::Defaults::Request
    include PostHog::Utils
    include PostHog::Logging

    def initialize(options = {})
      if options[:api_host]
        uri = URI.parse(options[:api_host])
        options[:host] = uri.host
        options[:ssl] = uri.scheme == 'https'
        options[:port] = uri.port
      end
      options[:host] ||= HOST
      options[:port] ||= PORT
      options[:ssl] ||= SSL
      @headers = options[:headers] || HEADERS
      @path = options[:path] || PATH
      @retries = options[:retries] || RETRIES
      @backoff_policy =
        options[:backoff_policy] || PostHog::BackoffPolicy.new

      http = Net::HTTP.new(options[:host], options[:port])
      http.use_ssl = options[:ssl]
      http.read_timeout = 8
      http.open_timeout = 4

      @http = http
    end

    # Sends a batch of messages to the API
    #
    # @return [Response] API response
    def send(api_key, batch)
      logger.debug("Sending request for #{batch.length} items")

      last_response, exception = retry_with_backoff(@retries) do
        status_code, body = send_request(api_key, batch)
        error = JSON.parse(body)['error']
        should_retry = should_retry_request?(status_code, body)
        logger.debug("Response status code: #{status_code}")
        logger.debug("Response error: #{error}") if error

        [Response.new(status_code, error), should_retry]
      end

      if exception
        logger.error(exception.message)
        exception.backtrace.each { |line| logger.error(line) }
        Response.new(-1, exception.to_s)
      else
        last_response
      end
    end

    # Closes a persistent connection if it exists
    def shutdown
      @http.finish if @http.started?
    end

    private

    def should_retry_request?(status_code, body)
      if status_code >= 500
        true # Server error
      elsif status_code == 429
        true # Rate limited
      elsif status_code >= 400
        logger.error(body)
        false # Client error. Do not retry, but log
      else
        false
      end
    end

    # Takes a block that returns [result, should_retry].
    #
    # Retries upto `retries_remaining` times, if `should_retry` is false or
    # an exception is raised. `@backoff_policy` is used to determine the
    # duration to sleep between attempts
    #
    # Returns [last_result, raised_exception]
    def retry_with_backoff(retries_remaining, &block)
      result, caught_exception = nil
      should_retry = false

      begin
        result, should_retry = yield
        return [result, nil] unless should_retry
      rescue StandardError => e
        should_retry = true
        caught_exception = e
      end

      if should_retry && (retries_remaining > 1)
        logger.debug("Retrying request, #{retries_remaining} retries left")
        sleep(@backoff_policy.next_interval.to_f / 1000)
        retry_with_backoff(retries_remaining - 1, &block)
      else
        [result, caught_exception]
      end
    end

    # Sends a request for the batch, returns [status_code, body]
    def send_request(api_key, batch)
      payload = JSON.generate(
        api_key: api_key,
        batch: batch
      )

      request = Net::HTTP::Post.new(@path, @headers)

      if self.class.stub
        logger.debug "stubbed request to #{@path}: " \
          "api key = #{api_key}, batch = #{JSON.generate(batch)}"

        [200, '{}']
      else
        @http.start unless @http.started? # Maintain a persistent connection
        response = @http.request(request, payload)
        [response.code.to_i, response.body]
      end
    end

    class << self
      attr_writer :stub

      def stub
        @stub || ENV['STUB']
      end
    end
  end
end
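The class-level stub switch above is handy in tests; a sketch of exercising the transport without making a live HTTP call (the API key is a placeholder):

  PostHog::Transport.stub = true
  transport = PostHog::Transport.new
  response = transport.send('phc_example', [{ event: 'test', distinct_id: 'user_123' }])
  response.status  # => 200
  response.error   # => nil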
data/lib/posthog/utils.rb
ADDED
@@ -0,0 +1,90 @@
require 'securerandom'

class PostHog
  module Utils
    extend self

    # public: Return a new hash with keys converted from strings to symbols
    #
    def symbolize_keys(hash)
      hash.each_with_object({}) do |(k, v), memo|
        memo[k.to_sym] = v
      end
    end

    # public: Convert hash keys from strings to symbols in place
    #
    def symbolize_keys!(hash)
      hash.replace symbolize_keys hash
    end

    # public: Return a new hash with keys as strings
    #
    def stringify_keys(hash)
      hash.each_with_object({}) do |(k, v), memo|
        memo[k.to_s] = v
      end
    end

    # public: Returns a new hash with all the date values in the into iso8601
    #         strings
    #
    def isoify_dates(hash)
      hash.each_with_object({}) do |(k, v), memo|
        memo[k] = datetime_in_iso8601(v)
      end
    end

    # public: Converts all the date values in the into iso8601 strings in place
    #
    def isoify_dates!(hash)
      hash.replace isoify_dates hash
    end

    # public: Returns a uid string
    #
    def uid
      arr = SecureRandom.random_bytes(16).unpack('NnnnnN')
      arr[2] = (arr[2] & 0x0fff) | 0x4000
      arr[3] = (arr[3] & 0x3fff) | 0x8000
      '%08x-%04x-%04x-%04x-%04x%08x' % arr
    end

    def datetime_in_iso8601(datetime)
      case datetime
      when Time
        time_in_iso8601 datetime
      when DateTime
        time_in_iso8601 datetime.to_time
      when Date
        date_in_iso8601 datetime
      else
        datetime
      end
    end

    def time_in_iso8601(time, fraction_digits = 3)
      fraction = if fraction_digits > 0
                   ('.%06i' % time.usec)[0, fraction_digits + 1]
                 end

      "#{time.strftime('%Y-%m-%dT%H:%M:%S')}#{fraction}#{formatted_offset(time, true, 'Z')}"
    end

    def date_in_iso8601(date)
      date.strftime('%F')
    end

    def formatted_offset(time, colon = true, alternate_utc_string = nil)
      time.utc? && alternate_utc_string || seconds_to_utc_offset(time.utc_offset, colon)
    end

    def seconds_to_utc_offset(seconds, colon = true)
      (colon ? UTC_OFFSET_WITH_COLON : UTC_OFFSET_WITHOUT_COLON) % [(seconds < 0 ? '-' : '+'), (seconds.abs / 3600), ((seconds.abs % 3600) / 60)]
    end

    UTC_OFFSET_WITH_COLON = '%s%02d:%02d'
    UTC_OFFSET_WITHOUT_COLON = UTC_OFFSET_WITH_COLON.sub(':', '')
  end
end
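A few of these helpers in action (a sketch; the uid shown is just an example of the format produced):

  PostHog::Utils.symbolize_keys('event' => 'purchase')       # => { :event => "purchase" }
  PostHog::Utils.datetime_in_iso8601(Time.utc(2020, 2, 20))  # => "2020-02-20T00:00:00.000Z"
  PostHog::Utils.uid                                          # => e.g. "b3f0c3e2-5f3a-4d0e-9c1a-0e4b6a8d2f10"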
data/lib/posthog/worker.rb
ADDED
@@ -0,0 +1,68 @@
require 'posthog/defaults'
require 'posthog/message_batch'
require 'posthog/transport'
require 'posthog/utils'

class PostHog
  class Worker
    include PostHog::Utils
    include PostHog::Defaults
    include PostHog::Logging

    # public: Creates a new worker
    #
    # The worker continuously takes messages off the queue
    # and makes requests to the posthog.com api
    #
    # queue - Queue synchronized between client and worker
    # api_key - String of the project's API key
    # options - Hash of worker options
    #   batch_size - Fixnum of how many items to send in a batch
    #   on_error - Proc of what to do on an error
    #
    def initialize(queue, api_key, options = {})
      symbolize_keys! options
      @queue = queue
      @api_key = api_key
      @on_error = options[:on_error] || proc { |status, error| }
      batch_size = options[:batch_size] || Defaults::MessageBatch::MAX_SIZE
      @batch = MessageBatch.new(batch_size)
      @lock = Mutex.new
      @transport = Transport.new api_host: options[:api_host]
    end

    # public: Continuously runs the loop to check for new events
    #
    def run
      until Thread.current[:should_exit]
        return if @queue.empty?

        @lock.synchronize do
          consume_message_from_queue! until @batch.full? || @queue.empty?
        end

        res = @transport.send @api_key, @batch
        @on_error.call(res.status, res.error) unless res.status == 200

        @lock.synchronize { @batch.clear }
      end
    ensure
      @transport.shutdown
    end

    # public: Check whether we have outstanding requests.
    #
    def is_requesting?
      @lock.synchronize { !@batch.empty? }
    end

    private

    def consume_message_from_queue!
      @batch << @queue.pop
    rescue MessageBatch::JSONGenerationError => e
      @on_error.call(-1, e.to_s)
    end
  end
end
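The client normally drives this worker on a background thread, but the wiring can be sketched directly (placeholder API key; stubbing the transport avoids a live HTTP call):

  PostHog::Transport.stub = true
  queue = Queue.new
  queue << { type: 'capture', event: 'purchase', distinct_id: 'user_123' }

  worker = PostHog::Worker.new(queue, 'phc_example', on_error: proc { |status, error| warn error })
  worker.run  # drains the queue, sending batches through Transport, then returns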
data/lib/posthog-ruby.rb
ADDED
@@ -0,0 +1 @@
require 'posthog'
data/lib/posthog.rb
ADDED
metadata
ADDED
@@ -0,0 +1,170 @@
--- !ruby/object:Gem::Specification
name: posthog-ruby
version: !ruby/object:Gem::Version
  version: 1.0.0.pre
platform: ruby
authors:
- ''
autorequire:
bindir: bin
cert_chain: []
date: 2020-02-20 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: commander
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '4.4'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '4.4'
- !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '10.3'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '10.3'
- !ruby/object:Gem::Dependency
  name: rspec
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.0'
- !ruby/object:Gem::Dependency
  name: tzinfo
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - '='
      - !ruby/object:Gem::Version
        version: 1.2.1
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - '='
      - !ruby/object:Gem::Version
        version: 1.2.1
- !ruby/object:Gem::Dependency
  name: activesupport
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 4.1.11
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 4.1.11
- !ruby/object:Gem::Dependency
  name: oj
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 3.6.2
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 3.6.2
- !ruby/object:Gem::Dependency
  name: rubocop
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.51.0
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.51.0
- !ruby/object:Gem::Dependency
  name: codecov
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.1.4
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.1.4
description: The PostHog ruby library
email: hey@posthog.com
executables:
- posthog
extensions: []
extra_rdoc_files: []
files:
- bin/posthog
- lib/posthog-ruby.rb
- lib/posthog.rb
- lib/posthog/backoff_policy.rb
- lib/posthog/client.rb
- lib/posthog/defaults.rb
- lib/posthog/field_parser.rb
- lib/posthog/logging.rb
- lib/posthog/message_batch.rb
- lib/posthog/response.rb
- lib/posthog/transport.rb
- lib/posthog/utils.rb
- lib/posthog/version.rb
- lib/posthog/worker.rb
homepage: https://github.com/PostHog/posthog-ruby
licenses:
- MIT
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '2.0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">"
    - !ruby/object:Gem::Version
      version: 1.3.1
requirements: []
rubyforge_project:
rubygems_version: 2.5.2.3
signing_key:
specification_version: 4
summary: PostHog library
test_files: []