posthog-rails 3.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,194 @@
+# frozen_string_literal: true
+
+require 'posthog/logging'
+
+module PostHog
+  class FieldParser
+    class << self
+      include PostHog::Utils
+      include PostHog::Logging
+
+      # In addition to the common fields, capture accepts:
+      #
+      # - "event"
+      # - "properties"
+      # - "groups"
+      # - "uuid"
+      def parse_for_capture(fields)
+        common = parse_common_fields(fields)
+
+        event = fields[:event]
+        properties = fields[:properties] || {}
+        groups = fields[:groups]
+        uuid = fields[:uuid]
+        check_presence!(event, 'event')
+        check_is_hash!(properties, 'properties')
+
+        if groups
+          check_is_hash!(groups, 'groups')
+          properties['$groups'] = groups
+        end
+
+        isoify_dates! properties
+
+        common['uuid'] = uuid if valid_uuid_for_event_props? uuid
+
+        common.merge(
+          {
+            type: 'capture',
+            event: event.to_s,
+            properties: properties.merge(common[:properties] || {})
+          }
+        )
+      end
+
+      # In addition to the common fields, identify accepts:
+      #
+      # - "properties"
+      def parse_for_identify(fields)
+        common = parse_common_fields(fields)
+
+        properties = fields[:properties] || {}
+        check_is_hash!(properties, 'properties')
+
+        isoify_dates! properties
+
+        common.merge(
+          {
+            type: 'identify',
+            event: '$identify',
+            '$set': properties,
+            properties: properties.merge(common[:properties] || {})
+          }
+        )
+      end
+
+      def parse_for_group_identify(fields)
+        properties = fields[:properties] || {}
+        group_type = fields[:group_type]
+        group_key = fields[:group_key]
+
+        check_presence!(group_type, 'group type')
+        check_presence!(group_key, 'group_key')
+        check_is_hash!(properties, 'properties')
+
+        fields[:distinct_id] ||= "$#{group_type}_#{group_key}"
+        common = parse_common_fields(fields)
+
+        isoify_dates! properties
+
+        common.merge(
+          {
+            event: '$groupidentify',
+            properties: {
+              '$group_type': group_type,
+              '$group_key': group_key,
+              '$group_set': properties.merge(common[:properties] || {})
+            }
+          }
+        )
+      end
+
+      # In addition to the common fields, alias accepts:
+      #
+      # - "alias"
+      def parse_for_alias(fields)
+        common = parse_common_fields(fields)
+
+        distinct_id = common[:distinct_id] # must be set at the top level and also moved into properties
+
+        alias_field = fields[:alias]
+        check_presence! alias_field, 'alias'
+
+        common.merge(
+          {
+            type: 'alias',
+            event: '$create_alias',
+            distinct_id: distinct_id,
+            properties:
+              { distinct_id: distinct_id, alias: alias_field }.merge(
+                common[:properties] || {}
+              )
+          }
+        )
+      end
+
+      private
+
+      # Common fields are:
+      #
+      # - "timestamp"
+      # - "distinct_id"
+      # - "message_id"
+      # - "send_feature_flags"
+      def parse_common_fields(fields)
+        timestamp = fields[:timestamp] || Time.new
+        distinct_id = fields[:distinct_id]
+        message_id = fields[:message_id].to_s if fields[:message_id]
+        send_feature_flags = fields[:send_feature_flags]
+
+        check_timestamp! timestamp
+        check_presence! distinct_id, 'distinct_id'
+
+        parsed = {
+          timestamp: datetime_in_iso8601(timestamp),
+          library: 'posthog-ruby',
+          library_version: PostHog::VERSION.to_s,
+          messageId: message_id,
+          distinct_id: distinct_id,
+          properties: {
+            '$lib' => 'posthog-ruby',
+            '$lib_version' => PostHog::VERSION.to_s
+          }
+        }
+
+        if send_feature_flags && fields[:feature_variants]
+          feature_variants = fields[:feature_variants]
+          active_feature_variants = {}
+          feature_variants.each do |key, value|
+            parsed[:properties]["$feature/#{key}"] = value
+            active_feature_variants[key] = value if value != false
+          end
+          parsed[:properties]['$active_feature_flags'] = active_feature_variants.keys
+        end
+        parsed
+      end
+
+      def check_timestamp!(timestamp)
+        return if timestamp.is_a? Time
+
+        raise ArgumentError, 'Timestamp must be a Time'
+      end
+
+      # private: Ensures that a value is present (non-nil and, for strings, non-empty)
+      #
+      # obj - String|Number that must not be blank
+      # name - The name of the validated value
+      def check_presence!(obj, name)
+        return unless obj.nil? || (obj.is_a?(String) && obj.empty?)
+
+        raise ArgumentError, "#{name} must be given"
+      end
+
+      def check_is_hash!(obj, name)
+        raise ArgumentError, "#{name} must be a Hash" unless obj.is_a? Hash
+      end
+
+      # @param [Object] uuid - the UUID to validate; user-provided, so we don't know the type
+      # @return [TrueClass, FalseClass] - true if the UUID is valid or absent, false otherwise
+      def valid_uuid_for_event_props?(uuid)
+        return true if uuid.nil?
+
+        unless uuid.is_a?(String)
+          logger.warn 'UUID is not a string. Ignoring it.'
+          return false
+        end
+
+        is_valid_uuid = uuid.match?(/^[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}$/i)
+        logger.warn "UUID is not valid: #{uuid}. Ignoring it." unless is_valid_uuid
+
+        is_valid_uuid
+      end
+    end
+  end
+end
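
A minimal usage sketch of the parser above. Values are hypothetical, and it assumes the rest of the gem is already loaded (PostHog::Utils, which supplies isoify_dates! and datetime_in_iso8601, plus PostHog::VERSION), as it is after a normal require 'posthog':

    message = PostHog::FieldParser.parse_for_capture(
      distinct_id: 'user_123',
      event: 'signed_up',
      properties: { plan: 'free' },
      groups: { company: 'acme_inc' }
    )
    message[:type]                   # => 'capture'
    message[:properties]['$groups']  # => { company: 'acme_inc' }
    message[:properties]['$lib']     # => 'posthog-ruby'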
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'logger'
+
+module PostHog
+  # Wraps an existing logger and adds a prefix to all messages
+  class PrefixedLogger
+    def initialize(logger, prefix)
+      @logger = logger
+      @prefix = prefix
+    end
+
+    def debug(msg)
+      @logger.debug("#{@prefix} #{msg}")
+    end
+
+    def info(msg)
+      @logger.info("#{@prefix} #{msg}")
+    end
+
+    def warn(msg)
+      @logger.warn("#{@prefix} #{msg}")
+    end
+
+    def error(msg)
+      @logger.error("#{@prefix} #{msg}")
+    end
+
+    def level=(severity)
+      @logger.level = severity
+    end
+
+    def level
+      @logger.level
+    end
+  end
+
+  module Logging
+    class << self
+      def logger
+        return @logger if @logger
+
+        base_logger =
+          if defined?(::Rails)
+            ::Rails.logger
+          else
+            logger = Logger.new $stdout
+            logger.progname = 'PostHog'
+            logger.level = Logger::WARN
+            logger
+          end
+        @logger = PrefixedLogger.new(base_logger, '[posthog-ruby]')
+      end
+
+      attr_writer :logger
+    end
+
+    def self.included(base)
+      class << base
+        def logger
+          Logging.logger
+        end
+      end
+    end
+
+    def logger
+      Logging.logger
+    end
+  end
+end
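
Outside of Rails the module above falls back to a WARN-level stdout logger. A hedged sketch of overriding it (the assignment relies on the attr_writer :logger defined above; the verbose logger itself is just an illustrative choice):

    require 'logger'

    verbose = Logger.new($stdout)
    verbose.level = Logger::DEBUG
    PostHog::Logging.logger = PostHog::PrefixedLogger.new(verbose, '[posthog-ruby]')

    # Any class that includes PostHog::Logging now logs through this instance.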
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'forwardable'
+require 'posthog/logging'
+
+module PostHog
+  # A batch of `Message`s to be sent to the API
+  class MessageBatch
+    class JSONGenerationError < StandardError
+    end
+
+    extend Forwardable
+    include PostHog::Logging
+    include PostHog::Defaults::MessageBatch
+
+    def initialize(max_message_count)
+      @messages = []
+      @max_message_count = max_message_count
+      @json_size = 0
+    end
+
+    def <<(message)
+      begin
+        message_json = message.to_json
+      rescue StandardError => e
+        raise JSONGenerationError, "Serialization error: #{e}"
+      end
+
+      message_json_size = message_json.bytesize
+      if message_too_big?(message_json_size)
+        logger.error('a message exceeded the maximum allowed size')
+      else
+        @messages << message
+        @json_size += message_json_size + 1 # One byte for the comma
+      end
+    end
+
+    def full?
+      item_count_exhausted? || size_exhausted?
+    end
+
+    def clear
+      @messages.clear
+      @json_size = 0
+    end
+
+    def_delegators :@messages, :to_json
+    def_delegators :@messages, :empty?
+    def_delegators :@messages, :length
+
+    private
+
+    def item_count_exhausted?
+      @messages.length >= @max_message_count
+    end
+
+    def message_too_big?(message_json_size)
+      message_json_size > Defaults::Message::MAX_BYTES
+    end
+
+    # We consider the max size here as just enough to leave room for one more
+    # message of the largest possible size. This is a shortcut that allows us
+    # to use a native Ruby `Queue` that doesn't allow peeking. The tradeoff
+    # is that we might fit fewer messages into a batch than would be possible.
+    #
+    # The alternative is to use our own `Queue` implementation that allows
+    # peeking, and to consider the next message size when calculating whether
+    # the message can be accommodated in this batch.
+    def size_exhausted?
+      @json_size >= (MAX_BYTES - Defaults::Message::MAX_BYTES)
+    end
+  end
+end
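
A short sketch of the batching behaviour. It assumes posthog/defaults is loaded (MAX_BYTES and the message size limits come from PostHog::Defaults, which is not part of this diff) and that 'json' is required so hashes respond to to_json:

    batch = PostHog::MessageBatch.new(3)
    batch << { type: 'capture', event: 'signed_up', distinct_id: 'user_123' }
    batch.length  # => 1
    batch.full?   # => false until 3 messages (or the byte budget) are reached
    batch.to_json # delegated to the underlying Array of messages
    batch.clear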
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+# A worker that doesn't consume jobs
+module PostHog
+  class NoopWorker
+    def initialize(queue)
+      @queue = queue
+    end
+
+    def run
+      # Does nothing
+    end
+
+    # TODO: Rename to `requesting?` in future version
+    def is_requesting? # rubocop:disable Naming/PredicateName
+      false
+    end
+  end
+end
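
A tiny sketch of what this buys: it can stand in for the real worker when events should be accepted but never sent, since the queue is simply never drained. How the client actually swaps it in is not shown in this diff, so treat that wiring as an assumption:

    queue = Queue.new
    worker = PostHog::NoopWorker.new(queue)
    worker.run            # does nothing; the queue is left untouched
    worker.is_requesting? # => false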
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module PostHog
+  class Response
+    attr_reader :status, :error
+
+    # public: Simple class to wrap responses from the API
+    #
+    #
+    def initialize(status = 200, error = nil)
+      @status = status
+      @error = error
+    end
+  end
+end
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'posthog/utils'
+
+module PostHog
+  # Options for configuring feature flag behavior in capture calls
+  class SendFeatureFlagsOptions
+    attr_reader :only_evaluate_locally, :person_properties, :group_properties
+
+    def initialize(only_evaluate_locally: nil, person_properties: nil, group_properties: nil)
+      @only_evaluate_locally = only_evaluate_locally
+      @person_properties = person_properties || {}
+      @group_properties = group_properties || {}
+    end
+
+    def to_h
+      {
+        only_evaluate_locally: @only_evaluate_locally,
+        person_properties: @person_properties,
+        group_properties: @group_properties
+      }
+    end
+
+    def self.from_hash(hash)
+      return nil unless hash.is_a?(Hash)
+
+      new(
+        only_evaluate_locally: PostHog::Utils.get_by_symbol_or_string_key(hash, :only_evaluate_locally),
+        person_properties: PostHog::Utils.get_by_symbol_or_string_key(hash, :person_properties),
+        group_properties: PostHog::Utils.get_by_symbol_or_string_key(hash, :group_properties)
+      )
+    end
+  end
+end
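
A brief usage sketch. from_hash tolerates string or symbol keys via PostHog::Utils.get_by_symbol_or_string_key, which lives in posthog/utils and is not part of this diff; the values below are hypothetical:

    opts = PostHog::SendFeatureFlagsOptions.from_hash(
      'only_evaluate_locally' => true,
      person_properties: { 'plan' => 'enterprise' }
    )
    opts.only_evaluate_locally # => true
    opts.group_properties      # => {}
    opts.to_h                  # => { only_evaluate_locally: true, person_properties: { 'plan' => 'enterprise' }, group_properties: {} }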
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'posthog/defaults'
+require 'posthog/message_batch'
+require 'posthog/transport'
+require 'posthog/utils'
+
+module PostHog
+  class SendWorker
+    include PostHog::Utils
+    include PostHog::Defaults
+    include PostHog::Logging
+
+    # public: Creates a new worker
+    #
+    # The worker continuously takes messages off the queue
+    # and makes requests to the posthog.com api
+    #
+    # queue - Queue synchronized between client and worker
+    # api_key - String of the project's API key
+    # options - Hash of worker options
+    # batch_size - Fixnum of how many items to send in a batch
+    # on_error - Proc of what to do on an error
+    #
+    def initialize(queue, api_key, options = {})
+      symbolize_keys! options
+      @queue = queue
+      @api_key = api_key
+      @on_error = options[:on_error] || proc { |status, error| }
+      batch_size = options[:batch_size] || Defaults::MessageBatch::MAX_SIZE
+      @batch = MessageBatch.new(batch_size)
+      @lock = Mutex.new
+      @transport = Transport.new api_host: options[:host], skip_ssl_verification: options[:skip_ssl_verification]
+    end
+
+    # public: Continuously runs the loop to check for new events
+    #
+    def run
+      until Thread.current[:should_exit]
+        return if @queue.empty?
+
+        @lock.synchronize do
+          consume_message_from_queue! until @batch.full? || @queue.empty?
+        end
+
+        res = @transport.send @api_key, @batch
+        @on_error.call(res.status, res.error) unless res.status == 200
+
+        @lock.synchronize { @batch.clear }
+      end
+    ensure
+      @transport.shutdown
+    end
+
+    # public: Check whether we have outstanding requests.
+    #
+    # TODO: Rename to `requesting?` in future version
+    def is_requesting? # rubocop:disable Naming/PredicateName
+      @lock.synchronize { !@batch.empty? }
+    end
+
+    private
+
+    def consume_message_from_queue!
+      @batch << @queue.pop
+    rescue MessageBatch::JSONGenerationError => e
+      @on_error.call(-1, e.to_s)
+    end
+  end
+end
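
A rough sketch of how a client might drive this worker. Thread handling is simplified, the API key is a placeholder, and the real client class lives elsewhere in the gem:

    queue = Queue.new
    worker = PostHog::SendWorker.new(
      queue,
      'phc_project_api_key',
      on_error: proc { |status, error| warn "PostHog delivery failed: #{status} #{error}" }
    )

    queue << { type: 'capture', event: 'signed_up', distinct_id: 'user_123' }
    thread = Thread.new { worker.run } # drains the queue in batches and POSTs them
    thread[:should_exit] = true        # asks the loop above to stop after the current pass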
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'posthog/defaults'
+require 'posthog/utils'
+require 'posthog/response'
+require 'posthog/logging'
+require 'posthog/backoff_policy'
+require 'net/http'
+require 'net/https'
+require 'json'
+
+module PostHog
+  class Transport
+    include PostHog::Defaults::Request
+    include PostHog::Utils
+    include PostHog::Logging
+
+    def initialize(options = {})
+      if options[:api_host]
+        uri = URI.parse(options[:api_host])
+        options[:host] = uri.host
+        options[:ssl] = uri.scheme == 'https'
+        options[:port] = uri.port
+      end
+
+      options[:host] = options[:host].nil? ? HOST : options[:host]
+      options[:port] = options[:port].nil? ? PORT : options[:port]
+      options[:ssl] = options[:ssl].nil? ? SSL : options[:ssl]
+
+      @headers = options[:headers] || HEADERS
+      @path = options[:path] || PATH
+      @retries = options[:retries] || RETRIES
+      @backoff_policy = options[:backoff_policy] || PostHog::BackoffPolicy.new
+
+      http = Net::HTTP.new(options[:host], options[:port])
+      http.use_ssl = options[:ssl]
+      http.read_timeout = 8
+      http.open_timeout = 4
+      http.verify_mode = OpenSSL::SSL::VERIFY_NONE if options[:skip_ssl_verification]
+
+      @http = http
+    end
+
+    # Sends a batch of messages to the API
+    #
+    # @return [Response] API response
+    def send(api_key, batch)
+      logger.debug("Sending request for #{batch.length} items")
+
+      last_response, exception =
+        retry_with_backoff(@retries) do
+          status_code, body = send_request(api_key, batch)
+          error = JSON.parse(body)['error']
+          should_retry = should_retry_request?(status_code, body)
+          logger.debug("Response status code: #{status_code}")
+          logger.debug("Response error: #{error}") if error
+
+          [Response.new(status_code, error), should_retry]
+        end
+
+      if exception
+        logger.error(exception.message)
+        exception.backtrace.each { |line| logger.error(line) }
+        Response.new(-1, exception.to_s)
+      else
+        last_response
+      end
+    end
+
+    # Closes a persistent connection if it exists
+    def shutdown
+      @http.finish if @http.started?
+    end
+
+    private
+
+    def should_retry_request?(status_code, body)
+      if status_code >= 500
+        true # Server error
+      elsif status_code == 429 # rubocop:disable Lint/DuplicateBranch
+        true # Rate limited
+      elsif status_code >= 400
+        logger.error(body)
+        false # Client error. Do not retry, but log
+      else
+        false
+      end
+    end
+
+    # Takes a block that returns [result, should_retry].
+    #
+    # Retries up to `retries_remaining` times if `should_retry` is true or
+    # an exception is raised. `@backoff_policy` is used to determine the
+    # duration to sleep between attempts.
+    #
+    # Returns [last_result, raised_exception]
+    def retry_with_backoff(retries_remaining, &block)
+      result, caught_exception = nil
+      should_retry = false
+
+      begin
+        result, should_retry = yield
+        return result, nil unless should_retry
+      rescue StandardError => e
+        should_retry = true
+        caught_exception = e
+      end
+
+      if should_retry && (retries_remaining > 1)
+        logger.debug("Retrying request, #{retries_remaining} retries left")
+        sleep(@backoff_policy.next_interval.to_f / 1000)
+        retry_with_backoff(retries_remaining - 1, &block)
+      else
+        [result, caught_exception]
+      end
+    end
+
+    # Sends a request for the batch, returns [status_code, body]
+    def send_request(api_key, batch)
+      payload = JSON.generate(api_key: api_key, batch: batch)
+
+      request = Net::HTTP::Post.new(@path, @headers)
+
+      if self.class.stub
+        logger.debug "stubbed request to #{@path}: " \
+                     "api key = #{api_key}, batch = #{JSON.generate(batch)}"
+
+        [200, '{}']
+      else
+        @http.start unless @http.started? # Maintain a persistent connection
+        response = @http.request(request, payload)
+        [response.code.to_i, response.body]
+      end
+    end
+
+    class << self
+      attr_writer :stub
+
+      def stub
+        @stub || ENV.fetch('STUB', nil)
+      end
+    end
+  end
+end
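
A short sketch of exercising the transport in isolation. Setting Transport.stub (or the STUB environment variable) short-circuits send_request, so nothing leaves the machine; the host and API key below are placeholders, and the defaults/backoff files the class requires are assumed to be loaded with the rest of the gem:

    PostHog::Transport.stub = true

    transport = PostHog::Transport.new(api_host: 'https://app.posthog.com')
    batch = [{ type: 'capture', event: 'signed_up', distinct_id: 'user_123' }]

    response = transport.send('phc_project_api_key', batch)
    response.status # => 200 (stubbed)
    response.error  # => nil
    transport.shutdown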