redis-time-series 0.3.0 → 0.6.0

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -0,0 +1,88 @@
+ # frozen_string_literal: true
+ class Redis
+   class TimeSeries
+     # An aggregation is a combination of a mathematical function, and a time window over
+     # which to apply that function. In RedisTimeSeries, aggregations are used to downsample
+     # data from a source series to a destination series, using compaction rules.
+     #
+     # @see Redis::TimeSeries#create_rule
+     # @see Redis::TimeSeries::Rule
+     # @see https://oss.redislabs.com/redistimeseries/commands/#aggregation-compaction-downsampling
+     class Aggregation
+       TYPES = %w[
+         avg
+         count
+         first
+         last
+         max
+         min
+         range
+         std.p
+         std.s
+         sum
+         var.p
+         var.s
+       ]
+
+       # @return [String] the type of aggregation to apply
+       # @see TYPES
+       attr_reader :type
+       alias aggregation_type type
+
+       # @return [Integer] the time window to apply the aggregation over, in milliseconds
+       attr_reader :duration
+       alias time_bucket duration
+
+       # Parse a method argument into an aggregation.
+       #
+       # @param agg [Array, Aggregation] an aggregation object, or an array of type and duration +[:avg, 60000]+
+       # @return [Aggregation] the parsed aggregation, or the original argument if already an aggregation
+       # @raise [AggregationError] when given an unparseable value
+       def self.parse(agg)
+         return unless agg
+         return agg if agg.is_a?(self)
+         return new(agg.first, agg.last) if agg.is_a?(Array) && agg.size == 2
+         raise AggregationError, "Couldn't parse #{agg} into an aggregation rule!"
+       end
+
+       # Create a new Aggregation given a type and duration.
+       # @param type [String, Symbol] one of the valid aggregation {TYPES}
+       # @param duration [Integer, ActiveSupport::Duration]
+       #   A time window to apply this aggregation over.
+       #   If you're using ActiveSupport, duration objects (e.g. +10.minutes+) will be automatically coerced.
+       # @return [Aggregation]
+       # @raise [AggregationError] if the given aggregation type is not valid
+       def initialize(type, duration)
+         type = type.to_s.downcase
+         unless TYPES.include? type
+           raise AggregationError, "#{type} is not a valid aggregation type!"
+         end
+         @type = type
+         if defined?(ActiveSupport::Duration) && duration.is_a?(ActiveSupport::Duration)
+           @duration = duration.in_milliseconds
+         else
+           @duration = duration.to_i
+         end
+       end
+
+       # @api private
+       # @return [Array]
+       def to_a
+         ['AGGREGATION', type, duration]
+       end
+
+       # @api private
+       # @return [String]
+       def to_s
+         to_a.join(' ')
+       end
+
+       # Compares aggregations based on type and duration.
+       # @return [Boolean] whether the given aggregations are equivalent
+       def ==(other)
+         parsed = self.class.parse(other)
+         type == parsed.type && duration == parsed.duration
+       end
+     end
+   end
+ end
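For orientation (not part of the package diff), a minimal usage sketch of the new Aggregation class, based on the doc comments above; the 60_000 ms bucket is illustrative only:

  # Average samples over a 60-second (60,000 ms) time bucket.
  agg = Redis::TimeSeries::Aggregation.new(:avg, 60_000)
  agg.to_a  #=> ["AGGREGATION", "avg", 60000]

  # Aggregation.parse accepts either an Aggregation or a [type, duration] pair.
  Redis::TimeSeries::Aggregation.parse([:avg, 60_000]) == agg  #=> true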
@@ -0,0 +1,77 @@
+ # frozen_string_literal: true
+ using TimeMsec
+
+ class Redis
+   class TimeSeries
+     # The client module handles connection management for individual time series, and
+     # the parent {TimeSeries} class methods. You can enable or disable debugging, and set
+     # a default Redis client to use for time series objects.
+     module Client
+       def self.extended(base)
+         base.class_eval do
+           attr_reader :redis
+
+           private
+
+           def cmd(name, *args)
+             self.class.send :cmd_with_redis, redis, name, *args
+           end
+         end
+       end
+
+       # Check debug status. Defaults to on with +DEBUG=true+ environment variable.
+       # @return [Boolean] current debug status
+       def debug
+         @debug.nil? ? [true, 'true', 1].include?(ENV['DEBUG']) : @debug
+       end
+
+       # Enable or disable debug output for time series commands. Enabling debug will
+       # print commands to +STDOUT+ as they're executed.
+       #
+       # @example
+       #   [1] pry(main)> @ts1.get
+       #   => #<Redis::TimeSeries::Sample:0x00007fc82e9de150 @time=2020-07-19 15:01:13 -0700, @value=0.56e2>
+       #   [2] pry(main)> Redis::TimeSeries.debug = true
+       #   => true
+       #   [3] pry(main)> @ts1.get
+       #   DEBUG: TS.GET ts1
+       #   => #<Redis::TimeSeries::Sample:0x00007fc82f11b7b0 @time=2020-07-19 15:01:13 -0700, @value=0.56e2>
+       #
+       # @return [Boolean] new debug status
+       def debug=(bool)
+         @debug = !!bool
+       end
+
+       # @return [Redis] the current Redis client. Defaults to +Redis.current+
+       def redis
+         @redis ||= Redis.current
+       end
+
+       # Set the default Redis client for time series objects.
+       # This may be useful if you already use a non-time-series Redis database, and want
+       # to use both at the same time.
+       #
+       # @example
+       #   # config/initializers/redis_time_series.rb
+       #   Redis::TimeSeries.redis = Redis.new(url: 'redis://my-redis-server:6379/0')
+       #
+       # @param client [Redis] a Redis client
+       # @return [Redis]
+       def redis=(client)
+         @redis = client
+       end
+
+       private
+
+       def cmd(name, *args)
+         cmd_with_redis redis, name, *args
+       end
+
+       def cmd_with_redis(redis, name, *args)
+         args = args.flatten.compact.map { |arg| arg.is_a?(Time) ? arg.ts_msec : arg }
+         puts "DEBUG: #{name} #{args.join(' ')}" if debug
+         redis.call name, args
+       end
+     end
+   end
+ end
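For orientation (not part of the package diff), the two module-level settings exposed here, taken directly from the @example blocks above:

  # config/initializers/redis_time_series.rb
  Redis::TimeSeries.redis = Redis.new(url: 'redis://my-redis-server:6379/0')

  # Print each TS.* command to STDOUT as it runs (also enabled via DEBUG=true).
  Redis::TimeSeries.debug = true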
@@ -0,0 +1,49 @@
+ # frozen_string_literal: true
+ class Redis
+   class TimeSeries
+     # Duplication policies can be applied to a time series in order to resolve conflicts
+     # when adding data that already exists in the series.
+     #
+     # @see https://oss.redislabs.com/redistimeseries/master/configuration/#duplicate_policy
+     class DuplicatePolicy
+       VALID_POLICIES = %i[
+         block
+         first
+         last
+         min
+         max
+         sum
+       ].freeze
+
+       attr_reader :policy
+
+       def initialize(policy)
+         policy = policy.to_s.downcase.to_sym
+         if VALID_POLICIES.include?(policy)
+           @policy = policy
+         else
+           raise UnknownPolicyError, "#{policy} is not a valid duplicate policy"
+         end
+       end
+
+       def to_a(cmd = 'DUPLICATE_POLICY')
+         [cmd, policy]
+       end
+
+       def to_s(cmd = 'DUPLICATE_POLICY')
+         to_a(cmd).join(' ')
+       end
+
+       def ==(other)
+         return policy == other.policy if other.is_a?(self.class)
+         policy == self.class.new(other).policy
+       end
+
+       VALID_POLICIES.each do |policy|
+         define_method("#{policy}?") do
+           @policy == policy
+         end
+       end
+     end
+   end
+ end
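A short sketch of the DuplicatePolicy value object (not part of the package diff); :last is just one of the VALID_POLICIES:

  policy = Redis::TimeSeries::DuplicatePolicy.new(:last)
  policy.last?  #=> true (a predicate method is defined for each valid policy)
  policy.to_a   #=> ["DUPLICATE_POLICY", :last]

  Redis::TimeSeries::DuplicatePolicy.new(:bogus)
  # raises Redis::TimeSeries::UnknownPolicyError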
@@ -0,0 +1,24 @@
+ class Redis
+   class TimeSeries
+     # Base error class for convenient +rescue+-ing.
+     #
+     # Descendant of +Redis::BaseError+, so you can rescue that and capture all
+     # time-series errors, as well as standard Redis command errors.
+     class Error < Redis::BaseError; end
+
+     # +FilterError+ is raised when a given set of filters is invalid (i.e. does not contain
+     # an equality comparison such as "foo=bar"), or the filter value is unparseable.
+     # @see Redis::TimeSeries::Filters
+     class FilterError < Error; end
+
+     # +AggregationError+ is raised when attempting to create an aggregation with
+     # an unknown type, or when calling a command with an invalid aggregation value.
+     # @see Redis::TimeSeries::Aggregation
+     class AggregationError < Error; end
+
+     # +UnknownPolicyError+ is raised when attempting to apply an unknown type of
+     # duplicate policy when creating or adding to a series.
+     # @see Redis::TimeSeries::DuplicatePolicy
+     class UnknownPolicyError < Error; end
+   end
+ end
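Since everything descends from Redis::BaseError, one rescue can catch time-series and plain Redis command errors alike; a minimal sketch (not part of the package diff):

  begin
    Redis::TimeSeries::Aggregation.new(:median, 60_000)
  rescue Redis::TimeSeries::Error => e
    # AggregationError, FilterError, and UnknownPolicyError are all caught here.
    e.message  #=> "median is not a valid aggregation type!"
  end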
@@ -1,7 +1,7 @@
  # frozen_string_literal: true
  class Redis
    class TimeSeries
-     class Filter
+     class Filters
        Equal = Struct.new(:label, :value) do
          self::REGEX = /^[^!]+=[^(]+/

@@ -9,6 +9,10 @@ class Redis
            new(*str.split('='))
          end

+         def to_h
+           { label => value }
+         end
+
          def to_s
            "#{label}=#{value}"
          end
@@ -21,6 +25,10 @@ class Redis
            new(*str.split('!='))
          end

+         def to_h
+           { label => { not: value } }
+         end
+
          def to_s
            "#{label}!=#{value}"
          end
@@ -33,6 +41,10 @@ class Redis
            new(str.delete('='))
          end

+         def to_h
+           { label => false }
+         end
+
          def to_s
            "#{label}="
          end
@@ -45,6 +57,10 @@ class Redis
            new(str.delete('!='))
          end

+         def to_h
+           { label => true }
+         end
+
          def to_s
            "#{label}!="
          end
@@ -59,8 +75,12 @@ class Redis
            new(label, values)
          end

+         def to_h
+           { label => values }
+         end
+
          def to_s
-           "#{label}=(#{values.join(',')})"
+           "#{label}=(#{values.map(&:to_s).join(',')})"
          end
        end

@@ -73,14 +93,18 @@ class Redis
            new(label, values)
          end

+         def to_h
+           { label => { not: values } }
+         end
+
          def to_s
-           "#{label}!=(#{values.join(',')})"
+           "#{label}!=(#{values.map(&:to_s).join(',')})"
          end
        end

        TYPES = [Equal, NotEqual, Absent, Present, AnyValue, NoValues]
        TYPES.each do |type|
-         define_method "#{type.to_s.split('::').last.gsub(/(.)([A-Z])/,'\1_\2').downcase}_filters" do
+         define_method "#{type.to_s.split('::').last.gsub(/(.)([A-Z])/,'\1_\2').downcase}" do
            filters.select { |f| f.is_a? type }
          end
        end
@@ -88,12 +112,15 @@ class Redis
        attr_reader :filters

        def initialize(filters = nil)
-         filters = parse_string(filters) if filters.is_a?(String)
-         @filters = filters.presence || {}
+         @filters = case filters
+                    when String then parse_string(filters)
+                    when Hash then parse_hash(filters)
+                    else []
+                    end
        end

        def validate!
-         valid? || raise('Filtering requires at least one equality comparison')
+         valid? || raise(FilterError, 'Filtering requires at least one equality comparison')
        end

        def valid?
@@ -104,15 +131,39 @@ class Redis
          filters.map(&:to_s)
        end

+       def to_h
+         filters.reduce({}) { |h, filter| h.merge(filter.to_h) }
+       end
+
+       def to_s
+         to_a.join(' ')
+       end
+
        private

        def parse_string(filter_string)
+         return unless filter_string.is_a? String
          filter_string.split(' ').map do |str|
            match = TYPES.find { |f| f::REGEX.match? str }
-           raise "Unable to parse '#{str}'" unless match
+           raise(FilterError, "Unable to parse '#{str}'") unless match
            match.parse(str)
          end
        end
+
+       def parse_hash(filter_hash)
+         return unless filter_hash.is_a? Hash
+         filter_hash.map do |label, value|
+           case value
+           when TrueClass then Present.new(label)
+           when FalseClass then Absent.new(label)
+           when Array then AnyValue.new(label, value)
+           when Hash
+             raise(FilterError, "Invalid filter hash value #{value}") unless value.keys === [:not]
+             (v = value.values.first).is_a?(Array) ? NoValues.new(label, v) : NotEqual.new(label, v)
+           else Equal.new(label, value)
+           end
+         end
+       end
      end
    end
  end
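For orientation (not part of the package diff), the hash forms accepted by the new parse_hash method and their string equivalents; the labels and values are illustrative only:

  filters = Redis::TimeSeries::Filters.new(foo: 'bar', baz: { not: 'quux' }, plugh: true, xyzzy: false)
  filters.to_s  #=> "foo=bar baz!=quux plugh!= xyzzy="
  filters.to_h  #=> { foo: "bar", baz: { not: "quux" }, plugh: true, xyzzy: false }

  # Array values map to the parenthesized forms, e.g. foo: %w[a b] becomes "foo=(a,b)".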
@@ -1,28 +1,110 @@
  # frozen_string_literal: true
  class Redis
    class TimeSeries
+     # The Info struct wraps the result of the +TS.INFO+ command with method access.
+     # It also applies some limited parsing to the result values, mainly snakifying
+     # the property keys, and instantiating Rule objects if necessary.
+     #
+     # All properties of the struct are also available on a TimeSeries object itself
+     # via delegation.
+     #
+     # @!attribute [r] chunk_count
+     #   @return [Integer] number of memory chunks used for the time-series
+     # @!attribute [r] chunk_size
+     #   @return [Integer] amount of allocated memory in bytes
+     # @!attribute [r] chunk_type
+     #   @return [String] whether the chunk is "compressed" or "uncompressed"
+     # @!attribute [r] first_timestamp
+     #   @return [Integer] first timestamp present in the time-series (milliseconds since epoch)
+     # @!attribute [r] labels
+     #   @return [Hash] a hash of label-value pairs that represent metadata labels of the time-series
+     # @!attribute [r] last_timestamp
+     #   @return [Integer] last timestamp present in the time-series (milliseconds since epoch)
+     # @!attribute [r] max_samples_per_chunk
+     #   @return [Integer] maximum number of samples per memory chunk
+     # @!attribute [r] memory_usage
+     #   @return [Integer] total number of bytes allocated for the time-series
+     # @!attribute [r] retention_time
+     #   @return [Integer] retention time, in milliseconds, for the time-series.
+     #     A zero value means unlimited retention.
+     # @!attribute [r] rules
+     #   @return [Array<Rule>] an array of configured compaction {Rule}s
+     # @!attribute [r] series
+     #   @return [TimeSeries] the series this info is from
+     # @!attribute [r] source_key
+     #   @return [String, nil] the key of the source series, if this series is the destination
+     #     of a compaction rule
+     # @!attribute [r] total_samples
+     #   @return [Integer] the total number of samples in the series
+     #
+     # @see TimeSeries#info
+     # @see https://oss.redislabs.com/redistimeseries/commands/#tsinfo
      Info = Struct.new(
-       :total_samples,
-       :memory_usage,
+       :chunk_count,
+       :chunk_size,
+       :chunk_type,
+       :duplicate_policy,
        :first_timestamp,
+       :labels,
        :last_timestamp,
-       :retention_time,
-       :chunk_count,
        :max_samples_per_chunk,
-       :labels,
-       :source_key,
+       :memory_usage,
+       :retention_time,
        :rules,
+       :series,
+       :source_key,
+       :total_samples,
        keyword_init: true
      ) do
-       def self.parse(raw_array)
-         raw_array.each_slice(2).reduce({}) do |h, (key, value)|
-           # Convert camelCase info keys to snake_case
-           h[key.gsub(/(.)([A-Z])/,'\1_\2').downcase] = value
-           h
-         end.then do |parsed_hash|
-           parsed_hash['labels'] = parsed_hash['labels'].to_h
-           new(parsed_hash)
+       class << self
+         # @api private
+         # @return [Info]
+         def parse(series:, data:)
+           build_hash(data)
+             .merge(series: series)
+             .then(&method(:parse_labels))
+             .then(&method(:parse_policies))
+             .then(&method(:parse_rules))
+             .then(&method(:new))
+         end
+
+         private
+
+         def build_hash(data)
+           data.each_slice(2).reduce({}) do |h, (key, value)|
+             # Convert camelCase info keys to snake_case
+             key = key.gsub(/(.)([A-Z])/,'\1_\2').downcase.to_sym
+             # Skip unknown properties
+             next h unless members.include?(key)
+             h.merge(key => value)
+           end
          end
+
+         def parse_labels(hash)
+           hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
+           hash
+         end
+
+         def parse_policies(hash)
+           hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
+           hash
+         end
+
+         def parse_rules(hash)
+           hash[:rules] = hash[:rules].map { |d| Rule.new(source: hash[:series], data: d) }
+           hash
+         end
+       end
+
+       alias count total_samples
+       alias length total_samples
+       alias size total_samples
+
+       # If this series is the destination of a compaction rule, returns the source series of the data.
+       # @return [TimeSeries, nil] the series referred to by {source_key}
+       def source
+         return unless source_key
+         @source ||= TimeSeries.new(source_key, redis: series.redis)
        end
      end
    end
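For orientation (not part of the package diff), the reworked Info struct is reachable through a series object; a sketch assuming an existing series `ts` (see TimeSeries#info):

  info = ts.info
  info.total_samples   # also available as info.count, info.length, or info.size
  info.rules           # compaction rules, parsed into Redis::TimeSeries::Rule objects
  info.source          # the source TimeSeries if this series is a compaction destination, else nil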