redis-time-series 0.2.0 → 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,77 @@
+ # frozen_string_literal: true
+ using TimeMsec
+
+ class Redis
+   class TimeSeries
+     # The Client module handles connection management for individual time series, and
+     # the parent {TimeSeries} class methods. You can enable or disable debugging, and set
+     # a default Redis client to use for time series objects.
+     module Client
+       def self.extended(base)
+         base.class_eval do
+           attr_reader :redis
+
+           private
+
+           def cmd(name, *args)
+             self.class.send :cmd_with_redis, redis, name, *args
+           end
+         end
+       end
+
+       # Check debug status. Defaults to on with +DEBUG=true+ environment variable.
+       # @return [Boolean] current debug status
+       def debug
+         @debug.nil? ? [true, 'true', 1].include?(ENV['DEBUG']) : @debug
+       end
+
+       # Enable or disable debug output for time series commands. Enabling debug will
+       # print commands to +STDOUT+ as they're executed.
+       #
+       # @example
+       #   [1] pry(main)> @ts1.get
+       #   => #<Redis::TimeSeries::Sample:0x00007fc82e9de150 @time=2020-07-19 15:01:13 -0700, @value=0.56e2>
+       #   [2] pry(main)> Redis::TimeSeries.debug = true
+       #   => true
+       #   [3] pry(main)> @ts1.get
+       #   DEBUG: TS.GET ts1
+       #   => #<Redis::TimeSeries::Sample:0x00007fc82f11b7b0 @time=2020-07-19 15:01:13 -0700, @value=0.56e2>
+       #
+       # @return [Boolean] new debug status
+       def debug=(bool)
+         @debug = !!bool
+       end
+
+       # @return [Redis] the current Redis client. Defaults to +Redis.current+
+       def redis
+         @redis ||= Redis.current
+       end
+
+       # Set the default Redis client for time series objects.
+       # This may be useful if you already use a non-time-series Redis database, and want
+       # to use both at the same time.
+       #
+       # @example
+       #   # config/initializers/redis_time_series.rb
+       #   Redis::TimeSeries.redis = Redis.new(url: 'redis://my-redis-server:6379/0')
+       #
+       # @param client [Redis] a Redis client
+       # @return [Redis]
+       def redis=(client)
+         @redis = client
+       end
+
+       private
+
+       def cmd(name, *args)
+         cmd_with_redis redis, name, *args
+       end
+
+       def cmd_with_redis(redis, name, *args)
+         args = args.flatten.compact.map { |arg| arg.is_a?(Time) ? arg.ts_msec : arg }
+         puts "DEBUG: #{name} #{args.join(' ')}" if debug
+         redis.call name, args
+       end
+     end
+   end
+ end
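The Client module above is what the +debug+ and +redis=+ docs describe; the sketch below shows how an application might wire it up. The connection URL and the require path are assumptions for illustration, not part of this diff.

    # Illustrative setup only; URL and require path are assumed, not taken from the package.
    require 'redis-time-series'

    # Use a dedicated Redis instance for time series data instead of Redis.current.
    Redis::TimeSeries.redis = Redis.new(url: 'redis://localhost:6379/0')

    # Echo every TS.* command to STDOUT while debugging.
    Redis::TimeSeries.debug = true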
@@ -0,0 +1,19 @@
+ class Redis
+   class TimeSeries
+     # Base error class for convenient +rescue+-ing.
+     #
+     # Descendant of +Redis::BaseError+, so you can rescue that and capture all
+     # time-series errors, as well as standard Redis command errors.
+     class Error < Redis::BaseError; end
+
+     # +FilterError+ is raised when a given set of filters is invalid (i.e. does not contain
+     # an equality comparison such as "foo=bar"), or when a filter value is unparseable.
+     # @see Redis::TimeSeries::Filters
+     class FilterError < Error; end
+
+     # +AggregationError+ is raised when attempting to create an aggregation with
+     # an unknown type, or when calling a command with an invalid aggregation value.
+     # @see Redis::TimeSeries::Aggregation
+     class AggregationError < Error; end
+   end
+ end
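Since all three classes descend from +Redis::BaseError+, a caller can rescue at whichever granularity it needs. A hedged sketch (the query inside the block is hypothetical):

    begin
      # ... some TS.MRANGE query built from user-supplied filters ...
    rescue Redis::TimeSeries::FilterError => e
      # invalid or unparseable filter set
    rescue Redis::TimeSeries::Error => e
      # any other time-series error, still a Redis::BaseError underneath
    end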
@@ -0,0 +1,169 @@
+ # frozen_string_literal: true
+ class Redis
+   class TimeSeries
+     class Filters
+       Equal = Struct.new(:label, :value) do
+         self::REGEX = /^[^!]+=[^(]+/
+
+         def self.parse(str)
+           new(*str.split('='))
+         end
+
+         def to_h
+           { label => value }
+         end
+
+         def to_s
+           "#{label}=#{value}"
+         end
+       end
+
+       NotEqual = Struct.new(:label, :value) do
+         self::REGEX = /^.+!=[^(]+/
+
+         def self.parse(str)
+           new(*str.split('!='))
+         end
+
+         def to_h
+           { label => { not: value } }
+         end
+
+         def to_s
+           "#{label}!=#{value}"
+         end
+       end
+
+       Absent = Struct.new(:label) do
+         self::REGEX = /^[^!]+=$/
+
+         def self.parse(str)
+           new(str.delete('='))
+         end
+
+         def to_h
+           { label => false }
+         end
+
+         def to_s
+           "#{label}="
+         end
+       end
+
+       Present = Struct.new(:label) do
+         self::REGEX = /^.+!=$/
+
+         def self.parse(str)
+           new(str.delete('!='))
+         end
+
+         def to_h
+           { label => true }
+         end
+
+         def to_s
+           "#{label}!="
+         end
+       end
+
+       AnyValue = Struct.new(:label, :values) do
+         self::REGEX = /^[^!]+=\(.+\)/
+
+         def self.parse(str)
+           label, values = str.split('=')
+           values = values.tr('()', '').split(',')
+           new(label, values)
+         end
+
+         def to_h
+           { label => values }
+         end
+
+         def to_s
+           "#{label}=(#{values.map(&:to_s).join(',')})"
+         end
+       end
+
+       NoValues = Struct.new(:label, :values) do
+         self::REGEX = /^.+!=\(.+\)/
+
+         def self.parse(str)
+           label, values = str.split('!=')
+           values = values.tr('()', '').split(',')
+           new(label, values)
+         end
+
+         def to_h
+           { label => { not: values } }
+         end
+
+         def to_s
+           "#{label}!=(#{values.map(&:to_s).join(',')})"
+         end
+       end
+
+       TYPES = [Equal, NotEqual, Absent, Present, AnyValue, NoValues]
+       TYPES.each do |type|
+         define_method "#{type.to_s.split('::').last.gsub(/(.)([A-Z])/,'\1_\2').downcase}" do
+           filters.select { |f| f.is_a? type }
+         end
+       end
+
+       attr_reader :filters
+
+       def initialize(filters = nil)
+         @filters = case filters
+                    when String then parse_string(filters)
+                    when Hash then parse_hash(filters)
+                    else []
+                    end
+       end
+
+       def validate!
+         valid? || raise(FilterError, 'Filtering requires at least one equality comparison')
+       end
+
+       def valid?
+         !!filters.find { |f| f.is_a? Equal }
+       end
+
+       def to_a
+         filters.map(&:to_s)
+       end
+
+       def to_h
+         filters.reduce({}) { |h, filter| h.merge(filter.to_h) }
+       end
+
+       def to_s
+         to_a.join(' ')
+       end
+
+       private
+
+       def parse_string(filter_string)
+         return unless filter_string.is_a? String
+         filter_string.split(' ').map do |str|
+           match = TYPES.find { |f| f::REGEX.match? str }
+           raise(FilterError, "Unable to parse '#{str}'") unless match
+           match.parse(str)
+         end
+       end
+
+       def parse_hash(filter_hash)
+         return unless filter_hash.is_a? Hash
+         filter_hash.map do |label, value|
+           case value
+           when TrueClass then Present.new(label)
+           when FalseClass then Absent.new(label)
+           when Array then AnyValue.new(label, value)
+           when Hash
+             raise(FilterError, "Invalid filter hash value #{value}") unless value.keys === [:not]
+             (v = value.values.first).is_a?(Array) ? NoValues.new(label, v) : NotEqual.new(label, v)
+           else Equal.new(label, value)
+           end
+         end
+       end
+     end
+   end
+ end
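Based on the parsing code above, the string and hash filter forms should round-trip into the same structures. The values below are illustrative, worked out from the Struct definitions rather than taken from the gem's docs:

    filters = Redis::TimeSeries::Filters.new('region=us plan!=free env=(staging,production)')
    filters.to_h
    # => { 'region' => 'us', 'plan' => { not: 'free' }, 'env' => ['staging', 'production'] }
    filters.valid? # => true, because at least one equality filter is present

    # The same filters expressed as a hash render back to the string form:
    Redis::TimeSeries::Filters.new(region: 'us', plan: { not: 'free' }, env: %w[staging production]).to_s
    # => "region=us plan!=free env=(staging,production)"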
@@ -1,29 +1,84 @@
  # frozen_string_literal: true
  class Redis
    class TimeSeries
+     # The Info struct wraps the result of the +TS.INFO+ command with method access.
+     # It also applies some limited parsing to the result values, mainly snakifying
+     # the property keys, and instantiating Rule objects if necessary.
+     #
+     # All properties of the struct are also available on a TimeSeries object itself
+     # via delegation.
+     #
+     # @!attribute [r] chunk_count
+     #   @return [Integer] number of memory chunks used for the time-series
+     # @!attribute [r] first_timestamp
+     #   @return [Integer] first timestamp present in the time-series (milliseconds since epoch)
+     # @!attribute [r] labels
+     #   @return [Hash] a hash of label-value pairs that represent metadata labels of the time-series
+     # @!attribute [r] last_timestamp
+     #   @return [Integer] last timestamp present in the time-series (milliseconds since epoch)
+     # @!attribute [r] max_samples_per_chunk
+     #   @return [Integer] maximum number of samples per memory chunk
+     # @!attribute [r] memory_usage
+     #   @return [Integer] total number of bytes allocated for the time-series
+     # @!attribute [r] retention_time
+     #   @return [Integer] retention time, in milliseconds, for the time-series.
+     #     A zero value means unlimited retention.
+     # @!attribute [r] rules
+     #   @return [Array<Rule>] an array of configured compaction {Rule}s
+     # @!attribute [r] series
+     #   @return [TimeSeries] the series this info is from
+     # @!attribute [r] source_key
+     #   @return [String, nil] the key of the source series, if this series is the destination
+     #     of a compaction rule
+     # @!attribute [r] total_samples
+     #   @return [Integer] the total number of samples in the series
+     #
+     # @see TimeSeries#info
+     # @see https://oss.redislabs.com/redistimeseries/commands/#tsinfo
      Info = Struct.new(
-       :total_samples,
-       :memory_usage,
+       :chunk_count,
+       :chunk_size,
+       :chunk_type,
+       :duplicate_policy,
        :first_timestamp,
+       :labels,
        :last_timestamp,
-       :retention_time,
-       :chunk_count,
        :max_samples_per_chunk,
-       :labels,
-       :source_key,
+       :memory_usage,
+       :retention_time,
        :rules,
+       :series,
+       :source_key,
+       :total_samples,
        keyword_init: true
      ) do
-       def self.parse(raw_array)
-         raw_array.each_slice(2).reduce({}) do |h, (key, value)|
+       # @api private
+       # @return [Info]
+       def self.parse(series:, data:)
+         data.each_slice(2).reduce({}) do |h, (key, value)|
            # Convert camelCase info keys to snake_case
-           h[key.gsub(/(.)([A-Z])/,'\1_\2').downcase] = value
+           key = key.gsub(/(.)([A-Z])/,'\1_\2').downcase.to_sym
+           next h unless members.include?(key)
+           h[key] = value
            h
          end.then do |parsed_hash|
-           parsed_hash['labels'] = parsed_hash['labels'].to_h
+           parsed_hash[:series] = series
+           parsed_hash[:labels] = parsed_hash[:labels].to_h
+           parsed_hash[:rules] = parsed_hash[:rules].map { |d| Rule.new(source: series, data: d) }
            new(parsed_hash)
          end
        end
+
+       alias count total_samples
+       alias length total_samples
+       alias size total_samples
+
+       # If this series is the destination of a compaction rule, returns the source series of the data.
+       # @return [TimeSeries, nil] the series referred to by {source_key}
+       def source
+         return unless source_key
+         @source ||= TimeSeries.new(source_key, redis: series.redis)
+       end
      end
    end
  end
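The +parse+ signature changed from a bare array to keywords, and keys are now symbolized and filtered against the struct members. A rough sketch of the new shape, with an abbreviated, made-up +TS.INFO+ reply and +ts+ standing in for an existing TimeSeries object:

    raw  = ['totalSamples', 1000, 'memoryUsage', 4184, 'labels', [['sensor', '3']], 'rules', []]
    info = Redis::TimeSeries::Info.parse(series: ts, data: raw)
    info.total_samples # => 1000
    info.count         # => 1000, via the new alias
    info.labels        # => { 'sensor' => '3' }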
@@ -0,0 +1,49 @@
+ # frozen_string_literal: true
+ class Redis
+   class TimeSeries
+     # A compaction rule applies an aggregation from a source series to a destination series.
+     # As data is added to the source, it will be aggregated based on any configured rule(s) and
+     # distributed to the correct destination(s).
+     #
+     # Compaction rules are useful to retain data over long time periods without requiring exorbitant
+     # amounts of memory and storage. For example, if you're collecting data on a minute-by-minute basis,
+     # you may want to retain a week's worth of data at full fidelity, and a year's worth of data downsampled
+     # to hourly, which would require 60x less memory.
+     class Rule
+       # @return [Aggregation] the configured aggregation for this rule
+       attr_reader :aggregation
+
+       # @return [String] the Redis key of the destination series
+       attr_reader :destination_key
+
+       # @return [TimeSeries] the data source of this compaction rule
+       attr_reader :source
+
+       # Manually instantiating a rule does nothing, don't bother.
+       # @api private
+       # @see Info#rules
+       def initialize(source:, data:)
+         @source = source
+         @destination_key, duration, aggregation_type = data
+         @aggregation = Aggregation.new(aggregation_type, duration)
+       end
+
+       # Delete this compaction rule.
+       # @return [String] the string "OK"
+       def delete
+         source.delete_rule(dest: destination_key)
+       end
+
+       # @return [TimeSeries] the destination time series this rule refers to
+       def destination
+         @dest ||= TimeSeries.new(destination_key, redis: source.redis)
+       end
+       alias dest destination
+
+       # @return [String] the Redis key of the source series
+       def source_key
+         source.key
+       end
+     end
+   end
+ end
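Rules are built by +Info.parse+ rather than by hand, so in practice they come back from a series' info. A hypothetical walk over them, with +ts+ again standing in for an existing TimeSeries:

    ts.info.rules.each do |rule|
      rule.source_key       # key of the series the rule reads from (ts itself)
      rule.destination_key  # key of the downsampled destination series
      rule.aggregation      # Aggregation describing the type and bucket duration
    end

    # Removing a rule goes through its source series, per Rule#delete above.
    ts.info.rules.first&.delete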
@@ -1,23 +1,39 @@
  # frozen_string_literal: true
  class Redis
    class TimeSeries
+     # A sample is an immutable value object that represents a single data point within a time series.
      class Sample
-       TS_FACTOR = 1000.0
+       using TimeMsec

-       attr_reader :time, :value
+       # @return [Time] the sample's timestamp
+       attr_reader :time
+       # @return [BigDecimal] the decimal value of the sample
+       attr_reader :value

+       # Samples are returned by time series query methods, there's no need to create one yourself.
+       # @api private
+       # @see TimeSeries#get
+       # @see TimeSeries#range
        def initialize(timestamp, value)
-         @time = Time.at(timestamp / TS_FACTOR)
+         @time = Time.from_msec(timestamp)
          @value = BigDecimal(value)
        end

-       def ts_msec
-         (time.to_f * TS_FACTOR).to_i
+       # @return [Integer] the millisecond value of the sample's timestamp
+       # @note
+       #   We're wrapping the method provided by the {TimeMsec} refinement for convenience,
+       #   otherwise it wouldn't be callable on {time} and devs would have to litter
+       #   +using TimeMsec+ or +* 1000.0+ wherever they wanted the value.
+       def to_msec
+         time.ts_msec
        end

+       # @return [Hash] a hash representation of the sample
+       # @example
+       #   {:timestamp=>1595199272401, :value=>0.2e1}
        def to_h
          {
-           timestamp: ts_msec,
+           timestamp: to_msec,
            value: value
          }
        end
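Putting the Sample changes together, a returned sample now behaves roughly as follows; the timestamp and value are taken from the +to_h+ example above, and +ts+ is a hypothetical series:

    sample = ts.get   # => a Redis::TimeSeries::Sample
    sample.time       # => a Time built from the millisecond timestamp via Time.from_msec
    sample.to_msec    # => 1595199272401 (replaces the old #ts_msec)
    sample.to_h       # => {:timestamp=>1595199272401, :value=>0.2e1}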