influxdb 0.1.9 → 0.2.0

Files changed (51)
  1. checksums.yaml +4 -4
  2. data/.gitignore +1 -0
  3. data/.rubocop.yml +41 -0
  4. data/.travis.yml +3 -2
  5. data/Gemfile +7 -1
  6. data/README.md +218 -102
  7. data/Rakefile +2 -6
  8. data/lib/influxdb.rb +15 -5
  9. data/lib/influxdb/client.rb +38 -433
  10. data/lib/influxdb/client/http.rb +123 -0
  11. data/lib/influxdb/config.rb +66 -0
  12. data/lib/influxdb/errors.rb +8 -2
  13. data/lib/influxdb/{logger.rb → logging.rb} +6 -5
  14. data/lib/influxdb/max_queue.rb +2 -1
  15. data/lib/influxdb/point_value.rb +27 -25
  16. data/lib/influxdb/query/cluster.rb +17 -0
  17. data/lib/influxdb/query/continuous_query.rb +22 -0
  18. data/lib/influxdb/query/core.rb +110 -0
  19. data/lib/influxdb/query/database.rb +21 -0
  20. data/lib/influxdb/query/retention_policy.rb +26 -0
  21. data/lib/influxdb/query/user.rb +41 -0
  22. data/lib/influxdb/version.rb +2 -2
  23. data/lib/influxdb/writer/async.rb +115 -0
  24. data/lib/influxdb/writer/udp.rb +21 -0
  25. data/spec/influxdb/cases/async_client_spec.rb +33 -0
  26. data/spec/influxdb/cases/query_cluster_spec.rb +65 -0
  27. data/spec/influxdb/cases/query_continuous_query_spec.rb +82 -0
  28. data/spec/influxdb/cases/query_core.rb +34 -0
  29. data/spec/influxdb/cases/query_database_spec.rb +58 -0
  30. data/spec/influxdb/cases/query_retention_policy_spec.rb +84 -0
  31. data/spec/influxdb/cases/query_series_spec.rb +50 -0
  32. data/spec/influxdb/cases/query_shard_space_spec.rb +105 -0
  33. data/spec/influxdb/cases/query_shard_spec.rb +43 -0
  34. data/spec/influxdb/cases/query_user_spec.rb +127 -0
  35. data/spec/influxdb/cases/querying_spec.rb +149 -0
  36. data/spec/influxdb/cases/retry_requests_spec.rb +102 -0
  37. data/spec/influxdb/cases/udp_client_spec.rb +21 -0
  38. data/spec/influxdb/cases/write_points_spec.rb +140 -0
  39. data/spec/influxdb/client_spec.rb +37 -810
  40. data/spec/influxdb/config_spec.rb +118 -0
  41. data/spec/influxdb/{logger_spec.rb → logging_spec.rb} +4 -8
  42. data/spec/influxdb/max_queue_spec.rb +29 -0
  43. data/spec/influxdb/point_value_spec.rb +81 -14
  44. data/spec/influxdb/worker_spec.rb +8 -11
  45. data/spec/spec_helper.rb +7 -10
  46. metadata +65 -30
  47. data/lib/influxdb/udp_client.rb +0 -16
  48. data/lib/influxdb/worker.rb +0 -80
  49. data/spec/influxdb/udp_client_spec.rb +0 -33
  50. data/spec/influxdb_spec.rb +0 -4
  51. data/spec/max_queue_spec.rb +0 -32
data/lib/influxdb/client/http.rb
@@ -0,0 +1,123 @@
+ require 'uri'
+ require 'cgi'
+ require 'net/http'
+ require 'net/https'
+
+ module InfluxDB
+   # rubocop:disable Metrics/MethodLength
+   # rubocop:disable Metrics/AbcSize
+   module HTTP # :nodoc:
+     def get(url, options = {})
+       connect_with_retry do |http|
+         response = do_request http, Net::HTTP::Get.new(url)
+         if response.is_a? Net::HTTPSuccess
+           handle_successful_response(response, options)
+         elsif response.is_a? Net::HTTPUnauthorized
+           fail InfluxDB::AuthenticationError, response.body
+         else
+           resolve_error(response.body)
+         end
+       end
+     end
+
+     def post(url, data)
+       headers = { "Content-Type" => "application/octet-stream" }
+       connect_with_retry do |http|
+         response = do_request http, Net::HTTP::Post.new(url, headers), data
+         if response.is_a? Net::HTTPSuccess
+           return response
+         elsif response.is_a? Net::HTTPUnauthorized
+           fail InfluxDB::AuthenticationError, response.body
+         else
+           resolve_error(response.body)
+         end
+       end
+     end
+
+     private
+
+     def connect_with_retry(&block)
+       hosts = config.hosts.dup
+       delay = config.initial_delay
+       retry_count = 0
+
+       begin
+         hosts.push(host = hosts.shift)
+         http = Net::HTTP.new(host, config.port)
+         http.open_timeout = config.open_timeout
+         http.read_timeout = config.read_timeout
+
+         http = setup_ssl(http)
+
+         block.call(http)
+
+       rescue Timeout::Error, *InfluxDB::NET_HTTP_EXCEPTIONS => e
+         retry_count += 1
+         if (config.retry == -1 || retry_count <= config.retry) && !stopped?
+           log :error, "Failed to contact host #{host}: #{e.inspect} - retrying in #{delay}s."
+           sleep delay
+           delay = [config.max_delay, delay * 2].min
+           retry
+         else
+           raise InfluxDB::ConnectionError, "Tried #{retry_count - 1} times to reconnect but failed."
+         end
+       ensure
+         http.finish if http.started?
+       end
+     end
+
+     def do_request(http, req, data = nil)
+       req.basic_auth config.username, config.password if basic_auth?
+       req.body = data if data
+       http.request(req)
+     end
+
+     def basic_auth?
+       config.auth_method == 'basic_auth'
+     end
+
+     def resolve_error(response)
+       if response =~ /Couldn\'t find series/
+         fail InfluxDB::SeriesNotFound, response
+       else
+         fail InfluxDB::Error, response
+       end
+     end
+
+     def handle_successful_response(response, options)
+       parsed_response = JSON.parse(response.body) if response.body
+       errors = errors_from_response(parsed_response) if parsed_response
+       fail InfluxDB::QueryError, errors if errors
+       options.fetch(:parse, false) ? parsed_response : response
+     end
+
+     def errors_from_response(parsed_resp)
+       parsed_resp.is_a?(Hash) && parsed_resp.fetch('results', [])
+         .fetch(0, {})
+         .fetch('error', nil)
+     end
+
+     def setup_ssl(http)
+       http.use_ssl = config.use_ssl
+       http.verify_mode = OpenSSL::SSL::VERIFY_NONE unless config.verify_ssl
+
+       return http unless config.use_ssl
+
+       http.cert_store = generate_cert_store
+       http
+     end
+
+     def generate_cert_store
+       store = OpenSSL::X509::Store.new
+       store.set_default_paths
+       if config.ssl_ca_cert
+         if File.directory?(config.ssl_ca_cert)
+           store.add_path(config.ssl_ca_cert)
+         else
+           store.add_file(config.ssl_ca_cert)
+         end
+       end
+       store
+     end
+   end
+ end
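The interesting part of this new file is connect_with_retry: it rotates through config.hosts, retries on timeouts and the Net::HTTP exception list from errors.rb, and backs off exponentially up to config.max_delay (config.retry == -1 means retry forever). Below is a standalone sketch of that pattern, not the gem's code; attempt_request is a hypothetical stand-in for the real Net::HTTP call.

```ruby
require 'timeout'

hosts     = %w(db1.local db2.local)   # hypothetical host names
delay     = 0.01
max_delay = 30
max_retry = 5
retries   = 0
failures  = 0

# Pretend the first two attempts time out, then a host answers.
attempt_request = lambda do |host|
  failures += 1
  raise Timeout::Error, "no route to #{host}" if failures < 3
  "200 OK from #{host}"
end

begin
  hosts.push(host = hosts.shift)        # round-robin over the host list
  puts attempt_request.call(host)
rescue Timeout::Error => e
  retries += 1
  raise "Tried #{retries - 1} times to reconnect but failed." if retries > max_retry
  warn "Failed to contact host #{host}: #{e.message} - retrying in #{delay}s."
  sleep delay
  delay = [max_delay, delay * 2].min    # exponential backoff with a ceiling
  retry
end
```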
data/lib/influxdb/config.rb
@@ -0,0 +1,66 @@
+ module InfluxDB
+   # InfluxDB client configuration
+   class Config
+     AUTH_METHODS = %w(params basic_auth)
+
+     attr_accessor :hosts,
+                   :port,
+                   :username,
+                   :password,
+                   :database,
+                   :time_precision,
+                   :use_ssl,
+                   :verify_ssl,
+                   :ssl_ca_cert,
+                   :auth_method,
+                   :initial_delay,
+                   :max_delay,
+                   :open_timeout,
+                   :read_timeout,
+                   :retry,
+                   :prefix,
+                   :denormalize
+
+     attr_reader :async, :udp
+
+     # rubocop:disable all
+     def initialize(opts = {})
+       @database = opts[:database]
+       @hosts = Array(opts[:hosts] || opts[:host] || ["localhost"])
+       @port = opts.fetch(:port, 8086)
+       @prefix = opts.fetch(:prefix, '')
+       @username = opts.fetch(:username, "root")
+       @password = opts.fetch(:password, "root")
+       @auth_method = AUTH_METHODS.include?(opts[:auth_method]) ? opts[:auth_method] : "params"
+       @use_ssl = opts.fetch(:use_ssl, false)
+       @verify_ssl = opts.fetch(:verify_ssl, true)
+       @ssl_ca_cert = opts.fetch(:ssl_ca_cert, false)
+       @time_precision = opts.fetch(:time_precision, "s")
+       @initial_delay = opts.fetch(:initial_delay, 0.01)
+       @max_delay = opts.fetch(:max_delay, 30)
+       @open_timeout = opts.fetch(:write_timeout, 5)
+       @read_timeout = opts.fetch(:read_timeout, 300)
+       @async = opts.fetch(:async, false)
+       @udp = opts.fetch(:udp, false)
+       @retry = opts.fetch(:retry, nil)
+       @denormalize = opts.fetch(:denormalize, true)
+       @retry =
+         case @retry
+         when Integer
+           @retry
+         when true, nil
+           -1
+         when false
+           0
+         end
+     end
+
+     def udp?
+       !!udp
+     end
+
+     def async?
+       !!async
+     end
+   end
+ end
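Config centralizes every option the 0.1.x client took ad hoc. One quirk visible in the hunk: @open_timeout is read from the :write_timeout key. The Client constructor itself is not part of this hunk, but based on the accompanying README changes these keys are presumably passed straight through, along these lines (a hedged sketch, host names invented):

```ruby
require 'influxdb'

client = InfluxDB::Client.new(
  'my_database',                                    # => config.database
  hosts:          %w(influx1.local influx2.local),  # hypothetical hosts
  port:           8086,
  username:       'root',
  password:       'root',
  auth_method:    'basic_auth',  # anything outside %w(params basic_auth) falls back to "params"
  use_ssl:        false,
  time_precision: 's',
  write_timeout:  5,             # note: this key populates config.open_timeout
  read_timeout:   300,
  retry:          5,             # Integer => attempt count; true/nil => -1 (retry forever); false => 0
  denormalize:    true
)
```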
data/lib/influxdb/errors.rb
@@ -1,7 +1,7 @@
  require "net/http"
  require "zlib"

- module InfluxDB
+ module InfluxDB # :nodoc:
    class Error < StandardError
    end

@@ -11,9 +11,15 @@ module InfluxDB
    class ConnectionError < Error
    end

+   class SeriesNotFound < Error
+   end
+
    class JSONParserError < Error
    end

+   class QueryError < Error
+   end
+
    # Taken from: https://github.com/lostisland/faraday/blob/master/lib/faraday/adapter/net_http.rb
    NET_HTTP_EXCEPTIONS = [
      EOFError,
@@ -27,7 +33,7 @@ module InfluxDB
      Net::HTTPHeaderSyntaxError,
      Net::ProtocolError,
      SocketError,
-     Zlib::GzipFile::Error,
+     Zlib::GzipFile::Error
    ]

    NET_HTTP_EXCEPTIONS << OpenSSL::SSL::SSLError if defined?(OpenSSL)
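Two error classes are new in this release: SeriesNotFound (raised by resolve_error in http.rb) and QueryError (raised when a 2xx response body carries an error). A hedged handling example, assuming a client built as in the Config sketch above:

```ruby
require 'influxdb'

client = InfluxDB::Client.new('my_database')  # assumed constructor, as above

begin
  client.query "SELECT * FROM temperatures"
rescue InfluxDB::SeriesNotFound => e
  warn "series does not exist: #{e.message}"
rescue InfluxDB::QueryError => e
  warn "server rejected the query: #{e.message}"
rescue InfluxDB::ConnectionError => e
  warn "all hosts unreachable after retries: #{e.message}"
end
```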
data/lib/influxdb/{logger.rb → logging.rb}
@@ -1,19 +1,20 @@
  require 'logger'

  module InfluxDB
-   module Logging
+   module Logging # :nodoc:
      PREFIX = "[InfluxDB] "

-     def self.logger=(new_logger)
-       @logger = new_logger
+     class << self
+       attr_writer :logger
      end

      def self.logger
-       return @logger unless @logger.nil?
-       @logger = ::Logger.new(STDERR).tap {|logger| logger.level = Logger::INFO}
+       return false if @logger == false
+       @logger ||= ::Logger.new(STDERR).tap { |logger| logger.level = Logger::INFO }
      end

      private
+
      def log(level, message)
        InfluxDB::Logging.logger.send(level.to_sym, PREFIX + message) if InfluxDB::Logging.logger
      end
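With attr_writer on the singleton and the `== false` guard, callers can swap in their own logger or silence the library entirely; a small example based directly on the module above:

```ruby
require 'logger'
require 'influxdb'

# Inject a custom logger...
InfluxDB::Logging.logger = Logger.new($stdout).tap { |l| l.level = Logger::DEBUG }

# ...or assign false to disable logging (the `== false` guard skips the STDERR default).
InfluxDB::Logging.logger = false
```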
data/lib/influxdb/max_queue.rb
@@ -1,11 +1,12 @@
  require "thread"

  module InfluxDB
+   # Queue with max length limit
    class MaxQueue < Queue
      attr_reader :max

      def initialize(max = 10_000)
-       raise ArgumentError, "queue size must be positive" unless max > 0
+       fail ArgumentError, "queue size must be positive" unless max > 0
        @max = max
        super()
      end
data/lib/influxdb/point_value.rb
@@ -1,36 +1,38 @@
- require 'json'
-
  module InfluxDB
-
+   # Convert data point to string using Line protocol
    class PointValue
-     attr_accessor :value
+     attr_reader :series, :values, :tags, :timestamp

-     def initialize(value)
-       @value = value
+     def initialize(data)
+       @series = data[:series].gsub(/\s/, '\ ').gsub(',','\,')
+       @values = stringify(data[:values])
+       @tags = stringify(data[:tags])
+       @timestamp = data[:timestamp]
      end

      def dump
-       if value.is_a?(Array) || value.is_a?(Hash)
-         JSON.generate(value)
-       else
-         value
-       end
+       dump = "#{@series}"
+       dump << ",#{@tags}" if @tags
+       dump << " #{@values}"
+       dump << " #{@timestamp}" if @timestamp
+       dump
      end

-     def load
-       if maybe_json?
-         begin
-           JSON.parse(value)
-         rescue JSON::ParserError => e
-           value
-         end
-       else
-         value
-       end
-     end
+     private

-     def maybe_json?
-       value.is_a?(String) && value =~ /\A(\{|\[).*(\}|\])$/
+     def stringify(hash)
+       return nil unless hash && !hash.empty?
+       hash.map do |k, v|
+         key = k.to_s.gsub(/\s/, '\ ').gsub(',','\,')
+         val = v
+         if val.is_a?(String)
+           val.gsub!(/\s/, '\ ')
+           val.gsub!(',', '\,')
+           val.gsub!('"', '\"')
+           val = '"' + val + '"'
+         end
+         "#{key}=#{val}"
+       end.join(',')
      end
    end
- end
+ end
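PointValue no longer round-trips JSON; it renders the InfluxDB 0.9 line protocol. Tracing a sample point through dump as written (note that stringify quotes every String value, tag values included):

```ruby
require 'influxdb'

point = InfluxDB::PointValue.new(
  series:    'cpu load',               # whitespace escaped to cpu\ load
  tags:      { host: 'server_nl' },    # string tag values get quoted by stringify
  values:    { internal: 5, state: 'ok' },
  timestamp: 1422568543702900257
)

point.dump
# => cpu\ load,host="server_nl" internal=5,state="ok" 1422568543702900257
```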
data/lib/influxdb/query/cluster.rb
@@ -0,0 +1,17 @@
+ module InfluxDB
+   module Query
+     module Cluster # :nodoc:
+       def create_cluster_admin(username, password)
+         execute("CREATE USER #{username} WITH PASSWORD '#{password}' WITH ALL PRIVILEGES")
+       end
+
+       def list_cluster_admins
+         list_users.select { |u| u['admin'] }.map { |u| u['username'] }
+       end
+
+       def revoke_cluster_admin_privileges(username)
+         execute("REVOKE ALL PRIVILEGES FROM #{username}")
+       end
+     end
+   end
+ end
data/lib/influxdb/query/continuous_query.rb
@@ -0,0 +1,22 @@
+ module InfluxDB
+   module Query
+     module ContinuousQuery # :nodoc:
+       def list_continuous_queries(database)
+         resp = execute("SHOW CONTINUOUS QUERIES", parse: true)
+         fetch_series(resp).select { |v| v['name'] == database }
+           .fetch(0, {})
+           .fetch('values', [])
+           .map { |v| { 'name' => v.first, 'query' => v.last } }
+       end
+
+       def create_continuous_query(name, database, query)
+         clause = ["CREATE CONTINUOUS QUERY #{name} ON #{database} BEGIN", query, "END"].join("\n")
+         execute(clause)
+       end
+
+       def delete_continuous_query(name, database)
+         execute("DROP CONTINUOUS QUERY #{name} ON #{database}")
+       end
+     end
+   end
+ end
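A hedged usage sketch for these helpers; the CQ name and SELECT statement are invented for illustration, and the client is assumed to mix this module in:

```ruby
require 'influxdb'

client = InfluxDB::Client.new('my_database')  # assumed constructor, as above

# create_continuous_query wraps the SELECT in
# "CREATE CONTINUOUS QUERY <name> ON <db> BEGIN ... END" as shown in the hunk.
client.create_continuous_query(
  'cpu_mean_1h',
  'my_database',
  'SELECT mean(internal) INTO cpu_mean_1h FROM cpu GROUP BY time(1h)'
)

client.list_continuous_queries('my_database')
# => [{ 'name' => 'cpu_mean_1h', 'query' => '...' }]

client.delete_continuous_query('cpu_mean_1h', 'my_database')
```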
data/lib/influxdb/query/core.rb
@@ -0,0 +1,110 @@
+ module InfluxDB
+   module Query # :nodoc: all
+     # rubocop:disable Metrics/AbcSize
+     module Core
+       def ping
+         get "/ping"
+       end
+
+       # rubocop:disable Metrics/MethodLength
+       def query(query, opts = {})
+         precision = opts.fetch(:precision, config.time_precision)
+         denormalize = opts.fetch(:denormalize, config.denormalize)
+
+         url = full_url("/query", q: query, db: config.database, precision: precision)
+         series = fetch_series(get(url, parse: true))
+
+         if block_given?
+           series.each do |s|
+             yield s['name'], s['tags'], denormalize ? denormalize_series(s) : raw_values(s)
+           end
+         else
+           denormalize ? denormalized_series_list(series) : series
+         end
+       end
+       # rubocop:enable Metrics/MethodLength
+
+       # Example:
+       # write_points([
+       #   {
+       #     series: 'cpu',
+       #     tags: { host: 'server_nl', regios: 'us' },
+       #     values: {internal: 5, external: 6},
+       #     timestamp: 1422568543702900257
+       #   },
+       #   {
+       #     series: 'gpu',
+       #     values: {value: 0.9999},
+       #   }
+       # ])
+       def write_points(data, precision = nil)
+         data = data.is_a?(Array) ? data : [data]
+         payload = generate_payload(data)
+         writer.write(payload, precision)
+       end
+
+       # Example:
+       # write_point('cpu', tags: {region: 'us'}, values: {internal: 60})
+       def write_point(series, data, precision = nil)
+         data.merge!(series: series)
+         write_points(data, precision)
+       end
+
+       def write(data, precision)
+         precision ||= config.time_precision
+         url = full_url("/write", db: config.database, precision: precision)
+         post(url, data)
+       end
+
+       private
+
+       def denormalized_series_list(series)
+         series.map do |s|
+           {
+             'name' => s['name'],
+             'tags' => s['tags'],
+             'values' => denormalize_series(s)
+           }
+         end
+       end
+
+       def fetch_series(response)
+         response.fetch('results', [])
+           .fetch(0, {})
+           .fetch('series', [])
+       end
+
+       def generate_payload(data)
+         data.map do |point|
+           InfluxDB::PointValue.new(point).dump
+         end.join("\n")
+       end
+
+       def execute(query, options = {})
+         url = full_url("/query", q: query)
+         get(url, options)
+       end
+
+       def denormalize_series(series)
+         Array(series["values"]).map do |values|
+           Hash[series["columns"].zip(values)]
+         end
+       end
+
+       def raw_values(series)
+         series.select { |k, _| %w(columns values).include?(k) }
+       end
+
+       def full_url(path, params = {})
+         unless basic_auth?
+           params[:u] = config.username
+           params[:p] = config.password
+         end
+
+         query = params.map { |k, v| [CGI.escape(k.to_s), "=", CGI.escape(v.to_s)].join }.join("&")
+
+         URI::Generic.build(path: File.join(config.prefix, path), query: query).to_s
+       end
+     end
+   end
+ end
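Putting Core together with the writer and HTTP layers, a hedged end-to-end sketch (client construction assumed to match the 0.2.0 README; query text is illustrative):

```ruby
require 'influxdb'

client = InfluxDB::Client.new('my_database', host: 'localhost')

# write_point merges the series name into the data hash and delegates to
# write_points, which serializes each point via InfluxDB::PointValue.
client.write_point('cpu', tags: { region: 'us' }, values: { internal: 60 })

# With denormalize (the default) each row arrives as a column-name => value
# hash; the block form yields series name, tags, and those rows.
client.query "SELECT * FROM cpu WHERE time > now() - 1h" do |name, tags, points|
  points.each { |row| puts "#{name} #{tags.inspect} internal=#{row['internal']}" }
end
```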