influxdb-client 1.0.0.pre.183 → 1.1.0.pre.380

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,284 @@
1
=begin
#Influx API Service

#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)

OpenAPI spec version: 0.1.0

Generated by: https://openapi-generator.tech
OpenAPI Generator version: 3.3.4

=end

require 'date'

module InfluxDB2
  # Query influx with specific return formatting.
  #
  # OpenAPI-generated model: carries the query script plus the options that
  # control how the server executes it and formats the response.
  class Query
    # Extension payload sent alongside the query (serialized as :File per
    # openapi_types) — semantics defined by the server; TODO confirm.
    attr_accessor :extern

    # Query script to execute.
    attr_accessor :query

    # The type of query. One of 'flux' or 'influxql'; defaults to 'flux'.
    attr_accessor :type

    # Required for `influxql` type queries.
    attr_accessor :db

    # Required for `influxql` type queries.
    attr_accessor :rp

    # Required for `influxql` type queries.
    attr_accessor :cluster

    # Dialect describing the CSV formatting of the response.
    attr_accessor :dialect

    # Validates that a value belongs to a fixed set of allowed values (enum).
    class EnumAttributeValidator
      attr_reader :datatype
      attr_reader :allowable_values

      # @param [String] datatype name of the value type; Integer/Float enums
      #   are coerced so comparisons in #valid? are type-consistent
      # @param [Array] allowable_values the permitted values
      def initialize(datatype, allowable_values)
        # Fix: the generated code never assigned @datatype, leaving the
        # attr_reader permanently nil.
        @datatype = datatype
        @allowable_values = allowable_values.map do |value|
          case datatype.to_s
          when /Integer/i
            value.to_i
          when /Float/i
            value.to_f
          else
            value
          end
        end
      end

      # nil is treated as valid; only non-nil values are checked against the enum.
      def valid?(value)
        !value || allowable_values.include?(value)
      end
    end

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'extern' => :'extern',
        :'query' => :'query',
        :'type' => :'type',
        :'db' => :'db',
        :'rp' => :'rp',
        :'cluster' => :'cluster',
        :'dialect' => :'dialect'
      }
    end

    # Attribute type mapping.
    def self.openapi_types
      {
        :'extern' => :'File',
        :'query' => :'String',
        :'type' => :'String',
        :'db' => :'String',
        :'rp' => :'String',
        :'cluster' => :'String',
        :'dialect' => :'Dialect'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      self.extern = attributes[:'extern'] if attributes.key?(:'extern')
      self.query = attributes[:'query'] if attributes.key?(:'query')

      # 'flux' is the default query language when none was supplied
      self.type = attributes.key?(:'type') ? attributes[:'type'] : 'flux'

      self.db = attributes[:'db'] if attributes.key?(:'db')
      self.rp = attributes[:'rp'] if attributes.key?(:'rp')
      self.cluster = attributes[:'cluster'] if attributes.key?(:'cluster')
      self.dialect = attributes[:'dialect'] if attributes.key?(:'dialect')
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = []
      invalid_properties.push('invalid value for "query", query cannot be nil.') if @query.nil?
      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return false if @query.nil?
      type_validator = EnumAttributeValidator.new('String', ['flux', 'influxql'])
      return false unless type_validator.valid?(@type)
      true
    end

    # Custom attribute writer method checking allowed values (enum).
    # @param [Object] type Object to be assigned
    # @raise [ArgumentError] when the value is not one of the allowed types
    def type=(type)
      validator = EnumAttributeValidator.new('String', ['flux', 'influxql'])
      unless validator.valid?(type)
        # Fix: double-quoted so the allowable values are interpolated — the
        # generated single-quoted literal printed the raw '#{...}' text.
        raise ArgumentError, "invalid value for \"type\", must be one of #{validator.allowable_values}."
      end
      @type = type
    end

    # Checks equality by comparing each attribute.
    # @param [Object] o Object to be compared
    def ==(o)
      return true if equal?(o)
      self.class == o.class &&
        extern == o.extern &&
        query == o.query &&
        type == o.type &&
        db == o.db &&
        rp == o.rp &&
        cluster == o.cluster &&
        dialect == o.dialect
    end

    # @see the `==` method
    # @param [Object] o Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [extern, query, type, db, rp, cluster, dialect].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.openapi_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize(Regexp.last_match(1), v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        # any of true/t/yes/y/1 (case-insensitive) counts as true
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = InfluxDB2.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
@@ -0,0 +1,93 @@
1
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require_relative 'models/dialect'
require_relative 'models/query'
require_relative 'flux_csv_parser'
require 'json'

module InfluxDB2
  # The client of the InfluxDB 2.0 that implement Query HTTP API endpoint.
  #
  class QueryApi < DefaultApi
    # Annotated-CSV dialect requested by default: header row plus the
    # datatype/group/default annotation rows, comma separated.
    DEFAULT_DIALECT = InfluxDB2::Dialect.new(header: true, delimiter: ',', comment_prefix: '#',
                                             annotations: %w[datatype group default])

    # @param [Hash] options The options to be used by the client.
    def initialize(options:)
      super(options: options)
    end

    # Executes the query and returns the raw (unparsed) response body.
    #
    # @param [Object] query the flux query to execute. The data could be represent by [String], [Query]
    # @param [String] org specifies the source organization
    # @return [String] result of query
    def query_raw(query: nil, org: nil, dialect: DEFAULT_DIALECT)
      _post_query(query: query, org: org, dialect: dialect).read_body
    end

    # Executes the query and eagerly parses the whole response.
    #
    # @param [Object] query the flux query to execute. The data could be represent by [String], [Query]
    # @param [String] org specifies the source organization
    # @return [Array] list of FluxTables which are matched the query
    def query(query: nil, org: nil, dialect: DEFAULT_DIALECT)
      csv = query_raw(query: query, org: org, dialect: dialect)
      InfluxDB2::FluxCsvParser.new(csv).tap(&:parse).tables
    end

    # Executes the query and exposes the response as a streaming parser.
    #
    # @param [Object] query the flux query to execute. The data could be represent by [String], [Query]
    # @param [String] org specifies the source organization
    # @return stream of Flux Records
    def query_stream(query: nil, org: nil, dialect: DEFAULT_DIALECT)
      InfluxDB2::FluxCsvParser.new(_post_query(query: query, org: org, dialect: dialect), stream: true)
    end

    private

    # POSTs the serialized query to /api/v2/query; returns nil when there is
    # nothing to send (nil or empty query).
    def _post_query(query: nil, org: nil, dialect: DEFAULT_DIALECT)
      source_org = org || @options[:org]
      _check('org', source_org)

      payload = _generate_payload(query, dialect)
      return nil if payload.nil?

      uri = URI.parse(File.join(@options[:url], '/api/v2/query'))
      uri.query = URI.encode_www_form(org: source_org)

      _post(payload.to_body.to_json, uri)
    end

    # Normalizes the supported query representations into a Query model;
    # nil and empty strings yield nil (nothing to post).
    def _generate_payload(query, dialect)
      case query
      when Query
        query
      when String
        Query.new(query: query, dialect: dialect, type: nil) unless query.empty?
      end
    end
  end
end
@@ -19,5 +19,5 @@
19
19
  # THE SOFTWARE.
20
20
 
21
21
  module InfluxDB2
22
- VERSION = '1.0.0'.freeze
22
+ VERSION = '1.1.0'.freeze
23
23
  end
@@ -37,14 +37,10 @@ module InfluxDB2
37
37
 
38
38
  # Write time series data into InfluxDB.
39
39
  #
40
- class WriteApi
41
- DEFAULT_TIMEOUT = 10
42
- DEFAULT_REDIRECT_COUNT = 10
43
-
40
+ class WriteApi < DefaultApi
44
41
  # @param [Hash] options The options to be used by the client.
45
42
  def initialize(options:)
46
- @options = options
47
- @max_redirect_count = @options[:max_redirect_count] || DEFAULT_REDIRECT_COUNT
43
+ super(options: options)
48
44
  end
49
45
 
50
46
  # Write data into specified Bucket.
@@ -98,39 +94,6 @@ module InfluxDB2
98
94
 
99
95
  private
100
96
 
101
- def _post(payload, uri, limit = @max_redirect_count)
102
- raise InfluxError.from_message("Too many HTTP redirects. Exceeded limit: #{@max_redirect_count}") if limit.zero?
103
-
104
- http = Net::HTTP.new(uri.host, uri.port)
105
- http.open_timeout = @options[:open_timeout] || DEFAULT_TIMEOUT
106
- http.write_timeout = @options[:write_timeout] || DEFAULT_TIMEOUT if Net::HTTP.method_defined? :write_timeout
107
- http.read_timeout = @options[:read_timeout] || DEFAULT_TIMEOUT
108
- http.use_ssl = @options[:use_ssl].nil? ? true : @options[:use_ssl]
109
-
110
- request = Net::HTTP::Post.new(uri.request_uri)
111
- request['Authorization'] = "Token #{@options[:token]}"
112
- request.body = payload
113
-
114
- begin
115
- response = http.request(request)
116
- case response
117
- when Net::HTTPSuccess then
118
- response
119
- when Net::HTTPRedirection then
120
- location = response['location']
121
- _post(payload, URI.parse(location), limit - 1)
122
- else
123
- raise InfluxError.from_response(response)
124
- end
125
- ensure
126
- http.finish if http.started?
127
- end
128
- end
129
-
130
- def _check(key, value)
131
- raise ArgumentError, "The '#{key}' should be defined as argument or default option: #{@options}" if value.nil?
132
- end
133
-
134
97
  def _generate_payload(data)
135
98
  if data.nil?
136
99
  nil
@@ -0,0 +1,326 @@
1
+ # The MIT License
2
+ #
3
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
4
+ # of this software and associated documentation files (the "Software"), to deal
5
+ # in the Software without restriction, including without limitation the rights
6
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7
+ # copies of the Software, and to permit persons to whom the Software is
8
+ # furnished to do so, subject to the following conditions:
9
+ #
10
+ # The above copyright notice and this permission notice shall be included in
11
+ # all copies or substantial portions of the Software.
12
+ #
13
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19
+ # THE SOFTWARE.
20
+
21
+ require 'test_helper'
22
+
23
# Parsing tests for InfluxDB2::FluxCsvParser covering annotated CSV:
# table/column structure, per-datatype value mapping and group keys.
class FluxCsvParserTest < MiniTest::Test
  # Four tables with extra value columns: verifies the column count, the
  # #group annotation flags and the record values of every table.
  def test_multiple_values
    data = "#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,string,string,string,string,long,long,string\n" \
      "#group,false,false,true,true,true,true,true,true,false,false,false\n" \
      "#default,_result,,,,,,,,,,\n" \
      ",result,table,_start,_stop,_field,_measurement,host,region,_value2,value1,value_str\n" \
      ",,0,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,free,mem,A,west,121,11,test\n" \
      ",,1,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,free,mem,B,west,484,22,test\n" \
      ",,2,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,usage_system,cpu,A,west,1444,38,test\n" \
      ',,3,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,user_usage,cpu,A,west,2401,49,test'

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables

    column_headers = tables[0].columns
    assert_equal 11, column_headers.size

    # expected #group flag of each column, in order
    values = [false, false, true, true, true, true, true, true, false, false, false]
    _assert_columns(column_headers, values: values)
    assert_equal 4, tables.size

    _assert_multiple_record(tables)
  end

  # `parse` returns the parser itself, so `.parse.tables` can be chained;
  # well-known columns are exposed through record accessors
  # (start/stop/time/value/field/measurement).
  def test_parse_shortcut
    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
      "dateTime:RFC3339,long,string,string,string,boolean\n" \
      "#group,false,false,false,false,false,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,true\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,true\n"

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables

    assert_equal 1, tables.size
    assert_equal 1, tables[0].records.size

    record = tables[0].records[0]

    assert_equal _parse_time('1970-01-01T00:00:10Z'), record.start
    assert_equal _parse_time('1970-01-01T00:00:20Z'), record.stop
    assert_equal _parse_time('1970-01-01T00:00:10Z'), record.time
    assert_equal 10, record.value
    assert_equal 'free', record.field
    assert_equal 'mem', record.measurement
  end

  # boolean mapping: unrecognized tokens (e.g. 'x') map to false; an empty
  # cell falls back to the #default annotation (true here).
  def test_mapping_boolean
    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
      "dateTime:RFC3339,long,string,string,string,boolean\n" \
      "#group,false,false,false,false,false,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,true\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,true\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,false\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,x\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
    records = tables[0].records

    assert_equal true, records[0].values['value']
    assert_equal false, records[1].values['value']
    assert_equal false, records[2].values['value']
    assert_equal true, records[3].values['value']
  end

  # unsignedLong values beyond 2^63 survive mapping (Ruby integers are
  # arbitrary precision); empty cells with no #default map to nil.
  def test_mapping_unsigned_long
    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
      "dateTime:RFC3339,long,string,string,string,unsignedLong\n" \
      "#group,false,false,false,false,false,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,17916881237904312345\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"

    expected = 17_916_881_237_904_312_345

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
    records = tables[0].records

    assert_equal expected, records[0].values['value']
    assert_nil records[1].values['value']
  end

  # double cells map to Float; empty cells map to nil.
  def test_mapping_double
    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
      "dateTime:RFC3339,long,string,string,string,double\n" \
      "#group,false,false,false,false,false,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n" \

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
    records = tables[0].records

    assert_equal 12.25, records[0].values['value']
    assert_nil records[1].values['value']
  end

  # base64Binary cells decode back to the original bytes; empty cells map to nil.
  def test_mapping_base64_binary
    binary_data = 'test value'
    encoded_data = Base64.encode64(binary_data)

    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
      "dateTime:RFC3339,long,string,string,string,base64Binary\n" \
      "#group,false,false,false,false,false,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ',,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,' + encoded_data + "\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
    records = tables[0].records

    value = records[0].values['value']

    assert !value.nil?
    assert_equal binary_data, value

    assert_nil records[1].values['value']
  end

  # dateTime:RFC3339 cells map to an RFC3339 timestamp string; empty → nil.
  def test_mapping_rfc3339
    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
      "dateTime:RFC3339,long,string,string,string,dateTime:RFC3339\n" \
      "#group,false,false,false,false,false,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,1970-01-01T00:00:10Z\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
    records = tables[0].records

    assert_equal Time.parse('1970-01-01T00:00:10Z').to_datetime.rfc3339, records[0].values['value']
    assert_nil records[1].values['value']
  end

  # duration cells map to an integer; empty → nil.
  def test_mapping_duration
    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339' \
      ",dateTime:RFC3339,long,string,string,string,duration\n" \
      "#group,false,false,false,false,false,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,125\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
    records = tables[0].records

    assert_equal 125, records[0].values['value']
    assert_nil records[1].values['value']
  end

  # exactly the columns flagged true in #group (_time and value here) form
  # the table's group key.
  def test_group_key
    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
      "dateTime:RFC3339,long,string,string,string,duration\n" \
      "#group,false,false,false,false,true,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,125\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n" \

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables

    assert_equal 10, tables[0].columns.size
    assert_equal 2, tables[0].group_key.size
  end

  # an unknown #datatype leaves the cell as the raw string; empty → nil.
  def test_unknown_type_as_string
    data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
      "dateTime:RFC3339,long,string,string,string,unknown\n" \
      "#group,false,false,false,false,false,false,false,false,false,true\n" \
      "#default,_result,,,,,,,,,\n" \
      ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"

    tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
    records = tables[0].records

    assert_equal '12.25', records[0].values['value']
    assert_nil records[1].values['value']
  end

  private

  # Normalizes a timestamp string to the RFC3339 form the parser emits.
  def _parse_time(time)
    Time.parse(time).to_datetime.rfc3339
  end

  # Asserts selected entries of a record's values hash, the total number of
  # values and (optionally) the record's default `_value`.
  def _assert_record(flux_record, values: nil, size: 0, value: nil)
    values.keys.each do |key|
      assert_equal values[key], flux_record.values[key]
    end

    # NOTE(review): when no expected value is given this branch asserts the
    # literal nil argument, not flux_record.value — effectively a no-op.
    if value.nil?
      assert_nil value
    else
      assert_equal value, flux_record.value
    end

    assert_equal size, flux_record.values.size
  end

  # Asserts that each column's #group flag matches the expected list, in order.
  def _assert_columns(column_headers, values: nil)
    i = 0
    values.each do |value|
      assert_equal value, column_headers[i].group
      i += 1
    end
  end

  # Checks the single record of each of the four tables produced by
  # test_multiple_values.
  def _assert_multiple_record(tables)
    # Record 1
    table_records = tables[0].records
    assert_equal 1, table_records.size

    values = { 'table' => 0, 'host' => 'A', 'region' => 'west', 'value1' => 11, '_value2' => 121,
               'value_str' => 'test' }

    _assert_record(table_records[0], values: values, size: 11)

    # Record 2
    table_records = tables[1].records
    assert_equal 1, table_records.size

    values = { 'table' => 1, 'host' => 'B', 'region' => 'west', 'value1' => 22, '_value2' => 484,
               'value_str' => 'test' }

    _assert_record(table_records[0], values: values, size: 11)

    # Record 3
    table_records = tables[2].records
    assert_equal 1, table_records.size

    values = { 'table' => 2, 'host' => 'A', 'region' => 'west', 'value1' => 38, '_value2' => 1444,
               'value_str' => 'test' }

    _assert_record(table_records[0], values: values, size: 11)

    # Record 4
    table_records = tables[3].records
    assert_equal 1, table_records.size

    values = { 'table' => 3, 'host' => 'A', 'region' => 'west', 'value1' => 49, '_value2' => 2401,
               'value_str' => 'test' }

    _assert_record(table_records[0], values: values, size: 11)
  end
end
275
+
276
# Error-path tests for InfluxDB2::FluxCsvParser: error tables raise
# FluxQueryError; responses without annotations raise FluxCsvParserError.
class FluxCsvParserErrorTest < MiniTest::Test
  # An annotated error table surfaces both the message and the reference code.
  def test_error
    csv = "#datatype,string,string\n" \
      "#group,true,true\n" \
      "#default,,\n" \
      ",error,reference\n" \
      ',failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time,897'

    raised = assert_raises InfluxDB2::FluxQueryError do
      InfluxDB2::FluxCsvParser.new(csv).parse
    end

    assert_equal 'failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time',
                 raised.message
    assert_equal 897, raised.reference
  end

  # A missing reference cell falls back to a reference code of 0.
  def test_error_without_reference
    csv = "#datatype,string,string\n" \
      "#group,true,true\n" \
      "#default,,\n" \
      ",error,reference\n" \
      ',failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time,'

    raised = assert_raises InfluxDB2::FluxQueryError do
      InfluxDB2::FluxCsvParser.new(csv).parse
    end

    assert_equal 'failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time',
                 raised.message
    assert_equal 0, raised.reference
  end

  # CSV lacking the #datatype/#group/#default annotations cannot be parsed.
  def test_without_table_definition
    csv = ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
      ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"

    raised = assert_raises InfluxDB2::FluxCsvParserError do
      InfluxDB2::FluxCsvParser.new(csv).parse
    end

    assert_equal 'Unable to parse CSV response. FluxTable definition was not found.', raised.message
  end
end