influxdb-client 1.1.0.pre.203 → 1.1.0.pre.323

@@ -0,0 +1,284 @@
+ =begin
+ #Influx API Service
+
+ #No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
+
+ OpenAPI spec version: 0.1.0
+
+ Generated by: https://openapi-generator.tech
+ OpenAPI Generator version: 3.3.4
+
+ =end
+
+ require 'date'
+
+ module InfluxDB2
+   # Query influx with specific return formatting.
+   class Query
+     attr_accessor :extern
+
+     # Query script to execute.
+     attr_accessor :query
+
+     # The type of query.
+     attr_accessor :type
+
+     # Required for `influxql` type queries.
+     attr_accessor :db
+
+     # Required for `influxql` type queries.
+     attr_accessor :rp
+
+     # Required for `influxql` type queries.
+     attr_accessor :cluster
+
+     attr_accessor :dialect
+
+     class EnumAttributeValidator
+       attr_reader :datatype
+       attr_reader :allowable_values
+
+       def initialize(datatype, allowable_values)
+         @allowable_values = allowable_values.map do |value|
+           case datatype.to_s
+           when /Integer/i
+             value.to_i
+           when /Float/i
+             value.to_f
+           else
+             value
+           end
+         end
+       end
+
+       def valid?(value)
+         !value || allowable_values.include?(value)
+       end
+     end
+
+     # Attribute mapping from ruby-style variable name to JSON key.
+     def self.attribute_map
+       {
+         :'extern' => :'extern',
+         :'query' => :'query',
+         :'type' => :'type',
+         :'db' => :'db',
+         :'rp' => :'rp',
+         :'cluster' => :'cluster',
+         :'dialect' => :'dialect'
+       }
+     end
+
+     # Attribute type mapping.
+     def self.openapi_types
+       {
+         :'extern' => :'File',
+         :'query' => :'String',
+         :'type' => :'String',
+         :'db' => :'String',
+         :'rp' => :'String',
+         :'cluster' => :'String',
+         :'dialect' => :'Dialect'
+       }
+     end
+
+     # Initializes the object
+     # @param [Hash] attributes Model attributes in the form of hash
+     def initialize(attributes = {})
+       return unless attributes.is_a?(Hash)
+
+       # convert string to symbol for hash key
+       attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
+
+       if attributes.has_key?(:'extern')
+         self.extern = attributes[:'extern']
+       end
+
+       if attributes.has_key?(:'query')
+         self.query = attributes[:'query']
+       end
+
+       if attributes.has_key?(:'type')
+         self.type = attributes[:'type']
+       else
+         self.type = 'flux'
+       end
+
+       if attributes.has_key?(:'db')
+         self.db = attributes[:'db']
+       end
+
+       if attributes.has_key?(:'rp')
+         self.rp = attributes[:'rp']
+       end
+
+       if attributes.has_key?(:'cluster')
+         self.cluster = attributes[:'cluster']
+       end
+
+       if attributes.has_key?(:'dialect')
+         self.dialect = attributes[:'dialect']
+       end
+     end
+
+     # Show invalid properties with the reasons. Usually used together with valid?
+     # @return [Array] invalid properties with the reasons
+     def list_invalid_properties
+       invalid_properties = []
+       if @query.nil?
+         invalid_properties.push('invalid value for "query", query cannot be nil.')
+       end
+
+       invalid_properties
+     end
+
+     # Check to see if all the properties in the model are valid
+     # @return true if the model is valid
+     def valid?
+       return false if @query.nil?
+       type_validator = EnumAttributeValidator.new('String', ['flux', 'influxql'])
+       return false unless type_validator.valid?(@type)
+       true
+     end
+
+     # Custom attribute writer method checking allowed values (enum).
+     # @param [Object] type Object to be assigned
+     def type=(type)
+       validator = EnumAttributeValidator.new('String', ['flux', 'influxql'])
+       unless validator.valid?(type)
+         fail ArgumentError, "invalid value for \"type\", must be one of #{validator.allowable_values}."
+       end
+       @type = type
+     end
+
+     # Checks equality by comparing each attribute.
+     # @param [Object] Object to be compared
+     def ==(o)
+       return true if self.equal?(o)
+       self.class == o.class &&
+         extern == o.extern &&
+         query == o.query &&
+         type == o.type &&
+         db == o.db &&
+         rp == o.rp &&
+         cluster == o.cluster &&
+         dialect == o.dialect
+     end
+
+     # @see the `==` method
+     # @param [Object] Object to be compared
+     def eql?(o)
+       self == o
+     end
+
+     # Calculates hash code according to all attributes.
+     # @return [Fixnum] Hash code
+     def hash
+       [extern, query, type, db, rp, cluster, dialect].hash
+     end
+
+     # Builds the object from hash
+     # @param [Hash] attributes Model attributes in the form of hash
+     # @return [Object] Returns the model itself
+     def build_from_hash(attributes)
+       return nil unless attributes.is_a?(Hash)
+       self.class.openapi_types.each_pair do |key, type|
+         if type =~ /\AArray<(.*)>/i
+           # check to ensure the input is an array given that the attribute
+           # is documented as an array but the input may not be
+           if attributes[self.class.attribute_map[key]].is_a?(Array)
+             self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
+           end
+         elsif !attributes[self.class.attribute_map[key]].nil?
+           self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
+         end # or else data not found in attributes(hash), not an issue as the data can be optional
+       end
+
+       self
+     end
+
+     # Deserializes the data based on type
+     # @param string type Data type
+     # @param string value Value to be deserialized
+     # @return [Object] Deserialized data
+     def _deserialize(type, value)
+       case type.to_sym
+       when :DateTime
+         DateTime.parse(value)
+       when :Date
+         Date.parse(value)
+       when :String
+         value.to_s
+       when :Integer
+         value.to_i
+       when :Float
+         value.to_f
+       when :BOOLEAN
+         if value.to_s =~ /\A(true|t|yes|y|1)\z/i
+           true
+         else
+           false
+         end
+       when :Object
+         # generic object (usually a Hash), return directly
+         value
+       when /\AArray<(?<inner_type>.+)>\z/
+         inner_type = Regexp.last_match[:inner_type]
+         value.map { |v| _deserialize(inner_type, v) }
+       when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
+         k_type = Regexp.last_match[:k_type]
+         v_type = Regexp.last_match[:v_type]
+         {}.tap do |hash|
+           value.each do |k, v|
+             hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
+           end
+         end
+       else # model
+         temp_model = InfluxDB2.const_get(type).new
+         temp_model.build_from_hash(value)
+       end
+     end
+
+     # Returns the string representation of the object
+     # @return [String] String presentation of the object
+     def to_s
+       to_hash.to_s
+     end
+
+     # to_body is an alias to to_hash (backward compatibility)
+     # @return [Hash] Returns the object in the form of hash
+     def to_body
+       to_hash
+     end
+
+     # Returns the object in the form of hash
+     # @return [Hash] Returns the object in the form of hash
+     def to_hash
+       hash = {}
+       self.class.attribute_map.each_pair do |attr, param|
+         value = self.send(attr)
+         next if value.nil?
+         hash[param] = _to_hash(value)
+       end
+       hash
+     end
+
+     # Outputs non-array value in the form of hash
+     # For object, use to_hash. Otherwise, just return the value
+     # @param [Object] value Any valid value
+     # @return [Hash] Returns the value in the form of hash
+     def _to_hash(value)
+       if value.is_a?(Array)
+         value.compact.map { |v| _to_hash(v) }
+       elsif value.is_a?(Hash)
+         {}.tap do |hash|
+           value.each { |k, v| hash[k] = _to_hash(v) }
+         end
+       elsif value.respond_to? :to_hash
+         value.to_hash
+       else
+         value
+       end
+     end
+   end
+ end
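
The generated Query model behaves like any other OpenAPI model class. A minimal usage sketch; the `require` path is an assumption about the gem's entry point and is not shown in this diff:

```ruby
require 'influxdb2/client' # assumed entry point for the gem; not part of this diff

# `type` defaults to 'flux' and is validated against the enum ['flux', 'influxql'].
query = InfluxDB2::Query.new(query: 'from(bucket: "my-bucket") |> range(start: -1h)')

query.valid?          # => true (a nil `query` would make this false)
query.to_body[:query] # => the Flux source, keyed by the JSON name from attribute_map

begin
  query.type = 'sql'  # outside the enum
rescue ArgumentError => e
  puts e.message      # => invalid value for "type", must be one of ["flux", "influxql"].
end
```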
@@ -0,0 +1,79 @@
+ # The MIT License
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
+ # of this software and associated documentation files (the "Software"), to deal
+ # in the Software without restriction, including without limitation the rights
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ # copies of the Software, and to permit persons to whom the Software is
+ # furnished to do so, subject to the following conditions:
+ #
+ # The above copyright notice and this permission notice shall be included in
+ # all copies or substantial portions of the Software.
+ #
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ # THE SOFTWARE.
+ require_relative 'models/dialect'
+ require_relative 'models/query'
+ require_relative 'flux_csv_parser'
+ require 'json'
+
+ module InfluxDB2
+   # The client of InfluxDB 2.0 that implements the Query HTTP API endpoint.
+   #
+   class QueryApi < DefaultApi
+     DEFAULT_DIALECT = InfluxDB2::Dialect.new(header: true, delimiter: ',', comment_prefix: '#',
+                                              annotations: %w[datatype group default])
+
+     # @param [Hash] options The options to be used by the client.
+     def initialize(options:)
+       super(options: options)
+     end
+
+     # @param [Object] query the Flux query to execute; may be a [String] or a [Query]
+     # @param [String] org specifies the source organization
+     # @return [String] result of the query
+     def query_raw(query: nil, org: nil, dialect: DEFAULT_DIALECT)
+       org_param = org || @options[:org]
+       _check('org', org_param)
+
+       payload = _generate_payload(query, dialect)
+       return nil if payload.nil?
+
+       uri = URI.parse(File.join(@options[:url], '/api/v2/query'))
+       uri.query = URI.encode_www_form(org: org_param)
+
+       _post(payload.to_body.to_json, uri).read_body
+     end
+
+     # @param [Object] query the Flux query to execute; may be a [String] or a [Query]
+     # @param [String] org specifies the source organization
+     # @return [Array] list of FluxTables matching the query
+     def query(query: nil, org: nil, dialect: DEFAULT_DIALECT)
+       response = query_raw(query: query, org: org, dialect: dialect)
+       parser = InfluxDB2::FluxCsvParser.new
+
+       parser.parse(response)
+     end
+
+     private
+
+     def _generate_payload(query, dialect)
+       if query.nil?
+         nil
+       elsif query.is_a?(Query)
+         query
+       elsif query.is_a?(String)
+         if query.empty?
+           nil
+         else
+           Query.new(query: query, dialect: dialect, type: nil)
+         end
+       end
+     end
+   end
+ end
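
In practice, QueryApi is obtained from the client rather than constructed directly. A hedged sketch, assuming `InfluxDB2::Client` and its `create_query_api` factory from the rest of the gem (neither appears in this diff):

```ruby
client = InfluxDB2::Client.new('https://localhost:9999', 'my-token', org: 'my-org')
query_api = client.create_query_api # assumed factory; not shown in this diff

flux = 'from(bucket: "my-bucket") |> range(start: -1h)'

csv = query_api.query_raw(query: flux) # raw annotated-CSV response body
tables = query_api.query(query: flux)  # same query, parsed into FluxTable objects

tables.each do |table|
  table.records.each { |r| puts "#{r.time} #{r.field}=#{r.value}" }
end
```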
@@ -37,14 +37,10 @@ module InfluxDB2
 
    # Write time series data into InfluxDB.
    #
-   class WriteApi
-     DEFAULT_TIMEOUT = 10
-     DEFAULT_REDIRECT_COUNT = 10
-
+   class WriteApi < DefaultApi
      # @param [Hash] options The options to be used by the client.
      def initialize(options:)
-       @options = options
-       @max_redirect_count = @options[:max_redirect_count] || DEFAULT_REDIRECT_COUNT
+       super(options: options)
      end
 
      # Write data into specified Bucket.
@@ -98,39 +94,6 @@ module InfluxDB2
 
      private
 
-     def _post(payload, uri, limit = @max_redirect_count)
-       raise InfluxError.from_message("Too many HTTP redirects. Exceeded limit: #{@max_redirect_count}") if limit.zero?
-
-       http = Net::HTTP.new(uri.host, uri.port)
-       http.open_timeout = @options[:open_timeout] || DEFAULT_TIMEOUT
-       http.write_timeout = @options[:write_timeout] || DEFAULT_TIMEOUT if Net::HTTP.method_defined? :write_timeout
-       http.read_timeout = @options[:read_timeout] || DEFAULT_TIMEOUT
-       http.use_ssl = @options[:use_ssl].nil? ? true : @options[:use_ssl]
-
-       request = Net::HTTP::Post.new(uri.request_uri)
-       request['Authorization'] = "Token #{@options[:token]}"
-       request.body = payload
-
-       begin
-         response = http.request(request)
-         case response
-         when Net::HTTPSuccess then
-           response
-         when Net::HTTPRedirection then
-           location = response['location']
-           _post(payload, URI.parse(location), limit - 1)
-         else
-           raise InfluxError.from_response(response)
-         end
-       ensure
-         http.finish if http.started?
-       end
-     end
-
-     def _check(key, value)
-       raise ArgumentError, "The '#{key}' should be defined as argument or default option: #{@options}" if value.nil?
-     end
-
      def _generate_payload(data)
        if data.nil?
          nil
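
The net effect of these hunks: the HTTP plumbing (`_post` with redirect handling and `_check`) moves out of `WriteApi` into a shared `DefaultApi` base class that the new `QueryApi` also inherits. `DefaultApi` itself is not part of this excerpt; a minimal sketch of its shape, reconstructed from the code removed above:

```ruby
module InfluxDB2
  # Sketch of the shared base class, inferred from the removed WriteApi code;
  # not the gem's verbatim source.
  class DefaultApi
    DEFAULT_TIMEOUT = 10
    DEFAULT_REDIRECT_COUNT = 10

    def initialize(options:)
      @options = options
      @max_redirect_count = @options[:max_redirect_count] || DEFAULT_REDIRECT_COUNT
    end

    private

    # _post(payload, uri, limit) would carry over unchanged from the removed
    # WriteApi implementation shown above (redirect-following Net::HTTP POST).

    def _check(key, value)
      raise ArgumentError, "The '#{key}' should be defined as argument or default option: #{@options}" if value.nil?
    end
  end
end
```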
@@ -0,0 +1,328 @@
+ # The MIT License
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
+ # of this software and associated documentation files (the "Software"), to deal
+ # in the Software without restriction, including without limitation the rights
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ # copies of the Software, and to permit persons to whom the Software is
+ # furnished to do so, subject to the following conditions:
+ #
+ # The above copyright notice and this permission notice shall be included in
+ # all copies or substantial portions of the Software.
+ #
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ # THE SOFTWARE.
+
+ require 'test_helper'
+
+ class FluxCsvParserTest < MiniTest::Test
+   def setup
+     @parser = InfluxDB2::FluxCsvParser.new
+   end
+
+   def test_multiple_values
+     data = "#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,string,string,string,string,long,long,string\n" \
+            "#group,false,false,true,true,true,true,true,true,false,false,false\n" \
+            "#default,_result,,,,,,,,,,\n" \
+            ",result,table,_start,_stop,_field,_measurement,host,region,_value2,value1,value_str\n" \
+            ",,0,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,free,mem,A,west,121,11,test\n" \
+            ",,1,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,free,mem,B,west,484,22,test\n" \
+            ",,2,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,usage_system,cpu,A,west,1444,38,test\n" \
+            ',,3,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,user_usage,cpu,A,west,2401,49,test'
+
+     tables = @parser.parse(data)
+
+     column_headers = tables[0].columns
+     assert_equal 11, column_headers.size
+
+     values = [false, false, true, true, true, true, true, true, false, false, false]
+     _assert_columns(column_headers, values: values)
+     assert_equal 4, tables.size
+
+     _assert_multiple_record(tables)
+   end
+
+   def test_parse_shortcut
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
+            "dateTime:RFC3339,long,string,string,string,boolean\n" \
+            "#group,false,false,false,false,false,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,true\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,true\n"
+
+     tables = @parser.parse(data)
+
+     assert_equal 1, tables.size
+     assert_equal 1, tables[0].records.size
+
+     record = tables[0].records[0]
+
+     assert_equal _parse_time('1970-01-01T00:00:10Z'), record.start
+     assert_equal _parse_time('1970-01-01T00:00:20Z'), record.stop
+     assert_equal _parse_time('1970-01-01T00:00:10Z'), record.time
+     assert_equal 10, record.value
+     assert_equal 'free', record.field
+     assert_equal 'mem', record.measurement
+   end
+
+   def test_mapping_boolean
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
+            "dateTime:RFC3339,long,string,string,string,boolean\n" \
+            "#group,false,false,false,false,false,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,true\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,true\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,false\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,x\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     tables = @parser.parse(data)
+     records = tables[0].records
+
+     assert_equal true, records[0].values['value']
+     assert_equal false, records[1].values['value']
+     assert_equal false, records[2].values['value']
+     assert_equal true, records[3].values['value']
+   end
+
+   def test_mapping_unsigned_long
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
+            "dateTime:RFC3339,long,string,string,string,unsignedLong\n" \
+            "#group,false,false,false,false,false,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,17916881237904312345\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     expected = 17_916_881_237_904_312_345
+
+     tables = @parser.parse(data)
+     records = tables[0].records
+
+     assert_equal expected, records[0].values['value']
+     assert_nil records[1].values['value']
+   end
+
+   def test_mapping_double
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
+            "dateTime:RFC3339,long,string,string,string,double\n" \
+            "#group,false,false,false,false,false,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     tables = @parser.parse(data)
+     records = tables[0].records
+
+     assert_equal 12.25, records[0].values['value']
+     assert_nil records[1].values['value']
+   end
+
+   def test_mapping_base64_binary
+     binary_data = 'test value'
+     encoded_data = Base64.encode64(binary_data)
+
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
+            "dateTime:RFC3339,long,string,string,string,base64Binary\n" \
+            "#group,false,false,false,false,false,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ',,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,' + encoded_data + "\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     tables = @parser.parse(data)
+     records = tables[0].records
+
+     value = records[0].values['value']
+
+     assert !value.nil?
+     assert_equal binary_data, value
+
+     assert_nil records[1].values['value']
+   end
+
+   def test_mapping_rfc3339
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
+            "dateTime:RFC3339,long,string,string,string,dateTime:RFC3339\n" \
+            "#group,false,false,false,false,false,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,1970-01-01T00:00:10Z\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     tables = @parser.parse(data)
+     records = tables[0].records
+
+     assert_equal Time.parse('1970-01-01T00:00:10Z').to_datetime.rfc3339, records[0].values['value']
+     assert_nil records[1].values['value']
+   end
+
+   def test_mapping_duration
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339' \
+            ",dateTime:RFC3339,long,string,string,string,duration\n" \
+            "#group,false,false,false,false,false,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,125\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     tables = @parser.parse(data)
+     records = tables[0].records
+
+     assert_equal 125, records[0].values['value']
+     assert_nil records[1].values['value']
+   end
+
+   def test_group_key
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
+            "dateTime:RFC3339,long,string,string,string,duration\n" \
+            "#group,false,false,false,false,true,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,125\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     tables = @parser.parse(data)
+
+     assert_equal 10, tables[0].columns.size
+     assert_equal 2, tables[0].group_key.size
+   end
+
+   def test_unknown_type_as_string
+     data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,' \
+            "dateTime:RFC3339,long,string,string,string,unknown\n" \
+            "#group,false,false,false,false,false,false,false,false,false,true\n" \
+            "#default,_result,,,,,,,,,\n" \
+            ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     tables = @parser.parse(data)
+     records = tables[0].records
+
+     assert_equal '12.25', records[0].values['value']
+     assert_nil records[1].values['value']
+   end
+
+   private
+
+   def _parse_time(time)
+     Time.parse(time).to_datetime.rfc3339
+   end
+
+   def _assert_record(flux_record, values: nil, size: 0, value: nil)
+     values.keys.each do |key|
+       assert_equal values[key], flux_record.values[key]
+     end
+
+     if value.nil?
+       assert_nil flux_record.value
+     else
+       assert_equal value, flux_record.value
+     end
+
+     assert_equal size, flux_record.values.size
+   end
+
+   def _assert_columns(column_headers, values: nil)
+     i = 0
+     values.each do |value|
+       assert_equal value, column_headers[i].group
+       i += 1
+     end
+   end
+
+   def _assert_multiple_record(tables)
+     # Record 1
+     table_records = tables[0].records
+     assert_equal 1, table_records.size
+
+     values = { 'table' => 0, 'host' => 'A', 'region' => 'west', 'value1' => 11, '_value2' => 121,
+                'value_str' => 'test' }
+
+     _assert_record(table_records[0], values: values, size: 11)
+
+     # Record 2
+     table_records = tables[1].records
+     assert_equal 1, table_records.size
+
+     values = { 'table' => 1, 'host' => 'B', 'region' => 'west', 'value1' => 22, '_value2' => 484,
+                'value_str' => 'test' }
+
+     _assert_record(table_records[0], values: values, size: 11)
+
+     # Record 3
+     table_records = tables[2].records
+     assert_equal 1, table_records.size
+
+     values = { 'table' => 2, 'host' => 'A', 'region' => 'west', 'value1' => 38, '_value2' => 1444,
+                'value_str' => 'test' }
+
+     _assert_record(table_records[0], values: values, size: 11)
+
+     # Record 4
+     table_records = tables[3].records
+     assert_equal 1, table_records.size
+
+     values = { 'table' => 3, 'host' => 'A', 'region' => 'west', 'value1' => 49, '_value2' => 2401,
+                'value_str' => 'test' }
+
+     _assert_record(table_records[0], values: values, size: 11)
+   end
+ end
+
+ class FluxCsvParserErrorTest < MiniTest::Test
+   def setup
+     @parser = InfluxDB2::FluxCsvParser.new
+   end
+
+   def test_error
+     data = "#datatype,string,string\n" \
+            "#group,true,true\n" \
+            "#default,,\n" \
+            ",error,reference\n" \
+            ',failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time,897'
+
+     error = assert_raises InfluxDB2::FluxQueryError do
+       @parser.parse(data)
+     end
+
+     assert_equal 'failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time',
+                  error.message
+     assert_equal 897, error.reference
+   end
+
+   def test_error_without_reference
+     data = "#datatype,string,string\n" \
+            "#group,true,true\n" \
+            "#default,,\n" \
+            ",error,reference\n" \
+            ',failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time,'
+
+     error = assert_raises InfluxDB2::FluxQueryError do
+       @parser.parse(data)
+     end
+
+     assert_equal 'failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time',
+                  error.message
+     assert_equal 0, error.reference
+   end
+
+   def test_without_table_definition
+     data = ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
+            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
+
+     error = assert_raises InfluxDB2::FluxCsvParserError do
+       @parser.parse(data)
+     end
+
+     assert_equal 'Unable to parse CSV response. FluxTable definition was not found.', error.message
+   end
+ end
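
These tests pin down how each annotated-CSV datatype maps to a Ruby value (boolean, unsignedLong, double, base64Binary, RFC3339 timestamps, duration, unknown-as-string) and how the `#group` annotation feeds the table's group key. A condensed sketch of driving the parser directly with the same input shape; the reduced column layout here is illustrative, not taken from the tests:

```ruby
parser = InfluxDB2::FluxCsvParser.new

# Three annotation rows (#datatype, #group, #default), then header and data rows.
data = "#datatype,string,long,dateTime:RFC3339,long,string\n" \
       "#group,false,false,false,false,true\n" \
       "#default,_result,,,,\n" \
       ",result,table,_time,_value,host\n" \
       ",,0,1970-01-01T00:00:10Z,10,A\n"

tables = parser.parse(data)
record = tables[0].records[0]

record.value          # => 10  ('long' column mapped to an Integer)
record.values['host'] # => 'A' (grouped column, part of the table's group key)
```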