influxdb-client 1.1.0.pre.323 → 1.1.0.pre.373

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5468123420673da53963b6610fc3c42ba6379a919b07d2e3623c949303962fd2
- data.tar.gz: 1eaab67ebca09a668c1669780807808d3fe07fb973bd5f387e846be3fd7449db
+ metadata.gz: 83c5ef3d43578302150587bea139d9169dfc59736cf3bab4e360faa0b1f28257
+ data.tar.gz: 122c4b77d8ae2aa65eef10257a7f57d0c0b105e8a3e8dd29668be04a37159002
  SHA512:
- metadata.gz: 1cd55d49226c9d1a8060fff080674c8e18118361296c7975dfd456e1465f969c376ddfe4b30f15341d333353abc0fa40c74992d50fc61e63252a97bc0c6463ea
- data.tar.gz: c7044374c0f284f7b329a46dc0c9d6beb896554c49b166315997bf9e6df3136c9f295107030e08618401ab0a9f97be2997d0c3d51b499bdba262ddeb9f440841
+ metadata.gz: ab2b29dc351db166a073bdbaa9a7cc787f12fe75487907dd7e5a2dcc4f83967b6946279db6b29092e198668d9edeb8822d1c397f35b1144013bf378adf30758b
+ data.tar.gz: c9dff47d659d43afb66f2e1132660252df0f0e90f1ad0565b6a4c3e4838b96c228da349b549c167e5ac3b305780ebe6ed3774edcfbc51622ead1f5a6bcf57f24
data/CHANGELOG.md CHANGED
@@ -2,6 +2,7 @@
 
  ### Features
  1. [#14](https://github.com/influxdata/influxdb-client-ruby/issues/14): Added QueryApi
+ 2. [#17](https://github.com/influxdata/influxdb-client-ruby/issues/17): Added query_stream
 
  ## 1.0.0.beta [2020-01-17]
 
data/README.md CHANGED
@@ -63,6 +63,7 @@ The result retrieved by [QueryApi](https://github.com/influxdata/influxdb-client
 
  1. Raw query response
  2. Flux data structure: [FluxTable, FluxColumn and FluxRecord](https://github.com/influxdata/influxdb-client-ruby/blob/master/lib/influxdb2/client/flux_table.rb)
+ 3. Stream of [FluxRecord](https://github.com/influxdata/influxdb-client-ruby/blob/master/lib/influxdb2/client/flux_table.rb)
 
  #### Query raw
 
@@ -86,6 +87,23 @@ query_api = client.create_query_api
  result = query_api.query(query: 'from(bucket:"' + bucket + '") |> range(start: 1970-01-01T00:00:00.000000001Z) |> last()')
  ```
 
+ #### Query stream
+ Synchronously executes the Flux query and returns a stream of [FluxRecord](https://github.com/influxdata/influxdb-client-ruby/blob/master/lib/influxdb2/client/flux_table.rb)
+ ```ruby
+ client = InfluxDB2::Client.new('https://localhost:9999', 'my-token',
+ bucket: 'my-bucket',
+ org: 'my-org')
+
+ query_api = client.create_query_api
+
+ query = 'from(bucket: "my-bucket") |> range(start: -10m, stop: now()) ' \
+ "|> filter(fn: (r) => r._measurement == \"#{measurement}\")"
+
+ query_api.query_stream(query: query).each do |record|
+ puts record.to_s
+ end
+ ```
+
  ### Writing data
 
  ```ruby
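For readers comparing the two query styles documented above, here is a minimal sketch of the difference between `query` (fully parsed result) and the new `query_stream` (lazy, record by record). It assumes a `client` configured as in the README snippets and a Flux query string in `flux`; both names are illustrative, not part of the diff.

```ruby
# Sketch only: contrasts the eager and streaming query paths shown above.
# `client` and `flux` are assumed to exist as in the README examples.
query_api = client.create_query_api

tables = query_api.query(query: flux)          # fully parsed FluxTable collection
stream = query_api.query_stream(query: flux)   # Enumerable that yields FluxRecord lazily

stream.each do |record|
  puts record.values                           # records arrive as the CSV response is parsed
end
```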
data/lib/influxdb2/client/flux_csv_parser.rb CHANGED
@@ -22,6 +22,7 @@ require 'base64'
 
  module InfluxDB2
  # This class represents Flux query error
+ #
  class FluxQueryError < StandardError
  def initialize(message, reference)
  super(message)
@@ -32,6 +33,7 @@ module InfluxDB2
  end
 
  # This class represents Flux query error
+ #
  class FluxCsvParserError < StandardError
  def initialize(message)
  super(message)
@@ -39,20 +41,28 @@ module InfluxDB2
  end
 
  # This class is used to construct FluxResult from CSV.
+ #
  class FluxCsvParser
- def initialize
+ include Enumerable
+ def initialize(response, stream: false)
+ @response = response
+ @stream = stream
  @tables = {}
 
  @table_index = 0
  @start_new_table = false
  @table = nil
  @parsing_state_error = false
+
+ @closed = false
  end
 
- attr_reader :tables
+ attr_reader :tables, :closed
+
+ def parse
+ @csv_file = CSV.new(@response.instance_of?(Net::HTTPOK) ? @response.body : @response)
 
- def parse(response)
- CSV.parse(response) do |csv|
+ while (csv = @csv_file.shift)
  # Response has HTTP status ok, but response is error.
  next if csv.empty?
 
@@ -68,10 +78,24 @@ module InfluxDB2
  raise FluxQueryError.new(error, reference_value.nil? || reference_value.empty? ? 0 : reference_value.to_i)
  end
 
- _parse_line(csv)
+ result = _parse_line(csv)
+
+ yield result if @stream && result.instance_of?(InfluxDB2::FluxRecord)
+ end
+
+ self
+ end
+
+ def each
+ return enum_for(:each) unless block_given?
+
+ parse do |record|
+ yield record
  end
 
- @tables
+ self
+ ensure
+ _close_connection
  end
 
  private
@@ -84,7 +108,9 @@ module InfluxDB2
  # Return already parsed DataFrame
  @start_new_table = true
  @table = InfluxDB2::FluxTable.new
- @tables[@table_index] = @table
+
+ @tables[@table_index] = @table unless @stream
+
  @table_index += 1
  elsif @table.nil?
  raise FluxCsvParserError, 'Unable to parse CSV response. FluxTable definition was not found.'
@@ -157,13 +183,17 @@ module InfluxDB2
  @table.columns.push(column)
  end
 
- @tables[@table_index] = @table
+ @tables[@table_index] = @table unless @stream
  @table_index += 1
  end
 
  flux_record = _parse_record(@table_index - 1, @table, csv)
 
- @tables[@table_index - 1].records.push(flux_record)
+ if @stream
+ flux_record
+ else
+ @tables[@table_index - 1].records.push(flux_record)
+ end
  end
 
  def _parse_record(table_index, table, csv)
@@ -206,5 +236,11 @@ module InfluxDB2
  str_val
  end
  end
+
+ def _close_connection
+ # Close CSV Parser
+ @csv_file.close
+ @closed = true
+ end
  end
  end
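The streaming changes above follow a common Ruby pattern: the parser mixes in `Enumerable`, `each` delegates to `parse` with a block, and an `ensure` clause closes the underlying CSV parser even when the caller breaks out of the iteration early. Below is a simplified, self-contained sketch of that pattern; the class and method names are illustrative only, not the gem's API.

```ruby
require 'csv'

# Minimal illustration of the Enumerable-plus-ensure pattern used by FluxCsvParser.
# Names here are illustrative only.
class StreamingRows
  include Enumerable

  def initialize(io_or_string)
    @csv = CSV.new(io_or_string)
    @closed = false
  end

  attr_reader :closed

  def each
    return enum_for(:each) unless block_given?

    # Rows are read one at a time, so the caller can stop early with `break`.
    while (row = @csv.shift)
      yield row
    end
    self
  ensure
    # Runs on normal completion, on `break`, and on exceptions.
    @csv.close
    @closed = true
  end
end

# Usage: only the first two rows are ever parsed before cleanup runs.
StreamingRows.new("a,1\nb,2\nc,3\n").each_with_index do |row, i|
  puts row.inspect
  break if i == 1
end
```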
data/lib/influxdb2/client/query_api.rb CHANGED
@@ -38,16 +38,7 @@
  # @param [String] org specifies the source organization
  # @return [String] result of query
  def query_raw(query: nil, org: nil, dialect: DEFAULT_DIALECT)
- org_param = org || @options[:org]
- _check('org', org_param)
-
- payload = _generate_payload(query, dialect)
- return nil if payload.nil?
-
- uri = URI.parse(File.join(@options[:url], '/api/v2/query'))
- uri.query = URI.encode_www_form(org: org_param)
-
- _post(payload.to_body.to_json, uri).read_body
+ _post_query(query: query, org: org, dialect: dialect).read_body
  end
 
  # @param [Object] query the flux query to execute. The data could be represented by [String], [Query]
@@ -55,13 +46,36 @@ module InfluxDB2
  # @return [Array] list of FluxTables which match the query
  def query(query: nil, org: nil, dialect: DEFAULT_DIALECT)
  response = query_raw(query: query, org: org, dialect: dialect)
- parser = InfluxDB2::FluxCsvParser.new
+ parser = InfluxDB2::FluxCsvParser.new(response)
 
- parser.parse(response)
+ parser.parse
+ parser.tables
+ end
+
+ # @param [Object] query the flux query to execute. The data could be represented by [String], [Query]
+ # @param [String] org specifies the source organization
+ # @return stream of Flux Records
+ def query_stream(query: nil, org: nil, dialect: DEFAULT_DIALECT)
+ response = _post_query(query: query, org: org, dialect: dialect)
+
+ InfluxDB2::FluxCsvParser.new(response, stream: true)
  end
 
  private
 
+ def _post_query(query: nil, org: nil, dialect: DEFAULT_DIALECT)
+ org_param = org || @options[:org]
+ _check('org', org_param)
+
+ payload = _generate_payload(query, dialect)
+ return nil if payload.nil?
+
+ uri = URI.parse(File.join(@options[:url], '/api/v2/query'))
+ uri.query = URI.encode_www_form(org: org_param)
+
+ _post(payload.to_body.to_json, uri)
+ end
+
  def _generate_payload(query, dialect)
  if query.nil?
  nil
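The refactor above extracts the HTTP call into `_post_query`: `query_raw` and `query` still read and parse the whole response body, while `query_stream` hands the response to `FluxCsvParser` with `stream: true`, so records are parsed only while iterating. A hedged usage sketch follows; it assumes a configured `InfluxDB2::Client` in `client` and a Flux query string in `flux`, and `process` is a hypothetical application handler.

```ruby
# Sketch: nothing is parsed until iteration starts, and breaking out early
# still closes the parser (see the `ensure` in FluxCsvParser#each).
parser = client.create_query_api.query_stream(query: flux)

count = 0
parser.each do |record|
  process(record)       # hypothetical application handler
  count += 1
  break if count >= 100 # stop early; the ensure block still closes the parser
end

puts parser.closed # => true after iteration, even when it was interrupted
```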
data/test/influxdb/flux_csv_parser_test.rb CHANGED
@@ -21,10 +21,6 @@
  require 'test_helper'
 
  class FluxCsvParserTest < MiniTest::Test
- def setup
- @parser = InfluxDB2::FluxCsvParser.new
- end
-
  def test_multiple_values
  data = "#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,string,string,string,string,long,long,string\n" \
  "#group,false,false,true,true,true,true,true,true,false,false,false\n" \
@@ -35,7 +31,7 @@ class FluxCsvParserTest < MiniTest::Test
  ",,2,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,usage_system,cpu,A,west,1444,38,test\n" \
  ',,3,1677-09-21T00:12:43.145224192Z,2018-07-16T11:21:02.547596934Z,user_usage,cpu,A,west,2401,49,test'
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
 
  column_headers = tables[0].columns
  assert_equal 11, column_headers.size
@@ -55,7 +51,7 @@ class FluxCsvParserTest < MiniTest::Test
  ",result,table,_start,_stop,_time,_value,_field,_measurement,host,value\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,true\n"
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
 
  assert_equal 1, tables.size
  assert_equal 1, tables[0].records.size
@@ -81,7 +77,7 @@ class FluxCsvParserTest < MiniTest::Test
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,x\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
  records = tables[0].records
 
  assert_equal true, records[0].values['value']
@@ -101,7 +97,7 @@ class FluxCsvParserTest < MiniTest::Test
 
  expected = 17_916_881_237_904_312_345
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
  records = tables[0].records
 
  assert_equal expected, records[0].values['value']
@@ -117,7 +113,7 @@ class FluxCsvParserTest < MiniTest::Test
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n" \
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
  records = tables[0].records
 
  assert_equal 12.25, records[0].values['value']
@@ -136,7 +132,7 @@ class FluxCsvParserTest < MiniTest::Test
  ',,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,' + encoded_data + "\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
  records = tables[0].records
 
  value = records[0].values['value']
@@ -156,7 +152,7 @@ class FluxCsvParserTest < MiniTest::Test
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,1970-01-01T00:00:10Z\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
  records = tables[0].records
 
  assert_equal Time.parse('1970-01-01T00:00:10Z').to_datetime.rfc3339, records[0].values['value']
@@ -172,7 +168,7 @@ class FluxCsvParserTest < MiniTest::Test
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,125\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
  records = tables[0].records
 
  assert_equal 125, records[0].values['value']
@@ -188,7 +184,7 @@ class FluxCsvParserTest < MiniTest::Test
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,125\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n" \
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
 
  assert_equal 10, tables[0].columns.size
  assert_equal 2, tables[0].group_key.size
@@ -203,7 +199,7 @@ class FluxCsvParserTest < MiniTest::Test
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
 
- tables = @parser.parse(data)
+ tables = InfluxDB2::FluxCsvParser.new(data).parse.tables
  records = tables[0].records
 
  assert_equal '12.25', records[0].values['value']
@@ -278,10 +274,6 @@ class FluxCsvParserTest < MiniTest::Test
  end
 
  class FluxCsvParserErrorTest < MiniTest::Test
- def setup
- @parser = InfluxDB2::FluxCsvParser.new
- end
-
  def test_error
  data = "#datatype,string,string\n" \
  "#group,true,true\n" \
@@ -289,8 +281,10 @@ class FluxCsvParserErrorTest < MiniTest::Test
  ",error,reference\n" \
  ',failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time,897'
 
+ parser = InfluxDB2::FluxCsvParser.new(data)
+
  error = assert_raises InfluxDB2::FluxQueryError do
- @parser.parse(data)
+ parser.parse
  end
 
  assert_equal 'failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time',
@@ -305,8 +299,10 @@ class FluxCsvParserErrorTest < MiniTest::Test
  ",error,reference\n" \
  ',failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time,'
 
+ parser = InfluxDB2::FluxCsvParser.new(data)
+
  error = assert_raises InfluxDB2::FluxQueryError do
- @parser.parse(data)
+ parser.parse
  end
 
  assert_equal 'failed to create physical plan: invalid time bounds from procedure from: bounds contain zero time',
@@ -319,8 +315,10 @@ class FluxCsvParserErrorTest < MiniTest::Test
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,12.25\n" \
  ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,10,free,mem,A,\n"
 
+ parser = InfluxDB2::FluxCsvParser.new(data)
+
  error = assert_raises InfluxDB2::FluxCsvParserError do
- @parser.parse(data)
+ parser.parse
  end
 
  assert_equal 'Unable to parse CSV response. FluxTable definition was not found.', error.message
data/test/influxdb/query_api_stream_test.rb ADDED
@@ -0,0 +1,98 @@
+ # The MIT License
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
+ # of this software and associated documentation files (the "Software"), to deal
+ # in the Software without restriction, including without limitation the rights
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ # copies of the Software, and to permit persons to whom the Software is
+ # furnished to do so, subject to the following conditions:
+ #
+ # The above copyright notice and this permission notice shall be included in
+ # all copies or substantial portions of the Software.
+ #
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ # THE SOFTWARE.
+
+ require 'test_helper'
+
+ class QueryApiStreamTest < MiniTest::Test
+ def setup
+ WebMock.allow_net_connect!
+
+ @client = InfluxDB2::Client.new('http://localhost:9999', 'my-token',
+ bucket: 'my-bucket',
+ org: 'my-org',
+ precision: InfluxDB2::WritePrecision::NANOSECOND,
+ use_ssl: false)
+ @now = Time.now.utc
+ end
+
+ def test_query_stream
+ measurement = 'h2o_query_stream' + @now.to_i.to_s + @now.nsec.to_s
+ _write(10, measurement: measurement)
+
+ query = 'from(bucket: "my-bucket") |> range(start: -1m, stop: now()) ' \
+ "|> filter(fn: (r) => r._measurement == \"#{measurement}\")"
+
+ count = 0
+ @client.create_query_api.query_stream(query: query).each do |record|
+ count += 1
+ assert_equal measurement, record.measurement
+ assert_equal 'europe', record.values['location']
+ assert_equal count, record.value
+ assert_equal 'level', record.field
+ end
+
+ assert_equal 10, count
+ end
+
+ def test_query_stream_break
+ measurement = 'h2o_query_stream_break' + @now.to_i.to_s + @now.nsec.to_s
+ _write(20, measurement: measurement)
+
+ query = 'from(bucket: "my-bucket") |> range(start: -1m, stop: now()) ' \
+ "|> filter(fn: (r) => r._measurement == \"#{measurement}\")"
+
+ records = []
+
+ parser = @client.create_query_api.query_stream(query: query)
+
+ assert_equal false, parser.closed
+
+ count = 0
+ parser.each do |record|
+ records.push(record)
+ count += 1
+
+ break if count >= 5
+ end
+
+ assert_equal 5, records.size
+ assert_equal true, parser.closed
+
+ # record 1
+ record = records[0]
+ assert_equal measurement, record.measurement
+ assert_equal 'europe', record.values['location']
+ assert_equal 1, record.value
+ assert_equal 'level', record.field
+ end
+
+ private
+
+ def _write(values, measurement:)
+ write_api = @client.create_write_api
+
+ (1..values).each do |value|
+ write_api.write(data: InfluxDB2::Point.new(name: measurement)
+ .add_tag('location', 'europe')
+ .add_field('level', value)
+ .time(@now - values + value, InfluxDB2::WritePrecision::NANOSECOND))
+ end
+ end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: influxdb-client
  version: !ruby/object:Gem::Version
- version: 1.1.0.pre.323
+ version: 1.1.0.pre.373
  platform: ruby
  authors:
  - Jakub Bednar
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-01-31 00:00:00.000000000 Z
+ date: 2020-02-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -162,6 +162,7 @@ files:
  - test/influxdb/flux_csv_parser_test.rb
  - test/influxdb/point_test.rb
  - test/influxdb/query_api_integration_test.rb
+ - test/influxdb/query_api_stream_test.rb
  - test/influxdb/query_api_test.rb
  - test/influxdb/write_api_integration_test.rb
  - test/influxdb/write_api_test.rb
@@ -197,6 +198,7 @@ test_files:
  - test/influxdb/flux_csv_parser_test.rb
  - test/influxdb/point_test.rb
  - test/influxdb/query_api_integration_test.rb
+ - test/influxdb/query_api_stream_test.rb
  - test/influxdb/query_api_test.rb
  - test/influxdb/write_api_integration_test.rb
  - test/influxdb/write_api_test.rb