trino-client 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.github/CODEOWNERS +1 -0
- data/.github/PULL_REQUEST_TEMPLATE.md +18 -0
- data/.github/workflows/ruby.yml +30 -0
- data/.gitignore +4 -0
- data/ChangeLog.md +168 -0
- data/Gemfile +7 -0
- data/LICENSE +202 -0
- data/README.md +131 -0
- data/Rakefile +45 -0
- data/lib/trino-client.rb +1 -0
- data/lib/trino/client.rb +23 -0
- data/lib/trino/client/client.rb +78 -0
- data/lib/trino/client/errors.rb +46 -0
- data/lib/trino/client/faraday_client.rb +242 -0
- data/lib/trino/client/model_versions/0.149.rb +1683 -0
- data/lib/trino/client/model_versions/0.153.rb +1719 -0
- data/lib/trino/client/model_versions/0.173.rb +1685 -0
- data/lib/trino/client/model_versions/0.178.rb +1964 -0
- data/lib/trino/client/model_versions/0.205.rb +2169 -0
- data/lib/trino/client/model_versions/303.rb +2574 -0
- data/lib/trino/client/model_versions/316.rb +2595 -0
- data/lib/trino/client/model_versions/351.rb +2726 -0
- data/lib/trino/client/models.rb +38 -0
- data/lib/trino/client/query.rb +144 -0
- data/lib/trino/client/statement_client.rb +279 -0
- data/lib/trino/client/version.rb +20 -0
- data/modelgen/model_versions.rb +280 -0
- data/modelgen/modelgen.rb +119 -0
- data/modelgen/models.rb +31 -0
- data/modelgen/trino_models.rb +270 -0
- data/release.rb +56 -0
- data/spec/basic_query_spec.rb +82 -0
- data/spec/client_spec.rb +75 -0
- data/spec/gzip_spec.rb +40 -0
- data/spec/model_spec.rb +35 -0
- data/spec/spec_helper.rb +42 -0
- data/spec/statement_client_spec.rb +637 -0
- data/spec/tpch/q01.sql +21 -0
- data/spec/tpch/q02.sql +43 -0
- data/spec/tpch_query_spec.rb +41 -0
- data/trino-client.gemspec +31 -0
- metadata +211 -0
data/modelgen/trino_models.rb
ADDED
@@ -0,0 +1,270 @@
+
+module TrinoModels
+  require 'find'
+  require 'stringio'
+
+  PRIMITIVE_TYPES = %w[String boolean long int short byte double float Integer Double Boolean]
+  ARRAY_PRIMITIVE_TYPES = PRIMITIVE_TYPES.map { |t| "#{t}[]" }
+
+  class Model < Struct.new(:name, :fields)
+  end
+
+  class Field < Struct.new(:key, :nullable, :array, :map, :type, :base_type, :map_value_base_type, :base_type_alias)
+    alias_method :nullable?, :nullable
+    alias_method :array?, :array
+    alias_method :map?, :map
+
+    def name
+      @name ||= key.gsub(/[A-Z]/) {|f| "_#{f.downcase}" }
+    end
+  end
+
+  class ModelAnalysisError < StandardError
+  end
+
+  class ModelAnalyzer
+    def initialize(source_path, options={})
+      @source_path = source_path
+      @ignore_types = PRIMITIVE_TYPES + ARRAY_PRIMITIVE_TYPES + (options[:skip_models] || [])
+      @path_mapping = options[:path_mapping] || {}
+      @name_mapping = options[:name_mapping] || {}
+      @extra_fields = options[:extra_fields] || {}
+      @models = {}
+      @skipped_models = []
+    end
+
+    attr_reader :skipped_models
+
+    def models
+      @models.values.sort_by {|model| model.name }
+    end
+
+    def analyze(root_models)
+      root_models.each {|model_name|
+        analyze_model(model_name)
+      }
+    end
+
+    private
+
+    PROPERTY_PATTERN = /@JsonProperty\(\"(\w+)\"\)\s+(@Nullable\s+)?([\w\<\>\[\]\,\s\.]+)\s+\w+/
+    CREATOR_PATTERN = /@JsonCreator[\s]+public[\s]+(static\s+)?(\w+)[\w\s]*\((?:\s*#{PROPERTY_PATTERN}\s*,?)+\)/
+    GENERIC_PATTERN = /(\w+)\<(\w+)\>/
+
+    def analyze_fields(model_name, creator_block, generic: nil)
+      model_name = "#{model_name}_#{generic}" if generic
+      extra = @extra_fields[model_name] || []
+      fields = creator_block.scan(PROPERTY_PATTERN).concat(extra).map do |key,nullable,type|
+        map = false
+        array = false
+        nullable = !!nullable
+        if m = /(?:List|Set)<(\w+)>/.match(type)
+          base_type = m[1]
+          array = true
+        elsif m = /(?:Map|ListMultimap)<(\w+),\s*(\w+)>/.match(type)
+          base_type = m[1]
+          map_value_base_type = m[2]
+          map = true
+        elsif m = /Optional<([\w\[\]\<\>]+)>/.match(type)
+          base_type = m[1]
+          nullable = true
+        elsif m = /OptionalInt/.match(type)
+          base_type = 'Integer'
+          nullable = true
+        elsif m = /OptionalLong/.match(type)
+          base_type = 'Long'
+          nullable = true
+        elsif m = /OptionalDouble/.match(type)
+          base_type = 'Double'
+          nullable = true
+        elsif type =~ /\w+/
+          base_type = type
+        else
+          raise ModelAnalysisError, "Unsupported type #{type} in model #{model_name}"
+        end
+        base_type = @name_mapping[[model_name, base_type]] || base_type
+        map_value_base_type = @name_mapping[[model_name, map_value_base_type]] || map_value_base_type
+
+        if generic
+          base_type = generic if base_type == 'T'
+          map_value_base_type = generic if map_value_base_type == 'T'
+        end
+        if m = GENERIC_PATTERN.match(base_type)
+          base_type_alias = "#{m[1]}_#{m[2]}"
+        end
+
+        Field.new(key, !!nullable, array, map, type, base_type, map_value_base_type, base_type_alias)
+      end
+
+      @models[model_name] = Model.new(model_name, fields)
+      # recursive call
+      fields.each do |field|
+        analyze_model(field.base_type, model_name)
+        analyze_model(field.map_value_base_type, model_name) if field.map_value_base_type
+      end
+
+      return fields
+    end
+
+    def analyze_model(model_name, parent_model= nil, generic: nil)
+      return if @models[model_name] || @ignore_types.include?(model_name)
+
+      if m = GENERIC_PATTERN.match(model_name)
+        analyze_model(m[1], generic: m[2])
+        analyze_model(m[2])
+        return
+      end
+
+      path = find_class_file(model_name, parent_model)
+      java = File.read(path)
+
+      m = CREATOR_PATTERN.match(java)
+      unless m
+        raise ModelAnalysisError, "Can't find JsonCreator of a model class #{model_name} of #{parent_model} at #{path}"
+      end
+
+      body = m[0]
+      # check inner class first
+      while true
+        offset = m.end(0)
+        m = CREATOR_PATTERN.match(java, offset)
+        break unless m
+        inner_model_name = m[2]
+        next if @models[inner_model_name] || @ignore_types.include?(inner_model_name)
+        fields = analyze_fields(inner_model_name, m[0])
+      end
+
+      fields = analyze_fields(model_name, body, generic: generic)
+
+    rescue => e
+      puts "Skipping model #{parent_model}/#{model_name}: #{e}"
+      @skipped_models << model_name
+    end
+
+    def find_class_file(model_name, parent_model)
+      return @path_mapping[model_name] if @path_mapping.has_key? model_name
+
+      @source_files ||= Find.find(@source_path).to_a
+      pattern = /\/#{model_name}.java$/
+      matched = @source_files.find_all {|path| path =~ pattern && !path.include?('/test/') && !path.include?('/verifier/')}
+      if matched.empty?
+        raise ModelAnalysisError, "Model class #{model_name} is not found"
+      end
+      if matched.size == 1
+        return matched.first
+      else
+        raise ModelAnalysisError, "Model class #{model_name} of #{parent_model} found multiple match #{matched}"
+      end
+    end
+  end
+
+  class ModelFormatter
+    def initialize(options={})
+      @indent = options[:indent] || ' '
+      @base_indent_count = options[:base_indent_count] || 0
+      @struct_class = options[:struct_class] || 'Struct'
+      @special_struct_initialize_method = options[:special_struct_initialize_method]
+      @primitive_types = PRIMITIVE_TYPES + ARRAY_PRIMITIVE_TYPES + (options[:primitive_types] || [])
+      @skip_types = options[:skip_types] || []
+      @simple_classes = options[:simple_classes]
+      @enum_types = options[:enum_types]
+      @special_types = options[:special_types] || {}
+      @data = StringIO.new
+    end
+
+    def contents
+      @data.string
+    end
+
+    def format(models)
+      @models = models
+      models.each do |model|
+        @model = model
+
+        puts_with_indent 0, "class << #{model.name} ="
+        puts_with_indent 2, "#{@struct_class}.new(#{model.fields.map {|f| ":#{f.name}" }.join(', ')})"
+        format_decode
+        puts_with_indent 0, "end"
+        line
+      end
+    end
+
+    private
+
+    def line
+      @data.puts ""
+    end
+
+    def puts_with_indent(n, str)
+      @data.puts "#{@indent * (@base_indent_count + n)}#{str}"
+    end
+
+    def format_decode
+      puts_with_indent 1, "def decode(hash)"
+
+      puts_with_indent 2, "unless hash.is_a?(Hash)"
+      puts_with_indent 3, "raise TypeError, \"Can't convert \#{hash.class} to Hash\""
+      puts_with_indent 2, "end"
+
+      if @special_struct_initialize_method
+        puts_with_indent 2, "obj = allocate"
+        puts_with_indent 2, "obj.send(:#{@special_struct_initialize_method},"
+      else
+        puts_with_indent 2, "new("
+      end
+
+      @model.fields.each do |field|
+        next if @skip_types.include?(field.base_type) || @skip_types.include?(field.map_value_base_type)

+        if @primitive_types.include?(field.base_type) && !field.map?
+          expr = "hash[\"#{field.key}\"]"
+        else
+          expr = ""
+          expr << "hash[\"#{field.key}\"] && " #if field.nullable?
+
+          if field.map?
+            key_expr = convert_expression(field.base_type, field.base_type, "k")
+            value_expr = convert_expression(field.map_value_base_type, field.map_value_base_type, "v")
+            if key_expr == 'k' && value_expr == 'v'
+              expr = "hash[\"#{field.key}\"]"
+            else
+              expr << "Hash[hash[\"#{field.key}\"].to_a.map! {|k,v| [#{key_expr}, #{value_expr}] }]"
+            end
+          elsif field.array?
+            elem_expr = convert_expression(field.base_type, field.base_type, "h")
+            expr << "hash[\"#{field.key}\"].map {|h| #{elem_expr} }"
+          else
+            expr << convert_expression(field.type, field.base_type_alias || field.base_type, "hash[\"#{field.key}\"]")
+          end
+        end
+
+        #comment = "# #{field.base_type}#{field.array? ? '[]' : ''} #{field.key}"
+        #puts_with_indent 3, "#{expr}, #{comment}"
+        puts_with_indent 3, "#{expr},"
+      end
+
+      puts_with_indent 2, ")"
+
+      if @special_struct_initialize_method
+        puts_with_indent 2, "obj"
+      end
+
+      puts_with_indent 1, "end"
+    end
+
+    def convert_expression(type, base_type, key)
+      if @special_types[type]
+        special.call(key)
+      elsif @enum_types.include?(type) || @enum_types.include?(base_type)
+        "#{key}.downcase.to_sym"
+      elsif @primitive_types.include?(base_type)
+        key
+      elsif @simple_classes.include?(base_type)
+        "#{base_type}.new(#{key})"
+      else # model class
+        "#{base_type}.decode(#{key})"
+      end
+    end
+  end
+end
+
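The file above defines two halves of the gem's code generator: `ModelAnalyzer` scans a Trino/Presto Java source tree for `@JsonCreator` constructors and records each model's fields, and `ModelFormatter` renders those models as Ruby `Struct`-based classes with `decode` methods. The gem's own driver is `data/modelgen/modelgen.rb` (not shown in this excerpt); the sketch below only illustrates how the two classes fit together, and the source path, root model name, and formatter options in it are assumptions, not values taken from the package.

```ruby
# Hypothetical driver sketch; the real one is data/modelgen/modelgen.rb.
require_relative 'modelgen/trino_models'   # assumes we run from the data/ directory

analyzer = TrinoModels::ModelAnalyzer.new('/path/to/trino/source')  # assumed Trino checkout
analyzer.analyze(['QueryResults'])                                  # assumed root model name

formatter = TrinoModels::ModelFormatter.new(
  enum_types: [],       # convert_expression calls include? on these,
  simple_classes: []    # so they must at least be empty collections
)
formatter.format(analyzer.models)

puts formatter.contents              # generated Ruby model definitions
warn analyzer.skipped_models.inspect # models the analyzer could not parse
```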
data/release.rb
ADDED
@@ -0,0 +1,56 @@
+#!/usr/bin/env ruby
+
+require 'fileutils'
+
+PREFIX = 'https://github.com/treasure-data/trino-client-ruby'
+RELEASE_NOTES_FILE = "ChangeLog.md"
+
+last_tag = `git describe --tags --abbrev=0`.chomp
+last_version = last_tag.sub("v", "")
+puts "last version: #{last_version}"
+
+print "next version? "
+next_version = STDIN.gets.chomp
+
+abort("Can't use empty version string") if next_version.empty?
+
+logs = `git log #{last_tag}..HEAD --pretty=format:'%h %s'`
+# Add links to GitHub issues
+logs = logs.gsub(/\#([0-9]+)/, "[#\\1](#{PREFIX}/issues/\\1)")
+
+new_release_notes = []
+new_release_notes <<= "\#\# #{next_version}\n"
+new_release_notes <<= logs.split(/\n/)
+  .reject{|line| line.include?("#{last_version} release notes")}
+  .map{|x|
+    rev = x[0..6]
+    "- #{x[8..-1]} [[#{rev}](#{PREFIX}/commit/#{rev})]\n"
+  }
+
+release_notes = []
+notes = File.readlines(RELEASE_NOTES_FILE)
+
+release_notes <<= notes[0..1]
+release_notes <<= new_release_notes
+release_notes <<= "\n"
+release_notes <<= notes[2..-1]
+
+TMP_RELEASE_NOTES_FILE = "#{RELEASE_NOTES_FILE}.tmp"
+File.delete(TMP_RELEASE_NOTES_FILE) if File.exists?(TMP_RELEASE_NOTES_FILE)
+File.write("#{TMP_RELEASE_NOTES_FILE}", release_notes.join)
+system("cat #{TMP_RELEASE_NOTES_FILE} | vim - -c ':f #{TMP_RELEASE_NOTES_FILE}' -c ':9'")
+
+abort("The release note file is not saved. Aborted") unless File.exists?(TMP_RELEASE_NOTES_FILE)
+
+def run(cmd)
+  puts cmd
+  system cmd
+end
+
+FileUtils.cp(TMP_RELEASE_NOTES_FILE, RELEASE_NOTES_FILE)
+File.delete(TMP_RELEASE_NOTES_FILE)
+
+# run "git commit #{RELEASE_NOTES_FILE} -m \"Add #{next_version} release notes\""
+# run "git tag v#{next_version}"
+# run "git push"
+# run "git push --tags"
data/spec/basic_query_spec.rb
ADDED
@@ -0,0 +1,82 @@
+require 'spec_helper'
+
+describe Trino::Client::Client do
+  before(:all) do
+    WebMock.disable!
+    @cluster = TinyPresto::Cluster.new()
+    @container = @cluster.run
+    @client = Trino::Client.new(server: 'localhost:8080', catalog: 'memory', user: 'test-user', schema: 'default')
+    loop do
+      begin
+        @client.run('show schemas')
+        break
+      rescue StandardError => exception
+        puts "Waiting for cluster ready... #{exception}"
+        sleep(3)
+      end
+    end
+    puts 'Cluster is ready'
+  end
+
+  after(:all) do
+    @cluster.stop
+    WebMock.enable!
+  end
+
+  it 'show schemas' do
+    columns, rows = run_with_retry(@client, 'show schemas')
+    expect(columns.length).to be(1)
+    expect(rows.length).to be(2)
+  end
+
+  it 'ctas' do
+    expected = [[1, 'a'], [2, 'b']]
+    run_with_retry(@client, "create table ctas1 as select * from (values (1, 'a'), (2, 'b')) t(c1, c2)")
+    columns, rows = run_with_retry(@client, 'select * from ctas1')
+    expect(columns.map(&:name)).to match_array(%w[c1 c2])
+    expect(rows).to eq(expected)
+  end
+
+  it 'next_uri' do
+    @client.query('show schemas') do |q|
+      expect(q.next_uri).to start_with('http://localhost:8080/v1/statement/')
+    end
+  end
+
+  it 'advance' do
+    @client.query('show schemas') do |q|
+      expect(q.advance).to be(true)
+    end
+  end
+
+  it 'current query result' do
+    @client.query('show schemas') do |q|
+      expect(q.current_results.info_uri).to start_with('http://localhost:8080/ui/query.html')
+    end
+  end
+
+  it 'statement stats' do
+    @client.query('show schemas') do |q|
+      stats = q.current_results.stats
+      # Immediate subsequent request should get queued result
+      expect(stats.queued).to be(true)
+      expect(stats.scheduled).to be(false)
+    end
+  end
+
+  it 'partial cancel' do
+    @client.query('show schemas') do |q|
+      q.cancel
+      expect { q.query_info }.to raise_error(Trino::Client::TrinoHttpError, /Error 410 Gone/)
+    end
+  end
+
+  it 'row chunk' do
+    expected_schemas = %w[default information_schema]
+    @client.query('show schemas') do |q|
+      q.each_row do |r|
+        expect(expected_schemas).to include(r[0])
+      end
+    end
+  end
+end
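The integration spec above drives the public client API against a throwaway Trino container. Outside RSpec, the same calls look roughly like the sketch below; it assumes a coordinator on localhost:8080 with the `memory` catalog enabled, which is what the spec's TinyPresto cluster provides.

```ruby
# Minimal sketch of the client calls exercised by basic_query_spec.rb.
# Assumes a local Trino coordinator on localhost:8080 with the memory catalog.
require 'trino-client'

client = Trino::Client.new(
  server:  'localhost:8080',
  catalog: 'memory',
  user:    'test-user',
  schema:  'default'
)

# Blocking form: returns column metadata and all rows at once.
columns, rows = client.run('show schemas')
puts columns.map(&:name).inspect
puts rows.inspect

# Streaming form: iterate rows as result pages arrive,
# as the 'row chunk' example does with each_row.
client.query('show schemas') do |q|
  q.each_row { |row| puts row[0] }
end
```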
data/spec/client_spec.rb
ADDED
@@ -0,0 +1,75 @@
+require 'spec_helper'
+
+describe Trino::Client::Client do
+  let(:client) { Trino::Client.new({}) }
+
+  describe 'rehashes' do
+    let(:columns) do
+      [
+        Models::Column.new(name: 'animal', type: 'string'),
+        Models::Column.new(name: 'score', type: 'integer'),
+        Models::Column.new(name: 'name', type: 'string')
+      ]
+    end
+
+    it 'multiple rows' do
+      rows = [
+        ['dog', 1, 'Lassie'],
+        ['horse', 5, 'Mr. Ed'],
+        ['t-rex', 37, 'Doug']
+      ]
+      client.stub(:run).and_return([columns, rows])
+
+      rehashed = client.run_with_names('fake query')
+
+      rehashed.length.should == 3
+
+      rehashed[0]['animal'].should == 'dog'
+      rehashed[0]['score'].should == 1
+      rehashed[0]['name'].should == 'Lassie'
+
+      rehashed[0].values[0].should == 'dog'
+      rehashed[0].values[1].should == 1
+      rehashed[0].values[2].should == 'Lassie'
+
+      rehashed[1]['animal'].should == 'horse'
+      rehashed[1]['score'].should == 5
+      rehashed[1]['name'].should == 'Mr. Ed'
+
+      rehashed[1].values[0].should == 'horse'
+      rehashed[1].values[1].should == 5
+      rehashed[1].values[2].should == 'Mr. Ed'
+    end
+
+    it 'empty results' do
+      rows = []
+      client.stub(:run).and_return([columns, rows])
+
+      rehashed = client.run_with_names('fake query')
+
+      rehashed.length.should == 0
+    end
+
+    it 'handles too few result columns' do
+      rows = [['wrong', 'count']]
+      client.stub(:run).and_return([columns, rows])
+
+      client.run_with_names('fake query').should == [{
+        "animal" => "wrong",
+        "score" => "count",
+        "name" => nil,
+      }]
+    end
+
+    it 'handles too many result columns' do
+      rows = [['wrong', 'count', 'too', 'much', 'columns']]
+      client.stub(:run).and_return([columns, rows])
+
+      client.run_with_names('fake query').should == [{
+        "animal" => "wrong",
+        "score" => "count",
+        "name" => 'too',
+      }]
+    end
+  end
+end
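The unit spec above pins down `run_with_names`: rows come back as hashes keyed by column name, padded with `nil` when a row is shorter than the column list and truncated when it is longer. A small usage sketch; the connection settings are assumptions reused from the integration spec, not part of this file.

```ruby
# Sketch only; server/catalog/user/schema values are assumed, not from the spec.
require 'trino-client'

client = Trino::Client.new(server: 'localhost:8080', catalog: 'memory',
                           user: 'test-user', schema: 'default')

rows = client.run_with_names("select * from (values (1, 'a'), (2, 'b')) t(c1, c2)")
rows.each do |row|
  puts "c1=#{row['c1']} c2=#{row['c2']}"   # access columns by name
end
```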