databricks_sql 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/CHANGELOG.md +8 -0
- data/CODE_OF_CONDUCT.md +10 -0
- data/LICENSE.txt +21 -0
- data/README.md +215 -0
- data/Rakefile +10 -0
- data/lib/databricks_sql/client.rb +477 -0
- data/lib/databricks_sql/configuration.rb +22 -0
- data/lib/databricks_sql/errors.rb +53 -0
- data/lib/databricks_sql/external_link_handler.rb +89 -0
- data/lib/databricks_sql/result.rb +25 -0
- data/lib/databricks_sql/type_coercer.rb +59 -0
- data/lib/databricks_sql/version.rb +5 -0
- data/lib/databricks_sql.rb +31 -0
- data/sig/databricks_sql.rbs +182 -0
- data/spec/databricks_sql/client_spec.rb +533 -0
- data/spec/databricks_sql_spec.rb +37 -0
- data/spec/spec_helper.rb +16 -0
- metadata +77 -0
|
@@ -0,0 +1,533 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "spec_helper"

RSpec.describe DatabricksSql::Client do
  before do
    DatabricksSql.reset_configuration!
  end

  let(:host) { "https://dbc.example.com" }
  let(:token) { "dapi-token" }
  let(:warehouse_id) { "warehouse-123" }
  let(:client) { described_class.new(host: host, token: token, warehouse_id: warehouse_id) }
  let(:statement) { "SELECT id, name FROM users" }

  # WebMock response triple (status/headers/body) for a JSON payload.
  def json_response(payload)
    {
      status: 200,
      headers: { "Content-Type" => "application/json" },
      body: JSON.generate(payload)
    }
  end

  # Stubs POST /api/2.0/sql/statements to answer with the given JSON payload.
  def stub_statement_submission(payload)
    stub_request(:post, "#{host}/api/2.0/sql/statements").to_return(json_response(payload))
  end

  # Stubs a CSV file download at +url+.
  def stub_csv_download(url, csv)
    stub_request(:get, url).to_return(
      status: 200,
      headers: { "Content-Type" => "text/csv" },
      body: csv
    )
  end

  describe "#initialize" do
    it "raises when host does not use HTTPS" do
      expect do
        described_class.new(host: "http://dbc.example.com", token: token, warehouse_id: warehouse_id)
      end.to raise_error(DatabricksSql::ConfigurationError, /HTTPS/)
    end

    it "raises when warehouse_id is missing" do
      expect do
        described_class.new(host: host, token: token, warehouse_id: "")
      end.to raise_error(DatabricksSql::ConfigurationError)
    end

    it "uses global configured values when args are omitted" do
      DatabricksSql.configure do |config|
        config.host = host
        config.token = token
        config.warehouse_id = warehouse_id
      end

      configured_client = described_class.new
      expect(configured_client.host).to eq(host)
      expect(configured_client.warehouse_id).to eq(warehouse_id)
    end

    it "redacts token in inspect output" do
      inspected = client.inspect

      expect(inspected).to include("[REDACTED]")
      expect(inspected).not_to include(token)
    end
  end

  describe "#execute_statement" do
    it "sends official optional parameters in statement payload" do
      stub_request(:post, "#{host}/api/2.0/sql/statements")
        .with do |request|
          payload = JSON.parse(request.body)
          payload["byte_limit"] == 1000 &&
            payload["row_limit"] == 2 &&
            payload["query_tags"] == [{ "key" => "team", "value" => "finance" }]
        end
        .to_return(
          json_response(
            "statement_id" => "stmt-params",
            "status" => { "state" => "SUCCEEDED" },
            "result" => {
              "disposition" => "INLINE",
              "schema" => { "columns" => [{ "name" => "id" }] },
              "data_array" => [[1]]
            }
          )
        )

      result = client.execute_statement(
        statement: statement,
        byte_limit: 1000,
        row_limit: 2,
        query_tags: [{ key: "team", value: "finance" }]
      )

      expect(result.rows).to eq([{ "id" => 1 }])
    end

    it "automatically fetches remaining INLINE chunks" do
      stub_statement_submission(
        "statement_id" => "stmt-multi-inline",
        "status" => { "state" => "SUCCEEDED" },
        "manifest" => {
          "schema" => { "columns" => [{ "name" => "id" }] }
        },
        "result" => {
          "chunk_index" => 0,
          "data_array" => [[1], [2]],
          "next_chunk_internal_link" => "/api/2.0/sql/statements/stmt-multi-inline/result/chunks/1?row_offset=2"
        }
      )

      stub_request(:get, "#{host}/api/2.0/sql/statements/stmt-multi-inline/result/chunks/1?row_offset=2")
        .to_return(json_response("chunk_index" => 1, "data_array" => [[3], [4]]))

      result = client.execute_statement(statement: statement)

      expect(result.rows).to eq([
        { "id" => 1 },
        { "id" => 2 },
        { "id" => 3 },
        { "id" => 4 }
      ])
      expect(result.next_chunk_internal_link).to be_nil
    end

    it "allows manual next chunk retrieval" do
      stub_statement_submission(
        "statement_id" => "stmt-manual",
        "status" => { "state" => "SUCCEEDED" },
        "manifest" => {
          "schema" => { "columns" => [{ "name" => "id" }] }
        },
        "result" => {
          "chunk_index" => 0,
          "data_array" => [[1]],
          "next_chunk_internal_link" => "/api/2.0/sql/statements/stmt-manual/result/chunks/1?row_offset=1"
        }
      )

      stub_request(:get, "#{host}/api/2.0/sql/statements/stmt-manual/result/chunks/1?row_offset=1")
        .to_return(json_response("chunk_index" => 1, "data_array" => [[2]]))

      result = client.execute_statement(statement: statement, auto_fetch_chunks: false)
      expect(result.rows).to eq([{ "id" => 1 }])
      expect(result.next_chunk_internal_link).to eq(
        "/api/2.0/sql/statements/stmt-manual/result/chunks/1?row_offset=1"
      )

      next_chunk = client.fetch_next_chunk(
        statement_id: result.statement_id,
        next_chunk_internal_link: result.next_chunk_internal_link,
        columns: result.columns,
        disposition: result.disposition
      )

      expect(next_chunk.rows).to eq([{ "id" => 2 }])
      expect(next_chunk.next_chunk_internal_link).to be_nil
    end

    it "fetches statement details when submit response succeeds without result payload" do
      stub_statement_submission(
        "statement_id" => "stmt-doc-shape",
        "status" => { "state" => "SUCCEEDED" }
      )

      stub_request(:get, "#{host}/api/2.0/sql/statements/stmt-doc-shape").to_return(
        json_response(
          "statement_id" => "stmt-doc-shape",
          "status" => { "state" => "SUCCEEDED" },
          "manifest" => {
            "format" => "JSON_ARRAY",
            "schema" => {
              "columns" => [
                { "name" => "l_orderkey", "position" => 0, "type_name" => "LONG", "type_text" => "BIGINT" },
                {
                  "name" => "l_extendedprice",
                  "position" => 1,
                  "type_name" => "DECIMAL",
                  "type_precision" => 18,
                  "type_scale" => 2,
                  "type_text" => "DECIMAL(18,2)"
                },
                { "name" => "l_shipdate", "position" => 2, "type_name" => "DATE", "type_text" => "DATE" }
              ]
            }
          },
          "result" => {
            "chunk_index" => 0,
            "data_array" => [
              ["2", "71433.16", "1997-01-28"],
              ["7", "86152.02", "1996-01-15"]
            ],
            "row_count" => 2,
            "row_offset" => 0
          }
        )
      )

      result = client.execute_statement(statement: statement)

      expect(result.rows).to eq([
        {
          "l_orderkey" => "2",
          "l_extendedprice" => "71433.16",
          "l_shipdate" => "1997-01-28"
        },
        {
          "l_orderkey" => "7",
          "l_extendedprice" => "86152.02",
          "l_shipdate" => "1996-01-15"
        }
      ])
    end

    it "returns INLINE rows" do
      stub_statement_submission(
        "statement_id" => "stmt-inline",
        "status" => { "state" => "SUCCEEDED" },
        "result" => {
          "disposition" => "INLINE",
          "schema" => { "columns" => [{ "name" => "id" }, { "name" => "name" }] },
          "data_array" => [[1, "Ana"], [2, "Bia"]]
        }
      )

      result = client.execute_statement(statement: statement)

      expect(result).to be_a(DatabricksSql::Result)
      expect(result.success?).to be(true)
      expect(result.rows).to eq([
        { "id" => 1, "name" => "Ana" },
        { "id" => 2, "name" => "Bia" }
      ])
    end

    it "downloads and parses EXTERNAL_LINK results" do
      stub_statement_submission(
        "statement_id" => "stmt-external",
        "status" => { "state" => "SUCCEEDED" },
        "result" => {
          "disposition" => "EXTERNAL_LINK",
          "external_link" => "https://files.example.com/result.csv"
        }
      )

      stub_csv_download("https://files.example.com/result.csv", "id,name\n1,Ana\n2,Bia\n")

      result = client.execute_statement(statement: statement, disposition: "EXTERNAL_LINK")

      expect(result.rows).to eq([
        { "id" => "1", "name" => "Ana" },
        { "id" => "2", "name" => "Bia" }
      ])
    end

    it "downloads and parses EXTERNAL_LINKS results" do
      stub_statement_submission(
        "statement_id" => "stmt-external-links",
        "status" => { "state" => "SUCCEEDED" },
        "result" => {
          "disposition" => "EXTERNAL_LINKS",
          "external_link" => "https://files.example.com/result-links.csv"
        }
      )

      stub_csv_download("https://files.example.com/result-links.csv", "id,name\n3,Caio\n")

      result = client.execute_statement(statement: statement, disposition: "EXTERNAL_LINKS")

      expect(result.rows).to eq([
        { "id" => "3", "name" => "Caio" }
      ])
    end

    it "automatically fetches remaining EXTERNAL_LINKS chunks" do
      stub_statement_submission(
        "statement_id" => "stmt-external-multi",
        "status" => { "state" => "SUCCEEDED" },
        "result" => {
          "disposition" => "EXTERNAL_LINKS",
          "external_links" => [
            {
              "external_link" => "https://files.example.com/chunk0.csv",
              "next_chunk_internal_link" => "/api/2.0/sql/statements/stmt-external-multi/result/chunks/1"
            }
          ]
        }
      )

      stub_csv_download("https://files.example.com/chunk0.csv", "id,name\n1,Ana\n")

      stub_request(:get, "#{host}/api/2.0/sql/statements/stmt-external-multi/result/chunks/1").to_return(
        json_response(
          "external_links" => [
            {
              "external_link" => "https://files.example.com/chunk1.csv"
            }
          ]
        )
      )

      stub_csv_download("https://files.example.com/chunk1.csv", "id,name\n2,Bia\n")

      result = client.execute_statement(statement: statement, disposition: "EXTERNAL_LINKS")

      expect(result.rows).to eq([
        { "id" => "1", "name" => "Ana" },
        { "id" => "2", "name" => "Bia" }
      ])
      expect(result.next_chunk_internal_link).to be_nil
    end

    it "rejects EXTERNAL_LINK over HTTP by default" do
      stub_statement_submission(
        "statement_id" => "stmt-external-http",
        "status" => { "state" => "SUCCEEDED" },
        "result" => {
          "disposition" => "EXTERNAL_LINK",
          "external_link" => "http://files.example.com/result.csv"
        }
      )

      expect do
        client.execute_statement(statement: statement, disposition: "EXTERNAL_LINK")
      end.to raise_error(DatabricksSql::ConfigurationError, /HTTPS/)
    end

    it "rejects EXTERNAL_LINK host not in allowlist" do
      DatabricksSql.configure do |config|
        config.host = host
        config.token = token
        config.warehouse_id = warehouse_id
        config.external_link_allowed_hosts = ["trusted.example.com"]
      end

      secure_client = described_class.new

      stub_statement_submission(
        "statement_id" => "stmt-external-blocked",
        "status" => { "state" => "SUCCEEDED" },
        "result" => {
          "disposition" => "EXTERNAL_LINK",
          "external_link" => "https://files.example.com/result.csv"
        }
      )

      expect do
        secure_client.execute_statement(statement: statement, disposition: "EXTERNAL_LINK")
      end.to raise_error(DatabricksSql::ConfigurationError, /allowed list/)
    end

    it "allows EXTERNAL_LINK host in allowlist" do
      DatabricksSql.configure do |config|
        config.host = host
        config.token = token
        config.warehouse_id = warehouse_id
        config.external_link_allowed_hosts = ["files.example.com"]
      end

      secure_client = described_class.new

      stub_statement_submission(
        "statement_id" => "stmt-external-allowed",
        "status" => { "state" => "SUCCEEDED" },
        "result" => {
          "disposition" => "EXTERNAL_LINK",
          "external_link" => "https://files.example.com/result.csv"
        }
      )

      stub_csv_download("https://files.example.com/result.csv", "id,name\n1,Ana\n")

      result = secure_client.execute_statement(statement: statement, disposition: "EXTERNAL_LINK")
      expect(result.rows).to eq([{ "id" => "1", "name" => "Ana" }])
    end

    it "raises mapped error for non-2xx" do
      stub_request(:post, "#{host}/api/2.0/sql/statements").to_return(status: 401, body: "unauthorized")

      expect do
        client.execute_statement(statement: statement)
      end.to raise_error(DatabricksSql::AuthenticationError)
    end
  end

  describe "asynchronous polling" do
    it "submits async and waits until terminal state" do
      stub_statement_submission(
        "statement_id" => "stmt-async",
        "status" => { "state" => "PENDING" }
      )

      # Two sequential responses: first poll RUNNING, second poll SUCCEEDED.
      stub_request(:get, "#{host}/api/2.0/sql/statements/stmt-async")
        .to_return(
          json_response("statement_id" => "stmt-async", "status" => { "state" => "RUNNING" }),
          json_response(
            "statement_id" => "stmt-async",
            "status" => { "state" => "SUCCEEDED" },
            "result" => {
              "disposition" => "INLINE",
              "schema" => { "columns" => [{ "name" => "id" }] },
              "data_array" => [[7]]
            }
          )
        )

      submission = client.execute_statement_async(statement: statement)
      expect(submission.fetch("statement_id")).to eq("stmt-async")

      result = client.wait_for_statement(statement_id: "stmt-async", max_wait: 5, poll_interval: 0)
      expect(result.status).to eq("SUCCEEDED")
      expect(result.rows).to eq([{ "id" => 7 }])
    end

    it "raises timeout error when polling exceeds max_wait" do
      stub_request(:get, "#{host}/api/2.0/sql/statements/stmt-timeout").to_return(
        json_response("statement_id" => "stmt-timeout", "status" => { "state" => "RUNNING" })
      )

      expect do
        client.wait_for_statement(statement_id: "stmt-timeout", max_wait: 0, poll_interval: 0)
      end.to raise_error(DatabricksSql::TimeoutError)
    end
  end
end
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "spec_helper"

RSpec.describe DatabricksSql do
  before do
    DatabricksSql.reset_configuration!
  end

  # Applies host/token/warehouse settings through +namespace+'s configure
  # block, then returns a client built from that global configuration.
  def build_configured_client(namespace, host:, token:, warehouse_id:)
    namespace.configure do |config|
      config.host = host
      config.token = token
      config.warehouse_id = warehouse_id
    end

    DatabricksSql::Client.new
  end

  it "has a version number" do
    expect(DatabricksSql::VERSION).not_to be_nil
  end

  it "supports global configure with DatabricksSql" do
    client = build_configured_client(
      DatabricksSql,
      host: "https://dbc.example.com",
      token: "dapi-token",
      warehouse_id: "wh-1"
    )

    expect(client.host).to eq("https://dbc.example.com")
    expect(client.warehouse_id).to eq("wh-1")
  end

  it "supports Databricks.configure alias" do
    client = build_configured_client(
      Databricks,
      host: "https://dbc.alias.example.com",
      token: "dapi-alias",
      warehouse_id: "wh-alias"
    )

    expect(client.host).to eq("https://dbc.alias.example.com")
    expect(client.warehouse_id).to eq("wh-alias")
  end
end
|
data/spec/spec_helper.rb
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "bundler/setup"
require "databricks_sql"
require "webmock/rspec"

RSpec.configure do |rspec|
  # Forbid the legacy monkey-patched should/stub syntax.
  rspec.disable_monkey_patching!
  rspec.warnings = true

  # Run examples in random order; reseed Ruby's PRNG from the RSpec seed so
  # a failing order can be reproduced with --seed.
  rspec.order = :random
  Kernel.srand rspec.seed

  # Every example starts with real HTTP blocked; WebMock stubs must cover
  # all outbound requests (localhost is still allowed).
  rspec.before do
    WebMock.disable_net_connect!(allow_localhost: true)
  end
end
|
metadata
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: databricks_sql
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.1.0
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- Lairton Mendes
|
|
8
|
+
bindir: exe
|
|
9
|
+
cert_chain: []
|
|
10
|
+
date: 1980-01-02 00:00:00.000000000 Z
|
|
11
|
+
dependencies:
|
|
12
|
+
- !ruby/object:Gem::Dependency
|
|
13
|
+
name: csv
|
|
14
|
+
requirement: !ruby/object:Gem::Requirement
|
|
15
|
+
requirements:
|
|
16
|
+
- - ">="
|
|
17
|
+
- !ruby/object:Gem::Version
|
|
18
|
+
version: '3.3'
|
|
19
|
+
type: :runtime
|
|
20
|
+
prerelease: false
|
|
21
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
22
|
+
requirements:
|
|
23
|
+
- - ">="
|
|
24
|
+
- !ruby/object:Gem::Version
|
|
25
|
+
version: '3.3'
|
|
26
|
+
description: Execute SQL statements against Databricks SQL Warehouse with synchronous
|
|
27
|
+
and asynchronous polling support, inline results, and external link downloads.
|
|
28
|
+
email:
|
|
29
|
+
- lairton.mendes@gmail.com
|
|
30
|
+
executables: []
|
|
31
|
+
extensions: []
|
|
32
|
+
extra_rdoc_files: []
|
|
33
|
+
files:
|
|
34
|
+
- ".rspec"
|
|
35
|
+
- CHANGELOG.md
|
|
36
|
+
- CODE_OF_CONDUCT.md
|
|
37
|
+
- LICENSE.txt
|
|
38
|
+
- README.md
|
|
39
|
+
- Rakefile
|
|
40
|
+
- lib/databricks_sql.rb
|
|
41
|
+
- lib/databricks_sql/client.rb
|
|
42
|
+
- lib/databricks_sql/configuration.rb
|
|
43
|
+
- lib/databricks_sql/errors.rb
|
|
44
|
+
- lib/databricks_sql/external_link_handler.rb
|
|
45
|
+
- lib/databricks_sql/result.rb
|
|
46
|
+
- lib/databricks_sql/type_coercer.rb
|
|
47
|
+
- lib/databricks_sql/version.rb
|
|
48
|
+
- sig/databricks_sql.rbs
|
|
49
|
+
- spec/databricks_sql/client_spec.rb
|
|
50
|
+
- spec/databricks_sql_spec.rb
|
|
51
|
+
- spec/spec_helper.rb
|
|
52
|
+
homepage: https://github.com/lairtonmendes/databricks_sql#readme
|
|
53
|
+
licenses:
|
|
54
|
+
- MIT
|
|
55
|
+
metadata:
|
|
56
|
+
homepage_uri: https://github.com/lairtonmendes/databricks_sql#readme
|
|
57
|
+
source_code_uri: https://github.com/lairtonmendes/databricks_sql
|
|
58
|
+
changelog_uri: https://github.com/lairtonmendes/databricks_sql/blob/main/CHANGELOG.md
|
|
59
|
+
rubygems_mfa_required: 'true'
|
|
60
|
+
rdoc_options: []
|
|
61
|
+
require_paths:
|
|
62
|
+
- lib
|
|
63
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
64
|
+
requirements:
|
|
65
|
+
- - ">="
|
|
66
|
+
- !ruby/object:Gem::Version
|
|
67
|
+
version: 3.2.0
|
|
68
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
69
|
+
requirements:
|
|
70
|
+
- - ">="
|
|
71
|
+
- !ruby/object:Gem::Version
|
|
72
|
+
version: '0'
|
|
73
|
+
requirements: []
|
|
74
|
+
rubygems_version: 4.0.3
|
|
75
|
+
specification_version: 4
|
|
76
|
+
summary: Ruby client for Databricks SQL Statements API
|
|
77
|
+
test_files: []
|