sequel-hexspace 1.0.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: b2949511f39f19c48b5120965d8b236fe93215c07ee9cf987d7b80f81d649b81
+   data.tar.gz: 68b65e86ccc1ad3cc1fef989eb1d79b7927f96c4f4b5a60abb9699c7b425f4a4
+ SHA512:
+   metadata.gz: 021db16089f80aee9a8398b54747265e6eedb59713b7fac36316d4623a324ce96036e532df0ac75171ec47a04ade882ee168c5c773e5487a7591567bfaab707c
+   data.tar.gz: 2767b9661167bc70fb775513f4abcb0465a77cc19c16f9bdb94972acd30896a310c3daeb6d8a34cfdd089ccf6c8bd6558bc459a9ae2eb3415c7cfd35b5d7273a
data/LICENSE ADDED
@@ -0,0 +1,18 @@
+ Copyright (c) 2023 Jeremy Evans
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to
+ deal in the Software without restriction, including without limitation the
+ rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ sell copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+ THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README ADDED
@@ -0,0 +1,3 @@
+ This is a hexspace adapter for Sequel, designed to be used with Spark (not
+ Hive). You can use the hexspace:// protocol in the Sequel connection URL
+ to use this adapter.
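
As a usage sketch (not part of the packaged files): connecting through this adapter would typically look like the following. The host, port, database name, and credentials are placeholder values, and keyword options are only forwarded to Hexspace::Client if its constructor accepts them (see the connect method in the adapter below).

  require 'sequel'

  # Placeholder connection details for a Spark Thrift Server; adjust as needed.
  DB = Sequel.connect('hexspace://localhost:10000/default')

  # Equivalent keyword form; options not accepted by Hexspace::Client are dropped.
  # DB = Sequel.connect(adapter: :hexspace, host: 'localhost', port: 10000, database: 'default')

  DB.fetch('SELECT 1 AS x').all  # => [{:x=>1}] against a reachable server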
data/lib/sequel/adapters/hexspace.rb ADDED
@@ -0,0 +1,92 @@
+ require 'hexspace'
+ require_relative 'shared/spark'
+
+ module Sequel
+   module Hexspace
+     class Database < Sequel::Database
+       include Spark::DatabaseMethods
+
+       set_adapter_scheme :hexspace
+
+       ALLOWED_CLIENT_KEYWORDS = ::Hexspace::Client.instance_method(:initialize).parameters.map(&:last).freeze
+
+       def connect(server)
+         opts = server_opts(server)
+         opts[:username] = opts[:user]
+         opts.select!{|k,v| v.to_s != '' && ALLOWED_CLIENT_KEYWORDS.include?(k)}
+         ::Hexspace::Client.new(**opts)
+       end
+
+       def dataset_class_default
+         Dataset
+       end
+
+       def disconnect_connection(conn)
+         # Hexspace does not appear to support a disconnection method
+         # To keep tests happy, mark the connection as invalid
+         conn.instance_variable_set(:@sequel_invalid, true)
+       end
+
+       def execute(sql, opts=OPTS)
+         synchronize(opts[:server]) do |conn|
+           res = log_connection_yield(sql, conn){conn.execute(sql, result_object: true)}
+         rescue => e
+           raise_error(e)
+         else
+           yield res if defined?(yield)
+         end
+       end
+
+       def execute_insert(sql, opts=OPTS)
+         execute(sql, opts)
+
+         # Return nil instead of empty array.
+         # Spark does not support primary keys nor autoincrementing values
+         nil
+       end
+
+       def valid_connection?(conn)
+         !conn.instance_variable_get(:@sequel_invalid)
+       end
+     end
+
+     class Dataset < Sequel::Dataset
+       include Spark::DatasetMethods
+
+       def fetch_rows(sql)
+         execute(sql) do |result|
+           columns = result.columns.map(&:to_sym)
+           self.columns = columns
+           next if result.rows.empty?
+
+           types = result.column_types
+           column_info = columns.map.with_index do |name, i|
+             conversion_proc = case types[i]
+             when 'binary'
+               Sequel.method(:blob)
+             when 'timestamp'
+               db.method(:to_application_timestamp)
+             end
+
+             [i, name, conversion_proc]
+           end
+
+           result.rows.each do |row|
+             h = {}
+             column_info.each do |i, name, conversion_proc|
+               value = row[i]
+               h[name] = if value.nil?
+                 nil
+               elsif conversion_proc
+                 conversion_proc.call(value)
+               else
+                 value
+               end
+             end
+             yield h
+           end
+         end
+       end
+     end
+   end
+ end
data/lib/sequel/adapters/shared/spark.rb ADDED
@@ -0,0 +1,467 @@
+ # frozen-string-literal: true
+
+ require 'sequel/adapters/utils/unmodified_identifiers'
+
+ module Sequel
+   module Spark
+     Sequel::Database.set_shared_adapter_scheme(:spark, self)
+
+     module DatabaseMethods
+       include UnmodifiedIdentifiers::DatabaseMethods
+
+       def create_schema(schema_name, opts=OPTS)
+         run(create_schema_sql(schema_name, opts))
+       end
+
+       def database_type
+         :spark
+       end
+
+       def drop_schema(schema_name, opts=OPTS)
+         run(drop_schema_sql(schema_name, opts))
+       end
+
+       # Spark does not support primary keys, so do not
+       # add any options
+       def serial_primary_key_options
+         # We could raise an exception here instead of just
+         # ignoring the primary key setting.
+         {:type=>Integer}
+       end
+
+       def supports_create_table_if_not_exists?
+         true
+       end
+
+       def tables(opts=OPTS)
+         _mangle_tables(_tables("TABLES", :tableName, opts) - _views(opts), opts)
+       end
+
+       # Spark does not support transactions.
+       def transaction(opts=nil)
+         yield
+       end
+
+       # Use an inline VALUES table.
+       def values(v)
+         @default_dataset.clone(:values=>v)
+       end
+
+       def views(opts=OPTS)
+         _mangle_tables(_views(opts), opts)
+       end
+
+       private
+
+       def _tables(type, column, opts)
+         sql = String.new
+         sql << "SHOW " << type
+         if schema = opts[:schema]
+           sql << " IN " << literal(schema)
+         end
+         if like = opts[:like]
+           sql << " LIKE " << literal(like)
+         end
+
+         ds = dataset.with_sql(sql)
+
+         # Always internally qualify, so that if a table name in a schema
+         # has the same name as a temporary view, it will not exclude
+         # the table name.
+         ds.map([:namespace, column]).map do |ns, name|
+           if ns && !ns.empty?
+             Sequel::SQL::QualifiedIdentifier.new(ns, name)
+           else
+             name.to_sym
+           end
+         end
+       end
+
+       def _views(opts)
+         _tables("VIEWS", :viewName, opts)
+       end
+
+       def _mangle_tables(tables, opts)
+         if opts[:qualify]
+           tables
+         else
+           tables.map{|t| t.is_a?(Sequel::SQL::QualifiedIdentifier) ? t.column.to_sym : t}
+         end
+       end
+
+       def create_schema_sql(schema_name, opts)
+         sql = String.new
+         sql << 'CREATE SCHEMA '
+         sql << 'IF NOT EXISTS ' if opts[:if_not_exists]
+         sql << literal(schema_name)
+
+         if comment = opts[:comment]
+           sql << ' COMMENT '
+           sql << literal(comment)
+         end
+
+         if location = opts[:location]
+           sql << ' LOCATION '
+           sql << literal(location)
+         end
+
+         if properties = opts[:properties]
+           sql << ' WITH DBPROPERTIES ('
+           properties.each do |k, v|
+             sql << literal(k.to_s) << "=" << literal(v.to_s)
+           end
+           sql << ')'
+         end
+
+         sql
+       end
+
+       def create_table_sql(name, generator, options)
+         _append_table_view_options_sql(super, options)
+       end
+
+       def create_table_as_sql(name, sql, options)
+         _append_table_view_options_sql(create_table_prefix_sql(name, options), options) << " AS #{sql}"
+       end
+
+       def create_view_sql(name, source, options)
+         if source.is_a?(Hash)
+           options = source
+           source = nil
+         end
+
+         sql = String.new
+         sql << create_view_sql_append_columns("CREATE #{'OR REPLACE 'if options[:replace]}#{'TEMPORARY ' if options[:temp]}VIEW#{' IF NOT EXISTS' if options[:if_not_exists]} #{quote_schema_table(name)}", options[:columns])
+
+         if source
+           source = source.sql if source.is_a?(Dataset)
+           sql << " AS " << source
+         end
+
+         _append_table_view_options_sql(sql, options)
+       end
+
+       def _append_table_view_options_sql(sql, options)
+         if options[:using]
+           sql << " USING " << options[:using].to_s
+         end
+
+         if options[:partitioned_by]
+           sql << " PARTITIONED BY "
+           _append_column_list_sql(sql, options[:partitioned_by])
+         end
+
+         if options[:clustered_by]
+           sql << " CLUSTERED BY "
+           _append_column_list_sql(sql, options[:clustered_by])
+
+           if options[:sorted_by]
+             sql << " SORTED BY "
+             _append_column_list_sql(sql, options[:sorted_by])
+           end
+           raise "Must specify :num_buckets when :clustered_by is used" unless options[:num_buckets]
+           sql << " INTO " << literal(options[:num_buckets]) << " BUCKETS"
+         end
+
+         if options[:options]
+           sql << ' OPTIONS ('
+           options[:options].each do |k, v|
+             sql << literal(k.to_s) << "=" << literal(v.to_s)
+           end
+           sql << ')'
+         end
+
+         sql
+       end
+
+       def _append_column_list_sql(sql, columns)
+         sql << '('
+         schema_utility_dataset.send(:identifier_list_append, sql, Array(columns))
+         sql << ')'
+       end
+
+       def drop_schema_sql(schema_name, opts)
+         sql = String.new
+         sql << 'DROP SCHEMA '
+         sql << 'IF EXISTS ' if opts[:if_exists]
+         sql << literal(schema_name)
+         sql << ' CASCADE' if opts[:cascade]
+         sql
+       end
+
+       def schema_parse_table(table, opts)
+         m = output_identifier_meth(opts[:dataset])
+         im = input_identifier_meth(opts[:dataset])
+         metadata_dataset.with_sql("DESCRIBE #{"#{im.call(opts[:schema])}." if opts[:schema]}#{im.call(table)}").map do |row|
+           [m.call(row[:col_name]), {:db_type=>row[:data_type], :type=>schema_column_type(row[:data_type])}]
+         end
+       end
+
+       def supports_create_or_replace_view?
+         true
+       end
+
+       def type_literal_generic_file(column)
+         'binary'
+       end
+
+       def type_literal_generic_float(column)
+         'float'
+       end
+
+       def type_literal_generic_string(column)
+         'string'
+       end
+     end
+
+     module DatasetMethods
+       include UnmodifiedIdentifiers::DatasetMethods
+
+       Dataset.def_sql_method(self, :select, [['if opts[:values]', %w'values'], ['else', %w'with select distinct columns from join where group having compounds order limit']])
+
+       def date_add_sql_append(sql, da)
+         expr = da.expr
+         cast_type = da.cast_type || Time
+
+         h = Hash.new(0)
+         da.interval.each do |k, v|
+           h[k] = v || 0
+         end
+
+         if h[:weeks]
+           h[:days] += h[:weeks] * 7
+         end
+
+         if h[:years] != 0 || h[:months] != 0
+           expr = Sequel.+(expr, Sequel.function(:make_ym_interval, h[:years], h[:months]))
+         end
+
+         if h[:days] != 0 || h[:hours] != 0 || h[:minutes] != 0 || h[:seconds] != 0
+           expr = Sequel.+(expr, Sequel.function(:make_dt_interval, h[:days], h[:hours], h[:minutes], h[:seconds]))
+         end
+
+         literal_append(sql, expr)
+       end
+
+       # Emulate delete by selecting all rows except the ones being deleted
+       # into a new table, drop the current table, and rename the new
+       # table to the current table name.
+       #
+       # This is designed to minimize the changes to the tests, and is
+       # not recommended for production use.
+       def delete
+         _with_temp_table
+       end
+
+       def update(columns)
+         updated_cols = columns.keys
+         other_cols = db.from(first_source_table).columns - updated_cols
+         updated_vals = columns.values
+
+         _with_temp_table do |tmp_name|
+           db.from(tmp_name).insert([*updated_cols, *other_cols], select(*updated_vals, *other_cols))
+         end
+       end
+
+       private def _with_temp_table
+         n = count
+         table_name = first_source_table
+         tmp_name = literal(table_name).gsub('`', '') + "__sequel_delete_emulate"
+         db.create_table(tmp_name, :as=>select_all.invert)
+         yield tmp_name if defined?(yield)
+         db.drop_table(table_name)
+         db.rename_table(tmp_name, table_name)
+         n
+       end
+
+       protected def compound_clone(type, dataset, opts)
+         dataset = dataset.from_self if dataset.opts[:with]
+         super
+       end
+
+       def complex_expression_sql_append(sql, op, args)
+         case op
+         when :<<
+           literal_append(sql, Sequel.function(:shiftleft, *args))
+         when :>>
+           literal_append(sql, Sequel.function(:shiftright, *args))
+         when :~
+           literal_append(sql, Sequel.function(:regexp, *args))
+         when :'!~'
+           literal_append(sql, ~Sequel.function(:regexp, *args))
+         when :'~*'
+           literal_append(sql, Sequel.function(:regexp, Sequel.function(:lower, args[0]), Sequel.function(:lower, args[1])))
+         when :'!~*'
+           literal_append(sql, ~Sequel.function(:regexp, Sequel.function(:lower, args[0]), Sequel.function(:lower, args[1])))
+         else
+           super
+         end
+       end
+
+       def multi_insert_sql_strategy
+         :values
+       end
+
+       def quoted_identifier_append(sql, name)
+         sql << '`' << name.to_s.gsub('`', '``') << '`'
+       end
+
+       def requires_sql_standard_datetimes?
+         true
+       end
+
+       def insert_supports_empty_values?
+         false
+       end
+
+       def literal_blob_append(sql, v)
+         sql << "to_binary('" << [v].pack("m*").gsub("\n", "") << "', 'base64')"
+       end
+
+       def literal_false
+         "false"
+       end
+
+       def literal_string_append(sql, v)
+         sql << "'" << v.gsub(/(['\\])/, '\\\\\1') << "'"
+       end
+
+       def literal_true
+         "true"
+       end
+
+       def supports_cte?(type=:select)
+         type == :select
+       end
+
+       def supports_cte_in_subqueries?
+         true
+       end
+
+       def supports_group_cube?
+         true
+       end
+
+       def supports_group_rollup?
+         true
+       end
+
+       def supports_grouping_sets?
+         true
+       end
+
+       def supports_regexp?
+         true
+       end
+
+       def supports_window_functions?
+         true
+       end
+
+       # Handle forward references in existing CTEs in the dataset by inserting this
+       # dataset before any dataset that would reference it.
+       def with(name, dataset, opts=OPTS)
+         opts = Hash[opts].merge!(:name=>name, :dataset=>dataset).freeze
+         references = ReferenceExtractor.references(dataset)
+
+         if with = @opts[:with]
+           with = with.dup
+           existing_references = @opts[:with_references]
+
+           if referencing_dataset = existing_references[literal(name)]
+             unless i = with.find_index{|o| o[:dataset].equal?(referencing_dataset)}
+               raise Sequel::Error, "internal error finding referencing dataset"
+             end
+
+             with.insert(i, opts)
+
+             # When not inserting dataset at the end, if both the new dataset and the
+             # dataset right after it refer to the same reference, keep the reference
+             # to the new dataset, so that that dataset is inserted before the new
+             # dataset
+             existing_references = existing_references.reject do |k, v|
+               references[k] && v.equal?(referencing_dataset)
+             end
+           else
+             with << opts
+           end
+
+           # Assume we will insert the dataset at the end, so existing references have priority
+           references = references.merge(existing_references)
+         else
+           with = [opts]
+         end
+
+         clone(:with=>with.freeze, :with_references=>references.freeze)
+       end
+
+       private def select_values_sql(sql)
+         sql << 'VALUES '
+         expression_list_append(sql, opts[:values])
+       end
+     end
+
+     # ReferenceExtractor extracts references from datasets that will be used as CTEs.
+     class ReferenceExtractor < ASTTransformer
+       TABLE_IDENTIFIER_KEYS = [:from, :join].freeze
+       COLUMN_IDENTIFIER_KEYS = [:select, :where, :having, :order, :group, :compounds].freeze
+
+       # Returns a hash of literal string identifier keys referenced by the given
+       # dataset with the given dataset as the value for each key.
+       def self.references(dataset)
+         new(dataset).tap{|ext| ext.transform(dataset)}.references
+       end
+
+       attr_reader :references
+
+       def initialize(dataset)
+         @dataset = dataset
+         @references = {}
+       end
+
+       private
+
+       # Extract references from FROM/JOIN, where bare identifiers represent tables.
+       def table_identifier_extract(o)
+         case o
+         when String
+           @references[@dataset.literal(Sequel.identifier(o))] = @dataset
+         when Symbol, SQL::Identifier
+           @references[@dataset.literal(o)] = @dataset
+         when SQL::AliasedExpression
+           table_identifier_extract(o.expression)
+         when SQL::JoinOnClause
+           table_identifier_extract(o.table_expr)
+           v(o.on)
+         when SQL::JoinClause
+           table_identifier_extract(o.table_expr)
+         else
+           v(o)
+         end
+       end
+
+       # Extract references from datasets, where bare identifiers in most cases represent columns,
+       # and only qualified identifiers include a table reference.
+       def v(o)
+         case o
+         when Sequel::Dataset
+           # Special case FROM/JOIN, because identifiers inside refer to tables and not columns
+           TABLE_IDENTIFIER_KEYS.each{|k| o.opts[k]&.each{|jc| table_identifier_extract(jc)}}
+
+           # Look in other keys that may have qualified references or subqueries
+           COLUMN_IDENTIFIER_KEYS.each{|k| v(o.opts[k])}
+         when SQL::QualifiedIdentifier
+           # If a qualified identifier has a qualified identifier as a key,
+           # such as schema.table.column, ignore it, because CTE identifiers shouldn't
+           # be schema qualified.
+           unless o.table.is_a?(SQL::QualifiedIdentifier)
+             @references[@dataset.literal(Sequel.identifier(o.table))] = @dataset
+           end
+         else
+           super
+         end
+       end
+     end
+     private_constant :ReferenceExtractor
+   end
+ end
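
As an illustrative sketch (not part of the packaged files), the Spark-specific options handled by create_schema_sql and _append_table_view_options_sql above can be exercised through the usual Sequel schema methods; the schema, table, and column names here are placeholders.

  # CREATE SCHEMA ... with the :if_not_exists and :comment options shown above
  DB.create_schema(:analytics, if_not_exists: true, comment: 'example schema')

  # :using, :partitioned_by, and :clustered_by/:sorted_by/:num_buckets are appended
  # to the CREATE TABLE statement; :num_buckets is required with :clustered_by.
  DB.create_table(:events, using: :parquet, partitioned_by: [:event_date],
                  clustered_by: [:user_id], sorted_by: [:user_id], num_buckets: 8) do
    Integer :user_id
    Date :event_date
    String :name
  end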
metadata ADDED
@@ -0,0 +1,141 @@
+ --- !ruby/object:Gem::Specification
+ name: sequel-hexspace
+ version: !ruby/object:Gem::Version
+   version: 1.0.0
+ platform: ruby
+ authors:
+ - Jeremy Evans
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2024-04-03 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: sequel
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.0'
+ - !ruby/object:Gem::Dependency
+   name: hexspace
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 0.2.1
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 0.2.1
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: minitest
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.7'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.7'
+ - !ruby/object:Gem::Dependency
+   name: minitest-hooks
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: minitest-global_expectations
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: |
+   This is a hexspace adapter for Sequel, designed to be used with Spark (not
+   Hive). You can use the hexspace:// protocol in the Sequel connection URL
+   to use this adapter.
+ email: code@jeremyevans.net
+ executables: []
+ extensions: []
+ extra_rdoc_files:
+ - LICENSE
+ files:
+ - LICENSE
+ - README
+ - lib/sequel/adapters/hexspace.rb
+ - lib/sequel/adapters/shared/spark.rb
+ homepage: https://github.com/jeremyevans/sequel-hexspace.git
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options:
+ - "--quiet"
+ - "--line-numbers"
+ - "--inline-source"
+ - "--title"
+ - 'sequel-hexspace: Sequel adapter for hexspace driver and Apache Spark database'
+ - "--main"
+ - README
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.4.6
+ signing_key:
+ specification_version: 4
+ summary: Sequel adapter for hexspace driver and Apache Spark database
+ test_files: []