bigbroda 0.0.7 → 0.1.0.pre
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/Appraisals +15 -0
- data/Gemfile +1 -0
- data/README.md +39 -21
- data/Rakefile +5 -2
- data/{google_bigquery.gemspec → bigbroda.gemspec} +2 -2
- data/gemfiles/rails_3.gemfile +20 -0
- data/gemfiles/rails_4.0.3.gemfile +20 -0
- data/gemfiles/rails_4.0.3.gemfile.lock +176 -0
- data/gemfiles/rails_4.1.gemfile +20 -0
- data/gemfiles/rails_4.1.gemfile.lock +182 -0
- data/gemfiles/rails_4.2.gemfile +20 -0
- data/gemfiles/rails_4.2.gemfile.lock +202 -0
- data/gemfiles/rails_4.gemfile +20 -0
- data/gemfiles/rails_4.gemfile.lock +176 -0
- data/lib/active_record/connection_adapters/bigquery_adapter.rb +32 -601
- data/lib/active_record/connection_adapters/rails_41.rb +607 -0
- data/lib/active_record/connection_adapters/rails_42.rb +628 -0
- data/lib/{google_bigquery → bigbroda}/auth.rb +3 -3
- data/lib/{google_bigquery → bigbroda}/client.rb +3 -3
- data/lib/{google_bigquery → bigbroda}/config.rb +1 -1
- data/lib/{google_bigquery → bigbroda}/dataset.rb +23 -23
- data/lib/{google_bigquery → bigbroda}/engine.rb +4 -4
- data/lib/{google_bigquery → bigbroda}/jobs.rb +28 -28
- data/lib/bigbroda/project.rb +16 -0
- data/lib/{google_bigquery → bigbroda}/railtie.rb +3 -3
- data/lib/{google_bigquery → bigbroda}/table.rb +19 -19
- data/lib/{google_bigquery → bigbroda}/table_data.rb +7 -7
- data/lib/bigbroda/version.rb +3 -0
- data/lib/bigbroda.rb +27 -0
- data/lib/generators/{google_bigquery → bigbroda}/install/install_generator.rb +2 -2
- data/lib/generators/templates/{bigquery.rb.erb → bigbroda.rb.erb} +1 -1
- data/spec/dummy/config/application.rb +1 -1
- data/spec/functional/adapter/adapter_spec.rb +40 -38
- data/spec/functional/auth_spec.rb +3 -3
- data/spec/functional/config_spec.rb +5 -5
- data/spec/functional/dataset_spec.rb +19 -19
- data/spec/functional/project_spec.rb +4 -4
- data/spec/functional/table_data_spec.rb +13 -13
- data/spec/functional/table_spec.rb +30 -30
- data/spec/spec_helper.rb +2 -2
- metadata +32 -20
- data/lib/google_bigquery/project.rb +0 -16
- data/lib/google_bigquery/version.rb +0 -3
- data/lib/google_bigquery.rb +0 -27
@@ -0,0 +1,628 @@
|
|
1
|
+
|
2
|
+
|
3
|
+
# ActiveRecord connection adapter for Google BigQuery (Rails 4.2 variant).
# Datasets play the role of databases and BigQuery tables the role of SQL
# tables; DDL is performed through the BigBroda REST wrappers rather than SQL.
module ActiveRecord
  module ConnectionAdapters

    # Column subclass with BigQuery-specific value coercions.
    class BigqueryColumn < Column
      class << self
        TRUE_VALUES = [true, 1, '1', 'true', 'TRUE'].to_set
        FALSE_VALUES = [false, 0, '0','false', 'FALSE'].to_set

        # Binary payloads are normalized to ASCII-8BIT encoding.
        def binary_to_string(value)
          if value.encoding != Encoding::ASCII_8BIT
            value = value.force_encoding(Encoding::ASCII_8BIT)
          end
          value
        end

        # Parses a time string, falling back to interpreting it as epoch
        # seconds (BigQuery TIMESTAMP values arrive as float strings).
        def string_to_time(string)
          return string unless string.is_a?(String)
          return nil if string.empty?
          fast_string_to_time(string) || fallback_string_to_time(string) || Time.at(string.to_f).send(Base.default_timezone)
        end
      end
    end

    class BQBinary < Type::Binary # :nodoc:
      # Force binary values into ASCII-8BIT before use.
      def cast_value(value)
        if value.encoding != Encoding::ASCII_8BIT
          value = value.force_encoding(Encoding::ASCII_8BIT)
        end
        value
      end
    end

    class BQString < Type::String # :nodoc:
      # BigQuery only accepts UTF-8 strings; transcode binary-encoded input.
      # FIX: removed a leftover `binding.pry` debugger breakpoint that halted
      # every write in production.
      def type_cast_for_database(value)
        if value.is_a?(::String) && value.encoding == Encoding::ASCII_8BIT
          value.encode(Encoding::UTF_8)
        else
          super
        end
      end

      # FIX: the body was a bare `binding.pry` (debugger breakpoint), which
      # both halted execution and returned the debugger's value instead of a
      # cast string. Delegate to the default Type::String behavior.
      def type_cast_from_user(value)
        super
      end
    end

    class BigqueryAdapter < AbstractAdapter

      #include SchemaStatements

      # BigQuery has no auto-increment, so even primary keys map to STRING.
      NATIVE_DATABASE_TYPES = {
        :primary_key => "STRING",
        :string => { :name => "STRING", :default=> nil },
        :integer => { :name => "INTEGER", :default=> nil },
        :float => { :name => "FLOAT", :default=> 0.0 },
        :datetime => { :name => "TIMESTAMP" },
        :timestamp => { name: "TIMESTAMP" },
        :date => { :name => "TIMESTAMP" },
        :record => { :name => "RECORD" },
        :boolean => { :name => "BOOLEAN" }
      }

      class Version
      end

      class ColumnDefinition < ActiveRecord::ConnectionAdapters::ColumnDefinition
        attr_accessor :array
      end

      class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition

        # BigQuery has no native primary keys; emulate with a STRING column.
        def primary_key(name, type = :primary_key, options = {})
          return column name, :string, options
        end

        # Declares a RECORD-typed column for each given name.
        # FIX: previously this always created a column literally named
        # :created_at (copy-paste from #timestamps), ignoring the args.
        def record(*args)
          options = args.extract_options!
          args.each do |name|
            column(name, :record, options)
          end
        end

        def timestamps(*args)
          options = args.extract_options!
          column(:created_at, :timestamp, options)
          column(:updated_at, :timestamp, options)
        end

        # Reference columns are STRING (ids are strings in BigQuery).
        def references(*args)
          options = args.extract_options!
          polymorphic = options.delete(:polymorphic)
          index_options = options.delete(:index)
          args.each do |col|
            column("#{col}_id", :string, options)
            column("#{col}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) if polymorphic
            index(polymorphic ? %w(id type).map { |t| "#{col}_#{t}" } : "#{col}_id", index_options.is_a?(Hash) ? index_options : {}) if index_options
          end
        end

      end

      # Per-process cache of prepared statements, keyed by SQL text.
      class StatementPool < ConnectionAdapters::StatementPool
        def initialize(connection, max)
          super
          @cache = Hash.new { |h,pid| h[pid] = {} }
        end

        def each(&block); cache.each(&block); end
        def key?(key); cache.key?(key); end
        def [](key); cache[key]; end
        def length; cache.length; end

        # Evicts oldest entries once the pool is full.
        def []=(sql, key)
          while @max <= cache.size
            dealloc(cache.shift.last[:stmt])
          end
          cache[sql] = key
        end

        def clear
          cache.values.each do |hash|
            dealloc hash[:stmt]
          end
          cache.clear
        end

        private
        # Statements are cached per process id ($$) so forked workers
        # never share handles.
        def cache
          @cache[$$]
        end

        def dealloc(stmt)
          stmt.close unless stmt.closed?
        end
      end


      class BindSubstitution < Arel::Visitors::SQLite # :nodoc:
        include Arel::Visitors::BindVisitor
      end

      def initialize(connection, logger, config)
        super(connection, logger)

        @active = nil
        @statements = StatementPool.new(@connection,
                                        self.class.type_cast_config_to_integer(config.fetch(:statement_limit) { 1000 }))
        @config = config
        # FIX: Hash#merge returns a new hash and the result was discarded;
        # merge! mutates @config so the option actually takes effect.
        @config.merge!(prepared_statements: false) if BigQueryRailsHelpers.rails40?
        # BigQuery does not support prepared statements at all.
        @prepared_statements = false
        @type_map = Type::HashLookupTypeMap.new
        initialize_type_map(type_map)
        @visitor = unprepared_visitor unless ActiveRecord::VERSION::MINOR >= 2
      end

      def adapter_name #:nodoc:
        'BigQuery'
      end

      def supports_ddl_transactions?
        false
      end

      def supports_savepoints?
        false
      end

      def supports_partial_index?
        true
      end

      # Returns false: BigQuery has no prepared statements, so nothing to cache.
      def supports_statement_cache?
        false
      end

      # Returns true, since this connection adapter supports migrations.
      def supports_migrations? #:nodoc:
        true
      end

      def supports_primary_key? #:nodoc:
        true
      end

      def requires_reloading?
        false
      end

      def supports_add_column?
        true
      end

      def active?
        @active != false
      end

      # Disconnects from the database if already connected. Otherwise, this
      # method does nothing.
      def disconnect!
        super
        @active = false
        @connection.close rescue nil
      end

      # Clears the prepared statements cache.
      def clear_cache!
        @statements.clear
      end

      # Returns true
      def supports_count_distinct? #:nodoc:
        true
      end

      # Returns false
      def supports_autoincrement? #:nodoc:
        false
      end

      # FIX: this method was defined twice (first returning true, then false);
      # Ruby's last definition won, so the effective value was always false.
      # Collapsed to a single definition preserving that behavior.
      def supports_index_sort_order?
        false
      end

      # Returns 62. SQLite supports index names up to 64
      # characters. The rest is used by rails internally to perform
      # temporary rename operations
      def allowed_index_name_length
        index_name_length - 2
      end

      # Always STRING: BigQuery has no auto-increment primary keys.
      def default_primary_key_type
        'STRING'
      end

      def native_database_types #:nodoc:
        NATIVE_DATABASE_TYPES
      end

      # Returns the current database encoding format as a string, eg: 'UTF-8'
      def encoding
        @connection.encoding.to_s
      end

      # Returns false.
      def supports_explain?
        false
      end

      # Creates a BigQuery dataset, which stands in for a database.
      def create_database(database)
        result = BigBroda::Dataset.create(@config[:project],
          {"datasetReference"=> { "datasetId" => database }} )
        result
      end

      # Deletes every table in the dataset, then the dataset itself
      # (BigQuery refuses to delete a non-empty dataset).
      def drop_database(database)
        tables = BigBroda::Table.list(@config[:project], database)["tables"]
        unless tables.blank?
          tables.map!{|o| o["tableReference"]["tableId"]}
          tables.each do |table_id|
            BigBroda::Table.delete(@config[:project], database, table_id)
          end
        end
        result = BigBroda::Dataset.delete(@config[:project], database )
        result
      end

      # QUOTING ==================================================

      def _quote(value, column = nil)
        if value.kind_of?(String) && column && column.type == :binary && column.class.respond_to?(:string_to_binary)
          s = column.class.string_to_binary(value).unpack("H*")[0]
          "x'#{s}'"
        else
          super
        end
      end

      # Table names are qualified with the dataset: <dataset>.<table>.
      def quote_table_name(name)
        "#{@config[:database]}.#{name}"
      end

      def quote_table_name_for_assignment(table, attr)
        quote_column_name(attr)
      end

      def quote_column_name(name) #:nodoc:
        name
      end

      # Quote date/time values for use in SQL input. Includes microseconds
      # if the value is a Time responding to usec.
      def quoted_date(value) #:nodoc:
        if value.respond_to?(:usec)
          "#{super}.#{sprintf("%06d", value.usec)}"
        else
          super
        end
      end

      def quoted_true
        "1"
      end

      def quoted_false
        "0"
      end

      # FIX: removed a leftover `binding.pry` debugger breakpoint.
      def type_cast(value, column) # :nodoc:
        return value.to_f if BigDecimal === value
        return super unless String === value
        return super unless column && value

        value = super
        if column.type == :string && value.encoding == Encoding::ASCII_8BIT
          logger.error "Binary data inserted for `string` type on column `#{column.name}`" if logger
          value = value.encode Encoding::UTF_8
        end
        value
      end

      # DATABASE STATEMENTS ======================================

      # EXPLAIN is not supported by BigQuery; logs a warning instead.
      def explain(arel, binds = [])
        bypass_feature
      end

      class ExplainPrettyPrinter
        # Pretty prints the result of a EXPLAIN QUERY PLAN in a way that resembles
        # the output of the SQLite shell:
        #
        #   0|0|0|SEARCH TABLE users USING INTEGER PRIMARY KEY (rowid=?) (~1 rows)
        #   0|1|1|SCAN TABLE posts (~100000 rows)
        #
        def pp(result) # :nodoc:
          result.rows.map do |row|
            row.join('|')
          end.join("\n") + "\n"
        end
      end

      # Runs the SQL through the BigQuery jobs API and converts the JSON
      # response into an ActiveRecord::Result.
      # FIX: removed a leftover `binding.pry` debugger breakpoint that
      # halted every query.
      def exec_query(sql, name = nil, binds = [])
        log(sql, name, binds) do
          # BigQuery has no prepared statements, so binds are never cached.
          result = BigBroda::Jobs.query(@config[:project], {"query"=> sql })
          cols = result["schema"]["fields"].map{|o| o["name"] }
          records = result["totalRows"].to_i.zero? ? [] : result["rows"].map{|o| o["f"].map{|k,v| k["v"]} }
          stmt = records

          ActiveRecord::Result.new(cols, stmt)
        end
      end

      def exec_delete(sql, name = 'SQL', binds = [])
        exec_query(sql, name, binds)
        @connection.changes
      end

      alias :exec_update :exec_delete

      def last_inserted_id(result)
        @connection.last_insert_row_id
      end

      def execute(sql, name = nil) #:nodoc:
        log(sql, name) { @connection.execute(sql) }
      end

      # FIX: removed a leftover `binding.pry` debugger breakpoint.
      def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
        super
        id_value || @connection.last_insert_row_id
      end
      alias :create :insert_sql

      def select_rows(sql, name = nil)
        exec_query(sql, name).rows
      end

      # BigQuery has no transactions; these just log for visibility.
      def begin_db_transaction #:nodoc:
        log('begin transaction',nil) { } #@connection.transaction
      end

      def commit_db_transaction #:nodoc:
        log('commit transaction',nil) { } #@connection.commit
      end

      def rollback_db_transaction #:nodoc:
        log('rollback transaction',nil) { } #@connection.rollback
      end

      # SCHEMA STATEMENTS ========================================

      # Lists table ids in the configured dataset, optionally filtered to
      # a single table name.
      def tables(name = nil, table_name = nil) #:nodoc:
        table = BigBroda::Table.list(@config[:project], @config[:database])
        return [] if table["tables"].blank?
        table_names = table["tables"].map{|o| o["tableReference"]["tableId"]}
        table_names = table_names.select{|o| o == table_name } if table_name
        table_names
      end

      def table_exists?(table_name)
        table_name && tables(nil, table_name).any?
      end

      # Returns an array of BigqueryColumn objects for the table specified by +table_name+.
      def columns(table_name) #:nodoc:
        schema = BigBroda::Table.get(@config[:project], @config[:database], table_name)
        schema["schema"]["fields"].map do |field|
          # REQUIRED fields are NOT NULL; everything else is nullable.
          mode = field['mode'].present? && field['mode'] == "REQUIRED" ? false : true
          #column expects (name, default, sql_type = nil, null = true)
          BigqueryColumn.new(field['name'], nil, field['type'], mode )
        end
      end

      # Returns an array of indexes for the given table (always empty:
      # BigQuery has no indexes).
      def indexes(table_name, name = nil) #:nodoc:
        []
      end

      def primary_key(table_name) #:nodoc:
        "id"
      end

      def remove_index!(table_name, index_name) #:nodoc:
        #exec_query "DROP INDEX #{quote_column_name(index_name)}"
      end

      # Builds a TableDefinition from the migration block and creates the
      # table through the BigQuery API. Raises with the API error details
      # when creation fails.
      # See also TableDefinition#column for details on how to create columns.
      def create_table(table_name, options = {})
        td = create_table_definition table_name, options[:temporary], options[:options]

        unless options[:id] == false
          pk = options.fetch(:primary_key) {
            Base.get_primary_key table_name.to_s.singularize
          }

          td.primary_key pk, options.fetch(:id, :primary_key), options
        end

        yield td if block_given?

        if options[:force] && table_exists?(table_name)
          drop_table(table_name, options)
        end

        hsh = td.columns.map { |c| {"name"=> c[:name], "type"=> type_to_sql(c[:type]) } }

        @table_body = { "tableReference"=> {
                          "projectId"=> @config[:project],
                          "datasetId"=> @config[:database],
                          "tableId"=> td.name},
                        "schema"=> [fields: hsh]
                      }

        res = BigBroda::Table.create(@config[:project], @config[:database], @table_body )

        raise res["error"]["errors"].map{|o| "[#{o['domain']}]: #{o['reason']} #{o['message']}" }.join(", ") if res["error"].present?
      end

      # See also Table for details on all of the various column transformation.
      def change_table(table_name, options = {})
        if supports_bulk_alter? && options[:bulk]
          recorder = ActiveRecord::Migration::CommandRecorder.new(self)
          yield update_table_definition(table_name, recorder)
          bulk_change_table(table_name, recorder.commands)
        else
          yield update_table_definition(table_name, self)
        end
      end

      # Renames a table.
      #
      # Example:
      #   rename_table('octopuses', 'octopi')
      def rename_table(table_name, new_name)
        raise Error::PendingFeature
      end

      # BigQuery cannot alter primary keys.
      def valid_alter_table_type?(type)
        type.to_sym != :primary_key
      end

      # Adds a column by patching the table schema with the existing columns
      # plus the new one (BigQuery schema patches must resend all fields).
      def add_column(table_name, column_name, type, options = {}) #:nodoc:
        if valid_alter_table_type?(type)

          hsh = table_name.classify.constantize.columns.map { |c| {"name"=> c.name, "type"=> c.cast_type } }
          hsh << {"name"=> column_name, :type=> type}
          fields = [ fields: hsh ]

          res = BigBroda::Table.patch(@config[:project], @config[:database], table_name,
                                      {"tableReference"=> {
                                         "projectId" => @config[:project],
                                         "datasetId" =>@config[:database],
                                         "tableId" => table_name },
                                       "schema" => fields,
                                       "description"=> "added from migration"} )
        else
          bypass_feature
        end
      end

      # Logs and prints a NotImplementedColumnOperation warning instead of
      # raising — used for every schema operation BigQuery cannot do.
      def bypass_feature
        begin
          raise Error::NotImplementedColumnOperation
        rescue => e
          puts e.message
          logger.warn(e.message)
        end
      end

      def remove_column(table_name, column_name, type = nil, options = {}) #:nodoc:
        bypass_feature
      end

      def change_column_default(table_name, column_name, default) #:nodoc:
        bypass_feature
      end

      def change_column_null(table_name, column_name, null, default = nil)
        bypass_feature
      end

      def change_column(table_name, column_name, type, options = {}) #:nodoc:
        bypass_feature
      end

      def rename_column(table_name, column_name, new_column_name) #:nodoc:
        bypass_feature
      end

      def add_reference(table_name, ref_name, options = {})
        polymorphic = options.delete(:polymorphic)
        index_options = options.delete(:index)
        add_column(table_name, "#{ref_name}_id", :string, options)
        add_column(table_name, "#{ref_name}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) if polymorphic
        add_index(table_name, polymorphic ? %w[id type].map{ |t| "#{ref_name}_#{t}" } : "#{ref_name}_id", index_options.is_a?(Hash) ? index_options : nil) if index_options
      end

      # FIX: create_table calls drop_table(table_name, options), but this
      # method only accepted one argument, raising ArgumentError whenever
      # `force: true` was used. Accept (and ignore) an options hash.
      def drop_table(table_name, options = {})
        BigBroda::Table.delete(@config[:project], @config[:database], table_name )
      end

      def dump_schema_information #:nodoc:
        bypass_feature
      end

      def assume_migrated_upto_version(version, migrations_paths = ActiveRecord::Migrator.migrations_paths)
        bypass_feature
      end


      protected

      # Registers BigQuery-specific binary/string casters on the type map.
      # FIX: removed stray debug `puts "INITILIZLIE TYPES MAP"`.
      def initialize_type_map(m)
        super
        m.register_type(/binary/i, BQBinary.new)
        register_class_with_limit m, %r(char)i, BQString
      end

      def select(sql, name = nil, binds = []) #:nodoc:
        exec_query(sql, name, binds)
      end

      # Fetches the raw schema fields for +table_name+, raising when the
      # table has no columns.
      def table_structure(table_name)
        structure = BigBroda::Table.get(@config[:project], @config[:database], table_name)["schema"]["fields"]
        raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
        structure
      end

      def alter_table(table_name, options = {}) #:nodoc:

      end

      def move_table(from, to, options = {}, &block) #:nodoc:
        copy_table(from, to, options, &block)
        drop_table(from)
      end

      def copy_table(from, to, options = {}) #:nodoc:

      end

      def copy_table_indexes(from, to, rename = {}) #:nodoc:

      end

      def copy_table_contents(from, to, columns, rename = {}) #:nodoc:

      end

      def create_table_definition(name, temporary, options)
        TableDefinition.new native_database_types, name, temporary, options
      end

    end

  end

end
|
@@ -1,17 +1,17 @@
|
|
1
|
-
module
|
1
|
+
module BigBroda
|
2
2
|
class Auth
|
3
3
|
|
4
4
|
attr_accessor :api, :client
|
5
5
|
cattr_accessor :api, :client
|
6
6
|
|
7
7
|
def initialize
|
8
|
-
@config =
|
8
|
+
@config = BigBroda::Config
|
9
9
|
@key = Google::APIClient::KeyUtils.load_from_pkcs12(@config.key_file, @config.pass_phrase)
|
10
10
|
@asserter = Google::APIClient::JWTAsserter.new( @config.email, @config.scope, @key)
|
11
11
|
end
|
12
12
|
|
13
13
|
def authorize
|
14
|
-
@client = Google::APIClient.new(application_name: "BigBroda", application_version:
|
14
|
+
@client = Google::APIClient.new(application_name: "BigBroda", application_version: BigBroda::VERSION )
|
15
15
|
@client.authorization = @asserter.authorize
|
16
16
|
@client.retries = @config.retries.to_i if @config.retries.to_i > 1
|
17
17
|
@api = @client.discovered_api('bigquery', 'v2')
|
@@ -1,12 +1,12 @@
|
|
1
|
-
module
|
1
|
+
module BigBroda
|
2
2
|
class Client
|
3
3
|
|
4
4
|
attr_accessor :options, :api, :client
|
5
5
|
|
6
6
|
def initialize(opts = {})
|
7
7
|
|
8
|
-
@api ||=
|
9
|
-
@client ||=
|
8
|
+
@api ||= BigBroda::Auth.api
|
9
|
+
@client ||= BigBroda::Auth.client
|
10
10
|
#@auth.authorize # check expiration and cache ?
|
11
11
|
|
12
12
|
self.tap do |client|
|