bigbroda 0.0.1
- checksums.yaml +7 -0
- data/.gitignore +25 -0
- data/.pryrc +3 -0
- data/.rspec +2 -0
- data/.rvmrc +1 -0
- data/Gemfile +18 -0
- data/LICENSE.txt +22 -0
- data/README.md +408 -0
- data/Rakefile +12 -0
- data/google_bigquery.gemspec +30 -0
- data/lib/.DS_Store +0 -0
- data/lib/active_record/.DS_Store +0 -0
- data/lib/active_record/connection_adapters/bigquery_adapter.rb +949 -0
- data/lib/active_record/tasks/bigquery_database_tasks.rb +42 -0
- data/lib/generators/.DS_Store +0 -0
- data/lib/generators/google_bigquery/.DS_Store +0 -0
- data/lib/generators/google_bigquery/install/install_generator.rb +21 -0
- data/lib/generators/templates/README +11 -0
- data/lib/generators/templates/bigquery.rb.erb +7 -0
- data/lib/google_bigquery/auth.rb +27 -0
- data/lib/google_bigquery/client.rb +52 -0
- data/lib/google_bigquery/config.rb +17 -0
- data/lib/google_bigquery/dataset.rb +77 -0
- data/lib/google_bigquery/engine.rb +21 -0
- data/lib/google_bigquery/jobs.rb +173 -0
- data/lib/google_bigquery/project.rb +16 -0
- data/lib/google_bigquery/railtie.rb +39 -0
- data/lib/google_bigquery/table.rb +63 -0
- data/lib/google_bigquery/table_data.rb +23 -0
- data/lib/google_bigquery/version.rb +3 -0
- data/lib/google_bigquery.rb +27 -0
- data/spec/.DS_Store +0 -0
- data/spec/dummy/.DS_Store +0 -0
- data/spec/dummy/.gitignore +20 -0
- data/spec/dummy/README.rdoc +261 -0
- data/spec/dummy/Rakefile +7 -0
- data/spec/dummy/app/assets/javascripts/application.js +16 -0
- data/spec/dummy/app/assets/stylesheets/application.css.scss +13 -0
- data/spec/dummy/app/controllers/application_controller.rb +4 -0
- data/spec/dummy/app/helpers/application_helper.rb +3 -0
- data/spec/dummy/app/mailers/.gitkeep +0 -0
- data/spec/dummy/app/models/log_data.rb +3 -0
- data/spec/dummy/app/models/post.rb +3 -0
- data/spec/dummy/app/models/user.rb +4 -0
- data/spec/dummy/app/views/layouts/application.html.haml +32 -0
- data/spec/dummy/config/application.rb +23 -0
- data/spec/dummy/config/boot.rb +11 -0
- data/spec/dummy/config/database.yml +32 -0
- data/spec/dummy/config/environment.rb +6 -0
- data/spec/dummy/config/environments/development.rb +29 -0
- data/spec/dummy/config/environments/production.rb +80 -0
- data/spec/dummy/config/environments/test.rb +36 -0
- data/spec/dummy/config/initializers/backtrace_silencers.rb +8 -0
- data/spec/dummy/config/initializers/bigquery.rb +19 -0
- data/spec/dummy/config/initializers/inflections.rb +16 -0
- data/spec/dummy/config/initializers/mime_types.rb +6 -0
- data/spec/dummy/config/initializers/secret_token.rb +8 -0
- data/spec/dummy/config/initializers/session_store.rb +9 -0
- data/spec/dummy/config/initializers/wrap_parameters.rb +15 -0
- data/spec/dummy/config/locales/devise.en.yml +58 -0
- data/spec/dummy/config/locales/en.yml +5 -0
- data/spec/dummy/config/locales/simple_form.en.yml +26 -0
- data/spec/dummy/config/routes.rb +4 -0
- data/spec/dummy/config.ru +4 -0
- data/spec/dummy/db/migrate/20140224051640_create_users.rb +11 -0
- data/spec/dummy/db/migrate/20140224063709_add_last_name_to_user.rb +5 -0
- data/spec/dummy/db/migrate/20140225014314_create_log_data.rb +12 -0
- data/spec/dummy/db/migrate/20140227015551_create_posts.rb +9 -0
- data/spec/dummy/db/schema.rb +39 -0
- data/spec/dummy/db/schema_migrations.json +1 -0
- data/spec/dummy/lib/assets/.gitkeep +0 -0
- data/spec/dummy/lib/templates/erb/scaffold/_form.html.erb +13 -0
- data/spec/dummy/log/.gitkeep +0 -0
- data/spec/dummy/public/404.html +26 -0
- data/spec/dummy/public/422.html +26 -0
- data/spec/dummy/public/500.html +25 -0
- data/spec/dummy/public/favicon.ico +0 -0
- data/spec/dummy/script/rails +6 -0
- data/spec/dummy/test/fixtures/log_data.yml +9 -0
- data/spec/dummy/test/fixtures/posts.yml +11 -0
- data/spec/dummy/test/fixtures/users.yml +11 -0
- data/spec/dummy/test/models/log_data_test.rb +7 -0
- data/spec/dummy/test/models/post_test.rb +7 -0
- data/spec/dummy/test/models/user_test.rb +7 -0
- data/spec/fixtures/.DS_Store +0 -0
- data/spec/fixtures/configs/account_config.yml-example +6 -0
- data/spec/fixtures/keys/.DS_Store +0 -0
- data/spec/fixtures/keys/example-privatekey-p12 +0 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/.DS_Store +0 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/adapter/simple_quering.yml +324 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/after_each.yml +154 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/authorize_config.yml +367 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/create_each.yml +195 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/migrations/_down/adds_the_email_at_utc_hour_column.yml +575 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/migrations/_up/adds_the_created_at_updated_at_column.yml +644 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/migrations/add_column/adds_published_column.yml +779 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/migrations/associations/users_posts.yml +1464 -0
- data/spec/fixtures/vcr_cassettes/ActiveRecord_Adapter/migrations/remove_column/should_raise_error.yml +713 -0
- data/spec/fixtures/vcr_cassettes/Dataset/_list.yml +64 -0
- data/spec/fixtures/vcr_cassettes/Dataset/authorize_config.yml +367 -0
- data/spec/fixtures/vcr_cassettes/Dataset/operations/_get_delete.yml +237 -0
- data/spec/fixtures/vcr_cassettes/Dataset/operations/_patch_delete.yml +240 -0
- data/spec/fixtures/vcr_cassettes/Dataset/operations/_update_delete.yml +297 -0
- data/spec/fixtures/vcr_cassettes/Dataset/operations/create_delete.yml +173 -0
- data/spec/fixtures/vcr_cassettes/Project/_list.yml +64 -0
- data/spec/fixtures/vcr_cassettes/Project/authorize_config.yml +2166 -0
- data/spec/fixtures/vcr_cassettes/Table/authorize_config.yml +367 -0
- data/spec/fixtures/vcr_cassettes/Table/operations/creation_edition/_create_delete.yml +404 -0
- data/spec/fixtures/vcr_cassettes/Table/operations/creation_edition/_create_update_delete.yml +471 -0
- data/spec/fixtures/vcr_cassettes/Table/operations/list.yml +232 -0
- data/spec/fixtures/vcr_cassettes/TableData/authorize_config.yml +2166 -0
- data/spec/fixtures/vcr_cassettes/TableData/create_each.yml +135 -0
- data/spec/fixtures/vcr_cassettes/TableData/delete_each.yml +154 -0
- data/spec/fixtures/vcr_cassettes/TableData/insertAll2.yml +189 -0
- data/spec/fixtures/vcr_cassettes/auth.yml +2168 -0
- data/spec/fixtures/vcr_cassettes/authorize_config.yml +2166 -0
- data/spec/fixtures/vcr_cassettes/datasets.yml +119 -0
- data/spec/fixtures/vcr_cassettes/delete_each_dataset.yml +48 -0
- data/spec/functional/adapter/adapter_spec.rb +213 -0
- data/spec/functional/auth_spec.rb +24 -0
- data/spec/functional/client_spec.rb +9 -0
- data/spec/functional/config_spec.rb +24 -0
- data/spec/functional/dataset_spec.rb +77 -0
- data/spec/functional/project_spec.rb +24 -0
- data/spec/functional/table_data_spec.rb +61 -0
- data/spec/functional/table_spec.rb +105 -0
- data/spec/models/user_spec.rb +0 -0
- data/spec/spec_helper.rb +48 -0
- data/spec/support/models.rb +11 -0
- data/spec/support/schema.rb +43 -0
- metadata +370 -0
data/lib/active_record/connection_adapters/bigquery_adapter.rb
@@ -0,0 +1,949 @@
require 'active_record/connection_adapters/abstract_adapter'
require 'active_record/connection_adapters/statement_pool'
require 'arel/visitors/bind_visitor'

module ActiveRecord

  module Error
    class Standard < StandardError; end

    class NotImplementedFeature < Standard
      def message
        "This adapter doesn't support updating single rows; Google BigQuery is append-only by design"
      end
    end

    class NotImplementedColumnOperation < Standard
      def message
        "Google BigQuery doesn't allow this column operation"
      end
    end

    class PendingFeature < Standard
      def message
        "Sorry, this is a pending feature; it will be implemented soon."
      end
    end
  end

  module ConnectionHandling # :nodoc:
    # The bigquery adapter reuses GoogleBigquery::Auth.
    def bigquery_connection(config)

      # Require a database (the BigQuery dataset).
      unless config[:database]
        raise ArgumentError, "No database file specified. Missing argument: database"
      end
      db = GoogleBigquery::Auth.authorized? ? GoogleBigquery::Auth.client : GoogleBigquery::Auth.new.authorize
      # db -- maybe this should be auth.api or auth.client

      # In case we are using the bigquery adapter as the standard config in
      # database.yml, all models are BigQuery enabled.
      ActiveRecord::Base.send :include, ActiveRecord::BigQueryPersistence
      ActiveRecord::SchemaMigration.send :include, ActiveRecord::BigQuerySchemaMigration
      ActiveRecord::Migrator.send :include, ActiveRecord::BigQueryMigrator
      ActiveRecord::Relation.send :include, ActiveRecord::BigQueryRelation
      ActiveRecord::Base.send :include, ActiveRecord::BigQuerying
      #db.busy_timeout(ConnectionAdapters::SQLite3Adapter.type_cast_config_to_integer(config[:timeout])) if config[:timeout]
      ConnectionAdapters::BigqueryAdapter.new(db, logger, config)
    rescue => e
      raise e
      #Errno::ENOENT => error
      #if error.message.include?("No such file or directory")
      #  raise ActiveRecord::NoDatabaseError.new(error.message)
      #else
      #  raise error
      #end
    end
  end
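
  # Illustrative database.yml entry (a sketch, not taken from the gem's docs;
  # :project and :database are the keys this adapter actually reads, and
  # GoogleBigquery::Auth is expected to be configured separately):
  #
  #   development:
  #     adapter: bigquery
  #     project: your-google-project-id
  #     database: your_dataset_name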

  module BQConnector
    extend ActiveSupport::Concern

    module ClassMethods
      def establish_bq_connection(path)
        self.send :include, ActiveRecord::BigQueryPersistence
        establish_connection path
      end
    end
  end

  ActiveRecord::Base.send :include, BQConnector
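
  # Hypothetical per-model usage sketch: establish_bq_connection mixes
  # BigQueryPersistence into one model and hands the spec on to
  # establish_connection, so only that model talks to BigQuery:
  #
  #   class LogData < ActiveRecord::Base
  #     establish_bq_connection "bigquery_#{Rails.env}"
  #   end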

  # = Active Record Persistence
  module BigQueryPersistence
    extend ActiveSupport::Concern

    def delete
      raise Error::NotImplementedFeature
    end

    module ClassMethods
    end

    private

    # Creates a record with values matching those of the instance attributes
    # and returns its id.
    def create_record(attribute_names = @attributes.keys)
      record_timestamps_hardcoded
      attributes_values = self.changes.values.map(&:last)

      row_hash = Hash[ [ self.changes.keys, attributes_values ].transpose ]
      new_id = SecureRandom.hex
      @rows = { "rows" => [{
                  "insertId" => Time.now.to_i.to_s,
                  "json"     => row_hash.merge("id" => new_id)
                }]
              }
      conn_cfg = self.class.connection_config
      result = GoogleBigquery::TableData.create(conn_cfg[:project],
                                                conn_cfg[:database],
                                                self.class.table_name,
                                                @rows)

      #raise result["error"]["errors"].map{|o| "[#{o['domain']}]: #{o['reason']} #{o['message']}" }.join(", ") if result["error"].present?
      # We return the in-memory id here, because of the BigQuery insert latency.
      self.id = new_id #||= new_id if self.class.primary_key

      @new_record = false
      id
    end

    # Partially copied from ActiveRecord::Timestamp.
    def record_timestamps_hardcoded
      if self.record_timestamps
        current_time = current_time_from_proper_timezone

        all_timestamp_attributes.each do |column|
          if respond_to?(column) && respond_to?("#{column}=") && self.send(column).nil?
            write_attribute(column.to_s, current_time)
          end
        end
      end
    end

    # DISABLED FEATURE: Google BigQuery is append-only by design.
    def update_record(attribute_names = @attributes.keys)
      raise Error::NotImplementedFeature
    end
  end
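
  # What the persistence rules above mean in practice (illustrative sketch,
  # model name hypothetical):
  #
  #   user = User.create(name: "Frank") # streams one row via tabledata.insertAll
  #   user.id                           # => in-memory SecureRandom.hex value
  #   user.update(name: "F.")           # raises Error::NotImplementedFeature
  #   user.delete                       # raises Error::NotImplementedFeature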

  # = Active Record Querying
  module BigQuerying
    def find_by_sql(sql, binds = [])
      cfg = ActiveRecord::Base.connection_config
      result_set = connection.select_all(sanitize_sql(sql), "#{name} Load", binds)
      column_types = {}

      if result_set.respond_to? :column_types
        column_types = result_set.column_types
      else
        ActiveSupport::Deprecation.warn "the object returned from `select_all` must respond to `column_types`"
      end
      # When an AR BigQuery query uses joins, the fields come back as
      # [database.table].field, so we clean the class columns here to
      # initialize the record properly.
      # "whoa1393194159_users_id".gsub(/#{@config[:database]}_#{self.table_name}_/, "")
      result_set.instance_variable_set("@columns", result_set.columns.map{|o| o.gsub(/#{cfg[:database]}_#{self.table_name}_/, "") })

      result_set.map { |record| instantiate(record, column_types) }
    end
  end

  # = Active Record Relation
  module BigQueryRelation

    def self.included base
      base.class_eval do
        def delete(id_or_array)
          raise Error::NotImplementedFeature
        end

        def update(id, attributes)
          raise Error::NotImplementedFeature
        end

        def destroy_all(conditions = nil)
          raise Error::NotImplementedFeature
        end

        def destroy(id)
          raise Error::NotImplementedFeature
        end

        def delete_all(conditions = nil)
          raise Error::NotImplementedFeature
        end

        def update_all(updates)
          raise Error::NotImplementedFeature
        end
      end
    end
  end

  module BigQuerySchemaMigration

    def self.included base
      attr_accessor :migration_file_pwd
      base.instance_eval do
        def schema_migration_hash
          file = schema_migration_file("r")
          json = JSON.parse(file.read)
        end

        def schema_migration_path
          Dir.pwd + "/db/schema_migrations.json"
        end

        def schema_migration_file(mode = "w+")
          file_pwd = Dir.pwd + "/db/schema_migrations.json"
          File.open(file_pwd, mode)
        end

        def create_table(limit = nil)
          @migration_file_pwd = Dir.pwd + "/db/schema_migrations.json"
          unless File.exists?(@migration_file_pwd)
            puts "SCHEMA MIGRATION HERE"
            version_options = { null: false }
            version_options[:limit] = limit if limit

            #connection.create_table(table_name, id: false) do |t|
            #  t.column :version, :string, version_options
            #end
            file = schema_migration_file
            file.puts({ db: { table_name.to_sym => [] } }.to_json)
            file.close
            #connection.add_index table_name, :version, unique: true, name: index_name
          end
        end

        #def self.drop_table
        #  binding.pry
        #  File.delete(schema_migration_path)
        #end

        def delete_version(options)
          #versions = ActiveRecord::SchemaMigration.where(:version => version.to_s)
          version = options[:version]
          new_data = SchemaMigration.schema_migration_hash["db"]["schema_migrations"].delete_if{|o| o["version"] == version.to_s}
          hsh = { :db => { :schema_migrations => new_data } }
          f = schema_migration_file
          f.puts hsh.to_json
          f.close
        end

        def create!(args, *opts)
          current_data = schema_migration_hash
          unless schema_migration_hash["db"]["schema_migrations"].map{|o| o["version"]}.include?(args[:version].to_s)
            hsh = { :db => { :schema_migrations => current_data["db"]["schema_migrations"] << args } }
            f = schema_migration_file
            f.puts hsh.to_json
            f.close
          end
          true
        end

        def all
          schema_migration_hash["db"]["schema_migrations"]
        end

        def where(args)
          all.select{|o| o[args.keys.first.to_s] == args.values.first}
        end
      end
    end
  end
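
  # Shape of db/schema_migrations.json as written by create_table and create!
  # above (version value illustrative):
  #
  #   {"db":{"schema_migrations":[{"version":"20140224051640"}]}}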

  module BigQueryMigrator

    def self.included base
      # Overload class methods.
      base.instance_eval do
        def get_all_versions
          SchemaMigration.all.map { |x| x["version"].to_i }.sort
        end

        def current_version
          sm_table = schema_migrations_table_name
          migration_file_pwd = Dir.pwd + "/db/schema_migrations.json"

          if File.exists?(migration_file_pwd)
            get_all_versions.max || 0
          else
            0
          end
        end

        def needs_migration?
          current_version < last_version
        end

        def last_version
          get_all_versions.min.to_i
          #last_migration.version
        end

        def last_migration #:nodoc:
          migrations(migrations_paths).last || NullMigration.new
        end

      end
      # Overload instance methods.
      base.class_eval do
        def current_version
          migrated.max || 0
        end

        def current_migration
          migrations.detect { |m| m["version"] == current_version }
        end

        #def migrated
        #  @migrated_versions ||= Set.new(self.class.get_all_versions)
        #end

        private

        def record_version_state_after_migrating(version)

          if down?
            migrated.delete(version)
            ActiveRecord::SchemaMigration.delete_version(:version => version.to_s)
          else
            migrated << version
            ActiveRecord::SchemaMigration.create!(:version => version.to_s)
          end
        end
      end
    end

    #alias :current :current_migration
  end

  module LoadOperations
    extend ActiveSupport::Concern

    module ClassMethods
      def bigquery_export(bucket_location = nil)
        bucket_location = bucket_location.nil? ? "#{table_name}.json" : bucket_location
        cfg = connection_config
        GoogleBigquery::Jobs.export(cfg[:project],
                                    cfg[:database],
                                    table_name,
                                    "#{cfg[:database]}/#{bucket_location}")
      end

      def bigquery_load(bucket_location = [])
        cfg = connection_config
        bucket_location = bucket_location.empty? ? ["#{cfg[:database]}/#{table_name}.json"] : bucket_location
        fields = columns.map{|o| { name: o.name, type: o.type } }
        GoogleBigquery::Jobs.load(cfg[:project],
                                  cfg[:database],
                                  table_name,
                                  bucket_location,
                                  fields)
      end

      def bigquery_import()
      end
    end
  end

  ActiveRecord::Base.send :include, LoadOperations
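
  # Illustrative usage of the load operations above (bucket paths are the
  # defaults built from the dataset and table names; model hypothetical):
  #
  #   User.bigquery_export  # exports the table to "<dataset>/users.json"
  #   User.bigquery_load    # loads "<dataset>/users.json" back into the table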

  module ConnectionAdapters

    class BigqueryColumn < Column
      class << self
        TRUE_VALUES  = [true, 1, '1', 'true', 'TRUE'].to_set
        FALSE_VALUES = [false, 0, '0', 'false', 'FALSE'].to_set

        def binary_to_string(value)
          if value.encoding != Encoding::ASCII_8BIT
            value = value.force_encoding(Encoding::ASCII_8BIT)
          end
          value
        end

        def string_to_time(string)
          return string unless string.is_a?(String)
          return nil if string.empty?
          fast_string_to_time(string) || fallback_string_to_time(string) || Time.at(string.to_f).send(Base.default_timezone)
        end
      end
    end

    class BigqueryAdapter < AbstractAdapter

      class Version
      end

      class ColumnDefinition < ActiveRecord::ConnectionAdapters::ColumnDefinition
        attr_accessor :array
      end

      class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition

        def primary_key(name, type = :primary_key, options = {})
          return column name, :string, options
        end

        def record(*args)
          options = args.extract_options!
          column(:created_at, :record, options)
        end

        def timestamps(*args)
          options = args.extract_options!
          column(:created_at, :timestamp, options)
          column(:updated_at, :timestamp, options)
        end

        def references(*args)
          options = args.extract_options!
          polymorphic = options.delete(:polymorphic)
          index_options = options.delete(:index)
          args.each do |col|
            column("#{col}_id", :string, options)
            column("#{col}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) if polymorphic
            index(polymorphic ? %w(id type).map { |t| "#{col}_#{t}" } : "#{col}_id", index_options.is_a?(Hash) ? index_options : {}) if index_options
          end
        end

      end

      class StatementPool < ConnectionAdapters::StatementPool
        def initialize(connection, max)
          super
          @cache = Hash.new { |h, pid| h[pid] = {} }
        end

        def each(&block); cache.each(&block); end
        def key?(key);    cache.key?(key); end
        def [](key);      cache[key]; end
        def length;       cache.length; end

        def []=(sql, key)
          while @max <= cache.size
            dealloc(cache.shift.last[:stmt])
          end
          cache[sql] = key
        end

        def clear
          cache.values.each do |hash|
            dealloc hash[:stmt]
          end
          cache.clear
        end

        private

        def cache
          @cache[$$]
        end

        def dealloc(stmt)
          stmt.close unless stmt.closed?
        end
      end

      class BindSubstitution < Arel::Visitors::SQLite # :nodoc:
        include Arel::Visitors::BindVisitor
      end

      def initialize(connection, logger, config)
        super(connection, logger)

        @active = nil
        @statements = StatementPool.new(@connection,
                                        self.class.type_cast_config_to_integer(config.fetch(:statement_limit) { 1000 }))
        @config = config

        #if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
        #  @prepared_statements = true
        #  @visitor = Arel::Visitors::SQLite.new self
        #else
        # Use plain SQL without prepared statements; BigQuery doesn't support them.
        @visitor = unprepared_visitor
      end

      def adapter_name #:nodoc:
        'BigQuery'
      end

      def supports_ddl_transactions?
        false
      end

      def supports_savepoints?
        false
      end

      def supports_partial_index?
        true
      end

      # Returns false, since this connection adapter doesn't support prepared
      # statement caching.
      def supports_statement_cache?
        false
      end

      # Returns true, since this connection adapter supports migrations.
      def supports_migrations? #:nodoc:
        true
      end

      def supports_primary_key? #:nodoc:
        true
      end

      def requires_reloading?
        false
      end

      def supports_add_column?
        true
      end

      def active?
        @active != false
      end

      # Disconnects from the database if already connected. Otherwise, this
      # method does nothing.
      def disconnect!
        super
        @active = false
        @connection.close rescue nil
      end

      # Clears the prepared statements cache.
      def clear_cache!
        @statements.clear
      end

      def supports_index_sort_order?
        true
      end

      # Returns true.
      def supports_count_distinct? #:nodoc:
        true
      end

      # Returns false.
      def supports_autoincrement? #:nodoc:
        false
      end

      # NOTE: redefines supports_index_sort_order? above; this later
      # definition (false) is the one Ruby keeps.
      def supports_index_sort_order?
        false
      end

      # Returns 62. SQLite supports index names up to 64
      # characters. The rest is used by Rails internally to perform
      # temporary rename operations.
      def allowed_index_name_length
        index_name_length - 2
      end

      def default_primary_key_type
        if supports_autoincrement?
          'STRING'
        else
          'STRING'
        end
      end

      def native_database_types #:nodoc:
        {
          :primary_key => default_primary_key_type,
          :string      => { :name => "STRING", :default => nil },
          #:text       => { :name => "text" },
          :integer     => { :name => "INTEGER", :default => nil },
          :float       => { :name => "FLOAT", :default => 0.0 },
          #:decimal    => { :name => "decimal" },
          :datetime    => { :name => "TIMESTAMP" },
          #:timestamp  => { :name => "datetime" },
          :timestamp   => { name: "TIMESTAMP" },
          #:time       => { :name => "time" },
          #:date       => { :name => "date" },
          :record      => { :name => "RECORD" },
          :boolean     => { :name => "BOOLEAN" }
        }
      end

      # Returns the current database encoding format as a string, e.g. 'UTF-8'.
      def encoding
        @connection.encoding.to_s
      end

      # Returns false.
      def supports_explain?
        false
      end

      def create_database(database)
        result = GoogleBigquery::Dataset.create(@config[:project],
                                                { "datasetReference" => { "datasetId" => database } })
        result
      end

      def drop_database(database)
        tables = GoogleBigquery::Table.list(@config[:project], database)["tables"]
        unless tables.blank?
          tables.map!{|o| o["tableReference"]["tableId"]}
          tables.each do |table_id|
            GoogleBigquery::Table.delete(@config[:project], database, table_id)
          end
        end
        result = GoogleBigquery::Dataset.delete(@config[:project], database)
        result
      end
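
      # Illustrative sketch: a BigQuery "database" here is a dataset, so the
      # two methods above map onto the datasets API (names hypothetical):
      #
      #   conn = ActiveRecord::Base.connection
      #   conn.create_database("my_dataset") # datasets insert
      #   conn.drop_database("my_dataset")   # deletes each table, then the dataset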

      # QUOTING ==================================================

      def quote(value, column = nil)
        if value.kind_of?(String) && column && column.type == :binary && column.class.respond_to?(:string_to_binary)
          s = column.class.string_to_binary(value).unpack("H*")[0]
          "x'#{s}'"
        else
          super
        end
      end

      def quote_table_name(name)
        "#{@config[:database]}.#{name}"
      end

      def quote_table_name_for_assignment(table, attr)
        quote_column_name(attr)
      end

      def quote_column_name(name) #:nodoc:
        name
      end

      # Quote date/time values for use in SQL input. Includes microseconds
      # if the value is a Time responding to usec.
      def quoted_date(value) #:nodoc:
        if value.respond_to?(:usec)
          "#{super}.#{sprintf("%06d", value.usec)}"
        else
          super
        end
      end

      def quoted_true
        "1"
      end

      def quoted_false
        "0"
      end

      def type_cast(value, column) # :nodoc:
        return value.to_f if BigDecimal === value
        return super unless String === value
        return super unless column && value

        value = super
        if column.type == :string && value.encoding == Encoding::ASCII_8BIT
          logger.error "Binary data inserted for `string` type on column `#{column.name}`" if logger
          value = value.encode Encoding::UTF_8
        end
        value
      end

      # DATABASE STATEMENTS ======================================

      def explain(arel, binds = [])
        bypass_feature
      end

      class ExplainPrettyPrinter
        # Pretty prints the result of an EXPLAIN QUERY PLAN in a way that
        # resembles the output of the SQLite shell:
        #
        #   0|0|0|SEARCH TABLE users USING INTEGER PRIMARY KEY (rowid=?) (~1 rows)
        #   0|1|1|SCAN TABLE posts (~100000 rows)
        #
        def pp(result) # :nodoc:
          result.rows.map do |row|
            row.join('|')
          end.join("\n") + "\n"
        end
      end

      def exec_query(sql, name = nil, binds = [])
        log(sql, name, binds) do

          # Don't cache statements if they are not prepared.
          if without_prepared_statement?(binds)
            result  = GoogleBigquery::Jobs.query(@config[:project], { "query" => sql })
            cols    = result["schema"]["fields"].map{|o| o["name"] }
            records = result["totalRows"].to_i.zero? ? [] : result["rows"].map{|o| o["f"].map{|k,v| k["v"]} }
            stmt    = records
          else
            #binding.pry
            # BigQuery does not support prepared statements.
          end

          ActiveRecord::Result.new(cols, stmt)
        end
      end
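
      # Illustrative sketch: every read goes through a synchronous jobs.query
      # call and gets wrapped in an ActiveRecord::Result (names hypothetical):
      #
      #   conn.exec_query("SELECT id, name FROM my_dataset.users")
      #   # => ActiveRecord::Result with columns ["id", "name"]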

      def exec_delete(sql, name = 'SQL', binds = [])
        exec_query(sql, name, binds)
        @connection.changes
      end

      alias :exec_update :exec_delete

      def last_inserted_id(result)
        @connection.last_insert_row_id
      end

      def execute(sql, name = nil) #:nodoc:
        log(sql, name) { @connection.execute(sql) }
      end

      def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
        super
        id_value || @connection.last_insert_row_id
      end
      alias :create :insert_sql

      def select_rows(sql, name = nil)
        exec_query(sql, name).rows
      end

      def begin_db_transaction #:nodoc:
        log('begin transaction', nil) { } #@connection.transaction
      end

      def commit_db_transaction #:nodoc:
        log('commit transaction', nil) { } #@connection.commit
      end

      def rollback_db_transaction #:nodoc:
        log('rollback transaction', nil) { } #@connection.rollback
      end

      # SCHEMA STATEMENTS ========================================

      def tables(name = nil, table_name = nil) #:nodoc:
        table = GoogleBigquery::Table.list(@config[:project], @config[:database])
        return [] if table["tables"].blank?
        table_names = table["tables"].map{|o| o["tableReference"]["tableId"]}
        table_names = table_names.select{|o| o == table_name } if table_name
        table_names
      end

      def table_exists?(table_name)
        table_name && tables(nil, table_name).any?
      end

      # Returns an array of +BigqueryColumn+ objects for the table specified by +table_name+.
      def columns(table_name) #:nodoc:
        schema = GoogleBigquery::Table.get(@config[:project], @config[:database], table_name)
        schema["schema"]["fields"].map do |field|
          mode = field['mode'].present? && field['mode'] == "REQUIRED" ? false : true
          # Column expects (name, default, sql_type = nil, null = true).
          BigqueryColumn.new(field['name'], nil, field['type'], mode)
        end
      end

      # Returns an array of indexes for the given table.
      def indexes(table_name, name = nil) #:nodoc:
        []
      end

      def primary_key(table_name) #:nodoc:
        "id"
      end

      def remove_index!(table_name, index_name) #:nodoc:
        #exec_query "DROP INDEX #{quote_column_name(index_name)}"
      end

      # NOTE: redefined below; the later definition, which patches the table
      # schema through the BigQuery API, is the one Ruby keeps.
      def add_column(table_name, column_name, type, options = {}) #:nodoc:
        if supports_add_column? && valid_alter_table_options(type, options)
          super(table_name, column_name, type, options)
        else
          alter_table(table_name) do |definition|
            definition.column(column_name, type, options)
          end
        end
      end

      # See also TableDefinition#column for details on how to create columns.
      def create_table(table_name, options = {})
        td = create_table_definition table_name, options[:temporary], options[:options]

        unless options[:id] == false
          pk = options.fetch(:primary_key) {
            Base.get_primary_key table_name.to_s.singularize
          }

          td.primary_key pk, options.fetch(:id, :primary_key), options
        end

        yield td if block_given?

        if options[:force] && table_exists?(table_name)
          drop_table(table_name, options)
        end

        hsh = td.columns.map { |c| { "name" => c[:name], "type" => c[:type] } }

        @table_body = { "tableReference" => {
                          "projectId" => @config[:project],
                          "datasetId" => @config[:database],
                          "tableId"   => td.name },
                        "schema" => [fields: hsh]
                      }

        res = GoogleBigquery::Table.create(@config[:project], @config[:database], @table_body)

        raise res["error"]["errors"].map{|o| "[#{o['domain']}]: #{o['reason']} #{o['message']}" }.join(", ") if res["error"].present?
      end
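
      # Illustrative migration sketch (types restricted to the mapping in
      # native_database_types above; table and column names hypothetical):
      #
      #   create_table :log_data do |t|
      #     t.string  :name
      #     t.integer :value
      #     t.timestamps
      #   end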

      # See also Table for details on all of the various column transformations.
      def change_table(table_name, options = {})
        if supports_bulk_alter? && options[:bulk]
          recorder = ActiveRecord::Migration::CommandRecorder.new(self)
          yield update_table_definition(table_name, recorder)
          bulk_change_table(table_name, recorder.commands)
        else
          yield update_table_definition(table_name, self)
        end
      end

      # Renames a table.
      #
      # Example:
      #   rename_table('octopuses', 'octopi')
      def rename_table(table_name, new_name)
        raise Error::PendingFeature
      end

      # See: http://www.sqlite.org/lang_altertable.html
      # SQLite has an additional restriction on the ALTER TABLE statement.
      def valid_alter_table_options(type, options)
        type.to_sym != :primary_key
      end

      def add_column(table_name, column_name, type, options = {}) #:nodoc:

        if supports_add_column? && valid_alter_table_options(type, options)

          hsh = table_name.classify.constantize.columns.map { |c| { "name" => c.name, "type" => c.type } }
          hsh << { "name" => column_name, :type => type }
          fields = [ fields: hsh ]

          res = GoogleBigquery::Table.patch(@config[:project], @config[:database], table_name,
                                            { "tableReference" => {
                                                "projectId" => @config[:project],
                                                "datasetId" => @config[:database],
                                                "tableId"   => table_name },
                                              "schema"      => fields,
                                              "description" => "added from migration" })

        else
          bypass_feature
        end
      end

      def bypass_feature
        begin
          raise Error::NotImplementedColumnOperation
        rescue => e
          puts e.message
          logger.warn(e.message)
        end
      end

      def remove_column(table_name, column_name, type = nil, options = {}) #:nodoc:
        bypass_feature
      end

      def change_column_default(table_name, column_name, default) #:nodoc:
        bypass_feature
      end

      def change_column_null(table_name, column_name, null, default = nil)
        bypass_feature
      end

      def change_column(table_name, column_name, type, options = {}) #:nodoc:
        bypass_feature
      end

      def rename_column(table_name, column_name, new_column_name) #:nodoc:
        bypass_feature
      end

      def add_reference(table_name, ref_name, options = {})
        polymorphic = options.delete(:polymorphic)
        index_options = options.delete(:index)
        add_column(table_name, "#{ref_name}_id", :string, options)
        add_column(table_name, "#{ref_name}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) if polymorphic
        add_index(table_name, polymorphic ? %w[id type].map{ |t| "#{ref_name}_#{t}" } : "#{ref_name}_id", index_options.is_a?(Hash) ? index_options : nil) if index_options
      end

      # Accepts an options hash so create_table(force: true) can call it.
      def drop_table(table_name, options = {})
        GoogleBigquery::Table.delete(@config[:project], @config[:database], table_name)
      end

      def dump_schema_information #:nodoc:
        bypass_feature
      end

      def assume_migrated_upto_version(version, migrations_paths = ActiveRecord::Migrator.migrations_paths)
        bypass_feature
      end

      protected

      def select(sql, name = nil, binds = []) #:nodoc:
        exec_query(sql, name, binds)
      end

      def table_structure(table_name)
        structure = GoogleBigquery::Table.get(@config[:project], @config[:database], table_name)["schema"]["fields"]
        raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
        structure
      end

      def alter_table(table_name, options = {}) #:nodoc:
      end

      def move_table(from, to, options = {}, &block) #:nodoc:
        copy_table(from, to, options, &block)
        drop_table(from)
      end

      def copy_table(from, to, options = {}) #:nodoc:
      end

      def copy_table_indexes(from, to, rename = {}) #:nodoc:
      end

      def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
      end

      def create_table_definition(name, temporary, options)
        TableDefinition.new native_database_types, name, temporary, options
      end

    end

  end

end