sbf-dm-migrations 1.3.0.beta
- checksums.yaml +7 -0
- data/.gitignore +38 -0
- data/.rspec +1 -0
- data/.rubocop.yml +468 -0
- data/.travis.yml +52 -0
- data/Gemfile +61 -0
- data/LICENSE +20 -0
- data/README.rdoc +39 -0
- data/Rakefile +4 -0
- data/db/migrations/1_create_people_table.rb +12 -0
- data/db/migrations/2_add_dob_to_people.rb +13 -0
- data/db/migrations/config.rb +4 -0
- data/dm-migrations.gemspec +20 -0
- data/examples/Rakefile +149 -0
- data/examples/sample_migration.rb +58 -0
- data/examples/sample_migration_spec.rb +46 -0
- data/lib/dm-migrations/adapters/dm-do-adapter.rb +304 -0
- data/lib/dm-migrations/adapters/dm-mysql-adapter.rb +306 -0
- data/lib/dm-migrations/adapters/dm-oracle-adapter.rb +339 -0
- data/lib/dm-migrations/adapters/dm-postgres-adapter.rb +152 -0
- data/lib/dm-migrations/adapters/dm-sqlite-adapter.rb +88 -0
- data/lib/dm-migrations/adapters/dm-sqlserver-adapter.rb +184 -0
- data/lib/dm-migrations/adapters/dm-yaml-adapter.rb +21 -0
- data/lib/dm-migrations/auto_migration.rb +227 -0
- data/lib/dm-migrations/exceptions/duplicate_migration.rb +6 -0
- data/lib/dm-migrations/migration.rb +323 -0
- data/lib/dm-migrations/migration_runner.rb +76 -0
- data/lib/dm-migrations/sql/column.rb +5 -0
- data/lib/dm-migrations/sql/mysql.rb +84 -0
- data/lib/dm-migrations/sql/oracle.rb +9 -0
- data/lib/dm-migrations/sql/postgres.rb +89 -0
- data/lib/dm-migrations/sql/sqlite.rb +59 -0
- data/lib/dm-migrations/sql/sqlserver.rb +9 -0
- data/lib/dm-migrations/sql/table.rb +15 -0
- data/lib/dm-migrations/sql/table_creator.rb +105 -0
- data/lib/dm-migrations/sql/table_modifier.rb +57 -0
- data/lib/dm-migrations/sql.rb +7 -0
- data/lib/dm-migrations/version.rb +5 -0
- data/lib/dm-migrations.rb +3 -0
- data/lib/spec/example/migration_example_group.rb +69 -0
- data/lib/spec/matchers/migration_matchers.rb +96 -0
- data/spec/integration/auto_migration_spec.rb +590 -0
- data/spec/integration/auto_upgrade_spec.rb +41 -0
- data/spec/integration/migration_runner_spec.rb +84 -0
- data/spec/integration/migration_spec.rb +156 -0
- data/spec/integration/sql_spec.rb +290 -0
- data/spec/isolated/require_after_setup_spec.rb +24 -0
- data/spec/isolated/require_before_setup_spec.rb +24 -0
- data/spec/isolated/require_spec.rb +23 -0
- data/spec/spec_helper.rb +16 -0
- data/spec/unit/migration_spec.rb +501 -0
- data/spec/unit/sql/column_spec.rb +14 -0
- data/spec/unit/sql/postgres_spec.rb +90 -0
- data/spec/unit/sql/sqlite_extensions_spec.rb +103 -0
- data/spec/unit/sql/table_creator_spec.rb +91 -0
- data/spec/unit/sql/table_modifier_spec.rb +47 -0
- data/spec/unit/sql/table_spec.rb +26 -0
- data/spec/unit/sql_spec.rb +7 -0
- data/tasks/spec.rake +21 -0
- data/tasks/yard.rake +9 -0
- data/tasks/yardstick.rake +19 -0
- metadata +120 -0
data/dm-migrations.gemspec
ADDED
@@ -0,0 +1,20 @@
+require File.expand_path('../lib/dm-migrations/version', __FILE__)
+
+Gem::Specification.new do |gem|
+  gem.authors     = ['Dan Kubb']
+  gem.email       = ['dan.kubb@gmail.com']
+  gem.summary     = 'DataMapper plugin for writing and spec-ing migrations'
+  gem.description = 'DataMapper plugin for modifying and maintaining database structure, triggers, stored procedures, and data'
+  gem.homepage    = 'https://datamapper.org'
+  gem.license     = 'Nonstandard'
+
+  gem.files            = `git ls-files`.split("\n")
+  gem.extra_rdoc_files = %w(LICENSE README.rdoc)
+
+  gem.name          = 'sbf-dm-migrations'
+  gem.require_paths = ['lib']
+  gem.version       = DataMapper::Migrations::VERSION
+  gem.required_ruby_version = '>= 2.7.8'
+
+  gem.add_runtime_dependency('sbf-dm-core', '~> 1.3.0.beta')
+end
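To depend on this release from an application, a Gemfile entry along these lines should work (a sketch; the constraint mirrors the gemspec's own dependency style, and prerelease versions are never picked up implicitly):

    # Gemfile -- illustrative constraint only
    gem 'sbf-dm-migrations', '~> 1.3.0.beta'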
data/examples/Rakefile
ADDED
@@ -0,0 +1,149 @@
+# Sample tasks using dm-migrations
+# Roughly following Rails conventions, and mostly based on Padrino's dm:* tasks
+#
+# Cf. https://github.com/padrino/padrino-framework/blob/master/padrino-gen/lib/padrino-gen/padrino-tasks/datamapper.rb
+#     https://github.com/firespring/dm-rails/blob/master/lib/dm-rails/railties/database.rake
+#
+
+require 'rake'
+
+# replace this with however your app configures DataMapper repositor(ies)
+task :environment do
+  require File.expand_path('boot', File.dirname(__FILE__))
+end
+
+namespace :db do
+
+  namespace :auto do
+
+    desc 'Perform auto-migration (reset your db data)'
+    task migrate: :environment do |t, _|
+      puts '=> Auto-migrating'
+      DataMapper.auto_migrate!
+      puts "<= #{t.name} done"
+    end
+
+    desc 'Perform non destructive auto-migration'
+    task upgrade: :environment do |t, _|
+      puts '=> Auto-upgrading'
+      DataMapper.auto_upgrade!
+      puts "<= #{t.name} done"
+    end
+
+  end
+
+  desc 'Run all pending migrations, or up to specified migration'
+  task :migrate, [:version] => :load_migrations do |t, args|
+    if (vers = args[:version] || ENV.fetch('VERSION', nil))
+      puts "=> Migrating up to version #{vers}"
+      migrate_up!(vers)
+    else
+      puts '=> Migrating up'
+      migrate_up!
+    end
+    puts "<= #{t.name} done"
+  end
+
+  desc 'Rollback down to specified migration, or rollback last STEP=x migrations (default 1)'
+  task :rollback, [:version] => :load_migrations do |t, args|
+    if (vers = args[:version] || ENV.fetch('VERSION', nil))
+      puts "=> Rolling back down to migration #{vers}"
+      migrate_down!(vers)
+    else
+      step = (ENV['STEP'] || 1).to_i
+      applied = migrations.delete_if(&:needs_up?).sort # NOTE: this is N queries as currently implemented
+      target = applied[-1 * step] || applied[0]
+      if target
+        puts "=> Rolling back #{step} step(s)"
+        migrate_down!(target.position - 1)
+      else
+        warn "No migrations to rollback: #{step} step(s)"
+      end
+    end
+    puts "<= #{t.name} done"
+  end
+
+  desc 'List migrations descending, showing which have been applied'
+  task migrations: :load_migrations do
+    puts(migrations.sort.reverse.map { |m| "#{m.position} #{m.name} #{m.needs_up? ? '' : 'APPLIED'}" })
+  end
+
+  task load_migrations: :environment do
+    require 'dm-migrations/migration_runner'
+    FileList['db/migrate/*.rb'].each do |migration|
+      load migration
+    end
+  end
+
+
+  desc 'Create the database'
+  task :create, [:repository] => :environment do |t, args|
+    repo      = args[:repository] || ENV['REPOSITORY'] || :default
+    config    = DataMapper.repository(repo)&.adapter&.options&.symbolize_keys
+    user      = config[:user]
+    password  = config[:password]
+    host      = config[:host]
+    database  = config[:database] || config[:path].sub('/', '')
+    charset   = config[:charset] || ENV['CHARSET'] || 'utf8'
+    collation = config[:collation] || ENV['COLLATION'] || 'utf8_unicode_ci'
+    puts "=> Creating database '#{database}'"
+
+    case config[:adapter]
+    when 'postgres'
+      system('createdb', '-E', charset, '-h', host, '-U', user, database)
+    when 'mysql'
+      query = [
+        'mysql', "--user=#{user}", (password.blank? ? '' : "--password=#{password}"), (%w(127.0.0.1 localhost).include?(host) ? '-e' : "--host=#{host} -e"),
+        "CREATE DATABASE #{database} DEFAULT CHARACTER SET #{charset} DEFAULT COLLATE #{collation}".inspect
+      ]
+      system(query.compact.join(' '))
+    when 'sqlite3'
+      DataMapper.setup(DataMapper.repository&.name, config)
+    else
+      raise "Adapter #{config[:adapter]} not supported for creating databases yet."
+    end
+    puts "<= #{t.name} done"
+  end
+
+  desc 'Drop the database'
+  task :drop, [:repository] => :environment do |t, args|
+    repo     = args[:repository] || ENV['REPOSITORY'] || :default
+    config   = DataMapper.repository(repo)&.adapter&.options&.symbolize_keys
+    user     = config[:user]
+    password = config[:password]
+    host     = config[:host]
+    database = config[:database] || config[:path].sub('/', '')
+    puts "=> Dropping database '#{database}'"
+    case config[:adapter]
+    when 'postgres'
+      system('dropdb', '-h', host, '-U', user, database)
+    when 'mysql'
+      query = [
+        'mysql', "--user=#{user}",
+        (password.blank? ? '' : "--password=#{password}"),
+        (%w(127.0.0.1 localhost).include?(host) ? '-e' : "--host=#{host} -e"),
+        "DROP DATABASE IF EXISTS #{database}".inspect
+      ]
+      system(query.compact.join(' '))
+    when 'sqlite3'
+      FileUtils.rm_f(config[:path])
+    else
+      raise "Adapter #{config[:adapter]} not supported for dropping databases yet."
+    end
+    puts "<= #{t.name} done"
+  end
+
+  desc 'Load the seed data from db/seeds.rb'
+  task seed: :environment do |t, _|
+    puts '=> Loading seed data'
+    seed_file = File.expand_path('db/seeds.rb', File.dirname(__FILE__))
+    load(seed_file) if File.exist?(seed_file)
+    puts "<= #{t.name} done"
+  end
+
+  desc 'Drop the database, migrate from scratch and initialize with the seed data'
+  task reset: %i(drop setup)
+
+  desc 'Create the database, migrate and initialize with the seed data'
+  task setup: %i(create migrate seed)
+end
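Once these tasks are loaded, typical invocations look like the following (a sketch assuming the Rakefile above and a configured :default repository):

    rake db:create              # create the configured database
    rake db:migrate             # run all pending migrations
    rake db:migrate[2]          # or VERSION=2: migrate up to migration 2
    rake db:rollback STEP=2     # roll back the last two applied migrations
    rake db:migrations          # list migrations, marking those APPLIED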
data/examples/sample_migration.rb
ADDED
@@ -0,0 +1,58 @@
+require 'dm-migrations/migration_runner'
+
+DataMapper.setup(:default, 'sqlite3::memory:')
+
+DataMapper::Logger.new(STDOUT, :debug)
+DataMapper.logger.debug('Starting Migration')
+
+migration 1, :create_people_table do
+  up do
+    create_table :people do
+      column :id,   Integer, serial: true
+      column :name, String,  size: 50
+      column :age,  Integer
+    end
+  end
+  down do
+    drop_table :people
+  end
+end
+
+migration 2, :add_dob_to_people do
+  up do
+    modify_table :people do
+      add_column :dob, DateTime, allow_nil: true
+    end
+  end
+
+  down do
+    modify_table :people do
+      drop_column :dob
+    end
+  end
+end
+
+# migrate_down!
+# migrate_up!
+#
+# class Person
+#   include DataMapper::Resource
+#
+#   property :id,   Serial
+#   property :name, String, :size => 50
+#   property :age,  Integer
+#   property :dob,  DateTime, :default => proc { Time.now }
+#
+# end
+#
+# Person.create(:name => "Mark Bates", :age => 31)
+# puts Person.first.inspect
+# puts Person.all.inspect
+
+if $0 == __FILE__
+  if $*.first == 'down'
+    migrate_down!
+  else
+    migrate_up!
+  end
+end
data/examples/sample_migration_spec.rb
ADDED
@@ -0,0 +1,46 @@
+require 'pathname'
+
+dir = Pathname(__FILE__).dirname.expand_path
+
+require "#{dir}/sample_migration"
+require "#{dir}/../lib/spec/example/migration_example_group"
+
+describe :create_people_table, type: :migration do
+  before do
+    run_migration
+  end
+
+  it 'creates a people table' do
+    expect(repository(:default)).to have_table(:people)
+  end
+
+  it 'has an id column as the primary key' do
+    expect(table(:people)).to have_column(:id)
+    expect(table(:people).column(:id).type).to eq 'integer'
+    # expect(table(:people).column(:id)).to be_primary_key
+  end
+
+  it 'has a name column as a string' do
+    expect(table(:people)).to have_column(:name)
+    expect(table(:people).column(:name).type).to eq 'character varying'
+    expect(table(:people).column(:name)).to permit_null
+  end
+
+  it 'has a nullable age column as an int' do
+    expect(table(:people)).to have_column(:age)
+    expect(table(:people).column(:age).type).to eq 'integer'
+    expect(table(:people).column(:age)).to permit_null
+  end
+end
+
+describe :add_dob_to_people, type: :migration do
+  before do
+    run_migration
+  end
+
+  it 'adds a dob column as a timestamp' do
+    expect(table(:people)).to have_column(:dob)
+    expect(table(:people).column(:dob).type).to eq 'timestamp without time zone'
+    expect(table(:people).column(:dob)).to permit_null
+  end
+end
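The have_table, have_column, and permit_null matchers come from the gem's spec helpers (lib/spec/matchers/migration_matchers.rb in the listing above), and the expected types ('character varying', 'timestamp without time zone') are PostgreSQL's, so the spec assumes a PostgreSQL :default repository:

    rspec examples/sample_migration_spec.rb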
data/lib/dm-migrations/adapters/dm-do-adapter.rb
ADDED
@@ -0,0 +1,304 @@
+require 'dm-migrations/auto_migration'
+
+module DataMapper
+  module Migrations
+    module DataObjectsAdapter
+      # Returns whether the storage_name exists.
+      #
+      # @param [String] storage_name
+      #   a String defining the name of a storage, for example a table name.
+      #
+      # @return [Boolean]
+      #   true if the storage exists
+      #
+      # @api semipublic
+      def storage_exists?(storage_name)
+        statement = DataMapper::Ext::String.compress_lines(<<-SQL)
+          SELECT COUNT(*)
+          FROM "information_schema"."tables"
+          WHERE "table_type" = 'BASE TABLE'
+          AND "table_schema" = ?
+          AND "table_name" = ?
+        SQL
+
+        select(statement, schema_name, storage_name).first > 0
+      end
+
+      # Returns whether the field exists.
+      #
+      # @param [String] storage_name
+      #   a String defining the name of a storage, for example a table name.
+      # @param [String] column_name
+      #   a String defining the name of a field, for example a column name.
+      #
+      # @return [Boolean]
+      #   true if the field exists.
+      #
+      # @api semipublic
+      def field_exists?(storage_name, column_name)
+        statement = DataMapper::Ext::String.compress_lines(<<-SQL)
+          SELECT COUNT(*)
+          FROM "information_schema"."columns"
+          WHERE "table_schema" = ?
+          AND "table_name" = ?
+          AND "column_name" = ?
+        SQL
+
+        select(statement, schema_name, storage_name, column_name).first > 0
+      end
+
+      # @api semipublic
+      def upgrade_model_storage(model)
+        name       = self.name
+        properties = model.properties_with_subclasses(name)
+
+        return properties if create_model_storage(model)
+
+        table_name = model.storage_name(name)
+
+        with_connection do |connection|
+          properties.map do |property|
+            schema_hash = property_schema_hash(property)
+            next if field_exists?(table_name, schema_hash[:name])
+
+            statement = alter_table_add_column_statement(connection, table_name, schema_hash)
+            command   = connection.create_command(statement)
+            command.execute_non_query
+
+            # For simple :index => true columns, add an appropriate index.
+            # Upgrading doesn't know how to deal with complex indexes yet.
+            if property.options[:index] === true
+              statement = create_index_statement(model, property.name, [property.field])
+              command   = connection.create_command(statement)
+              command.execute_non_query
+            end
+
+            property
+          end.compact
+        end
+      end
+
+      # @api semipublic
+      def create_model_storage(model)
+        name       = self.name
+        properties = model.properties_with_subclasses(name)
+
+        return false if storage_exists?(model.storage_name(name))
+        return false if properties.empty?
+
+        with_connection do |connection|
+          statements = [create_table_statement(connection, model, properties)]
+          statements.concat(create_index_statements(model))
+          statements.concat(create_unique_index_statements(model))
+
+          statements.each do |statement|
+            command = connection.create_command(statement)
+            command.execute_non_query
+          end
+        end
+
+        true
+      end
+
+      # @api semipublic
+      def destroy_model_storage(model)
+        return true unless supports_drop_table_if_exists? || storage_exists?(model.storage_name(name))
+
+        execute(drop_table_statement(model))
+        true
+      end
+
+      module SQL # :nodoc:
+        # private ## This cannot be private for current migrations
+
+        # Adapters that support AUTO INCREMENT fields for CREATE TABLE
+        # statements should overwrite this to return true
+        #
+        # @api private
+        def supports_serial?
+          false
+        end
+
+        # @api private
+        def supports_drop_table_if_exists?
+          false
+        end
+
+        # @api private
+        def schema_name
+          raise NotImplementedError, "#{self.class}#schema_name not implemented"
+        end
+
+        # @api private
+        def alter_table_add_column_statement(connection, table_name, schema_hash)
+          "ALTER TABLE #{quote_name(table_name)} #{add_column_statement} #{property_schema_statement(connection, schema_hash)}"
+        end
+
+        # @api private
+        def create_table_statement(connection, model, properties)
+          DataMapper::Ext::String.compress_lines(<<-SQL)
+            CREATE TABLE #{quote_name(model.storage_name(name))}
+            (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')},
+            PRIMARY KEY(#{properties.key.map { |property| quote_name(property.field) }.join(', ')}))
+          SQL
+        end
+
+        # @api private
+        def drop_table_statement(model)
+          table_name = quote_name(model.storage_name(name))
+          if supports_drop_table_if_exists?
+            "DROP TABLE IF EXISTS #{table_name}"
+          else
+            "DROP TABLE #{table_name}"
+          end
+        end
+
+        # @api private
+        def create_index_statements(model)
+          name = self.name
+          model.storage_name(name)
+
+          indexes(model).map do |index_name, fields|
+            create_index_statement(model, index_name, fields)
+          end
+        end
+
+        # @api private
+        def create_index_statement(model, index_name, fields)
+          table_name = model.storage_name(name)
+
+          DataMapper::Ext::String.compress_lines(<<-SQL)
+            CREATE INDEX #{quote_name("index_#{table_name}_#{index_name}")} ON
+            #{quote_name(table_name)} (#{fields.map { |field| quote_name(field) }.join(', ')})
+          SQL
+        end
+
+        # @api private
+        def create_unique_index_statements(model)
+          name           = self.name
+          table_name     = model.storage_name(name)
+          key            = model.key(name).map(&:field)
+          unique_indexes = unique_indexes(model).reject { |_index_name, fields| fields == key }
+
+          unique_indexes.map do |index_name, fields|
+            DataMapper::Ext::String.compress_lines(<<-SQL)
+              CREATE UNIQUE INDEX #{quote_name("unique_#{table_name}_#{index_name}")} ON
+              #{quote_name(table_name)} (#{fields.map { |field| quote_name(field) }.join(', ')})
+            SQL
+          end
+        end
+
+        # @api private
+        def property_schema_hash(property)
+          dump_class             = property.dump_class
+          type_by_property_class = self.class.type_by_property_class(property.class)
+
+          schema = (type_by_property_class || self.class.type_map[dump_class]).merge(name: property.field)
+
+          schema_primitive = schema[:primitive]
+
+          if dump_class.equal?(String) && schema_primitive != 'TEXT' && schema_primitive != 'CLOB' && schema_primitive != 'NVARCHAR' &&
+             schema_primitive != 'BYTEA' && schema_primitive != 'VARBINARY'
+            schema[:length] = property.length
+          elsif dump_class.equal?(BigDecimal) || dump_class.equal?(Float)
+            schema[:precision] = property.precision
+            schema[:scale]     = property.scale
+          end
+
+          schema[:allow_nil] = property.allow_nil?
+          schema[:serial]    = property.serial?
+
+          default = property.default
+
+          if default.nil? || default.respond_to?(:call)
+            # remove the default if the property does not allow nil
+            schema.delete(:default) unless schema[:allow_nil]
+          else
+            schema[:default] = property.dump(default)
+          end
+
+          schema
+        end
+
+        # @api private
+        def property_schema_statement(connection, schema)
+          statement = quote_name(schema[:name])
+          statement << " #{schema[:primitive]}"
+
+          length = schema[:length]
+
+          if schema[:precision] && schema[:scale]
+            statement << "(#{%i(precision scale).map { |key| connection.quote_value(schema[key]) }.join(', ')})"
+          elsif length == 'max'
+            statement << '(max)'
+          elsif length
+            statement << "(#{connection.quote_value(length)})"
+          end
+
+          default = schema[:default]
+          statement << " DEFAULT #{connection.quote_value(default)}" if default
+
+          statement << ' NOT NULL' unless schema[:allow_nil]
+
+          statement
+        end
+
+        # @api private
+        def indexes(model)
+          model.properties(name).indexes
+        end
+
+        # @api private
+        def unique_indexes(model)
+          model.properties(name).unique_indexes
+        end
+
+        # @api private
+        def add_column_statement
+          'ADD COLUMN'
+        end
+      end
+
+      include SQL
+
+      module ClassMethods
+        # Default types for all data object based adapters.
+        #
+        # @return [Hash] default types for data objects adapters.
+        #
+        # @api private
+        def type_map
+          length    = Property::String.length
+          precision = Property::Numeric.precision
+          scale     = Property::Decimal.scale
+
+          {
+            Property::Binary => {primitive: 'BLOB'},
+            Object           => {primitive: 'TEXT'},
+            Integer          => {primitive: 'INTEGER'},
+            String           => {primitive: 'VARCHAR', length: length},
+            Class            => {primitive: 'VARCHAR', length: length},
+            BigDecimal       => {primitive: 'DECIMAL', precision: precision, scale: scale},
+            Float            => {primitive: 'FLOAT', precision: precision},
+            DateTime         => {primitive: 'TIMESTAMP'},
+            Date             => {primitive: 'DATE'},
+            Time             => {primitive: 'TIMESTAMP'},
+            TrueClass        => {primitive: 'BOOLEAN'},
+            Property::Text   => {primitive: 'TEXT'}
+          }.freeze
+        end
+
+        # Finds a type for a given property.
+        #
+        # @return [Hash | nil] type matching the given property or one of the properties ancestors
+        #
+        # @api private
+        def type_by_property_class(property_class)
+          return nil unless property_class < DataMapper::Property
+
+          type_map[property_class] || type_by_property_class(property_class.superclass)
+        end
+      end
+    end
+  end
+end
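As a sketch of how this type map plays out, the commented Person model from sample_migration.rb would be dumped roughly as follows under a DataObjects adapter (the column SQL is indicative, assembled by property_schema_statement above):

    class Person
      include DataMapper::Resource

      property :id,   Serial                               # INTEGER, serial, PRIMARY KEY
      property :name, String, length: 50, required: true   # VARCHAR(50) NOT NULL
      property :dob,  DateTime                             # TIMESTAMP, allows NULL
    end

    DataMapper.auto_migrate!   # emits CREATE TABLE via create_table_statement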