og 0.31.0 → 0.40.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/doc/{AUTHORS → CONTRIBUTORS} +26 -10
- data/doc/LICENSE +2 -3
- data/doc/RELEASES +56 -7
- data/doc/tutorial.txt +15 -15
- data/lib/glue/cacheable.rb +2 -5
- data/lib/glue/hierarchical.rb +1 -4
- data/lib/glue/optimistic_locking.rb +0 -2
- data/lib/glue/orderable.rb +79 -75
- data/lib/glue/revisable.rb +19 -24
- data/lib/glue/searchable.rb +0 -2
- data/lib/glue/taggable.rb +31 -29
- data/lib/glue/timestamped.rb +4 -2
- data/lib/og.rb +50 -29
- data/lib/og/adapter.rb +19 -0
- data/lib/og/adapter/mysql.rb +212 -0
- data/lib/og/adapter/mysql/override.rb +34 -0
- data/lib/og/adapter/mysql/script.rb +15 -0
- data/lib/og/adapter/mysql/utils.rb +40 -0
- data/lib/og/adapter/postgresql.rb +231 -0
- data/lib/og/adapter/postgresql/override.rb +117 -0
- data/lib/og/adapter/postgresql/script.rb +15 -0
- data/lib/og/adapter/postgresql/utils.rb +35 -0
- data/lib/og/adapter/sqlite.rb +132 -0
- data/lib/og/adapter/sqlite/override.rb +33 -0
- data/lib/og/adapter/sqlite/script.rb +15 -0
- data/lib/og/collection.rb +35 -7
- data/lib/og/{evolution.rb → dump.rb} +4 -5
- data/lib/og/entity.rb +102 -173
- data/lib/og/entity/clone.rb +119 -0
- data/lib/og/errors.rb +0 -2
- data/lib/og/manager.rb +85 -37
- data/lib/og/relation.rb +52 -34
- data/lib/og/relation/belongs_to.rb +0 -2
- data/lib/og/relation/has_many.rb +27 -4
- data/lib/og/relation/joins_many.rb +41 -14
- data/lib/og/relation/many_to_many.rb +10 -0
- data/lib/og/relation/refers_to.rb +22 -5
- data/lib/og/store.rb +80 -86
- data/lib/og/store/sql.rb +710 -713
- data/lib/og/store/sql/evolution.rb +119 -0
- data/lib/og/store/sql/join.rb +155 -0
- data/lib/og/store/sql/utils.rb +149 -0
- data/lib/og/test/assertions.rb +1 -3
- data/lib/og/test/testcase.rb +0 -2
- data/lib/og/types.rb +2 -5
- data/lib/og/validation.rb +6 -9
- data/test/{og/mixin → glue}/tc_hierarchical.rb +3 -13
- data/test/glue/tc_og_paginate.rb +47 -0
- data/test/{og/mixin → glue}/tc_optimistic_locking.rb +2 -12
- data/test/{og/mixin → glue}/tc_orderable.rb +15 -23
- data/test/glue/tc_orderable2.rb +47 -0
- data/test/glue/tc_revisable.rb +3 -3
- data/test/{og/mixin → glue}/tc_taggable.rb +20 -10
- data/test/{og/mixin → glue}/tc_timestamped.rb +2 -12
- data/test/glue/tc_webfile.rb +36 -0
- data/test/og/CONFIG.rb +8 -11
- data/test/og/multi_validations_model.rb +14 -0
- data/test/og/store/tc_filesys.rb +3 -1
- data/test/og/store/tc_kirby.rb +16 -13
- data/test/og/store/tc_sti.rb +11 -11
- data/test/og/store/tc_sti2.rb +79 -0
- data/test/og/tc_build.rb +41 -0
- data/test/og/tc_cacheable.rb +3 -2
- data/test/og/tc_has_many.rb +96 -0
- data/test/og/tc_inheritance.rb +6 -4
- data/test/og/tc_joins_many.rb +93 -0
- data/test/og/tc_multi_validations.rb +5 -7
- data/test/og/tc_multiple.rb +7 -6
- data/test/og/tc_override.rb +13 -7
- data/test/og/tc_primary_key.rb +30 -0
- data/test/og/tc_relation.rb +8 -14
- data/test/og/tc_reldelete.rb +163 -0
- data/test/og/tc_reverse.rb +17 -14
- data/test/og/tc_scoped.rb +3 -11
- data/test/og/tc_setup.rb +13 -11
- data/test/og/tc_store.rb +21 -28
- data/test/og/tc_validation2.rb +2 -2
- data/test/og/tc_validation_loop.rb +17 -15
- metadata +109 -103
- data/INSTALL +0 -91
- data/ProjectInfo +0 -51
- data/README +0 -177
- data/doc/config.txt +0 -28
- data/examples/README +0 -23
- data/examples/mysql_to_psql.rb +0 -71
- data/examples/run.rb +0 -271
- data/lib/glue/tree.rb +0 -218
- data/lib/og/store/alpha/filesys.rb +0 -110
- data/lib/og/store/alpha/memory.rb +0 -295
- data/lib/og/store/alpha/sqlserver.rb +0 -256
- data/lib/og/store/kirby.rb +0 -490
- data/lib/og/store/mysql.rb +0 -415
- data/lib/og/store/psql.rb +0 -875
- data/lib/og/store/sqlite.rb +0 -348
- data/lib/og/store/sqlite2.rb +0 -241
- data/setup.rb +0 -1585
- data/test/og/tc_sti_find.rb +0 -35
data/lib/og/store/mysql.rb
DELETED
@@ -1,415 +0,0 @@
|
|
1
|
-
begin
|
2
|
-
require 'mysql'
|
3
|
-
rescue Object => ex
|
4
|
-
Logger.error 'Ruby-Mysql bindings are not installed!'
|
5
|
-
Logger.error 'Trying to use the pure-Ruby binding included in Og'
|
6
|
-
begin
|
7
|
-
# Attempt to use the included pure ruby version.
|
8
|
-
require 'og/vendor/mysql'
|
9
|
-
rescue Object => ex
|
10
|
-
Logger.error ex
|
11
|
-
end
|
12
|
-
end
|
13
|
-
|
14
|
-
# Helper for scripts.
|
15
|
-
#
|
16
|
-
# === Example
|
17
|
-
#
|
18
|
-
# mysql "-u root -p", <<-END
|
19
|
-
# drop database if exists weblog_development;
|
20
|
-
# create database weblog_development;
|
21
|
-
# grant all on weblog_development.* to #{`id -un`.strip}@localhost;
|
22
|
-
# END
|
23
|
-
|
24
|
-
def mysql(opts, stream)
|
25
|
-
IO.popen("mysql #{opts}", 'w') { |io| io.puts stream }
|
26
|
-
end
|
27
|
-
|
28
|
-
require 'og/store/sql'
|
29
|
-
|
30
|
-
#--
|
31
|
-
# Customize the standard mysql resultset to make
|
32
|
-
# more compatible with Og.
|
33
|
-
#++
|
34
|
-
|
35
|
-
class Mysql::Result # :nodoc: all
|
36
|
-
def blank?
|
37
|
-
0 == num_rows
|
38
|
-
end
|
39
|
-
|
40
|
-
alias_method :next, :fetch_row
|
41
|
-
|
42
|
-
def each_row
|
43
|
-
each do |row|
|
44
|
-
yield(row, 0)
|
45
|
-
end
|
46
|
-
end
|
47
|
-
|
48
|
-
def first_value
|
49
|
-
val = fetch_row[0]
|
50
|
-
free
|
51
|
-
return val
|
52
|
-
end
|
53
|
-
|
54
|
-
alias_method :close, :free
|
55
|
-
|
56
|
-
def fields
|
57
|
-
fetch_fields.map { |f| f.name }
|
58
|
-
end
|
59
|
-
end
|
60
|
-
|
61
|
-
module Og
|
62
|
-
|
63
|
-
module MysqlUtils
|
64
|
-
include SqlUtils
|
65
|
-
|
66
|
-
def escape(str)
|
67
|
-
return nil unless str
|
68
|
-
return Mysql.quote(str)
|
69
|
-
end
|
70
|
-
end
|
71
|
-
|
72
|
-
# A Store that persists objects into a MySQL database.
|
73
|
-
# To read documentation about the methods, consult
|
74
|
-
# the documentation for SqlStore and Store.
|
75
|
-
#
|
76
|
-
# Here is some useful code to initialize your MySQL
|
77
|
-
# RDBMS for development. You probably want to be
|
78
|
-
# more careful with provileges on your production
|
79
|
-
# environment.
|
80
|
-
#
|
81
|
-
# mysql> GRANT ALL PRIVELEGES
|
82
|
-
# ON keystone.*
|
83
|
-
# TO <$sys_dbuser name>@localhost
|
84
|
-
# IDENTIFIED BY '(password)'
|
85
|
-
# WITH GRANT OPTION;
|
86
|
-
|
87
|
-
class MysqlStore < SqlStore
|
88
|
-
extend MysqlUtils
|
89
|
-
include MysqlUtils
|
90
|
-
|
91
|
-
DefaultPort = 3306
|
92
|
-
|
93
|
-
def self.create(options)
|
94
|
-
options[:port] ||= DefaultPort
|
95
|
-
# gmosx: system is used to avoid shell expansion.
|
96
|
-
system 'mysqladmin', '-f', "--user=#{options[:user]}",
|
97
|
-
"--password=#{options[:password]}",
|
98
|
-
"--host=#{options[:address]}",
|
99
|
-
"--port=#{options[:port]}",
|
100
|
-
'create', options[:name]
|
101
|
-
super
|
102
|
-
end
|
103
|
-
|
104
|
-
def self.destroy(options)
|
105
|
-
options[:port] ||= DefaultPort
|
106
|
-
system 'mysqladmin', '-f', "--user=#{options[:user]}",
|
107
|
-
"--password=#{options[:password]}", 'drop',
|
108
|
-
"--host=#{options[:address]}",
|
109
|
-
"--port=#{options[:port]}",
|
110
|
-
options[:name]
|
111
|
-
super
|
112
|
-
end
|
113
|
-
|
114
|
-
# Initialize the MySQL store.
|
115
|
-
#
|
116
|
-
# === Options
|
117
|
-
#
|
118
|
-
# * :address, the addres where the server is listening.
|
119
|
-
# * :socket, is useful when the pure ruby driver is used.
|
120
|
-
# this is the location of mysql.sock. For Ubuntu/Debian
|
121
|
-
# this is '/var/run/mysqld/mysqld.sock'. You can find
|
122
|
-
# the location for your system in my.cnf
|
123
|
-
|
124
|
-
def initialize(options)
|
125
|
-
super
|
126
|
-
|
127
|
-
@typemap.update(TrueClass => 'tinyint', Time => 'datetime')
|
128
|
-
|
129
|
-
@conn = Mysql.connect(
|
130
|
-
options[:address] || 'localhost',
|
131
|
-
options[:user],
|
132
|
-
options[:password],
|
133
|
-
options[:name],
|
134
|
-
options[:port],
|
135
|
-
options[:socket]
|
136
|
-
)
|
137
|
-
|
138
|
-
# You should set recconect to true to avoid MySQL has
|
139
|
-
# gone away errors.
|
140
|
-
|
141
|
-
if @conn.respond_to? :reconnect
|
142
|
-
options[:reconnect] = true unless options.has_key?(:reconnect)
|
143
|
-
@conn.reconnect = options[:reconnect]
|
144
|
-
end
|
145
|
-
|
146
|
-
rescue => ex
|
147
|
-
if ex.errno == 1049 # database does not exist.
|
148
|
-
Logger.info "Database '#{options[:name]}' not found!"
|
149
|
-
self.class.create(options)
|
150
|
-
retry
|
151
|
-
end
|
152
|
-
raise
|
153
|
-
end
|
154
|
-
|
155
|
-
def close
|
156
|
-
@conn.close
|
157
|
-
super
|
158
|
-
end
|
159
|
-
|
160
|
-
def enchant(klass, manager)
|
161
|
-
if klass.ann.self.primary_key.symbol == :oid
|
162
|
-
unless klass.properties.include? :oid
|
163
|
-
klass.property :oid, Fixnum, :sql => 'integer AUTO_INCREMENT PRIMARY KEY'
|
164
|
-
end
|
165
|
-
end
|
166
|
-
super
|
167
|
-
end
|
168
|
-
|
169
|
-
def query(sql)
|
170
|
-
Logger.debug sql if $DBG
|
171
|
-
@conn.query_with_result = true
|
172
|
-
return @conn.query(sql)
|
173
|
-
rescue => ex
|
174
|
-
handle_sql_exception(ex, sql)
|
175
|
-
end
|
176
|
-
|
177
|
-
def exec(sql)
|
178
|
-
Logger.debug sql if $DBG
|
179
|
-
@conn.query_with_result = false
|
180
|
-
@conn.query(sql)
|
181
|
-
rescue => ex
|
182
|
-
handle_sql_exception(ex, sql)
|
183
|
-
end
|
184
|
-
|
185
|
-
def start
|
186
|
-
# nop
|
187
|
-
# FIXME: InnoDB supports transactions.
|
188
|
-
end
|
189
|
-
|
190
|
-
# Commit a transaction.
|
191
|
-
|
192
|
-
def commit
|
193
|
-
# nop, not supported?
|
194
|
-
# FIXME: InnoDB supports transactions.
|
195
|
-
end
|
196
|
-
|
197
|
-
# Rollback a transaction.
|
198
|
-
|
199
|
-
def rollback
|
200
|
-
# nop, not supported?
|
201
|
-
# FIXME: InnoDB supports transactions.
|
202
|
-
end
|
203
|
-
|
204
|
-
def sql_update(sql)
|
205
|
-
exec(sql)
|
206
|
-
@conn.affected_rows
|
207
|
-
end
|
208
|
-
|
209
|
-
# Deserialize one object from the ResultSet.
|
210
|
-
|
211
|
-
def read_one(res, klass, options = nil)
|
212
|
-
return nil if res.blank?
|
213
|
-
|
214
|
-
if options and join_relations = options[:include]
|
215
|
-
join_relations = [join_relations].flatten.collect do |n|
|
216
|
-
klass.relation(n)
|
217
|
-
end
|
218
|
-
end
|
219
|
-
|
220
|
-
res_row = res.next
|
221
|
-
|
222
|
-
# causes STI classes to come back as the correct child class
|
223
|
-
# if accessed from the superclass.
|
224
|
-
|
225
|
-
klass = Og::Entity::entity_from_string(res_row[0]) if klass.schema_inheritance?
|
226
|
-
obj = klass.og_allocate(res_row, 0)
|
227
|
-
|
228
|
-
if options and options[:select]
|
229
|
-
read_row(obj, res, res_row, 0)
|
230
|
-
else
|
231
|
-
obj.og_read(res_row)
|
232
|
-
read_join_relations(obj, res_row, 0, join_relations) if join_relations
|
233
|
-
end
|
234
|
-
|
235
|
-
return obj
|
236
|
-
|
237
|
-
ensure
|
238
|
-
res.close
|
239
|
-
end
|
240
|
-
|
241
|
-
private
|
242
|
-
|
243
|
-
def create_table(klass)
|
244
|
-
# rp: fixes problems when user doesn't have
|
245
|
-
# write access to db.
|
246
|
-
# THINK, should a method more like this be
|
247
|
-
# used instead of catching database exceptions
|
248
|
-
# for 'table exists'?
|
249
|
-
|
250
|
-
fields = fields_for_class(klass)
|
251
|
-
|
252
|
-
if @conn.list_tables.include?(klass::OGTABLE)
|
253
|
-
actual_fields = conn.list_fields(klass::OGTABLE).fetch_fields.map {|f| f.name }
|
254
|
-
|
255
|
-
# Make new ones always - don't destroy by default because
|
256
|
-
# it might contain data you want back.
|
257
|
-
|
258
|
-
need_fields = fields.each do |needed_field|
|
259
|
-
field_name = needed_field[0..(needed_field.index(' ')-1)]
|
260
|
-
next if actual_fields.include?(field_name)
|
261
|
-
|
262
|
-
if @options[:evolve_schema] == true
|
263
|
-
Logger.debug "Adding field '#{needed_field}' to '#{klass::OGTABLE}'" if $DBG
|
264
|
-
sql = "ALTER TABLE #{klass::OGTABLE} ADD COLUMN #{needed_field}"
|
265
|
-
@conn.query(sql)
|
266
|
-
else
|
267
|
-
Logger.info "WARNING: Table '#{klass::OGTABLE}' is missing field '#{needed_field}' and :evolve_schema is not set to true!"
|
268
|
-
end
|
269
|
-
end
|
270
|
-
|
271
|
-
#Drop old ones
|
272
|
-
needed_fields = fields.map {|f| f =~ /^([^ ]+)/; $1}
|
273
|
-
actual_fields.each do |obsolete_field|
|
274
|
-
next if needed_fields.include?(obsolete_field)
|
275
|
-
if @options[:evolve_schema] == true and @options[:evolve_schema_cautious] == false
|
276
|
-
sql = "ALTER TABLE #{klass::OGTABLE} DROP COLUMN #{obsolete_field}"
|
277
|
-
Logger.debug "Removing obsolete field '#{obsolete_field}' from '#{klass::OGTABLE}'" if $DBG
|
278
|
-
@conn.query(sql)
|
279
|
-
else
|
280
|
-
Logger.info "WARNING: You have an obsolete field '#{obsolete_field}' on table '#{klass::OGTABLE}' and :evolve_schema is not set or is in cautious mode!"
|
281
|
-
end
|
282
|
-
end
|
283
|
-
return
|
284
|
-
end
|
285
|
-
|
286
|
-
sql = "CREATE TABLE #{klass::OGTABLE} (#{fields.join(', ')}"
|
287
|
-
|
288
|
-
# Create table constraints.
|
289
|
-
|
290
|
-
if constraints = klass.ann.self[:sql_constraint]
|
291
|
-
sql << ", #{constraints.join(', ')}"
|
292
|
-
end
|
293
|
-
|
294
|
-
if table_type = @options[:table_type]
|
295
|
-
sql << ") TYPE = #{table_type};"
|
296
|
-
else
|
297
|
-
sql << ");"
|
298
|
-
end
|
299
|
-
|
300
|
-
# Create indices.
|
301
|
-
|
302
|
-
if indices = klass.ann.self[:index]
|
303
|
-
for data in indices
|
304
|
-
idx, options = *data
|
305
|
-
idx = idx.to_s
|
306
|
-
pre_sql, post_sql = options[:pre], options[:post]
|
307
|
-
idxname = idx.gsub(/ /, "").gsub(/,/, "_").gsub(/\(.*\)/, "")
|
308
|
-
sql << " CREATE #{pre_sql} INDEX #{klass::OGTABLE}_#{idxname}_idx #{post_sql} ON #{klass::OGTABLE} (#{idx});"
|
309
|
-
end
|
310
|
-
end
|
311
|
-
|
312
|
-
@conn.query_with_result = false
|
313
|
-
|
314
|
-
begin
|
315
|
-
@conn.query(sql)
|
316
|
-
Logger.info "Created table '#{klass::OGTABLE}'."
|
317
|
-
rescue => ex
|
318
|
-
if ex.errno == 1050 # table already exists.
|
319
|
-
Logger.debug 'Table already exists' if $DBG
|
320
|
-
return
|
321
|
-
else
|
322
|
-
raise
|
323
|
-
end
|
324
|
-
end
|
325
|
-
|
326
|
-
# Create join tables if needed. Join tables are used in
|
327
|
-
# 'many_to_many' relations.
|
328
|
-
|
329
|
-
if join_tables = klass.ann.self[:join_tables]
|
330
|
-
for info in join_tables
|
331
|
-
begin
|
332
|
-
create_join_table_sql(info).each do |sql|
|
333
|
-
@conn.query sql
|
334
|
-
end
|
335
|
-
Logger.debug "Created jointable '#{info[:table]}'." if $DBG
|
336
|
-
rescue => ex
|
337
|
-
if ex.respond_to?(:errno) and ex.errno == 1050 # table already exists.
|
338
|
-
Logger.debug 'Join table already exists' if $DBG
|
339
|
-
else
|
340
|
-
raise
|
341
|
-
end
|
342
|
-
end
|
343
|
-
end
|
344
|
-
end
|
345
|
-
end
|
346
|
-
|
347
|
-
def create_field_map(klass)
|
348
|
-
conn.query_with_result = true
|
349
|
-
res = @conn.query "SELECT * FROM #{klass::OGTABLE} LIMIT 1"
|
350
|
-
map = {}
|
351
|
-
|
352
|
-
# Check if the field should be ignored.
|
353
|
-
ignore = klass.ann[:self][:ignore_field] || klass.ann[:self][:ignore_fields] || klass.ann[:self][:ignore_columns]
|
354
|
-
|
355
|
-
res.num_fields.times do |i|
|
356
|
-
field_name = res.fetch_field.name.to_sym
|
357
|
-
|
358
|
-
unless (ignore and ignore.include?(field_name))
|
359
|
-
map[field_name] = i
|
360
|
-
end
|
361
|
-
end
|
362
|
-
|
363
|
-
return map
|
364
|
-
ensure
|
365
|
-
res.close if res
|
366
|
-
end
|
367
|
-
|
368
|
-
def write_prop(p)
|
369
|
-
if p.klass.ancestors.include?(Integer)
|
370
|
-
return "#\{@#{p} || 'NULL'\}"
|
371
|
-
elsif p.klass.ancestors.include?(Float)
|
372
|
-
return "#\{@#{p} || 'NULL'\}"
|
373
|
-
elsif p.klass.ancestors.include?(String)
|
374
|
-
return %|#\{@#{p} ? "'#\{#{self.class}.escape(@#{p})\}'" : 'NULL'\}|
|
375
|
-
elsif p.klass.ancestors.include?(Time)
|
376
|
-
return %|#\{@#{p} ? "'#\{#{self.class}.timestamp(@#{p})\}'" : 'NULL'\}|
|
377
|
-
elsif p.klass.ancestors.include?(Date)
|
378
|
-
return %|#\{@#{p} ? "'#\{#{self.class}.date(@#{p})\}'" : 'NULL'\}|
|
379
|
-
elsif p.klass.ancestors.include?(TrueClass)
|
380
|
-
return "#\{@#{p} ? \"'1'\" : 'NULL' \}"
|
381
|
-
elsif p.klass.ancestors.include?(Og::Blob)
|
382
|
-
return %|#\{@#{p} ? "'#\{#{self.class}.escape(#{self.class}.blob(@#{p}))\}'" : 'NULL'\}|
|
383
|
-
else
|
384
|
-
# gmosx: keep the '' for nil symbols.
|
385
|
-
return %|#\{@#{p} ? "'#\{#{self.class}.escape(@#{p}.to_yaml)\}'" : "''"\}|
|
386
|
-
end
|
387
|
-
end
|
388
|
-
|
389
|
-
def eval_og_insert(klass)
|
390
|
-
props = klass.properties.values
|
391
|
-
values = props.collect { |p| write_prop(p) }.join(',')
|
392
|
-
|
393
|
-
if klass.schema_inheritance?
|
394
|
-
props << Property.new(:symbol => :ogtype, :klass => String)
|
395
|
-
values << ", '#{klass}'"
|
396
|
-
end
|
397
|
-
|
398
|
-
sql = "INSERT INTO #{klass::OGTABLE} (#{props.collect {|p| field_for_property(p)}.join(',')}) VALUES (#{values})"
|
399
|
-
|
400
|
-
klass.class_eval %{
|
401
|
-
def og_insert(store)
|
402
|
-
#{::Aspects.gen_advice_code(:og_insert, klass.advices, :pre) if klass.respond_to?(:advices)}
|
403
|
-
store.conn.query_with_result = false
|
404
|
-
store.conn.query "#{sql}"
|
405
|
-
@#{klass.pk_symbol} = store.conn.insert_id
|
406
|
-
#{::Aspects.gen_advice_code(:og_insert, klass.advices, :post) if klass.respond_to?(:advices)}
|
407
|
-
end
|
408
|
-
}
|
409
|
-
end
|
410
|
-
|
411
|
-
end
|
412
|
-
|
413
|
-
end
|
414
|
-
|
415
|
-
# * George Moschovitis <gm@navel.gr>
|
data/lib/og/store/psql.rb
DELETED
@@ -1,875 +0,0 @@
|
|
1
|
-
begin
|
2
|
-
require 'postgres'
|
3
|
-
rescue Object => ex
|
4
|
-
Logger.error 'Ruby-PostgreSQL bindings are not installed!'
|
5
|
-
Logger.error ex
|
6
|
-
end
|
7
|
-
|
8
|
-
#--
|
9
|
-
# Customize to make more compatible with Og.
|
10
|
-
#++
|
11
|
-
|
12
|
-
class PGconn # :nodoc: all
|
13
|
-
# Lists all the tables within the database.
|
14
|
-
|
15
|
-
def list_tables
|
16
|
-
begin
|
17
|
-
r = self.exec "SELECT c.relname FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind='r' AND n.nspname NOT IN ('pg_catalog', 'pg_toast') AND pg_catalog.pg_table_is_visible(c.oid)"
|
18
|
-
rescue Exception
|
19
|
-
# Racing
|
20
|
-
return []
|
21
|
-
end
|
22
|
-
ret = r.result.flatten
|
23
|
-
r.clear
|
24
|
-
ret
|
25
|
-
end
|
26
|
-
|
27
|
-
# Returns true if a table exists within the database, false
|
28
|
-
# otherwise.
|
29
|
-
|
30
|
-
def table_exists?(table) #rp: this should be abstracted to the sql abstractor
|
31
|
-
begin
|
32
|
-
r = self.exec "SELECT c.relname FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind='r' AND n.nspname NOT IN ('pg_catalog', 'pg_toast') AND pg_catalog.pg_table_is_visible(c.oid) AND c.relname='#{self.class.escape(table.to_s)}'"
|
33
|
-
rescue Exception
|
34
|
-
return false # Racing...
|
35
|
-
end
|
36
|
-
ret = r.result.size != 0
|
37
|
-
r.clear
|
38
|
-
ret
|
39
|
-
end
|
40
|
-
|
41
|
-
# Returns the PostgreSQL OID of a table within the database or
|
42
|
-
# nil if it doesn't exist. Mostly for internal usage.
|
43
|
-
|
44
|
-
def table_oid(table)
|
45
|
-
begin
|
46
|
-
r = self.exec "SELECT c.oid FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind='r' AND n.nspname NOT IN ('pg_catalog', 'pg_toast') AND pg_catalog.pg_table_is_visible(c.oid) AND c.relname='#{self.class.escape(table.to_s)}'"
|
47
|
-
rescue Exception
|
48
|
-
return nil # Racing...
|
49
|
-
end
|
50
|
-
ret = r.result.flatten.first
|
51
|
-
r.clear
|
52
|
-
ret
|
53
|
-
end
|
54
|
-
|
55
|
-
# Returns an array of arrays containing the list of fields within a
|
56
|
-
# table. Each element contains two elements, the first is the field
|
57
|
-
# name and the second is the field type. Returns nil if the table
|
58
|
-
# does not exist.
|
59
|
-
|
60
|
-
def table_field_list(table)
|
61
|
-
return nil unless pg_oid = table_oid(table)
|
62
|
-
r = self.exec "SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod) FROM pg_catalog.pg_attribute a WHERE a.attrelid = '#{pg_oid}' AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum"
|
63
|
-
ret = r.result
|
64
|
-
r.clear
|
65
|
-
ret
|
66
|
-
end
|
67
|
-
|
68
|
-
# Returns a hash containing the foreign key constrains within a table.
|
69
|
-
# The keys are constraint names and the values are the constraint
|
70
|
-
# definitions.
|
71
|
-
|
72
|
-
def table_foreign_keys(table)
|
73
|
-
return nil unless pg_oid = table_oid(table)
|
74
|
-
r = self.exec "SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = '#{pg_oid}' AND r.contype = 'f'"
|
75
|
-
res = r.result
|
76
|
-
ret = Hash.new
|
77
|
-
res.each do |double|
|
78
|
-
ret[double.first] = double.last
|
79
|
-
end
|
80
|
-
r.clear
|
81
|
-
ret
|
82
|
-
end
|
83
|
-
|
84
|
-
# Returns a hash keyed by table (as a string) with each value also
|
85
|
-
# being a hash keyed by the constraint name (as a string) and the
|
86
|
-
# value being a string that contains the constraint definition.
|
87
|
-
|
88
|
-
def all_foreign_keys
|
89
|
-
loop_counter = 0
|
90
|
-
loop_max = 5
|
91
|
-
begin
|
92
|
-
r = self.exec "SELECT c.relname,r.conname, pg_catalog.pg_get_constraintdef(r.oid, true) as condef FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace JOIN pg_catalog.pg_constraint r ON r.conrelid = c.oid WHERE c.relkind='r' AND r.contype ='f' AND n.nspname NOT IN ('pg_catalog', 'pg_toast') AND pg_catalog.pg_table_is_visible(c.oid);"
|
93
|
-
rescue RuntimeError => ex
|
94
|
-
raise unless ex.message =~ /cache lookup failed for relation (\d+)/ # Racing
|
95
|
-
# damaged_relation = $1
|
96
|
-
# Logger.error "Got damage to #{damaged_relation}"
|
97
|
-
loop_counter += 1
|
98
|
-
if loop_counter > loop_max
|
99
|
-
Logger.error "PostgreSQL had more than #{loop_max} cache errors, your database is almost certainly corrupt as pg_class does not match the PostgreSQL cache. Either use pg_dump to save the data, re-create the database, let og rebuild the schema and use pg_restore to restore the data, or repair it by hand"
|
100
|
-
exit
|
101
|
-
end
|
102
|
-
Logger.error "There is a problem with PostgreSQL's internal cache, retrying... (#{loop_counter} of #{loop_max})"
|
103
|
-
# you have a horrible setup anyhow, and it allows your
|
104
|
-
# horrible setup to work (deleting tables so fast
|
105
|
-
# in parallel PostgreSQL's internal lookups fail)
|
106
|
-
sleep 2
|
107
|
-
retry
|
108
|
-
end
|
109
|
-
res = r.result
|
110
|
-
ret = Hash.new
|
111
|
-
res.each do |tripple|
|
112
|
-
ret[tripple.first] ||= Hash.new
|
113
|
-
ret[tripple[0]][tripple[1]] = tripple[2]
|
114
|
-
end
|
115
|
-
r.clear
|
116
|
-
ret
|
117
|
-
end
|
118
|
-
|
119
|
-
end
|
120
|
-
|
121
|
-
require 'og/store/sql'
|
122
|
-
|
123
|
-
#--
|
124
|
-
# Customize the standard postgres resultset to make
|
125
|
-
# more compatible with Og.
|
126
|
-
#++
|
127
|
-
|
128
|
-
class PGresult # :nodoc: all
|
129
|
-
def blank?
|
130
|
-
0 == num_tuples
|
131
|
-
end
|
132
|
-
|
133
|
-
def next
|
134
|
-
self
|
135
|
-
end
|
136
|
-
|
137
|
-
def each_row
|
138
|
-
for row in (0...num_tuples)
|
139
|
-
yield(self, row)
|
140
|
-
end
|
141
|
-
end
|
142
|
-
|
143
|
-
def first_value
|
144
|
-
val = getvalue(0, 0)
|
145
|
-
clear
|
146
|
-
return val
|
147
|
-
end
|
148
|
-
|
149
|
-
alias_method :close, :clear
|
150
|
-
end
|
151
|
-
|
152
|
-
module Og
|
153
|
-
|
154
|
-
module PsqlUtils
|
155
|
-
include SqlUtils
|
156
|
-
|
157
|
-
def escape(str)
|
158
|
-
return nil unless str
|
159
|
-
return PGconn.escape(str.to_s)
|
160
|
-
end
|
161
|
-
|
162
|
-
# TODO, mneumann:
|
163
|
-
#
|
164
|
-
# Blobs are actually a lot faster (and uses up less storage) for large data I
|
165
|
-
# think, as they need not to be encoded and decoded. I'd like to have both ;-)
|
166
|
-
# BYTEA is easier to handle than BLOBs, but if you implement BLOBs in a way
|
167
|
-
# that they are transparent to the user (as I did in Ruby/DBI), I'd prefer that
|
168
|
-
# way.
|
169
|
-
|
170
|
-
def blob(val)
|
171
|
-
val.gsub(/[\000-\037\047\134\177-\377]/) do |b|
|
172
|
-
"\\#{ b[0].to_s(8).rjust(3, '0') }"
|
173
|
-
end
|
174
|
-
end
|
175
|
-
|
176
|
-
def parse_blob(val)
|
177
|
-
return '' unless val
|
178
|
-
|
179
|
-
val.gsub(/\\(\\|'|[0-3][0-7][0-7])/) do |s|
|
180
|
-
if s.size == 2 then s[1,1] else s[1,3].oct.chr end
|
181
|
-
end
|
182
|
-
end
|
183
|
-
|
184
|
-
end
|
185
|
-
|
186
|
-
# A Store that persists objects into a PostgreSQL database.
|
187
|
-
# To read documentation about the methods, consult the documentation
|
188
|
-
# for SqlStore and Store.
|
189
|
-
#
|
190
|
-
# This is the reference Og store.
|
191
|
-
#
|
192
|
-
# === Design
|
193
|
-
#
|
194
|
-
# The getvalue interface is used instead of each for extra
|
195
|
-
# performance.
|
196
|
-
|
197
|
-
class PsqlStore < SqlStore
|
198
|
-
extend PsqlUtils
|
199
|
-
include PsqlUtils
|
200
|
-
|
201
|
-
def self.create(options)
|
202
|
-
# gmosx: system is used to avoid shell expansion.
|
203
|
-
system 'createdb', options[:name], '-U', options[:user]
|
204
|
-
super
|
205
|
-
end
|
206
|
-
|
207
|
-
def self.destroy(options)
|
208
|
-
system 'dropdb', options[:name], '-U', options[:user]
|
209
|
-
super
|
210
|
-
end
|
211
|
-
|
212
|
-
# Purges all tables from the database.
|
213
|
-
|
214
|
-
def self.destroy_tables(options)
|
215
|
-
|
216
|
-
conn = PGconn.connect(
|
217
|
-
options[:address],
|
218
|
-
options[:port], nil, nil,
|
219
|
-
options[:name],
|
220
|
-
options[:user].to_s,
|
221
|
-
options[:password].to_s
|
222
|
-
)
|
223
|
-
|
224
|
-
conn.list_tables.each do |table|
|
225
|
-
begin
|
226
|
-
conn.exec "DROP TABLE #{table} CASCADE"
|
227
|
-
Logger.debug "Dropped database table #{table}" if $DBG
|
228
|
-
rescue RuntimeError => ex
|
229
|
-
catch :ok do # Racing
|
230
|
-
throw :ok if ex.message =~ /tuple concurrently updated/
|
231
|
-
throw :ok if ex.message =~ /does not exist/
|
232
|
-
throw :ok if ex.message =~ /cache lookup failed/
|
233
|
-
raise
|
234
|
-
end
|
235
|
-
end
|
236
|
-
end
|
237
|
-
|
238
|
-
conn.close
|
239
|
-
end
|
240
|
-
|
241
|
-
|
242
|
-
def initialize(options)
|
243
|
-
super
|
244
|
-
|
245
|
-
@typemap.update(Og::Blob => 'bytea')
|
246
|
-
|
247
|
-
@conn = PGconn.connect(
|
248
|
-
options[:address],
|
249
|
-
options[:port], nil, nil,
|
250
|
-
options[:name],
|
251
|
-
options[:user].to_s,
|
252
|
-
options[:password].to_s
|
253
|
-
)
|
254
|
-
schema_order = options[:schema_order]
|
255
|
-
encoding = options[:encoding]
|
256
|
-
min_messages = options[:min_messages]
|
257
|
-
|
258
|
-
@conn.exec("SET search_path TO #{schema_order}") if schema_order
|
259
|
-
@conn.exec("SET client_encoding TO '#{encoding}'") if encoding
|
260
|
-
@conn.exec("SET client_min_messages TO '#{min_messages}'") if min_messages
|
261
|
-
rescue => ex
|
262
|
-
# gmosx: any idea how to better test this?
|
263
|
-
if ex.to_s =~ /database .* does not exist/i
|
264
|
-
Logger.info "Database '#{options[:name]}' not found!"
|
265
|
-
self.class.create(options)
|
266
|
-
retry
|
267
|
-
end
|
268
|
-
raise
|
269
|
-
end
|
270
|
-
|
271
|
-
def close
|
272
|
-
@conn.close
|
273
|
-
super
|
274
|
-
end
|
275
|
-
|
276
|
-
def enchant(klass, manager)
|
277
|
-
if klass.schema_inheritance_child?
|
278
|
-
klass.const_set 'OGSEQ', "#{table(klass.schema_inheritance_root_class)}_oid_seq"
|
279
|
-
else
|
280
|
-
klass.const_set 'OGSEQ', "#{table(klass)}_oid_seq"
|
281
|
-
end
|
282
|
-
|
283
|
-
if klass.ann.self.primary_key.symbol == :oid
|
284
|
-
unless klass.properties.include? :oid
|
285
|
-
klass.property :oid, Fixnum, :sql => 'serial PRIMARY KEY'
|
286
|
-
end
|
287
|
-
end
|
288
|
-
super
|
289
|
-
end
|
290
|
-
|
291
|
-
def query(sql)
|
292
|
-
Logger.debug sql if $DBG
|
293
|
-
return @conn.exec(sql)
|
294
|
-
rescue => ex
|
295
|
-
handle_sql_exception(ex, sql)
|
296
|
-
end
|
297
|
-
|
298
|
-
def exec(sql)
|
299
|
-
Logger.debug sql if $DBG
|
300
|
-
@conn.exec(sql).clear
|
301
|
-
rescue => ex
|
302
|
-
handle_sql_exception(ex, sql)
|
303
|
-
end
|
304
|
-
|
305
|
-
def sql_update(sql)
|
306
|
-
Logger.debug sql if $DBG
|
307
|
-
res = @conn.exec(sql)
|
308
|
-
changed = res.cmdtuples
|
309
|
-
res.clear
|
310
|
-
changed
|
311
|
-
end
|
312
|
-
|
313
|
-
# Start a new transaction.
|
314
|
-
|
315
|
-
def start
|
316
|
-
# neumann: works with earlier PSQL databases too.
|
317
|
-
exec('BEGIN TRANSACTION') if @transaction_nesting < 1
|
318
|
-
@transaction_nesting += 1
|
319
|
-
end
|
320
|
-
|
321
|
-
# Returns the Og::Manager that owns this store.
|
322
|
-
|
323
|
-
def manager
|
324
|
-
manager = nil
|
325
|
-
ok = false
|
326
|
-
ObjectSpace.each_object(Og::Manager) do |manager|
|
327
|
-
if manager.store.__id__ == self.__id__
|
328
|
-
ok = true
|
329
|
-
break
|
330
|
-
end
|
331
|
-
end
|
332
|
-
raise RuntimeError, "#{self.class} could not find it's manager" unless ok
|
333
|
-
manager
|
334
|
-
end
|
335
|
-
|
336
|
-
# Returns an array containing the constraints needed for this relation.
|
337
|
-
# The array contains hashes with the format:
|
338
|
-
#
|
339
|
-
# :table => The name of the table to which the constraint should be
|
340
|
-
# applied.
|
341
|
-
# :referenced_table => The name of the table which the foreign key
|
342
|
-
# refers to.
|
343
|
-
# :fk => The name of the field to turn into a foreign key.
|
344
|
-
# :pk => The primary key of the referenced table.
|
345
|
-
# :update => The action that should be taken if the primary key
|
346
|
-
# of a referenced row is changed.
|
347
|
-
# :delete => The action that should be taken if a referenced
|
348
|
-
# row is deleted.
|
349
|
-
# :name => The name of the constraint to apply.
|
350
|
-
|
351
|
-
def constraint_info(rel)
|
352
|
-
if rel.join_table
|
353
|
-
info = join_table_info(rel)
|
354
|
-
constraints = [ { :fk => info[:first_key], :referenced_table => info[:first_table], :table => rel.join_table, :pk => ( rel.owner_class.primary_key.field || rel.owner_class.primary_key.symbol ), :update => 'CASCADE', :delete => 'CASCADE'},
|
355
|
-
{ :fk => info[:second_key], :referenced_table => info[:second_table], :table => rel.join_table, :pk => ( rel.target_class.primary_key.field || rel.target_class.primary_key.symbol ), :update => 'CASCADE', :delete => 'CASCADE' } ]
|
356
|
-
elsif rel.class == Og::HasMany
|
357
|
-
constraints = [ { :fk => rel.foreign_key, :table => rel.target_class::OGTABLE, :referenced_table => rel.owner_class::OGTABLE, :pk => ( rel.owner_class.primary_key.field || rel.owner_class.primary_key.symbol ), :update => 'SET NULL', :delete => 'SET NULL' } ]
|
358
|
-
else
|
359
|
-
constraints = [ { :fk => rel.foreign_key, :table => rel.owner_class::OGTABLE, :referenced_table => rel.target_class::OGTABLE, :pk => ( rel.target_class.primary_key.field || rel.target_class.primary_key.symbol ), :update => 'SET NULL', :delete => 'SET NULL' } ]
|
360
|
-
end
|
361
|
-
|
362
|
-
constraints.each do |constraint|
|
363
|
-
constraint[:name] = constraint_name(constraint)
|
364
|
-
end
|
365
|
-
|
366
|
-
# This checks for not-yet-enchanted entities, is there a better way?
|
367
|
-
constraints.reject{|info| [info[:table], info[:referenced_table]].include?(:OGTABLE) }
|
368
|
-
end
|
369
|
-
|
370
|
-
|
371
|
-
# Returns a hash keyed by table (as a string) with each value also
# being a hash keyed by the constraint name (as a string) and the
# value being a string that contains the constraint definition.
#
# This format matches the actual constrains returned by the
# all_foreign_keys method added to the PGConn class.

def all_needed_constraints
  rels = manager.managed_classes.map { |klass| klass.relations }.flatten.uniq
  rels.each_with_object(Hash.new) do |relation, needed|
    constraint_info(relation).each do |info|
      per_table = (needed[info[:table]] ||= Hash.new)
      # First definition seen for a given constraint name wins.
      next if per_table.has_key? info[:name]
      per_table[info[:name]] = constraint_definition(info)
    end
  end
end
|
392
|
-
|
393
|
-
# Returns an SQL fragment containing the correct definition for a
# foreign key constraint described by +info+ (see constraint_info).

def constraint_definition(info)
  "FOREIGN KEY (%s) REFERENCES %s(%s) ON UPDATE %s ON DELETE %s" %
    [info[:fk], info[:referenced_table], info[:pk], info[:update], info[:delete]]
end
|
398
|
-
|
399
|
-
# Works the same as all_needed_constraints but only acts on one class and
# returns the same hash as part of yet another hash with two keys, tables
# and constraints. This is done to prevent having to resolve the
# relations again later just to map tables.

def needed_constraints(klass)
  constraints = Hash.new
  tables = Array.new
  all_rels = klass.relations + klass.resolve_remote_relations
  all_rels.each do |rel|
    constraint_info(rel).each do |info|
      tables << info[:table]
      tables << info[:referenced_table]
      (constraints[info[:table]] ||= Hash.new)[info[:name]] = constraint_definition(info)
    end
  end
  { :tables => tables.uniq, :constraints => constraints }
end
|
416
|
-
|
417
|
-
# Returns the appropriate constraint prefix for a foreign key
# constraint: the global Og table prefix followed by 'c'.

def constraint_prefix
  [Og.table_prefix, 'c'].join
end
|
422
|
-
|
423
|
-
# Returns the appropriate name for a constraint element generated by
# the constraint_info method (prefix, table and FK column joined
# with underscores).

def constraint_name(hash)
  [constraint_prefix, hash[:table], hash[:fk]].join('_')
end
|
429
|
-
|
430
|
-
# Diffs the constraints required by the object model against those
# present in the database and returns a hash with two keys:
#
# :drop => ALTER TABLE statements removing wrong/stale constraints.
# :create => ALTER TABLE statements adding missing constraints.
#
# With a +klass+ argument only that class's tables are examined;
# with no argument every managed class is considered and stale
# constraints are also scheduled for removal.

def needed_constraints_sql(klass = nil)
  if klass
    constraints = needed_constraints(klass)
    all_needed = constraints[:constraints]
    all_existing = Hash.new
    constraints[:tables].each do |table|
      all_existing[table] = @conn.table_foreign_keys(table)
    end
  else
    all_existing = @conn.all_foreign_keys
    all_needed = all_needed_constraints
  end

  drop_constraints = Array.new
  create_constraints = Array.new

  all_needed.each_pair do |table,constraints|
    constraints.each_pair do |name,definition|

      # If neither of these are matched, the constraint already exists
      # and has the correct definition.

      if all_existing[table].nil? or all_existing[table][name].nil?

        # Does not exist in database

        create_constraints << "ALTER TABLE #{table} ADD CONSTRAINT #{name} #{definition}"
      elsif all_existing[table][name] != definition

        # Exists in database and matches the object structure but has the
        # wrong definition (unlikely to happen very often).

        Logger.debug "PostgreSQL database contains a constraint on table '#{table}' named '#{name}' which is incorrectly defined and will be redefined (OLD: '#{all_existing[table][name]}', NEW: '#{definition}')" if $DBG
        drop_constraints << "ALTER TABLE #{table} DROP CONSTRAINT #{name}"
        create_constraints << "ALTER TABLE #{table} ADD CONSTRAINT #{name} #{definition}"
      end
    end
  end

  # You can't do this when managing classes seperately without spidering
  # each other class managed by this stores manager as other classes
  # can want relations within the same tables too. I will add spidering
  # support at some point but this isn't very important since these
  # complicated and convoluted routines will now rarely happen thank
  # to the setup hooking.

  unless klass
    all_existing.each_pair do |table,constraints|
      constraints.each_key do |name|
        if all_needed[table].nil? or all_needed[table][name].nil?

          # Exists in database but doesn't match object model at all

          # NOTE(review): this looks like a leftover debugging guard
          # against a mis-parsed table name literally called "table" —
          # confirm it can be removed.
          raise Exception if table.to_s.downcase == "table"
          Logger.debug "PostgreSQL database contains a constraint on table '#{table}' named '#{name}' which does not match the object model and will be deleted" if $DBG
          drop_constraints << "ALTER TABLE #{table} DROP CONSTRAINT #{name}"
        end
      end
    end
  end

  {
    :drop => drop_constraints,
    :create => create_constraints
  }

end
|
496
|
-
|
497
|
-
# Takes a hash with constraints to drop and create and performs
# the work.
#
# param - optional hash with :drop and :create statement arrays (as
# produced by needed_constraints_sql). When nil the full constraint
# set for all managed classes is computed and applied.
#
# Fixes over the previous revision:
# * the cleanup counters were incremented into +nulled_relations+ but
#   the summary read the never-incremented +nullified_relations+, so
#   nulled/deleted row counts were always reported as zero (and the
#   "rows deleted" message printed the wrong counter entirely);
# * the cautious-mode warning interpolated +table+, which was only
#   parsed out of the statement on the non-cautious path, so it was
#   always blank. The statement is now parsed before branching.

def create_constraints(param = nil)
  subsection_only = !!param
  sql_hash = param ? param : needed_constraints_sql
  Logger.debug "PostgreSQL processing foreign key constraints" unless subsection_only if $DBG
  started = Time.now
  deleted = 0
  nulled_relations = 0   # dangling FKs set to NULL to satisfy a new constraint
  deleted_relations = 0  # rows deleted (NOT NULL FK columns, e.g. join tables)
  created = 0

  # Drop stale/incorrect constraints; a constraint that vanished in the
  # meantime is fine (another process raced us).
  sql_hash[:drop].each do |sql|
    begin
      @conn.exec(sql)
    rescue RuntimeError => ex
      raise unless ex.message =~ /does not exist/
    end
    deleted += 1
  end

  sql_hash[:create].each do |sql|
    con_retry = true
    begin
      @conn.exec(sql)
      created += 1
    rescue PGError,RuntimeError => ex
      next if ex.message =~ /already exists/ # Racing
      unless ex.message =~ /.*violates foreign key constraint.*/
        Logger.error "PostgreSQL connection returned an error for query #{sql}"
        raise
      end

      # Existing rows reference missing targets. Recover the pieces of the
      # ALTER TABLE statement so we can clean up (or at least report).
      table, name, fk, referenced_table, pk = sql.match(/^ALTER TABLE (\S+) ADD CONSTRAINT (\S+) FOREIGN KEY \((\S+)\) REFERENCES ([^ (]+)[ (]+([^)]+)/).captures
      raise if [table,fk,pk,referenced_table].include? nil

      if @options[:evolve_schema] == true and @options[:evolve_schema_cautious] == false
        # First try to NULL out the dangling keys; fall back to deleting
        # the offending rows when the column is NOT NULL.
        cleaner_sql = "UPDATE #{table} SET #{fk} = NULL WHERE #{fk} NOT IN (SELECT #{pk} FROM #{referenced_table})"
        begin
          @conn.exec(cleaner_sql)
          if cleaner_sql[0..5] == "UPDATE"
            nulled_relations += 1
          else
            deleted_relations += 1
          end
        rescue PGError,RuntimeError => ex
          if ex.message =~ /.*violates not-null constraint.*/
            cleaner_sql = "DELETE FROM #{table} WHERE #{fk} NOT IN (SELECT #{pk} FROM #{referenced_table})"
            retry
          end
          Logger.error "PostgreSQL connection returned an error for query '#{cleaner_sql}' which was attempting to tidy up ready for the query '#{sql}'"
          raise
        end

        Logger.error "There were relationships in table #{table} that did not exist so they have been set to NULL (or deleted if this was not possible, i.e. for a join table)."
        # Retry the original ADD CONSTRAINT once now the data is clean.
        if con_retry
          con_retry = false
          retry
        end
      else
        Logger.error "There are relationships in table #{table} that do not exist. Your database is corrupt. Please fix these or enable evolve_schema not in cautious mode and they will be fixed automatically."
      end
    end
  end

  finished = Time.now
  taken = Kernel.sprintf("%.2f", finished - started)
  broken_relations = nulled_relations + deleted_relations

  # Assemble the human-readable summary.
  text = "PostgreSQL finished setting constraints. "
  if [0,0,0] == [deleted,created,broken_relations]
    return if subsection_only # Make less chatty for short calls
    text << "No action was taken, "
  else
    text << "#{created} constraints were added, " if created != 0
    text << "#{deleted} constraints were deleted, " if deleted != 0
    if broken_relations != 0
      text.gsub!(/,([^,]+)$/,' and \1')
      text << "#{broken_relations} relations were broken causing "
      if nulled_relations != 0
        text << "#{nulled_relations} relations to have non-existant foreign keys set to null"
        text << (deleted_relations == 0 ? ", " : " and ")
      end
      text << "#{deleted_relations} relations to have rows with non-existant foreign keys deleted, " if deleted_relations != 0
    end
  end
  text = text[0..-3].gsub(/,([^,]+)$/,' and \1')
  text << " in #{taken} seconds."
  Logger.debug text if $DBG
end
|
588
|
-
|
589
|
-
# Called by Og.manager (in turn called by Og.setup) when Og.setup
# has finished, allowing better processing of foreign key
# constraints and possibly other enhancements.
#
# Delegates to create_constraints with no argument, so the full
# constraint set for every managed class is computed and applied in
# one pass.

def post_setup
  create_constraints
end
|
596
|
-
|
597
|
-
# Deserialize one object from the ResultSet.
#
# res - the backend result set (closed before returning, see ensure).
# klass - the entity class to instantiate; for schema-inheritance
#         (STI) hierarchies the concrete subclass is read from the
#         row's ogtype column instead.
# options - optional hash; honours :include (relations joined into
#           the query) and :select (custom column list).
#
# Returns the deserialized object, or nil for a blank result.

def read_one(res, klass, options = nil)
  return nil if res.blank?

  # Resolve :include names into relation objects up front.
  if options and join_relations = options[:include]
    join_relations = [join_relations].flatten.collect do |n|
      klass.relation(n)
    end
  end

  res_row = res.next

  # causes STI classes to come back as the correct child class
  # if accessed from the superclass.

  klass = Og::Entity::entity_from_string(res_row.result.flatten[res_row.fieldnum('ogtype')]) if klass.schema_inheritance?
  obj = klass.og_allocate(res_row, 0)

  if options and options[:select]
    # Custom select list: map columns to ivars generically.
    read_row(obj, res, res_row, 0)
  else
    obj.og_read(res_row)
    read_join_relations(obj, res_row, 0, join_relations) if join_relations
  end

  return obj

ensure
  # Always release the result set, even on error.
  res.close
end
|
628
|
-
|
629
|
-
private
|
630
|
-
|
631
|
-
# Creates the table (plus indices and join tables) for +klass+, or
# evolves an existing table to match the class schema: missing
# columns are added and obsolete columns dropped, both gated by the
# :evolve_schema / :evolve_schema_cautious options. Finally the
# foreign key constraints for the class are (re)built, unless Og.setup
# is driving (it runs a cleaner whole-model pass via post_setup).

def create_table(klass)
  fields = fields_for_class(klass)

  unless @conn.table_exists? klass::OGTABLE

    sql = "CREATE TABLE #{klass::OGTABLE} (#{fields.join(', ')}"

    # Create table constraints.

    if constraints = klass.ann.self[:sql_constraint]
      sql << ", #{constraints.join(', ')}"
    end

    sql << ") WITHOUT OIDS;"

    # Create indices.

    if indices = klass.ann.self[:index]
      for data in indices
        idx, options = *data
        idx = idx.to_s
        pre_sql, post_sql = options[:pre], options[:post]
        # Index name: strip spaces, commas become underscores, drop
        # any function-call parens from expression indices.
        idxname = idx.gsub(/ /, "").gsub(/,/, "_").gsub(/\(.*\)/, "")
        sql << " CREATE #{pre_sql} INDEX #{klass::OGTABLE}_#{idxname}_idx #{post_sql} ON #{klass::OGTABLE} (#{idx});"
      end
    end
    begin
      res = @conn.exec(sql)
      res.clear
      Logger.info "Created table '#{klass::OGTABLE}'."
    rescue RuntimeError => ex
      catch :ok do # Racing
        throw :ok if ex.message =~ /duplicate key violates unique constraint "pg_class_relname_nsp_index"/
        throw :ok if ex.message =~ /already exists/
        raise
      end
    end
  else
    Logger.debug "Table #{klass::OGTABLE} already exists" if $DBG
    #rp: basic field interrogation
    # TODO: Add type checking.

    actual_fields = @conn.table_field_list(klass::OGTABLE).map {|pair| pair.first}

    #Make new ones always - don't destroy by default because it might contain data you want back.
    need_fields = fields.each do |needed_field|
      # Column name is the first token of the field definition.
      field_name = needed_field[0..(needed_field.index(' ')-1)]
      next if actual_fields.include?(field_name)

      if @options[:evolve_schema] == true
        Logger.debug "Adding field '#{needed_field}' to '#{klass::OGTABLE}'" if $DBG
        sql = "ALTER TABLE #{klass::OGTABLE} ADD COLUMN #{needed_field}"
        begin
          @conn.exec(sql)
        rescue RuntimeError => ex
          raise unless ex.message =~ /already exists/
        end
      else
        Logger.info "WARNING: Table '#{klass::OGTABLE}' is missing field '#{needed_field}' and :evolve_schema is not set to true!"
      end
    end

    #Drop old ones
    needed_fields = fields.map {|f| f =~ /^([^ ]+)/; $1}
    actual_fields.each do |obsolete_field|
      next if needed_fields.include?(obsolete_field)
      if @options[:evolve_schema] == true and @options[:evolve_schema_cautious] == false
        sql = "ALTER TABLE #{klass::OGTABLE} DROP COLUMN #{obsolete_field}"
        begin
          @conn.exec(sql)
        rescue RuntimeError => ex
          raise unless ex.message =~ /does not exist/
          # NOTE(review): this debug line only runs when the column was
          # already gone ("does not exist" matched) — the success-path
          # log looks misplaced; confirm intent.
          Logger.debug "Removed obsolete field '#{obsolete_field}' from '#{klass::OGTABLE}'" if $DBG
        end
      else
        Logger.info "WARNING: You have an obsolete field '#{obsolete_field}' on table '#{klass::OGTABLE}' and :evolve_schema is not set or is in cautious mode!"
      end
    end
  end

  # Create join tables if needed. Join tables are used in
  # 'many_to_many' relations.

  # For some reason this is missing a self join case
  # and therefore can't be used.
  # if join_tables = klass.ann.self[:join_tables]
  # for info in join_tables
  # unless @conn.table_exists? info[:table]
  # join_tables = Array.new
  # join_tables = klass.relations.reject{|rel| !rel.join_table}.map{|rel| join_table_info(rel)}
  if join_tables = klass.ann.self[:join_tables]
    for info in join_tables
      unless @conn.table_exists? info[:table]
        create_join_table_sql(info).each do |sql|
          begin
            res = @conn.exec(sql)
            res.clear
          rescue RuntimeError => ex
            raise unless ex.message =~ /duplicate key violates unique constraint "pg_class_relname_nsp_index"/
            # Racing
          end
        end
        Logger.debug "Created jointable '#{info[:table]}'." if $DBG
      else
        Logger.debug "Join table '#{info[:table]}' already exists." if $DBG
      end
    end
  end

  # If we are being called by Og.setup, we can use a much cleaner method
  # for constructing foreign key constraints.
  return if @options[:called_by_og_setup]

  # Strip out old constraints... this shouldn't always be necessary but
  # must be here for now while glycerin is still bleeding-edge to fix
  # changes and a nasty error that made it into the glycerin developers
  # darcs repo (but NOT into any released version of Nitro)

  unless @options[:leave_constraints] == true or @stripped_constraints
    Logger.debug "Stripping PostgreSQL foreign key constraints" if $DBG
    all_foreign_keys.map{|k| k[1].map{|v| [k[0],v[0]] }[0]}.each do |table,constraint|
      prefix = constraint_prefix
      # Only strip constraints that carry our own prefix.
      next unless constraint[0-prefix.size..-1] == constraint_prefix
      begin
        # NOTE(review): +m+ is not defined in this scope — presumably
        # this should be @conn.exec (errors are swallowed below, which
        # would mask a NameError here); confirm.
        m.store.conn.exec "ALTER TABLE #{table} DROP CONSTRAINT #{constraint}"
      rescue Exception
      end
    end
  end

  # Create sql constraints
  create_constraints(needed_constraints_sql(klass))

end
|
765
|
-
|
766
|
-
# Drops the table backing +klass+. CASCADE makes PostgreSQL remove
# dependent objects (foreign key constraints etc.), so no manual
# cleanup of related rows is required here.

def drop_table(klass)
  exec("DROP TABLE #{klass.table} CASCADE")
end
|
771
|
-
|
772
|
-
# Builds a map of field name (Symbol) => result column index for
# klass by probing a single row of its table. If the table does not
# exist yet it is created and the probe retried. Fields listed in the
# class's ignore_field(s)/ignore_columns annotations are skipped.

def create_field_map(klass)
  begin
    res = @conn.exec "SELECT * FROM #{klass::OGTABLE} LIMIT 1"
  rescue RuntimeError => ex
    raise unless ex.message =~ /does not exist/ or ex.message =~ /deleted while still in use/
    # Racing
    create_table(klass)
    retry
  end
  map = {}

  # Check if the field should be ignored.
  ignore = klass.ann[:self][:ignore_field] || klass.ann[:self][:ignore_fields] || klass.ann[:self][:ignore_columns]

  for field in res.fields
    field_name = field.to_sym

    unless (ignore and ignore.include?(field_name))
      map[field_name] = res.fieldnum(field)
    end
  end

  return map
ensure
  # Release the probe result even when an exception escaped.
  res.clear if res
end
|
798
|
-
|
799
|
-
# Returns a Ruby expression (as a String, later eval'd into generated
# deserialization code) that converts result column +col+ of the
# current row into the type of property +p+. Unrecognised types fall
# back to YAML deserialization.

def read_prop(p, col)
  cell = "res.getvalue(row, #{col} + offset)"
  anc = p.klass.ancestors
  if anc.include?(Integer)
    "#{self.class}.parse_int(#{cell})"
  elsif anc.include?(Float)
    "#{self.class}.parse_float(#{cell})"
  elsif anc.include?(String)
    cell
  elsif anc.include?(Time)
    "#{self.class}.parse_timestamp(#{cell})"
  elsif anc.include?(Date)
    "#{self.class}.parse_date(#{cell})"
  elsif anc.include?(TrueClass)
    # PostgreSQL booleans come back as 't' / 'f'.
    "('t' == #{cell})"
  elsif anc.include?(Og::Blob)
    "#{self.class}.parse_blob(#{cell})"
  else
    "YAML.load(#{cell})"
  end
end
|
818
|
-
|
819
|
-
#--
# TODO: create stored procedure.
#++

# Compiles (via class_eval of a generated string) an og_insert
# instance method for +klass+: fetches the next primary key from the
# class sequence, assigns it, then executes a prebuilt INSERT with
# one value expression per property. For schema-inheritance classes
# an extra ogtype column recording the concrete class is appended.
# Aspect advice code is spliced in before and after when available.

def eval_og_insert(klass)
  props = klass.properties.values.dup
  values = props.collect { |p| write_prop(p) }.join(',')

  if klass.schema_inheritance?
    props << Property.new(:symbol => :ogtype, :klass => String)
    values << ", '#{klass}'"
  end

  # The INSERT statement is fully resolved here, at code-generation
  # time, and baked into the method body below.
  sql = "INSERT INTO #{klass::OGTABLE} (#{props.collect {|p| field_for_property(p)}.join(',')}) VALUES (#{values})"

  klass.class_eval %{
    def og_insert(store)
      #{::Aspects.gen_advice_code(:og_insert, klass.advices, :pre) if klass.respond_to?(:advices)}
      res = store.conn.exec "SELECT nextval('#{klass::OGSEQ}')"
      @#{klass.pk_symbol} = res.getvalue(0, 0).to_i
      res.clear
      store.conn.exec("#{sql}").clear
      #{::Aspects.gen_advice_code(:og_insert, klass.advices, :post) if klass.respond_to?(:advices)}
    end
  }
end
|
845
|
-
|
846
|
-
# Defines self.og_allocate on +klass+. For schema-inheritance (STI)
# hierarchies the concrete class name is read from the first result
# column (ogtype) so superclass queries allocate the right subclass;
# otherwise the class simply allocates itself.

def eval_og_allocate(klass)
  code =
    if klass.schema_inheritance?
      <<-RUBY
        def self.og_allocate(res, row = 0)
          Object.constant(res.getvalue(row, 0)).allocate
        end
      RUBY
    else
      <<-RUBY
        def self.og_allocate(res, row = 0)
          self.allocate
        end
      RUBY
    end
  klass.module_eval(code)
end
|
861
|
-
|
862
|
-
# Generic row deserialization: copies every column of result row
# +row+ into an instance variable named after its field on +obj+.
# Values are stored raw (as returned by the driver) — no type
# conversion is applied. +res_row+ is accepted for interface
# symmetry but unused here.

def read_row(obj, res, res_row, row)
  idx = 0
  res.fields.each do |field|
    obj.instance_variable_set "@#{field}", res.getvalue(row, idx)
    idx += 1
  end
end
|
867
|
-
|
868
|
-
end
|
869
|
-
|
870
|
-
end
|
871
|
-
|
872
|
-
# * George Moschovitis <gm@navel.gr>
|
873
|
-
# * Rob Pitt <rob@motionpath.com>
|
874
|
-
# * Michael Neumann <mneumann@ntecs.de>
|
875
|
-
# * Ysabel <deb@ysabel.org>
|