og 0.9.5 → 0.10.0

This diff shows the content changes between two publicly released versions of this package, as they appear in the supported public registries. It is provided for informational purposes only.
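
The two hunks below each delete an entire file from the gem: first the PostgreSQL backend (psql.rb, defining Og::PsqlBackend), then the SQLite backend (defining Og::SqliteBackend). As a rough, illustrative sketch of how the removed constructors were driven, inferred only from the config keys they read (the surrounding Og setup API is not part of this diff):

  # Hypothetical usage of the removed 0.9.5 backends, for orientation only;
  # assumes the two deleted files shown below are loaded.
  psql = Og::PsqlBackend.new(
    :database => 'mydb',      # both backends read config[:database]
    :user     => 'postgres',  # passed to PGconn.connect along with :password
    :password => 'secret'
  )
  sqlite = Og::SqliteBackend.new(:database => 'my.db')  # opens SQLite::Database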
@@ -1,386 +0,0 @@
- # * George Moschovitis <gm@navel.gr>
- # (c) 2004-2005 Navel, all rights reserved.
- # $Id: psql.rb 248 2005-01-31 13:38:34Z gmosx $
-
- require 'postgres'
-
- require 'og/backend'
-
- class Og
-
-   # Implements a PostgreSQL powered backend.
-   # This backend is compatible with Michael Neumann's postgres-pr
-   # pure ruby driver.
-
-   class PsqlBackend < Backend
-
-     # A mapping between Ruby and SQL types.
-
-     TYPEMAP = {
-       Integer => 'integer',
-       Fixnum => 'integer',
-       Float => 'float',
-       String => 'text',
-       Time => 'timestamp',
-       Date => 'date',
-       TrueClass => 'boolean',
-       Object => 'text',
-       Array => 'text',
-       Hash => 'text'
-     }
-
-     # Intitialize the connection to the RDBMS.
-
-     def initialize(config)
-       begin
-         @conn = PGconn.connect(nil, nil, nil, nil, config[:database],
-           config[:user], config[:password])
-       rescue => ex
-         # gmosx: any idea how to better test this?
-         if ex.to_s =~ /database .* does not exist/i
-           Logger.info "Database '#{config[:database]}' not found!"
-           PsqlBackend.create_db(config[:database], config[:user])
-           retry
-         end
-         raise
-       end
-     end
-
-     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-     # Utilities
-     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
-     # Escape an SQL string
-
-     def self.escape(str)
-       return nil unless str
-       return PGconn.escape(str)
-     end
-
-     # Convert a ruby time to an sql timestamp.
-     # TODO: Optimize this
-
-     def self.timestamp(time = Time.now)
-       return nil unless time
-       return time.strftime("%Y-%m-%d %H:%M:%S")
-     end
-
-     # Output YYY-mm-dd
-     # TODO: Optimize this
-
-     def self.date(date)
-       return nil unless date
-       return "#{date.year}-#{date.month}-#{date.mday}"
-     end
-
-     # Parse sql datetime
-     # TODO: Optimize this
-
-     def self.parse_timestamp(str)
-       return Time.parse(str)
-     end
-
-     # Input YYYY-mm-dd
-     # TODO: Optimize this
-
-     def self.parse_date(str)
-       return nil unless str
-       return Date.strptime(str)
-     end
-
-     # Return an sql string evaluator for the property.
-     # No need to optimize this, used only to precalculate code.
-     # YAML is used to store general Ruby objects to be more
-     # portable.
-     #
-     # FIXME: add extra handling for float.
-
-     def self.write_prop(p)
-       if p.klass.ancestors.include?(Integer)
-         return "#\{@#{p.symbol} || 'NULL'\}"
-       elsif p.klass.ancestors.include?(Float)
-         return "#\{@#{p.symbol} || 'NULL'\}"
-       elsif p.klass.ancestors.include?(String)
-         return "'#\{PsqlBackend.escape(@#{p.symbol})\}'"
-       elsif p.klass.ancestors.include?(Time)
-         return %|#\{@#{p.symbol} ? "'#\{PsqlBackend.timestamp(@#{p.symbol})\}'" : 'NULL'\}|
-       elsif p.klass.ancestors.include?(Date)
-         return %|#\{@#{p.symbol} ? "'#\{PsqlBackend.date(@#{p.symbol})\}'" : 'NULL'\}|
-       elsif p.klass.ancestors.include?(TrueClass)
-         return "#\{@#{p.symbol} ? \"'t'\" : 'NULL' \}"
-       else
-         return %|#\{@#{p.symbol} ? "'#\{PsqlBackend.escape(@#{p.symbol}.to_yaml)\}'" : "''"\}|
-       end
-     end
-
-     # Return an evaluator for reading the property.
-     # No need to optimize this, used only to precalculate code.
-
-     def self.read_prop(p, idx)
-       if p.klass.ancestors.include?(Integer)
-         return "res.getvalue(tuple, #{idx}).to_i()"
-       elsif p.klass.ancestors.include?(Float)
-         return "res.getvalue(tuple, #{idx}).to_f()"
-       elsif p.klass.ancestors.include?(String)
-         return "res.getvalue(tuple, #{idx})"
-       elsif p.klass.ancestors.include?(Time)
-         return "PsqlBackend.parse_timestamp(res.getvalue(tuple, #{idx}))"
-       elsif p.klass.ancestors.include?(Date)
-         return "PsqlBackend.parse_date(res.getvalue(tuple, #{idx}))"
-       elsif p.klass.ancestors.include?(TrueClass)
-         return %|('t' == res.getvalue(tuple, #{idx}))|
-       else
-         return "YAML::load(res.getvalue(tuple, #{idx}))"
-       end
-     end
-
-     # Returns the code that actually inserts the object into the
-     # database. Returns the code as String.
-
-     def self.insert_code(klass, sql, pre_cb, post_cb)
-       %{
-         #{pre_cb}
-         res = conn.db.query("SELECT nextval('#{klass::DBSEQ}')")
-         @oid = res.getvalue(0, 0).to_i
-         conn.exec "#{sql}"
-         #{post_cb}
-       }
-     end
-
-     # generate the mapping of the database fields to the
-     # object properties.
-
-     def self.calc_field_index(klass, og)
-       res = og.query "SELECT * FROM #{klass::DBTABLE} LIMIT 1"
-       meta = og.managed_classes[klass]
-
-       for field in res.fields
-         meta.field_index[field] = res.fieldnum(field)
-       end
-     end
-
-     # Generate the property for oid
-
-     def self.eval_og_oid(klass)
-       klass.class_eval %{
-         prop_accessor :oid, Fixnum, :sql => "integer PRIMARY KEY"
-       }
-     end
-
-     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-     # Connection methods.
-     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
-     # Create the database.
-
-     def self.create_db(database, user = nil, password = nil)
-       Logger.info "Creating database '#{database}'."
-       `createdb #{database} -U #{user}`
-     end
-
-     # Drop the database.
-
-     def self.drop_db(database, user = nil, password = nil)
-       Logger.info "Dropping database '#{database}'."
-       `dropdb #{database} -U #{user}`
-     end
-
-     # Execute an SQL query and return the result.
-
-     def query(sql)
-       Logger.debug sql if $DBG
-       return @conn.exec(sql)
-     end
-
-     # Execute an SQL query, no result returned.
-
-     def exec(sql)
-       Logger.debug sql if $DBG
-       res = @conn.exec(sql)
-       res.clear()
-     end
-
-     # Execute an SQL query and return the result. Wrapped in a rescue
-     # block.
-
-     def safe_query(sql)
-       Logger.debug sql if $DBG
-       begin
-         return @conn.exec(sql)
-       rescue => ex
-         Logger.error "DB error #{ex}, [#{sql}]"
-         Logger.error ex.backtrace
-         return nil
-       end
-     end
-
-     # Execute an SQL query, no result returned. Wrapped in a rescue
-     # block.
-
-     def safe_exec(sql)
-       Logger.debug sql if $DBG
-       begin
-         res = @conn.exec(sql)
-         res.clear()
-       rescue => ex
-         Logger.error "DB error #{ex}, [#{sql}]"
-         Logger.error ex.backtrace
-       end
-     end
-
-     # Check if it is a valid resultset.
-
-     def valid?(res)
-       return !(res.nil? or 0 == res.num_tuples)
-     end
-
-     # Create the managed object table. The properties of the
-     # object are mapped to the table columns. Additional sql relations
-     # and constrains are created (indicices, sequences, etc).
-
-     def create_table(klass)
-       fields = create_fields(klass, TYPEMAP)
-
-       sql = "CREATE TABLE #{klass::DBTABLE} (#{fields.join(', ')}"
-
-       # Create table constrains
-
-       if klass.__meta and constrains = klass.__meta[:sql_constrain]
-         sql << ", #{constrains.join(', ')}"
-       end
-
-       sql << ") WITHOUT OIDS;"
-
-       # Create indices
-
-       if klass.__meta and indices = klass.__meta[:sql_index]
-         for data in indices
-           idx, options = *data
-           idx = idx.to_s
-           pre_sql, post_sql = options[:pre], options[:post]
-           idxname = idx.gsub(/ /, "").gsub(/,/, "_").gsub(/\(.*\)/, "")
-           sql << " CREATE #{pre_sql} INDEX #{klass::DBTABLE}_#{idxname}_idx #{post_sql} ON #{klass::DBTABLE} (#{idx});"
-         end
-       end
-
-       begin
-         exec(sql)
-         Logger.info "Created table '#{klass::DBTABLE}'."
-       rescue => ex
-         # gmosx: any idea how to better test this?
-         if ex.to_s =~ /relation .* already exists/i
-           Logger.debug "Table already exists" if $DBG
-         else
-           raise
-         end
-       end
-
-       # create the sequence for this table. Even if the table
-       # uses the oids_seq, attempt to create it. This makes
-       # the system more fault tolerant.
-
-       begin
-         exec "CREATE SEQUENCE #{klass::DBSEQ}"
-         Logger.info "Created sequence '#{klass::DBSEQ}'."
-       rescue => ex
-         # gmosx: any idea how to better test this?
-         if ex.to_s =~ /relation .* already exists/i
-           Logger.debug "Sequence already exists" if $DBG
-         else
-           raise
-         end
-       end
-
-       # Create join tables if needed. Join tables are used in
-       # 'many_to_many' relations.
-
-       if klass.__meta and joins = klass.__meta[:sql_join]
-         for data in joins
-           # the class to join to and some options.
-           join_class, options = *data
-
-           # gmosx: dont use DBTABLE here, perhaps the join class
-           # is not managed yet.
-           join_table = "#{self.class.join_table(klass, join_class)}"
-           join_src = "#{self.class.encode(klass)}_oid"
-           join_dst = "#{self.class.encode(join_class)}_oid"
-           begin
-             exec "CREATE TABLE #{join_table} ( key1 integer NOT NULL, key2 integer NOT NULL )"
-             exec "CREATE INDEX #{join_table}_key1_idx ON #{join_table} (key1)"
-             exec "CREATE INDEX #{join_table}_key2_idx ON #{join_table} (key2)"
-           rescue => ex
-             # gmosx: any idea how to better test this?
-             if ex.to_s =~ /relation .* already exists/i
-               Logger.debug "Join table already exists" if $DBG
-             else
-               raise
-             end
-           end
-         end
-       end
-
-       begin
-         exec(sql)
-         Logger.info "Created join table '#{join_table}'."
-       rescue => ex
-         # gmosx: any idea how to better test this?
-         if ex.to_s =~ /relation .* already exists/i
-           Logger.debug "Join table already exists" if $DBG
-         else
-           raise
-         end
-       end
-
-     end
-
-     # Drop the managed object table.
-
-     def drop_table(klass)
-       super
-       exec "DROP SEQUENCE #{klass::DBSEQ}"
-     end
-
-     # Deserialize one row of the resultset.
-
-     def deserialize_one(res, klass)
-       return nil unless valid?(res)
-
-       # gmosx: Managed objects should have no params constructor.
-       entity = klass.new()
-       entity.og_deserialize(res, 0)
-
-       # get_join_fields(res, 0, entity, join_fields) if join_fields
-
-       res.clear()
-       return entity
-     end
-
-     # Deserialize all rows of the resultset.
-
-     def deserialize_all(res, klass)
-       return [] unless valid?(res)
-
-       entities = []
-
-       for tuple in (0...res.num_tuples)
-         entity = klass.new()
-         entity.og_deserialize(res, tuple)
-
-         # get_join_fields(res, tuple, entity, join_fields) if join_fields
-
-         entities << entity
-       end
-
-       res.clear()
-       return entities
-     end
-
-     # Return a single integer value from the resultset.
-
-     def get_int(res, idx = 0)
-       return res.getvalue(0, idx).to_i
-     end
-
-   end
-
- end
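
The removed PsqlBackend above converts Ruby values to SQL literals with a few class-level helpers (escape, timestamp, date), while write_prop and read_prop return Ruby source fragments that Og uses to precalculate the serialization code for each managed class. A small self-contained sketch of the two pure-Ruby formatting helpers, mirroring the deleted code (the method names below are local stand-ins, not the gem's API):

  require 'date'

  # Mirrors PsqlBackend.timestamp from the deleted file.
  def sql_timestamp(time = Time.now)
    return nil unless time
    time.strftime("%Y-%m-%d %H:%M:%S")
  end

  # Mirrors PsqlBackend.date from the deleted file.
  def sql_date(date)
    return nil unless date
    "#{date.year}-#{date.month}-#{date.mday}"
  end

  puts sql_timestamp(Time.local(2005, 1, 31, 13, 38, 34))  # => 2005-01-31 13:38:34
  puts sql_date(Date.new(2005, 1, 31))                     # => 2005-1-31 (no zero padding)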
@@ -1,383 +0,0 @@
- # * George Moschovitis <gm@navel.gr>
- # (c) 2004-2005 Navel, all rights reserved.
- # $Id$
-
- require 'sqlite'
-
- require 'og/backend'
-
- class Og
-
-   # Implements an SQLite powered backend.
-
-   class SqliteBackend < Backend
-
-     # A mapping between Ruby and SQL types.
-
-     TYPEMAP = {
-       Integer => 'integer',
-       Fixnum => 'integer',
-       Float => 'float',
-       String => 'text',
-       Time => 'timestamp',
-       Date => 'date',
-       TrueClass => 'boolean',
-       Object => 'text',
-       Array => 'text',
-       Hash => 'text'
-     }
-
-     # Intitialize the connection to the RDBMS.
-
-     def initialize(config)
-       begin
-         @conn = SQLite::Database.new(config[:database])
-       rescue => ex
-         # gmosx: any idea how to better test this?
-         if ex.to_s =~ /database .* does not exist/i
-           Logger.info "Database '#{config[:database]}' not found!"
-           SqliteBackend.create_db(config[:database], config[:user])
-           retry
-         end
-         raise
-       end
-     end
-
-     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-     # Utilities
-     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
-     # Escape an SQL string
-
-     def self.escape(str)
-       return nil unless str
-       return str.gsub( /'/, "''" )
-     end
-
-     # Convert a ruby time to an sql timestamp.
-     # TODO: Optimize this
-
-     def self.timestamp(time = Time.now)
-       return nil unless time
-       return time.strftime("%Y-%m-%d %H:%M:%S")
-     end
-
-     # Output YYY-mm-dd
-     # TODO: Optimize this
-
-     def self.date(date)
-       return nil unless date
-       return "#{date.year}-#{date.month}-#{date.mday}"
-     end
-
-     # Parse sql datetime
-     # TODO: Optimize this
-
-     def self.parse_timestamp(str)
-       return Time.parse(str)
-     end
-
-     # Input YYYY-mm-dd
-     # TODO: Optimize this
-
-     def self.parse_date(str)
-       return nil unless str
-       return Date.strptime(str)
-     end
-
-     # Return an sql string evaluator for the property.
-     # No need to optimize this, used only to precalculate code.
-     # YAML is used to store general Ruby objects to be more
-     # portable.
-     #
-     # FIXME: add extra handling for float.
-
-     def self.write_prop(p)
-       if p.klass.ancestors.include?(Integer)
-         return "#\{@#{p.symbol} || 'NULL'\}"
-       elsif p.klass.ancestors.include?(Float)
-         return "#\{@#{p.symbol} || 'NULL'\}"
-       elsif p.klass.ancestors.include?(String)
-         return "'#\{SqliteBackend.escape(@#{p.symbol})\}'"
-       elsif p.klass.ancestors.include?(Time)
-         return %|#\{@#{p.symbol} ? "'#\{SqliteBackend.timestamp(@#{p.symbol})\}'" : 'NULL'\}|
-       elsif p.klass.ancestors.include?(Date)
-         return %|#\{@#{p.symbol} ? "'#\{SqliteBackend.date(@#{p.symbol})\}'" : 'NULL'\}|
-       elsif p.klass.ancestors.include?(TrueClass)
-         return "#\{@#{p.symbol} ? \"'t'\" : 'NULL' \}"
-       else
-         return %|#\{@#{p.symbol} ? "'#\{SqliteBackend.escape(@#{p.symbol}.to_yaml)\}'" : "''"\}|
-       end
-     end
-
-     # Return an evaluator for reading the property.
-     # No need to optimize this, used only to precalculate code.
-
-     def self.read_prop(p, idx)
-       if p.klass.ancestors.include?(Integer)
-         return "res.getvalue(tuple, #{idx}).to_i()"
-       elsif p.klass.ancestors.include?(Float)
-         return "res.getvalue(tuple, #{idx}).to_f()"
-       elsif p.klass.ancestors.include?(String)
-         return "res.getvalue(tuple, #{idx})"
-       elsif p.klass.ancestors.include?(Time)
-         return "PsqlBackend.parse_timestamp(res.getvalue(tuple, #{idx}))"
-       elsif p.klass.ancestors.include?(Date)
-         return "PsqlBackend.parse_date(res.getvalue(tuple, #{idx}))"
-       elsif p.klass.ancestors.include?(TrueClass)
-         return %|('t' == res.getvalue(tuple, #{idx}))|
-       else
-         return "YAML::load(res.getvalue(tuple, #{idx}))"
-       end
-     end
-
-     # Returns the code that actually inserts the object into the
-     # database. Returns the code as String.
-
-     def self.insert_code(klass, sql, pre_cb, post_cb)
-       %{
-         #{pre_cb}
-         res = conn.db.query("SELECT nextval('#{klass::DBSEQ}')")
-         @oid = res.getvalue(0, 0).to_i
-         conn.exec "#{sql}"
-         #{post_cb}
-       }
-     end
-
-     # generate the mapping of the database fields to the
-     # object properties.
-
-     def self.calc_field_index(klass, og)
-       res = og.query "SELECT * FROM #{klass::DBTABLE} LIMIT 1"
-       meta = og.managed_classes[klass]
-
-       for field in res.fields
-         meta.field_index[field] = res.fieldnum(field)
-       end
-     end
-
-     # Generate the property for oid
-
-     def self.eval_og_oid(klass)
-       klass.class_eval %{
-         prop_accessor :oid, Fixnum, :sql => "integer PRIMARY KEY"
-       }
-     end
-
-     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-     # Connection methods.
-     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
-     # Create the database.
-
-     def self.create_db(database, user = nil, password = nil)
-       Logger.info "Creating database '#{database}'."
-       `createdb #{database} -U #{user}`
-     end
-
-     # Drop the database.
-
-     def self.drop_db(database, user = nil, password = nil)
-       Logger.info "Dropping database '#{database}'."
-       `dropdb #{database} -U #{user}`
-     end
-
-     # Execute an SQL query and return the result.
-
-     def query(sql)
-       Logger.debug sql if $DBG
-       return @conn.exec(sql)
-     end
-
-     # Execute an SQL query, no result returned.
-
-     def exec(sql)
-       Logger.debug sql if $DBG
-       res = @conn.exec(sql)
-       res.clear()
-     end
-
-     # Execute an SQL query and return the result. Wrapped in a rescue
-     # block.
-
-     def safe_query(sql)
-       Logger.debug sql if $DBG
-       begin
-         return @conn.exec(sql)
-       rescue => ex
-         Logger.error "DB error #{ex}, [#{sql}]"
-         Logger.error ex.backtrace
-         return nil
-       end
-     end
-
-     # Execute an SQL query, no result returned. Wrapped in a rescue
-     # block.
-
-     def safe_exec(sql)
-       Logger.debug sql if $DBG
-       begin
-         res = @conn.exec(sql)
-         res.clear()
-       rescue => ex
-         Logger.error "DB error #{ex}, [#{sql}]"
-         Logger.error ex.backtrace
-       end
-     end
-
-     # Check if it is a valid resultset.
-
-     def valid?(res)
-       return !(res.nil? or 0 == res.num_tuples)
-     end
-
-     # Create the managed object table. The properties of the
-     # object are mapped to the table columns. Additional sql relations
-     # and constrains are created (indicices, sequences, etc).
-
-     def create_table(klass)
-       fields = create_fields(klass, TYPEMAP)
-
-       sql = "CREATE TABLE #{klass::DBTABLE} (#{fields.join(', ')}"
-
-       # Create table constrains
-
-       if klass.__meta and constrains = klass.__meta[:sql_constrain]
-         sql << ", #{constrains.join(', ')}"
-       end
-
-       sql << ") WITHOUT OIDS;"
-
-       # Create indices
-
-       if klass.__meta and indices = klass.__meta[:sql_index]
-         for data in indices
-           idx, options = *data
-           idx = idx.to_s
-           pre_sql, post_sql = options[:pre], options[:post]
-           idxname = idx.gsub(/ /, "").gsub(/,/, "_").gsub(/\(.*\)/, "")
-           sql << " CREATE #{pre_sql} INDEX #{klass::DBTABLE}_#{idxname}_idx #{post_sql} ON #{klass::DBTABLE} (#{idx});"
-         end
-       end
-
-       begin
-         exec(sql)
-         Logger.info "Created table '#{klass::DBTABLE}'."
-       rescue => ex
-         # gmosx: any idea how to better test this?
-         if ex.to_s =~ /relation .* already exists/i
-           Logger.debug "Table already exists" if $DBG
-         else
-           raise
-         end
-       end
-
-       # create the sequence for this table. Even if the table
-       # uses the oids_seq, attempt to create it. This makes
-       # the system more fault tolerant.
-
-       begin
-         exec "CREATE SEQUENCE #{klass::DBSEQ}"
-         Logger.info "Created sequence '#{klass::DBSEQ}'."
-       rescue => ex
-         # gmosx: any idea how to better test this?
-         if ex.to_s =~ /relation .* already exists/i
-           Logger.debug "Sequence already exists" if $DBG
-         else
-           raise
-         end
-       end
-
-       # Create join tables if needed. Join tables are used in
-       # 'many_to_many' relations.
-
-       if klass.__meta and joins = klass.__meta[:sql_join]
-         for data in joins
-           # the class to join to and some options.
-           join_class, options = *data
-
-           # gmosx: dont use DBTABLE here, perhaps the join class
-           # is not managed yet.
-           join_table = "#{self.class.join_table(klass, join_class)}"
-           join_src = "#{self.class.encode(klass)}_oid"
-           join_dst = "#{self.class.encode(join_class)}_oid"
-           begin
-             exec "CREATE TABLE #{join_table} ( key1 integer NOT NULL, key2 integer NOT NULL )"
-             exec "CREATE INDEX #{join_table}_key1_idx ON #{join_table} (key1)"
-             exec "CREATE INDEX #{join_table}_key2_idx ON #{join_table} (key2)"
-           rescue => ex
-             # gmosx: any idea how to better test this?
-             if ex.to_s =~ /relation .* already exists/i
-               Logger.debug "Join table already exists" if $DBG
-             else
-               raise
-             end
-           end
-         end
-       end
-
-       begin
-         exec(sql)
-         Logger.info "Created join table '#{join_table}'."
-       rescue => ex
-         # gmosx: any idea how to better test this?
-         if ex.to_s =~ /relation .* already exists/i
-           Logger.debug "Join table already exists" if $DBG
-         else
-           raise
-         end
-       end
-
-     end
-
-     # Drop the managed object table.
-
-     def drop_table(klass)
-       super
-       exec "DROP SEQUENCE #{klass::DBSEQ}"
-     end
-
-     # Deserialize one row of the resultset.
-
-     def deserialize_one(res, klass)
-       return nil unless valid?(res)
-
-       # gmosx: Managed objects should have no params constructor.
-       entity = klass.new()
-       entity.og_deserialize(res, 0)
-
-       # get_join_fields(res, 0, entity, join_fields) if join_fields
-
-       res.clear()
-       return entity
-     end
-
-     # Deserialize all rows of the resultset.
-
-     def deserialize_all(res, klass)
-       return [] unless valid?(res)
-
-       entities = []
-
-       for tuple in (0...res.num_tuples)
-         entity = klass.new()
-         entity.og_deserialize(res, tuple)
-
-         # get_join_fields(res, tuple, entity, join_fields) if join_fields
-
-         entities << entity
-       end
-
-       res.clear()
-       return entities
-     end
-
-     # Return a single integer value from the resultset.
-
-     def get_int(res, idx = 0)
-       return res.getvalue(0, idx).to_i
-     end
-
-   end
-
- end
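
The removed SqliteBackend is largely a copy of the PsqlBackend above: its read_prop still emits calls to PsqlBackend.parse_timestamp and PsqlBackend.parse_date, and its create_table still issues PostgreSQL-style DDL (WITHOUT OIDS, CREATE SEQUENCE), as do the createdb/dropdb shell-outs in create_db and drop_db. The genuinely SQLite-specific parts are the SQLite::Database connection and the quote-doubling string escape. A self-contained sketch of that escaping rule, mirroring SqliteBackend.escape (the method name below is a local stand-in):

  # Mirrors SqliteBackend.escape from the deleted file: SQLite string literals
  # escape a single quote by doubling it rather than calling PGconn.escape.
  def sqlite_escape(str)
    return nil unless str
    str.gsub(/'/, "''")
  end

  puts sqlite_escape("it's a test")  # => it''s a test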