og 0.9.5 → 0.10.0
- data/ChangeLog +260 -0
- data/LICENSE +1 -0
- data/README.og +6 -5
- data/RELEASES.og +23 -0
- data/Rakefile +102 -92
- data/examples/og/mock_example.rb +0 -2
- data/examples/og/mysql_to_psql.rb +0 -2
- data/examples/og/run.rb +23 -22
- data/install.rb +44 -0
- data/lib/glue/array.rb +6 -10
- data/lib/glue/attribute.rb +0 -3
- data/lib/glue/cache.rb +1 -1
- data/lib/glue/inflector.rb +5 -5
- data/lib/glue/mixins.rb +3 -12
- data/lib/glue/number.rb +1 -1
- data/lib/glue/object.rb +7 -1
- data/lib/glue/property.rb +32 -22
- data/lib/glue/string.rb +13 -75
- data/lib/glue/time.rb +2 -2
- data/lib/glue/validation.rb +7 -11
- data/lib/og.rb +27 -261
- data/lib/og/adapter.rb +352 -0
- data/lib/og/adapters/mysql.rb +304 -0
- data/lib/og/adapters/psql.rb +286 -0
- data/lib/og/adapters/sqlite.rb +262 -0
- data/lib/og/backend.rb +1 -1
- data/lib/og/connection.rb +123 -87
- data/lib/og/database.rb +268 -0
- data/lib/og/meta.rb +23 -22
- data/lib/og/mock.rb +2 -3
- data/test/og/tc_lifecycle.rb +22 -25
- data/test/og/tc_sqlite.rb +87 -0
- data/test/tc_og.rb +61 -42
- metadata +35 -11
- data/lib/glue/macro.rb +0 -56
- data/lib/og/backends/mysql.rb +0 -370
- data/lib/og/backends/psql.rb +0 -386
- data/lib/og/backends/sqlite.rb +0 -383
- data/lib/og/version.rb +0 -9
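For orientation before the two new adapter files below: the adapters generate per-class SQL against a managed class's properties (declared with prop_accessor, as seen in eval_og_oid further down) and its DBTABLE constant, and deserialize rows through the generated og_read method. The following is only a minimal sketch of such a managed class, assuming the prop_accessor form confirmed in this release (property symbol followed by the Ruby type); the class and property names are hypothetical, and the exact mixin mechanics live in lib/glue/property.rb, which is not shown here.

require 'og'

# Hypothetical managed class; Og derives the table name (DBTABLE) and
# the marshaling code from these declarations via the adapters below.
class Article
  prop_accessor :title, String
  prop_accessor :hits, Fixnum
  prop_accessor :created, Time
end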
data/lib/og/adapters/mysql.rb (new file)
@@ -0,0 +1,304 @@
# * George Moschovitis <gm@navel.gr>
# (c) 2004-2005 Navel, all rights reserved.
# $Id: mysql.rb 259 2005-02-15 08:54:54Z gmosx $

require 'mysql'

require 'og/adapter'
require 'og/connection'
require 'glue/attribute'

class Og

  # The MySQL adapter. This adapter communicates with
  # an MySQL rdbms. For extra documentation see
  # lib/og/adapter.rb

  class MysqlAdapter < Adapter

    def initialize
      super
      @typemap.update({TrueClass => 'tinyint'})
    end

    def self.escape(str)
      return nil unless str
      return Mysql.quote(str)
    end

    def self.timestamp(time = Time.now)
      return nil unless time
      return time.strftime("%Y%m%d%H%M%S")
    end

    def self.date(date)
      return nil unless date
      return "#{date.year}-#{date.month}-#{date.mday}"
    end

    def write_prop(p)
      if p.klass.ancestors.include?(Integer)
        return "#\{@#{p.symbol} || 'NULL'\}"
      elsif p.klass.ancestors.include?(Float)
        return "#\{@#{p.symbol} || 'NULL'\}"
      elsif p.klass.ancestors.include?(String)
        return "'#\{#{self.class}.escape(@#{p.symbol})\}'"
      elsif p.klass.ancestors.include?(Time)
        return %|#\{@#{p.symbol} ? "'#\{#{self.class}.timestamp(@#{p.symbol})\}'" : 'NULL'\}|
      elsif p.klass.ancestors.include?(Date)
        return %|#\{@#{p.symbol} ? "'#\{#{self.class}.date(@#{p.symbol})\}'" : 'NULL'\}|
      elsif p.klass.ancestors.include?(TrueClass)
        return "#\{@#{p.symbol} ? 1 : 0 \}"
      else
        return %|#\{@#{p.symbol} ? "'#\{#{self.class}.escape(@#{p.symbol}.to_yaml)\}'" : "''"\}|
      end
    end

    def read_prop(p, idx)
      if p.klass.ancestors.include?(Integer)
        return "res[#{idx}].to_i"
      elsif p.klass.ancestors.include?(Float)
        return "res[#{idx}].to_f"
      elsif p.klass.ancestors.include?(String)
        return "res[#{idx}]"
      elsif p.klass.ancestors.include?(Time)
        return "#{self.class}.parse_timestamp(res[#{idx}])"
      elsif p.klass.ancestors.include?(Date)
        return "#{self.class}.parse_date(res[#{idx}])"
      elsif p.klass.ancestors.include?(TrueClass)
        return "('0' != res[#{idx}])"
      else
        return "YAML::load(res[#{idx}])"
      end
    end

    def create_db(database, user = nil, password = nil)
      `mysqladmin -f --user=#{user} --password=#{password} create #{database}`
      super
    end

    def drop_db(database, user = nil, password = nil)
      `mysqladmin -f --user=#{user} --password=#{password} drop #{database}`
      super
    end

    def insert_code(klass, db, pre_cb, post_cb)
      props = props_for_insert(klass)
      values = props.collect { |p| write_prop(p) }.join(',')

      sql = "INSERT INTO #{klass::DBTABLE} (#{props.collect {|p| p.name}.join(',')}) VALUES (#{values})"

      %{
        #{pre_cb}
        conn.store.query_with_result = false
        conn.store.query "#{sql}"
        @oid = conn.store.insert_id()
        #{post_cb}
      }
    end

    def new_connection(db)
      return Og::MysqlConnection.new(db)
    end

    def calc_field_index(klass, db)
      res = db.query "SELECT * FROM #{klass::DBTABLE} LIMIT 1"
      meta = db.managed_classes[klass]

      for idx in (0...res.num_fields)
        meta.field_index[res.fetch_field.name] = idx
      end

    ensure
      res.free if res
    end

    def create_table(klass, db)
      conn = db.get_connection

      fields = create_fields(klass)

      sql = "CREATE TABLE #{klass::DBTABLE} (#{fields.join(', ')}"

      conn.store.query_with_result = true

      # Create table constrains

      if klass.__meta and constrains = klass.__meta[:sql_constrain]
        sql << ", #{constrains.join(', ')}"
      end

      sql << ');'

      begin
        conn.store.query(sql)
        Logger.info "Created table '#{klass::DBTABLE}'."
      rescue => ex
        if ex.errno == 1050 # table already exists.
          Logger.debug "Table already exists" if $DBG
          return
        else
          raise
        end
      end

      # Create indices

      if klass.__meta and indices = klass.__meta[:sql_index]
        for data in indices
          idx, options = *data
          idx = idx.to_s
          pre_sql, post_sql = options[:pre], options[:post]
          idxname = idx.gsub(/ /, "").gsub(/,/, "_").gsub(/\(.*\)/, "")
          conn.store.query("CREATE #{pre_sql} INDEX #{klass::DBTABLE}_#{idxname}_idx #{post_sql} ON #{klass::DBTABLE} (#{idx})")
        end
      end

      # Create join tables if needed. Join tables are used in
      # 'many_to_many' relations.

      if klass.__meta and joins = klass.__meta[:sql_join]
        for data in joins
          # the class to join to and some options.
          join_class, options = *data

          # gmosx: dont use DBTABLE here, perhaps the join class
          # is not managed yet.
          join_table = "#{self.class.join_table(klass, join_class)}"
          join_src = "#{self.class.encode(klass)}_oid"
          join_dst = "#{self.class.encode(join_class)}_oid"
          begin
            conn.store.query("CREATE TABLE #{join_table} ( key1 integer NOT NULL, key2 integer NOT NULL )")
            conn.store.query("CREATE INDEX #{join_table}_key1_idx ON #{join_table} (key1)")
            conn.store.query("CREATE INDEX #{join_table}_key2_idx ON #{join_table} (key2)")
          rescue => ex
            if ex.errno == 1050 # table already exists.
              Logger.debug "Join table already exists" if $DBG
            else
              raise
            end
          end
        end
      end

    ensure
      db.put_connection
    end

    def eval_og_oid(klass)
      klass.class_eval %{
        prop_accessor :oid, Fixnum, :sql => 'integer AUTO_INCREMENT PRIMARY KEY'
      }
    end
  end

  # The MySQL connection.

  class MysqlConnection < Connection

    def initialize(db)
      super

      config = db.config

      @store = Mysql.connect(
        config[:address] || 'localhost',
        config[:user],
        config[:password],
        config[:database]
      )
    rescue => ex
      if ex.errno == 1049 # database does not exist.
        Logger.info "Database '#{config[:database]}' not found!"
        @db.adapter.create_db(config[:database], config[:user], config[:password])
        retry
      end
      raise
    end

    def close
      @store.close
      super
    end

    def prepare(sql)
      raise 'Not implemented!'
    end

    def query(sql)
      Logger.debug sql if $DBG
      begin
        @store.query_with_result = true
        return @store.query(sql)
      rescue => ex
        Logger.error "DB error #{ex}, [#{sql}]"
        Logger.error ex.backtrace.join("\n")
        return nil
      end
    end

    def exec(sql)
      Logger.debug sql if $DBG
      begin
        @store.query_with_result = false
        @store.query(sql)
      rescue => ex
        Logger.error "DB error #{ex}, [#{sql}]"
        Logger.error ex.backtrace.join("\n")
      end
    end

    def start
      # @store.transaction
    end

    def commit
      # @store.commit
    end

    def rollback
      # @store.rollback
    end

    def valid_res?(res)
      return !(res.nil? or 0 == res.num_rows)
    end

    def read_one(res, klass)
      return nil unless valid_res?(res)

      row = res.fetch_row
      obj = klass.new
      obj.og_read(row)

      res.free
      return obj
    end

    def read_all(res, klass)
      return [] unless valid_res?(res)

      objects = []

      for tuple in (0...res.num_rows)
        row = res.fetch_row

        obj = klass.new
        obj.og_read(row)

        objects << obj
      end

      res.free
      return objects
    end

    def read_int(res, idx = 0)
      val = res.fetch_row[idx].to_i
      res.free
      return val
    end

  end

end
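MysqlConnection pulls everything it needs from db.config, so wiring up the MySQL adapter amounts to handing Og a hash with the keys read above (:address, :user, :password, :database). The following is a minimal sketch only; it assumes the new Og::Database (data/lib/og/database.rb, not shown in this diff) accepts such a hash plus an adapter selector, and the :adapter key and the constructor call are assumptions rather than anything confirmed by this hunk.

require 'og'

# Assumed setup; only the config keys (:address, :user, :password,
# :database) are taken from MysqlConnection#initialize above. The
# :adapter key and Og::Database.new signature are hypothetical.
config = {
  :adapter  => :mysql,        # assumed selector for MysqlAdapter
  :address  => 'localhost',
  :user     => 'root',
  :password => 'secret',
  :database => 'og_test'
}

db = Og::Database.new(config)  # constructor signature assumed

If the database named in :database does not exist, the connection rescues MySQL error 1049, asks the adapter to create it via mysqladmin, and retries, as shown in the rescue clause above.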
data/lib/og/adapters/psql.rb (new file)
@@ -0,0 +1,286 @@
# * George Moschovitis <gm@navel.gr>
# (c) 2004-2005 Navel, all rights reserved.
# $Id: psql.rb 259 2005-02-15 08:54:54Z gmosx $

require 'postgres'

require 'og/adapter'
require 'og/connection'
require 'glue/attribute'

class Og

  # The PostgreSQL adapter. This adapter communicates with
  # an PostgreSQL rdbms. For extra documentation see
  # lib/og/adapter.rb

  class PsqlAdapter < Adapter

    def self.escape(str)
      return nil unless str
      return PGconn.escape(str)
    end

    def self.timestamp(time = Time.now)
      return nil unless time
      return time.strftime("%Y-%m-%d %H:%M:%S")
    end

    def self.date(date)
      return nil unless date
      return "#{date.year}-#{date.month}-#{date.mday}"
    end

    def write_prop(p)
      if p.klass.ancestors.include?(Integer)
        return "#\{@#{p.symbol} || 'NULL'\}"
      elsif p.klass.ancestors.include?(Float)
        return "#\{@#{p.symbol} || 'NULL'\}"
      elsif p.klass.ancestors.include?(String)
        return "'#\{#{self.class}.escape(@#{p.symbol})\}'"
      elsif p.klass.ancestors.include?(Time)
        return %|#\{@#{p.symbol} ? "'#\{#{self.class}.timestamp(@#{p.symbol})\}'" : 'NULL'\}|
      elsif p.klass.ancestors.include?(Date)
        return %|#\{@#{p.symbol} ? "'#\{#{self.class}.date(@#{p.symbol})\}'" : 'NULL'\}|
      elsif p.klass.ancestors.include?(TrueClass)
        return "#\{@#{p.symbol} ? \"'t'\" : 'NULL' \}"
      else
        return %|#\{@#{p.symbol} ? "'#\{#{self.class}.escape(@#{p.symbol}.to_yaml)\}'" : "''"\}|
      end
    end

    def read_prop(p, idx)
      if p.klass.ancestors.include?(Integer)
        return "res.getvalue(tuple, #{idx}).to_i()"
      elsif p.klass.ancestors.include?(Float)
        return "res.getvalue(tuple, #{idx}).to_f()"
      elsif p.klass.ancestors.include?(String)
        return "res.getvalue(tuple, #{idx})"
      elsif p.klass.ancestors.include?(Time)
        return "#{self.class}.parse_timestamp(res.getvalue(tuple, #{idx}))"
      elsif p.klass.ancestors.include?(Date)
        return "#{self.class}.parse_date(res.getvalue(tuple, #{idx}))"
      elsif p.klass.ancestors.include?(TrueClass)
        return %|('t' == res.getvalue(tuple, #{idx}))|
      else
        return "YAML::load(res.getvalue(tuple, #{idx}))"
      end
    end

    def create_db(database, user = nil, password = nil)
      `createdb #{database} -U #{user}`
      super
    end

    def drop_db(database, user = nil, password = nil)
      `dropdb #{database} -U #{user}`
      super
    end

    def insert_code(klass, db, pre_cb, post_cb)
      props = props_for_insert(klass)
      values = props.collect { |p| write_prop(p) }.join(',')

      sql = "INSERT INTO #{klass::DBTABLE} (#{props.collect {|p| p.name}.join(',')}) VALUES (#{values})"

      %{
        #{pre_cb}
        res = conn.store.exec("SELECT nextval('#{klass::DBSEQ}')")
        @oid = res.getvalue(0, 0).to_i
        res.clear
        conn.exec "#{sql}"
        #{post_cb}
      }
    end

    def new_connection(db)
      return Og::PsqlConnection.new(db)
    end

    def calc_field_index(klass, db)
      res = db.query "SELECT * FROM #{klass::DBTABLE} LIMIT 1"
      meta = db.managed_classes[klass]

      for field in res.fields
        meta.field_index[field] = res.fieldnum(field)
      end

    ensure
      res.clear if res
    end

    def create_table(klass, db)
      conn = db.get_connection

      fields = create_fields(klass)

      sql = "CREATE TABLE #{klass::DBTABLE} (#{fields.join(', ')}"

      # Create table constrains

      if klass.__meta and constrains = klass.__meta[:sql_constrain]
        sql << ", #{constrains.join(', ')}"
      end

      sql << ") WITHOUT OIDS;"

      # Create indices

      if klass.__meta and indices = klass.__meta[:sql_index]
        for data in indices
          idx, options = *data
          idx = idx.to_s
          pre_sql, post_sql = options[:pre], options[:post]
          idxname = idx.gsub(/ /, "").gsub(/,/, "_").gsub(/\(.*\)/, "")
          sql << " CREATE #{pre_sql} INDEX #{klass::DBTABLE}_#{idxname}_idx #{post_sql} ON #{klass::DBTABLE} (#{idx});"
        end
      end

      begin
        conn.store.exec(sql).clear
        Logger.info "Created table '#{klass::DBTABLE}'."
      rescue => ex
        # gmosx: any idea how to better test this?
        if ex.to_s =~ /relation .* already exists/i
          Logger.debug 'Table already exists' if $DBG
          return
        else
          raise
        end
      end

      # Create join tables if needed. Join tables are used in
      # 'many_to_many' relations.

      if klass.__meta and joins = klass.__meta[:sql_join]
        for data in joins
          # the class to join to and some options.
          join_class, options = *data

          # gmosx: dont use DBTABLE here, perhaps the join class
          # is not managed yet.
          join_table = "#{self.class.join_table(klass, join_class)}"
          join_src = "#{self.class.encode(klass)}_oid"
          join_dst = "#{self.class.encode(join_class)}_oid"
          begin
            conn.store.exec("CREATE TABLE #{join_table} ( key1 integer NOT NULL, key2 integer NOT NULL )").clear
            conn.store.exec("CREATE INDEX #{join_table}_key1_idx ON #{join_table} (key1)").clear
            conn.store.exec("CREATE INDEX #{join_table}_key2_idx ON #{join_table} (key2)").clear
          rescue => ex
            # gmosx: any idea how to better test this?
            if ex.to_s =~ /relation .* already exists/i
              Logger.debug "Join table already exists" if $DBG
            else
              raise
            end
          end
        end
      end

    ensure
      db.put_connection
    end

    def drop_table(klass)
      super
      exec "DROP SEQUENCE #{klass::DBSEQ}"
    end

    # Generate the property for oid.

    def eval_og_oid(klass)
      klass.class_eval %{
        prop_accessor :oid, Fixnum, :sql => 'serial PRIMARY KEY'
      }
    end

  end

  # The PostgreSQL connection.

  class PsqlConnection < Connection

    def initialize(db)
      super

      config = db.config

      begin
        @store = PGconn.connect(
          config[:address],
          config[:port],
          nil,
          nil,
          config[:database],
          config[:user].to_s,
          config[:password].to_s
        )
      rescue => ex
        # gmosx: any idea how to better test this?
        if ex.to_s =~ /database .* does not exist/i
          Logger.info "Database '#{config[:database]}' not found!"
          @db.adapter.create_db(config[:database], config[:user])
          retry
        end
        raise
      end
    end

    def query(sql)
      Logger.debug sql if $DBG
      begin
        return @store.exec(sql)
      rescue => ex
        Logger.error "DB error #{ex}, [#{sql}]"
        Logger.error ex.backtrace.join("\n")
        return nil
      end
    end

    def exec(sql)
      Logger.debug sql if $DBG
      begin
        @store.exec(sql).clear
      rescue => ex
        Logger.error "DB error #{ex}, [#{sql}]"
        Logger.error ex.backtrace.join("\n")
      end
    end

    def valid_res?(res)
      return !(res.nil? or 0 == res.num_tuples)
    end

    def read_one(res, klass)
      return nil unless valid_res?(res)

      obj = klass.new
      obj.og_read(res, 0)

      res.clear
      return obj
    end

    def read_all(res, klass)
      return [] unless valid_res?(res)
      objects = []

      for tuple in (0...res.num_tuples)
        obj = klass.new
        obj.og_read(res, tuple)
        objects << obj
      end

      res.clear
      return objects
    end

    def read_int(res, idx = 0)
      val = res.getvalue(0, idx).to_i
      res.clear
      return val
    end

  end

end
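Both adapters assemble their (de)serialization methods by interpolating the strings returned from write_prop and read_prop into eval'd method bodies (see insert_code above); the escaped #\{ ... \} sequences in the adapter source survive as live interpolations in the returned fragments and only run once the generated method executes against a concrete object. The sketch below illustrates this for the PostgreSQL adapter with hypothetical :name (String) and :created (Time) properties. The Prop struct is a stand-in for the real property object from glue/property.rb, modeling only the members the generators read, and it assumes Adapter#initialize (from og/adapter.rb, not shown) takes no arguments, as MysqlAdapter's bare super call suggests.

require 'og/adapters/psql'   # needs the ruby postgres bindings installed

# Hypothetical stand-in for an Og property; only :symbol and :klass
# are read by write_prop/read_prop above.
Prop = Struct.new(:symbol, :klass)

adapter = Og::PsqlAdapter.new

adapter.write_prop(Prop.new(:name, String))
#   returns the fragment:  '#{Og::PsqlAdapter.escape(@name)}'

adapter.write_prop(Prop.new(:created, Time))
#   returns the fragment:  #{@created ? "'#{Og::PsqlAdapter.timestamp(@created)}'" : 'NULL'}

adapter.read_prop(Prop.new(:name, String), 2)
#   returns the fragment:  res.getvalue(tuple, 2)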