sequel 0.4.0 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. data/CHANGELOG +4 -0
  2. data/README +1 -6
  3. data/Rakefile +1 -1
  4. data/bin/sequel +1 -3
  5. data/lib/sequel.rb +18 -1
  6. data/lib/sequel/adapters/ado.rb +104 -0
  7. data/lib/sequel/adapters/db2.rb +160 -0
  8. data/lib/sequel/adapters/dbi.rb +130 -0
  9. data/lib/sequel/adapters/informix.rb +78 -0
  10. data/lib/sequel/adapters/mysql.rb +256 -0
  11. data/lib/sequel/adapters/odbc.rb +144 -0
  12. data/lib/sequel/adapters/oracle.rb +109 -0
  13. data/lib/sequel/adapters/postgres.rb +507 -0
  14. data/lib/sequel/adapters/sqlite.rb +186 -0
  15. data/lib/sequel/ado.rb +2 -104
  16. data/lib/{sequel-core → sequel}/array_keys.rb +0 -0
  17. data/lib/{sequel-core → sequel}/connection_pool.rb +0 -0
  18. data/lib/{sequel-core → sequel}/core_ext.rb +0 -0
  19. data/lib/{sequel-core → sequel}/core_sql.rb +0 -0
  20. data/lib/{sequel-core → sequel}/database.rb +10 -20
  21. data/lib/{sequel-core → sequel}/dataset.rb +0 -0
  22. data/lib/{sequel-core → sequel}/dataset/convenience.rb +0 -0
  23. data/lib/{sequel-core → sequel}/dataset/sequelizer.rb +0 -0
  24. data/lib/{sequel-core → sequel}/dataset/sql.rb +0 -0
  25. data/lib/sequel/db2.rb +2 -160
  26. data/lib/sequel/dbi.rb +2 -130
  27. data/lib/{sequel-core → sequel}/error.rb +0 -0
  28. data/lib/sequel/informix.rb +2 -78
  29. data/lib/{sequel-core → sequel}/migration.rb +0 -0
  30. data/lib/{sequel-core → sequel}/model.rb +0 -0
  31. data/lib/{sequel-core → sequel}/model/base.rb +0 -0
  32. data/lib/{sequel-core → sequel}/model/caching.rb +0 -0
  33. data/lib/{sequel-core → sequel}/model/hooks.rb +0 -0
  34. data/lib/{sequel-core → sequel}/model/record.rb +0 -0
  35. data/lib/{sequel-core → sequel}/model/relations.rb +0 -0
  36. data/lib/{sequel-core → sequel}/model/schema.rb +0 -0
  37. data/lib/sequel/mysql.rb +2 -256
  38. data/lib/sequel/odbc.rb +2 -144
  39. data/lib/sequel/oracle.rb +2 -109
  40. data/lib/sequel/postgres.rb +2 -507
  41. data/lib/{sequel-core → sequel}/pretty_table.rb +0 -0
  42. data/lib/{sequel-core → sequel}/schema.rb +0 -0
  43. data/lib/{sequel-core → sequel}/schema/schema_generator.rb +0 -0
  44. data/lib/{sequel-core → sequel}/schema/schema_sql.rb +0 -0
  45. data/lib/sequel/sqlite.rb +2 -186
  46. data/lib/{sequel-core → sequel}/worker.rb +0 -0
  47. data/spec/database_spec.rb +7 -9
  48. metadata +39 -29
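
The bulk of this release is a restructuring: each adapter's implementation moves from data/lib/sequel/<adapter>.rb into data/lib/sequel/adapters/<adapter>.rb, the former lib/sequel-core directory is renamed to lib/sequel, and the old per-adapter files are reduced to two-line deprecation shims. A minimal sketch of the migration the shims ask for (the require paths come from the deprecation warnings in the diffs below; the surrounding comments are illustrative):

    # Before 0.4.1: loading an adapter file directly
    require 'sequel/postgres'

    # From 0.4.1 on: require the main library and let Sequel load the
    # adapter implementation from lib/sequel/adapters/ as needed.
    require 'sequel'
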
data/lib/sequel/odbc.rb CHANGED
@@ -1,144 +1,2 @@
- if !Object.const_defined?('Sequel')
-   require File.join(File.dirname(__FILE__), '../sequel')
- end
-
- require 'odbc'
-
- module Sequel
-   module ODBC
-     class Database < Sequel::Database
-       set_adapter_scheme :odbc
-
-       def connect
-         conn = ::ODBC::connect(@opts[:database], @opts[:user], @opts[:password])
-         conn.autocommit = true
-         conn
-       end
-
-       def disconnect
-         @pool.disconnect {|c| c.disconnect}
-       end
-
-       def dataset(opts = nil)
-         ODBC::Dataset.new(self, opts)
-       end
-
-       def execute(sql)
-         @logger.info(sql) if @logger
-         @pool.hold do |conn|
-           conn.run(sql)
-         end
-       end
-
-       def do(sql)
-         @logger.info(sql) if @logger
-         @pool.hold do |conn|
-           conn.do(sql)
-         end
-       end
-     end
-
-     class Dataset < Sequel::Dataset
-       def literal(v)
-         case v
-         when true: '1'
-         when false: '0'
-         else
-           super
-         end
-       end
-
-       def fetch_rows(sql, &block)
-         @db.synchronize do
-           s = @db.execute select_sql(sql)
-           begin
-             @columns = s.columns(true).map {|c| c.name.to_sym}
-             rows = s.fetch_all
-             rows.each {|row| yield hash_row(row)}
-           ensure
-             s.drop unless s.nil? rescue nil
-           end
-         end
-         self
-       end
-
-       def hash_row(row)
-         hash = {}
-         row.each_with_index do |v, idx|
-           hash[@columns[idx]] = convert_odbc_value(v)
-         end
-         hash
-       end
-
-       def convert_odbc_value(v)
-         # When fetching a result set, the Ruby ODBC driver converts all ODBC
-         # SQL types to an equivalent Ruby type; with the exception of
-         # SQL_TYPE_DATE, SQL_TYPE_TIME and SQL_TYPE_TIMESTAMP.
-         #
-         # The conversions below are consistent with the mappings in
-         # ODBCColumn#mapSqlTypeToGenericType and Column#klass.
-         case v
-         when ::ODBC::TimeStamp
-           DateTime.new(v.year, v.month, v.day, v.hour, v.minute, v.second)
-         when ::ODBC::Time
-           DateTime.now
-           Time.gm(now.year, now.month, now.day, v.hour, v.minute, v.second)
-         when ::ODBC::Date
-           Date.new(v.year, v.month, v.day)
-         else
-           v
-         end
-       end
-
-       def array_tuples_fetch_rows(sql, &block)
-         @db.synchronize do
-           s = @db.execute sql
-           begin
-             @columns = s.columns(true).map {|c| c.name.to_sym}
-             rows = s.fetch_all
-             rows.each {|r| yield array_tuples_make_row(r)}
-           ensure
-             s.drop unless s.nil? rescue nil
-           end
-         end
-         self
-       end
-
-       def array_tuples_make_row(row)
-         row.keys = @columns
-         row.each_with_index do |v, idx|
-           # When fetching a result set, the Ruby ODBC driver converts all ODBC
-           # SQL types to an equivalent Ruby type; with the exception of
-           # SQL_TYPE_DATE, SQL_TYPE_TIME and SQL_TYPE_TIMESTAMP.
-           #
-           # The conversions below are consistent with the mappings in
-           # ODBCColumn#mapSqlTypeToGenericType and Column#klass.
-           case v
-           when ::ODBC::TimeStamp
-             row[idx] = DateTime.new(v.year, v.month, v.day, v.hour, v.minute, v.second)
-           when ::ODBC::Time
-             now = DateTime.now
-             row[idx] = Time.gm(now.year, now.month, now.day, v.hour, v.minute, v.second)
-           when ::ODBC::Date
-             row[idx] = Date.new(v.year, v.month, v.day)
-           end
-         end
-         row
-       end
-
-
-       def insert(*values)
-         @db.do insert_sql(*values)
-       end
-
-       def update(values, opts = nil)
-         @db.do update_sql(values, opts)
-         self
-       end
-
-       def delete(opts = nil)
-         @db.do delete_sql(opts)
-       end
-     end
-   end
- end
+ warn "Requiring 'sequel/odbc' is deprecated. Please modify your code to require 'sequel' instead."
+ require File.join(File.dirname(__FILE__), 'adapters/odbc')
data/lib/sequel/oracle.rb CHANGED
@@ -1,109 +1,2 @@
- if !Object.const_defined?('Sequel')
-   require File.join(File.dirname(__FILE__), '../sequel')
- end
-
- require 'oci8'
-
- module Sequel
-   module Oracle
-     class Database < Sequel::Database
-       set_adapter_scheme :oracle
-
-       # AUTO_INCREMENT = 'IDENTITY(1,1)'.freeze
-       #
-       # def auto_increment_sql
-       #   AUTO_INCREMENT
-       # end
-
-       def connect
-         if @opts[:database]
-           dbname = @opts[:host] ? \
-             "//#{@opts[:host]}/#{@opts[:database]}" : @opts[:database]
-         else
-           dbname = @opts[:host]
-         end
-         conn = OCI8.new(@opts[:user], @opts[:password], dbname, @opts[:privilege])
-         conn.autocommit = true
-         conn.non_blocking = true
-         conn
-       end
-
-       def disconnect
-         @pool.disconnect {|c| c.logoff}
-       end
-
-       def dataset(opts = nil)
-         Oracle::Dataset.new(self, opts)
-       end
-
-       def execute(sql)
-         @logger.info(sql) if @logger
-         @pool.hold {|conn| conn.exec(sql)}
-       end
-
-       alias_method :do, :execute
-     end
-
-     class Dataset < Sequel::Dataset
-       def literal(v)
-         case v
-         when Time: literal(v.iso8601)
-         else
-           super
-         end
-       end
-
-       def fetch_rows(sql, &block)
-         @db.synchronize do
-           cursor = @db.execute sql
-           begin
-             @columns = cursor.get_col_names.map {|c| c.to_sym}
-             while r = cursor.fetch
-               row = {}
-               r.each_with_index {|v, i| row[columns[i]] = v}
-               yield row
-             end
-           ensure
-             cursor.close
-           end
-         end
-         self
-       end
-
-       def hash_row(row)
-         @columns.inject({}) do |m, c|
-           m[c] = row.shift
-           m
-         end
-       end
-
-       def array_tuples_fetch_rows(sql, &block)
-         @db.synchronize do
-           cursor = @db.execute sql
-           begin
-             @columns = cursor.get_col_names.map {|c| c.to_sym}
-             while r = cursor.fetch
-               r.keys = columns
-               yield r
-             end
-           ensure
-             cursor.close
-           end
-         end
-         self
-       end
-
-       def insert(*values)
-         @db.do insert_sql(*values)
-       end
-
-       def update(values, opts = nil)
-         @db.do update_sql(values, opts)
-       end
-
-       def delete(opts = nil)
-         @db.do delete_sql(opts)
-       end
-     end
-   end
- end
+ warn "Requiring 'sequel/oracle' is deprecated. Please modify your code to require 'sequel' instead."
+ require File.join(File.dirname(__FILE__), 'adapters/oracle')
data/lib/sequel/postgres.rb CHANGED
@@ -1,507 +1,2 @@
- if !Object.const_defined?('Sequel')
-   require File.join(File.dirname(__FILE__), '../sequel')
- end
-
- require 'postgres'
-
- class PGconn
-   # the pure-ruby postgres adapter does not have a quote method.
-   unless methods.include?('quote')
-     def self.quote(obj)
-       case obj
-       when true: 't'
-       when false: 'f'
-       when nil: 'NULL'
-       when String: "'#{obj}'"
-       else obj.to_s
-       end
-     end
-   end
-
-   class << self
-     # The postgres gem's string quoting doesn't render string literals properly, which this fixes.
-     #
-     # "a basic string" #=> 'a basic string'
-     # "this\or that" #=> E'this\\or that'
-     #
-     # See <http://www.postgresql.org/docs/8.2/static/sql-syntax-lexical.html> for details.
-     def quote_with_proper_escaping(s)
-       value = quote_without_proper_escaping(s)
-       value = "E#{value}" if value =~ /\\/
-       return value
-     end
-     alias_method :quote_without_proper_escaping, :quote
-     alias_method :quote, :quote_with_proper_escaping
-   end
-
-   def connected?
-     status == PGconn::CONNECTION_OK
-   end
-
-   def execute(sql)
-     begin
-       async_exec(sql)
-     rescue PGError => e
-       unless connected?
-         reset
-         async_exec(sql)
-       else
-         raise e
-       end
-     end
-   end
-
-   attr_accessor :transaction_in_progress
-
-   SELECT_CURRVAL = "SELECT currval('%s')".freeze
-
-   def last_insert_id(table)
-     @table_sequences ||= {}
-     if !@table_sequences.include?(table)
-       pkey_and_seq = pkey_and_sequence(table)
-       if pkey_and_seq
-         @table_sequences[table] = pkey_and_seq[1]
-       end
-     end
-     if seq = @table_sequences[table]
-       r = async_query(SELECT_CURRVAL % seq)
-       return r[0][0].to_i unless r.nil? || r.empty?
-     end
-     nil # primary key sequence not found
-   end
-
-   # Shamelessly appropriated from ActiveRecord's Postgresql adapter.
-
-   SELECT_PK_AND_SERIAL_SEQUENCE = <<-end_sql
-     SELECT attr.attname, name.nspname, seq.relname
-     FROM pg_class seq, pg_attribute attr, pg_depend dep,
-       pg_namespace name, pg_constraint cons
-     WHERE seq.oid = dep.objid
-       AND seq.relnamespace = name.oid
-       AND seq.relkind = 'S'
-       AND attr.attrelid = dep.refobjid
-       AND attr.attnum = dep.refobjsubid
-       AND attr.attrelid = cons.conrelid
-       AND attr.attnum = cons.conkey[1]
-       AND cons.contype = 'p'
-       AND dep.refobjid = '%s'::regclass
-   end_sql
-
-   SELECT_PK_AND_CUSTOM_SEQUENCE = <<-end_sql
-     SELECT attr.attname, name.nspname, split_part(def.adsrc, '''', 2)
-     FROM pg_class t
-     JOIN pg_namespace name ON (t.relnamespace = name.oid)
-     JOIN pg_attribute attr ON (t.oid = attrelid)
-     JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
-     JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
-     WHERE t.oid = '%s'::regclass
-       AND cons.contype = 'p'
-       AND def.adsrc ~* 'nextval'
-   end_sql
-
-   SELECT_PK = <<-end_sql
-     SELECT pg_attribute.attname
-     FROM pg_class, pg_attribute, pg_index
-     WHERE pg_class.oid = pg_attribute.attrelid AND
-       pg_class.oid = pg_index.indrelid AND
-       pg_index.indkey[0] = pg_attribute.attnum AND
-       pg_index.indisprimary = 't' AND
-       pg_class.relname = '%s'
-   end_sql
-
-   def pkey_and_sequence(table)
-     r = async_query(SELECT_PK_AND_SERIAL_SEQUENCE % table)
-     return [r[0].first, r[0].last] unless r.nil? or r.empty?
-
-     r = async_query(SELECT_PK_AND_CUSTOM_SEQUENCE % table)
-     return [r[0].first, r[0].last] unless r.nil? or r.empty?
-   rescue
-     nil
-   end
-
-   def primary_key(table)
-     r = async_query(SELECT_PK % table)
-     pkey = r[0].first unless r.nil? or r.empty?
-     return pkey.to_sym if pkey
-   rescue
-     nil
-   end
- end
-
- class String
-   def postgres_to_bool
-     if self == 't'
-       true
-     elsif self == 'f'
-       false
-     else
-       nil
-     end
-   end
- end
-
- module Sequel
-   module Postgres
-     PG_TYPES = {
-       16 => :postgres_to_bool,
-       20 => :to_i,
-       21 => :to_i,
-       22 => :to_i,
-       23 => :to_i,
-       700 => :to_f,
-       701 => :to_f,
-       1114 => :to_time
-     }
-
-     if PGconn.respond_to?(:translate_results=)
-       PGconn.translate_results = true
-       AUTO_TRANSLATE = true
-     else
-       AUTO_TRANSLATE = false
-     end
-
-     class Database < Sequel::Database
-       set_adapter_scheme :postgres
-
-       def connect
-         conn = PGconn.connect(
-           @opts[:host] || 'localhost',
-           @opts[:port] || 5432,
-           '', '',
-           @opts[:database],
-           @opts[:user],
-           @opts[:password]
-         )
-         if encoding = @opts[:encoding] || @opts[:charset]
-           conn.set_client_encoding(encoding)
-         end
-         conn
-       end
-
-       def disconnect
-         @pool.disconnect {|c| c.close}
-       end
-
-       def dataset(opts = nil)
-         Postgres::Dataset.new(self, opts)
-       end
-
-       RELATION_QUERY = {:from => [:pg_class], :select => [:relname]}.freeze
-       RELATION_FILTER = "(relkind = 'r') AND (relname !~ '^pg|sql')".freeze
-       SYSTEM_TABLE_REGEXP = /^pg|sql/.freeze
-
-       def tables
-         dataset(RELATION_QUERY).filter(RELATION_FILTER).map {|r| r[:relname].to_sym}
-       end
-
-       def locks
-         dataset.from("pg_class, pg_locks").
-           select("pg_class.relname, pg_locks.*").
-           filter("pg_class.relfilenode=pg_locks.relation")
-       end
-
-       def execute(sql)
-         @logger.info(sql) if @logger
-         @pool.hold {|conn| conn.execute(sql)}
-       rescue => e
-         @logger.error(e.message) if @logger
-         raise e
-       end
-
-       def execute_and_forget(sql)
-         @logger.info(sql) if @logger
-         @pool.hold {|conn| conn.execute(sql).clear}
-       rescue => e
-         @logger.error(e.message) if @logger
-         raise e
-       end
-
-       def primary_key_for_table(conn, table)
-         @primary_keys ||= {}
-         @primary_keys[table] ||= conn.primary_key(table)
-       end
-
-       RE_CURRVAL_ERROR = /currval of sequence "(.*)" is not yet defined in this session/.freeze
-
-       def insert_result(conn, table, values)
-         begin
-           result = conn.last_insert_id(table)
-           return result if result
-         rescue PGError => e
-           # An error could occur if the inserted values include a primary key
-           # value, while the primary key is serial.
-           if e.message =~ RE_CURRVAL_ERROR
-             raise SequelError, "Could not return primary key value for the inserted record. Are you specifying a primary key value for a serial primary key?"
-           else
-             raise e
-           end
-         end
-
-         case values
-         when Hash:
-           values[primary_key_for_table(conn, table)]
-         when Array:
-           values.first
-         else
-           nil
-         end
-       end
-
-       def execute_insert(sql, table, values)
-         @logger.info(sql) if @logger
-         @pool.hold do |conn|
-           conn.execute(sql).clear
-           insert_result(conn, table, values)
-         end
-       rescue => e
-         @logger.error(e.message) if @logger
-         raise e
-       end
-
-       def synchronize(&block)
-         @pool.hold(&block)
-       end
-
-       SQL_BEGIN = 'BEGIN'.freeze
-       SQL_COMMIT = 'COMMIT'.freeze
-       SQL_ROLLBACK = 'ROLLBACK'.freeze
-
-       def transaction
-         @pool.hold do |conn|
-           if conn.transaction_in_progress
-             yield conn
-           else
-             @logger.info(SQL_BEGIN) if @logger
-             conn.async_exec(SQL_BEGIN)
-             begin
-               conn.transaction_in_progress = true
-               result = yield
-               begin
-                 @logger.info(SQL_COMMIT) if @logger
-                 conn.async_exec(SQL_COMMIT)
-               rescue => e
-                 @logger.error(e.message) if @logger
-                 raise e
-               end
-               result
-             rescue => e
-               @logger.info(SQL_ROLLBACK) if @logger
-               conn.async_exec(SQL_ROLLBACK) rescue nil
-               raise e unless SequelRollbackError === e
-             ensure
-               conn.transaction_in_progress = nil
-             end
-           end
-         end
-       end
-
-       def serial_primary_key_options
-         {:primary_key => true, :type => :serial}
-       end
-
-       def drop_table_sql(name)
-         "DROP TABLE #{name} CASCADE"
-       end
-     end
-
-     class Dataset < Sequel::Dataset
-       def literal(v)
-         case v
-         when LiteralString: v
-         when String, Fixnum, Float, TrueClass, FalseClass: PGconn.quote(v)
-         else
-           super
-         end
-       end
-
-       def match_expr(l, r)
-         case r
-         when Regexp:
-           r.casefold? ? \
-             "(#{literal(l)} ~* #{literal(r.source)})" :
-             "(#{literal(l)} ~ #{literal(r.source)})"
-         else
-           super
-         end
-       end
-
-       FOR_UPDATE = ' FOR UPDATE'.freeze
-       FOR_SHARE = ' FOR SHARE'.freeze
-
-       def select_sql(opts = nil)
-         row_lock_mode = opts ? opts[:lock] : @opts[:lock]
-         sql = super
-         case row_lock_mode
-         when :update : sql << FOR_UPDATE
-         when :share : sql << FOR_SHARE
-         end
-         sql
-       end
-
-       def for_update
-         clone_merge(:lock => :update)
-       end
-
-       def for_share
-         clone_merge(:lock => :share)
-       end
-
-       EXPLAIN = 'EXPLAIN '.freeze
-       EXPLAIN_ANALYZE = 'EXPLAIN ANALYZE '.freeze
-       QUERY_PLAN = 'QUERY PLAN'.to_sym
-
-       def explain(opts = nil)
-         analysis = []
-         fetch_rows(EXPLAIN + select_sql(opts)) do |r|
-           analysis << r[QUERY_PLAN]
-         end
-         analysis.join("\r\n")
-       end
-
-       def analyze(opts = nil)
-         analysis = []
-         fetch_rows(EXPLAIN_ANALYZE + select_sql(opts)) do |r|
-           analysis << r[QUERY_PLAN]
-         end
-         analysis.join("\r\n")
-       end
-
-       LOCK = 'LOCK TABLE %s IN %s MODE'.freeze
-
-       ACCESS_SHARE = 'ACCESS SHARE'.freeze
-       ROW_SHARE = 'ROW SHARE'.freeze
-       ROW_EXCLUSIVE = 'ROW EXCLUSIVE'.freeze
-       SHARE_UPDATE_EXCLUSIVE = 'SHARE UPDATE EXCLUSIVE'.freeze
-       SHARE = 'SHARE'.freeze
-       SHARE_ROW_EXCLUSIVE = 'SHARE ROW EXCLUSIVE'.freeze
-       EXCLUSIVE = 'EXCLUSIVE'.freeze
-       ACCESS_EXCLUSIVE = 'ACCESS EXCLUSIVE'.freeze
-
-       # Locks the table with the specified mode.
-       def lock(mode, &block)
-         sql = LOCK % [@opts[:from], mode]
-         @db.synchronize do
-           if block # perform locking inside a transaction and yield to block
-             @db.transaction {@db.execute_and_forget(sql); yield}
-           else
-             @db.execute_and_forget(sql) # lock without a transaction
-             self
-           end
-         end
-       end
-
-       def insert(*values)
-         @db.execute_insert(insert_sql(*values), @opts[:from],
-           values.size == 1 ? values.first : values)
-       end
-
-       def update(values, opts = nil)
-         @db.synchronize do
-           result = @db.execute(update_sql(values))
-           begin
-             affected = result.cmdtuples
-           ensure
-             result.clear
-           end
-           affected
-         end
-       end
-
-       def delete(opts = nil)
-         @db.synchronize do
-           result = @db.execute(delete_sql(opts))
-           begin
-             affected = result.cmdtuples
-           ensure
-             result.clear
-           end
-           affected
-         end
-       end
-
-       def fetch_rows(sql, &block)
-         @db.synchronize do
-           result = @db.execute(sql)
-           begin
-             conv = row_converter(result)
-             result.each {|r| yield conv[r]}
-           ensure
-             result.clear
-           end
-         end
-       end
-
-       @@converters_mutex = Mutex.new
-       @@converters = {}
-
-       def row_converter(result)
-         @columns = []; translators = []
-         result.fields.each_with_index do |f, idx|
-           @columns << f.to_sym
-           translators << PG_TYPES[result.type(idx)]
-         end
-
-         # create result signature and memoize the converter
-         sig = [@columns, translators].hash
-         @@converters_mutex.synchronize do
-           @@converters[sig] ||= compile_converter(@columns, translators)
-         end
-       end
-
-       def compile_converter(columns, translators)
-         used_columns = []
-         kvs = []
-         columns.each_with_index do |column, idx|
-           next if used_columns.include?(column)
-           used_columns << column
-
-           if !AUTO_TRANSLATE and translator = translators[idx]
-             kvs << ":\"#{column}\" => ((t = r[#{idx}]) ? t.#{translator} : nil)"
-           else
-             kvs << ":\"#{column}\" => r[#{idx}]"
-           end
-         end
-         eval("lambda {|r| {#{kvs.join(COMMA_SEPARATOR)}}}")
-       end
-
-       def array_tuples_fetch_rows(sql, &block)
-         @db.synchronize do
-           result = @db.execute(sql)
-           begin
-             conv = array_tuples_row_converter(result)
-             result.each {|r| yield conv[r]}
-           ensure
-             result.clear
-           end
-         end
-       end
-
-       @@array_tuples_converters_mutex = Mutex.new
-       @@array_tuples_converters = {}
-
-       def array_tuples_row_converter(result)
-         @columns = []; translators = []
-         result.fields.each_with_index do |f, idx|
-           @columns << f.to_sym
-           translators << PG_TYPES[result.type(idx)]
-         end
-
-         # create result signature and memoize the converter
-         sig = [@columns, translators].hash
-         @@array_tuples_converters_mutex.synchronize do
-           @@array_tuples_converters[sig] ||= array_tuples_compile_converter(@columns, translators)
-         end
-       end
-
-       def array_tuples_compile_converter(columns, translators)
-         tr = []
-         columns.each_with_index do |column, idx|
-           if !AUTO_TRANSLATE and t = translators[idx]
-             tr << "if (v = r[#{idx}]); r[#{idx}] = v.#{t}; end"
-           end
-         end
-         eval("lambda {|r| r.keys = columns; #{tr.join(';')}; r}")
-       end
-     end
-   end
- end
+ warn "Requiring 'sequel/postgres' is deprecated. Please modify your code to require 'sequel' instead."
+ require File.join(File.dirname(__FILE__), 'adapters/postgres')
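
Each adapter class in the relocated files registers its URL scheme with set_adapter_scheme (:odbc, :oracle, :postgres above), which is how a connection string selects the adapter once the main library is required. A minimal usage sketch, assuming a local PostgreSQL database named mydb with illustrative credentials; the URL-based Sequel.connect form and the dataset call are shown as commonly used, but consult the 0.4.1 README for the exact connection forms this release supports:

    require 'sequel'

    # The URL scheme ('postgres') is matched against the scheme registered
    # by Sequel::Postgres::Database via set_adapter_scheme.
    DB = Sequel.connect('postgres://user:password@localhost:5432/mydb')

    # Datasets issue their SQL through the adapter's execute/fetch_rows methods.
    DB[:items].each { |row| p row }
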