ydbi 0.5.8 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86) hide show
  1. checksums.yaml +4 -4
  2. data/.envrc +3 -0
  3. data/.github/workflows/ruby.yml +65 -0
  4. data/.gitignore +19 -0
  5. data/ChangeLog +8 -0
  6. data/Gemfile +5 -0
  7. data/Rakefile +14 -0
  8. data/TODO +44 -0
  9. data/bench/bench.rb +79 -0
  10. data/build/rake_task_lib.rb +186 -0
  11. data/devenv.lock +228 -0
  12. data/devenv.nix +55 -0
  13. data/devenv.yaml +8 -0
  14. data/doc/DBD_SPEC.rdoc +88 -0
  15. data/doc/DBI_SPEC.rdoc +157 -0
  16. data/doc/homepage/contact.html +62 -0
  17. data/doc/homepage/development.html +124 -0
  18. data/doc/homepage/index.html +83 -0
  19. data/doc/homepage/ruby-dbi.css +91 -0
  20. data/lib/dbd/Mysql.rb +137 -0
  21. data/lib/dbd/ODBC.rb +89 -0
  22. data/lib/dbd/Pg.rb +189 -0
  23. data/lib/dbd/SQLite.rb +97 -0
  24. data/lib/dbd/SQLite3.rb +124 -0
  25. data/lib/dbd/mysql/database.rb +405 -0
  26. data/lib/dbd/mysql/driver.rb +125 -0
  27. data/lib/dbd/mysql/statement.rb +188 -0
  28. data/lib/dbd/odbc/database.rb +128 -0
  29. data/lib/dbd/odbc/driver.rb +38 -0
  30. data/lib/dbd/odbc/statement.rb +137 -0
  31. data/lib/dbd/pg/database.rb +508 -0
  32. data/lib/dbd/pg/exec.rb +47 -0
  33. data/lib/dbd/pg/statement.rb +160 -0
  34. data/lib/dbd/pg/tuples.rb +121 -0
  35. data/lib/dbd/pg/type.rb +209 -0
  36. data/lib/dbd/sqlite/database.rb +151 -0
  37. data/lib/dbd/sqlite/statement.rb +125 -0
  38. data/lib/dbd/sqlite3/database.rb +201 -0
  39. data/lib/dbd/sqlite3/statement.rb +78 -0
  40. data/lib/dbi/version.rb +1 -1
  41. data/prototypes/types2.rb +237 -0
  42. data/readme.md +3 -4
  43. data/setup.rb +1585 -0
  44. data/test/DBD_TESTS +50 -0
  45. data/test/TESTING +16 -0
  46. data/test/dbd/general/test_database.rb +206 -0
  47. data/test/dbd/general/test_statement.rb +325 -0
  48. data/test/dbd/general/test_types.rb +295 -0
  49. data/test/dbd/mysql/base.rb +26 -0
  50. data/test/dbd/mysql/down.sql +19 -0
  51. data/test/dbd/mysql/test_blob.rb +18 -0
  52. data/test/dbd/mysql/test_new_methods.rb +7 -0
  53. data/test/dbd/mysql/test_patches.rb +111 -0
  54. data/test/dbd/mysql/up.sql +28 -0
  55. data/test/dbd/odbc/base.rb +30 -0
  56. data/test/dbd/odbc/down.sql +19 -0
  57. data/test/dbd/odbc/test_new_methods.rb +12 -0
  58. data/test/dbd/odbc/test_ping.rb +10 -0
  59. data/test/dbd/odbc/test_statement.rb +44 -0
  60. data/test/dbd/odbc/test_transactions.rb +58 -0
  61. data/test/dbd/odbc/up.sql +33 -0
  62. data/test/dbd/postgresql/base.rb +31 -0
  63. data/test/dbd/postgresql/down.sql +31 -0
  64. data/test/dbd/postgresql/test_arrays.rb +179 -0
  65. data/test/dbd/postgresql/test_async.rb +121 -0
  66. data/test/dbd/postgresql/test_blob.rb +37 -0
  67. data/test/dbd/postgresql/test_bytea.rb +88 -0
  68. data/test/dbd/postgresql/test_ping.rb +10 -0
  69. data/test/dbd/postgresql/test_timestamp.rb +77 -0
  70. data/test/dbd/postgresql/test_transactions.rb +58 -0
  71. data/test/dbd/postgresql/testdbipg.rb +307 -0
  72. data/test/dbd/postgresql/up.sql +60 -0
  73. data/test/dbd/sqlite/base.rb +32 -0
  74. data/test/dbd/sqlite/test_database.rb +30 -0
  75. data/test/dbd/sqlite/test_driver.rb +68 -0
  76. data/test/dbd/sqlite/test_statement.rb +112 -0
  77. data/test/dbd/sqlite/up.sql +25 -0
  78. data/test/dbd/sqlite3/base.rb +32 -0
  79. data/test/dbd/sqlite3/test_database.rb +77 -0
  80. data/test/dbd/sqlite3/test_driver.rb +67 -0
  81. data/test/dbd/sqlite3/test_statement.rb +88 -0
  82. data/test/dbd/sqlite3/up.sql +33 -0
  83. data/test/dbi/tc_dbi.rb +1 -1
  84. data/test/ts_dbd.rb +136 -0
  85. data/ydbi.gemspec +25 -0
  86. metadata +148 -12
@@ -0,0 +1,508 @@
1
+ #
2
+ # See DBI::BaseDatabase.
3
+ #
4
+ class DBI::DBD::Pg::Database < DBI::BaseDatabase
5
+
6
+ # type map
7
+ POSTGRESQL_to_XOPEN = {
8
+ "boolean" => [DBI::SQL_CHAR, 1, nil],
9
+ "character" => [DBI::SQL_CHAR, 1, nil],
10
+ "char" => [DBI::SQL_CHAR, 1, nil],
11
+ "real" => [DBI::SQL_REAL, 4, 6],
12
+ "double precision" => [DBI::SQL_DOUBLE, 8, 15],
13
+ "smallint" => [DBI::SQL_SMALLINT, 2],
14
+ "integer" => [DBI::SQL_INTEGER, 4],
15
+ "bigint" => [DBI::SQL_BIGINT, 8],
16
+ "numeric" => [DBI::SQL_NUMERIC, nil, nil],
17
+ "time with time zone" => [DBI::SQL_TIME, nil, nil],
18
+ "timestamp with time zone" => [DBI::SQL_TIMESTAMP, nil, nil],
19
+ "bit varying" => [DBI::SQL_BINARY, nil, nil], #huh??
20
+ "character varying" => [DBI::SQL_VARCHAR, nil, nil],
21
+ "bit" => [DBI::SQL_TINYINT, nil, nil],
22
+ "text" => [DBI::SQL_VARCHAR, nil, nil],
23
+ nil => [DBI::SQL_OTHER, nil, nil]
24
+ }
25
+
26
+ attr_reader :type_map
27
+
28
+ #
29
+ # See DBI::BaseDatabase#new. These attributes are also supported:
30
+ #
31
+ # * pg_async: boolean or strings 'true' or 'false'. Indicates if we're to
32
+ # use PostgreSQL's asynchronous support. 'NonBlocking' is a synonym for
33
+ # this.
34
+ # * AutoCommit: 'unchained' mode in PostgreSQL. Commits after each
35
+ # statement execution.
36
+ # * pg_client_encoding: set the encoding for the client.
37
+ # * pg_native_binding: Boolean. Indicates whether to use libpq native
38
+ # binding or DBI's inline binding. Defaults to true.
39
+ #
40
#
# See DBI::BaseDatabase#new. Parses the DSN, opens a PG::Connection and
# applies the pg_* / AutoCommit attributes described above.
#
# Raises DBI::InterfaceError when no database name is given or when
# 'pg_async' is a string other than 'true'/'false'; any PG::Error during
# connection setup is wrapped in DBI::OperationalError.
#
def initialize(dbname, user, auth, attr)
  hash = DBI::Utils.parse_params(dbname)

  if hash['dbname'].nil? and hash['database'].nil?
    raise DBI::InterfaceError, "must specify database"
  end

  hash['options'] ||= nil
  hash['tty'] = nil
  hash['host'] ||= 'localhost'
  hash['port'] = hash['port'].to_i unless hash['port'].nil?

  @connection = PG::Connection.new(hash['host'], hash['port'], hash['options'], hash['tty'],
                                   hash['dbname'] || hash['database'], user, auth)

  @exec_method = :exec
  @in_transaction = false

  # set attribute defaults, and look for pg_* attrs in the DSN
  @attr = { 'AutoCommit' => true, 'pg_async' => false }
  hash.each do |key, value|
    @attr[key] = value if key =~ /^pg_./
  end
  @attr.merge!(attr || {})
  if @attr['pg_async'].is_a?(String)
    case @attr['pg_async'].downcase
    when 'true'
      @attr['pg_async'] = true
    when 'false'
      @attr['pg_async'] = false
    else
      # BUGFIX: was a bare `raise InterfaceError`, which does not resolve
      # in this lexical scope -- the constant lives on the DBI module
      # (compare the qualified raise above).
      raise DBI::InterfaceError, %q{'pg_async' must be 'true' or 'false'}
    end
  end

  @attr.each { |k, v| self[k] = v }
  @attr["pg_native_binding"] = true unless @attr.has_key? "pg_native_binding"

  load_type_map

  self['AutoCommit'] = true # Postgres starts in unchained mode (AutoCommit=on) by default

rescue PG::Error => err
  raise DBI::OperationalError.new(err.message)
end
85
+
86
# Close the connection. When AutoCommit is off and a transaction is
# still open, roll it back first so nothing is left half-committed.
def disconnect
  _exec("ROLLBACK") if !@attr['AutoCommit'] && @in_transaction
  @connection.close
end

# Cheap liveness probe: a SELECT 1 round-trip. Returns false on any
# PG::Error; the result set is always cleared.
def ping
  answer = _exec("SELECT 1")
  answer ? answer.num_tuples == 1 : false
rescue PG::Error
  false
ensure
  answer.clear if answer
end
105
+
106
# Name of the database this connection is attached to.
def database_name
  @connection.db
end

# Names of the tables and views visible in the current search path.
def tables
  handle = execute("SELECT c.relname FROM pg_catalog.pg_class c WHERE c.relkind IN ('r','v') and pg_catalog.pg_table_is_visible(c.oid)")
  names = handle.fetch_all.collect { |row| row[0] }
  handle.finish
  names
end
116
+
117
#
# See DBI::BaseDatabase.
#
# These additional attributes are also supported:
#
# * nullable: true if NULL values are allowed in this column.
# * indexed: true if this column is a part of an index.
# * primary: true if this column is a part of a primary key.
# * unique: true if this column is a part of a unique key.
# * default: what will be inserted if this column is left out of an insert query.
# * array_of_type: true if this is actually an array of this type.
#   +dbi_type+ will be the type authority if this is the case.
#
def columns(table)
  # Index membership: which columns are part of a primary/unique index.
  index_sql = %[
    select a.attname, i.indisprimary, i.indisunique
    from pg_class bc inner join pg_index i
    on bc.oid = i.indrelid
    inner join pg_class c
    on c.oid = i.indexrelid
    inner join pg_attribute a
    on c.oid = a.attrelid
    where bc.relname = ?
    and bc.relkind in ('r', 'v')
    and pg_catalog.pg_table_is_visible(bc.oid);
  ]

  # Core column metadata: name, type oid, NOT NULL flag, length, formatted type.
  column_sql = %[
    SELECT a.attname, a.atttypid, a.attnotnull, a.attlen, format_type(a.atttypid, a.atttypmod)
    FROM pg_catalog.pg_class c, pg_attribute a, pg_type t
    WHERE a.attnum > 0 AND a.attrelid = c.oid AND a.atttypid = t.oid AND c.relname = ?
    AND c.relkind IN ('r','v')
    AND pg_catalog.pg_table_is_visible(c.oid)
  ]

  # Default-value expressions. pg_attrdef.adsrc was removed in PostgreSQL 12;
  # pg_get_expr(adbin, adrelid) is the documented replacement.
  default_sql = %[
    SELECT pg_get_expr(pg_attrdef.adbin, pg_attrdef.adrelid), pg_attribute.attname
    FROM pg_attribute, pg_attrdef, pg_catalog.pg_class
    WHERE pg_catalog.pg_class.relname = ? AND
    pg_attribute.attrelid = pg_catalog.pg_class.oid AND
    pg_attrdef.adrelid = pg_catalog.pg_class.oid AND
    pg_attrdef.adnum = pg_attribute.attnum
    AND pg_catalog.pg_class.relkind IN ('r','v')
    AND pg_catalog.pg_table_is_visible(pg_catalog.pg_class.oid)
  ]

  dbh = DBI::DatabaseHandle.new(self)
  dbh.driver_name = DBI::DBD::Pg.driver_name
  indices = {}
  default_values = {}

  dbh.select_all(default_sql, table) do |default, name|
    default_values[name] = default
  end

  dbh.select_all(index_sql, table) do |name, primary, unique|
    indices[name] = [primary, unique]
  end

  ret = []
  dbh.execute(column_sql, table) do |sth|
    ret = sth.collect do |row|
      name, _pg_type_oid, notnullable, len, ftype = row

      indexed = false
      primary = nil
      unique = nil
      if indices.has_key?(name)
        indexed = true
        primary, unique = indices[name]
      end

      typeinfo = DBI::DBD::Pg.parse_type(ftype)
      typeinfo[:size] ||= len

      # Fall back to the SQL_OTHER entry for types without an X/Open mapping.
      xopen = POSTGRESQL_to_XOPEN[typeinfo[:type]] || POSTGRESQL_to_XOPEN[nil]
      sql_type = xopen[0]

      col = {}
      col['name'] = name
      col['sql_type'] = sql_type
      col['type_name'] = typeinfo[:type]
      col['nullable'] = !notnullable
      col['indexed'] = indexed
      col['primary'] = primary
      col['unique'] = unique
      col['precision'] = typeinfo[:size]
      col['scale'] = typeinfo[:decimal]
      col['default'] = default_values[name]
      col['array_of_type'] = typeinfo[:array]

      if typeinfo[:array]
        col['dbi_type'] =
          DBI::DBD::Pg::Type::Array.new(
            DBI::TypeUtil.type_name_to_module(typeinfo[:type])
          )
      end
      col
    end # collect
  end # execute

  ret
end
231
+
232
# See DBI::BaseDatabase#prepare.
def prepare(statement)
  DBI::DBD::Pg::Statement.new(self, statement)
end

# Attribute reader. 'pg_client_encoding' is read live from the
# connection, 'NonBlocking' aliases 'pg_async', everything else comes
# straight from the attribute hash.
def [](attr)
  case attr
  when 'pg_client_encoding' then @connection.client_encoding
  when 'NonBlocking'        then @attr['pg_async']
  else                           @attr[attr]
  end
end
246
+
247
#
# Attribute writer. Handles the AutoCommit transition (committing any
# open transaction when AutoCommit is switched on), swaps the executor
# for pg_async / NonBlocking, and forwards pg_client_encoding to the
# connection. Unknown pg_* options raise DBI::NotSupportedError;
# options that appear to belong to another driver are quietly ignored.
#
def []=(attr, value)
  case attr
  when 'AutoCommit'
    if @attr['AutoCommit'] != value then
      if value # turn AutoCommit ON
        if @in_transaction
          # TODO: commit outstanding transactions?
          _exec("COMMIT")
          @in_transaction = false
        end
      else # turn AutoCommit OFF
        @in_transaction = false
      end
    end
    # value is assigned below
  when 'NonBlocking', 'pg_async'
    # booleanize input
    value = value ? true : false
    @pgexec = (value ? DBI::DBD::Pg::PgExecutorAsync : DBI::DBD::Pg::PgExecutor).new(@connection)
    # value is assigned to @attr below
  when 'pg_client_encoding'
    @connection.set_client_encoding(value)
  when 'pg_native_binding'
    @attr[attr] = value
  else
    # BUGFIX: was `attr != /_/`, which compares a String against a Regexp
    # and is therefore always true -- the quiet-ignore branch below was
    # dead code and every foreign option raised. `!~` performs the
    # intended "does not match" test.
    if attr =~ /^pg_/ or attr !~ /_/
      raise DBI::NotSupportedError, "Option '#{attr}' not supported"
    else # option for some other driver - quietly ignore
      return
    end
  end
  @attr[attr] = value
end
280
+
281
# Commit the currently open transaction; does nothing when no
# transaction is in progress.
def commit
  return unless @in_transaction # TODO: warn when there is nothing to commit?
  _exec("COMMIT")
  @in_transaction = false
end

# Roll back the currently open transaction; does nothing when no
# transaction is in progress.
def rollback
  return unless @in_transaction # TODO: warn when there is nothing to roll back?
  _exec("ROLLBACK")
  @in_transaction = false
end

#
# Are we in a transaction?
#
def in_transaction?
  @in_transaction
end

#
# Forcibly initializes a new transaction.
#
def start_transaction
  _exec("BEGIN")
  @in_transaction = true
end
313
+
314
# Run +sql+ through the configured executor (synchronous or async,
# depending on the pg_async attribute).
def _exec(sql, *parameters)
  @pgexec.exec(sql, parameters)
end

# Execute a previously prepared server-side statement by name.
def _exec_prepared(stmt_name, *parameters)
  @pgexec.exec_prepared(stmt_name, parameters)
end

# Prepare +sql+ on the server under +stmt_name+.
def _prepare(stmt_name, sql)
  @pgexec.prepare(stmt_name, sql)
end
325
+
326
+ private
327
+
328
# Map a pg_type.typname onto the DBI::Type module used for output
# conversion. Returns nil for names with no known mapping.
def parse_type_name(type_name)
  {
    'bool'        => DBI::Type::Boolean,
    'int8'        => DBI::Type::Integer,
    'int4'        => DBI::Type::Integer,
    'int2'        => DBI::Type::Integer,
    'varchar'     => DBI::Type::Varchar,
    'float4'      => DBI::Type::Float,
    'float8'      => DBI::Type::Float,
    'time'        => DBI::Type::Timestamp,
    'timetz'      => DBI::Type::Timestamp,
    'timestamp'   => DBI::Type::Timestamp,
    'timestamptz' => DBI::Type::Timestamp,
    'date'        => DBI::Type::Timestamp,
    'decimal'     => DBI::Type::Decimal,
    'numeric'     => DBI::Type::Decimal,
    'bytea'       => DBI::DBD::Pg::Type::ByteA,
    'enum'        => DBI::Type::Varchar
  }[type_name]
end
342
+
343
#
# Gathers the types from the postgres database and attempts to
# locate matching DBI::Type objects for them.
#
def load_type_map
  @type_map = Hash.new

  result = _exec("SELECT oid, typname, typelem FROM pg_type WHERE typtype IN ('b', 'e')")

  result.each do |row|
    mapped_type = parse_type_name(row["typname"])
    @type_map[row["oid"].to_i] =
      {
        "type_name" => row["typname"],
        "dbi_type" =>
          if mapped_type
            mapped_type
          elsif row["typname"] =~ /^_/ and row["typelem"].to_i > 0 then
            # arrays are special and have a subtype, held as an oid in
            # the "typelem" field. Since we may not have a mapping for
            # the subtype yet, defer by storing the typelem integer as a
            # base type inside a constructed Type::Array object. These
            # array objects are reconstructed after all rows have been
            # processed, when the oid -> type mapping is complete.
            DBI::DBD::Pg::Type::Array.new(row["typelem"].to_i)
          else
            DBI::Type::Varchar
          end
      }
  end

  # additional conversions
  @type_map[705]  ||= DBI::Type::Varchar   # select 'hallo'
  @type_map[1114] ||= DBI::Type::Timestamp # TIMESTAMP WITHOUT TIME ZONE

  # remap array subtypes
  @type_map.each_key do |key|
    # NOTE(review): every key in @type_map is an Integer oid (see the
    # .to_i above), so this `unless` guard appears to make the remap a
    # no-op. It is preserved as-is because "fixing" it would also run the
    # body on the bare-module fallback entries (705/1114 above), which
    # do not respond to ["dbi_type"] -- confirm intent upstream.
    if @type_map[key]["dbi_type"].class == DBI::DBD::Pg::Type::Array
      oid = @type_map[key]["dbi_type"].base_type
      if @type_map[oid]
        @type_map[key]["dbi_type"] = DBI::DBD::Pg::Type::Array.new(@type_map[oid]["dbi_type"])
      else
        # punt
        @type_map[key] = DBI::DBD::Pg::Type::Array.new(DBI::Type::Varchar)
      end
    end unless key.is_a?(Integer)
  end
end
395
+
396
+ public
397
+
398
# return the postgresql types for this session. returns an oid -> type name mapping.
def __types(force = nil)
  load_type_map if !@type_map or force
  @type_map
end

# deprecated.
def __types_old
  mapping = {}
  _exec('select oid, typname from pg_type').each do |row|
    mapping[row["oid"].to_i] = row["typname"]
  end
  mapping
end
414
+
415
#
# Import a BLOB from a file; returns the new large object's oid.
# All blob helpers open a transaction if one is not already running,
# since PostgreSQL large-object operations require one.
#
def __blob_import(file)
  start_transaction unless @in_transaction
  @connection.lo_import(file)
rescue PG::Error => err
  raise DBI::DatabaseError.new(err.message)
end

#
# Export a BLOB to a file.
#
def __blob_export(oid, file)
  start_transaction unless @in_transaction
  @connection.lo_export(oid.to_i, file)
rescue PG::Error => err
  raise DBI::DatabaseError.new(err.message)
end

#
# Create a BLOB.
#
def __blob_create(mode = PG::Connection::INV_READ)
  start_transaction unless @in_transaction
  @connection.lo_creat(mode)
rescue PG::Error => err
  raise DBI::DatabaseError.new(err.message)
end

#
# Open a BLOB.
#
def __blob_open(oid, mode = PG::Connection::INV_READ)
  start_transaction unless @in_transaction
  @connection.lo_open(oid.to_i, mode)
rescue PG::Error => err
  raise DBI::DatabaseError.new(err.message)
end

#
# Remove a BLOB.
#
def __blob_unlink(oid)
  start_transaction unless @in_transaction
  @connection.lo_unlink(oid.to_i)
rescue PG::Error => err
  raise DBI::DatabaseError.new(err.message)
end

#
# Read a BLOB and return the data. Reads the whole object when
# +length+ is nil.
#
def __blob_read(oid, length)
  descriptor = @connection.lo_open(oid.to_i, PG::Connection::INV_READ)

  data =
    if length.nil?
      @connection.lo_read(descriptor)
    else
      @connection.lo_read(descriptor, length)
    end

  # FIXME it doesn't like to close here either.
  # @connection.lo_close(descriptor)
  data
rescue PG::Error => err
  raise DBI::DatabaseError.new(err.message)
end

#
# Write the value to the BLOB.
#
def __blob_write(oid, value)
  start_transaction unless @in_transaction
  descriptor = @connection.lo_open(oid.to_i, PG::Connection::INV_WRITE)
  written = @connection.lo_write(descriptor, value)
  # FIXME not sure why PG doesn't like to close here -- seems to be
  # working but we should make sure it's not eating file descriptors
  # up before release.
  # @connection.lo_close(descriptor)
  written
rescue PG::Error => err
  raise DBI::DatabaseError.new(err.message)
end

#
# Install a notice processor callback on the underlying connection.
#
def __set_notice_processor(proc)
  @connection.set_notice_processor proc
rescue PG::Error => err
  raise DBI::DatabaseError.new(err.message)
end
508
+ end # Database
@@ -0,0 +1,47 @@
1
module DBI::DBD::Pg
  ################################################################
  # Convenience adaptor hiding the details of command-execution API
  # calls. See the PgExecutorAsync subclass for the async variant.
  class PgExecutor
    def initialize(pg_conn)
      @pg_conn = pg_conn
    end

    # Run +sql+ with optional bound +parameters+.
    def exec(sql, parameters = nil)
      @pg_conn.exec(sql, parameters)
    end

    # Execute a named server-side prepared statement.
    def exec_prepared(stmt_name, parameters = nil)
      @pg_conn.exec_prepared(stmt_name, parameters)
    end

    # Prepare +sql+ under +stmt_name+.
    def prepare(stmt_name, sql)
      @pg_conn.prepare(stmt_name, sql)
    end
  end

  # Asynchronous implementation of PgExecutor, useful for 'green
  # thread' implementations (e.g., MRI <= 1.8.x) which would otherwise
  # suspend other threads while awaiting query results.
  #--
  # FIXME: PQsetnonblocking + select/poll would make the exec*
  #        methods truly 'async', though this is rarely needed in
  #        practice.
  class PgExecutorAsync < PgExecutor
    def exec(sql, parameters = nil)
      @pg_conn.async_exec(sql, parameters)
    end

    def exec_prepared(stmt_name, parameters = nil)
      @pg_conn.send_query_prepared(stmt_name, parameters)
      @pg_conn.block
      @pg_conn.get_last_result
    end

    def prepare(stmt_name, sql)
      @pg_conn.send_prepare(stmt_name, sql)
      @pg_conn.block
      @pg_conn.get_last_result
    end
  end
end
@@ -0,0 +1,160 @@
1
#
# See DBI::BaseStatement, and DBI::DBD::Pg::Tuples.
#
#--
# Peculiar Statement responsibilities:
# - Translate dbi params (?, ?, ...) to Pg params ($1, $2, ...)
# - Translate DBI::Binary objects to Pg large objects (lo_*)

class DBI::DBD::Pg::Statement < DBI::BaseStatement

  # Prefix for server-side prepared statement names.
  PG_STMT_NAME_PREFIX = 'ruby-dbi:Pg:'

  def initialize(db, sql)
    super(db)
    @db = db
    @sql = sql
    # object_id plus wall-clock time keeps the server-side name unique.
    @stmt_name = PG_STMT_NAME_PREFIX + self.object_id.to_s + Time.now.to_f.to_s
    @result = nil
    @bindvars = []
    @prepared = false
  rescue PG::Error => err
    raise DBI::ProgrammingError.new(err.message)
  end

  # Bind +value+ at the 1-based parameter +index+.
  def bind_param(index, value, options)
    @bindvars[index - 1] = value
  end

  #
  # See DBI::BaseDatabase#execute.
  #
  # This method will make use of PostgreSQL's native BLOB support if
  # DBI::Binary objects are passed in.
  #
  def execute
    # replace DBI::Binary object by oid returned by lo_import
    @bindvars.collect! do |var|
      if var.is_a?(DBI::Binary)
        oid = @db.__blob_create(PG::Connection::INV_WRITE)
        @db.__blob_write(oid, var.to_s)
        oid
      else
        var
      end
    end

    internal_prepare

    unless @db['AutoCommit']
      # if not SQL.query?(boundsql) and not @db['AutoCommit'] then
      @db.start_transaction unless @db.in_transaction?
    end

    pg_result =
      if @db["pg_native_binding"]
        @db._exec_prepared(@stmt_name, *@bindvars)
      else
        @db._exec_prepared(@stmt_name)
      end

    @result = DBI::DBD::Pg::Tuples.new(@db, pg_result)
  rescue PG::Error, RuntimeError => err
    raise DBI::ProgrammingError.new(err.message)
  end

  def fetch
    @result.fetchrow
  end

  def fetch_scroll(direction, offset)
    @result.fetch_scroll(direction, offset)
  end

  # Release the result set and the server-side prepared statement.
  def finish
    internal_finish
    @result = nil
    @db = nil
  end

  #
  # See DBI::DBD::Pg::Tuples#column_info.
  #
  def column_info
    @result.column_info
  end

  # Rows affected by the last execute, or nil if never executed.
  def rows
    @result ? @result.rows_affected : nil
  end

  #
  # Attributes:
  #
  # If +pg_row_count+ is requested and the statement has already executed,
  # postgres will return what it believes is the row count.
  #
  def [](attr)
    if attr == 'pg_row_count'
      @result ? @result.row_count : nil
    else
      # NOTE(review): @attr is never assigned in this class; presumably
      # the DBI::BaseStatement constructor sets it -- confirm.
      @attr[attr]
    end
  end

  private

  #
  # A native binding helper.
  #
  class DummyQuoter
    # dummy to substitute ?-style parameter markers by :1 :2 etc.
    def quote(str)
      str
    end
  end

  # finish the statement at a lower level
  def internal_finish
    @result.finish if @result
    @db._exec("DEALLOCATE \"#{@stmt_name}\"") if @prepared rescue nil
  end

  # prepare the statement at a lower level.
  def internal_prepare
    if @db["pg_native_binding"]
      unless @prepared
        @stmt = @db._prepare(@stmt_name, translate_param_markers(@sql))
      end
    else
      internal_finish
      @stmt = @db._prepare(@stmt_name, DBI::SQL::PreparedStatement.new(DBI::DBD::Pg, @sql).bind(@bindvars))
    end
    @prepared = true
  end

  # Prepare the given SQL statement, returning its PostgreSQL string
  # handle. ?-style parameters are translated to $1, $2, etc.
  #--
  # TESTME do ?::TYPE qualifers work?
  # FIXME: DBI ought to supply a generic param converter, e.g.:
  #    sql = DBI::Utils::convert_placeholders(sql) do |i|
  #      '$' + i.to_s
  #    end
  def translate_param_markers(sql)
    translator = DBI::SQL::PreparedStatement.new(DummyQuoter.new, sql)
    unless translator.unbound.empty?
      markers = (1..translator.unbound.size).collect { |i| "$#{i}" }
      sql = translator.bind(markers)
    end
    sql
  end
end # Statement