ydbi 0.5.0 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. checksums.yaml +4 -4
  2. data/ChangeLog +4 -0
  3. data/build/Rakefile.dbi.rb +4 -4
  4. data/lib/dbi/version.rb +1 -1
  5. data/test/dbi/tc_dbi.rb +1 -1
  6. metadata +9 -124
  7. data/.gitignore +0 -6
  8. data/Gemfile +0 -4
  9. data/Rakefile +0 -8
  10. data/TODO +0 -44
  11. data/bench/bench.rb +0 -79
  12. data/build/rake_task_lib.rb +0 -187
  13. data/doc/DBD_SPEC.rdoc +0 -88
  14. data/doc/DBI_SPEC.rdoc +0 -157
  15. data/doc/homepage/contact.html +0 -62
  16. data/doc/homepage/development.html +0 -124
  17. data/doc/homepage/index.html +0 -83
  18. data/doc/homepage/ruby-dbi.css +0 -91
  19. data/lib/dbd/Mysql.rb +0 -137
  20. data/lib/dbd/ODBC.rb +0 -89
  21. data/lib/dbd/Pg.rb +0 -188
  22. data/lib/dbd/SQLite.rb +0 -97
  23. data/lib/dbd/SQLite3.rb +0 -124
  24. data/lib/dbd/mysql/database.rb +0 -405
  25. data/lib/dbd/mysql/driver.rb +0 -125
  26. data/lib/dbd/mysql/statement.rb +0 -188
  27. data/lib/dbd/odbc/database.rb +0 -128
  28. data/lib/dbd/odbc/driver.rb +0 -38
  29. data/lib/dbd/odbc/statement.rb +0 -137
  30. data/lib/dbd/pg/database.rb +0 -516
  31. data/lib/dbd/pg/exec.rb +0 -47
  32. data/lib/dbd/pg/statement.rb +0 -160
  33. data/lib/dbd/pg/tuples.rb +0 -121
  34. data/lib/dbd/pg/type.rb +0 -209
  35. data/lib/dbd/sqlite/database.rb +0 -151
  36. data/lib/dbd/sqlite/statement.rb +0 -125
  37. data/lib/dbd/sqlite3/database.rb +0 -201
  38. data/lib/dbd/sqlite3/statement.rb +0 -78
  39. data/prototypes/types2.rb +0 -237
  40. data/setup.rb +0 -1585
  41. data/test/DBD_TESTS +0 -50
  42. data/test/TESTING +0 -16
  43. data/test/dbd/general/test_database.rb +0 -206
  44. data/test/dbd/general/test_statement.rb +0 -326
  45. data/test/dbd/general/test_types.rb +0 -296
  46. data/test/dbd/mysql/base.rb +0 -26
  47. data/test/dbd/mysql/down.sql +0 -19
  48. data/test/dbd/mysql/test_blob.rb +0 -18
  49. data/test/dbd/mysql/test_new_methods.rb +0 -7
  50. data/test/dbd/mysql/test_patches.rb +0 -111
  51. data/test/dbd/mysql/up.sql +0 -28
  52. data/test/dbd/odbc/base.rb +0 -30
  53. data/test/dbd/odbc/down.sql +0 -19
  54. data/test/dbd/odbc/test_new_methods.rb +0 -12
  55. data/test/dbd/odbc/test_ping.rb +0 -10
  56. data/test/dbd/odbc/test_statement.rb +0 -44
  57. data/test/dbd/odbc/test_transactions.rb +0 -58
  58. data/test/dbd/odbc/up.sql +0 -33
  59. data/test/dbd/postgresql/base.rb +0 -31
  60. data/test/dbd/postgresql/down.sql +0 -31
  61. data/test/dbd/postgresql/test_arrays.rb +0 -179
  62. data/test/dbd/postgresql/test_async.rb +0 -121
  63. data/test/dbd/postgresql/test_blob.rb +0 -36
  64. data/test/dbd/postgresql/test_bytea.rb +0 -87
  65. data/test/dbd/postgresql/test_ping.rb +0 -10
  66. data/test/dbd/postgresql/test_timestamp.rb +0 -77
  67. data/test/dbd/postgresql/test_transactions.rb +0 -58
  68. data/test/dbd/postgresql/testdbipg.rb +0 -307
  69. data/test/dbd/postgresql/up.sql +0 -60
  70. data/test/dbd/sqlite/base.rb +0 -32
  71. data/test/dbd/sqlite/test_database.rb +0 -30
  72. data/test/dbd/sqlite/test_driver.rb +0 -68
  73. data/test/dbd/sqlite/test_statement.rb +0 -112
  74. data/test/dbd/sqlite/up.sql +0 -25
  75. data/test/dbd/sqlite3/base.rb +0 -32
  76. data/test/dbd/sqlite3/test_database.rb +0 -77
  77. data/test/dbd/sqlite3/test_driver.rb +0 -67
  78. data/test/dbd/sqlite3/test_statement.rb +0 -88
  79. data/test/dbd/sqlite3/up.sql +0 -33
  80. data/test/ts_dbd.rb +0 -131
  81. data/ydbi.gemspec +0 -24
data/lib/dbd/pg/database.rb DELETED
@@ -1,516 +0,0 @@
- #
- # See DBI::BaseDatabase.
- #
- class DBI::DBD::Pg::Database < DBI::BaseDatabase
-
-     # type map
-     POSTGRESQL_to_XOPEN = {
-         "boolean" => [DBI::SQL_CHAR, 1, nil],
-         "character" => [DBI::SQL_CHAR, 1, nil],
-         "char" => [DBI::SQL_CHAR, 1, nil],
-         "real" => [DBI::SQL_REAL, 4, 6],
-         "double precision" => [DBI::SQL_DOUBLE, 8, 15],
-         "smallint" => [DBI::SQL_SMALLINT, 2],
-         "integer" => [DBI::SQL_INTEGER, 4],
-         "bigint" => [DBI::SQL_BIGINT, 8],
-         "numeric" => [DBI::SQL_NUMERIC, nil, nil],
-         "time with time zone" => [DBI::SQL_TIME, nil, nil],
-         "timestamp with time zone" => [DBI::SQL_TIMESTAMP, nil, nil],
-         "bit varying" => [DBI::SQL_BINARY, nil, nil], #huh??
-         "character varying" => [DBI::SQL_VARCHAR, nil, nil],
-         "bit" => [DBI::SQL_TINYINT, nil, nil],
-         "text" => [DBI::SQL_VARCHAR, nil, nil],
-         nil => [DBI::SQL_OTHER, nil, nil]
-     }
-
-     attr_reader :type_map
-
-     #
-     # See DBI::BaseDatabase#new. These attributes are also supported:
-     #
-     # * pg_async: boolean or strings 'true' or 'false'. Indicates if we're to
-     #   use PostgreSQL's asyncrohonous support. 'NonBlocking' is a synonym for
-     #   this.
-     # * AutoCommit: 'unchained' mode in PostgreSQL. Commits after each
-     #   statement execution.
-     # * pg_client_encoding: set the encoding for the client.
-     # * pg_native_binding: Boolean. Indicates whether to use libpq native
-     #   binding or DBI's inline binding. Defaults to true.
-     #
-     def initialize(dbname, user, auth, attr)
-         hash = DBI::Utils.parse_params(dbname)
-
-         if hash['dbname'].nil? and hash['database'].nil?
-             raise DBI::InterfaceError, "must specify database"
-         end
-
-         hash['options'] ||= nil
-         hash['tty'] ||= ''
-         hash['port'] = hash['port'].to_i unless hash['port'].nil?
-
-         @connection = PGconn.new(hash['host'], hash['port'], hash['options'], hash['tty'],
-                                  hash['dbname'] || hash['database'], user, auth)
-
-         @exec_method = :exec
-         @in_transaction = false
-
-         # set attribute defaults, and look for pg_* attrs in the DSN
-         @attr = { 'AutoCommit' => true, 'pg_async' => false }
-         hash.each do |key, value|
-             @attr[key] = value if key =~ /^pg_./
-         end
-         @attr.merge!(attr || {})
-         if @attr['pg_async'].is_a?(String)
-             case @attr['pg_async'].downcase
-             when 'true'
-                 @attr['pg_async'] = true
-             when 'false'
-                 @attr['pg_async'] = false
-             else
-                 raise InterfaceError, %q{'pg_async' must be 'true' or 'false'}
-             end
-         end
-
-         @attr.each { |k,v| self[k] = v}
-         @attr["pg_native_binding"] = true unless @attr.has_key? "pg_native_binding"
-
-         load_type_map
-
-         self['AutoCommit'] = true # Postgres starts in unchained mode (AutoCommit=on) by default
-
-     rescue PGError => err
-         raise DBI::OperationalError.new(err.message)
-     end
-
-     def disconnect
-         if not @attr['AutoCommit'] and @in_transaction
-             _exec("ROLLBACK") # rollback outstanding transactions
-         end
-         @connection.close
-     end
-
-     def ping
-         answer = _exec("SELECT 1")
-         if answer
-             return answer.num_tuples == 1
-         else
-             return false
-         end
-     rescue PGError
-         return false
-     ensure
-         answer.clear if answer
-     end
-
-     def database_name
-         @connection.db
-     end
-
-     def tables
-         stmt = execute("SELECT c.relname FROM pg_catalog.pg_class c WHERE c.relkind IN ('r','v') and pg_catalog.pg_table_is_visible(c.oid)")
-         res = stmt.fetch_all.collect {|row| row[0]}
-         stmt.finish
-         res
-     end
-
-     #
-     # See DBI::BaseDatabase.
-     #
-     # These additional attributes are also supported:
-     #
-     # * nullable: true if NULL values are allowed in this column.
-     # * indexed: true if this column is a part of an index.
-     # * primary: true if this column is a part of a primary key.
-     # * unique: true if this column is a part of a unique key.
-     # * default: what will be insert if this column is left out of an insert query.
-     # * array_of_type: true if this is actually an array of this type.
-     #   +dbi_type+ will be the type authority if this is the case.
-     #
-     def columns(table)
-         sql1 = %[
-             select a.attname, i.indisprimary, i.indisunique
-             from pg_class bc inner join pg_index i
-                 on bc.oid = i.indrelid
-                 inner join pg_class c
-                     on c.oid = i.indexrelid
-                 inner join pg_attribute a
-                     on c.oid = a.attrelid
-             where bc.relname = ?
-                 and bc.relkind in ('r', 'v')
-                 and pg_catalog.pg_table_is_visible(bc.oid);
-         ]
-
-         sql2 = %[
-             SELECT a.attname, a.atttypid, a.attnotnull, a.attlen, format_type(a.atttypid, a.atttypmod)
-             FROM pg_catalog.pg_class c, pg_attribute a, pg_type t
-             WHERE a.attnum > 0 AND a.attrelid = c.oid AND a.atttypid = t.oid AND c.relname = ?
-                 AND c.relkind IN ('r','v')
-                 AND pg_catalog.pg_table_is_visible(c.oid)
-         ]
-
-         # by Michael Neumann (get default value)
-         # corrected by Joseph McDonald
-         sql3 = %[
-             SELECT pg_attrdef.adsrc, pg_attribute.attname
-             FROM pg_attribute, pg_attrdef, pg_catalog.pg_class
-             WHERE pg_catalog.pg_class.relname = ? AND
-                 pg_attribute.attrelid = pg_catalog.pg_class.oid AND
-                 pg_attrdef.adrelid = pg_catalog.pg_class.oid AND
-                 pg_attrdef.adnum = pg_attribute.attnum
-                 AND pg_catalog.pg_class.relkind IN ('r','v')
-                 AND pg_catalog.pg_table_is_visible(pg_catalog.pg_class.oid)
-         ]
-
-         dbh = DBI::DatabaseHandle.new(self)
-         dbh.driver_name = DBI::DBD::Pg.driver_name
-         indices = {}
-         default_values = {}
-
-         dbh.select_all(sql3, table) do |default, name|
-             default_values[name] = default
-         end
-
-         dbh.select_all(sql1, table) do |name, primary, unique|
-             indices[name] = [primary, unique]
-         end
-
-         ##########
-
-         ret = []
-         dbh.execute(sql2, table) do |sth|
-             ret = sth.collect do |row|
-                 name, pg_type, notnullable, len, ftype = row
-                 #name = row[2]
-                 indexed = false
-                 primary = nil
-                 unique = nil
-                 if indices.has_key?(name)
-                     indexed = true
-                     primary, unique = indices[name]
-                 end
-
-                 typeinfo = DBI::DBD::Pg.parse_type(ftype)
-                 typeinfo[:size] ||= len
-
-                 if POSTGRESQL_to_XOPEN.has_key?(typeinfo[:type])
-                     sql_type = POSTGRESQL_to_XOPEN[typeinfo[:type]][0]
-                 else
-                     sql_type = POSTGRESQL_to_XOPEN[nil][0]
-                 end
-
-                 row = {}
-                 row['name'] = name
-                 row['sql_type'] = sql_type
-                 row['type_name'] = typeinfo[:type]
-                 row['nullable'] = ! notnullable
-                 row['indexed'] = indexed
-                 row['primary'] = primary
-                 row['unique'] = unique
-                 row['precision'] = typeinfo[:size]
-                 row['scale'] = typeinfo[:decimal]
-                 row['default'] = default_values[name]
-                 row['array_of_type'] = typeinfo[:array]
-
-                 if typeinfo[:array]
-                     row['dbi_type'] =
-                         DBI::DBD::Pg::Type::Array.new(
-                             DBI::TypeUtil.type_name_to_module(typeinfo[:type])
-                         )
-                 end
-                 row
-             end # collect
-         end # execute
-
-         return ret
-     end
-
-     def prepare(statement)
-         DBI::DBD::Pg::Statement.new(self, statement)
-     end
-
-     def [](attr)
-         case attr
-         when 'pg_client_encoding'
-             @connection.client_encoding
-         when 'NonBlocking'
-             @attr['pg_async']
-         else
-             @attr[attr]
-         end
-     end
-
-     def []=(attr, value)
-         case attr
-         when 'AutoCommit'
-             if @attr['AutoCommit'] != value then
-                 if value # turn AutoCommit ON
-                     if @in_transaction
-                         # TODO: commit outstanding transactions?
-                         _exec("COMMIT")
-                         @in_transaction = false
-                     end
-                 else # turn AutoCommit OFF
-                     @in_transaction = false
-                 end
-             end
-             # value is assigned below
-         when 'NonBlocking', 'pg_async'
-             # booleanize input
-             value = value ? true : false
-             @pgexec = (value ? DBI::DBD::Pg::PgExecutorAsync : DBI::DBD::Pg::PgExecutor).new(@connection)
-             # value is assigned to @attr below
-         when 'pg_client_encoding'
-             @connection.set_client_encoding(value)
-         when 'pg_native_binding'
-             @attr[attr] = value
-         else
-             if attr =~ /^pg_/ or attr != /_/
-                 raise DBI::NotSupportedError, "Option '#{attr}' not supported"
-             else # option for some other driver - quitly ignore
-                 return
-             end
-         end
-         @attr[attr] = value
-     end
-
-     def commit
-         if @in_transaction
-             _exec("COMMIT")
-             @in_transaction = false
-         else
-             # TODO: Warn?
-         end
-     end
-
-     def rollback
-         if @in_transaction
-             _exec("ROLLBACK")
-             @in_transaction = false
-         else
-             # TODO: Warn?
-         end
-     end
-
-     #
-     # Are we in an transaction?
-     #
-     def in_transaction?
-         @in_transaction
-     end
-
-     #
-     # Forcibly initializes a new transaction.
-     #
-     def start_transaction
-         _exec("BEGIN")
-         @in_transaction = true
-     end
-
-     def _exec(sql, *parameters)
-         @pgexec.exec(sql, parameters)
-     end
-
-     def _exec_prepared(stmt_name, *parameters)
-         @pgexec.exec_prepared(stmt_name, parameters)
-     end
-
-     def _prepare(stmt_name, sql)
-         @pgexec.prepare(stmt_name, sql)
-     end
-
-     private
-
-     # special quoting if value is element of an array
-     def quote_array_elements( value )
-         # XXX is this method still being used?
-         case value
-         when Array
-             '{'+ value.collect{|v| quote_array_elements(v) }.join(',') + '}'
-         when String
-             '"' + value.gsub(/\\/){ '\\\\' }.gsub(/"/){ '\\"' } + '"'
-         else
-             quote( value ).sub(/^'/,'').sub(/'$/,'')
-         end
-     end
-
-     def parse_type_name(type_name)
-         case type_name
-         when 'bool' then DBI::Type::Boolean
-         when 'int8', 'int4', 'int2' then DBI::Type::Integer
-         when 'varchar' then DBI::Type::Varchar
-         when 'float4','float8' then DBI::Type::Float
-         when 'time', 'timetz' then DBI::Type::Timestamp
-         when 'timestamp', 'timestamptz' then DBI::Type::Timestamp
-         when 'date' then DBI::Type::Timestamp
-         when 'decimal', 'numeric' then DBI::Type::Decimal
-         when 'bytea' then DBI::DBD::Pg::Type::ByteA
-         when 'enum' then DBI::Type::Varchar
-         end
-     end
-
-     #
-     # Gathers the types from the postgres database and attempts to
-     # locate matching DBI::Type objects for them.
-     #
-     def load_type_map
-         @type_map = Hash.new
-
-         res = _exec("SELECT oid, typname, typelem FROM pg_type WHERE typtype IN ('b', 'e')")
-
-         res.each do |row|
-             rowtype = parse_type_name(row["typname"])
-             @type_map[row["oid"].to_i] =
-                 {
-                     "type_name" => row["typname"],
-                     "dbi_type" =>
-                         if rowtype
-                             rowtype
-                         elsif row["typname"] =~ /^_/ and row["typelem"].to_i > 0 then
-                             # arrays are special and have a subtype, as an
-                             # oid held in the "typelem" field.
-                             # Since we may not have a mapping for the
-                             # subtype yet, defer by storing the typelem
-                             # integer as a base type in a constructed
-                             # Type::Array object. dirty, i know.
-                             #
-                             # These array objects will be reconstructed
-                             # after all rows are processed and therefore
-                             # the oid -> type mapping is complete.
-                             #
-                             DBI::DBD::Pg::Type::Array.new(row["typelem"].to_i)
-                         else
-                             DBI::Type::Varchar
-                         end
-                 }
-         end
-         # additional conversions
-         @type_map[705] ||= DBI::Type::Varchar # select 'hallo'
-         @type_map[1114] ||= DBI::Type::Timestamp # TIMESTAMP WITHOUT TIME ZONE
-
-         # remap array subtypes
-         @type_map.each_key do |key|
-             if @type_map[key]["dbi_type"].class == DBI::DBD::Pg::Type::Array
-                 oid = @type_map[key]["dbi_type"].base_type
-                 if @type_map[oid]
-                     @type_map[key]["dbi_type"] = DBI::DBD::Pg::Type::Array.new(@type_map[oid]["dbi_type"])
-                 else
-                     # punt
-                     @type_map[key] = DBI::DBD::Pg::Type::Array.new(DBI::Type::Varchar)
-                 end
-             end
-         end
-     end
-
-     public
-
-     # return the postgresql types for this session. returns an oid -> type name mapping.
-     def __types(force=nil)
-         load_type_map if (!@type_map or force)
-         @type_map
-     end
-
-     # deprecated.
-     def __types_old
-         h = { }
-
-         _exec('select oid, typname from pg_type').each do |row|
-             h[row["oid"].to_i] = row["typname"]
-         end
-
-         return h
-     end
-
-     #
-     # Import a BLOB from a file.
-     #
-     def __blob_import(file)
-         start_transaction unless @in_transaction
-         @connection.lo_import(file)
-     rescue PGError => err
-         raise DBI::DatabaseError.new(err.message)
-     end
-
-     #
-     # Export a BLOB to a file.
-     #
-     def __blob_export(oid, file)
-         start_transaction unless @in_transaction
-         @connection.lo_export(oid.to_i, file)
-     rescue PGError => err
-         raise DBI::DatabaseError.new(err.message)
-     end
-
-     #
-     # Create a BLOB.
-     #
-     def __blob_create(mode=PGconn::INV_READ)
-         start_transaction unless @in_transaction
-         @connection.lo_creat(mode)
-     rescue PGError => err
-         raise DBI::DatabaseError.new(err.message)
-     end
-
-     #
-     # Open a BLOB.
-     #
-     def __blob_open(oid, mode=PGconn::INV_READ)
-         start_transaction unless @in_transaction
-         @connection.lo_open(oid.to_i, mode)
-     rescue PGError => err
-         raise DBI::DatabaseError.new(err.message)
-     end
-
-     #
-     # Remove a BLOB.
-     #
-     def __blob_unlink(oid)
-         start_transaction unless @in_transaction
-         @connection.lo_unlink(oid.to_i)
-     rescue PGError => err
-         raise DBI::DatabaseError.new(err.message)
-     end
-
-     #
-     # Read a BLOB and return the data.
-     #
-     def __blob_read(oid, length)
-         blob = @connection.lo_open(oid.to_i, PGconn::INV_READ)
-
-         if length.nil?
-             data = @connection.lo_read(blob)
-         else
-             data = @connection.lo_read(blob, length)
-         end
-
-         # FIXME it doesn't like to close here either.
-         # @connection.lo_close(blob)
-         data
-     rescue PGError => err
-         raise DBI::DatabaseError.new(err.message)
-     end
-
-     #
-     # Write the value to the BLOB.
-     #
-     def __blob_write(oid, value)
-         start_transaction unless @in_transaction
-         blob = @connection.lo_open(oid.to_i, PGconn::INV_WRITE)
-         res = @connection.lo_write(blob, value)
-         # FIXME not sure why PG doesn't like to close here -- seems to be
-         # working but we should make sure it's not eating file descriptors
-         # up before release.
-         # @connection.lo_close(blob)
-         return res
-     rescue PGError => err
-         raise DBI::DatabaseError.new(err.message)
-     end
-
-     #
-     # FIXME DOCUMENT
-     #
-     def __set_notice_processor(proc)
-         @connection.set_notice_processor proc
-     rescue PGError => err
-         raise DBI::DatabaseError.new(err.message)
-     end
- end # Database
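
The file above was the PostgreSQL backend for Ruby/DBI, removed from the gem in this release together with the other bundled DBDs listed in the file summary. For orientation, here is a minimal sketch of how applications typically reached it through the DBI front end. The DSN keys and the AutoCommit / pg_* attributes are the ones parsed by the removed initializer; the concrete database name, host, credentials, and query are illustrative only and not taken from this diff:

    require 'dbi'

    # Hypothetical connection values; only the DSN shape and attribute names
    # come from the removed driver code above.
    dbh = DBI.connect('DBI:Pg:database=mydb;host=localhost;port=5432',
                      'user', 'secret',
                      'AutoCommit'         => false,  # 'unchained' mode: explicit commit/rollback
                      'pg_async'           => true,   # selects PgExecutorAsync (see exec.rb below)
                      'pg_client_encoding' => 'UTF8')

    dbh.select_all('SELECT 1') { |row| p row }
    dbh.commit
    dbh.disconnect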
data/lib/dbd/pg/exec.rb DELETED
@@ -1,47 +0,0 @@
- module DBI::DBD::Pg
-     ################################################################
-     # Convenience adaptor to hide details command execution API calls.
-     # See PgExecutorAsync subclass
-     class PgExecutor
-         def initialize(pg_conn)
-             @pg_conn = pg_conn
-         end
-
-         def exec(sql, parameters = nil)
-             @pg_conn.exec(sql, parameters)
-         end
-
-         def exec_prepared(stmt_name, parameters = nil)
-             @pg_conn.exec_prepared(stmt_name, parameters)
-         end
-
-         def prepare(stmt_name, sql)
-             @pg_conn.prepare(stmt_name, sql)
-         end
-     end
-
-     # Asynchronous implementation of PgExecutor, useful for 'green
-     # thread' implementations (e.g., MRI <= 1.8.x) which would otherwise
-     # suspend other threads while awaiting query results.
-     #--
-     # FIXME: PQsetnonblocking + select/poll would make the exec*
-     #        methods truly 'async', though this is rarely needed in
-     #        practice.
-     class PgExecutorAsync < PgExecutor
-         def exec(sql, parameters = nil)
-             @pg_conn.async_exec(sql, parameters)
-         end
-
-         def exec_prepared(stmt_name, parameters = nil)
-             @pg_conn.send_query_prepared(stmt_name, parameters)
-             @pg_conn.block()
-             @pg_conn.get_last_result()
-         end
-
-         def prepare(stmt_name, sql)
-             @pg_conn.send_prepare(stmt_name, sql)
-             @pg_conn.block()
-             @pg_conn.get_last_result()
-         end
-     end
- end
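
The pg_async / NonBlocking attribute handled in database.rb above is what selected PgExecutorAsync instead of PgExecutor at runtime. As a rough standalone illustration of the underlying pattern, using the pg gem directly rather than the removed classes (the connection parameters and statement name are made up, and this sketch is not part of the deleted code):

    require 'pg'

    conn = PG.connect(dbname: 'mydb')   # hypothetical connection parameters

    # Blocking form, as in PgExecutor#exec:
    res = conn.exec('SELECT now()')

    # Non-blocking form, mirroring PgExecutorAsync#exec_prepared:
    conn.prepare('stmt_now', 'SELECT now()')
    conn.send_query_prepared('stmt_now', [])
    conn.block                  # let other (green) threads run until a result is ready
    res = conn.get_last_result
    puts res[0]['now']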