pg 1.1.4

Files changed (77)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +3 -0
  3. data.tar.gz.sig +0 -0
  4. data/.gemtest +0 -0
  5. data/BSDL +22 -0
  6. data/ChangeLog +6595 -0
  7. data/Contributors.rdoc +46 -0
  8. data/History.rdoc +492 -0
  9. data/LICENSE +56 -0
  10. data/Manifest.txt +72 -0
  11. data/POSTGRES +23 -0
  12. data/README-OS_X.rdoc +68 -0
  13. data/README-Windows.rdoc +56 -0
  14. data/README.ja.rdoc +14 -0
  15. data/README.rdoc +178 -0
  16. data/Rakefile +215 -0
  17. data/Rakefile.cross +298 -0
  18. data/ext/errorcodes.def +968 -0
  19. data/ext/errorcodes.rb +45 -0
  20. data/ext/errorcodes.txt +478 -0
  21. data/ext/extconf.rb +94 -0
  22. data/ext/gvl_wrappers.c +17 -0
  23. data/ext/gvl_wrappers.h +241 -0
  24. data/ext/pg.c +640 -0
  25. data/ext/pg.h +365 -0
  26. data/ext/pg_binary_decoder.c +229 -0
  27. data/ext/pg_binary_encoder.c +162 -0
  28. data/ext/pg_coder.c +549 -0
  29. data/ext/pg_connection.c +4252 -0
  30. data/ext/pg_copy_coder.c +596 -0
  31. data/ext/pg_errors.c +95 -0
  32. data/ext/pg_result.c +1501 -0
  33. data/ext/pg_text_decoder.c +981 -0
  34. data/ext/pg_text_encoder.c +682 -0
  35. data/ext/pg_tuple.c +541 -0
  36. data/ext/pg_type_map.c +166 -0
  37. data/ext/pg_type_map_all_strings.c +116 -0
  38. data/ext/pg_type_map_by_class.c +239 -0
  39. data/ext/pg_type_map_by_column.c +312 -0
  40. data/ext/pg_type_map_by_mri_type.c +284 -0
  41. data/ext/pg_type_map_by_oid.c +355 -0
  42. data/ext/pg_type_map_in_ruby.c +299 -0
  43. data/ext/util.c +149 -0
  44. data/ext/util.h +65 -0
  45. data/ext/vc/pg.sln +26 -0
  46. data/ext/vc/pg_18/pg.vcproj +216 -0
  47. data/ext/vc/pg_19/pg_19.vcproj +209 -0
  48. data/lib/pg.rb +74 -0
  49. data/lib/pg/basic_type_mapping.rb +459 -0
  50. data/lib/pg/binary_decoder.rb +22 -0
  51. data/lib/pg/coder.rb +83 -0
  52. data/lib/pg/connection.rb +291 -0
  53. data/lib/pg/constants.rb +11 -0
  54. data/lib/pg/exceptions.rb +11 -0
  55. data/lib/pg/result.rb +31 -0
  56. data/lib/pg/text_decoder.rb +47 -0
  57. data/lib/pg/text_encoder.rb +69 -0
  58. data/lib/pg/tuple.rb +30 -0
  59. data/lib/pg/type_map_by_column.rb +15 -0
  60. data/spec/data/expected_trace.out +26 -0
  61. data/spec/data/random_binary_data +0 -0
  62. data/spec/helpers.rb +380 -0
  63. data/spec/pg/basic_type_mapping_spec.rb +508 -0
  64. data/spec/pg/connection_spec.rb +1872 -0
  65. data/spec/pg/connection_sync_spec.rb +41 -0
  66. data/spec/pg/result_spec.rb +491 -0
  67. data/spec/pg/tuple_spec.rb +280 -0
  68. data/spec/pg/type_map_by_class_spec.rb +138 -0
  69. data/spec/pg/type_map_by_column_spec.rb +222 -0
  70. data/spec/pg/type_map_by_mri_type_spec.rb +136 -0
  71. data/spec/pg/type_map_by_oid_spec.rb +149 -0
  72. data/spec/pg/type_map_in_ruby_spec.rb +164 -0
  73. data/spec/pg/type_map_spec.rb +22 -0
  74. data/spec/pg/type_spec.rb +949 -0
  75. data/spec/pg_spec.rb +50 -0
  76. metadata +322 -0
  77. metadata.gz.sig +0 -0
data/spec/pg/connection_sync_spec.rb
@@ -0,0 +1,41 @@
+ # -*- rspec -*-
+ #encoding: utf-8
+
+ require_relative '../helpers'
+
+ context "running with sync_* methods" do
+   before :each do
+     PG::Connection.async_api = false
+   end
+
+   after :each do
+     PG::Connection.async_api = true
+   end
+
+   fname = File.expand_path("../connection_spec.rb", __FILE__)
+   eval File.read(fname, encoding: __ENCODING__), binding, fname
+
+
+   it "enables/disables async/sync methods by #async_api" do
+     [true, false].each do |async|
+       PG::Connection.async_api = async
+
+       start = Time.now
+       t = Thread.new do
+         @conn.exec( 'select pg_sleep(1)' )
+       end
+       sleep 0.1
+
+       t.kill
+       t.join
+       dt = Time.now - start
+
+       if async
+         expect( dt ).to be < 1.0
+       else
+         expect( dt ).to be >= 1.0
+       end
+     end
+   end
+
+ end
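
The sync spec above re-runs the whole connection spec with PG::Connection.async_api = false, switching exec and friends from the interruptible implementation built on send_query/get_result back to the plain blocking libpq calls; the Thread#kill timing test checks exactly that difference. A minimal usage sketch, assuming a reachable PostgreSQL server (the connection settings are placeholders):

require 'pg'

conn = PG.connect( dbname: 'test' )  # placeholder connection settings

# Blocking variant: exec waits inside libpq and cannot be interrupted
# from Ruby until the server answers.
PG::Connection.async_api = false
conn.exec( 'SELECT pg_sleep(0.2)' )

# Interruptible variant (the default in pg 1.1): exec is built on
# send_query/get_result, so Thread#kill or signals can abort the wait.
PG::Connection.async_api = true
conn.exec( 'SELECT pg_sleep(0.2)' )

conn.close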
data/spec/pg/result_spec.rb
@@ -0,0 +1,491 @@
+ # -*- rspec -*-
+ # encoding: utf-8
+
+ require_relative '../helpers'
+
+ require 'pg'
+
+
+ describe PG::Result do
+
+   it "acts as an array of hashes" do
+     res = @conn.exec("SELECT 1 AS a, 2 AS b")
+     expect( res[0]['a'] ).to eq( '1' )
+     expect( res[0]['b'] ).to eq( '2' )
+   end
+
+   it "yields a row as an array" do
+     res = @conn.exec("SELECT 1 AS a, 2 AS b")
+     list = []
+     res.each_row { |r| list << r }
+     expect( list ).to eq [['1', '2']]
+   end
+
+   it "yields a row as an Enumerator" do
+     res = @conn.exec("SELECT 1 AS a, 2 AS b")
+     e = res.each_row
+     expect( e ).to be_a_kind_of(Enumerator)
+     pending "Rubinius doesn't define RETURN_SIZED_ENUMERATOR()" if RUBY_ENGINE=='rbx'
+     expect( e.size ).to eq( 1 )
+     expect( e.to_a ).to eq [['1', '2']]
+   end
+
+   it "yields a row as an Enumerator of hashs" do
+     res = @conn.exec("SELECT 1 AS a, 2 AS b")
+     e = res.each
+     expect( e ).to be_a_kind_of(Enumerator)
+     pending "Rubinius doesn't define RETURN_SIZED_ENUMERATOR()" if RUBY_ENGINE=='rbx'
+     expect( e.size ).to eq( 1 )
+     expect( e.to_a ).to eq [{'a'=>'1', 'b'=>'2'}]
+   end
+
+   context "result streaming in single row mode" do
+     it "can iterate over all rows as Hash" do
+       @conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
+       @conn.set_single_row_mode
+       expect(
+         @conn.get_result.stream_each.to_a
+       ).to eq(
+         [{'a'=>"2"}, {'a'=>"3"}, {'a'=>"4"}]
+       )
+       expect(
+         @conn.get_result.enum_for(:stream_each).to_a
+       ).to eq(
+         [{'b'=>"1", 'c'=>"5"}, {'b'=>"1", 'c'=>"6"}]
+       )
+       expect( @conn.get_result ).to be_nil
+     end
+
+     it "can iterate over all rows as Array" do
+       @conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
+       @conn.set_single_row_mode
+       expect(
+         @conn.get_result.enum_for(:stream_each_row).to_a
+       ).to eq(
+         [["2"], ["3"], ["4"]]
+       )
+       expect(
+         @conn.get_result.stream_each_row.to_a
+       ).to eq(
+         [["1", "5"], ["1", "6"]]
+       )
+       expect( @conn.get_result ).to be_nil
+     end
+
+     it "can iterate over all rows as PG::Tuple" do
+       @conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
+       @conn.set_single_row_mode
+       tuples = @conn.get_result.stream_each_tuple.to_a
+       expect( tuples[0][0] ).to eq( "2" )
+       expect( tuples[1]["a"] ).to eq( "3" )
+       expect( tuples.size ).to eq( 3 )
+
+       tuples = @conn.get_result.enum_for(:stream_each_tuple).to_a
+       expect( tuples[-1][-1] ).to eq( "6" )
+       expect( tuples[-2]["b"] ).to eq( "1" )
+       expect( tuples.size ).to eq( 2 )
+
+       expect( @conn.get_result ).to be_nil
+     end
+
+     it "complains when not in single row mode" do
+       @conn.send_query( "SELECT generate_series(2,4)" )
+       expect{
+         @conn.get_result.stream_each_row.to_a
+       }.to raise_error(PG::InvalidResultStatus, /not in single row mode/)
+     end
+
+     it "complains when intersected with get_result" do
+       @conn.send_query( "SELECT 1" )
+       @conn.set_single_row_mode
+       expect{
+         @conn.get_result.stream_each_row.each{ @conn.get_result }
+       }.to raise_error(PG::NoResultError, /no result received/)
+     end
+
+     it "raises server errors" do
+       @conn.send_query( "SELECT 0/0" )
+       expect{
+         @conn.get_result.stream_each_row.to_a
+       }.to raise_error(PG::DivisionByZero)
+     end
+   end
+
+   it "inserts nil AS NULL and return NULL as nil" do
+     res = @conn.exec_params("SELECT $1::int AS n", [nil])
+     expect( res[0]['n'] ).to be_nil()
+   end
+
+   it "encapsulates errors in a PG::Error object" do
+     exception = nil
+     begin
+       @conn.exec( "SELECT * FROM nonexistant_table" )
+     rescue PG::Error => err
+       exception = err
+     end
+
+     result = exception.result
+
+     expect( result ).to be_a( described_class() )
+     expect( result.error_field(PG::PG_DIAG_SEVERITY) ).to eq( 'ERROR' )
+     expect( result.error_field(PG::PG_DIAG_SQLSTATE) ).to eq( '42P01' )
+     expect(
+       result.error_field(PG::PG_DIAG_MESSAGE_PRIMARY)
+     ).to eq( 'relation "nonexistant_table" does not exist' )
+     expect( result.error_field(PG::PG_DIAG_MESSAGE_DETAIL) ).to be_nil()
+     expect( result.error_field(PG::PG_DIAG_MESSAGE_HINT) ).to be_nil()
+     expect( result.error_field(PG::PG_DIAG_STATEMENT_POSITION) ).to eq( '15' )
+     expect( result.error_field(PG::PG_DIAG_INTERNAL_POSITION) ).to be_nil()
+     expect( result.error_field(PG::PG_DIAG_INTERNAL_QUERY) ).to be_nil()
+     expect( result.error_field(PG::PG_DIAG_CONTEXT) ).to be_nil()
+     expect(
+       result.error_field(PG::PG_DIAG_SOURCE_FILE)
+     ).to match( /parse_relation\.c$|namespace\.c$/ )
+     expect( result.error_field(PG::PG_DIAG_SOURCE_LINE) ).to match( /^\d+$/ )
+     expect(
+       result.error_field(PG::PG_DIAG_SOURCE_FUNCTION)
+     ).to match( /^parserOpenTable$|^RangeVarGetRelid$/ )
+   end
+
+   it "encapsulates database object names for integrity constraint violations", :postgresql_93 do
+     @conn.exec( "CREATE TABLE integrity (id SERIAL PRIMARY KEY)" )
+     exception = nil
+     begin
+       @conn.exec( "INSERT INTO integrity VALUES (NULL)" )
+     rescue PG::Error => err
+       exception = err
+     end
+     result = exception.result
+
+     expect( result.error_field(PG::PG_DIAG_SCHEMA_NAME) ).to eq( 'public' )
+     expect( result.error_field(PG::PG_DIAG_TABLE_NAME) ).to eq( 'integrity' )
+     expect( result.error_field(PG::PG_DIAG_COLUMN_NAME) ).to eq( 'id' )
+     expect( result.error_field(PG::PG_DIAG_DATATYPE_NAME) ).to be_nil
+     expect( result.error_field(PG::PG_DIAG_CONSTRAINT_NAME) ).to be_nil
+   end
+
+   it "detects division by zero as SQLSTATE 22012" do
+     sqlstate = nil
+     begin
+       res = @conn.exec("SELECT 1/0")
+     rescue PG::Error => e
+       sqlstate = e.result.result_error_field( PG::PG_DIAG_SQLSTATE ).to_i
+     end
+     expect( sqlstate ).to eq( 22012 )
+   end
+
+   it "returns the same bytes in binary format that are sent in binary format" do
+     binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
+     bytes = File.open(binary_file, 'rb').read
+     res = @conn.exec_params('VALUES ($1::bytea)',
+       [ { :value => bytes, :format => 1 } ], 1)
+     expect( res[0]['column1'] ).to eq( bytes )
+     expect( res.getvalue(0,0) ).to eq( bytes )
+     expect( res.values[0][0] ).to eq( bytes )
+     expect( res.column_values(0)[0] ).to eq( bytes )
+   end
+
+   it "returns the same bytes in binary format that are sent as inline text" do
+     binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
+     bytes = File.open(binary_file, 'rb').read
+     @conn.exec("SET standard_conforming_strings=on")
+     res = @conn.exec_params("VALUES ('#{PG::Connection.escape_bytea(bytes)}'::bytea)", [], 1)
+     expect( res[0]['column1'] ).to eq( bytes )
+     expect( res.getvalue(0,0) ).to eq( bytes )
+     expect( res.values[0][0] ).to eq( bytes )
+     expect( res.column_values(0)[0] ).to eq( bytes )
+   end
+
+   it "returns the same bytes in text format that are sent in binary format" do
+     binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
+     bytes = File.open(binary_file, 'rb').read
+     res = @conn.exec_params('VALUES ($1::bytea)',
+       [ { :value => bytes, :format => 1 } ])
+     expect( PG::Connection.unescape_bytea(res[0]['column1']) ).to eq( bytes )
+   end
+
+   it "returns the same bytes in text format that are sent as inline text" do
+     binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
+     in_bytes = File.open(binary_file, 'rb').read
+
+     out_bytes = nil
+     @conn.exec("SET standard_conforming_strings=on")
+     res = @conn.exec_params("VALUES ('#{PG::Connection.escape_bytea(in_bytes)}'::bytea)", [], 0)
+     out_bytes = PG::Connection.unescape_bytea(res[0]['column1'])
+     expect( out_bytes ).to eq( in_bytes )
+   end
+
+   it "returns the parameter type of the specified prepared statement parameter" do
+     query = 'SELECT * FROM pg_stat_activity WHERE user = $1::name AND query = $2::text'
+     @conn.prepare( 'queryfinder', query )
+     res = @conn.describe_prepared( 'queryfinder' )
+
+     expect(
+       @conn.exec_params( 'SELECT format_type($1, -1)', [res.paramtype(0)] ).getvalue( 0, 0 )
+     ).to eq( 'name' )
+     expect(
+       @conn.exec_params( 'SELECT format_type($1, -1)', [res.paramtype(1)] ).getvalue( 0, 0 )
+     ).to eq( 'text' )
+   end
+
+   it "raises an exception when a negative index is given to #fformat" do
+     res = @conn.exec('SELECT * FROM pg_stat_activity')
+     expect {
+       res.fformat( -1 )
+     }.to raise_error( ArgumentError, /column number/i )
+   end
+
+   it "raises an exception when a negative index is given to #fmod" do
+     res = @conn.exec('SELECT * FROM pg_stat_activity')
+     expect {
+       res.fmod( -1 )
+     }.to raise_error( ArgumentError, /column number/i )
+   end
+
+   it "raises an exception when a negative index is given to #[]" do
+     res = @conn.exec('SELECT * FROM pg_stat_activity')
+     expect {
+       res[ -1 ]
+     }.to raise_error( IndexError, /-1 is out of range/i )
+   end
+
+   it "raises allow for conversion to an array of arrays" do
+     @conn.exec( 'CREATE TABLE valuestest ( foo varchar(33) )' )
+     @conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar\')' )
+     @conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar2\')' )
+
+     res = @conn.exec( 'SELECT * FROM valuestest' )
+     expect( res.values ).to eq( [ ["bar"], ["bar2"] ] )
+   end
+
+   # PQfmod
+   it "can return the type modifier for a result column" do
+     @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
+     res = @conn.exec( 'SELECT * FROM fmodtest' )
+     expect( res.fmod(0) ).to eq( 33 + 4 ) # Column length + varlena size (4)
+   end
+
+   it "raises an exception when an invalid index is passed to PG::Result#fmod" do
+     @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
+     res = @conn.exec( 'SELECT * FROM fmodtest' )
+     expect { res.fmod(1) }.to raise_error( ArgumentError )
+   end
+
+   it "raises an exception when an invalid (negative) index is passed to PG::Result#fmod" do
+     @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
+     res = @conn.exec( 'SELECT * FROM fmodtest' )
+     expect { res.fmod(-11) }.to raise_error( ArgumentError )
+   end
+
+   it "doesn't raise an exception when a valid index is passed to PG::Result#fmod for a" +
+      " column with no typemod" do
+     @conn.exec( 'CREATE TABLE fmodtest ( foo text )' )
+     res = @conn.exec( 'SELECT * FROM fmodtest' )
+     expect( res.fmod(0) ).to eq( -1 )
+   end
+
+   # PQftable
+   it "can return the oid of the table from which a result column was fetched" do
+     @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
+     res = @conn.exec( 'SELECT * FROM ftabletest' )
+
+     expect( res.ftable(0) ).to be_nonzero()
+   end
+
+   it "raises an exception when an invalid index is passed to PG::Result#ftable" do
+     @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
+     res = @conn.exec( 'SELECT * FROM ftabletest' )
+
+     expect { res.ftable(18) }.to raise_error( ArgumentError )
+   end
+
+   it "raises an exception when an invalid (negative) index is passed to PG::Result#ftable" do
+     @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
+     res = @conn.exec( 'SELECT * FROM ftabletest' )
+
+     expect { res.ftable(-2) }.to raise_error( ArgumentError )
+   end
+
+   it "doesn't raise an exception when a valid index is passed to PG::Result#ftable for a " +
+      "column with no corresponding table" do
+     @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
+     res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftabletest' )
+     expect( res.ftable(1) ).to eq( PG::INVALID_OID )
+   end
+
+   # PQftablecol
+   it "can return the column number (within its table) of a column in a result" do
+     @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
+     res = @conn.exec( 'SELECT * FROM ftablecoltest' )
+
+     expect( res.ftablecol(0) ).to eq( 1 )
+     expect( res.ftablecol(1) ).to eq( 2 )
+   end
+
+   it "raises an exception when an invalid index is passed to PG::Result#ftablecol" do
+     @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
+     res = @conn.exec( 'SELECT * FROM ftablecoltest' )
+
+     expect { res.ftablecol(32) }.to raise_error( ArgumentError )
+   end
+
+   it "raises an exception when an invalid (negative) index is passed to PG::Result#ftablecol" do
+     @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
+     res = @conn.exec( 'SELECT * FROM ftablecoltest' )
+
+     expect { res.ftablecol(-1) }.to raise_error( ArgumentError )
+   end
+
+   it "doesnn't raise an exception when a valid index is passed to PG::Result#ftablecol for a " +
+      "column with no corresponding table" do
+     @conn.exec( 'CREATE TABLE ftablecoltest ( foo text )' )
+     res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftablecoltest' )
+     expect( res.ftablecol(1) ).to eq( 0 )
+   end
+
+   it "can be manually checked for failed result status (async API)" do
+     @conn.send_query( "SELECT * FROM nonexistant_table" )
+     res = @conn.get_result
+     expect {
+       res.check
+     }.to raise_error( PG::Error, /relation "nonexistant_table" does not exist/ )
+   end
+
+   it "can return the values of a single field" do
+     res = @conn.exec( "SELECT 1 AS x, 'a' AS y UNION ALL SELECT 2, 'b'" )
+     expect( res.field_values('x') ).to eq( ['1', '2'] )
+     expect( res.field_values('y') ).to eq( ['a', 'b'] )
+     expect{ res.field_values('') }.to raise_error(IndexError)
+     expect{ res.field_values(:x) }.to raise_error(TypeError)
+   end
+
+   it "can return the values of a single tuple" do
+     res = @conn.exec( "SELECT 1 AS x, 'a' AS y UNION ALL SELECT 2, 'b'" )
+     expect( res.tuple_values(0) ).to eq( ['1', 'a'] )
+     expect( res.tuple_values(1) ).to eq( ['2', 'b'] )
+     expect{ res.tuple_values(2) }.to raise_error(IndexError)
+     expect{ res.tuple_values(-1) }.to raise_error(IndexError)
+     expect{ res.tuple_values("x") }.to raise_error(TypeError)
+   end
+
+   it "can return the values of a single vary lazy tuple" do
+     res = @conn.exec( "VALUES(1),(2)" )
+     expect( res.tuple(0) ).to be_kind_of( PG::Tuple )
+     expect( res.tuple(1) ).to be_kind_of( PG::Tuple )
+     expect{ res.tuple(2) }.to raise_error(IndexError)
+     expect{ res.tuple(-1) }.to raise_error(IndexError)
+     expect{ res.tuple("x") }.to raise_error(TypeError)
+   end
+
+   it "raises a proper exception for a nonexistant table" do
+     expect {
+       @conn.exec( "SELECT * FROM nonexistant_table" )
+     }.to raise_error( PG::UndefinedTable, /relation "nonexistant_table" does not exist/ )
+   end
+
+   it "raises a more generic exception for an unknown SQLSTATE" do
+     old_error = PG::ERROR_CLASSES.delete('42P01')
+     begin
+       expect {
+         @conn.exec( "SELECT * FROM nonexistant_table" )
+       }.to raise_error{|error|
+         expect( error ).to be_an_instance_of(PG::SyntaxErrorOrAccessRuleViolation)
+         expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
+       }
+     ensure
+       PG::ERROR_CLASSES['42P01'] = old_error
+     end
+   end
+
+   it "raises a ServerError for an unknown SQLSTATE class" do
+     old_error1 = PG::ERROR_CLASSES.delete('42P01')
+     old_error2 = PG::ERROR_CLASSES.delete('42')
+     begin
+       expect {
+         @conn.exec( "SELECT * FROM nonexistant_table" )
+       }.to raise_error{|error|
+         expect( error ).to be_an_instance_of(PG::ServerError)
+         expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
+       }
+     ensure
+       PG::ERROR_CLASSES['42P01'] = old_error1
+       PG::ERROR_CLASSES['42'] = old_error2
+     end
+   end
+
+   it "raises a proper exception for a nonexistant schema" do
+     expect {
+       @conn.exec( "DROP SCHEMA nonexistant_schema" )
+     }.to raise_error( PG::InvalidSchemaName, /schema "nonexistant_schema" does not exist/ )
+   end
+
+   it "the raised result is nil in case of a connection error" do
+     c = PG::Connection.connect_start( '127.0.0.1', 54320, "", "", "me", "xxxx", "somedb" )
+     expect {
+       c.exec "select 1"
+     }.to raise_error {|error|
+       expect( error ).to be_an_instance_of(PG::UnableToSend)
+       expect( error.result ).to eq( nil )
+     }
+   end
+
+   it "does not clear the result itself" do
+     r = @conn.exec "select 1"
+     expect( r.autoclear? ).to eq(false)
+     expect( r.cleared? ).to eq(false)
+     r.clear
+     expect( r.cleared? ).to eq(true)
+   end
+
+   it "can be inspected before and after clear" do
+     r = @conn.exec "select 1"
+     expect( r.inspect ).to match(/status=PGRES_TUPLES_OK/)
+     r.clear
+     expect( r.inspect ).to match(/cleared/)
+   end
+
+   context 'result value conversions with TypeMapByColumn' do
+     let!(:textdec_int){ PG::TextDecoder::Integer.new name: 'INT4', oid: 23 }
+     let!(:textdec_float){ PG::TextDecoder::Float.new name: 'FLOAT4', oid: 700 }
+
+     it "should allow reading, assigning and diabling type conversions" do
+       res = @conn.exec( "SELECT 123" )
+       expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
+       res.type_map = PG::TypeMapByColumn.new [textdec_int]
+       expect( res.type_map ).to be_an_instance_of(PG::TypeMapByColumn)
+       expect( res.type_map.coders ).to eq( [textdec_int] )
+       res.type_map = PG::TypeMapByColumn.new [textdec_float]
+       expect( res.type_map.coders ).to eq( [textdec_float] )
+       res.type_map = PG::TypeMapAllStrings.new
+       expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
+     end
+
+     it "should be applied to all value retrieving methods" do
+       res = @conn.exec( "SELECT 123 as f" )
+       res.type_map = PG::TypeMapByColumn.new [textdec_int]
+       expect( res.values ).to eq( [[123]] )
+       expect( res.getvalue(0,0) ).to eq( 123 )
+       expect( res[0] ).to eq( {'f' => 123 } )
+       expect( res.enum_for(:each_row).to_a ).to eq( [[123]] )
+       expect( res.enum_for(:each).to_a ).to eq( [{'f' => 123}] )
+       expect( res.column_values(0) ).to eq( [123] )
+       expect( res.field_values('f') ).to eq( [123] )
+       expect( res.tuple_values(0) ).to eq( [123] )
+     end
+
+     it "should be usable for several querys" do
+       colmap = PG::TypeMapByColumn.new [textdec_int]
+       res = @conn.exec( "SELECT 123" )
+       res.type_map = colmap
+       expect( res.values ).to eq( [[123]] )
+       res = @conn.exec( "SELECT 456" )
+       res.type_map = colmap
+       expect( res.values ).to eq( [[456]] )
+     end
+
+     it "shouldn't allow invalid type maps" do
+       res = @conn.exec( "SELECT 1" )
+       expect{ res.type_map = 1 }.to raise_error(TypeError)
+     end
+   end
+ end
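
The streaming specs above exercise libpq's single-row mode through send_query, set_single_row_mode and PG::Result#stream_each / #stream_each_row / #stream_each_tuple. A minimal usage sketch, assuming a reachable server (connection settings and the query are placeholders):

require 'pg'

conn = PG.connect( dbname: 'test' )  # placeholder connection settings

conn.send_query( 'SELECT generate_series(1, 100000) AS n' )
conn.set_single_row_mode

# stream_each yields each row as a Hash as it arrives from the server,
# instead of buffering the whole result set before iteration starts.
conn.get_result.stream_each do |row|
  # process row['n'] here
end

# Once the stream is fully consumed, a trailing get_result returns nil.
conn.get_result
conn.close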