pg 1.0.0 → 1.5.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/Gemfile +20 -0
  4. data/History.md +932 -0
  5. data/Manifest.txt +8 -3
  6. data/README-Windows.rdoc +4 -4
  7. data/README.ja.md +300 -0
  8. data/README.md +286 -0
  9. data/Rakefile +41 -138
  10. data/Rakefile.cross +71 -66
  11. data/certs/ged.pem +24 -0
  12. data/certs/kanis@comcard.de.pem +20 -0
  13. data/certs/larskanis-2022.pem +26 -0
  14. data/certs/larskanis-2023.pem +24 -0
  15. data/certs/larskanis-2024.pem +24 -0
  16. data/ext/errorcodes.def +84 -5
  17. data/ext/errorcodes.rb +1 -1
  18. data/ext/errorcodes.txt +23 -6
  19. data/ext/extconf.rb +109 -25
  20. data/ext/gvl_wrappers.c +4 -0
  21. data/ext/gvl_wrappers.h +23 -0
  22. data/ext/pg.c +213 -155
  23. data/ext/pg.h +89 -23
  24. data/ext/pg_binary_decoder.c +164 -16
  25. data/ext/pg_binary_encoder.c +238 -13
  26. data/ext/pg_coder.c +159 -35
  27. data/ext/pg_connection.c +1584 -967
  28. data/ext/pg_copy_coder.c +373 -43
  29. data/ext/pg_errors.c +1 -1
  30. data/ext/pg_record_coder.c +522 -0
  31. data/ext/pg_result.c +710 -217
  32. data/ext/pg_text_decoder.c +630 -43
  33. data/ext/pg_text_encoder.c +222 -72
  34. data/ext/pg_tuple.c +572 -0
  35. data/ext/pg_type_map.c +45 -11
  36. data/ext/pg_type_map_all_strings.c +21 -7
  37. data/ext/pg_type_map_by_class.c +59 -27
  38. data/ext/pg_type_map_by_column.c +80 -37
  39. data/ext/pg_type_map_by_mri_type.c +49 -20
  40. data/ext/pg_type_map_by_oid.c +62 -29
  41. data/ext/pg_type_map_in_ruby.c +56 -22
  42. data/ext/{util.c → pg_util.c} +12 -12
  43. data/ext/{util.h → pg_util.h} +2 -2
  44. data/lib/pg/basic_type_map_based_on_result.rb +67 -0
  45. data/lib/pg/basic_type_map_for_queries.rb +202 -0
  46. data/lib/pg/basic_type_map_for_results.rb +104 -0
  47. data/lib/pg/basic_type_registry.rb +311 -0
  48. data/lib/pg/binary_decoder/date.rb +9 -0
  49. data/lib/pg/binary_decoder/timestamp.rb +26 -0
  50. data/lib/pg/binary_encoder/timestamp.rb +20 -0
  51. data/lib/pg/coder.rb +36 -13
  52. data/lib/pg/connection.rb +769 -70
  53. data/lib/pg/exceptions.rb +22 -2
  54. data/lib/pg/result.rb +14 -2
  55. data/lib/pg/text_decoder/date.rb +21 -0
  56. data/lib/pg/text_decoder/inet.rb +9 -0
  57. data/lib/pg/text_decoder/json.rb +17 -0
  58. data/lib/pg/text_decoder/numeric.rb +9 -0
  59. data/lib/pg/text_decoder/timestamp.rb +30 -0
  60. data/lib/pg/text_encoder/date.rb +13 -0
  61. data/lib/pg/text_encoder/inet.rb +31 -0
  62. data/lib/pg/text_encoder/json.rb +17 -0
  63. data/lib/pg/text_encoder/numeric.rb +9 -0
  64. data/lib/pg/text_encoder/timestamp.rb +24 -0
  65. data/lib/pg/tuple.rb +30 -0
  66. data/lib/pg/type_map_by_column.rb +3 -2
  67. data/lib/pg/version.rb +4 -0
  68. data/lib/pg.rb +106 -39
  69. data/misc/openssl-pg-segfault.rb +31 -0
  70. data/misc/postgres/History.txt +9 -0
  71. data/misc/postgres/Manifest.txt +5 -0
  72. data/misc/postgres/README.txt +21 -0
  73. data/misc/postgres/Rakefile +21 -0
  74. data/misc/postgres/lib/postgres.rb +16 -0
  75. data/misc/ruby-pg/History.txt +9 -0
  76. data/misc/ruby-pg/Manifest.txt +5 -0
  77. data/misc/ruby-pg/README.txt +21 -0
  78. data/misc/ruby-pg/Rakefile +21 -0
  79. data/misc/ruby-pg/lib/ruby/pg.rb +16 -0
  80. data/pg.gemspec +36 -0
  81. data/rakelib/task_extension.rb +46 -0
  82. data/sample/array_insert.rb +20 -0
  83. data/sample/async_api.rb +102 -0
  84. data/sample/async_copyto.rb +39 -0
  85. data/sample/async_mixed.rb +56 -0
  86. data/sample/check_conn.rb +21 -0
  87. data/sample/copydata.rb +71 -0
  88. data/sample/copyfrom.rb +81 -0
  89. data/sample/copyto.rb +19 -0
  90. data/sample/cursor.rb +21 -0
  91. data/sample/disk_usage_report.rb +177 -0
  92. data/sample/issue-119.rb +94 -0
  93. data/sample/losample.rb +69 -0
  94. data/sample/minimal-testcase.rb +17 -0
  95. data/sample/notify_wait.rb +72 -0
  96. data/sample/pg_statistics.rb +285 -0
  97. data/sample/replication_monitor.rb +222 -0
  98. data/sample/test_binary_values.rb +33 -0
  99. data/sample/wal_shipper.rb +434 -0
  100. data/sample/warehouse_partitions.rb +311 -0
  101. data.tar.gz.sig +0 -0
  102. metadata +138 -223
  103. metadata.gz.sig +0 -0
  104. data/.gemtest +0 -0
  105. data/ChangeLog +0 -6595
  106. data/History.rdoc +0 -422
  107. data/README.ja.rdoc +0 -14
  108. data/README.rdoc +0 -167
  109. data/lib/pg/basic_type_mapping.rb +0 -426
  110. data/lib/pg/constants.rb +0 -11
  111. data/lib/pg/text_decoder.rb +0 -51
  112. data/lib/pg/text_encoder.rb +0 -35
  113. data/spec/data/expected_trace.out +0 -26
  114. data/spec/data/random_binary_data +0 -0
  115. data/spec/helpers.rb +0 -348
  116. data/spec/pg/basic_type_mapping_spec.rb +0 -305
  117. data/spec/pg/connection_spec.rb +0 -1719
  118. data/spec/pg/result_spec.rb +0 -456
  119. data/spec/pg/type_map_by_class_spec.rb +0 -138
  120. data/spec/pg/type_map_by_column_spec.rb +0 -222
  121. data/spec/pg/type_map_by_mri_type_spec.rb +0 -136
  122. data/spec/pg/type_map_by_oid_spec.rb +0 -149
  123. data/spec/pg/type_map_in_ruby_spec.rb +0 -164
  124. data/spec/pg/type_map_spec.rb +0 -22
  125. data/spec/pg/type_spec.rb +0 -777
  126. data/spec/pg_spec.rb +0 -50
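Most of the new library code above (items 44-47, plus the new pg_tuple.c and pg_record_coder.c extensions) adds optional type mapping between PostgreSQL and Ruby types. As a quick orientation, here is a minimal Ruby sketch of how the new basic type maps are typically wired up; the connection parameters are placeholders and are not part of this diff:

    require 'pg'

    conn = PG.connect(dbname: 'testdb')  # placeholder connection

    # Decode result values into Ruby objects (Integer, Float, Time, ...)
    # instead of returning every column as a String.
    conn.type_map_for_results = PG::BasicTypeMapForResults.new(conn)

    # Encode Ruby objects passed as bind parameters.
    conn.type_map_for_queries = PG::BasicTypeMapForQueries.new(conn)

    res = conn.exec_params('SELECT $1::int + 1 AS sum', [41])
    p res[0]['sum']  # => 42 (an Integer, not the string "42")

    conn.close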
data/spec/pg/result_spec.rb
@@ -1,456 +0,0 @@
- #!/usr/bin/env rspec
- # encoding: utf-8
-
- require_relative '../helpers'
-
- require 'pg'
-
-
- describe PG::Result do
-
- it "acts as an array of hashes" do
- res = @conn.exec("SELECT 1 AS a, 2 AS b")
- expect( res[0]['a'] ).to eq( '1' )
- expect( res[0]['b'] ).to eq( '2' )
- end
-
- it "yields a row as an array" do
- res = @conn.exec("SELECT 1 AS a, 2 AS b")
- list = []
- res.each_row { |r| list << r }
- expect( list ).to eq [['1', '2']]
- end
-
- it "yields a row as an Enumerator" do
- res = @conn.exec("SELECT 1 AS a, 2 AS b")
- e = res.each_row
- expect( e ).to be_a_kind_of(Enumerator)
- pending "Rubinius doesn't define RETURN_SIZED_ENUMERATOR()" if RUBY_ENGINE=='rbx'
- expect( e.size ).to eq( 1 )
- expect( e.to_a ).to eq [['1', '2']]
- end
-
- it "yields a row as an Enumerator of hashs" do
- res = @conn.exec("SELECT 1 AS a, 2 AS b")
- e = res.each
- expect( e ).to be_a_kind_of(Enumerator)
- pending "Rubinius doesn't define RETURN_SIZED_ENUMERATOR()" if RUBY_ENGINE=='rbx'
- expect( e.size ).to eq( 1 )
- expect( e.to_a ).to eq [{'a'=>'1', 'b'=>'2'}]
- end
-
- context "result streaming" do
- it "can iterate over all tuples in single row mode" do
- @conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
- @conn.set_single_row_mode
- expect(
- @conn.get_result.stream_each.to_a
- ).to eq(
- [{'a'=>"2"}, {'a'=>"3"}, {'a'=>"4"}]
- )
- expect(
- @conn.get_result.enum_for(:stream_each).to_a
- ).to eq(
- [{'b'=>"1", 'c'=>"5"}, {'b'=>"1", 'c'=>"6"}]
- )
- expect( @conn.get_result ).to be_nil
- end
-
- it "can iterate over all rows in single row mode" do
- @conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
- @conn.set_single_row_mode
- expect(
- @conn.get_result.enum_for(:stream_each_row).to_a
- ).to eq(
- [["2"], ["3"], ["4"]]
- )
- expect(
- @conn.get_result.stream_each_row.to_a
- ).to eq(
- [["1", "5"], ["1", "6"]]
- )
- expect( @conn.get_result ).to be_nil
- end
-
- it "complains when not in single row mode" do
- @conn.send_query( "SELECT generate_series(2,4)" )
- expect{
- @conn.get_result.stream_each_row.to_a
- }.to raise_error(PG::InvalidResultStatus, /not in single row mode/)
- end
-
- it "complains when intersected with get_result" do
- @conn.send_query( "SELECT 1" )
- @conn.set_single_row_mode
- expect{
- @conn.get_result.stream_each_row.each{ @conn.get_result }
- }.to raise_error(PG::NoResultError, /no result received/)
- end
-
- it "raises server errors" do
- @conn.send_query( "SELECT 0/0" )
- expect{
- @conn.get_result.stream_each_row.to_a
- }.to raise_error(PG::DivisionByZero)
- end
- end
-
- it "inserts nil AS NULL and return NULL as nil" do
- res = @conn.exec("SELECT $1::int AS n", [nil])
- expect( res[0]['n'] ).to be_nil()
- end
-
- it "encapsulates errors in a PG::Error object" do
- exception = nil
- begin
- @conn.exec( "SELECT * FROM nonexistant_table" )
- rescue PG::Error => err
- exception = err
- end
-
- result = exception.result
-
- expect( result ).to be_a( described_class() )
- expect( result.error_field(PG::PG_DIAG_SEVERITY) ).to eq( 'ERROR' )
- expect( result.error_field(PG::PG_DIAG_SQLSTATE) ).to eq( '42P01' )
- expect(
- result.error_field(PG::PG_DIAG_MESSAGE_PRIMARY)
- ).to eq( 'relation "nonexistant_table" does not exist' )
- expect( result.error_field(PG::PG_DIAG_MESSAGE_DETAIL) ).to be_nil()
- expect( result.error_field(PG::PG_DIAG_MESSAGE_HINT) ).to be_nil()
- expect( result.error_field(PG::PG_DIAG_STATEMENT_POSITION) ).to eq( '15' )
- expect( result.error_field(PG::PG_DIAG_INTERNAL_POSITION) ).to be_nil()
- expect( result.error_field(PG::PG_DIAG_INTERNAL_QUERY) ).to be_nil()
- expect( result.error_field(PG::PG_DIAG_CONTEXT) ).to be_nil()
- expect(
- result.error_field(PG::PG_DIAG_SOURCE_FILE)
- ).to match( /parse_relation\.c$|namespace\.c$/ )
- expect( result.error_field(PG::PG_DIAG_SOURCE_LINE) ).to match( /^\d+$/ )
- expect(
- result.error_field(PG::PG_DIAG_SOURCE_FUNCTION)
- ).to match( /^parserOpenTable$|^RangeVarGetRelid$/ )
- end
-
- it "encapsulates database object names for integrity constraint violations", :postgresql_93 do
- @conn.exec( "CREATE TABLE integrity (id SERIAL PRIMARY KEY)" )
- exception = nil
- begin
- @conn.exec( "INSERT INTO integrity VALUES (NULL)" )
- rescue PG::Error => err
- exception = err
- end
- result = exception.result
-
- expect( result.error_field(PG::PG_DIAG_SCHEMA_NAME) ).to eq( 'public' )
- expect( result.error_field(PG::PG_DIAG_TABLE_NAME) ).to eq( 'integrity' )
- expect( result.error_field(PG::PG_DIAG_COLUMN_NAME) ).to eq( 'id' )
- expect( result.error_field(PG::PG_DIAG_DATATYPE_NAME) ).to be_nil
- expect( result.error_field(PG::PG_DIAG_CONSTRAINT_NAME) ).to be_nil
- end
-
- it "detects division by zero as SQLSTATE 22012" do
- sqlstate = nil
- begin
- res = @conn.exec("SELECT 1/0")
- rescue PG::Error => e
- sqlstate = e.result.result_error_field( PG::PG_DIAG_SQLSTATE ).to_i
- end
- expect( sqlstate ).to eq( 22012 )
- end
-
- it "returns the same bytes in binary format that are sent in binary format" do
- binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
- bytes = File.open(binary_file, 'rb').read
- res = @conn.exec('VALUES ($1::bytea)',
- [ { :value => bytes, :format => 1 } ], 1)
- expect( res[0]['column1'] ).to eq( bytes )
- expect( res.getvalue(0,0) ).to eq( bytes )
- expect( res.values[0][0] ).to eq( bytes )
- expect( res.column_values(0)[0] ).to eq( bytes )
- end
-
- it "returns the same bytes in binary format that are sent as inline text" do
- binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
- bytes = File.open(binary_file, 'rb').read
- @conn.exec("SET standard_conforming_strings=on")
- res = @conn.exec("VALUES ('#{PG::Connection.escape_bytea(bytes)}'::bytea)", [], 1)
- expect( res[0]['column1'] ).to eq( bytes )
- expect( res.getvalue(0,0) ).to eq( bytes )
- expect( res.values[0][0] ).to eq( bytes )
- expect( res.column_values(0)[0] ).to eq( bytes )
- end
-
- it "returns the same bytes in text format that are sent in binary format" do
- binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
- bytes = File.open(binary_file, 'rb').read
- res = @conn.exec('VALUES ($1::bytea)',
- [ { :value => bytes, :format => 1 } ])
- expect( PG::Connection.unescape_bytea(res[0]['column1']) ).to eq( bytes )
- end
-
- it "returns the same bytes in text format that are sent as inline text" do
- binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
- in_bytes = File.open(binary_file, 'rb').read
-
- out_bytes = nil
- @conn.exec("SET standard_conforming_strings=on")
- res = @conn.exec("VALUES ('#{PG::Connection.escape_bytea(in_bytes)}'::bytea)", [], 0)
- out_bytes = PG::Connection.unescape_bytea(res[0]['column1'])
- expect( out_bytes ).to eq( in_bytes )
- end
-
- it "returns the parameter type of the specified prepared statement parameter" do
- query = 'SELECT * FROM pg_stat_activity WHERE user = $1::name AND query = $2::text'
- @conn.prepare( 'queryfinder', query )
- res = @conn.describe_prepared( 'queryfinder' )
-
- expect(
- @conn.exec( 'SELECT format_type($1, -1)', [res.paramtype(0)] ).getvalue( 0, 0 )
- ).to eq( 'name' )
- expect(
- @conn.exec( 'SELECT format_type($1, -1)', [res.paramtype(1)] ).getvalue( 0, 0 )
- ).to eq( 'text' )
- end
-
- it "raises an exception when a negative index is given to #fformat" do
- res = @conn.exec('SELECT * FROM pg_stat_activity')
- expect {
- res.fformat( -1 )
- }.to raise_error( ArgumentError, /column number/i )
- end
-
- it "raises an exception when a negative index is given to #fmod" do
- res = @conn.exec('SELECT * FROM pg_stat_activity')
- expect {
- res.fmod( -1 )
- }.to raise_error( ArgumentError, /column number/i )
- end
-
- it "raises an exception when a negative index is given to #[]" do
- res = @conn.exec('SELECT * FROM pg_stat_activity')
- expect {
- res[ -1 ]
- }.to raise_error( IndexError, /-1 is out of range/i )
- end
-
- it "raises allow for conversion to an array of arrays" do
- @conn.exec( 'CREATE TABLE valuestest ( foo varchar(33) )' )
- @conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar\')' )
- @conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar2\')' )
-
- res = @conn.exec( 'SELECT * FROM valuestest' )
- expect( res.values ).to eq( [ ["bar"], ["bar2"] ] )
- end
-
- # PQfmod
- it "can return the type modifier for a result column" do
- @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
- res = @conn.exec( 'SELECT * FROM fmodtest' )
- expect( res.fmod(0) ).to eq( 33 + 4 ) # Column length + varlena size (4)
- end
-
- it "raises an exception when an invalid index is passed to PG::Result#fmod" do
- @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
- res = @conn.exec( 'SELECT * FROM fmodtest' )
- expect { res.fmod(1) }.to raise_error( ArgumentError )
- end
-
- it "raises an exception when an invalid (negative) index is passed to PG::Result#fmod" do
- @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
- res = @conn.exec( 'SELECT * FROM fmodtest' )
- expect { res.fmod(-11) }.to raise_error( ArgumentError )
- end
-
- it "doesn't raise an exception when a valid index is passed to PG::Result#fmod for a" +
- " column with no typemod" do
- @conn.exec( 'CREATE TABLE fmodtest ( foo text )' )
- res = @conn.exec( 'SELECT * FROM fmodtest' )
- expect( res.fmod(0) ).to eq( -1 )
- end
-
- # PQftable
- it "can return the oid of the table from which a result column was fetched" do
- @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
- res = @conn.exec( 'SELECT * FROM ftabletest' )
-
- expect( res.ftable(0) ).to be_nonzero()
- end
-
- it "raises an exception when an invalid index is passed to PG::Result#ftable" do
- @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
- res = @conn.exec( 'SELECT * FROM ftabletest' )
-
- expect { res.ftable(18) }.to raise_error( ArgumentError )
- end
-
- it "raises an exception when an invalid (negative) index is passed to PG::Result#ftable" do
- @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
- res = @conn.exec( 'SELECT * FROM ftabletest' )
-
- expect { res.ftable(-2) }.to raise_error( ArgumentError )
- end
-
- it "doesn't raise an exception when a valid index is passed to PG::Result#ftable for a " +
- "column with no corresponding table" do
- @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
- res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftabletest' )
- expect( res.ftable(1) ).to eq( PG::INVALID_OID )
- end
-
- # PQftablecol
- it "can return the column number (within its table) of a column in a result" do
- @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
- res = @conn.exec( 'SELECT * FROM ftablecoltest' )
-
- expect( res.ftablecol(0) ).to eq( 1 )
- expect( res.ftablecol(1) ).to eq( 2 )
- end
-
- it "raises an exception when an invalid index is passed to PG::Result#ftablecol" do
- @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
- res = @conn.exec( 'SELECT * FROM ftablecoltest' )
-
- expect { res.ftablecol(32) }.to raise_error( ArgumentError )
- end
-
- it "raises an exception when an invalid (negative) index is passed to PG::Result#ftablecol" do
- @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
- res = @conn.exec( 'SELECT * FROM ftablecoltest' )
-
- expect { res.ftablecol(-1) }.to raise_error( ArgumentError )
- end
-
- it "doesnn't raise an exception when a valid index is passed to PG::Result#ftablecol for a " +
- "column with no corresponding table" do
- @conn.exec( 'CREATE TABLE ftablecoltest ( foo text )' )
- res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftablecoltest' )
- expect( res.ftablecol(1) ).to eq( 0 )
- end
-
- it "can be manually checked for failed result status (async API)" do
- @conn.send_query( "SELECT * FROM nonexistant_table" )
- res = @conn.get_result
- expect {
- res.check
- }.to raise_error( PG::Error, /relation "nonexistant_table" does not exist/ )
- end
-
- it "can return the values of a single field" do
- res = @conn.exec( "SELECT 1 AS x, 'a' AS y UNION ALL SELECT 2, 'b'" )
- expect( res.field_values('x') ).to eq( ['1', '2'] )
- expect( res.field_values('y') ).to eq( ['a', 'b'] )
- expect{ res.field_values('') }.to raise_error(IndexError)
- expect{ res.field_values(:x) }.to raise_error(TypeError)
- end
-
- it "raises a proper exception for a nonexistant table" do
- expect {
- @conn.exec( "SELECT * FROM nonexistant_table" )
- }.to raise_error( PG::UndefinedTable, /relation "nonexistant_table" does not exist/ )
- end
-
- it "raises a more generic exception for an unknown SQLSTATE" do
- old_error = PG::ERROR_CLASSES.delete('42P01')
- begin
- expect {
- @conn.exec( "SELECT * FROM nonexistant_table" )
- }.to raise_error{|error|
- expect( error ).to be_an_instance_of(PG::SyntaxErrorOrAccessRuleViolation)
- expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
- }
- ensure
- PG::ERROR_CLASSES['42P01'] = old_error
- end
- end
-
- it "raises a ServerError for an unknown SQLSTATE class" do
- old_error1 = PG::ERROR_CLASSES.delete('42P01')
- old_error2 = PG::ERROR_CLASSES.delete('42')
- begin
- expect {
- @conn.exec( "SELECT * FROM nonexistant_table" )
- }.to raise_error{|error|
- expect( error ).to be_an_instance_of(PG::ServerError)
- expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
- }
- ensure
- PG::ERROR_CLASSES['42P01'] = old_error1
- PG::ERROR_CLASSES['42'] = old_error2
- end
- end
-
- it "raises a proper exception for a nonexistant schema" do
- expect {
- @conn.exec( "DROP SCHEMA nonexistant_schema" )
- }.to raise_error( PG::InvalidSchemaName, /schema "nonexistant_schema" does not exist/ )
- end
-
- it "the raised result is nil in case of a connection error" do
- c = PG::Connection.connect_start( '127.0.0.1', 54320, "", "", "me", "xxxx", "somedb" )
- expect {
- c.exec "select 1"
- }.to raise_error {|error|
- expect( error ).to be_an_instance_of(PG::UnableToSend)
- expect( error.result ).to eq( nil )
- }
- end
-
- it "does not clear the result itself" do
- r = @conn.exec "select 1"
- expect( r.autoclear? ).to eq(false)
- expect( r.cleared? ).to eq(false)
- r.clear
- expect( r.cleared? ).to eq(true)
- end
-
- it "can be inspected before and after clear" do
- r = @conn.exec "select 1"
- expect( r.inspect ).to match(/status=PGRES_TUPLES_OK/)
- r.clear
- expect( r.inspect ).to match(/cleared/)
- end
-
- context 'result value conversions with TypeMapByColumn' do
- let!(:textdec_int){ PG::TextDecoder::Integer.new name: 'INT4', oid: 23 }
- let!(:textdec_float){ PG::TextDecoder::Float.new name: 'FLOAT4', oid: 700 }
-
- it "should allow reading, assigning and diabling type conversions" do
- res = @conn.exec( "SELECT 123" )
- expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
- res.type_map = PG::TypeMapByColumn.new [textdec_int]
- expect( res.type_map ).to be_an_instance_of(PG::TypeMapByColumn)
- expect( res.type_map.coders ).to eq( [textdec_int] )
- res.type_map = PG::TypeMapByColumn.new [textdec_float]
- expect( res.type_map.coders ).to eq( [textdec_float] )
- res.type_map = PG::TypeMapAllStrings.new
- expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
- end
-
- it "should be applied to all value retrieving methods" do
- res = @conn.exec( "SELECT 123 as f" )
- res.type_map = PG::TypeMapByColumn.new [textdec_int]
- expect( res.values ).to eq( [[123]] )
- expect( res.getvalue(0,0) ).to eq( 123 )
- expect( res[0] ).to eq( {'f' => 123 } )
- expect( res.enum_for(:each_row).to_a ).to eq( [[123]] )
- expect( res.enum_for(:each).to_a ).to eq( [{'f' => 123}] )
- expect( res.column_values(0) ).to eq( [123] )
- expect( res.field_values('f') ).to eq( [123] )
- end
-
- it "should be usable for several querys" do
- colmap = PG::TypeMapByColumn.new [textdec_int]
- res = @conn.exec( "SELECT 123" )
- res.type_map = colmap
- expect( res.values ).to eq( [[123]] )
- res = @conn.exec( "SELECT 456" )
- res.type_map = colmap
- expect( res.values ).to eq( [[456]] )
- end
-
- it "shouldn't allow invalid type maps" do
- res = @conn.exec( "SELECT 1" )
- expect{ res.type_map = 1 }.to raise_error(TypeError)
- end
- end
- end
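The streaming examples removed above exercise the single-row mode API (send_query, set_single_row_mode, stream_each), which is still supported in 1.5.x. A minimal sketch, assuming a reachable database (the connection parameters are placeholders):

    require 'pg'

    conn = PG.connect(dbname: 'testdb')  # placeholder connection

    conn.send_query('SELECT generate_series(1, 3) AS n')
    conn.set_single_row_mode

    # In single-row mode each row arrives in its own PG::Result;
    # stream_each yields the rows as the server delivers them.
    conn.get_result.stream_each { |row| p row }  # {"n"=>"1"} {"n"=>"2"} {"n"=>"3"}
    conn.get_result  # => nil once all results are consumed

    conn.close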
data/spec/pg/type_map_by_class_spec.rb
@@ -1,138 +0,0 @@
- #!/usr/bin/env rspec
- # encoding: utf-8
-
- require_relative '../helpers'
-
- require 'pg'
-
-
- describe PG::TypeMapByClass do
-
- let!(:textenc_int){ PG::TextEncoder::Integer.new name: 'INT4', oid: 23 }
- let!(:textenc_float){ PG::TextEncoder::Float.new name: 'FLOAT8', oid: 701 }
- let!(:textenc_string){ PG::TextEncoder::String.new name: 'TEXT', oid: 25 }
- let!(:binaryenc_int){ PG::BinaryEncoder::Int8.new name: 'INT8', oid: 20, format: 1 }
- let!(:pass_through_type) do
- type = Class.new(PG::SimpleEncoder) do
- def encode(*v)
- v.inspect
- end
- end.new
- type.oid = 25
- type.format = 0
- type.name = 'pass_through'
- type
- end
-
- let!(:tm) do
- tm = PG::TypeMapByClass.new
- tm[Integer] = binaryenc_int
- tm[Float] = textenc_float
- tm[Symbol] = pass_through_type
- tm
- end
-
- let!(:raise_class) do
- Class.new
- end
-
- let!(:derived_tm) do
- tm = Class.new(PG::TypeMapByClass) do
- def array_type_map_for(value)
- PG::TextEncoder::Array.new name: '_INT4', oid: 1007, elements_type: PG::TextEncoder::Integer.new
- end
- end.new
- tm[Integer] = proc{|value| textenc_int }
- tm[raise_class] = proc{|value| /invalid/ }
- tm[Array] = :array_type_map_for
- tm
- end
-
- it "should retrieve all conversions" do
- expect( tm.coders ).to eq( {
- Integer => binaryenc_int,
- Float => textenc_float,
- Symbol => pass_through_type,
- } )
- end
-
- it "should retrieve particular conversions" do
- expect( tm[Integer] ).to eq(binaryenc_int)
- expect( tm[Float] ).to eq(textenc_float)
- expect( tm[Range] ).to be_nil
- expect( derived_tm[raise_class] ).to be_kind_of(Proc)
- expect( derived_tm[Array] ).to eq(:array_type_map_for)
- end
-
- it "should allow deletion of coders" do
- tm[Integer] = nil
- expect( tm[Integer] ).to be_nil
- expect( tm.coders ).to eq( {
- Float => textenc_float,
- Symbol => pass_through_type,
- } )
- end
-
- it "forwards query param conversions to the #default_type_map" do
- tm1 = PG::TypeMapByColumn.new( [textenc_int, nil, nil] )
-
- tm2 = PG::TypeMapByClass.new
- tm2[Integer] = PG::TextEncoder::Integer.new name: 'INT2', oid: 21
- tm2.default_type_map = tm1
-
- res = @conn.exec_params( "SELECT $1, $2, $3::TEXT", ['1', 2, 3], 0, tm2 )
-
- expect( res.ftype(0) ).to eq( 23 ) # tm1
- expect( res.ftype(1) ).to eq( 21 ) # tm2
- expect( res.getvalue(0,2) ).to eq( "3" ) # TypeMapAllStrings
- end
-
- #
- # Decoding Examples
- #
-
- it "should raise an error when used for results" do
- res = @conn.exec_params( "SELECT 1", [], 1 )
- expect{ res.type_map = tm }.to raise_error(NotImplementedError, /not suitable to map result values/)
- end
-
- #
- # Encoding Examples
- #
-
- it "should allow mixed type conversions" do
- res = @conn.exec_params( "SELECT $1, $2, $3", [5, 1.23, :TestSymbol], 0, tm )
- expect( res.values ).to eq([['5', '1.23', "[:TestSymbol, #{@conn.internal_encoding.inspect}]"]])
- expect( res.ftype(0) ).to eq(20)
- end
-
- it "should expire the cache after changes to the coders" do
- res = @conn.exec_params( "SELECT $1", [5], 0, tm )
- expect( res.ftype(0) ).to eq(20)
-
- tm[Integer] = textenc_int
-
- res = @conn.exec_params( "SELECT $1", [5], 0, tm )
- expect( res.ftype(0) ).to eq(23)
- end
-
- it "should allow mixed type conversions with derived type map" do
- res = @conn.exec_params( "SELECT $1, $2", [6, [7]], 0, derived_tm )
- expect( res.values ).to eq([['6', '{7}']])
- expect( res.ftype(0) ).to eq(23)
- expect( res.ftype(1) ).to eq(1007)
- end
-
- it "should raise TypeError with derived type map" do
- expect{
- @conn.exec_params( "SELECT $1", [raise_class.new], 0, derived_tm )
- }.to raise_error(TypeError, /invalid type Regexp/)
- end
-
- it "should raise error on invalid coder object" do
- tm[TrueClass] = "dummy"
- expect{
- res = @conn.exec_params( "SELECT $1", [true], 0, tm )
- }.to raise_error(NoMethodError, /undefined method.*call/)
- end
- end
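For reference, the PG::TypeMapByClass API exercised by the removed spec above is unchanged in 1.5.x; a minimal sketch of registering per-class encoders for bind parameters (placeholder connection, encoders taken from the spec above):

    require 'pg'

    conn = PG.connect(dbname: 'testdb')  # placeholder connection

    # Choose an encoder based on the Ruby class of each bind parameter.
    tm = PG::TypeMapByClass.new
    tm[Integer] = PG::TextEncoder::Integer.new(name: 'INT4', oid: 23)
    tm[Float]   = PG::TextEncoder::Float.new(name: 'FLOAT8', oid: 701)

    res = conn.exec_params('SELECT $1, $2', [5, 1.23], 0, tm)
    p res.values  # => [["5", "1.23"]]

    conn.close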