pg 1.2.3 → 1.5.3
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.appveyor.yml +42 -0
- data/.gems +6 -0
- data/.github/workflows/binary-gems.yml +117 -0
- data/.github/workflows/source-gem.yml +137 -0
- data/.gitignore +22 -0
- data/.hgsigs +34 -0
- data/.hgtags +41 -0
- data/.irbrc +23 -0
- data/.pryrc +23 -0
- data/.tm_properties +21 -0
- data/.travis.yml +49 -0
- data/Gemfile +14 -0
- data/History.md +876 -0
- data/Manifest.txt +0 -1
- data/README.ja.md +276 -0
- data/README.md +286 -0
- data/Rakefile +33 -135
- data/Rakefile.cross +12 -13
- data/certs/ged.pem +24 -0
- data/certs/larskanis-2022.pem +26 -0
- data/certs/larskanis-2023.pem +24 -0
- data/ext/errorcodes.def +12 -0
- data/ext/errorcodes.rb +0 -0
- data/ext/errorcodes.txt +4 -1
- data/ext/extconf.rb +100 -25
- data/ext/gvl_wrappers.c +4 -0
- data/ext/gvl_wrappers.h +23 -0
- data/ext/pg.c +72 -57
- data/ext/pg.h +28 -4
- data/ext/pg_binary_decoder.c +80 -1
- data/ext/pg_binary_encoder.c +225 -1
- data/ext/pg_coder.c +96 -33
- data/ext/pg_connection.c +996 -697
- data/ext/pg_copy_coder.c +351 -33
- data/ext/pg_errors.c +1 -1
- data/ext/pg_record_coder.c +50 -19
- data/ext/pg_result.c +177 -64
- data/ext/pg_text_decoder.c +29 -11
- data/ext/pg_text_encoder.c +29 -16
- data/ext/pg_tuple.c +83 -60
- data/ext/pg_type_map.c +44 -10
- data/ext/pg_type_map_all_strings.c +17 -3
- data/ext/pg_type_map_by_class.c +54 -27
- data/ext/pg_type_map_by_column.c +73 -31
- data/ext/pg_type_map_by_mri_type.c +48 -19
- data/ext/pg_type_map_by_oid.c +59 -27
- data/ext/pg_type_map_in_ruby.c +55 -21
- data/ext/pg_util.c +2 -2
- data/lib/pg/basic_type_map_based_on_result.rb +67 -0
- data/lib/pg/basic_type_map_for_queries.rb +198 -0
- data/lib/pg/basic_type_map_for_results.rb +104 -0
- data/lib/pg/basic_type_registry.rb +299 -0
- data/lib/pg/binary_decoder/date.rb +9 -0
- data/lib/pg/binary_decoder/timestamp.rb +26 -0
- data/lib/pg/binary_encoder/timestamp.rb +20 -0
- data/lib/pg/coder.rb +15 -13
- data/lib/pg/connection.rb +743 -83
- data/lib/pg/exceptions.rb +14 -1
- data/lib/pg/text_decoder/date.rb +18 -0
- data/lib/pg/text_decoder/inet.rb +9 -0
- data/lib/pg/text_decoder/json.rb +14 -0
- data/lib/pg/text_decoder/numeric.rb +9 -0
- data/lib/pg/text_decoder/timestamp.rb +30 -0
- data/lib/pg/text_encoder/date.rb +12 -0
- data/lib/pg/text_encoder/inet.rb +28 -0
- data/lib/pg/text_encoder/json.rb +14 -0
- data/lib/pg/text_encoder/numeric.rb +9 -0
- data/lib/pg/text_encoder/timestamp.rb +24 -0
- data/lib/pg/version.rb +4 -0
- data/lib/pg.rb +94 -39
- data/misc/openssl-pg-segfault.rb +31 -0
- data/misc/postgres/History.txt +9 -0
- data/misc/postgres/Manifest.txt +5 -0
- data/misc/postgres/README.txt +21 -0
- data/misc/postgres/Rakefile +21 -0
- data/misc/postgres/lib/postgres.rb +16 -0
- data/misc/ruby-pg/History.txt +9 -0
- data/misc/ruby-pg/Manifest.txt +5 -0
- data/misc/ruby-pg/README.txt +21 -0
- data/misc/ruby-pg/Rakefile +21 -0
- data/misc/ruby-pg/lib/ruby/pg.rb +16 -0
- data/pg.gemspec +34 -0
- data/rakelib/task_extension.rb +46 -0
- data/sample/array_insert.rb +20 -0
- data/sample/async_api.rb +102 -0
- data/sample/async_copyto.rb +39 -0
- data/sample/async_mixed.rb +56 -0
- data/sample/check_conn.rb +21 -0
- data/sample/copydata.rb +71 -0
- data/sample/copyfrom.rb +81 -0
- data/sample/copyto.rb +19 -0
- data/sample/cursor.rb +21 -0
- data/sample/disk_usage_report.rb +177 -0
- data/sample/issue-119.rb +94 -0
- data/sample/losample.rb +69 -0
- data/sample/minimal-testcase.rb +17 -0
- data/sample/notify_wait.rb +72 -0
- data/sample/pg_statistics.rb +285 -0
- data/sample/replication_monitor.rb +222 -0
- data/sample/test_binary_values.rb +33 -0
- data/sample/wal_shipper.rb +434 -0
- data/sample/warehouse_partitions.rb +311 -0
- data/translation/.po4a-version +7 -0
- data/translation/po/all.pot +910 -0
- data/translation/po/ja.po +1047 -0
- data/translation/po4a.cfg +12 -0
- data.tar.gz.sig +0 -0
- metadata +142 -210
- metadata.gz.sig +0 -0
- data/ChangeLog +0 -0
- data/History.rdoc +0 -578
- data/README.ja.rdoc +0 -13
- data/README.rdoc +0 -213
- data/lib/pg/basic_type_mapping.rb +0 -522
- data/lib/pg/binary_decoder.rb +0 -23
- data/lib/pg/constants.rb +0 -12
- data/lib/pg/text_decoder.rb +0 -46
- data/lib/pg/text_encoder.rb +0 -59
- data/spec/data/expected_trace.out +0 -26
- data/spec/data/random_binary_data +0 -0
- data/spec/helpers.rb +0 -380
- data/spec/pg/basic_type_mapping_spec.rb +0 -630
- data/spec/pg/connection_spec.rb +0 -1949
- data/spec/pg/connection_sync_spec.rb +0 -41
- data/spec/pg/result_spec.rb +0 -681
- data/spec/pg/tuple_spec.rb +0 -333
- data/spec/pg/type_map_by_class_spec.rb +0 -138
- data/spec/pg/type_map_by_column_spec.rb +0 -226
- data/spec/pg/type_map_by_mri_type_spec.rb +0 -136
- data/spec/pg/type_map_by_oid_spec.rb +0 -149
- data/spec/pg/type_map_in_ruby_spec.rb +0 -164
- data/spec/pg/type_map_spec.rb +0 -22
- data/spec/pg/type_spec.rb +0 -1123
- data/spec/pg_spec.rb +0 -50
data/spec/pg/result_spec.rb
DELETED
@@ -1,681 +0,0 @@
-# -*- rspec -*-
-# encoding: utf-8
-
-require_relative '../helpers'
-
-require 'pg'
-require 'objspace'
-
-
-describe PG::Result do
-
-	describe :field_name_type do
-		let!(:res) { @conn.exec('SELECT 1 AS a, 2 AS "B"') }
-
-		it "uses string field names per default" do
-			expect(res.field_name_type).to eq(:string)
-		end
-
-		it "can set string field names" do
-			res.field_name_type = :string
-			expect(res.field_name_type).to eq(:string)
-		end
-
-		it "can set symbol field names" do
-			res.field_name_type = :symbol
-			expect(res.field_name_type).to eq(:symbol)
-		end
-
-		it "can set static_symbol field names" do
-			res.field_name_type = :static_symbol
-			expect(res.field_name_type).to eq(:static_symbol)
-		end
-
-		it "can't set symbol field names after #fields" do
-			res.fields
-			expect{ res.field_name_type = :symbol }.to raise_error(ArgumentError, /already materialized/)
-			expect(res.field_name_type).to eq(:string)
-		end
-
-		it "can't set invalid values" do
-			expect{ res.field_name_type = :sym }.to raise_error(ArgumentError, /invalid argument :sym/)
-			expect{ res.field_name_type = "symbol" }.to raise_error(ArgumentError, /invalid argument "symbol"/)
-		end
-	end
-
-	it "acts as an array of hashes" do
-		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-		expect( res[0]['a'] ).to eq( '1' )
-		expect( res[0]['b'] ).to eq( '2' )
-	end
-
-	it "acts as an array of hashes with symbols" do
-		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-		res.field_name_type = :symbol
-		expect( res[0][:a] ).to eq( '1' )
-		expect( res[0][:b] ).to eq( '2' )
-	end
-
-	it "acts as an array of hashes with static_symbols" do
-		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-		res.field_name_type = :static_symbol
-		expect( res[0][:a] ).to eq( '1' )
-		expect( res[0][:b] ).to eq( '2' )
-	end
-
-	it "yields a row as an array" do
-		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-		list = []
-		res.each_row { |r| list << r }
-		expect( list ).to eq [['1', '2']]
-	end
-
-	it "yields a row as an Enumerator" do
-		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-		e = res.each_row
-		expect( e ).to be_a_kind_of(Enumerator)
-		expect( e.size ).to eq( 1 )
-		expect( e.to_a ).to eq [['1', '2']]
-	end
-
-	it "yields a row as an Enumerator of hashs" do
-		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-		e = res.each
-		expect( e ).to be_a_kind_of(Enumerator)
-		expect( e.size ).to eq( 1 )
-		expect( e.to_a ).to eq [{'a'=>'1', 'b'=>'2'}]
-	end
-
-	it "yields a row as an Enumerator of hashs with symbols" do
-		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-		res.field_name_type = :symbol
-		expect( res.each.to_a ).to eq [{:a=>'1', :b=>'2'}]
-	end
-
-	context "result streaming in single row mode" do
-		let!(:textdec_int){ PG::TextDecoder::Integer.new name: 'INT4', oid: 23 }
-
-		it "can iterate over all rows as Hash" do
-			@conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
-			@conn.set_single_row_mode
-			expect(
-				@conn.get_result.stream_each.to_a
-			).to eq(
-				[{'a'=>"2"}, {'a'=>"3"}, {'a'=>"4"}]
-			)
-			expect(
-				@conn.get_result.enum_for(:stream_each).to_a
-			).to eq(
-				[{'b'=>"1", 'c'=>"5"}, {'b'=>"1", 'c'=>"6"}]
-			)
-			expect( @conn.get_result ).to be_nil
-		end
-
-		it "can iterate over all rows as Hash with symbols and typemap" do
-			@conn.send_query( "SELECT generate_series(2,4) AS a" )
-			@conn.set_single_row_mode
-			res = @conn.get_result.field_names_as(:symbol)
-			res.type_map = PG::TypeMapByColumn.new [textdec_int]
-			expect(
-				res.stream_each.to_a
-			).to eq(
-				[{:a=>2}, {:a=>3}, {:a=>4}]
-			)
-			expect( @conn.get_result ).to be_nil
-		end
-
-		it "keeps last result on error while iterating stream_each" do
-			@conn.send_query( "SELECT generate_series(2,4) AS a" )
-			@conn.set_single_row_mode
-			res = @conn.get_result
-			expect do
-				res.stream_each_row do
-					raise ZeroDivisionError
-				end
-			end.to raise_error(ZeroDivisionError)
-			expect( res.values ).to eq([["2"]])
-		end
-
-		it "can iterate over all rows as Array" do
-			@conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
-			@conn.set_single_row_mode
-			expect(
-				@conn.get_result.enum_for(:stream_each_row).to_a
-			).to eq(
-				[["2"], ["3"], ["4"]]
-			)
-			expect(
-				@conn.get_result.stream_each_row.to_a
-			).to eq(
-				[["1", "5"], ["1", "6"]]
-			)
-			expect( @conn.get_result ).to be_nil
-		end
-
-		it "keeps last result on error while iterating stream_each_row" do
-			@conn.send_query( "SELECT generate_series(2,4) AS a" )
-			@conn.set_single_row_mode
-			res = @conn.get_result
-			expect do
-				res.stream_each_row do
-					raise ZeroDivisionError
-				end
-			end.to raise_error(ZeroDivisionError)
-			expect( res.values ).to eq([["2"]])
-		end
-
-		it "can iterate over all rows as PG::Tuple" do
-			@conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
-			@conn.set_single_row_mode
-			tuples = @conn.get_result.stream_each_tuple.to_a
-			expect( tuples[0][0] ).to eq( "2" )
-			expect( tuples[1]["a"] ).to eq( "3" )
-			expect( tuples.size ).to eq( 3 )
-
-			tuples = @conn.get_result.enum_for(:stream_each_tuple).to_a
-			expect( tuples[-1][-1] ).to eq( "6" )
-			expect( tuples[-2]["b"] ).to eq( "1" )
-			expect( tuples.size ).to eq( 2 )
-
-			expect( @conn.get_result ).to be_nil
-		end
-
-		it "clears result on error while iterating stream_each_tuple" do
-			@conn.send_query( "SELECT generate_series(2,4) AS a" )
-			@conn.set_single_row_mode
-			res = @conn.get_result
-			expect do
-				res.stream_each_tuple do
-					raise ZeroDivisionError
-				end
-			end.to raise_error(ZeroDivisionError)
-			expect( res.cleared? ).to eq(true)
-		end
-
-		it "should reuse field names in stream_each_tuple" do
-			@conn.send_query( "SELECT generate_series(2,3) AS a" )
-			@conn.set_single_row_mode
-			tuple1, tuple2 = *@conn.get_result.stream_each_tuple.to_a
-			expect( tuple1.keys[0].object_id ).to eq(tuple2.keys[0].object_id)
-		end
-
-		it "can iterate over all rows as PG::Tuple with symbols and typemap" do
-			@conn.send_query( "SELECT generate_series(2,4) AS a" )
-			@conn.set_single_row_mode
-			res = @conn.get_result.field_names_as(:symbol)
-			res.type_map = PG::TypeMapByColumn.new [textdec_int]
-			tuples = res.stream_each_tuple.to_a
-			expect( tuples[0][0] ).to eq( 2 )
-			expect( tuples[1][:a] ).to eq( 3 )
-			expect( @conn.get_result ).to be_nil
-		end
-
-		it "complains when not in single row mode" do
-			@conn.send_query( "SELECT generate_series(2,4)" )
-			expect{
-				@conn.get_result.stream_each_row.to_a
-			}.to raise_error(PG::InvalidResultStatus, /not in single row mode/)
-		end
-
-		it "complains when intersected with get_result" do
-			@conn.send_query( "SELECT 1" )
-			@conn.set_single_row_mode
-			expect{
-				@conn.get_result.stream_each_row.each{ @conn.get_result }
-			}.to raise_error(PG::NoResultError, /no result received/)
-		end
-
-		it "raises server errors" do
-			@conn.send_query( "SELECT 0/0" )
-			expect{
-				@conn.get_result.stream_each_row.to_a
-			}.to raise_error(PG::DivisionByZero)
-		end
-	end
-
-	it "inserts nil AS NULL and return NULL as nil" do
-		res = @conn.exec_params("SELECT $1::int AS n", [nil])
-		expect( res[0]['n'] ).to be_nil()
-	end
-
-	it "encapsulates errors in a PG::Error object" do
-		exception = nil
-		begin
-			@conn.exec( "SELECT * FROM nonexistant_table" )
-		rescue PG::Error => err
-			exception = err
-		end
-
-		result = exception.result
-
-		expect( result ).to be_a( described_class() )
-		expect( result.error_field(PG::PG_DIAG_SEVERITY) ).to eq( 'ERROR' )
-		expect( result.error_field(PG::PG_DIAG_SQLSTATE) ).to eq( '42P01' )
-		expect(
-			result.error_field(PG::PG_DIAG_MESSAGE_PRIMARY)
-		).to eq( 'relation "nonexistant_table" does not exist' )
-		expect( result.error_field(PG::PG_DIAG_MESSAGE_DETAIL) ).to be_nil()
-		expect( result.error_field(PG::PG_DIAG_MESSAGE_HINT) ).to be_nil()
-		expect( result.error_field(PG::PG_DIAG_STATEMENT_POSITION) ).to eq( '15' )
-		expect( result.error_field(PG::PG_DIAG_INTERNAL_POSITION) ).to be_nil()
-		expect( result.error_field(PG::PG_DIAG_INTERNAL_QUERY) ).to be_nil()
-		expect( result.error_field(PG::PG_DIAG_CONTEXT) ).to be_nil()
-		expect(
-			result.error_field(PG::PG_DIAG_SOURCE_FILE)
-		).to match( /parse_relation\.c$|namespace\.c$/ )
-		expect( result.error_field(PG::PG_DIAG_SOURCE_LINE) ).to match( /^\d+$/ )
-		expect(
-			result.error_field(PG::PG_DIAG_SOURCE_FUNCTION)
-		).to match( /^parserOpenTable$|^RangeVarGetRelid$/ )
-	end
-
-	it "encapsulates PG_DIAG_SEVERITY_NONLOCALIZED error in a PG::Error object", :postgresql_96 do
-		result = nil
-		begin
-			@conn.exec( "SELECT * FROM nonexistant_table" )
-		rescue PG::Error => err
-			result = err.result
-		end
-
-		expect( result.error_field(PG::PG_DIAG_SEVERITY_NONLOCALIZED) ).to eq( 'ERROR' )
-	end
-
-	it "encapsulates database object names for integrity constraint violations", :postgresql_93 do
-		@conn.exec( "CREATE TABLE integrity (id SERIAL PRIMARY KEY)" )
-		exception = nil
-		begin
-			@conn.exec( "INSERT INTO integrity VALUES (NULL)" )
-		rescue PG::Error => err
-			exception = err
-		end
-		result = exception.result
-
-		expect( result.error_field(PG::PG_DIAG_SCHEMA_NAME) ).to eq( 'public' )
-		expect( result.error_field(PG::PG_DIAG_TABLE_NAME) ).to eq( 'integrity' )
-		expect( result.error_field(PG::PG_DIAG_COLUMN_NAME) ).to eq( 'id' )
-		expect( result.error_field(PG::PG_DIAG_DATATYPE_NAME) ).to be_nil
-		expect( result.error_field(PG::PG_DIAG_CONSTRAINT_NAME) ).to be_nil
-	end
-
-	it "detects division by zero as SQLSTATE 22012" do
-		sqlstate = nil
-		begin
-			@conn.exec("SELECT 1/0")
-		rescue PG::Error => e
-			sqlstate = e.result.result_error_field( PG::PG_DIAG_SQLSTATE ).to_i
-		end
-		expect( sqlstate ).to eq( 22012 )
-	end
-
-	it "provides the error message" do
-		@conn.send_query("SELECT xyz")
-		res = @conn.get_result; @conn.get_result
-		expect( res.error_message ).to match(/"xyz"/)
-		expect( res.result_error_message ).to match(/"xyz"/)
-	end
-
-	it "provides a verbose error message", :postgresql_96 do
-		@conn.send_query("SELECT xyz")
-		res = @conn.get_result; @conn.get_result
-		# PQERRORS_TERSE should give a single line result
-		expect( res.verbose_error_message(PG::PQERRORS_TERSE, PG::PQSHOW_CONTEXT_ALWAYS) ).to match(/\A.*\n\z/)
-		# PQERRORS_VERBOSE should give a multi line result
-		expect( res.result_verbose_error_message(PG::PQERRORS_VERBOSE, PG::PQSHOW_CONTEXT_NEVER) ).to match(/\n.*\n/)
-	end
-
-	it "provides a verbose error message with SQLSTATE", :postgresql_12 do
-		@conn.send_query("SELECT xyz")
-		res = @conn.get_result; @conn.get_result
-		expect( res.verbose_error_message(PG::PQERRORS_SQLSTATE, PG::PQSHOW_CONTEXT_NEVER) ).to match(/42703/)
-	end
-
-	it "returns the same bytes in binary format that are sent in binary format" do
-		binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
-		bytes = File.open(binary_file, 'rb').read
-		res = @conn.exec_params('VALUES ($1::bytea)',
-			[ { :value => bytes, :format => 1 } ], 1)
-		expect( res[0]['column1'] ).to eq( bytes )
-		expect( res.getvalue(0,0) ).to eq( bytes )
-		expect( res.values[0][0] ).to eq( bytes )
-		expect( res.column_values(0)[0] ).to eq( bytes )
-	end
-
-	it "returns the same bytes in binary format that are sent as inline text" do
-		binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
-		bytes = File.open(binary_file, 'rb').read
-		@conn.exec("SET standard_conforming_strings=on")
-		res = @conn.exec_params("VALUES ('#{PG::Connection.escape_bytea(bytes)}'::bytea)", [], 1)
-		expect( res[0]['column1'] ).to eq( bytes )
-		expect( res.getvalue(0,0) ).to eq( bytes )
-		expect( res.values[0][0] ).to eq( bytes )
-		expect( res.column_values(0)[0] ).to eq( bytes )
-	end
-
-	it "returns the same bytes in text format that are sent in binary format" do
-		binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
-		bytes = File.open(binary_file, 'rb').read
-		res = @conn.exec_params('VALUES ($1::bytea)',
-			[ { :value => bytes, :format => 1 } ])
-		expect( PG::Connection.unescape_bytea(res[0]['column1']) ).to eq( bytes )
-	end
-
-	it "returns the same bytes in text format that are sent as inline text" do
-		binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
-		in_bytes = File.open(binary_file, 'rb').read
-
-		out_bytes = nil
-		@conn.exec("SET standard_conforming_strings=on")
-		res = @conn.exec_params("VALUES ('#{PG::Connection.escape_bytea(in_bytes)}'::bytea)", [], 0)
-		out_bytes = PG::Connection.unescape_bytea(res[0]['column1'])
-		expect( out_bytes ).to eq( in_bytes )
-	end
-
-	it "returns the parameter type of the specified prepared statement parameter" do
-		query = 'SELECT * FROM pg_stat_activity WHERE user = $1::name AND query = $2::text'
-		@conn.prepare( 'queryfinder', query )
-		res = @conn.describe_prepared( 'queryfinder' )
-
-		expect(
-			@conn.exec_params( 'SELECT format_type($1, -1)', [res.paramtype(0)] ).getvalue( 0, 0 )
-		).to eq( 'name' )
-		expect(
-			@conn.exec_params( 'SELECT format_type($1, -1)', [res.paramtype(1)] ).getvalue( 0, 0 )
-		).to eq( 'text' )
-	end
-
-	it "raises an exception when a negative index is given to #fformat" do
-		res = @conn.exec('SELECT * FROM pg_stat_activity')
-		expect {
-			res.fformat( -1 )
-		}.to raise_error( ArgumentError, /column number/i )
-	end
-
-	it "raises an exception when a negative index is given to #fmod" do
-		res = @conn.exec('SELECT * FROM pg_stat_activity')
-		expect {
-			res.fmod( -1 )
-		}.to raise_error( ArgumentError, /column number/i )
-	end
-
-	it "raises an exception when a negative index is given to #[]" do
-		res = @conn.exec('SELECT * FROM pg_stat_activity')
-		expect {
-			res[ -1 ]
-		}.to raise_error( IndexError, /-1 is out of range/i )
-	end
-
-	it "raises allow for conversion to an array of arrays" do
-		@conn.exec( 'CREATE TABLE valuestest ( foo varchar(33) )' )
-		@conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar\')' )
-		@conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar2\')' )
-
-		res = @conn.exec( 'SELECT * FROM valuestest' )
-		expect( res.values ).to eq( [ ["bar"], ["bar2"] ] )
-	end
-
-	it "can retrieve field names" do
-		res = @conn.exec('SELECT 1 AS a, 2 AS "B"')
-		expect(res.fields).to eq(["a", "B"])
-	end
-
-	it "can retrieve field names as symbols" do
-		res = @conn.exec('SELECT 1 AS a, 2 AS "B"')
-		res.field_name_type = :symbol
-		expect(res.fields).to eq([:a, :B])
-	end
-
-	it "can retrieve single field names" do
-		res = @conn.exec('SELECT 1 AS a, 2 AS "B"')
-		expect(res.fname(0)).to eq("a")
-		expect(res.fname(1)).to eq("B")
-		expect{res.fname(2)}.to raise_error(ArgumentError)
-	end
-
-	it "can retrieve single field names as symbol" do
-		res = @conn.exec('SELECT 1 AS a, 2 AS "B"')
-		res.field_name_type = :symbol
-		expect(res.fname(0)).to eq(:a)
-		expect(res.fname(1)).to eq(:B)
-		expect{res.fname(2)}.to raise_error(ArgumentError)
-	end
-
-	# PQfmod
-	it "can return the type modifier for a result column" do
-		@conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
-		res = @conn.exec( 'SELECT * FROM fmodtest' )
-		expect( res.fmod(0) ).to eq( 33 + 4 ) # Column length + varlena size (4)
-	end
-
-	it "raises an exception when an invalid index is passed to PG::Result#fmod" do
-		@conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
-		res = @conn.exec( 'SELECT * FROM fmodtest' )
-		expect { res.fmod(1) }.to raise_error( ArgumentError )
-	end
-
-	it "raises an exception when an invalid (negative) index is passed to PG::Result#fmod" do
-		@conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
-		res = @conn.exec( 'SELECT * FROM fmodtest' )
-		expect { res.fmod(-11) }.to raise_error( ArgumentError )
-	end
-
-	it "doesn't raise an exception when a valid index is passed to PG::Result#fmod for a" +
-		" column with no typemod" do
-		@conn.exec( 'CREATE TABLE fmodtest ( foo text )' )
-		res = @conn.exec( 'SELECT * FROM fmodtest' )
-		expect( res.fmod(0) ).to eq( -1 )
-	end
-
-	# PQftable
-	it "can return the oid of the table from which a result column was fetched" do
-		@conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
-		res = @conn.exec( 'SELECT * FROM ftabletest' )
-
-		expect( res.ftable(0) ).to be_nonzero()
-	end
-
-	it "raises an exception when an invalid index is passed to PG::Result#ftable" do
-		@conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
-		res = @conn.exec( 'SELECT * FROM ftabletest' )
-
-		expect { res.ftable(18) }.to raise_error( ArgumentError )
-	end
-
-	it "raises an exception when an invalid (negative) index is passed to PG::Result#ftable" do
-		@conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
-		res = @conn.exec( 'SELECT * FROM ftabletest' )
-
-		expect { res.ftable(-2) }.to raise_error( ArgumentError )
-	end
-
-	it "doesn't raise an exception when a valid index is passed to PG::Result#ftable for a " +
-		"column with no corresponding table" do
-		@conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
-		res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftabletest' )
-		expect( res.ftable(1) ).to eq( PG::INVALID_OID )
-	end
-
-	# PQftablecol
-	it "can return the column number (within its table) of a column in a result" do
-		@conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
-		res = @conn.exec( 'SELECT * FROM ftablecoltest' )
-
-		expect( res.ftablecol(0) ).to eq( 1 )
-		expect( res.ftablecol(1) ).to eq( 2 )
-	end
-
-	it "raises an exception when an invalid index is passed to PG::Result#ftablecol" do
-		@conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
-		res = @conn.exec( 'SELECT * FROM ftablecoltest' )
-
-		expect { res.ftablecol(32) }.to raise_error( ArgumentError )
-	end
-
-	it "raises an exception when an invalid (negative) index is passed to PG::Result#ftablecol" do
-		@conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
-		res = @conn.exec( 'SELECT * FROM ftablecoltest' )
-
-		expect { res.ftablecol(-1) }.to raise_error( ArgumentError )
-	end
-
-	it "doesnn't raise an exception when a valid index is passed to PG::Result#ftablecol for a " +
-		"column with no corresponding table" do
-		@conn.exec( 'CREATE TABLE ftablecoltest ( foo text )' )
-		res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftablecoltest' )
-		expect( res.ftablecol(1) ).to eq( 0 )
-	end
-
-	it "can be manually checked for failed result status (async API)" do
-		@conn.send_query( "SELECT * FROM nonexistant_table" )
-		res = @conn.get_result
-		expect {
-			res.check
-		}.to raise_error( PG::Error, /relation "nonexistant_table" does not exist/ )
-	end
-
-	it "can return the values of a single field" do
-		res = @conn.exec( "SELECT 1 AS x, 'a' AS y UNION ALL SELECT 2, 'b'" )
-		expect( res.field_values('x') ).to eq( ['1', '2'] )
-		expect( res.field_values('y') ).to eq( ['a', 'b'] )
-		expect( res.field_values(:x) ).to eq( ['1', '2'] )
-		expect{ res.field_values('') }.to raise_error(IndexError)
-		expect{ res.field_values(0) }.to raise_error(TypeError)
-	end
-
-	it "can return the values of a single tuple" do
-		res = @conn.exec( "SELECT 1 AS x, 'a' AS y UNION ALL SELECT 2, 'b'" )
-		expect( res.tuple_values(0) ).to eq( ['1', 'a'] )
-		expect( res.tuple_values(1) ).to eq( ['2', 'b'] )
-		expect{ res.tuple_values(2) }.to raise_error(IndexError)
-		expect{ res.tuple_values(-1) }.to raise_error(IndexError)
-		expect{ res.tuple_values("x") }.to raise_error(TypeError)
-	end
-
-	it "can return the values of a single vary lazy tuple" do
-		res = @conn.exec( "VALUES(1),(2)" )
-		expect( res.tuple(0) ).to be_kind_of( PG::Tuple )
-		expect( res.tuple(1) ).to be_kind_of( PG::Tuple )
-		expect{ res.tuple(2) }.to raise_error(IndexError)
-		expect{ res.tuple(-1) }.to raise_error(IndexError)
-		expect{ res.tuple("x") }.to raise_error(TypeError)
-	end
-
-	it "raises a proper exception for a nonexistant table" do
-		expect {
-			@conn.exec( "SELECT * FROM nonexistant_table" )
-		}.to raise_error( PG::UndefinedTable, /relation "nonexistant_table" does not exist/ )
-	end
-
-	it "raises a more generic exception for an unknown SQLSTATE" do
-		old_error = PG::ERROR_CLASSES.delete('42P01')
-		begin
-			expect {
-				@conn.exec( "SELECT * FROM nonexistant_table" )
-			}.to raise_error{|error|
-				expect( error ).to be_an_instance_of(PG::SyntaxErrorOrAccessRuleViolation)
-				expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
-			}
-		ensure
-			PG::ERROR_CLASSES['42P01'] = old_error
-		end
-	end
-
-	it "raises a ServerError for an unknown SQLSTATE class" do
-		old_error1 = PG::ERROR_CLASSES.delete('42P01')
-		old_error2 = PG::ERROR_CLASSES.delete('42')
-		begin
-			expect {
-				@conn.exec( "SELECT * FROM nonexistant_table" )
-			}.to raise_error{|error|
-				expect( error ).to be_an_instance_of(PG::ServerError)
-				expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
-			}
-		ensure
-			PG::ERROR_CLASSES['42P01'] = old_error1
-			PG::ERROR_CLASSES['42'] = old_error2
-		end
-	end
-
-	it "raises a proper exception for a nonexistant schema" do
-		expect {
-			@conn.exec( "DROP SCHEMA nonexistant_schema" )
-		}.to raise_error( PG::InvalidSchemaName, /schema "nonexistant_schema" does not exist/ )
-	end
-
-	it "the raised result is nil in case of a connection error" do
-		c = PG::Connection.connect_start( '127.0.0.1', 54320, "", "", "me", "xxxx", "somedb" )
-		expect {
-			c.exec "select 1"
-		}.to raise_error {|error|
-			expect( error ).to be_an_instance_of(PG::UnableToSend)
-			expect( error.result ).to eq( nil )
-		}
-	end
-
-	it "does not clear the result itself" do
-		r = @conn.exec "select 1"
-		expect( r.autoclear? ).to eq(false)
-		expect( r.cleared? ).to eq(false)
-		r.clear
-		expect( r.cleared? ).to eq(true)
-	end
-
-	it "can be inspected before and after clear" do
-		r = @conn.exec "select 1"
-		expect( r.inspect ).to match(/status=PGRES_TUPLES_OK/)
-		r.clear
-		expect( r.inspect ).to match(/cleared/)
-	end
-
-	it "should give account about memory usage" do
-		r = @conn.exec "select 1"
-		expect( ObjectSpace.memsize_of(r) ).to be > 1000
-		r.clear
-		expect( ObjectSpace.memsize_of(r) ).to be < 100
-	end
-
-	context 'result value conversions with TypeMapByColumn' do
-		let!(:textdec_int){ PG::TextDecoder::Integer.new name: 'INT4', oid: 23 }
-		let!(:textdec_float){ PG::TextDecoder::Float.new name: 'FLOAT4', oid: 700 }
-
-		it "should allow reading, assigning and diabling type conversions" do
-			res = @conn.exec( "SELECT 123" )
-			expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
-			res.type_map = PG::TypeMapByColumn.new [textdec_int]
-			expect( res.type_map ).to be_an_instance_of(PG::TypeMapByColumn)
-			expect( res.type_map.coders ).to eq( [textdec_int] )
-			res.type_map = PG::TypeMapByColumn.new [textdec_float]
-			expect( res.type_map.coders ).to eq( [textdec_float] )
-			res.type_map = PG::TypeMapAllStrings.new
-			expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
-		end
-
-		it "should be applied to all value retrieving methods" do
-			res = @conn.exec( "SELECT 123 as f" )
-			res.type_map = PG::TypeMapByColumn.new [textdec_int]
-			expect( res.values ).to eq( [[123]] )
-			expect( res.getvalue(0,0) ).to eq( 123 )
-			expect( res[0] ).to eq( {'f' => 123 } )
-			expect( res.enum_for(:each_row).to_a ).to eq( [[123]] )
-			expect( res.enum_for(:each).to_a ).to eq( [{'f' => 123}] )
-			expect( res.column_values(0) ).to eq( [123] )
-			expect( res.field_values('f') ).to eq( [123] )
-			expect( res.field_values(:f) ).to eq( [123] )
-			expect( res.tuple_values(0) ).to eq( [123] )
-		end
-
-		it "should be usable for several querys" do
-			colmap = PG::TypeMapByColumn.new [textdec_int]
-			res = @conn.exec( "SELECT 123" )
-			res.type_map = colmap
-			expect( res.values ).to eq( [[123]] )
-			res = @conn.exec( "SELECT 456" )
-			res.type_map = colmap
-			expect( res.values ).to eq( [[456]] )
-		end
-
-		it "shouldn't allow invalid type maps" do
-			res = @conn.exec( "SELECT 1" )
-			expect{ res.type_map = 1 }.to raise_error(TypeError)
-		end
-	end
-end