pg 0.15.1 → 1.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- checksums.yaml.gz.sig +0 -0
- data.tar.gz.sig +0 -0
- data/BSDL +2 -2
- data/ChangeLog +0 -3022
- data/History.rdoc +370 -4
- data/Manifest.txt +39 -19
- data/README-Windows.rdoc +17 -28
- data/README.ja.rdoc +1 -2
- data/README.rdoc +113 -14
- data/Rakefile +97 -36
- data/Rakefile.cross +109 -83
- data/ext/errorcodes.def +1032 -0
- data/ext/errorcodes.rb +45 -0
- data/ext/errorcodes.txt +494 -0
- data/ext/extconf.rb +55 -52
- data/ext/gvl_wrappers.c +4 -0
- data/ext/gvl_wrappers.h +94 -38
- data/ext/pg.c +273 -121
- data/ext/pg.h +292 -50
- data/ext/pg_binary_decoder.c +229 -0
- data/ext/pg_binary_encoder.c +163 -0
- data/ext/pg_coder.c +561 -0
- data/ext/pg_connection.c +1811 -1051
- data/ext/pg_copy_coder.c +599 -0
- data/ext/pg_errors.c +95 -0
- data/ext/pg_record_coder.c +491 -0
- data/ext/pg_result.c +917 -203
- data/ext/pg_text_decoder.c +987 -0
- data/ext/pg_text_encoder.c +814 -0
- data/ext/pg_tuple.c +549 -0
- data/ext/pg_type_map.c +166 -0
- data/ext/pg_type_map_all_strings.c +116 -0
- data/ext/pg_type_map_by_class.c +244 -0
- data/ext/pg_type_map_by_column.c +313 -0
- data/ext/pg_type_map_by_mri_type.c +284 -0
- data/ext/pg_type_map_by_oid.c +356 -0
- data/ext/pg_type_map_in_ruby.c +299 -0
- data/ext/pg_util.c +149 -0
- data/ext/pg_util.h +65 -0
- data/lib/pg.rb +31 -9
- data/lib/pg/basic_type_mapping.rb +522 -0
- data/lib/pg/binary_decoder.rb +23 -0
- data/lib/pg/coder.rb +104 -0
- data/lib/pg/connection.rb +235 -30
- data/lib/pg/constants.rb +2 -1
- data/lib/pg/exceptions.rb +2 -1
- data/lib/pg/result.rb +33 -6
- data/lib/pg/text_decoder.rb +46 -0
- data/lib/pg/text_encoder.rb +59 -0
- data/lib/pg/tuple.rb +30 -0
- data/lib/pg/type_map_by_column.rb +16 -0
- data/spec/{lib/helpers.rb → helpers.rb} +154 -52
- data/spec/pg/basic_type_mapping_spec.rb +630 -0
- data/spec/pg/connection_spec.rb +1352 -426
- data/spec/pg/connection_sync_spec.rb +41 -0
- data/spec/pg/result_spec.rb +508 -105
- data/spec/pg/tuple_spec.rb +333 -0
- data/spec/pg/type_map_by_class_spec.rb +138 -0
- data/spec/pg/type_map_by_column_spec.rb +226 -0
- data/spec/pg/type_map_by_mri_type_spec.rb +136 -0
- data/spec/pg/type_map_by_oid_spec.rb +149 -0
- data/spec/pg/type_map_in_ruby_spec.rb +164 -0
- data/spec/pg/type_map_spec.rb +22 -0
- data/spec/pg/type_spec.rb +1123 -0
- data/spec/pg_spec.rb +35 -16
- metadata +163 -84
- metadata.gz.sig +0 -0
- data/sample/array_insert.rb +0 -20
- data/sample/async_api.rb +0 -106
- data/sample/async_copyto.rb +0 -39
- data/sample/async_mixed.rb +0 -56
- data/sample/check_conn.rb +0 -21
- data/sample/copyfrom.rb +0 -81
- data/sample/copyto.rb +0 -19
- data/sample/cursor.rb +0 -21
- data/sample/disk_usage_report.rb +0 -186
- data/sample/issue-119.rb +0 -94
- data/sample/losample.rb +0 -69
- data/sample/minimal-testcase.rb +0 -17
- data/sample/notify_wait.rb +0 -72
- data/sample/pg_statistics.rb +0 -294
- data/sample/replication_monitor.rb +0 -231
- data/sample/test_binary_values.rb +0 -33
- data/sample/wal_shipper.rb +0 -434
- data/sample/warehouse_partitions.rb +0 -320
data/spec/pg/connection_sync_spec.rb
ADDED
@@ -0,0 +1,41 @@
+# -*- rspec -*-
+#encoding: utf-8
+
+require_relative '../helpers'
+
+context "running with sync_* methods" do
+	before :each do
+		PG::Connection.async_api = false
+	end
+
+	after :each do
+		PG::Connection.async_api = true
+	end
+
+	fname = File.expand_path("../connection_spec.rb", __FILE__)
+	eval File.read(fname, encoding: __ENCODING__), binding, fname
+
+
+	it "enables/disables async/sync methods by #async_api" do
+		[true, false].each do |async|
+			PG::Connection.async_api = async
+
+			start = Time.now
+			t = Thread.new do
+				@conn.exec( 'select pg_sleep(1)' )
+			end
+			sleep 0.1
+
+			t.kill
+			t.join
+			dt = Time.now - start
+
+			if async
+				expect( dt ).to be < 1.0
+			else
+				expect( dt ).to be >= 1.0
+			end
+		end
+	end
+
+end
data/spec/pg/result_spec.rb
CHANGED
@@ -1,204 +1,468 @@
-
+# -*- rspec -*-
 # encoding: utf-8

-
-	require 'pathname'
+require_relative '../helpers'

-	basedir = Pathname( __FILE__ ).dirname.parent.parent
-	libdir = basedir + 'lib'
-
-	$LOAD_PATH.unshift( basedir.to_s ) unless $LOAD_PATH.include?( basedir.to_s )
-	$LOAD_PATH.unshift( libdir.to_s ) unless $LOAD_PATH.include?( libdir.to_s )
-}
-
-require 'rspec'
-require 'spec/lib/helpers'
 require 'pg'
+require 'objspace'
+

 describe PG::Result do

-
-	@conn
+	describe :field_name_type do
+		let!(:res) { @conn.exec('SELECT 1 AS a, 2 AS "B"') }
+
+		it "uses string field names per default" do
+			expect(res.field_name_type).to eq(:string)
+		end
+
+		it "can set string field names" do
+			res.field_name_type = :string
+			expect(res.field_name_type).to eq(:string)
+		end
+
+		it "can set symbol field names" do
+			res.field_name_type = :symbol
+			expect(res.field_name_type).to eq(:symbol)
+		end
+
+		it "can set static_symbol field names" do
+			res.field_name_type = :static_symbol
+			expect(res.field_name_type).to eq(:static_symbol)
+		end
+
+		it "can't set symbol field names after #fields" do
+			res.fields
+			expect{ res.field_name_type = :symbol }.to raise_error(ArgumentError, /already materialized/)
+			expect(res.field_name_type).to eq(:string)
+		end
+
+		it "can't set invalid values" do
+			expect{ res.field_name_type = :sym }.to raise_error(ArgumentError, /invalid argument :sym/)
+			expect{ res.field_name_type = "symbol" }.to raise_error(ArgumentError, /invalid argument "symbol"/)
+		end
 	end

-
-		@conn.exec(
+	it "acts as an array of hashes" do
+		res = @conn.exec("SELECT 1 AS a, 2 AS b")
+		expect( res[0]['a'] ).to eq( '1' )
+		expect( res[0]['b'] ).to eq( '2' )
 	end

-
-		@conn.exec(
+	it "acts as an array of hashes with symbols" do
+		res = @conn.exec("SELECT 1 AS a, 2 AS b")
+		res.field_name_type = :symbol
+		expect( res[0][:a] ).to eq( '1' )
+		expect( res[0][:b] ).to eq( '2' )
 	end

-
-
+	it "acts as an array of hashes with static_symbols" do
+		res = @conn.exec("SELECT 1 AS a, 2 AS b")
+		res.field_name_type = :static_symbol
+		expect( res[0][:a] ).to eq( '1' )
+		expect( res[0][:b] ).to eq( '2' )
 	end

+	it "yields a row as an array" do
+		res = @conn.exec("SELECT 1 AS a, 2 AS b")
+		list = []
+		res.each_row { |r| list << r }
+		expect( list ).to eq [['1', '2']]
+	end

-
-
-
+	it "yields a row as an Enumerator" do
+		res = @conn.exec("SELECT 1 AS a, 2 AS b")
+		e = res.each_row
+		expect( e ).to be_a_kind_of(Enumerator)
+		expect( e.size ).to eq( 1 )
+		expect( e.to_a ).to eq [['1', '2']]
+	end

-	it "
+	it "yields a row as an Enumerator of hashs" do
 		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-		res
-
+		e = res.each
+		expect( e ).to be_a_kind_of(Enumerator)
+		expect( e.size ).to eq( 1 )
+		expect( e.to_a ).to eq [{'a'=>'1', 'b'=>'2'}]
 	end

-	it "
+	it "yields a row as an Enumerator of hashs with symbols" do
 		res = @conn.exec("SELECT 1 AS a, 2 AS b")
-
-		res.
-
+		res.field_name_type = :symbol
+		expect( res.each.to_a ).to eq [{:a=>'1', :b=>'2'}]
+	end
+
+	context "result streaming in single row mode" do
+		let!(:textdec_int){ PG::TextDecoder::Integer.new name: 'INT4', oid: 23 }
+
+		it "can iterate over all rows as Hash" do
+			@conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
+			@conn.set_single_row_mode
+			expect(
+				@conn.get_result.stream_each.to_a
+			).to eq(
+				[{'a'=>"2"}, {'a'=>"3"}, {'a'=>"4"}]
+			)
+			expect(
+				@conn.get_result.enum_for(:stream_each).to_a
+			).to eq(
+				[{'b'=>"1", 'c'=>"5"}, {'b'=>"1", 'c'=>"6"}]
+			)
+			expect( @conn.get_result ).to be_nil
+		end
+
+		it "can iterate over all rows as Hash with symbols and typemap" do
+			@conn.send_query( "SELECT generate_series(2,4) AS a" )
+			@conn.set_single_row_mode
+			res = @conn.get_result.field_names_as(:symbol)
+			res.type_map = PG::TypeMapByColumn.new [textdec_int]
+			expect(
+				res.stream_each.to_a
+			).to eq(
+				[{:a=>2}, {:a=>3}, {:a=>4}]
+			)
+			expect( @conn.get_result ).to be_nil
+		end
+
+		it "keeps last result on error while iterating stream_each" do
+			@conn.send_query( "SELECT generate_series(2,4) AS a" )
+			@conn.set_single_row_mode
+			res = @conn.get_result
+			expect do
+				res.stream_each_row do
+					raise ZeroDivisionError
+				end
+			end.to raise_error(ZeroDivisionError)
+			expect( res.values ).to eq([["2"]])
+		end
+
+		it "can iterate over all rows as Array" do
+			@conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
+			@conn.set_single_row_mode
+			expect(
+				@conn.get_result.enum_for(:stream_each_row).to_a
+			).to eq(
+				[["2"], ["3"], ["4"]]
+			)
+			expect(
+				@conn.get_result.stream_each_row.to_a
+			).to eq(
+				[["1", "5"], ["1", "6"]]
+			)
+			expect( @conn.get_result ).to be_nil
+		end
+
+		it "keeps last result on error while iterating stream_each_row" do
+			@conn.send_query( "SELECT generate_series(2,4) AS a" )
+			@conn.set_single_row_mode
+			res = @conn.get_result
+			expect do
+				res.stream_each_row do
+					raise ZeroDivisionError
+				end
+			end.to raise_error(ZeroDivisionError)
+			expect( res.values ).to eq([["2"]])
+		end
+
+		it "can iterate over all rows as PG::Tuple" do
+			@conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
+			@conn.set_single_row_mode
+			tuples = @conn.get_result.stream_each_tuple.to_a
+			expect( tuples[0][0] ).to eq( "2" )
+			expect( tuples[1]["a"] ).to eq( "3" )
+			expect( tuples.size ).to eq( 3 )
+
+			tuples = @conn.get_result.enum_for(:stream_each_tuple).to_a
+			expect( tuples[-1][-1] ).to eq( "6" )
+			expect( tuples[-2]["b"] ).to eq( "1" )
+			expect( tuples.size ).to eq( 2 )
+
+			expect( @conn.get_result ).to be_nil
+		end
+
+		it "clears result on error while iterating stream_each_tuple" do
+			@conn.send_query( "SELECT generate_series(2,4) AS a" )
+			@conn.set_single_row_mode
+			res = @conn.get_result
+			expect do
+				res.stream_each_tuple do
+					raise ZeroDivisionError
+				end
+			end.to raise_error(ZeroDivisionError)
+			expect( res.cleared? ).to eq(true)
+		end
+
+		it "should reuse field names in stream_each_tuple" do
+			@conn.send_query( "SELECT generate_series(2,3) AS a" )
+			@conn.set_single_row_mode
+			tuple1, tuple2 = *@conn.get_result.stream_each_tuple.to_a
+			expect( tuple1.keys[0].object_id ).to eq(tuple2.keys[0].object_id)
+		end
+
+		it "can iterate over all rows as PG::Tuple with symbols and typemap" do
+			@conn.send_query( "SELECT generate_series(2,4) AS a" )
+			@conn.set_single_row_mode
+			res = @conn.get_result.field_names_as(:symbol)
+			res.type_map = PG::TypeMapByColumn.new [textdec_int]
+			tuples = res.stream_each_tuple.to_a
+			expect( tuples[0][0] ).to eq( 2 )
+			expect( tuples[1][:a] ).to eq( 3 )
+			expect( @conn.get_result ).to be_nil
+		end
+
+		it "complains when not in single row mode" do
+			@conn.send_query( "SELECT generate_series(2,4)" )
+			expect{
+				@conn.get_result.stream_each_row.to_a
+			}.to raise_error(PG::InvalidResultStatus, /not in single row mode/)
+		end
+
+		it "complains when intersected with get_result" do
+			@conn.send_query( "SELECT 1" )
+			@conn.set_single_row_mode
+			expect{
+				@conn.get_result.stream_each_row.each{ @conn.get_result }
+			}.to raise_error(PG::NoResultError, /no result received/)
+		end
+
+		it "raises server errors" do
+			@conn.send_query( "SELECT 0/0" )
+			expect{
+				@conn.get_result.stream_each_row.to_a
+			}.to raise_error(PG::DivisionByZero)
+		end
 	end

-	it "
-		res = @conn.
-		res[0]['n'].
+	it "inserts nil AS NULL and return NULL as nil" do
+		res = @conn.exec_params("SELECT $1::int AS n", [nil])
+		expect( res[0]['n'] ).to be_nil()
 	end

-	it "encapsulates errors in a
+	it "encapsulates errors in a PG::Error object" do
 		exception = nil
 		begin
 			@conn.exec( "SELECT * FROM nonexistant_table" )
-		rescue
+		rescue PG::Error => err
 			exception = err
 		end

 		result = exception.result

-		result.
-		result.error_field(
-		result.error_field(
-
-
-
-		result.error_field(
-
-		result.error_field(
-		result.error_field(
-		result.error_field(
-		result.error_field(
-
-
-
-
-
-
-
+		expect( result ).to be_a( described_class() )
+		expect( result.error_field(PG::PG_DIAG_SEVERITY) ).to eq( 'ERROR' )
+		expect( result.error_field(PG::PG_DIAG_SQLSTATE) ).to eq( '42P01' )
+		expect(
+			result.error_field(PG::PG_DIAG_MESSAGE_PRIMARY)
+		).to eq( 'relation "nonexistant_table" does not exist' )
+		expect( result.error_field(PG::PG_DIAG_MESSAGE_DETAIL) ).to be_nil()
+		expect( result.error_field(PG::PG_DIAG_MESSAGE_HINT) ).to be_nil()
+		expect( result.error_field(PG::PG_DIAG_STATEMENT_POSITION) ).to eq( '15' )
+		expect( result.error_field(PG::PG_DIAG_INTERNAL_POSITION) ).to be_nil()
+		expect( result.error_field(PG::PG_DIAG_INTERNAL_QUERY) ).to be_nil()
+		expect( result.error_field(PG::PG_DIAG_CONTEXT) ).to be_nil()
+		expect(
+			result.error_field(PG::PG_DIAG_SOURCE_FILE)
+		).to match( /parse_relation\.c$|namespace\.c$/ )
+		expect( result.error_field(PG::PG_DIAG_SOURCE_LINE) ).to match( /^\d+$/ )
+		expect(
+			result.error_field(PG::PG_DIAG_SOURCE_FUNCTION)
+		).to match( /^parserOpenTable$|^RangeVarGetRelid$/ )
+	end
+
+	it "encapsulates PG_DIAG_SEVERITY_NONLOCALIZED error in a PG::Error object", :postgresql_96 do
+		result = nil
+		begin
+			@conn.exec( "SELECT * FROM nonexistant_table" )
+		rescue PG::Error => err
+			result = err.result
+		end
+
+		expect( result.error_field(PG::PG_DIAG_SEVERITY_NONLOCALIZED) ).to eq( 'ERROR' )
+	end
+
+	it "encapsulates database object names for integrity constraint violations", :postgresql_93 do
+		@conn.exec( "CREATE TABLE integrity (id SERIAL PRIMARY KEY)" )
+		exception = nil
+		begin
+			@conn.exec( "INSERT INTO integrity VALUES (NULL)" )
+		rescue PG::Error => err
+			exception = err
+		end
+		result = exception.result
+
+		expect( result.error_field(PG::PG_DIAG_SCHEMA_NAME) ).to eq( 'public' )
+		expect( result.error_field(PG::PG_DIAG_TABLE_NAME) ).to eq( 'integrity' )
+		expect( result.error_field(PG::PG_DIAG_COLUMN_NAME) ).to eq( 'id' )
+		expect( result.error_field(PG::PG_DIAG_DATATYPE_NAME) ).to be_nil
+		expect( result.error_field(PG::PG_DIAG_CONSTRAINT_NAME) ).to be_nil
+	end
+
+	it "detects division by zero as SQLSTATE 22012" do
 		sqlstate = nil
 		begin
-
-		rescue
+			@conn.exec("SELECT 1/0")
+		rescue PG::Error => e
 			sqlstate = e.result.result_error_field( PG::PG_DIAG_SQLSTATE ).to_i
 		end
-		sqlstate.
+		expect( sqlstate ).to eq( 22012 )
+	end
+
+	it "provides the error message" do
+		@conn.send_query("SELECT xyz")
+		res = @conn.get_result; @conn.get_result
+		expect( res.error_message ).to match(/"xyz"/)
+		expect( res.result_error_message ).to match(/"xyz"/)
+	end
+
+	it "provides a verbose error message", :postgresql_96 do
+		@conn.send_query("SELECT xyz")
+		res = @conn.get_result; @conn.get_result
+		# PQERRORS_TERSE should give a single line result
+		expect( res.verbose_error_message(PG::PQERRORS_TERSE, PG::PQSHOW_CONTEXT_ALWAYS) ).to match(/\A.*\n\z/)
+		# PQERRORS_VERBOSE should give a multi line result
+		expect( res.result_verbose_error_message(PG::PQERRORS_VERBOSE, PG::PQSHOW_CONTEXT_NEVER) ).to match(/\n.*\n/)
 	end

-	it "
+	it "provides a verbose error message with SQLSTATE", :postgresql_12 do
+		@conn.send_query("SELECT xyz")
+		res = @conn.get_result; @conn.get_result
+		expect( res.verbose_error_message(PG::PQERRORS_SQLSTATE, PG::PQSHOW_CONTEXT_NEVER) ).to match(/42703/)
+	end
+
+	it "returns the same bytes in binary format that are sent in binary format" do
 		binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
 		bytes = File.open(binary_file, 'rb').read
-		res = @conn.
+		res = @conn.exec_params('VALUES ($1::bytea)',
 			[ { :value => bytes, :format => 1 } ], 1)
-		res[0]['column1'].
-		res.getvalue(0,0).
-		res.values[0][0].
-		res.column_values(0)[0].
+		expect( res[0]['column1'] ).to eq( bytes )
+		expect( res.getvalue(0,0) ).to eq( bytes )
+		expect( res.values[0][0] ).to eq( bytes )
+		expect( res.column_values(0)[0] ).to eq( bytes )
 	end

-	it "
+	it "returns the same bytes in binary format that are sent as inline text" do
 		binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
 		bytes = File.open(binary_file, 'rb').read
 		@conn.exec("SET standard_conforming_strings=on")
-		res = @conn.
-		res[0]['column1'].
-		res.getvalue(0,0).
-		res.values[0][0].
-		res.column_values(0)[0].
+		res = @conn.exec_params("VALUES ('#{PG::Connection.escape_bytea(bytes)}'::bytea)", [], 1)
+		expect( res[0]['column1'] ).to eq( bytes )
+		expect( res.getvalue(0,0) ).to eq( bytes )
+		expect( res.values[0][0] ).to eq( bytes )
+		expect( res.column_values(0)[0] ).to eq( bytes )
 	end

-	it "
+	it "returns the same bytes in text format that are sent in binary format" do
 		binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
 		bytes = File.open(binary_file, 'rb').read
-		res = @conn.
+		res = @conn.exec_params('VALUES ($1::bytea)',
 			[ { :value => bytes, :format => 1 } ])
-		PG::Connection.unescape_bytea(res[0]['column1']).
+		expect( PG::Connection.unescape_bytea(res[0]['column1']) ).to eq( bytes )
 	end

-	it "
+	it "returns the same bytes in text format that are sent as inline text" do
 		binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
 		in_bytes = File.open(binary_file, 'rb').read

 		out_bytes = nil
 		@conn.exec("SET standard_conforming_strings=on")
-		res = @conn.
+		res = @conn.exec_params("VALUES ('#{PG::Connection.escape_bytea(in_bytes)}'::bytea)", [], 0)
 		out_bytes = PG::Connection.unescape_bytea(res[0]['column1'])
-		out_bytes.
+		expect( out_bytes ).to eq( in_bytes )
 	end

-	it "
+	it "returns the parameter type of the specified prepared statement parameter" do
 		query = 'SELECT * FROM pg_stat_activity WHERE user = $1::name AND query = $2::text'
 		@conn.prepare( 'queryfinder', query )
 		res = @conn.describe_prepared( 'queryfinder' )

-
-
-
-
+		expect(
+			@conn.exec_params( 'SELECT format_type($1, -1)', [res.paramtype(0)] ).getvalue( 0, 0 )
+		).to eq( 'name' )
+		expect(
+			@conn.exec_params( 'SELECT format_type($1, -1)', [res.paramtype(1)] ).getvalue( 0, 0 )
+		).to eq( 'text' )
 	end

-	it "
+	it "raises an exception when a negative index is given to #fformat" do
 		res = @conn.exec('SELECT * FROM pg_stat_activity')
 		expect {
 			res.fformat( -1 )
 		}.to raise_error( ArgumentError, /column number/i )
 	end

-	it "
+	it "raises an exception when a negative index is given to #fmod" do
 		res = @conn.exec('SELECT * FROM pg_stat_activity')
 		expect {
 			res.fmod( -1 )
 		}.to raise_error( ArgumentError, /column number/i )
 	end

-	it "
+	it "raises an exception when a negative index is given to #[]" do
 		res = @conn.exec('SELECT * FROM pg_stat_activity')
 		expect {
 			res[ -1 ]
 		}.to raise_error( IndexError, /-1 is out of range/i )
 	end

-	it "
+	it "raises allow for conversion to an array of arrays" do
 		@conn.exec( 'CREATE TABLE valuestest ( foo varchar(33) )' )
 		@conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar\')' )
 		@conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar2\')' )

 		res = @conn.exec( 'SELECT * FROM valuestest' )
-		res.values.
+		expect( res.values ).to eq( [ ["bar"], ["bar2"] ] )
+	end
+
+	it "can retrieve field names" do
+		res = @conn.exec('SELECT 1 AS a, 2 AS "B"')
+		expect(res.fields).to eq(["a", "B"])
+	end
+
+	it "can retrieve field names as symbols" do
+		res = @conn.exec('SELECT 1 AS a, 2 AS "B"')
+		res.field_name_type = :symbol
+		expect(res.fields).to eq([:a, :B])
+	end
+
+	it "can retrieve single field names" do
+		res = @conn.exec('SELECT 1 AS a, 2 AS "B"')
+		expect(res.fname(0)).to eq("a")
+		expect(res.fname(1)).to eq("B")
+		expect{res.fname(2)}.to raise_error(ArgumentError)
+	end
+
+	it "can retrieve single field names as symbol" do
+		res = @conn.exec('SELECT 1 AS a, 2 AS "B"')
+		res.field_name_type = :symbol
+		expect(res.fname(0)).to eq(:a)
+		expect(res.fname(1)).to eq(:B)
+		expect{res.fname(2)}.to raise_error(ArgumentError)
 	end

 	# PQfmod
 	it "can return the type modifier for a result column" do
 		@conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
 		res = @conn.exec( 'SELECT * FROM fmodtest' )
-		res.fmod(
+		expect( res.fmod(0) ).to eq( 33 + 4 ) # Column length + varlena size (4)
 	end

-	it "
+	it "raises an exception when an invalid index is passed to PG::Result#fmod" do
 		@conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
 		res = @conn.exec( 'SELECT * FROM fmodtest' )
 		expect { res.fmod(1) }.to raise_error( ArgumentError )
 	end

-	it "
+	it "raises an exception when an invalid (negative) index is passed to PG::Result#fmod" do
 		@conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
 		res = @conn.exec( 'SELECT * FROM fmodtest' )
 		expect { res.fmod(-11) }.to raise_error( ArgumentError )
 	end

-	it "
+	it "doesn't raise an exception when a valid index is passed to PG::Result#fmod for a" +
+		" column with no typemod" do
 		@conn.exec( 'CREATE TABLE fmodtest ( foo text )' )
 		res = @conn.exec( 'SELECT * FROM fmodtest' )
-		res.fmod(
+		expect( res.fmod(0) ).to eq( -1 )
 	end

 	# PQftable
@@ -206,28 +470,28 @@ describe PG::Result do
 		@conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
 		res = @conn.exec( 'SELECT * FROM ftabletest' )

-		res.ftable(
+		expect( res.ftable(0) ).to be_nonzero()
 	end

-	it "
+	it "raises an exception when an invalid index is passed to PG::Result#ftable" do
 		@conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
 		res = @conn.exec( 'SELECT * FROM ftabletest' )

 		expect { res.ftable(18) }.to raise_error( ArgumentError )
 	end

-	it "
+	it "raises an exception when an invalid (negative) index is passed to PG::Result#ftable" do
 		@conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
 		res = @conn.exec( 'SELECT * FROM ftabletest' )

 		expect { res.ftable(-2) }.to raise_error( ArgumentError )
 	end

-	it "
+	it "doesn't raise an exception when a valid index is passed to PG::Result#ftable for a " +
 		"column with no corresponding table" do
 		@conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
 		res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftabletest' )
-		res.ftable(
+		expect( res.ftable(1) ).to eq( PG::INVALID_OID )
 	end

 	# PQftablecol
@@ -235,29 +499,29 @@ describe PG::Result do
 		@conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
 		res = @conn.exec( 'SELECT * FROM ftablecoltest' )

-		res.ftablecol(
-		res.ftablecol(
+		expect( res.ftablecol(0) ).to eq( 1 )
+		expect( res.ftablecol(1) ).to eq( 2 )
 	end

-	it "
+	it "raises an exception when an invalid index is passed to PG::Result#ftablecol" do
 		@conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
 		res = @conn.exec( 'SELECT * FROM ftablecoltest' )

 		expect { res.ftablecol(32) }.to raise_error( ArgumentError )
 	end

-	it "
+	it "raises an exception when an invalid (negative) index is passed to PG::Result#ftablecol" do
 		@conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
 		res = @conn.exec( 'SELECT * FROM ftablecoltest' )

 		expect { res.ftablecol(-1) }.to raise_error( ArgumentError )
 	end

-	it "
+	it "doesnn't raise an exception when a valid index is passed to PG::Result#ftablecol for a " +
 		"column with no corresponding table" do
 		@conn.exec( 'CREATE TABLE ftablecoltest ( foo text )' )
 		res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftablecoltest' )
-		res.ftablecol(1).
+		expect( res.ftablecol(1) ).to eq( 0 )
 	end

 	it "can be manually checked for failed result status (async API)" do
@@ -270,9 +534,148 @@ describe PG::Result do

 	it "can return the values of a single field" do
 		res = @conn.exec( "SELECT 1 AS x, 'a' AS y UNION ALL SELECT 2, 'b'" )
-		res.field_values(
-		res.field_values(
-		expect
-		expect{ res.field_values(
+		expect( res.field_values('x') ).to eq( ['1', '2'] )
+		expect( res.field_values('y') ).to eq( ['a', 'b'] )
+		expect( res.field_values(:x) ).to eq( ['1', '2'] )
+		expect{ res.field_values('') }.to raise_error(IndexError)
+		expect{ res.field_values(0) }.to raise_error(TypeError)
+	end
+
+	it "can return the values of a single tuple" do
+		res = @conn.exec( "SELECT 1 AS x, 'a' AS y UNION ALL SELECT 2, 'b'" )
+		expect( res.tuple_values(0) ).to eq( ['1', 'a'] )
+		expect( res.tuple_values(1) ).to eq( ['2', 'b'] )
+		expect{ res.tuple_values(2) }.to raise_error(IndexError)
+		expect{ res.tuple_values(-1) }.to raise_error(IndexError)
+		expect{ res.tuple_values("x") }.to raise_error(TypeError)
+	end
+
+	it "can return the values of a single vary lazy tuple" do
+		res = @conn.exec( "VALUES(1),(2)" )
+		expect( res.tuple(0) ).to be_kind_of( PG::Tuple )
+		expect( res.tuple(1) ).to be_kind_of( PG::Tuple )
+		expect{ res.tuple(2) }.to raise_error(IndexError)
+		expect{ res.tuple(-1) }.to raise_error(IndexError)
+		expect{ res.tuple("x") }.to raise_error(TypeError)
+	end
+
+	it "raises a proper exception for a nonexistant table" do
+		expect {
+			@conn.exec( "SELECT * FROM nonexistant_table" )
+		}.to raise_error( PG::UndefinedTable, /relation "nonexistant_table" does not exist/ )
+	end
+
+	it "raises a more generic exception for an unknown SQLSTATE" do
+		old_error = PG::ERROR_CLASSES.delete('42P01')
+		begin
+			expect {
+				@conn.exec( "SELECT * FROM nonexistant_table" )
+			}.to raise_error{|error|
+				expect( error ).to be_an_instance_of(PG::SyntaxErrorOrAccessRuleViolation)
+				expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
+			}
+		ensure
+			PG::ERROR_CLASSES['42P01'] = old_error
+		end
+	end
+
+	it "raises a ServerError for an unknown SQLSTATE class" do
+		old_error1 = PG::ERROR_CLASSES.delete('42P01')
+		old_error2 = PG::ERROR_CLASSES.delete('42')
+		begin
+			expect {
+				@conn.exec( "SELECT * FROM nonexistant_table" )
+			}.to raise_error{|error|
+				expect( error ).to be_an_instance_of(PG::ServerError)
+				expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
+			}
+		ensure
+			PG::ERROR_CLASSES['42P01'] = old_error1
+			PG::ERROR_CLASSES['42'] = old_error2
+		end
+	end
+
+	it "raises a proper exception for a nonexistant schema" do
+		expect {
+			@conn.exec( "DROP SCHEMA nonexistant_schema" )
+		}.to raise_error( PG::InvalidSchemaName, /schema "nonexistant_schema" does not exist/ )
+	end
+
+	it "the raised result is nil in case of a connection error" do
+		c = PG::Connection.connect_start( '127.0.0.1', 54320, "", "", "me", "xxxx", "somedb" )
+		expect {
+			c.exec "select 1"
+		}.to raise_error {|error|
+			expect( error ).to be_an_instance_of(PG::UnableToSend)
+			expect( error.result ).to eq( nil )
+		}
+	end
+
+	it "does not clear the result itself" do
+		r = @conn.exec "select 1"
+		expect( r.autoclear? ).to eq(false)
+		expect( r.cleared? ).to eq(false)
+		r.clear
+		expect( r.cleared? ).to eq(true)
+	end
+
+	it "can be inspected before and after clear" do
+		r = @conn.exec "select 1"
+		expect( r.inspect ).to match(/status=PGRES_TUPLES_OK/)
+		r.clear
+		expect( r.inspect ).to match(/cleared/)
+	end
+
+	it "should give account about memory usage" do
+		r = @conn.exec "select 1"
+		expect( ObjectSpace.memsize_of(r) ).to be > 1000
+		r.clear
+		expect( ObjectSpace.memsize_of(r) ).to be < 100
+	end
+
+	context 'result value conversions with TypeMapByColumn' do
+		let!(:textdec_int){ PG::TextDecoder::Integer.new name: 'INT4', oid: 23 }
+		let!(:textdec_float){ PG::TextDecoder::Float.new name: 'FLOAT4', oid: 700 }
+
+		it "should allow reading, assigning and diabling type conversions" do
+			res = @conn.exec( "SELECT 123" )
+			expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
+			res.type_map = PG::TypeMapByColumn.new [textdec_int]
+			expect( res.type_map ).to be_an_instance_of(PG::TypeMapByColumn)
+			expect( res.type_map.coders ).to eq( [textdec_int] )
+			res.type_map = PG::TypeMapByColumn.new [textdec_float]
+			expect( res.type_map.coders ).to eq( [textdec_float] )
+			res.type_map = PG::TypeMapAllStrings.new
+			expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
+		end
+
+		it "should be applied to all value retrieving methods" do
+			res = @conn.exec( "SELECT 123 as f" )
+			res.type_map = PG::TypeMapByColumn.new [textdec_int]
+			expect( res.values ).to eq( [[123]] )
+			expect( res.getvalue(0,0) ).to eq( 123 )
+			expect( res[0] ).to eq( {'f' => 123 } )
+			expect( res.enum_for(:each_row).to_a ).to eq( [[123]] )
+			expect( res.enum_for(:each).to_a ).to eq( [{'f' => 123}] )
+			expect( res.column_values(0) ).to eq( [123] )
+			expect( res.field_values('f') ).to eq( [123] )
+			expect( res.field_values(:f) ).to eq( [123] )
+			expect( res.tuple_values(0) ).to eq( [123] )
+		end
+
+		it "should be usable for several querys" do
+			colmap = PG::TypeMapByColumn.new [textdec_int]
+			res = @conn.exec( "SELECT 123" )
+			res.type_map = colmap
+			expect( res.values ).to eq( [[123]] )
+			res = @conn.exec( "SELECT 456" )
+			res.type_map = colmap
+			expect( res.values ).to eq( [[456]] )
+		end
+
+		it "shouldn't allow invalid type maps" do
+			res = @conn.exec( "SELECT 1" )
+			expect{ res.type_map = 1 }.to raise_error(TypeError)
+		end
 	end
 end