pg 0.18.4 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +0 -0
  3. data/BSDL +2 -2
  4. data/ChangeLog +689 -5
  5. data/History.rdoc +84 -0
  6. data/Manifest.txt +0 -18
  7. data/README.rdoc +13 -10
  8. data/Rakefile +16 -19
  9. data/Rakefile.cross +21 -24
  10. data/ext/errorcodes.def +33 -0
  11. data/ext/errorcodes.txt +15 -1
  12. data/ext/extconf.rb +21 -33
  13. data/ext/gvl_wrappers.c +4 -0
  14. data/ext/gvl_wrappers.h +27 -39
  15. data/ext/pg.c +18 -50
  16. data/ext/pg.h +13 -80
  17. data/ext/pg_binary_encoder.c +8 -8
  18. data/ext/pg_coder.c +31 -10
  19. data/ext/pg_connection.c +340 -225
  20. data/ext/pg_copy_coder.c +34 -4
  21. data/ext/pg_result.c +24 -22
  22. data/ext/pg_text_encoder.c +62 -42
  23. data/ext/pg_type_map.c +14 -7
  24. data/lib/pg/basic_type_mapping.rb +35 -8
  25. data/lib/pg/connection.rb +53 -12
  26. data/lib/pg/result.rb +10 -5
  27. data/lib/pg/text_decoder.rb +7 -0
  28. data/lib/pg/text_encoder.rb +8 -0
  29. data/lib/pg.rb +18 -10
  30. data/spec/helpers.rb +8 -15
  31. data/spec/pg/basic_type_mapping_spec.rb +54 -0
  32. data/spec/pg/connection_spec.rb +384 -209
  33. data/spec/pg/result_spec.rb +14 -7
  34. data/spec/pg/type_map_by_class_spec.rb +2 -2
  35. data/spec/pg/type_map_by_mri_type_spec.rb +1 -1
  36. data/spec/pg/type_spec.rb +83 -3
  37. data/spec/pg_spec.rb +1 -1
  38. data.tar.gz.sig +0 -0
  39. metadata +55 -64
  40. metadata.gz.sig +0 -0
  41. data/sample/array_insert.rb +0 -20
  42. data/sample/async_api.rb +0 -106
  43. data/sample/async_copyto.rb +0 -39
  44. data/sample/async_mixed.rb +0 -56
  45. data/sample/check_conn.rb +0 -21
  46. data/sample/copyfrom.rb +0 -81
  47. data/sample/copyto.rb +0 -19
  48. data/sample/cursor.rb +0 -21
  49. data/sample/disk_usage_report.rb +0 -186
  50. data/sample/issue-119.rb +0 -94
  51. data/sample/losample.rb +0 -69
  52. data/sample/minimal-testcase.rb +0 -17
  53. data/sample/notify_wait.rb +0 -72
  54. data/sample/pg_statistics.rb +0 -294
  55. data/sample/replication_monitor.rb +0 -231
  56. data/sample/test_binary_values.rb +0 -33
  57. data/sample/wal_shipper.rb +0 -434
  58. data/sample/warehouse_partitions.rb +0 -320
data/lib/pg/basic_type_mapping.rb CHANGED
@@ -188,7 +188,8 @@ module PG::BasicTypeRegistry
  # register_type 'polygon', OID::Text.new
  # register_type 'circle', OID::Text.new
  # register_type 'hstore', OID::Hstore.new
- # register_type 'json', OID::Json.new
+ register_type 0, 'json', PG::TextEncoder::JSON, PG::TextDecoder::JSON
+ alias_type 0, 'jsonb', 'json'
  # register_type 'citext', OID::Text.new
  # register_type 'ltree', OID::Text.new
  #
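With 'json' registered and 'jsonb' aliased to it, JSON and JSONB result values are decoded into plain Ruby objects once a basic type map is attached to the connection. A minimal sketch of the effect (connection parameters are placeholders; assumes a reachable database running PostgreSQL 9.4+):

    require 'pg'
    conn = PG::Connection.new( dbname: 'test' )
    conn.type_map_for_results = PG::BasicTypeMapForResults.new( conn )
    res = conn.exec( "SELECT '{\"b\": [2,3]}'::jsonb" )
    p res.getvalue( 0, 0 )   # => {"b"=>[2, 3]}, decoded by PG::TextDecoder::JSON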
@@ -226,8 +227,8 @@ end
  #
  # Example:
  # conn = PG::Connection.new
- # # Assign a default ruleset for type casts of input and output values.
- # conn.type_mapping = PG::BasicTypeMapping.new(conn)
+ # # Assign a default ruleset for type casts of output values.
+ # conn.type_map_for_results = PG::BasicTypeMapForResults.new(conn)
  # # Execute a query.
  # res = conn.exec_params( "SELECT $1::INT", ['5'] )
  # # Retrieve and cast the result value. Value format is 0 (text) and OID is 20. Therefore typecasting
@@ -236,8 +237,28 @@ end
  #
  # PG::TypeMapByOid#fit_to_result(result, false) can be used to generate
  # a result independent PG::TypeMapByColumn type map, which can subsequently be used
- # to cast #get_copy_data fields. See also PG::BasicTypeMapBasedOnResult .
+ # to cast #get_copy_data fields:
+ #
+ # For the following table:
+ # conn.exec( "CREATE TABLE copytable AS VALUES('a', 123, '{5,4,3}'::INT[])" )
+ #
+ # # Retrieve table OIDs per empty result set.
+ # res = conn.exec( "SELECT * FROM copytable LIMIT 0" )
+ # # Build a type map for common database to ruby type decoders.
+ # btm = PG::BasicTypeMapForResults.new(conn)
+ # # Build a PG::TypeMapByColumn with decoders suitable for copytable.
+ # tm = btm.build_column_map( res )
+ # row_decoder = PG::TextDecoder::CopyRow.new type_map: tm
+ #
+ # conn.copy_data( "COPY copytable TO STDOUT", row_decoder ) do |res|
+ # while row=conn.get_copy_data
+ # p row
+ # end
+ # end
+ # This prints the rows with type casted columns:
+ # ["a", 123, [5, 4, 3]]
  #
+ # See also PG::BasicTypeMapBasedOnResult for the encoder direction.
  class PG::BasicTypeMapForResults < PG::TypeMapByOid
  include PG::BasicTypeRegistry
 
@@ -290,12 +311,17 @@ end
  #
  # # Retrieve table OIDs per empty result set.
  # res = conn.exec( "SELECT * FROM copytable LIMIT 0" )
- # tm = basic_type_mapping.build_column_map( res )
+ # # Build a type map for common ruby to database type encoders.
+ # btm = PG::BasicTypeMapBasedOnResult.new(conn)
+ # # Build a PG::TypeMapByColumn with encoders suitable for copytable.
+ # tm = btm.build_column_map( res )
  # row_encoder = PG::TextEncoder::CopyRow.new type_map: tm
  #
  # conn.copy_data( "COPY copytable FROM STDIN", row_encoder ) do |res|
  # conn.put_copy_data ['a', 123, [5,4,3]]
  # end
+ # This inserts a single row into copytable with type casts from ruby to
+ # database types.
  class PG::BasicTypeMapBasedOnResult < PG::TypeMapByOid
  include PG::BasicTypeRegistry
 
@@ -314,15 +340,16 @@ end
  # OIDs of supported type casts are not hard-coded in the sources, but are retrieved from the
  # PostgreSQL's pg_type table in PG::BasicTypeMapForQueries.new .
  #
- # Query params are type casted based on the MRI internal type of the given value.
+ # Query params are type casted based on the class of the given value.
  #
  # Higher level libraries will most likely not make use of this class, but use their
- # own set of rules to choose suitable encoders and decoders.
+ # own derivation of PG::TypeMapByClass or another set of rules to choose suitable
+ # encoders and decoders for the values to be sent.
  #
  # Example:
  # conn = PG::Connection.new
  # # Assign a default ruleset for type casts of input and output values.
- # conn.type_mapping_for_queries = PG::BasicTypeMapForQueries.new(conn)
+ # conn.type_map_for_queries = PG::BasicTypeMapForQueries.new(conn)
  # # Execute a query. The Integer param value is typecasted internally by PG::BinaryEncoder::Int8.
  # # The format of the parameter is set to 1 (binary) and the OID of this parameter is set to 20 (int8).
  # res = conn.exec_params( "SELECT $1", [5] )
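Taken together, the query-side and result-side maps can be combined on one connection. A minimal round-trip sketch (placeholder connection parameters; illustrative only):

    conn = PG::Connection.new( dbname: 'test' )
    conn.type_map_for_queries = PG::BasicTypeMapForQueries.new( conn )
    conn.type_map_for_results = PG::BasicTypeMapForResults.new( conn )
    # The Integer param is sent as binary int8 (OID 20); the int8 result is decoded back to an Integer.
    res = conn.exec_params( "SELECT $1 + 1", [5] )
    p res.getvalue( 0, 0 )   # => 6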
data/lib/pg/connection.rb CHANGED
@@ -47,7 +47,7 @@ class PG::Connection
 
  if args.length == 1
  case args.first
- when URI, URI.regexp
+ when URI, /\A#{URI.regexp}\z/
  uri = URI(args.first)
  options.merge!( Hash[URI.decode_www_form( uri.query )] ) if uri.query
  when /=/
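The anchored regexp above means only a string that is entirely a URI takes the URI branch; keyword/value conninfo strings that merely contain URI-like fragments fall through to the /=/ branch. For reference, a URI-style connection string looks like this (host and credentials are placeholders):

    conn = PG::Connection.new( 'postgres://user@localhost:5432/testdb?connect_timeout=10' )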
@@ -85,7 +85,7 @@ class PG::Connection
 
 
  # call-seq:
- # conn.copy_data( sql ) {|sql_result| ... } -> PG::Result
+ # conn.copy_data( sql [, coder] ) {|sql_result| ... } -> PG::Result
  #
  # Execute a copy process for transfering data to or from the server.
  #
@@ -109,13 +109,26 @@ class PG::Connection
  # of blocking mode of operation, #copy_data is preferred to raw calls
  # of #put_copy_data, #get_copy_data and #put_copy_end.
  #
+ # _coder_ can be a PG::Coder derivation
+ # (typically PG::TextEncoder::CopyRow or PG::TextDecoder::CopyRow).
+ # This enables encoding of data fields given to #put_copy_data
+ # or decoding of fields received by #get_copy_data.
+ #
  # Example with CSV input format:
- # conn.exec "create table my_table (a text,b text,c text,d text,e text)"
+ # conn.exec "create table my_table (a text,b text,c text,d text)"
  # conn.copy_data "COPY my_table FROM STDIN CSV" do
- # conn.put_copy_data "some,csv,data,to,copy\n"
- # conn.put_copy_data "more,csv,data,to,copy\n"
+ # conn.put_copy_data "some,data,to,copy\n"
+ # conn.put_copy_data "more,data,to,copy\n"
+ # end
+ # This creates +my_table+ and inserts two CSV rows.
+ #
+ # The same with text format encoder PG::TextEncoder::CopyRow
+ # and Array input:
+ # enco = PG::TextEncoder::CopyRow.new
+ # conn.copy_data "COPY my_table FROM STDIN", enco do
+ # conn.put_copy_data ['some', 'data', 'to', 'copy']
+ # conn.put_copy_data ['more', 'data', 'to', 'copy']
  # end
- # This creates +my_table+ and inserts two rows.
  #
  # Example with CSV output format:
  # conn.copy_data "COPY my_table TO STDOUT CSV" do
@@ -124,8 +137,21 @@ class PG::Connection
  # end
  # end
  # This prints all rows of +my_table+ to stdout:
- # "some,csv,data,to,copy\n"
- # "more,csv,data,to,copy\n"
+ # "some,data,to,copy\n"
+ # "more,data,to,copy\n"
+ #
+ # The same with text format decoder PG::TextDecoder::CopyRow
+ # and Array output:
+ # deco = PG::TextDecoder::CopyRow.new
+ # conn.copy_data "COPY my_table TO STDOUT", deco do
+ # while row=conn.get_copy_data
+ # p row
+ # end
+ # end
+ # This receives all rows of +my_table+ as ruby array:
+ # ["some", "data", "to", "copy"]
+ # ["more", "data", "to", "copy"]
+
  def copy_data( sql, coder=nil )
  res = exec( sql )
 
@@ -165,7 +191,7 @@ class PG::Connection
  raise
  else
  res = get_last_result
- if res.result_status != PGRES_COMMAND_OK
+ if !res || res.result_status != PGRES_COMMAND_OK
  while get_copy_data
  end
  while get_result
@@ -224,8 +250,23 @@ class PG::Connection
  end
  end
 
- end # class PG::Connection
+ # Method 'ssl_attribute' was introduced in PostgreSQL 9.5.
+ if self.instance_methods.find{|m| m.to_sym == :ssl_attribute }
+ # call-seq:
+ # conn.ssl_attributes -> Hash<String,String>
+ #
+ # Returns SSL-related information about the connection as key/value pairs
+ #
+ # The available attributes varies depending on the SSL library being used,
+ # and the type of connection.
+ #
+ # See also #ssl_attribute
+ def ssl_attributes
+ ssl_attribute_names.each.with_object({}) do |n,h|
+ h[n] = ssl_attribute(n)
+ end
+ end
+ end
 
- # Backward-compatible alias
- PGconn = PG::Connection
+ end # class PG::Connection
 
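The new #ssl_attributes helper simply folds every #ssl_attribute value into a Hash, and it is only defined when the underlying libpq (9.5+) provides ssl_attribute. Illustrative usage and output (attribute names and values depend on the SSL library and connection; the shown values are made up):

    conn = PG::Connection.new( dbname: 'test', sslmode: 'require' )
    p conn.ssl_attributes
    # e.g. {"library"=>"OpenSSL", "protocol"=>"TLSv1.2", "key_bits"=>"256", "cipher"=>"ECDHE-RSA-AES256-GCM-SHA384", "compression"=>"off"}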
data/lib/pg/result.rb CHANGED
data/lib/pg/result.rb CHANGED
@@ -12,15 +12,20 @@ class PG::Result
  # See PG::BasicTypeMapForResults
  def map_types!(type_map)
  self.type_map = type_map
- self
+ return self
  end
 
+
+ ### Return a String representation of the object suitable for debugging.
  def inspect
  str = self.to_s
- str[-1,0] = " status=#{res_status(result_status)} ntuples=#{ntuples} nfields=#{nfields} cmd_tuples=#{cmd_tuples}"
- str
+ str[-1,0] = if cleared?
+ " cleared"
+ else
+ " status=#{res_status(result_status)} ntuples=#{ntuples} nfields=#{nfields} cmd_tuples=#{cmd_tuples}"
+ end
+ return str
  end
+
  end # class PG::Result
 
- # Backward-compatible alias
- PGresult = PG::Result
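With the reworked #inspect, a cleared result no longer tries to read its status fields. Illustrative output (object addresses shortened):

    res = conn.exec( "SELECT 1" )
    res.inspect   # => "#<PG::Result:0x… status=PGRES_TUPLES_OK ntuples=1 nfields=1 cmd_tuples=1>"
    res.clear
    res.inspect   # => "#<PG::Result:0x… cleared>"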
data/lib/pg/text_decoder.rb CHANGED
@@ -1,6 +1,7 @@
  #!/usr/bin/env ruby
 
  require 'date'
+ require 'json'
 
  module PG
  module TextDecoder
@@ -39,6 +40,12 @@ module PG
  end
  end
  end
+
+ class JSON < SimpleDecoder
+ def decode(string, tuple=nil, field=nil)
+ ::JSON.parse(string, quirks_mode: true)
+ end
+ end
  end
  end # module PG
 
data/lib/pg/text_encoder.rb CHANGED
@@ -1,5 +1,7 @@
  #!/usr/bin/env ruby
 
+ require 'json'
+
  module PG
  module TextEncoder
  class Date < SimpleEncoder
@@ -22,6 +24,12 @@ module PG
  value.respond_to?(:strftime) ? value.strftime(STRFTIME_ISO_DATETIME_WITH_TIMEZONE) : value
  end
  end
+
+ class JSON < SimpleEncoder
+ def encode(value)
+ ::JSON.generate(value, quirks_mode: true)
+ end
+ end
  end
  end # module PG
 
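Since both coders delegate to the json library, they can also be used stand-alone, without a connection. A small round trip (quirks_mode permits scalar top-level values):

    require 'pg'
    enc = PG::TextEncoder::JSON.new
    dec = PG::TextDecoder::JSON.new
    str = enc.encode( {"b" => [2, 3]} )   # => "{\"b\":[2,3]}"
    dec.decode( str )                     # => {"b"=>[2, 3]}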
data/lib/pg.rb CHANGED
@@ -8,11 +8,22 @@ rescue LoadError
  major_minor = RUBY_VERSION[ /^(\d+\.\d+)/ ] or
  raise "Oops, can't extract the major/minor version from #{RUBY_VERSION.dump}"
 
- # Set the PATH environment variable, so that libpq.dll can be found.
- old_path = ENV['PATH']
- ENV['PATH'] = "#{File.expand_path("../#{RUBY_PLATFORM}", __FILE__)};#{old_path}"
- require "#{major_minor}/pg_ext"
- ENV['PATH'] = old_path
+ add_dll_path = proc do |path, &block|
+ begin
+ require 'ruby_installer/runtime'
+ RubyInstaller::Runtime.add_dll_directory(path, &block)
+ rescue LoadError
+ old_path = ENV['PATH']
+ ENV['PATH'] = "#{path};#{old_path}"
+ block.call
+ ENV['PATH'] = old_path
+ end
+ end
+
+ # Temporary add this directory for DLL search, so that libpq.dll can be found.
+ add_dll_path.call(__dir__) do
+ require "#{major_minor}/pg_ext"
+ end
  else
  raise
  end
@@ -24,10 +35,10 @@ end
  module PG
 
  # Library version
- VERSION = '0.18.4'
+ VERSION = '1.0.0'
 
  # VCS revision
- REVISION = %q$Revision: da42b972b5ab $
+ REVISION = %q$Revision: fef434914848 $
 
  class NotAllCopyDataRetrieved < PG::Error
  end
@@ -59,6 +70,3 @@ module PG
  end # module PG
 
 
- # Backward-compatible aliase
- PGError = PG::Error
-
data/spec/helpers.rb CHANGED
@@ -5,7 +5,9 @@ require 'rspec'
  require 'shellwords'
  require 'pg'
 
- TEST_DIRECTORY = Pathname.getwd + "tmp_test_specs"
+ DEFAULT_TEST_DIR_STR = File.join(Dir.pwd, "tmp_test_specs")
+ TEST_DIR_STR = ENV['RUBY_PG_TEST_DIR'] || DEFAULT_TEST_DIR_STR
+ TEST_DIRECTORY = Pathname.new(TEST_DIR_STR)
 
  module PG::TestingHelpers
 
@@ -251,7 +253,7 @@ module PG::TestingHelpers
 
  def check_for_lingering_connections( conn )
  conn.exec( "SELECT * FROM pg_stat_activity" ) do |res|
- conns = res.find_all {|row| row['pid'].to_i != conn.backend_pid }
+ conns = res.find_all {|row| row['pid'].to_i != conn.backend_pid && ["client backend", nil].include?(row["backend_type"]) }
  unless conns.empty?
  puts "Lingering connections remain:"
  conns.each do |row|
@@ -339,17 +341,8 @@ RSpec.configure do |config|
  config.filter_run_excluding :socket_io unless
  PG::Connection.instance_methods.map( &:to_sym ).include?( :socket_io )
 
- config.filter_run_excluding :postgresql_90 unless
- PG::Connection.instance_methods.map( &:to_sym ).include?( :escape_literal )
-
- if !PG.respond_to?( :library_version )
- config.filter_run_excluding( :postgresql_91, :postgresql_92, :postgresql_93, :postgresql_94 )
- elsif PG.library_version < 90200
- config.filter_run_excluding( :postgresql_92, :postgresql_93, :postgresql_94 )
- elsif PG.library_version < 90300
- config.filter_run_excluding( :postgresql_93, :postgresql_94 )
- elsif PG.library_version < 90400
- config.filter_run_excluding( :postgresql_94 )
- end
+ config.filter_run_excluding( :postgresql_93 ) if PG.library_version < 90300
+ config.filter_run_excluding( :postgresql_94 ) if PG.library_version < 90400
+ config.filter_run_excluding( :postgresql_95 ) if PG.library_version < 90500
+ config.filter_run_excluding( :postgresql_10 ) if PG.library_version < 100000
  end
-
data/spec/pg/basic_type_mapping_spec.rb CHANGED
@@ -166,6 +166,27 @@ describe 'Basic type mapping' do
  end
  end
 
+ it "should do JSON conversions", :postgresql_94 do
+ [0].each do |format|
+ ['JSON', 'JSONB'].each do |type|
+ res = @conn.exec( "SELECT CAST('123' AS #{type}),
+ CAST('12.3' AS #{type}),
+ CAST('true' AS #{type}),
+ CAST('false' AS #{type}),
+ CAST('null' AS #{type}),
+ CAST('[1, \"a\", null]' AS #{type}),
+ CAST('{\"b\" : [2,3]}' AS #{type})", [], format )
+ expect( res.getvalue(0,0) ).to eq( 123 )
+ expect( res.getvalue(0,1) ).to be_within(0.1).of( 12.3 )
+ expect( res.getvalue(0,2) ).to eq( true )
+ expect( res.getvalue(0,3) ).to eq( false )
+ expect( res.getvalue(0,4) ).to eq( nil )
+ expect( res.getvalue(0,5) ).to eq( [1, "a", nil] )
+ expect( res.getvalue(0,6) ).to eq( {"b" => [2, 3]} )
+ end
+ end
+ end
+
  it "should do array type conversions" do
  [0].each do |format|
  res = @conn.exec( "SELECT CAST('{1,2,3}' AS INT2[]), CAST('{{1,2},{3,4}}' AS INT2[][]),
@@ -228,6 +249,39 @@ describe 'Basic type mapping' do
  res = @conn.exec( "SELECT * FROM copytable" )
  expect( res.values ).to eq( [['a', '123', '{5,4,3}'], ['b', '234', '{2,3}']] )
  end
+
+ it "can do JSON conversions", :postgresql_94 do
+ ['JSON', 'JSONB'].each do |type|
+ sql = "SELECT CAST('123' AS #{type}),
+ CAST('12.3' AS #{type}),
+ CAST('true' AS #{type}),
+ CAST('false' AS #{type}),
+ CAST('null' AS #{type}),
+ CAST('[1, \"a\", null]' AS #{type}),
+ CAST('{\"b\" : [2,3]}' AS #{type})"
+
+ tm = basic_type_mapping.build_column_map( @conn.exec( sql ) )
+ expect( tm.coders.map(&:name) ).to eq( [type.downcase] * 7 )
+
+ res = @conn.exec_params( "SELECT $1, $2, $3, $4, $5, $6, $7",
+ [ 123,
+ 12.3,
+ true,
+ false,
+ nil,
+ [1, "a", nil],
+ {"b" => [2, 3]},
+ ], 0, tm )
+
+ expect( res.getvalue(0,0) ).to eq( "123" )
+ expect( res.getvalue(0,1) ).to eq( "12.3" )
+ expect( res.getvalue(0,2) ).to eq( "true" )
+ expect( res.getvalue(0,3) ).to eq( "false" )
+ expect( res.getvalue(0,4) ).to eq( nil )
+ expect( res.getvalue(0,5).gsub(" ","") ).to eq( "[1,\"a\",null]" )
+ expect( res.getvalue(0,6).gsub(" ","") ).to eq( "{\"b\":[2,3]}" )
+ end
+ end
  end
 
  context "with usage of result oids for copy encoder selection"