pg 0.18.0 → 1.0.0

Files changed (66)
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +0 -0
  3. data/BSDL +2 -2
  4. data/ChangeLog +1221 -4
  5. data/History.rdoc +130 -0
  6. data/Manifest.txt +0 -18
  7. data/README-Windows.rdoc +15 -26
  8. data/README.rdoc +16 -10
  9. data/Rakefile +32 -23
  10. data/Rakefile.cross +56 -38
  11. data/ext/errorcodes.def +33 -0
  12. data/ext/errorcodes.txt +15 -1
  13. data/ext/extconf.rb +27 -35
  14. data/ext/gvl_wrappers.c +4 -0
  15. data/ext/gvl_wrappers.h +27 -39
  16. data/ext/pg.c +19 -51
  17. data/ext/pg.h +22 -79
  18. data/ext/pg_binary_decoder.c +3 -1
  19. data/ext/pg_binary_encoder.c +14 -12
  20. data/ext/pg_coder.c +31 -10
  21. data/ext/pg_connection.c +350 -263
  22. data/ext/pg_copy_coder.c +34 -4
  23. data/ext/pg_result.c +27 -25
  24. data/ext/pg_text_decoder.c +9 -10
  25. data/ext/pg_text_encoder.c +93 -73
  26. data/ext/pg_type_map.c +20 -13
  27. data/ext/pg_type_map_by_column.c +7 -7
  28. data/ext/pg_type_map_by_mri_type.c +2 -2
  29. data/ext/pg_type_map_in_ruby.c +4 -7
  30. data/ext/util.c +3 -3
  31. data/ext/util.h +1 -1
  32. data/lib/pg/basic_type_mapping.rb +69 -42
  33. data/lib/pg/connection.rb +89 -38
  34. data/lib/pg/result.rb +10 -5
  35. data/lib/pg/text_decoder.rb +12 -3
  36. data/lib/pg/text_encoder.rb +8 -0
  37. data/lib/pg.rb +18 -10
  38. data/spec/helpers.rb +9 -16
  39. data/spec/pg/basic_type_mapping_spec.rb +58 -4
  40. data/spec/pg/connection_spec.rb +477 -217
  41. data/spec/pg/result_spec.rb +14 -7
  42. data/spec/pg/type_map_by_class_spec.rb +2 -2
  43. data/spec/pg/type_map_by_mri_type_spec.rb +1 -1
  44. data/spec/pg/type_spec.rb +145 -33
  45. data/spec/pg_spec.rb +1 -1
  46. data.tar.gz.sig +0 -0
  47. metadata +67 -66
  48. metadata.gz.sig +0 -0
  49. data/sample/array_insert.rb +0 -20
  50. data/sample/async_api.rb +0 -106
  51. data/sample/async_copyto.rb +0 -39
  52. data/sample/async_mixed.rb +0 -56
  53. data/sample/check_conn.rb +0 -21
  54. data/sample/copyfrom.rb +0 -81
  55. data/sample/copyto.rb +0 -19
  56. data/sample/cursor.rb +0 -21
  57. data/sample/disk_usage_report.rb +0 -186
  58. data/sample/issue-119.rb +0 -94
  59. data/sample/losample.rb +0 -69
  60. data/sample/minimal-testcase.rb +0 -17
  61. data/sample/notify_wait.rb +0 -72
  62. data/sample/pg_statistics.rb +0 -294
  63. data/sample/replication_monitor.rb +0 -231
  64. data/sample/test_binary_values.rb +0 -33
  65. data/sample/wal_shipper.rb +0 -434
  66. data/sample/warehouse_partitions.rb +0 -320
data/lib/pg/basic_type_mapping.rb CHANGED
@@ -25,9 +25,9 @@ module PG::BasicTypeRegistry
 
  # populate the enum types
  _enums, leaves = leaves.partition { |row| row['typinput'] == 'enum_in' }
- # enums.each do |row|
- # coder_map[row['oid'].to_i] = OID::Enum.new
- # end
+ # enums.each do |row|
+ # coder_map[row['oid'].to_i] = OID::Enum.new
+ # end
 
  # populate the base types
  leaves.find_all { |row| coders_by_name.key?(row['typname']) }.each do |row|
@@ -41,9 +41,9 @@ module PG::BasicTypeRegistry
  _records_by_oid = result.group_by { |row| row['oid'] }
 
  # populate composite types
- # nodes.each do |row|
- # add_oid row, records_by_oid, coder_map
- # end
+ # nodes.each do |row|
+ # add_oid row, records_by_oid, coder_map
+ # end
 
  if arraycoder
  # populate array types
@@ -62,11 +62,11 @@ module PG::BasicTypeRegistry
  end
 
  # populate range types
- # ranges.find_all { |row| coder_map.key? row['rngsubtype'].to_i }.each do |row|
- # subcoder = coder_map[row['rngsubtype'].to_i]
- # range = OID::Range.new subcoder
- # coder_map[row['oid'].to_i] = range
- # end
+ # ranges.find_all { |row| coder_map.key? row['rngsubtype'].to_i }.each do |row|
+ # subcoder = coder_map[row['rngsubtype'].to_i]
+ # range = OID::Range.new subcoder
+ # coder_map[row['oid'].to_i] = range
+ # end
 
  @coders = coder_map.values
  @coders_by_name = @coders.inject({}){|h, t| h[t.name] = t; h }
@@ -154,46 +154,47 @@ module PG::BasicTypeRegistry
  alias_type 0, 'int8', 'int2'
  alias_type 0, 'oid', 'int2'
 
- # register_type 0, 'numeric', OID::Decimal.new
+ # register_type 0, 'numeric', OID::Decimal.new
  register_type 0, 'text', PG::TextEncoder::String, PG::TextDecoder::String
  alias_type 0, 'varchar', 'text'
  alias_type 0, 'char', 'text'
  alias_type 0, 'bpchar', 'text'
  alias_type 0, 'xml', 'text'
 
- # # FIXME: why are we keeping these types as strings?
- # alias_type 'tsvector', 'text'
- # alias_type 'interval', 'text'
- # alias_type 'macaddr', 'text'
- # alias_type 'uuid', 'text'
- #
- # register_type 'money', OID::Money.new
+ # FIXME: why are we keeping these types as strings?
+ # alias_type 'tsvector', 'text'
+ # alias_type 'interval', 'text'
+ # alias_type 'macaddr', 'text'
+ # alias_type 'uuid', 'text'
+ #
+ # register_type 'money', OID::Money.new
  # There is no PG::TextEncoder::Bytea, because it's simple and more efficient to send bytea-data
  # in binary format, either with PG::BinaryEncoder::Bytea or in Hash param format.
  register_type 0, 'bytea', nil, PG::TextDecoder::Bytea
  register_type 0, 'bool', PG::TextEncoder::Boolean, PG::TextDecoder::Boolean
- # register_type 'bit', OID::Bit.new
- # register_type 'varbit', OID::Bit.new
- #
+ # register_type 'bit', OID::Bit.new
+ # register_type 'varbit', OID::Bit.new
+
  register_type 0, 'float4', PG::TextEncoder::Float, PG::TextDecoder::Float
  alias_type 0, 'float8', 'float4'
 
  register_type 0, 'timestamp', PG::TextEncoder::TimestampWithoutTimeZone, PG::TextDecoder::TimestampWithoutTimeZone
  register_type 0, 'timestamptz', PG::TextEncoder::TimestampWithTimeZone, PG::TextDecoder::TimestampWithTimeZone
  register_type 0, 'date', PG::TextEncoder::Date, PG::TextDecoder::Date
- # register_type 'time', OID::Time.new
- #
- # register_type 'path', OID::Text.new
- # register_type 'point', OID::Point.new
- # register_type 'polygon', OID::Text.new
- # register_type 'circle', OID::Text.new
- # register_type 'hstore', OID::Hstore.new
- # register_type 'json', OID::Json.new
- # register_type 'citext', OID::Text.new
- # register_type 'ltree', OID::Text.new
- #
- # register_type 'cidr', OID::Cidr.new
- # alias_type 'inet', 'cidr'
+ # register_type 'time', OID::Time.new
+ #
+ # register_type 'path', OID::Text.new
+ # register_type 'point', OID::Point.new
+ # register_type 'polygon', OID::Text.new
+ # register_type 'circle', OID::Text.new
+ # register_type 'hstore', OID::Hstore.new
+ register_type 0, 'json', PG::TextEncoder::JSON, PG::TextDecoder::JSON
+ alias_type 0, 'jsonb', 'json'
+ # register_type 'citext', OID::Text.new
+ # register_type 'ltree', OID::Text.new
+ #
+ # register_type 'cidr', OID::Cidr.new
+ # alias_type 'inet', 'cidr'
 
 
 
@@ -226,8 +227,8 @@ end
  #
  # Example:
  # conn = PG::Connection.new
- # # Assign a default ruleset for type casts of input and output values.
- # conn.type_mapping = PG::BasicTypeMapping.new(conn)
+ # # Assign a default ruleset for type casts of output values.
+ # conn.type_map_for_results = PG::BasicTypeMapForResults.new(conn)
  # # Execute a query.
  # res = conn.exec_params( "SELECT $1::INT", ['5'] )
  # # Retrieve and cast the result value. Value format is 0 (text) and OID is 20. Therefore typecasting
@@ -236,8 +237,28 @@ end
  #
  # PG::TypeMapByOid#fit_to_result(result, false) can be used to generate
  # a result independent PG::TypeMapByColumn type map, which can subsequently be used
- # to cast #get_copy_data fields. See also PG::BasicTypeMapBasedOnResult .
+ # to cast #get_copy_data fields:
+ #
+ # For the following table:
+ # conn.exec( "CREATE TABLE copytable AS VALUES('a', 123, '{5,4,3}'::INT[])" )
  #
+ # # Retrieve table OIDs per empty result set.
+ # res = conn.exec( "SELECT * FROM copytable LIMIT 0" )
+ # # Build a type map for common database to ruby type decoders.
+ # btm = PG::BasicTypeMapForResults.new(conn)
+ # # Build a PG::TypeMapByColumn with decoders suitable for copytable.
+ # tm = btm.build_column_map( res )
+ # row_decoder = PG::TextDecoder::CopyRow.new type_map: tm
+ #
+ # conn.copy_data( "COPY copytable TO STDOUT", row_decoder ) do |res|
+ # while row=conn.get_copy_data
+ # p row
+ # end
+ # end
+ # This prints the rows with type casted columns:
+ # ["a", 123, [5, 4, 3]]
+ #
+ # See also PG::BasicTypeMapBasedOnResult for the encoder direction.
  class PG::BasicTypeMapForResults < PG::TypeMapByOid
  include PG::BasicTypeRegistry
 
@@ -290,12 +311,17 @@ end
  #
  # # Retrieve table OIDs per empty result set.
  # res = conn.exec( "SELECT * FROM copytable LIMIT 0" )
- # tm = basic_type_mapping.build_column_map( res )
+ # # Build a type map for common ruby to database type encoders.
+ # btm = PG::BasicTypeMapBasedOnResult.new(conn)
+ # # Build a PG::TypeMapByColumn with encoders suitable for copytable.
+ # tm = btm.build_column_map( res )
  # row_encoder = PG::TextEncoder::CopyRow.new type_map: tm
  #
  # conn.copy_data( "COPY copytable FROM STDIN", row_encoder ) do |res|
  # conn.put_copy_data ['a', 123, [5,4,3]]
  # end
+ # This inserts a single row into copytable with type casts from ruby to
+ # database types.
  class PG::BasicTypeMapBasedOnResult < PG::TypeMapByOid
  include PG::BasicTypeRegistry
 
@@ -314,15 +340,16 @@ end
  # OIDs of supported type casts are not hard-coded in the sources, but are retrieved from the
  # PostgreSQL's pg_type table in PG::BasicTypeMapForQueries.new .
  #
- # Query params are type casted based on the MRI internal type of the given value.
+ # Query params are type casted based on the class of the given value.
  #
  # Higher level libraries will most likely not make use of this class, but use their
- # own set of rules to choose suitable encoders and decoders.
+ # own derivation of PG::TypeMapByClass or another set of rules to choose suitable
+ # encoders and decoders for the values to be sent.
  #
  # Example:
  # conn = PG::Connection.new
  # # Assign a default ruleset for type casts of input and output values.
- # conn.type_mapping_for_queries = PG::BasicTypeMapForQueries.new(conn)
+ # conn.type_map_for_queries = PG::BasicTypeMapForQueries.new(conn)
  # # Execute a query. The Integer param value is typecasted internally by PG::BinaryEncoder::Int8.
  # # The format of the parameter is set to 1 (binary) and the OID of this parameter is set to 20 (int8).
  # res = conn.exec_params( "SELECT $1", [5] )
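The practical effect of the basic_type_mapping.rb changes above is that json/jsonb columns now decode to Ruby objects and date columns decode to Date instead of Time. A minimal sketch of how this surfaces through PG::BasicTypeMapForResults; the connection options and column values here are made up for illustration:

  conn = PG::Connection.new( dbname: 'test' )   # hypothetical database
  conn.type_map_for_results = PG::BasicTypeMapForResults.new( conn )
  res = conn.exec( "SELECT '2018-01-31'::date, '{\"b\":[2,3]}'::jsonb" )
  res.getvalue( 0, 0 )   # => #<Date: 2018-01-31>  (a Date, no longer a Time)
  res.getvalue( 0, 1 )   # => {"b"=>[2, 3]}        (decoded by PG::TextDecoder::JSON)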
data/lib/pg/connection.rb CHANGED
@@ -1,6 +1,7 @@
  #!/usr/bin/env ruby
 
  require 'pg' unless defined?( PG )
+ require 'uri'
 
  # The PostgreSQL connection class. The interface for this class is based on
  # {libpq}[http://www.postgresql.org/docs/9.2/interactive/libpq.html], the C
@@ -34,48 +35,57 @@ class PG::Connection
  def self::parse_connect_args( *args )
  return '' if args.empty?
 
- # This will be swapped soon for code that makes options like those required for
- # PQconnectdbParams()/PQconnectStartParams(). For now, stick to an options string for
- # PQconnectdb()/PQconnectStart().
+ hash_arg = args.last.is_a?( Hash ) ? args.pop : {}
+ option_string = ''
+ options = {}
 
  # Parameter 'fallback_application_name' was introduced in PostgreSQL 9.0
  # together with PQescapeLiteral().
- if PG::Connection.instance_methods.find{|m| m.to_sym == :escape_literal }
- appname = $0.sub(/^(.{30}).{4,}(.{30})$/){ $1+"..."+$2 }
- appname = PG::Connection.quote_connstr( appname )
- connopts = ["fallback_application_name=#{appname}"]
- else
- connopts = []
+ if PG::Connection.instance_methods.find {|m| m.to_sym == :escape_literal }
+ options[:fallback_application_name] = $0.sub( /^(.{30}).{4,}(.{30})$/ ){ $1+"..."+$2 }
  end
 
- # Handle an options hash first
- if args.last.is_a?( Hash )
- opthash = args.pop
- opthash.each do |key, val|
- connopts.push( "%s=%s" % [key, PG::Connection.quote_connstr(val)] )
+ if args.length == 1
+ case args.first
+ when URI, /\A#{URI.regexp}\z/
+ uri = URI(args.first)
+ options.merge!( Hash[URI.decode_www_form( uri.query )] ) if uri.query
+ when /=/
+ # Option string style
+ option_string = args.first.to_s
+ else
+ # Positional parameters
+ options[CONNECT_ARGUMENT_ORDER.first.to_sym] = args.first
  end
- end
-
- # Option string style
- if args.length == 1 && args.first.to_s.index( '=' )
- connopts.unshift( args.first )
-
- # Append positional parameters
  else
- args.each_with_index do |val, i|
- next unless val # Skip nil placeholders
+ max = CONNECT_ARGUMENT_ORDER.length
+ raise ArgumentError,
+ "Extra positional parameter %d: %p" % [ max + 1, args[max] ] if args.length > max
 
- key = CONNECT_ARGUMENT_ORDER[ i ] or
- raise ArgumentError, "Extra positional parameter %d: %p" % [ i+1, val ]
- connopts.push( "%s=%s" % [key, PG::Connection.quote_connstr(val.to_s)] )
+ CONNECT_ARGUMENT_ORDER.zip( args ) do |(k,v)|
+ options[ k.to_sym ] = v if v
  end
  end
 
- return connopts.join(' ')
+ options.merge!( hash_arg )
+
+ if uri
+ uri.host = nil if options[:host]
+ uri.port = nil if options[:port]
+ uri.user = nil if options[:user]
+ uri.password = nil if options[:password]
+ uri.path = '' if options[:dbname]
+ uri.query = URI.encode_www_form( options )
+ return uri.to_s.sub( /^#{uri.scheme}:(?!\/\/)/, "#{uri.scheme}://" )
+ else
+ option_string += ' ' unless option_string.empty? && options.empty?
+ return option_string + options.map { |k,v| "#{k}=#{quote_connstr(v)}" }.join( ' ' )
+ end
  end
 
+
  # call-seq:
- # conn.copy_data( sql ) {|sql_result| ... } -> PG::Result
+ # conn.copy_data( sql [, coder] ) {|sql_result| ... } -> PG::Result
  #
  # Execute a copy process for transfering data to or from the server.
  #
@@ -99,13 +109,26 @@ class PG::Connection
  # of blocking mode of operation, #copy_data is preferred to raw calls
  # of #put_copy_data, #get_copy_data and #put_copy_end.
  #
+ # _coder_ can be a PG::Coder derivation
+ # (typically PG::TextEncoder::CopyRow or PG::TextDecoder::CopyRow).
+ # This enables encoding of data fields given to #put_copy_data
+ # or decoding of fields received by #get_copy_data.
+ #
  # Example with CSV input format:
- # conn.exec "create table my_table (a text,b text,c text,d text,e text)"
+ # conn.exec "create table my_table (a text,b text,c text,d text)"
  # conn.copy_data "COPY my_table FROM STDIN CSV" do
- # conn.put_copy_data "some,csv,data,to,copy\n"
- # conn.put_copy_data "more,csv,data,to,copy\n"
+ # conn.put_copy_data "some,data,to,copy\n"
+ # conn.put_copy_data "more,data,to,copy\n"
+ # end
+ # This creates +my_table+ and inserts two CSV rows.
+ #
+ # The same with text format encoder PG::TextEncoder::CopyRow
+ # and Array input:
+ # enco = PG::TextEncoder::CopyRow.new
+ # conn.copy_data "COPY my_table FROM STDIN", enco do
+ # conn.put_copy_data ['some', 'data', 'to', 'copy']
+ # conn.put_copy_data ['more', 'data', 'to', 'copy']
  end
- # This creates +my_table+ and inserts two rows.
  #
  # Example with CSV output format:
  # conn.copy_data "COPY my_table TO STDOUT CSV" do
@@ -114,8 +137,21 @@
  # end
  # end
  # This prints all rows of +my_table+ to stdout:
- # "some,csv,data,to,copy\n"
- # "more,csv,data,to,copy\n"
+ # "some,data,to,copy\n"
+ # "more,data,to,copy\n"
+ #
+ # The same with text format decoder PG::TextDecoder::CopyRow
+ # and Array output:
+ # deco = PG::TextDecoder::CopyRow.new
+ # conn.copy_data "COPY my_table TO STDOUT", deco do
+ # while row=conn.get_copy_data
+ # p row
+ # end
+ # end
+ # This receives all rows of +my_table+ as ruby array:
+ # ["some", "data", "to", "copy"]
+ # ["more", "data", "to", "copy"]
+
  def copy_data( sql, coder=nil )
  res = exec( sql )
 
@@ -155,7 +191,7 @@
  raise
  else
  res = get_last_result
- if res.result_status != PGRES_COMMAND_OK
+ if !res || res.result_status != PGRES_COMMAND_OK
  while get_copy_data
  end
  while get_result
@@ -214,8 +250,23 @@
  end
  end
 
- end # class PG::Connection
+ # Method 'ssl_attribute' was introduced in PostgreSQL 9.5.
+ if self.instance_methods.find{|m| m.to_sym == :ssl_attribute }
+ # call-seq:
+ # conn.ssl_attributes -> Hash<String,String>
+ #
+ # Returns SSL-related information about the connection as key/value pairs
+ #
+ # The available attributes varies depending on the SSL library being used,
+ # and the type of connection.
+ #
+ # See also #ssl_attribute
+ def ssl_attributes
+ ssl_attribute_names.each.with_object({}) do |n,h|
+ h[n] = ssl_attribute(n)
+ end
+ end
+ end
 
- # Backward-compatible alias
- PGconn = PG::Connection
+ end # class PG::Connection
 
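For reference, a rough sketch of how the rewritten parse_connect_args resolves its arguments; exact quoting and the fallback_application_name value depend on the running program, so the results shown are only approximate:

  # Positional parameters are mapped through CONNECT_ARGUMENT_ORDER:
  PG::Connection.parse_connect_args( 'localhost', 5432, nil, nil, 'testdb' )
  # => roughly "fallback_application_name='rspec' host='localhost' port='5432' dbname='testdb'"

  # A URI keeps its URI form; hash options are appended as query parameters:
  PG::Connection.parse_connect_args( 'postgres://localhost/testdb', connect_timeout: 5 )
  # => roughly "postgres://localhost/testdb?fallback_application_name=rspec&connect_timeout=5"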
data/lib/pg/result.rb CHANGED
@@ -12,15 +12,20 @@ class PG::Result
  # See PG::BasicTypeMapForResults
  def map_types!(type_map)
  self.type_map = type_map
- self
+ return self
  end
 
+
+ ### Return a String representation of the object suitable for debugging.
  def inspect
  str = self.to_s
- str[-1,0] = " status=#{res_status(result_status)} ntuples=#{ntuples} nfields=#{nfields} cmd_tuples=#{cmd_tuples}"
- str
+ str[-1,0] = if cleared?
+ " cleared"
+ else
+ " status=#{res_status(result_status)} ntuples=#{ntuples} nfields=#{nfields} cmd_tuples=#{cmd_tuples}"
+ end
+ return str
  end
+
  end # class PG::Result
 
- # Backward-compatible alias
- PGresult = PG::Result
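PG::Result#inspect now distinguishes live results from cleared ones. A short illustration, assuming an open connection conn (object addresses abbreviated):

  res = conn.exec( "SELECT 1" )
  res.inspect   # => "#<PG::Result:0x… status=PGRES_TUPLES_OK ntuples=1 nfields=1 cmd_tuples=1>"
  res.clear
  res.inspect   # => "#<PG::Result:0x… cleared>"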
data/lib/pg/text_decoder.rb CHANGED
@@ -1,5 +1,8 @@
  #!/usr/bin/env ruby
 
+ require 'date'
+ require 'json'
+
  module PG
  module TextDecoder
  class Date < SimpleDecoder
@@ -7,7 +10,7 @@ module PG
 
  def decode(string, tuple=nil, field=nil)
  if string =~ ISO_DATE
- Time.new $1.to_i, $2.to_i, $3.to_i
+ ::Date.new $1.to_i, $2.to_i, $3.to_i
  else
  string
  end
@@ -27,16 +30,22 @@ module PG
  end
 
  class TimestampWithTimeZone < SimpleDecoder
- ISO_DATETIME_WITH_TIMEZONE = /\A(\d{4})-(\d\d)-(\d\d) (\d\d):(\d\d):(\d\d)(\.\d+)?([-\+]\d\d)\z/
+ ISO_DATETIME_WITH_TIMEZONE = /\A(\d{4})-(\d\d)-(\d\d) (\d\d):(\d\d):(\d\d)(\.\d+)?([-\+]\d\d):?(\d\d)?:?(\d\d)?\z/
 
  def decode(string, tuple=nil, field=nil)
  if string =~ ISO_DATETIME_WITH_TIMEZONE
- Time.new $1.to_i, $2.to_i, $3.to_i, $4.to_i, $5.to_i, "#{$6}#{$7}".to_r, "#{$8}:00"
+ Time.new $1.to_i, $2.to_i, $3.to_i, $4.to_i, $5.to_i, "#{$6}#{$7}".to_r, "#{$8}:#{$9 || '00'}:#{$10 || '00'}"
  else
  string
  end
  end
  end
+
+ class JSON < SimpleDecoder
+ def decode(string, tuple=nil, field=nil)
+ ::JSON.parse(string, quirks_mode: true)
+ end
+ end
  end
  end # module PG
 
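Three decoder changes are visible above: dates decode to Date, the timestamptz regexp accepts offsets with minutes (and optional seconds), and a JSON decoder based on ::JSON.parse with quirks_mode is added. A small sketch of calling the decoders directly, outside any type map:

  PG::TextDecoder::Date.new.decode( "2018-01-31" )    # => #<Date: 2018-01-31>
  PG::TextDecoder::JSON.new.decode( '{"b":[2,3]}' )   # => {"b"=>[2, 3]}
  PG::TextDecoder::JSON.new.decode( '123' )           # => 123 (bare scalar, allowed by quirks_mode)
  tz = PG::TextDecoder::TimestampWithTimeZone.new
  tz.decode( "2018-01-31 12:00:00+05:30" )            # => Time with the +05:30 offset applied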
data/lib/pg/text_encoder.rb CHANGED
@@ -1,5 +1,7 @@
  #!/usr/bin/env ruby
 
+ require 'json'
+
  module PG
  module TextEncoder
  class Date < SimpleEncoder
@@ -22,6 +24,12 @@ module PG
  value.respond_to?(:strftime) ? value.strftime(STRFTIME_ISO_DATETIME_WITH_TIMEZONE) : value
  end
  end
+
+ class JSON < SimpleEncoder
+ def encode(value)
+ ::JSON.generate(value, quirks_mode: true)
+ end
+ end
  end
  end # module PG
 
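The new PG::TextEncoder::JSON serializes Ruby values to JSON text; quirks_mode also permits bare scalars. A minimal sketch of direct use (in practice the encoder is normally selected through a type map rather than called by hand):

  enc = PG::TextEncoder::JSON.new
  enc.encode( {"b" => [2, 3]} )   # => '{"b":[2,3]}'
  enc.encode( 123 )               # => '123'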
data/lib/pg.rb CHANGED
@@ -8,11 +8,22 @@ rescue LoadError
  major_minor = RUBY_VERSION[ /^(\d+\.\d+)/ ] or
  raise "Oops, can't extract the major/minor version from #{RUBY_VERSION.dump}"
 
- # Set the PATH environment variable, so that libpq.dll can be found.
- old_path = ENV['PATH']
- ENV['PATH'] = "#{File.expand_path("../#{RUBY_PLATFORM}", __FILE__)};#{old_path}"
- require "#{major_minor}/pg_ext"
- ENV['PATH'] = old_path
+ add_dll_path = proc do |path, &block|
+ begin
+ require 'ruby_installer/runtime'
+ RubyInstaller::Runtime.add_dll_directory(path, &block)
+ rescue LoadError
+ old_path = ENV['PATH']
+ ENV['PATH'] = "#{path};#{old_path}"
+ block.call
+ ENV['PATH'] = old_path
+ end
+ end
+
+ # Temporary add this directory for DLL search, so that libpq.dll can be found.
+ add_dll_path.call(__dir__) do
+ require "#{major_minor}/pg_ext"
+ end
  else
  raise
  end
@@ -24,10 +35,10 @@ end
  module PG
 
  # Library version
- VERSION = '0.18.0'
+ VERSION = '1.0.0'
 
  # VCS revision
- REVISION = %q$Revision: b2bf034e3b9f $
+ REVISION = %q$Revision: fef434914848 $
 
  class NotAllCopyDataRetrieved < PG::Error
  end
@@ -59,6 +70,3 @@ module PG
  end # module PG
 
 
- # Backward-compatible aliase
- PGError = PG::Error
-
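Besides the version bump and the Windows DLL lookup change, the backward-compatible top-level aliases are gone in 1.0.0 (PGError here, PGconn and PGresult in the connection.rb and result.rb hunks above). Calling code has to use the namespaced constants; a rough sketch with a hypothetical database name:

  conn = PG::Connection.new( dbname: 'test' )   # was also reachable as PGconn before 1.0
  begin
    conn.exec( "SELECT 1/0" )
  rescue PG::Error => err                       # was rescue-able as PGError before 1.0
    warn err.message
  end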
data/spec/helpers.rb CHANGED
@@ -5,7 +5,9 @@ require 'rspec'
  require 'shellwords'
  require 'pg'
 
- TEST_DIRECTORY = Pathname.getwd + "tmp_test_specs"
+ DEFAULT_TEST_DIR_STR = File.join(Dir.pwd, "tmp_test_specs")
+ TEST_DIR_STR = ENV['RUBY_PG_TEST_DIR'] || DEFAULT_TEST_DIR_STR
+ TEST_DIRECTORY = Pathname.new(TEST_DIR_STR)
 
  module PG::TestingHelpers
 
@@ -251,11 +253,11 @@ module PG::TestingHelpers
 
  def check_for_lingering_connections( conn )
  conn.exec( "SELECT * FROM pg_stat_activity" ) do |res|
- conns = res.find_all {|row| row['pid'].to_i != conn.backend_pid }
+ conns = res.find_all {|row| row['pid'].to_i != conn.backend_pid && ["client backend", nil].include?(row["backend_type"]) }
  unless conns.empty?
  puts "Lingering connections remain:"
  conns.each do |row|
- puts " [%d] {%s} %s -- %s" % row.values_at( 'pid', 'state', 'application_name', 'query' )
+ puts " [%s] {%s} %s -- %s" % row.values_at( 'pid', 'state', 'application_name', 'query' )
  end
  end
  end
@@ -339,17 +341,8 @@ RSpec.configure do |config|
  config.filter_run_excluding :socket_io unless
  PG::Connection.instance_methods.map( &:to_sym ).include?( :socket_io )
 
- config.filter_run_excluding :postgresql_90 unless
- PG::Connection.instance_methods.map( &:to_sym ).include?( :escape_literal )
-
- if !PG.respond_to?( :library_version )
- config.filter_run_excluding( :postgresql_91, :postgresql_92, :postgresql_93, :postgresql_94 )
- elsif PG.library_version < 90200
- config.filter_run_excluding( :postgresql_92, :postgresql_93, :postgresql_94 )
- elsif PG.library_version < 90300
- config.filter_run_excluding( :postgresql_93, :postgresql_94 )
- elsif PG.library_version < 90400
- config.filter_run_excluding( :postgresql_94 )
- end
+ config.filter_run_excluding( :postgresql_93 ) if PG.library_version < 90300
+ config.filter_run_excluding( :postgresql_94 ) if PG.library_version < 90400
+ config.filter_run_excluding( :postgresql_95 ) if PG.library_version < 90500
+ config.filter_run_excluding( :postgresql_10 ) if PG.library_version < 100000
  end
-
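The helper now derives its version gating from PG.library_version alone and adds :postgresql_95 and :postgresql_10 tags; the spec working directory can be redirected with the RUBY_PG_TEST_DIR environment variable. A sketch of how a spec would opt into one of the new gates (the example body is illustrative, not taken from the suite):

  it "exposes SSL attributes", :postgresql_95 do
    expect( @conn.ssl_attributes ).to be_a( Hash )
  end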
data/spec/pg/basic_type_mapping_spec.rb CHANGED
@@ -58,8 +58,8 @@ describe 'Basic type mapping' do
  it "should do OID based type conversions", :ruby_19 do
  res = @conn.exec( "SELECT 1, 'a', 2.0::FLOAT, TRUE, '2013-06-30'::DATE, generate_series(4,5)" )
  expect( res.map_types!(basic_type_mapping).values ).to eq( [
- [ 1, 'a', 2.0, true, Time.new(2013,6,30), 4 ],
- [ 1, 'a', 2.0, true, Time.new(2013,6,30), 5 ],
+ [ 1, 'a', 2.0, true, Date.new(2013,6,30), 4 ],
+ [ 1, 'a', 2.0, true, Date.new(2013,6,30), 5 ],
  ] )
  end
 
@@ -159,13 +159,34 @@ describe 'Basic type mapping' do
  CAST('1913-12-31' AS DATE),
  CAST('infinity' AS DATE),
  CAST('-infinity' AS DATE)", [], format )
- expect( res.getvalue(0,0) ).to eq( Time.new(2113, 12, 31) )
- expect( res.getvalue(0,1) ).to eq( Time.new(1913, 12, 31) )
+ expect( res.getvalue(0,0) ).to eq( Date.new(2113, 12, 31) )
+ expect( res.getvalue(0,1) ).to eq( Date.new(1913, 12, 31) )
  expect( res.getvalue(0,2) ).to eq( 'infinity' )
  expect( res.getvalue(0,3) ).to eq( '-infinity' )
  end
  end
 
+ it "should do JSON conversions", :postgresql_94 do
+ [0].each do |format|
+ ['JSON', 'JSONB'].each do |type|
+ res = @conn.exec( "SELECT CAST('123' AS #{type}),
+ CAST('12.3' AS #{type}),
+ CAST('true' AS #{type}),
+ CAST('false' AS #{type}),
+ CAST('null' AS #{type}),
+ CAST('[1, \"a\", null]' AS #{type}),
+ CAST('{\"b\" : [2,3]}' AS #{type})", [], format )
+ expect( res.getvalue(0,0) ).to eq( 123 )
+ expect( res.getvalue(0,1) ).to be_within(0.1).of( 12.3 )
+ expect( res.getvalue(0,2) ).to eq( true )
+ expect( res.getvalue(0,3) ).to eq( false )
+ expect( res.getvalue(0,4) ).to eq( nil )
+ expect( res.getvalue(0,5) ).to eq( [1, "a", nil] )
+ expect( res.getvalue(0,6) ).to eq( {"b" => [2, 3]} )
+ end
+ end
+ end
+
  it "should do array type conversions" do
  [0].each do |format|
  res = @conn.exec( "SELECT CAST('{1,2,3}' AS INT2[]), CAST('{{1,2},{3,4}}' AS INT2[][]),
@@ -228,6 +249,39 @@ describe 'Basic type mapping' do
  res = @conn.exec( "SELECT * FROM copytable" )
  expect( res.values ).to eq( [['a', '123', '{5,4,3}'], ['b', '234', '{2,3}']] )
  end
+
+ it "can do JSON conversions", :postgresql_94 do
+ ['JSON', 'JSONB'].each do |type|
+ sql = "SELECT CAST('123' AS #{type}),
+ CAST('12.3' AS #{type}),
+ CAST('true' AS #{type}),
+ CAST('false' AS #{type}),
+ CAST('null' AS #{type}),
+ CAST('[1, \"a\", null]' AS #{type}),
+ CAST('{\"b\" : [2,3]}' AS #{type})"
+
+ tm = basic_type_mapping.build_column_map( @conn.exec( sql ) )
+ expect( tm.coders.map(&:name) ).to eq( [type.downcase] * 7 )
+
+ res = @conn.exec_params( "SELECT $1, $2, $3, $4, $5, $6, $7",
+ [ 123,
+ 12.3,
+ true,
+ false,
+ nil,
+ [1, "a", nil],
+ {"b" => [2, 3]},
+ ], 0, tm )
+
+ expect( res.getvalue(0,0) ).to eq( "123" )
+ expect( res.getvalue(0,1) ).to eq( "12.3" )
+ expect( res.getvalue(0,2) ).to eq( "true" )
+ expect( res.getvalue(0,3) ).to eq( "false" )
+ expect( res.getvalue(0,4) ).to eq( nil )
+ expect( res.getvalue(0,5).gsub(" ","") ).to eq( "[1,\"a\",null]" )
+ expect( res.getvalue(0,6).gsub(" ","") ).to eq( "{\"b\":[2,3]}" )
+ end
+ end
  end
 
  context "with usage of result oids for copy encoder selection"