pg 1.1.0.pre20180730144600 → 1.1.0.pre20180730171000

Files changed (57)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/.gems +6 -0
  5. data/.hgignore +21 -0
  6. data/.hgsigs +29 -0
  7. data/.hgtags +36 -0
  8. data/.hoerc +2 -0
  9. data/.irbrc +23 -0
  10. data/.pryrc +23 -0
  11. data/.tm_properties +21 -0
  12. data/.travis.yml +41 -0
  13. data/Gemfile +2 -0
  14. data/Manifest.txt +2 -65
  15. data/Rakefile +2 -1
  16. data/Rakefile.cross +3 -3
  17. data/appveyor.yml +50 -0
  18. data/certs/ged.pem +26 -0
  19. data/lib/pg.rb +1 -1
  20. data/lib/pg/binary_decoder.rb +22 -0
  21. data/lib/pg/tuple.rb +30 -0
  22. data/misc/openssl-pg-segfault.rb +31 -0
  23. data/misc/postgres/History.txt +9 -0
  24. data/misc/postgres/Manifest.txt +5 -0
  25. data/misc/postgres/README.txt +21 -0
  26. data/misc/postgres/Rakefile +21 -0
  27. data/misc/postgres/lib/postgres.rb +16 -0
  28. data/misc/ruby-pg/History.txt +9 -0
  29. data/misc/ruby-pg/Manifest.txt +5 -0
  30. data/misc/ruby-pg/README.txt +21 -0
  31. data/misc/ruby-pg/Rakefile +21 -0
  32. data/misc/ruby-pg/lib/ruby/pg.rb +16 -0
  33. data/pg.gemspec +61 -0
  34. data/sample/array_insert.rb +20 -0
  35. data/sample/async_api.rb +106 -0
  36. data/sample/async_copyto.rb +39 -0
  37. data/sample/async_mixed.rb +56 -0
  38. data/sample/check_conn.rb +21 -0
  39. data/sample/copydata.rb +71 -0
  40. data/sample/copyfrom.rb +81 -0
  41. data/sample/copyto.rb +19 -0
  42. data/sample/cursor.rb +21 -0
  43. data/sample/disk_usage_report.rb +177 -0
  44. data/sample/issue-119.rb +94 -0
  45. data/sample/losample.rb +69 -0
  46. data/sample/minimal-testcase.rb +17 -0
  47. data/sample/notify_wait.rb +72 -0
  48. data/sample/pg_statistics.rb +285 -0
  49. data/sample/replication_monitor.rb +222 -0
  50. data/sample/test_binary_values.rb +33 -0
  51. data/sample/wal_shipper.rb +434 -0
  52. data/sample/warehouse_partitions.rb +311 -0
  53. data/spec/pg/connection_sync_spec.rb +41 -0
  54. data/spec/pg/tuple_spec.rb +266 -0
  55. metadata +69 -25
  56. metadata.gz.sig +0 -0
  57. data/ChangeLog +0 -0
data/sample/issue-119.rb
@@ -0,0 +1,94 @@
+ # -*- ruby -*-
+
+ require 'pg'
+
+ # This is another example of how to use COPY FROM, this time as a
+ # minimal test case used to try to figure out what was going on in
+ # an issue submitted from a user:
+ #
+ # https://bitbucket.org/ged/ruby-pg/issue/119
+ #
+
+ conn = PG.connect( dbname: 'test' )
+ table_name = 'issue_119'
+ field_list = %w[name body_weight brain_weight]
+ method = 0
+ options = { truncate: true }
+ sql_parameters = ''
+
+ conn.set_error_verbosity( PG::PQERRORS_VERBOSE )
+ conn.exec( "DROP TABLE IF EXISTS #{table_name}" )
+ conn.exec( "CREATE TABLE #{table_name} ( id SERIAL, name TEXT, body_weight REAL, brain_weight REAL )" )
+
+ text = <<-END_DATA
+ Mountain beaver 1.35 465
+ Cow 465 423
+ Grey wolf 36.33 119.5
+ Goat 27.66 115
+ Guinea pig 1.04 5.5
+ Diplodocus 11700 50
+ Asian elephant 2547 4603
+ Donkey 187.1 419
+ Horse 521 655
+ Potar monkey 10 115
+ Cat 3.3 25.6
+ Giraffe 529 680
+ Gorilla 207 406
+ Human 62 1320
+ African elephant 6654 5712
+ Triceratops 9400 70
+ Rhesus monkey 6.8 179
+ Kangaroo 35 56
+ Golden hamster 0.12 1
+ Mouse 0.023 0.4
+ Rabbit 2.5 12.1
+ Sheep 55.5 175
+ Jaguar 100 157
+ Chimpanzee 52.16 440
+ Brachiosaurus 87000 154.5
+ Mole 0.122 3
+ Pig 192 18
+ END_DATA
+
+ #ActiveRecord::Base.connection_pool.with_connection do |conn|
+ conn.transaction do
+     rc = conn #.raw_connection
+     rc.exec "TRUNCATE TABLE #{table_name};" if options[:truncate]
+     sql = "COPY #{table_name} (#{field_list.join(',')}) FROM STDIN #{sql_parameters} "
+     p sql
+     rc.exec(sql)
+     errmsg = nil # scope this outside of the rescue below so it's visible later
+     begin
+         if method == 1
+             rc.put_copy_data text + "\\.\n"
+         else
+             text.each_line { |line| rc.put_copy_data(line) }
+         end
+     rescue SystemCallError => err
+         errmsg = "%s while reading copy data: %s" % [err.class.name, err.message]
+         puts "an error occurred"
+     end
+
+     if errmsg
+         rc.put_copy_end(errmsg)
+         puts "ERROR #{errmsg}"
+     else
+         rc.put_copy_end
+     end
+
+     while res = rc.get_result
+         st = res.res_status( res.result_status )
+         puts "Result of COPY is: %s" % [ st ]
+         if res.result_status != PG::PGRES_COPY_IN
+             puts res.error_message
+         end
+     end
+     puts "end"
+ end #transaction
+ #end #connection
+
+ conn.exec( "SELECT name, brain_weight FROM #{table_name}" ) do |res|
+     p res.values
+ end
+
+
data/sample/losample.rb
@@ -0,0 +1,69 @@
+ # -*- ruby -*-
+
+ require 'pg'
+
+ SAMPLE_WRITE_DATA = 'some sample data'
+ SAMPLE_EXPORT_NAME = 'lowrite.txt'
+
+ conn = PG.connect( :dbname => 'test', :host => 'localhost', :port => 5432 )
+ puts "dbname: " + conn.db + "\thost: " + conn.host + "\tuser: " + conn.user
+
+ # Start a transaction, as all large object functions require one.
+ puts "Beginning transaction"
+ conn.exec( 'BEGIN' )
+
+ # Test importing from a file
+ puts "Import test:"
+ puts " importing %s" % [ __FILE__ ]
+ oid = conn.lo_import( __FILE__ )
+ puts " imported as large object %d" % [ oid ]
+
+ # Read back 50 bytes of the imported data
+ puts "Read test:"
+ fd = conn.lo_open( oid, PG::INV_READ|PG::INV_WRITE )
+ conn.lo_lseek( fd, 0, PG::SEEK_SET )
+ buf = conn.lo_read( fd, 50 )
+ puts " read: %p" % [ buf ]
+ puts " read was ok!" if buf =~ /require 'pg'/
+
+ # Append some test data onto the end of the object
+ puts "Write test:"
+ conn.lo_lseek( fd, 0, PG::SEEK_END )
+ buf = SAMPLE_WRITE_DATA.dup
+ totalbytes = 0
+ until buf.empty?
+     bytes = conn.lo_write( fd, buf )
+     buf.slice!( 0, bytes )
+     totalbytes += bytes
+ end
+ puts " appended %d bytes" % [ totalbytes ]
+
+ # Now export it
+ puts "Export test:"
+ File.unlink( SAMPLE_EXPORT_NAME ) if File.exist?( SAMPLE_EXPORT_NAME )
+ conn.lo_export( oid, SAMPLE_EXPORT_NAME )
+ puts " success!" if File.exist?( SAMPLE_EXPORT_NAME )
+ puts " exported as %s (%d bytes)" % [ SAMPLE_EXPORT_NAME, File.size(SAMPLE_EXPORT_NAME) ]
+
+ conn.exec( 'COMMIT' )
+ puts "End of transaction."
+
+
+ puts 'Testing read and delete from a new transaction:'
+ puts ' starting a new transaction'
+ conn.exec( 'BEGIN' )
+
+ fd = conn.lo_open( oid, PG::INV_READ )
+ puts ' reopened okay.'
+ conn.lo_lseek( fd, 50, PG::SEEK_END )
+ buf = conn.lo_read( fd, 50 )
+ puts ' read okay.' if buf == SAMPLE_WRITE_DATA
+
+ puts 'Closing and unlinking:'
+ conn.lo_close( fd )
+ puts ' closed.'
+ conn.lo_unlink( oid )
+ puts ' unlinked.'
+ conn.exec( 'COMMIT' )
+ puts 'Done.'
+
data/sample/minimal-testcase.rb
@@ -0,0 +1,17 @@
+ # -*- ruby -*-
+
+ require 'pg'
+
+ conn = PG.connect( :dbname => 'test' )
+ $stderr.puts '---',
+     RUBY_DESCRIPTION,
+     PG.version_string( true ),
+     "Server version: #{conn.server_version}",
+     "Client version: #{PG.library_version}",
+     '---'
+
+ result = conn.exec( "SELECT * from pg_stat_activity" )
+
+ $stderr.puts %Q{Expected this to return: ["select * from pg_stat_activity"]}
+ p result.field_values( 'current_query' )
+
data/sample/notify_wait.rb
@@ -0,0 +1,72 @@
+ # -*- ruby -*-
+ #
+ # Test script, demonstrating a non-poll notification for a table event.
+ #
+
+ BEGIN {
+     require 'pathname'
+     basedir = Pathname.new( __FILE__ ).expand_path.dirname.parent
+     libdir = basedir + 'lib'
+     $LOAD_PATH.unshift( libdir.to_s ) unless $LOAD_PATH.include?( libdir.to_s )
+ }
+
+ require 'pg'
+
+ TRIGGER_TABLE = %{
+     CREATE TABLE IF NOT EXISTS test ( message text );
+ }
+
+ TRIGGER_FUNCTION = %{
+     CREATE OR REPLACE FUNCTION notify_test()
+     RETURNS TRIGGER
+     LANGUAGE plpgsql
+     AS $$
+         BEGIN
+             NOTIFY woo;
+             RETURN NULL;
+         END
+     $$
+ }
+
+ DROP_TRIGGER = %{
+     DROP TRIGGER IF EXISTS notify_trigger ON test
+ }
+
+
+ TRIGGER = %{
+     CREATE TRIGGER notify_trigger
+     AFTER UPDATE OR INSERT OR DELETE
+     ON test
+     FOR EACH STATEMENT
+     EXECUTE PROCEDURE notify_test();
+ }
+
+ conn = PG.connect( :dbname => 'test' )
+
+ conn.exec( TRIGGER_TABLE )
+ conn.exec( TRIGGER_FUNCTION )
+ conn.exec( DROP_TRIGGER )
+ conn.exec( TRIGGER )
+
+ conn.exec( 'LISTEN woo' ) # register interest in the 'woo' event
+
+ notifications = []
+
+ puts "Now switch to a different term and run:",
+     '',
+     %{ psql test -c "insert into test values ('A message.')"},
+     ''
+
+ puts "Waiting up to 30 seconds for an event!"
+ conn.wait_for_notify( 30 ) do |notify, pid|
+     notifications << [ pid, notify ]
+ end
+
+ if notifications.empty?
+     puts "Awww, I didn't see any events."
+ else
+     puts "I got one from pid %d: %s" % notifications.first
+ end
+
+
+
data/sample/pg_statistics.rb
@@ -0,0 +1,285 @@
+ # -*- ruby -*-
+ # vim: set noet nosta sw=4 ts=4 :
+ #
+ # PostgreSQL statistic gatherer.
+ # Mahlon E. Smith <mahlon@martini.nu>
+ #
+ # Based on queries by Kenny Gorman.
+ # http://www.kennygorman.com/wordpress/?page_id=491
+ #
+ # An example gnuplot input script is included in the __END__ block
+ # of this script. Using it, you can feed the output this script
+ # generates to gnuplot (after removing header lines) to generate
+ # some nice performance charts.
+ #
+
+ require 'ostruct'
+ require 'optparse'
+ require 'etc'
+ require 'pg'
+
+
+ ### PostgreSQL Stats. Fetch information from pg_stat_* tables.
+ ### Optionally run in a continuous loop, displaying deltas.
+ ###
+ class Stats
+     VERSION = %q$Id$
+
+     def initialize( opts )
+         @opts = opts
+         @db = PG.connect(
+             :dbname => opts.database,
+             :host => opts.host,
+             :port => opts.port,
+             :user => opts.user,
+             :password => opts.pass,
+             :sslmode => 'prefer'
+         )
+         @last = nil
+     end
+
+     ######
+     public
+     ######
+
+     ### Primary loop. Gather statistics and generate deltas.
+     ###
+     def run
+         run_count = 0
+
+         loop do
+             current_stat = self.get_stats
+
+             # First run, store and continue
+             #
+             if @last.nil?
+                 @last = current_stat
+                 sleep @opts.interval
+                 next
+             end
+
+             # headers
+             #
+             if run_count == 0 || run_count % 50 == 0
+                 puts "%-20s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s" % %w[
+                     time commits rollbks blksrd blkshit bkends seqscan
+                     seqtprd idxscn idxtrd ins upd del locks activeq
+                 ]
+             end
+
+             # calculate deltas
+             #
+             delta = current_stat.inject({}) do |h, pair|
+                 stat, val = *pair
+
+                 if %w[ activeq locks bkends ].include?( stat )
+                     h[stat] = current_stat[stat].to_i
+                 else
+                     h[stat] = current_stat[stat].to_i - @last[stat].to_i
+                 end
+
+                 h
+             end
+             delta[ 'time' ] = Time.now.strftime('%F %T')
+
+             # new values
+             #
+             puts "%-20s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s" % [
+                 delta['time'], delta['commits'], delta['rollbks'], delta['blksrd'],
+                 delta['blkshit'], delta['bkends'], delta['seqscan'],
+                 delta['seqtprd'], delta['idxscn'], delta['idxtrd'],
+                 delta['ins'], delta['upd'], delta['del'], delta['locks'], delta['activeq']
+             ]
+
+             @last = current_stat
+             run_count += 1
+             sleep @opts.interval
+         end
+     end
+
+
+     ### Query the database for performance measurements. Returns a hash.
+     ###
+     def get_stats
+         res = @db.exec %Q{
+             SELECT
+                 MAX(stat_db.xact_commit) AS commits,
+                 MAX(stat_db.xact_rollback) AS rollbks,
+                 MAX(stat_db.blks_read) AS blksrd,
+                 MAX(stat_db.blks_hit) AS blkshit,
+                 MAX(stat_db.numbackends) AS bkends,
+                 SUM(stat_tables.seq_scan) AS seqscan,
+                 SUM(stat_tables.seq_tup_read) AS seqtprd,
+                 SUM(stat_tables.idx_scan) AS idxscn,
+                 SUM(stat_tables.idx_tup_fetch) AS idxtrd,
+                 SUM(stat_tables.n_tup_ins) AS ins,
+                 SUM(stat_tables.n_tup_upd) AS upd,
+                 SUM(stat_tables.n_tup_del) AS del,
+                 MAX(stat_locks.locks) AS locks,
+                 MAX(activity.sess) AS activeq
+             FROM
+                 pg_stat_database AS stat_db,
+                 pg_stat_user_tables AS stat_tables,
+                 (SELECT COUNT(*) AS locks FROM pg_locks ) AS stat_locks,
+                 (SELECT COUNT(*) AS sess FROM pg_stat_activity WHERE current_query <> '<IDLE>') AS activity
+             WHERE
+                 stat_db.datname = '%s';
+         } % [ @opts.database ]
+
+         return res[0]
+     end
+ end
+
+
+ ### Parse command line arguments. Return a struct of global options.
+ ###
+ def parse_args( args )
+     options = OpenStruct.new
+     options.database = Etc.getpwuid( Process.uid ).name
+     options.host = '127.0.0.1'
+     options.port = 5432
+     options.user = Etc.getpwuid( Process.uid ).name
+     options.sslmode = 'disable'
+     options.interval = 5
+
+     opts = OptionParser.new do |opts|
+         opts.banner = "Usage: #{$0} [options]"
+
+         opts.separator ''
+         opts.separator 'Connection options:'
+
+         opts.on( '-d', '--database DBNAME',
+                 "specify the database to connect to (default: \"#{options.database}\")" ) do |db|
+             options.database = db
+         end
+
+         opts.on( '-h', '--host HOSTNAME', 'database server host' ) do |host|
+             options.host = host
+         end
+
+         opts.on( '-p', '--port PORT', Integer,
+                 "database server port (default: \"#{options.port}\")" ) do |port|
+             options.port = port
+         end
+
+         opts.on( '-U', '--user NAME',
+                 "database user name (default: \"#{options.user}\")" ) do |user|
+             options.user = user
+         end
+
+         opts.on( '-W', 'force password prompt' ) do |pw|
+             print 'Password: '
+             begin
+                 system 'stty -echo'
+                 options.pass = gets.chomp
+             ensure
+                 system 'stty echo'
+                 puts
+             end
+         end
+
+         opts.separator ''
+         opts.separator 'Other options:'
+
+         opts.on( '-i', '--interval SECONDS', Integer,
+                 "refresh interval in seconds (default: \"#{options.interval}\")") do |seconds|
+             options.interval = seconds
+         end
+
+         opts.on_tail( '--help', 'show this help, then exit' ) do
+             $stderr.puts opts
+             exit
+         end
+
+         opts.on_tail( '--version', 'output version information, then exit' ) do
+             puts Stats::VERSION
+             exit
+         end
+     end
+
+     opts.parse!( args )
+     return options
+ end
+
+
+ ### Go!
+ ###
+ if __FILE__ == $0
+     $stdout.sync = true
+     Stats.new( parse_args( ARGV ) ).run
+ end
+
+
+ __END__
+ ######################################################################
+ ### T E R M I N A L   O P T I O N S
+ ######################################################################
+
+ #set terminal png nocrop enhanced font arial 8 size '800x600' x000000 xffffff x444444
+ #set output 'graph.png'
+
+ set terminal pdf linewidth 4 size 11,8
+ set output 'graph.pdf'
+
+ #set terminal aqua
+
+
+ ######################################################################
+ ### O P T I O N S   F O R   A L L   G R A P H S
+ ######################################################################
+
+ set multiplot layout 2,1 title "PostgreSQL Statistics\n5 second sample rate (smoothed)"
+
+ set grid x y
+ set key right vertical outside
+ set key nobox
+ set xdata time
+ set timefmt "%Y-%m-%d.%H:%M:%S"
+ set format x "%l%p"
+ set xtic rotate by -45
+ input_file = "database_stats.txt"
+
+ # edit to taste!
+ set xrange ["2012-04-16.00:00:00":"2012-04-17.00:00:00"]
+
+
+ ######################################################################
+ ### G R A P H   1
+ ######################################################################
+
+ set title "Database Operations and Connection Totals"
+ set yrange [0:200]
+
+ plot \
+     input_file using 1:2 title "Commits" with lines smooth bezier, \
+     input_file using 1:3 title "Rollbacks" with lines smooth bezier, \
+     input_file using 1:11 title "Inserts" with lines smooth bezier, \
+     input_file using 1:12 title "Updates" with lines smooth bezier, \
+     input_file using 1:13 title "Deletes" with lines smooth bezier, \
+     input_file using 1:6 title "Backends (total)" with lines, \
+     input_file using 1:15 title "Active queries (total)" with lines smooth bezier
+
+
+ ######################################################################
+ ### G R A P H   2
+ ######################################################################
+
+ set title "Backend Performance"
+ set yrange [0:10000]
+
+ plot \
+     input_file using 1:4 title "Block (cache) reads" with lines smooth bezier, \
+     input_file using 1:5 title "Block (cache) hits" with lines smooth bezier, \
+     input_file using 1:7 title "Sequence scans" with lines smooth bezier, \
+     input_file using 1:8 title "Sequence tuple reads" with lines smooth bezier, \
+     input_file using 1:9 title "Index scans" with lines smooth bezier, \
+     input_file using 1:10 title "Index tuple reads" with lines smooth bezier
+
+
+ ######################################################################
+ ### C L E A N U P
+ ######################################################################
+
+ unset multiplot
+ reset
+