pg 0.20.0-x86-mingw32 → 0.21.0-x86-mingw32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/ChangeLog +145 -44
  5. data/History.rdoc +13 -0
  6. data/Manifest.txt +1 -18
  7. data/README.rdoc +1 -1
  8. data/Rakefile +5 -5
  9. data/ext/pg.c +1 -1
  10. data/ext/pg_binary_decoder.c +1 -1
  11. data/ext/pg_binary_encoder.c +1 -1
  12. data/ext/pg_connection.c +1 -1
  13. data/ext/pg_result.c +12 -6
  14. data/ext/pg_text_decoder.c +1 -1
  15. data/ext/pg_text_encoder.c +1 -1
  16. data/ext/pg_type_map.c +1 -1
  17. data/ext/pg_type_map_all_strings.c +1 -1
  18. data/ext/pg_type_map_by_class.c +1 -1
  19. data/ext/pg_type_map_by_column.c +1 -1
  20. data/ext/pg_type_map_by_mri_type.c +1 -1
  21. data/ext/pg_type_map_by_oid.c +1 -1
  22. data/ext/pg_type_map_in_ruby.c +1 -1
  23. data/ext/util.c +1 -1
  24. data/lib/2.0/pg_ext.so +0 -0
  25. data/lib/2.1/pg_ext.so +0 -0
  26. data/lib/2.2/pg_ext.so +0 -0
  27. data/lib/2.3/pg_ext.so +0 -0
  28. data/lib/2.4/pg_ext.so +0 -0
  29. data/lib/libpq.dll +0 -0
  30. data/lib/pg.rb +8 -6
  31. data/lib/pg/connection.rb +0 -4
  32. data/lib/pg/deprecated_constants.rb +21 -0
  33. data/lib/pg/result.rb +0 -3
  34. data/spec/pg/result_spec.rb +4 -4
  35. metadata +40 -63
  36. metadata.gz.sig +0 -0
  37. data/sample/array_insert.rb +0 -20
  38. data/sample/async_api.rb +0 -106
  39. data/sample/async_copyto.rb +0 -39
  40. data/sample/async_mixed.rb +0 -56
  41. data/sample/check_conn.rb +0 -21
  42. data/sample/copyfrom.rb +0 -81
  43. data/sample/copyto.rb +0 -19
  44. data/sample/cursor.rb +0 -21
  45. data/sample/disk_usage_report.rb +0 -186
  46. data/sample/issue-119.rb +0 -94
  47. data/sample/losample.rb +0 -69
  48. data/sample/minimal-testcase.rb +0 -17
  49. data/sample/notify_wait.rb +0 -72
  50. data/sample/pg_statistics.rb +0 -294
  51. data/sample/replication_monitor.rb +0 -231
  52. data/sample/test_binary_values.rb +0 -33
  53. data/sample/wal_shipper.rb +0 -434
  54. data/sample/warehouse_partitions.rb +0 -320
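
Beyond the sample scripts removed above (items 37-54), the notable additions are data/lib/pg/deprecated_constants.rb (+21 -0) and the accompanying changes to data/lib/pg.rb, which suggest that 0.21.0 starts emitting deprecation warnings for the old top-level constants PGconn, PGresult and PGError. A minimal sketch of the namespaced API that replaces them, assuming a local database named 'test':

require 'pg'

# Use the PG:: namespace rather than the deprecated top-level constants.
conn = PG.connect( :dbname => 'test' )      # rather than PGconn.new / PGconn.open
begin
  result = conn.exec( 'SELECT version()' )  # yields a PG::Result, not a PGresult
  puts result.getvalue( 0, 0 )
rescue PG::Error => err                     # rather than rescue PGError
  warn err.message
ensure
  conn.close
end
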
data/sample/losample.rb +0 -69
@@ -1,69 +0,0 @@
- #!/usr/bin/env ruby
-
- require 'pg'
-
- SAMPLE_WRITE_DATA = 'some sample data'
- SAMPLE_EXPORT_NAME = 'lowrite.txt'
-
- conn = PG.connect( :dbname => 'test', :host => 'localhost', :port => 5432 )
- puts "dbname: " + conn.db + "\thost: " + conn.host + "\tuser: " + conn.user
-
- # Start a transaction, as all large object functions require one.
- puts "Beginning transaction"
- conn.exec( 'BEGIN' )
-
- # Test importing from a file
- puts "Import test:"
- puts " importing %s" % [ __FILE__ ]
- oid = conn.lo_import( __FILE__ )
- puts " imported as large object %d" % [ oid ]
-
- # Read back 50 bytes of the imported data
- puts "Read test:"
- fd = conn.lo_open( oid, PG::INV_READ|PG::INV_WRITE )
- conn.lo_lseek( fd, 0, PG::SEEK_SET )
- buf = conn.lo_read( fd, 50 )
- puts " read: %p" % [ buf ]
- puts " read was ok!" if buf =~ /require 'pg'/
-
- # Append some test data onto the end of the object
- puts "Write test:"
- conn.lo_lseek( fd, 0, PG::SEEK_END )
- buf = SAMPLE_WRITE_DATA.dup
- totalbytes = 0
- until buf.empty?
- bytes = conn.lo_write( fd, buf )
- buf.slice!( 0, bytes )
- totalbytes += bytes
- end
- puts " appended %d bytes" % [ totalbytes ]
-
- # Now export it
- puts "Export test:"
- File.unlink( SAMPLE_EXPORT_NAME ) if File.exist?( SAMPLE_EXPORT_NAME )
- conn.lo_export( oid, SAMPLE_EXPORT_NAME )
- puts " success!" if File.exist?( SAMPLE_EXPORT_NAME )
- puts " exported as %s (%d bytes)" % [ SAMPLE_EXPORT_NAME, File.size(SAMPLE_EXPORT_NAME) ]
-
- conn.exec( 'COMMIT' )
- puts "End of transaction."
-
-
- puts 'Testing read and delete from a new transaction:'
- puts ' starting a new transaction'
- conn.exec( 'BEGIN' )
-
- fd = conn.lo_open( oid, PG::INV_READ )
- puts ' reopened okay.'
- conn.lo_lseek( fd, 50, PG::SEEK_END )
- buf = conn.lo_read( fd, 50 )
- puts ' read okay.' if buf == SAMPLE_WRITE_DATA
-
- puts 'Closing and unlinking:'
- conn.lo_close( fd )
- puts ' closed.'
- conn.lo_unlink( oid )
- puts ' unlinked.'
- conn.exec( 'COMMIT' )
- puts 'Done.'
-
data/sample/minimal-testcase.rb +0 -17
@@ -1,17 +0,0 @@
- #!/usr/bin/env ruby
-
- require 'pg'
-
- conn = PG.connect( :dbname => 'test' )
- $stderr.puts '---',
- RUBY_DESCRIPTION,
- PG.version_string( true ),
- "Server version: #{conn.server_version}",
- "Client version: #{PG.respond_to?( :library_version ) ? PG.library_version : 'unknown'}",
- '---'
-
- result = conn.exec( "SELECT * from pg_stat_activity" )
-
- $stderr.puts %Q{Expected this to return: ["select * from pg_stat_activity"]}
- p result.field_values( 'current_query' )
-
data/sample/notify_wait.rb +0 -72
@@ -1,72 +0,0 @@
- #!/usr/bin/env ruby
- #
- # Test script, demonstrating a non-poll notification for a table event.
- #
-
- BEGIN {
- require 'pathname'
- basedir = Pathname.new( __FILE__ ).expand_path.dirname.parent
- libdir = basedir + 'lib'
- $LOAD_PATH.unshift( libdir.to_s ) unless $LOAD_PATH.include?( libdir.to_s )
- }
-
- require 'pg'
-
- TRIGGER_TABLE = %{
- CREATE TABLE IF NOT EXISTS test ( message text );
- }
-
- TRIGGER_FUNCTION = %{
- CREATE OR REPLACE FUNCTION notify_test()
- RETURNS TRIGGER
- LANGUAGE plpgsql
- AS $$
- BEGIN
- NOTIFY woo;
- RETURN NULL;
- END
- $$
- }
-
- DROP_TRIGGER = %{
- DROP TRIGGER IF EXISTS notify_trigger ON test
- }
-
-
- TRIGGER = %{
- CREATE TRIGGER notify_trigger
- AFTER UPDATE OR INSERT OR DELETE
- ON test
- FOR EACH STATEMENT
- EXECUTE PROCEDURE notify_test();
- }
-
- conn = PG.connect( :dbname => 'test' )
-
- conn.exec( TRIGGER_TABLE )
- conn.exec( TRIGGER_FUNCTION )
- conn.exec( DROP_TRIGGER )
- conn.exec( TRIGGER )
-
- conn.exec( 'LISTEN woo' ) # register interest in the 'woo' event
-
- notifications = []
-
- puts "Now switch to a different term and run:",
- '',
- %{ psql test -c "insert into test values ('A message.')"},
- ''
-
- puts "Waiting up to 30 seconds for an event!"
- conn.wait_for_notify( 30 ) do |notify, pid|
- notifications << [ pid, notify ]
- end
-
- if notifications.empty?
- puts "Awww, I didn't see any events."
- else
- puts "I got one from pid %d: %s" % notifications.first
- end
-
-
-
data/sample/pg_statistics.rb +0 -294
@@ -1,294 +0,0 @@
- #!/usr/bin/env ruby
- # vim: set noet nosta sw=4 ts=4 :
- #
- # PostgreSQL statistic gatherer.
- # Mahlon E. Smith <mahlon@martini.nu>
- #
- # Based on queries by Kenny Gorman.
- # http://www.kennygorman.com/wordpress/?page_id=491
- #
- # An example gnuplot input script is included in the __END__ block
- # of this script. Using it, you can feed the output this script
- # generates to gnuplot (after removing header lines) to generate
- # some nice performance charts.
- #
-
- begin
- require 'ostruct'
- require 'optparse'
- require 'etc'
- require 'pg'
-
- rescue LoadError # 1.8 support
- unless Object.const_defined?( :Gem )
- require 'rubygems'
- retry
- end
- raise
- end
-
-
- ### PostgreSQL Stats. Fetch information from pg_stat_* tables.
- ### Optionally run in a continuous loop, displaying deltas.
- ###
- class Stats
- VERSION = %q$Id: pg_statistics.rb,v 36ca5b412583 2012/04/17 23:32:25 mahlon $
-
- def initialize( opts )
- @opts = opts
- @db = PG.connect(
- :dbname => opts.database,
- :host => opts.host,
- :port => opts.port,
- :user => opts.user,
- :password => opts.pass,
- :sslmode => 'prefer'
- )
- @last = nil
- end
-
- ######
- public
- ######
-
- ### Primary loop. Gather statistics and generate deltas.
- ###
- def run
- run_count = 0
-
- loop do
- current_stat = self.get_stats
-
- # First run, store and continue
- #
- if @last.nil?
- @last = current_stat
- sleep @opts.interval
- next
- end
-
- # headers
- #
- if run_count == 0 || run_count % 50 == 0
- puts "%-20s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s" % %w[
- time commits rollbks blksrd blkshit bkends seqscan
- seqtprd idxscn idxtrd ins upd del locks activeq
- ]
- end
-
- # calculate deltas
- #
- delta = current_stat.inject({}) do |h, pair|
- stat, val = *pair
-
- if %w[ activeq locks bkends ].include?( stat )
- h[stat] = current_stat[stat].to_i
- else
- h[stat] = current_stat[stat].to_i - @last[stat].to_i
- end
-
- h
- end
- delta[ 'time' ] = Time.now.strftime('%F %T')
-
- # new values
- #
- puts "%-20s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s%12s" % [
- delta['time'], delta['commits'], delta['rollbks'], delta['blksrd'],
- delta['blkshit'], delta['bkends'], delta['seqscan'],
- delta['seqtprd'], delta['idxscn'], delta['idxtrd'],
- delta['ins'], delta['upd'], delta['del'], delta['locks'], delta['activeq']
- ]
-
- @last = current_stat
- run_count += 1
- sleep @opts.interval
- end
- end
-
-
- ### Query the database for performance measurements. Returns a hash.
- ###
- def get_stats
- res = @db.exec %Q{
- SELECT
- MAX(stat_db.xact_commit) AS commits,
- MAX(stat_db.xact_rollback) AS rollbks,
- MAX(stat_db.blks_read) AS blksrd,
- MAX(stat_db.blks_hit) AS blkshit,
- MAX(stat_db.numbackends) AS bkends,
- SUM(stat_tables.seq_scan) AS seqscan,
- SUM(stat_tables.seq_tup_read) AS seqtprd,
- SUM(stat_tables.idx_scan) AS idxscn,
- SUM(stat_tables.idx_tup_fetch) AS idxtrd,
- SUM(stat_tables.n_tup_ins) AS ins,
- SUM(stat_tables.n_tup_upd) AS upd,
- SUM(stat_tables.n_tup_del) AS del,
- MAX(stat_locks.locks) AS locks,
- MAX(activity.sess) AS activeq
- FROM
- pg_stat_database AS stat_db,
- pg_stat_user_tables AS stat_tables,
- (SELECT COUNT(*) AS locks FROM pg_locks ) AS stat_locks,
- (SELECT COUNT(*) AS sess FROM pg_stat_activity WHERE current_query <> '<IDLE>') AS activity
- WHERE
- stat_db.datname = '%s';
- } % [ @opts.database ]
-
- return res[0]
- end
- end
-
-
- ### Parse command line arguments. Return a struct of global options.
- ###
- def parse_args( args )
- options = OpenStruct.new
- options.database = Etc.getpwuid( Process.uid ).name
- options.host = '127.0.0.1'
- options.port = 5432
- options.user = Etc.getpwuid( Process.uid ).name
- options.sslmode = 'disable'
- options.interval = 5
-
- opts = OptionParser.new do |opts|
- opts.banner = "Usage: #{$0} [options]"
-
- opts.separator ''
- opts.separator 'Connection options:'
-
- opts.on( '-d', '--database DBNAME',
- "specify the database to connect to (default: \"#{options.database}\")" ) do |db|
- options.database = db
- end
-
- opts.on( '-h', '--host HOSTNAME', 'database server host' ) do |host|
- options.host = host
- end
-
- opts.on( '-p', '--port PORT', Integer,
- "database server port (default: \"#{options.port}\")" ) do |port|
- options.port = port
- end
-
- opts.on( '-U', '--user NAME',
- "database user name (default: \"#{options.user}\")" ) do |user|
- options.user = user
- end
-
- opts.on( '-W', 'force password prompt' ) do |pw|
- print 'Password: '
- begin
- system 'stty -echo'
- options.pass = gets.chomp
- ensure
- system 'stty echo'
- puts
- end
- end
-
- opts.separator ''
- opts.separator 'Other options:'
-
- opts.on( '-i', '--interval SECONDS', Integer,
- "refresh interval in seconds (default: \"#{options.interval}\")") do |seconds|
- options.interval = seconds
- end
-
- opts.on_tail( '--help', 'show this help, then exit' ) do
- $stderr.puts opts
- exit
- end
-
- opts.on_tail( '--version', 'output version information, then exit' ) do
- puts Stats::VERSION
- exit
- end
- end
-
- opts.parse!( args )
- return options
- end
-
-
- ### Go!
- ###
- if __FILE__ == $0
- $stdout.sync = true
- Stats.new( parse_args( ARGV ) ).run
- end
-
-
- __END__
- ######################################################################
- ### T E R M I N A L O P T I O N S
- ######################################################################
-
- #set terminal png nocrop enhanced font arial 8 size '800x600' x000000 xffffff x444444
- #set output 'graph.png'
-
- set terminal pdf linewidth 4 size 11,8
- set output 'graph.pdf'
-
- #set terminal aqua
-
-
- ######################################################################
- ### O P T I O N S F O R A L L G R A P H S
- ######################################################################
-
- set multiplot layout 2,1 title "PostgreSQL Statistics\n5 second sample rate (smoothed)"
-
- set grid x y
- set key right vertical outside
- set key nobox
- set xdata time
- set timefmt "%Y-%m-%d.%H:%M:%S"
- set format x "%l%p"
- set xtic rotate by -45
- input_file = "database_stats.txt"
-
- # edit to taste!
- set xrange ["2012-04-16.00:00:00":"2012-04-17.00:00:00"]
-
-
- ######################################################################
- ### G R A P H 1
- ######################################################################
-
- set title "Database Operations and Connection Totals"
- set yrange [0:200]
-
- plot \
- input_file using 1:2 title "Commits" with lines smooth bezier, \
- input_file using 1:3 title "Rollbacks" with lines smooth bezier, \
- input_file using 1:11 title "Inserts" with lines smooth bezier, \
- input_file using 1:12 title "Updates" with lines smooth bezier, \
- input_file using 1:13 title "Deletes" with lines smooth bezier, \
- input_file using 1:6 title "Backends (total)" with lines, \
- input_file using 1:15 title "Active queries (total)" with lines smooth bezier
-
-
- ######################################################################
- ### G R A P H 2
- ######################################################################
-
- set title "Backend Performance"
- set yrange [0:10000]
-
- plot \
- input_file using 1:4 title "Block (cache) reads" with lines smooth bezier, \
- input_file using 1:5 title "Block (cache) hits" with lines smooth bezier, \
- input_file using 1:7 title "Sequence scans" with lines smooth bezier, \
- input_file using 1:8 title "Sequence tuple reads" with lines smooth bezier, \
- input_file using 1:9 title "Index scans" with lines smooth bezier, \
- input_file using 1:10 title "Index tuple reads" with lines smooth bezier
-
-
- ######################################################################
- ### C L E A N U P
- ######################################################################
-
- unset multiplot
- reset
-