cipherstash-pg 1.0.0.beta.1-arm64-darwin-21
This diff shows the content of publicly available package versions as released to one of the supported public registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in those registries.
- checksums.yaml +7 -0
- data/BSDL +22 -0
- data/Contributors.rdoc +46 -0
- data/Gemfile +14 -0
- data/History.rdoc +789 -0
- data/LICENSE +56 -0
- data/Manifest.txt +72 -0
- data/POSTGRES +23 -0
- data/README-OS_X.rdoc +68 -0
- data/README-Windows.rdoc +56 -0
- data/README.ja.rdoc +13 -0
- data/README.rdoc +233 -0
- data/Rakefile +115 -0
- data/certs/ged.pem +24 -0
- data/certs/larskanis-2022.pem +26 -0
- data/cipherstash-pg.gemspec +31 -0
- data/lib/2.7/pg_ext.bundle +0 -0
- data/lib/3.0/pg_ext.bundle +0 -0
- data/lib/3.1/pg_ext.bundle +0 -0
- data/lib/3.2/pg_ext.bundle +0 -0
- data/lib/cipherstash-pg.rb +15 -0
- data/lib/libpq.5.dylib +0 -0
- data/lib/pg/basic_type_map_based_on_result.rb +47 -0
- data/lib/pg/basic_type_map_for_queries.rb +193 -0
- data/lib/pg/basic_type_map_for_results.rb +81 -0
- data/lib/pg/basic_type_registry.rb +301 -0
- data/lib/pg/binary_decoder.rb +23 -0
- data/lib/pg/coder.rb +104 -0
- data/lib/pg/connection.rb +878 -0
- data/lib/pg/constants.rb +12 -0
- data/lib/pg/exceptions.rb +18 -0
- data/lib/pg/result.rb +43 -0
- data/lib/pg/text_decoder.rb +46 -0
- data/lib/pg/text_encoder.rb +59 -0
- data/lib/pg/tuple.rb +30 -0
- data/lib/pg/type_map_by_column.rb +16 -0
- data/lib/pg/version.rb +4 -0
- data/lib/pg.rb +55 -0
- data/misc/openssl-pg-segfault.rb +31 -0
- data/misc/postgres/History.txt +9 -0
- data/misc/postgres/Manifest.txt +5 -0
- data/misc/postgres/README.txt +21 -0
- data/misc/postgres/Rakefile +21 -0
- data/misc/postgres/lib/postgres.rb +16 -0
- data/misc/ruby-pg/History.txt +9 -0
- data/misc/ruby-pg/Manifest.txt +5 -0
- data/misc/ruby-pg/README.txt +21 -0
- data/misc/ruby-pg/Rakefile +21 -0
- data/misc/ruby-pg/lib/ruby/pg.rb +16 -0
- data/rakelib/task_extension.rb +46 -0
- data/sample/array_insert.rb +20 -0
- data/sample/async_api.rb +102 -0
- data/sample/async_copyto.rb +39 -0
- data/sample/async_mixed.rb +56 -0
- data/sample/check_conn.rb +21 -0
- data/sample/copydata.rb +71 -0
- data/sample/copyfrom.rb +81 -0
- data/sample/copyto.rb +19 -0
- data/sample/cursor.rb +21 -0
- data/sample/disk_usage_report.rb +177 -0
- data/sample/issue-119.rb +94 -0
- data/sample/losample.rb +69 -0
- data/sample/minimal-testcase.rb +17 -0
- data/sample/notify_wait.rb +72 -0
- data/sample/pg_statistics.rb +285 -0
- data/sample/replication_monitor.rb +222 -0
- data/sample/test_binary_values.rb +33 -0
- data/sample/wal_shipper.rb +434 -0
- data/sample/warehouse_partitions.rb +311 -0
- data/vendor/database-extensions/install.sql +317 -0
- data/vendor/database-extensions/uninstall.sql +20 -0
- metadata +118 -0
data/certs/larskanis-2022.pem ADDED
@@ -0,0 +1,26 @@
+-----BEGIN CERTIFICATE-----
+MIIETTCCArWgAwIBAgIBATANBgkqhkiG9w0BAQsFADAoMSYwJAYDVQQDDB1sYXJz
+L0RDPWdyZWl6LXJlaW5zZG9yZi9EQz1kZTAeFw0yMjAyMTQxMzMwNTZaFw0yMzAy
+MTQxMzMwNTZaMCgxJjAkBgNVBAMMHWxhcnMvREM9Z3JlaXotcmVpbnNkb3JmL0RD
+PWRlMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAwum6Y1KznfpzXOT/
+mZgJTBbxZuuZF49Fq3K0WA67YBzNlDv95qzSp7V/7Ek3NCcnT7G+2kSuhNo1FhdN
+eSDO/moYebZNAcu3iqLsuzuULXPLuoU0GsMnVMqV9DZPh7cQHE5EBZ7hlzDBK7k/
+8nBMvR0mHo77kIkapHc26UzVq/G0nKLfDsIHXVylto3PjzOumjG6GhmFN4r3cP6e
+SDfl1FSeRYVpt4kmQULz/zdSaOH3AjAq7PM2Z91iGwQvoUXMANH2v89OWjQO/NHe
+JMNDFsmHK/6Ji4Kk48Z3TyscHQnipAID5GhS1oD21/WePdj7GhmbF5gBzkV5uepd
+eJQPgWGwrQW/Z2oPjRuJrRofzWfrMWqbOahj9uth6WSxhNexUtbjk6P8emmXOJi5
+chQPnWX+N3Gj+jjYxqTFdwT7Mj3pv1VHa+aNUbqSPpvJeDyxRIuo9hvzDaBHb/Cg
+9qRVcm8a96n4t7y2lrX1oookY6bkBaxWOMtWlqIprq8JZXM9AgMBAAGjgYEwfzAJ
+BgNVHRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUOIdbSMr3VFrTCO9/cTM0
+0exHzBcwIgYDVR0RBBswGYEXbGFyc0BncmVpei1yZWluc2RvcmYuZGUwIgYDVR0S
+BBswGYEXbGFyc0BncmVpei1yZWluc2RvcmYuZGUwDQYJKoZIhvcNAQELBQADggGB
+AFWP7F/y3Oq3NgrqUOnjKOeDaBa7AqNhHS+PZg+C90lnJzMgOs4KKgZYxqSQVSab
+SCEmzIO/StkXY4NpJ4fYLrHemf/fJy1wPyu+fNdp5SEEUwEo+2toRFlzTe4u4LdS
+QC636nPPTMt8H3xz2wf/lUIUeo2Qc95Qt2BQM465ibbG9kmA3c7Sopx6yOabYOAl
+KPRbOSEPiWYcF9Suuz8Gdf8jxEtPlnZiwRvnYJ+IHMq3XQCJWPpMzdDMbtlgHbXE
+vq1zOTLMSYAS0UB3uionR4yo1hLz60odwkCm7qf0o2Ci/5OjtB0a89VuyqRU2vUJ
+QH95WBjDJ6lCCW7J0mrMPnJQSUFTmufsU6jOChvPaCeAzW1YwrsP/YKnvwueG7ip
+VOdW6RitjtFxhS7evRL0201+KUvLz12zZWWjOcujlQs64QprxOtiv/MiisKb1Ng+
+oL1mUdzB8KrZL4/WbG5YNX6UTtJbIOu9qEFbBAy4/jtIkJX+dlNoFwd4GXQW1YNO
+nA==
+-----END CERTIFICATE-----
data/cipherstash-pg.gemspec ADDED
@@ -0,0 +1,31 @@
+# This gemspec replaces cipherstash-pg.gemspec when the FAT gems are built.
+# It is not used when the ABI-specific gems are built.
+
+# frozen_string_literal: true
+# -*- encoding: utf-8 -*-
+
+require_relative 'lib/pg/version'
+
+Gem::Specification.new do |spec|
+  spec.name = "cipherstash-pg"
+  spec.version = "1.0.0.beta.1"
+  spec.authors = ["CipherStash"]
+  spec.email = ["engineers@cipherstash.com"]
+
+  spec.summary = "CipherStash PG is a drop in replacement of PG that provides transparent data encryption"
+  spec.description = "CipherStash PG is a drop in replacement of PG that provides transparent data encryption, with a PG-compatible API"
+  spec.homepage = "https://github.com/cipherstash/cipherstash-pg"
+  spec.license = "BSD-2-Clause"
+  spec.required_ruby_version = ">= 2.7"
+
+  spec.metadata["homepage_uri"] = spec.homepage
+  spec.metadata["source_code_uri"] = "https://github.com/cipherstash/cipherstash-pg"
+  spec.metadata["documentation_uri"] = "http://deveiate.org/code/pg"
+
+  spec.platform = Gem::Platform::CURRENT
+
+  spec.files = Dir.chdir(File.expand_path(__dir__)) do
+    `find .`.lines.map{|line| line.strip}.reject{|line| line == "" || [".", ".."].include?(line)}
+  end
+  spec.require_paths = ["lib"]
+end
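Based only on the gem name and version in the gemspec above, here is a minimal sketch of how the gem might be dropped in as a pg replacement; the Gemfile entry and the application snippet are illustrative, not part of this package:

    # Gemfile
    gem "cipherstash-pg", "1.0.0.beta.1"

    # Application code: requiring the gem loads the bundled pg extension and
    # libpq (see the binary files below), so the usual PG:: API stays available.
    require "cipherstash-pg"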
data/lib/2.7/pg_ext.bundle ADDED
Binary file
data/lib/3.0/pg_ext.bundle ADDED
Binary file
data/lib/3.1/pg_ext.bundle ADDED
Binary file
data/lib/3.2/pg_ext.bundle ADDED
Binary file
data/lib/cipherstash-pg.rb ADDED
@@ -0,0 +1,15 @@
+require_relative './pg'
+
+module CipherStash
+  module PG
+    DB_EXT_DIR = File.join(__dir__, '../vendor/database-extensions')
+
+    def self.install_script
+      File.read(File.join(DB_EXT_DIR, "install.sql"))
+    end
+
+    def self.uninstall_script
+      File.read(File.join(DB_EXT_DIR, "uninstall.sql"))
+    end
+  end
+end
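A minimal sketch of how these helpers might be used to set up the vendored database extensions. The connection settings are illustrative, and it assumes install.sql and uninstall.sql are plain SQL that libpq can run in a single exec call:

    require "cipherstash-pg"

    # Connect as a role that is allowed to create the extension objects.
    conn = PG.connect(dbname: "app_development")

    # Runs data/vendor/database-extensions/install.sql against the database.
    conn.exec(CipherStash::PG.install_script)

    # ...and later, to drop those objects again:
    conn.exec(CipherStash::PG.uninstall_script)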
data/lib/libpq.5.dylib ADDED
Binary file
data/lib/pg/basic_type_map_based_on_result.rb ADDED
@@ -0,0 +1,47 @@
+# -*- ruby -*-
+# frozen_string_literal: true
+
+require 'pg' unless defined?( PG )
+
+# Simple set of rules for type casting common PostgreSQL types from Ruby
+# to PostgreSQL.
+#
+# OIDs of supported type casts are not hard-coded in the sources, but are retrieved from the
+# PostgreSQL's +pg_type+ table in PG::BasicTypeMapBasedOnResult.new .
+#
+# This class works equal to PG::BasicTypeMapForResults, but does not define decoders for
+# the given result OIDs, but encoders. So it can be used to type cast field values based on
+# the type OID retrieved by a separate SQL query.
+#
+# PG::TypeMapByOid#build_column_map(result) can be used to generate a result independent
+# PG::TypeMapByColumn type map, which can subsequently be used to cast query bind parameters
+# or #put_copy_data fields.
+#
+# Example:
+#   conn.exec( "CREATE TEMP TABLE copytable (t TEXT, i INT, ai INT[])" )
+#
+#   # Retrieve table OIDs per empty result set.
+#   res = conn.exec( "SELECT * FROM copytable LIMIT 0" )
+#   # Build a type map for common ruby to database type encoders.
+#   btm = PG::BasicTypeMapBasedOnResult.new(conn)
+#   # Build a PG::TypeMapByColumn with encoders suitable for copytable.
+#   tm = btm.build_column_map( res )
+#   row_encoder = PG::TextEncoder::CopyRow.new type_map: tm
+#
+#   conn.copy_data( "COPY copytable FROM STDIN", row_encoder ) do |res|
+#     conn.put_copy_data ['a', 123, [5,4,3]]
+#   end
+# This inserts a single row into copytable with type casts from ruby to
+# database types.
+class PG::BasicTypeMapBasedOnResult < PG::TypeMapByOid
+  include PG::BasicTypeRegistry::Checker
+
+  def initialize(connection_or_coder_maps, registry: nil)
+    @coder_maps = build_coder_maps(connection_or_coder_maps, registry: registry)
+
+    # Populate TypeMapByOid hash with encoders
+    @coder_maps.each_format(:encoder).flat_map{|f| f.coders }.each do |coder|
+      add_coder(coder)
+    end
+  end
+end
data/lib/pg/basic_type_map_for_queries.rb ADDED
@@ -0,0 +1,193 @@
+# -*- ruby -*-
+# frozen_string_literal: true
+
+require 'pg' unless defined?( PG )
+
+# Simple set of rules for type casting common Ruby types to PostgreSQL.
+#
+# OIDs of supported type casts are not hard-coded in the sources, but are retrieved from the
+# PostgreSQL's pg_type table in PG::BasicTypeMapForQueries.new .
+#
+# Query params are type casted based on the class of the given value.
+#
+# Higher level libraries will most likely not make use of this class, but use their
+# own derivation of PG::TypeMapByClass or another set of rules to choose suitable
+# encoders and decoders for the values to be sent.
+#
+# Example:
+#   conn = PG::Connection.new
+#   # Assign a default ruleset for type casts of input and output values.
+#   conn.type_map_for_queries = PG::BasicTypeMapForQueries.new(conn)
+#   # Execute a query. The Integer param value is typecasted internally by PG::BinaryEncoder::Int8.
+#   # The format of the parameter is set to 0 (text) and the OID of this parameter is set to 20 (int8).
+#   res = conn.exec_params( "SELECT $1", [5] )
+class PG::BasicTypeMapForQueries < PG::TypeMapByClass
+  # Helper class for submission of binary strings into bytea columns.
+  #
+  # Since PG::BasicTypeMapForQueries chooses the encoder to be used by the class of the submitted value,
+  # it's necessary to send binary strings as BinaryData.
+  # That way they're distinct from text strings.
+  # Please note however that PG::BasicTypeMapForResults delivers bytea columns as plain String
+  # with binary encoding.
+  #
+  #   conn.type_map_for_queries = PG::BasicTypeMapForQueries.new(conn)
+  #   conn.exec("CREATE TEMP TABLE test (data bytea)")
+  #   bd = PG::BasicTypeMapForQueries::BinaryData.new("ab\xff\0cd")
+  #   conn.exec_params("INSERT INTO test (data) VALUES ($1)", [bd])
+  class BinaryData < String
+  end
+
+  class UndefinedEncoder < RuntimeError
+  end
+
+  include PG::BasicTypeRegistry::Checker
+
+  # Create a new type map for query submission
+  #
+  # Options:
+  # * +registry+: Custom type registry, nil for default global registry
+  # * +if_undefined+: Optional +Proc+ object which is called, if no type for an parameter class is not defined in the registry.
+  def initialize(connection_or_coder_maps, registry: nil, if_undefined: nil)
+    @coder_maps = build_coder_maps(connection_or_coder_maps, registry: registry)
+    @array_encoders_by_klass = array_encoders_by_klass
+    @encode_array_as = :array
+    @if_undefined = if_undefined || proc { |oid_name, format|
+      raise UndefinedEncoder, "no encoder defined for type #{oid_name.inspect} format #{format}"
+    }
+    init_encoders
+  end
+
+  # Change the mechanism that is used to encode ruby array values
+  #
+  # Possible values:
+  # * +:array+ : Encode the ruby array as a PostgreSQL array.
+  #   The array element type is inferred from the class of the first array element. This is the default.
+  # * +:json+ : Encode the ruby array as a JSON document.
+  # * +:record+ : Encode the ruby array as a composite type row.
+  # * <code>"_type"</code> : Encode the ruby array as a particular PostgreSQL type.
+  #   All PostgreSQL array types are supported.
+  #   If there's an encoder registered for the elements +type+, it will be used.
+  #   Otherwise a string conversion (by +value.to_s+) is done.
+  def encode_array_as=(pg_type)
+    case pg_type
+    when :array
+    when :json
+    when :record
+    when /\A_/
+    else
+      raise ArgumentError, "invalid pg_type #{pg_type.inspect}"
+    end
+
+    @encode_array_as = pg_type
+
+    init_encoders
+  end
+
+  attr_reader :encode_array_as
+
+  private
+
+  def init_encoders
+    coders.each { |kl, c| self[kl] = nil } # Clear type map
+    populate_encoder_list
+    @textarray_encoder = coder_by_name(0, :encoder, '_text')
+  end
+
+  def coder_by_name(format, direction, name)
+    check_format_and_direction(format, direction)
+    @coder_maps.map_for(format, direction).coder_by_name(name)
+  end
+
+  def undefined(name, format)
+    @if_undefined.call(name, format)
+  end
+
+  def populate_encoder_list
+    DEFAULT_TYPE_MAP.each do |klass, selector|
+      if Array === selector
+        format, name, oid_name = selector
+        coder = coder_by_name(format, :encoder, name).dup
+        if coder
+          if oid_name
+            oid_coder = coder_by_name(format, :encoder, oid_name)
+            if oid_coder
+              coder.oid = oid_coder.oid
+            else
+              undefined(oid_name, format)
+            end
+          else
+            coder.oid = 0
+          end
+          self[klass] = coder
+        else
+          undefined(name, format)
+        end
+      else
+
+        case @encode_array_as
+        when :array
+          self[klass] = selector
+        when :json
+          self[klass] = PG::TextEncoder::JSON.new
+        when :record
+          self[klass] = PG::TextEncoder::Record.new type_map: self
+        when /\A_/
+          coder = coder_by_name(0, :encoder, @encode_array_as)
+          if coder
+            self[klass] = coder
+          else
+            undefined(@encode_array_as, format)
+          end
+        else
+          raise ArgumentError, "invalid pg_type #{@encode_array_as.inspect}"
+        end
+      end
+    end
+  end
+
+  def array_encoders_by_klass
+    DEFAULT_ARRAY_TYPE_MAP.inject({}) do |h, (klass, (format, name))|
+      h[klass] = coder_by_name(format, :encoder, name)
+      h
+    end
+  end
+
+  def get_array_type(value)
+    elem = value
+    while elem.kind_of?(Array)
+      elem = elem.first
+    end
+    @array_encoders_by_klass[elem.class] ||
+      elem.class.ancestors.lazy.map{|ancestor| @array_encoders_by_klass[ancestor] }.find{|a| a } ||
+      @textarray_encoder
+  end
+
+  DEFAULT_TYPE_MAP = {
+    TrueClass => [1, 'bool', 'bool'],
+    FalseClass => [1, 'bool', 'bool'],
+    # We use text format and no type OID for numbers, because setting the OID can lead
+    # to unnecessary type conversions on server side.
+    Integer => [0, 'int8'],
+    Float => [0, 'float8'],
+    BigDecimal => [0, 'numeric'],
+    Time => [0, 'timestamptz'],
+    # We use text format and no type OID for IPAddr, because setting the OID can lead
+    # to unnecessary inet/cidr conversions on the server side.
+    IPAddr => [0, 'inet'],
+    Hash => [0, 'json'],
+    Array => :get_array_type,
+    BinaryData => [1, 'bytea'],
+  }
+
+  DEFAULT_ARRAY_TYPE_MAP = {
+    TrueClass => [0, '_bool'],
+    FalseClass => [0, '_bool'],
+    Integer => [0, '_int8'],
+    String => [0, '_text'],
+    Float => [0, '_float8'],
+    BigDecimal => [0, '_numeric'],
+    Time => [0, '_timestamptz'],
+    IPAddr => [0, '_inet'],
+  }

+end
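A short sketch of the encode_array_as switch documented in the file above. The connection and the SQL are illustrative; it only assumes a working PG::Connection in conn:

    tm = PG::BasicTypeMapForQueries.new(conn)
    conn.type_map_for_queries = tm

    # Default behaviour: a ruby Array is sent as a PostgreSQL array literal.
    tm.encode_array_as = :array
    conn.exec_params("SELECT $1::text[]", [["a", "b"]])

    # The same ruby value, now encoded as a JSON document instead.
    tm.encode_array_as = :json
    conn.exec_params("SELECT $1::json", [["a", "b"]])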
data/lib/pg/basic_type_map_for_results.rb ADDED
@@ -0,0 +1,81 @@
+# -*- ruby -*-
+# frozen_string_literal: true
+
+require 'pg' unless defined?( PG )
+
+# Simple set of rules for type casting common PostgreSQL types to Ruby.
+#
+# OIDs of supported type casts are not hard-coded in the sources, but are retrieved from the
+# PostgreSQL's +pg_type+ table in PG::BasicTypeMapForResults.new .
+#
+# Result values are type casted based on the type OID of the given result column.
+#
+# Higher level libraries will most likely not make use of this class, but use their
+# own set of rules to choose suitable encoders and decoders.
+#
+# Example:
+#   conn = PG::Connection.new
+#   # Assign a default ruleset for type casts of output values.
+#   conn.type_map_for_results = PG::BasicTypeMapForResults.new(conn)
+#   # Execute a query.
+#   res = conn.exec_params( "SELECT $1::INT", ['5'] )
+#   # Retrieve and cast the result value. Value format is 0 (text) and OID is 20. Therefore typecasting
+#   # is done by PG::TextDecoder::Integer internally for all value retrieval methods.
+#   res.values  # => [[5]]
+#
+# PG::TypeMapByOid#build_column_map(result) can be used to generate
+# a result independent PG::TypeMapByColumn type map, which can subsequently be used
+# to cast #get_copy_data fields:
+#
+# For the following table:
+#   conn.exec( "CREATE TABLE copytable AS VALUES('a', 123, '{5,4,3}'::INT[])" )
+#
+#   # Retrieve table OIDs per empty result set.
+#   res = conn.exec( "SELECT * FROM copytable LIMIT 0" )
+#   # Build a type map for common database to ruby type decoders.
+#   btm = PG::BasicTypeMapForResults.new(conn)
+#   # Build a PG::TypeMapByColumn with decoders suitable for copytable.
+#   tm = btm.build_column_map( res )
+#   row_decoder = PG::TextDecoder::CopyRow.new type_map: tm
+#
+#   conn.copy_data( "COPY copytable TO STDOUT", row_decoder ) do |res|
+#     while row=conn.get_copy_data
+#       p row
+#     end
+#   end
+# This prints the rows with type casted columns:
+#   ["a", 123, [5, 4, 3]]
+#
+# See also PG::BasicTypeMapBasedOnResult for the encoder direction and PG::BasicTypeRegistry for the definition of additional types.
+class PG::BasicTypeMapForResults < PG::TypeMapByOid
+  include PG::BasicTypeRegistry::Checker
+
+  class WarningTypeMap < PG::TypeMapInRuby
+    def initialize(typenames)
+      @already_warned = Hash.new{|h, k| h[k] = {} }
+      @typenames_by_oid = typenames
+    end
+
+    def typecast_result_value(result, _tuple, field)
+      format = result.fformat(field)
+      oid = result.ftype(field)
+      unless @already_warned[format][oid]
+        warn "Warning: no type cast defined for type #{@typenames_by_oid[oid].inspect} format #{format} with oid #{oid}. Please cast this type explicitly to TEXT to be safe for future changes."
+        @already_warned[format][oid] = true
+      end
+      super
+    end
+  end
+
+  def initialize(connection_or_coder_maps, registry: nil)
+    @coder_maps = build_coder_maps(connection_or_coder_maps, registry: registry)
+
+    # Populate TypeMapByOid hash with decoders
+    @coder_maps.each_format(:decoder).flat_map{|f| f.coders }.each do |coder|
+      add_coder(coder)
+    end
+
+    typenames = @coder_maps.typenames_by_oid
+    self.default_type_map = WarningTypeMap.new(typenames)
+  end
+end