pgcp 0.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/bin/pgcp +9 -0
- data/lib/pgcp.rb +67 -0
- data/lib/postgres.rb +347 -0
- data/lib/transport.rb +138 -0
- metadata +92 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: e0bf53cb613c6859915d664646ef18c6ef216767
|
4
|
+
data.tar.gz: b3e0ace6249b20249efba309f40e7385d1f4cd24
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 41ae7a1c3b5c39e29b698a402f7c336d77de4a2f69c2cf92c2512624c788d6e37f61300a66b570cc5148b34100cd77e8e136332df81a64bea1395501de992a43
|
7
|
+
data.tar.gz: 21534f4d98783c90a57d54bcbb978864711206dc24686f275c286de031a79acbd668e1ea01ce4799a5b884821f50f07c579fb1a99dc55f6c81d26bad5713bbc2
|
data/bin/pgcp
ADDED
data/lib/pgcp.rb
ADDED
@@ -0,0 +1,67 @@
|
|
1
|
+
require 'logger'
|
2
|
+
|
3
|
+
class PgcpRunner < Thor
  desc 'cp', 'Perform copies of tables between Postgres databases'
  method_option :source, type: :string, aliases: '-s', desc: 'Source database', required: true
  method_option :dest, type: :string, aliases: '-d', desc: 'Destination database', required: true
  method_option :table, type: :string, aliases: '-t', desc: 'Table to be copied', required: true
  method_option :config, type: :string, aliases: '-c', desc: 'Path to config file'
  method_option :force_schema, type: :string, aliases: '-f', desc: 'Force destination schema'
  method_option :log, type: :string, aliases: '-l', desc: 'Path to log file'

  # Copy a single table, or a glob of tables (e.g. public.log_*), from the
  # database named by --source to the one named by --dest. Database configs
  # are looked up under the 'databases' key of the YAML config file
  # (default: ~/.pgcp.yml).
  def cp
    config = load_config_file(options['config'] || File.join(ENV['HOME'], '.pgcp.yml'))
    Pgcp.log_file = options['log'] if options['log']

    src = config['databases'][options['source']].symbolize_keys!
    dest = config['databases'][options['dest']].symbolize_keys!

    begin
      tr = Transport.new(src, dest)
      if options['table'].include? '*'
        # A glob must be schema-qualified so the wildcard applies to the
        # table part only.
        if (not options['table'].include? '.') or (options['table'].count('.') > 1)
          Pgcp.logger.error 'Globbed tables must have schema name, e.g. public.test* is valid but test* is not.'
          return
        end

        tr.copy_tables(options['table'], force_schema: options['force_schema'])
      else
        tr.copy_table(options['table'], nil, force_schema: options['force_schema'])
      end
    rescue StandardError => e
      # Bug fix: was `rescue Exception`, which also swallows SignalException
      # and SystemExit; rescue StandardError instead.
      Pgcp.logger.error(e.message)
      return
    end
  end

  default_task :cp

  private

  # Load the YAML config at +path+.
  #
  # @param [String, nil] path
  # @return [Hash] parsed config, or {} when path is nil or the file is absent
  def load_config_file(path)
    config = {}
    if not path.nil? and File.exist?(path)
      # Bug fix: File.exists? is deprecated and removed in Ruby 3.2;
      # File.exist? is the supported spelling.
      config = YAML.load_file(path)
    end

    config
  end
end
|
51
|
+
|
52
|
+
|
53
|
+
class Pgcp
  # Process-wide logger holder. Uses a class-level *instance* variable
  # instead of the original @@class variable, which is shared across the
  # whole inheritance tree and is a well-known Ruby anti-pattern. Public
  # interface (Pgcp.logger / Pgcp.log_file=) is unchanged.
  @logger = nil

  class << self
    # Lazily-created logger; defaults to STDOUT until log_file= is called.
    #
    # @return [Logger]
    def logger
      @logger ||= Logger.new(STDOUT)
    end

    # Redirect logging to +path+ (anything Logger.new accepts: a path
    # string or an IO).
    def log_file=(path)
      @logger = Logger.new(path)
    end
  end
end
|
data/lib/postgres.rb
ADDED
@@ -0,0 +1,347 @@
|
|
1
|
+
require 'active_support'
|
2
|
+
require 'active_support/core_ext'
|
3
|
+
require 'pg'
|
4
|
+
|
5
|
+
class Postgres
  # Thin wrapper over PG::Connection that opens a fresh connection per call
  # and provides table/index introspection plus simple DDL/COPY helpers.
  #
  # NOTE(review): several helpers interpolate schema/table names directly
  # into SQL, only sometimes passed through escape_string. Identifiers must
  # come from trusted configuration, not untrusted input (SQL-injection
  # risk otherwise).

  # Initialize Postgres instance
  #
  # @param [Hash] dbconfig Database config as accepted by PG::Connection.connect
  def initialize(dbconfig)
    @dbconfig = dbconfig
  end

  # Execute +sql+ with optional bind values +val+ on a fresh connection.
  #
  # @return [PG::Result]
  def exec(sql, val=[])
    with_connection do |conn|
      conn.exec sql, val
    end
  end

  # Names of base tables in +schema_name+, alphabetically sorted.
  #
  # @return [Array<String>]
  def list_tables(schema_name)
    with_connection do |conn|
      sql = <<-SQL.strip_heredoc
        SELECT table_name
        FROM information_schema.tables
        WHERE table_type = 'BASE TABLE'
        AND table_schema NOT IN ('pg_catalog', 'information_schema')
        AND table_schema = '#{schema_name}'
        ORDER BY 1
      SQL

      rs = conn.exec sql

      rs.values.map(&:first)
    end
  end

  # Drop schema_name.table_name when it exists; no-op otherwise.
  def drop_table(schema_name, table_name)
    with_connection do |conn|
      if _table_exists?(conn, schema_name, table_name)
        fq_table_name = conn.escape_string("#{schema_name}.#{table_name}")

        sql = <<-SQL.strip_heredoc
          DROP TABLE #{fq_table_name}
        SQL

        conn.exec sql
      end
    end
  end

  # Create a table from +columns+ definitions (hashes understood by
  # #column_line: :column_name, :data_type, :is_nullable).
  #
  # With options[:temporary] a TEMPORARY table is created; otherwise the
  # table is created only when it does not already exist.
  #
  # @return [Boolean] true when a table was created, false otherwise
  def create_table(schema_name, table_name, columns, options={})
    with_connection do |conn|
      if options[:temporary]
        table_name = conn.escape_string(table_name)
        create_sql = create_table_statement(conn, columns,
                                            table_name,
                                            options)
        # NOTE(review): original CREATE-then-DROP order preserved. The temp
        # table shadows a permanent table of the same name, so the DROP
        # targets the table just created — confirm this is intended.
        conn.transaction do
          conn.exec create_sql
          conn.exec "DROP TABLE IF EXISTS #{table_name}"
        end

        true
      elsif _table_exists?(conn, schema_name, table_name)
        false
      else
        create_sql = create_table_statement(conn, columns,
                                            "#{schema_name}.#{table_name}",
                                            options)
        conn.exec create_sql

        # Bug fix: the previous version fell through to an unconditional
        # `false`, so callers could never learn the table was created.
        true
      end
    end
  end

  # Drop-and-recreate schema_name.table_name, then populate it from +query+
  # (INSERT INTO ... <query>), all in one transaction.
  def create_table_from_query(query, schema_name, table_name, columns, options={})
    with_connection do |conn|
      create_sql = create_table_statement(conn, columns,
                                          "#{schema_name}.#{table_name}",
                                          options)
      conn.transaction do
        conn.exec "DROP TABLE IF EXISTS #{schema_name}.#{table_name}"
        conn.exec create_sql
        conn.exec "INSERT INTO #{schema_name}.#{table_name}\n#{query}"
      end
    end
  end

  # Atomically replace dst_table_name with src_table_name (both in
  # +schema_name+): drop the destination if present, then rename the source
  # onto it, inside one transaction.
  def hotswap_table(schema_name, src_table_name, dst_table_name)
    with_connection do |conn|
      conn.transaction do
        schema_name = conn.escape_string(schema_name)
        dst_table_name = conn.escape_string(dst_table_name)
        conn.exec "DROP TABLE #{schema_name}.#{dst_table_name}" if _table_exists?(conn, schema_name, dst_table_name)
        conn.exec "ALTER TABLE #{schema_name}.#{src_table_name} RENAME TO #{dst_table_name}"
      end
    end
  end

  # All user-visible schema names (excludes information_schema and pg_*).
  #
  # @return [Array<String>]
  def schema_names
    with_connection do |conn|
      sql = <<-SQL.strip_heredoc
        SELECT schema_name
        FROM information_schema.schemata
        WHERE schema_name <> 'information_schema'
        AND schema_name NOT LIKE 'pg_%'
      SQL

      rs = conn.exec sql
      rs.values.map(&:first)
    end
  end

  # Stream +csv_file+ (an IO) into the table via COPY ... FROM STDIN CSV.
  # options[:header] adds the HEADER clause (first row skipped).
  def copy_from_file(schema_name, table_name, csv_file, options={})
    with_connection do |conn|
      schema_name = conn.escape_string(schema_name)
      table_name = conn.escape_string(table_name)

      conn.copy_data "COPY #{schema_name}.#{table_name} FROM STDIN CSV #{options[:header]?'HEADER':''}" do
        buf = ''
        # Reuse one 256-byte buffer to avoid per-chunk allocation.
        while csv_file.read(256, buf)
          conn.put_copy_data(buf)
        end
      end
    end
  end

  # Column metadata for schema_name.table_name, in attribute order.
  #
  # @return [Array<Hash>] [{name:, type:, null: 'NOT NULL'|'NULL'}, ...]
  def column_definitions(schema_name, table_name)
    with_connection do |conn|
      sql = <<-SQL.strip_heredoc
        SELECT
          c.relname, a.attname AS column_name,
          pg_catalog.format_type(a.atttypid, a.atttypmod) as type,
          case
            when a.attnotnull
            then 'NOT NULL'
            else 'NULL'
          END as not_null
        FROM pg_class c,
             pg_attribute a,
             pg_type t,
             pg_namespace n
        WHERE c.relname = '#{table_name}'
          AND n.nspname = '#{schema_name}'
          AND a.attnum > 0
          AND a.attrelid = c.oid
          AND a.atttypid = t.oid
          AND c.relnamespace = n.oid
        ORDER BY a.attnum
      SQL

      rs = conn.exec sql

      rs.values.map do |col|
        {name: col[1], type: col[2], null: col[3]}
      end
    end
  end

  # Names of all indexes on schema_name.table_name.
  #
  # @return [Array<String>]
  def index_names(schema_name, table_name)
    with_connection do |conn|
      sql = <<-SQL.strip_heredoc
        SELECT
          C.relname AS "index_name"
        FROM pg_catalog.pg_class C,
             pg_catalog.pg_namespace N,
             pg_catalog.pg_index I,
             pg_catalog.pg_class C2
        WHERE C.relkind IN ( 'i', '' )
          AND N.oid = C.relnamespace
          AND N.nspname = '#{schema_name}'
          AND I.indexrelid = C.oid
          AND C2.oid = I.indrelid
          AND C2.relname = '#{table_name}';
      SQL

      rs = conn.exec sql

      rs.values.map(&:first)
    end
  end

  # Full index descriptions for schema_name.table_name, suitable for
  # #create_indexes: name, columns, unique/primary flags and partial-index
  # WHERE clause.
  #
  # @return [Array<Hash>]
  def get_indexes(schema_name, table_name)
    idx_names = self.index_names(schema_name, table_name)

    with_connection do |conn|
      idx_names.map do |name|
        index_info = index_info(conn, name, schema_name)
        index_info['name'] = name
        index_info['columns'] = index_column_names conn, index_info['oid']

        index_info
      end
    end
  end

  # Flags and predicate for a single index, looked up by name.
  # Converts the pg 't'/'f' strings to real booleans and strips the outer
  # parentheses pg_get_expr puts around the WHERE predicate.
  def index_info(conn, index_name, schema_name)
    sql = <<-SQL.strip_heredoc
      SELECT
        C.oid,
        I.indisunique AS "unique",
        I.indisprimary AS "primary",
        pg_get_expr(I.indpred, I.indrelid) AS "where"
      FROM pg_catalog.pg_class C,
           pg_catalog.pg_namespace N,
           pg_catalog.pg_index I
      WHERE C.relname = '#{index_name}'
        AND C.relnamespace = N.oid
        AND I.indexrelid = C.oid
        AND N.nspname = '#{schema_name}';
    SQL

    rs = conn.exec sql
    rs[0].tap do |info|
      info['unique'] = info['unique'] != 'f'
      info['primary'] = info['primary'] != 'f'
      info['where'] = info['where'][1..-2] if info['where'].present?
    end
  end

  # Column names of the index with the given pg_class +oid+, in index order.
  def index_column_names conn, oid
    sql = <<-SQL.strip_heredoc
      SELECT
        pg_catalog.pg_get_indexdef(A.attrelid, A.attnum, TRUE) AS "column_name"
      FROM pg_catalog.pg_attribute A
      WHERE A.attrelid = $1
        AND A.attnum > 0
        AND NOT A.attisdropped
      ORDER BY A.attnum;
    SQL
    conn.exec(sql, [oid]).map { |row| row['column_name'] }
  end

  # Recreate +indexes+ (as returned by #get_indexes) on
  # schema_name.table_name. Primary-key entries become ADD PRIMARY KEY;
  # everything else becomes CREATE [UNIQUE] INDEX with an optional WHERE.
  def create_indexes(schema_name, table_name, indexes)
    with_connection do |conn|
      indexes.each do |index|
        if index['primary']
          # Bug fix: use every key column, not just the first — composite
          # primary keys were silently truncated before.
          sql = <<-SQL.strip_heredoc
            ALTER TABLE #{schema_name}.#{table_name} ADD PRIMARY KEY (#{index['columns'].join(', ')})
          SQL
        else
          sql = <<-SQL.strip_heredoc
            CREATE #{index['unique'] ? 'UNIQUE': ''} INDEX #{index['name']}
            ON #{schema_name}.#{table_name} (#{index['columns'].join(', ')})
            #{index['where'] ? 'WHERE ' + index['where'] : ''}
          SQL
        end

        conn.exec(sql)
      end
    end
  end

  # Build a CREATE TABLE statement for the destination that mirrors the
  # source table's column definitions.
  #
  # @param [String, nil] dest_schema_name defaults to src_schema_name
  # @param [String, nil] dest_table_name defaults to src_table_name
  # @return [String]
  def get_create_table_statement(src_schema_name, src_table_name, dest_schema_name=nil, dest_table_name=nil)
    dest_schema_name ||= src_schema_name
    # Bug fix: previously defaulted to dest_schema_name, which named the new
    # table after the *schema* whenever dest_table_name was omitted.
    dest_table_name ||= src_table_name

    columns = column_definitions(src_schema_name, src_table_name)

    statement = "CREATE TABLE #{dest_schema_name}.#{dest_table_name} (\n"
    columns.each_with_index do |col, index|
      statement << "  #{col[:name]} #{col[:type]} #{col[:null]}"
      statement << ',' if index != columns.size - 1
      statement << "\n"
    end
    statement << ");\n"

    statement
  end

  # Public existence check (opens its own connection).
  def table_exist?(schema_name, table_name)
    with_connection do |conn|
      _table_exists?(conn, schema_name, table_name)
    end
  end

  private

  # Run +block+ with a fresh connection, always closing it afterwards.
  def with_connection(&block)
    conn = get_connection

    block.call(conn)
  ensure
    conn.close if not conn.nil?
  end

  def get_connection
    PG::Connection.connect(@dbconfig)
  end

  # One column clause for CREATE TABLE.
  # NOTE(review): expects keys :column_name/:data_type/:is_nullable, which
  # differ from the :name/:type/:null hashes #column_definitions returns —
  # the two formats are used by different callers; confirm before unifying.
  def column_line(column)
    name, data_type, nullable = column.symbolize_keys.values_at(:column_name, :data_type, :is_nullable)

    # default type to varchar
    data_type ||= "VARCHAR(1000)"

    line_tokens = ["\"#{name}\""]
    line_tokens << data_type
    line_tokens << (nullable ? '' : 'NOT NULL')

    line_tokens
      .select { |token| token != '' }
      .join " "
  end

  # Assemble a full CREATE [TEMPORARY] TABLE statement from +columns+.
  def create_table_statement(connection, columns, table_name, options={})
    statement = "CREATE #{options[:temporary] ? 'TEMPORARY' : ''} TABLE #{connection.escape_string(table_name)} (\n"
    statement << columns
      .map { |column| column_line(column) }
      .map(&:strip)
      .map { |column| connection.escape_string(column) }
      .join(",\n")
    statement << "\n);"

    statement
  end

  # True when schema_name.table_name exists (identifiers are escaped here).
  def _table_exists?(connection, schema_name, table_name)
    sql = <<-SQL.strip_heredoc
      SELECT
        count(table_name)
      FROM
        information_schema.tables
      WHERE
        table_schema <> 'pg_catalog'
        AND table_schema <> 'information_schema'
        AND table_schema !~ '^pg_toast'
        AND table_schema = '#{connection.escape_string(schema_name)}'
        AND table_name = '#{connection.escape_string(table_name)}'
      GROUP BY
        table_schema,table_name;
    SQL

    res = connection.exec(sql)

    res.values.size > 0
  end
end
|
data/lib/transport.rb
ADDED
@@ -0,0 +1,138 @@
|
|
1
|
+
require 'active_support'
|
2
|
+
require 'active_support/core_ext'
|
3
|
+
require './lib/postgres'
|
4
|
+
require './lib/qualified_name'
|
5
|
+
require './lib/pgcp'
|
6
|
+
require 'securerandom'
|
7
|
+
|
8
|
+
class Transport
  # Copies tables between two Postgres databases by piping psql COPY
  # streams (source TO STDOUT | destination FROM STDIN).

  # Initialize Transport instance
  #
  # @param [Hash] src_dbconfig Source database config
  # @param [Hash] dest_dbconfig Destination database config
  def initialize(src_dbconfig, dest_dbconfig, options={})
    @src_dbconfig = src_dbconfig
    @src_dbconfig[:port] ||= 5432
    @dest_dbconfig = dest_dbconfig
    @dest_dbconfig[:port] ||= 5432
  end

  # Copy every table matching +src_tablenames+, a 'schema.glob' pattern
  # such as 'public.log_*'.
  #
  # NOTE(review): candidates are listed from the DESTINATION database, so
  # only tables already present there get refreshed. Listing from the
  # source seems more natural — confirm which behavior is intended before
  # changing it.
  def copy_tables(src_tablenames, options={})
    schema_name, table_glob = src_tablenames.split('.')

    dest_conn = Postgres.new(@dest_dbconfig)
    tables = dest_conn.list_tables(schema_name)
    tables.each do |table|
      if File.fnmatch(table_glob, table)
        # Bug fix: the options hash used to be passed as the positional
        # dest_tablename argument, crashing per-table copies (a Hash is not
        # a qualified name) and dropping the options entirely.
        copy_table("#{schema_name}.#{table}", nil, options)
      end
    end
  end

  # Copy one table from the source database to the destination.
  #
  # @param [String] src_tablename qualified 'schema.table' name
  # @param [String, nil] dest_tablename defaults to src_tablename
  # @param [Hash] options :create_schema (default true), :skip_indexes
  #   (default false), :force_schema (override destination schema)
  def copy_table(src_tablename, dest_tablename=nil, options={})
    dest_tablename ||= src_tablename
    options[:create_schema] = true if options[:create_schema].nil?
    # Bug fix: the guard previously tested the misspelled key
    # :skipe_indexes (always nil), so a caller-supplied :skip_indexes was
    # unconditionally clobbered to false.
    options[:skip_indexes] = false if options[:skip_indexes].nil?

    Pgcp.logger.info "Start to copy from table #{src_tablename} to table #{dest_tablename}"
    src_table = QualifiedName.new(src_tablename)
    dest_table = QualifiedName.new(dest_tablename)
    dest_table.schema_name = options[:force_schema] if not options[:force_schema].nil?

    src_conn = Postgres.new(@src_dbconfig)
    dest_conn = Postgres.new(@dest_dbconfig)

    dest_conn.exec "CREATE SCHEMA IF NOT EXISTS #{dest_table.schema_name};" if options[:create_schema]

    src_indexes = src_conn.get_indexes(src_table.schema_name, src_table.table_name)
    # Bug fix: existence must be checked for the DESTINATION table. The old
    # code checked the source-qualified name against the destination, which
    # is wrong whenever :force_schema or a distinct dest_tablename is used.
    if dest_conn.table_exist?(dest_table.schema_name, dest_table.table_name)
      # Copy into a scratch table first, then hotswap, so readers never see
      # a half-copied destination table.
      Pgcp.logger.info "Destination table already exists, creating temporary table"
      temp_table = QualifiedName.new("#{dest_table.schema_name}.temp_#{SecureRandom.hex}")
      create_table_statement =
        src_conn.get_create_table_statement(src_table.schema_name,
                                            src_table.table_name,
                                            temp_table.schema_name,
                                            temp_table.table_name)
      begin
        dest_conn.exec(create_table_statement)
        Pgcp.logger.info "Copying table data to temporary table. This could take a while..."
        direct_copy(src_table.full_name, temp_table.full_name)
        Pgcp.logger.info "Hotswapping to destination table #{dest_tablename}"
        dest_conn.hotswap_table(dest_table.schema_name, temp_table.table_name, dest_table.table_name)
        Pgcp.logger.info "Done copying table data."
      rescue StandardError => e
        # Bug fix: was `rescue Exception`, which also swallows signals and
        # exit requests.
        Pgcp.logger.error(e.message)
        return
      ensure
        # Always remove the scratch table, whether or not the swap happened.
        dest_conn.drop_table(temp_table.schema_name, temp_table.table_name)
      end
    else
      Pgcp.logger.info "Destination table does not exist, creating destination table."
      create_table_statement =
        src_conn.get_create_table_statement(src_table.schema_name,
                                            src_table.table_name,
                                            dest_table.schema_name,
                                            dest_table.table_name)
      dest_conn.exec(create_table_statement)
      Pgcp.logger.info "Copying table data to destination table. This could take a while..."
      direct_copy(src_table.full_name, dest_table.full_name)
      Pgcp.logger.info "Copying table data to destination table done."
    end

    unless options[:skip_indexes]
      Pgcp.logger.info "Copying table indexes to destination table..."
      dest_conn.create_indexes(dest_table.schema_name, dest_table.table_name, src_indexes)
      Pgcp.logger.info "Done copying table indexes."
    end

  end

  private

  # Stream src -> dest through a psql | psql pipeline and raise on failure.
  def direct_copy(src_tablename, dest_tablename)
    sql_in = sql_copy_from_stdin(dest_tablename)
    sql_out = sql_copy_to_stdout(src_tablename)
    command = transfer_command(@src_dbconfig, @dest_dbconfig, sql_in, sql_out)
    `#{command}`
    # Bug fix: `echo $?` spawns a NEW shell whose own status is always 0,
    # so failures were never detected. $? in this process holds the status
    # of the backtick command just run.
    # NOTE(review): with `a | b` the shell reports b's status; a failing
    # source psql with a healthy destination still looks successful.
    unless $?.success?
      raise 'Failed to directly copy data'
    end
  end

  def sql_copy_from_stdin(q_tablename)
    <<-SQL.strip_heredoc
      COPY #{q_tablename} FROM STDIN
    SQL
  end

  def sql_copy_to_stdout(q_tablename)
    <<-SQL.strip_heredoc
      COPY (SELECT * FROM #{q_tablename}) TO STDOUT
    SQL
  end

  # Build the `psql | psql` pipeline string.
  # NOTE(review): config values (password, user, host, dbname) are
  # interpolated into a shell command unescaped — configs must be trusted
  # input (shell-injection risk otherwise).
  def transfer_command(src_dbconfig, dest_dbconfig, sql_in, sql_out)
    copy_to_command = %Q{
      env PGPASSWORD="#{src_dbconfig[:password]}"
      psql
      -U #{src_dbconfig[:user]}
      -h #{src_dbconfig[:host]}
      -p #{src_dbconfig[:port]}
      -c "#{sql_out}"
      #{src_dbconfig[:dbname]}
    }.gsub(/\n/, ' ')
    copy_from_command = %Q{
      env PGPASSWORD="#{dest_dbconfig[:password]}"
      psql
      -U #{dest_dbconfig[:user]}
      -h #{dest_dbconfig[:host]}
      -p #{dest_dbconfig[:port]}
      -c "#{sql_in}"
      #{dest_dbconfig[:dbname]}
    }.gsub(/\n/, ' ')

    "#{copy_to_command} | #{copy_from_command}"
  end
end
|
metadata
ADDED
@@ -0,0 +1,92 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: pgcp
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.0.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Thanh Dinh Khac
|
8
|
+
- Huy Nguyen
|
9
|
+
autorequire:
|
10
|
+
bindir: bin
|
11
|
+
cert_chain: []
|
12
|
+
date: 2015-11-05 00:00:00.000000000 Z
|
13
|
+
dependencies:
|
14
|
+
- !ruby/object:Gem::Dependency
|
15
|
+
name: activesupport
|
16
|
+
requirement: !ruby/object:Gem::Requirement
|
17
|
+
requirements:
|
18
|
+
- - "~>"
|
19
|
+
- !ruby/object:Gem::Version
|
20
|
+
version: '4.2'
|
21
|
+
type: :runtime
|
22
|
+
prerelease: false
|
23
|
+
version_requirements: !ruby/object:Gem::Requirement
|
24
|
+
requirements:
|
25
|
+
- - "~>"
|
26
|
+
- !ruby/object:Gem::Version
|
27
|
+
version: '4.2'
|
28
|
+
- !ruby/object:Gem::Dependency
|
29
|
+
name: thor
|
30
|
+
requirement: !ruby/object:Gem::Requirement
|
31
|
+
requirements:
|
32
|
+
- - "~>"
|
33
|
+
- !ruby/object:Gem::Version
|
34
|
+
version: '0.19'
|
35
|
+
type: :runtime
|
36
|
+
prerelease: false
|
37
|
+
version_requirements: !ruby/object:Gem::Requirement
|
38
|
+
requirements:
|
39
|
+
- - "~>"
|
40
|
+
- !ruby/object:Gem::Version
|
41
|
+
version: '0.19'
|
42
|
+
- !ruby/object:Gem::Dependency
|
43
|
+
name: pg
|
44
|
+
requirement: !ruby/object:Gem::Requirement
|
45
|
+
requirements:
|
46
|
+
- - "~>"
|
47
|
+
- !ruby/object:Gem::Version
|
48
|
+
version: '0.18'
|
49
|
+
type: :runtime
|
50
|
+
prerelease: false
|
51
|
+
version_requirements: !ruby/object:Gem::Requirement
|
52
|
+
requirements:
|
53
|
+
- - "~>"
|
54
|
+
- !ruby/object:Gem::Version
|
55
|
+
version: '0.18'
|
56
|
+
description: A simple command line tool to copy tables from one Postgres database
|
57
|
+
to another
|
58
|
+
email: thanh@holistics.io
|
59
|
+
executables:
|
60
|
+
- pgcp
|
61
|
+
extensions: []
|
62
|
+
extra_rdoc_files: []
|
63
|
+
files:
|
64
|
+
- bin/pgcp
|
65
|
+
- lib/pgcp.rb
|
66
|
+
- lib/postgres.rb
|
67
|
+
- lib/transport.rb
|
68
|
+
homepage: http://rubygems.org/gems/pgcp
|
69
|
+
licenses:
|
70
|
+
- GPL
|
71
|
+
metadata: {}
|
72
|
+
post_install_message:
|
73
|
+
rdoc_options: []
|
74
|
+
require_paths:
|
75
|
+
- lib
|
76
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
77
|
+
requirements:
|
78
|
+
- - ">="
|
79
|
+
- !ruby/object:Gem::Version
|
80
|
+
version: '0'
|
81
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
82
|
+
requirements:
|
83
|
+
- - ">="
|
84
|
+
- !ruby/object:Gem::Version
|
85
|
+
version: '0'
|
86
|
+
requirements: []
|
87
|
+
rubyforge_project:
|
88
|
+
rubygems_version: 2.4.8
|
89
|
+
signing_key:
|
90
|
+
specification_version: 4
|
91
|
+
summary: A simple command line tool to copy tables from one Postgres database to another
|
92
|
+
test_files: []
|