backhoe 0.8.3 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/ci.yml +1 -1
- data/README.md +0 -3
- data/backhoe.gemspec +1 -0
- data/lib/backhoe/dump.rb +15 -7
- data/lib/backhoe/load.rb +4 -4
- data/lib/backhoe/version.rb +1 -1
- data/lib/backhoe.rb +4 -9
- metadata +16 -3
- data/lib/backhoe/backup.rb +0 -79
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 779de972f23a178e054d789eb0f09c70b7200b87468a4c85d14b53de9eddcfb8
+  data.tar.gz: 9621e4f611daa56dc8d34abc4285b9e86dbc79d3391b0463b39cdb663b02eee2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f1ee8947074606e2d4e4eca671883cd0c3ceab65de25dfd2228eada96888c316cd756dac556c3dd0a5ee16442dee8163155cb14498917355de67201bc505a8f5
+  data.tar.gz: e050249511d82941b33713b915399f51264608f32a40ae9cbf7bd58dd592eafe45a9ee99e22ec6e9c80df372211ee57534b6dcf831ca6ab4c29a3d305e7968e3
data/.github/workflows/ci.yml
CHANGED
@@ -8,7 +8,7 @@ jobs:
         gemfile: [ activerecord_6.0, activerecord_6.1, activerecord_7.0, activerecord_7.1 ]
         ruby: [ '3.0', 3.1, 3.2, 3.3 ]
 
-    runs-on: ubuntu-
+    runs-on: ubuntu-22.04
     env: # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
       BUNDLE_GEMFILE: ${{ github.workspace }}/gemfiles/${{ matrix.gemfile }}.gemfile
       POSTGRES_HOST_AUTH_METHOD: trust
data/README.md
CHANGED
@@ -15,9 +15,6 @@ Backhoe.dump "data.sql", skip_tables: [:comments], skip_columns: { users: [:pass
 Backhoe.load "data.sql" # loads db from db/data.sql
 Backhoe.load "data.sql.gz" # => can also load a gzipped sql file
 Backhoe.load "data.sql", drop_and_create: true # injects DROP and CREATE statements into the SQL invocation
-
-# Backup db to S3
-Backhoe.backup "bucket-name/folder", access_key: "abc123", secret_key: "def456" # => must specify AWS creds
 ```
 
 ## Development
data/backhoe.gemspec
CHANGED
data/lib/backhoe/dump.rb
CHANGED
@@ -1,7 +1,7 @@
 require "rake"
 
 module Backhoe
-  class Dump < Struct.new(:database, :
+  class Dump < Struct.new(:database, :path, :skip_tables, :skip_columns)
     include Rake::DSL
 
     def initialize *args
@@ -16,7 +16,7 @@ module Backhoe
       end
       if skip_columns.any?
         raise NotImplementedError if database.postgresql?
-        SanitizedDatabase.new(skip_columns,
+        SanitizedDatabase.new(skip_columns, path).dump do |tables|
           self.skip_tables += tables
           dump
         end
@@ -29,9 +29,9 @@ module Backhoe
 
     def dump
       if database.mysql?
-        sh "#{mysqldump} --no-create-db --single-transaction --quick -e #{skip_table_options} #{database.to_mysql_options} #{database.name} | #{pipe}
+        sh "#{mysqldump} --no-create-db --single-transaction --quick -e #{skip_table_options} #{database.to_mysql_options} #{database.name} | #{pipe} #{target}"
       elsif database.postgresql?
-        sh "#{pg_dump} --column-inserts #{database.name} | #{pipe}
+        sh "#{pg_dump} --column-inserts #{database.name} | #{pipe} #{target}"
       else
         raise "don't know how to dump #{database.adapter}"
       end
@@ -39,6 +39,14 @@ module Backhoe
 
     private
 
+    def target
+      if path =~ /^https?:\/\//
+        "| curl -X PUT -H 'Content-Type: application/octet-stream' --data-binary @- '#{path}'"
+      else
+        "> #{path}"
+      end
+    end
+
     def mysqldump
       cmd = `which mysqldump`.strip
       raise RuntimeError, "Cannot find mysqldump." if cmd.blank?
@@ -52,7 +60,7 @@ module Backhoe
     end
 
     def pipe
-
+      path =~ /\.gz\b/ ? "gzip -9f" : "cat"
     end
 
     def skip_table_options
@@ -61,13 +69,13 @@ module Backhoe
       end.join(" ")
     end
 
-  class SanitizedDatabase < Struct.new(:config, :
+  class SanitizedDatabase < Struct.new(:config, :path)
     def dump
       with_sanitized_tables do
         yield skip_tables
       end
       skip_tables.each do |table|
-        File.write
+        File.write path, "RENAME TABLE `sanitized_#{table}` TO `#{table}`;", mode: "a"
       end
     end
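The new `target` helper above decides where the dump stream ends up: an http(s) path is piped into a `curl` PUT, anything else is redirected to a local file, and a `.gz` suffix still routes the stream through `gzip -9f` via `pipe`. A minimal sketch of both call shapes, assuming a writable `tmp/` directory and a hypothetical upload endpoint:

```ruby
require "backhoe"

# Local file target: the shell pipeline ends in `> tmp/data.sql.gz`,
# and the .gz extension makes `pipe` compress with `gzip -9f`.
Backhoe.dump "tmp/data.sql.gz"

# HTTP target: the same dump is streamed through
# `curl -X PUT --data-binary @-` to the given URL (hypothetical endpoint).
Backhoe.dump "https://backups.example.com/latest.sql.gz"
```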
data/lib/backhoe/load.rb
CHANGED
@@ -1,7 +1,7 @@
 require "rake"
 
 module Backhoe
-  class Load < Struct.new(:database, :
+  class Load < Struct.new(:database, :path, :drop_and_create)
     include Rake::DSL
 
     def call
@@ -18,7 +18,7 @@ module Backhoe
     private
 
     def mysql_command
-      cmd = "#{cat} #{
+      cmd = "#{cat} #{path} | "
       cmd += if drop_and_create
         "#{pipe} | #{mysql} #{database.to_mysql_options}"
       else
@@ -27,7 +27,7 @@ module Backhoe
     end
 
     def psql_command
-      cmd = "#{cat} #{
+      cmd = "#{cat} #{path} | "
       if drop_and_create
         cmd = "dropdb -f #{database.name}; createdb #{database.name}; #{cmd}"
       end
@@ -36,7 +36,7 @@ module Backhoe
     end
 
     def cat
-
+      path =~ /\.gz$/ ? "zcat" : "cat"
     end
 
     def pipe
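`Load` mirrors this on the way back in: `cat` switches to `zcat` for `.gz` paths, and `drop_and_create` recreates the database (via `dropdb`/`createdb` on Postgres, or the DROP/CREATE pipe on MySQL) before the SQL is fed to the client. A small usage sketch, assuming the dump files from the previous example exist:

```ruby
require "backhoe"

# Plain SQL file: read with `cat` and fed straight to the database client.
Backhoe.load "tmp/data.sql"

# Gzipped file: `cat` returns "zcat", so the file is decompressed in the pipe.
# drop_and_create: true recreates the database before loading.
Backhoe.load "tmp/data.sql.gz", drop_and_create: true
```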
data/lib/backhoe/version.rb
CHANGED
data/lib/backhoe.rb
CHANGED
@@ -1,22 +1,17 @@
 require "backhoe/version"
 require "backhoe/dump"
 require "backhoe/load"
-require "backhoe/backup"
 require "backhoe/database"
 require "active_record"
 
 module Backhoe
   class << self
-    def dump
-      Dump.new(Database.new,
+    def dump path, skip_tables: [], skip_columns: {}
+      Dump.new(Database.new, path, skip_tables, skip_columns).call
     end
 
-    def load
-      Load.new(Database.new,
-    end
-
-    def backup s3_path, access_key:, secret_key:
-      Backup.new(s3_path, access_key, secret_key).call
+    def load path, drop_and_create: false
+      Load.new(Database.new, path, drop_and_create).call
     end
   end
 end
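The module-level API now takes the path as a positional argument with keyword options, matching the README examples above; `Backhoe.backup` is gone. A sketch of the 0.10.0 entry points (table and column names are purely illustrative):

```ruby
require "backhoe"

# Dump, skipping whole tables and individual columns
# (skip_columns raises NotImplementedError when the database is Postgres).
Backhoe.dump "db/data.sql",
  skip_tables: [:comments],
  skip_columns: { users: [:password] }

# Load, optionally dropping and recreating the database first.
Backhoe.load "db/data.sql", drop_and_create: false
```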
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: backhoe
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.10.0
 platform: ruby
 authors:
 - Micah Geisel
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-08-15 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord
@@ -122,6 +122,20 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
+- !ruby/object:Gem::Dependency
+  name: webrick
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: Dump and load current database to and from a file.
 email:
 - micah@botandrose.com
@@ -146,7 +160,6 @@ files:
 - gemfiles/activerecord_7.0.gemfile
 - gemfiles/activerecord_7.1.gemfile
 - lib/backhoe.rb
-- lib/backhoe/backup.rb
 - lib/backhoe/database.rb
 - lib/backhoe/dump.rb
 - lib/backhoe/load.rb
data/lib/backhoe/backup.rb
DELETED
@@ -1,79 +0,0 @@
-require "net/http"
-require "openssl"
-require "base64"
-
-module Backhoe
-  class Backup < Struct.new(:s3_path, :access_key, :secret_key)
-    def call
-      @time = Time.now
-
-      Backhoe.dump path
-
-      uri = URI("https://s3.amazonaws.com/#{s3_path}/#{filename}")
-
-      request = Net::HTTP::Put.new(uri, {
-        "Content-Length": File.size(path).to_s,
-        "Content-Type": content_type,
-        "Date": date,
-        "Authorization": "AWS #{access_key}:#{signature}",
-        "x-amz-storage-class": "STANDARD",
-        "x-amz-acl": "private",
-      })
-      request.body_stream = File.open(path)
-
-      Net::HTTP.start(uri.hostname) do |http|
-        response = http.request(request)
-        response.value # raises if not success
-      end
-    end
-
-    private
-
-    def signature
-      digester = OpenSSL::Digest::SHA1.new
-      digest = OpenSSL::HMAC.digest(digester, secret_key, key)
-      Base64.strict_encode64(digest)
-    end
-
-    def key
-      [
-        "PUT",
-        "",
-        content_type,
-        date,
-        acl,
-        storage_type,
-        full_s3_path,
-      ].join("\n")
-    end
-
-    def content_type
-      "application/gzip"
-    end
-
-    def date
-      @time.rfc2822
-    end
-
-    def acl
-      "x-amz-acl:private"
-    end
-
-    def storage_type
-      "x-amz-storage-class:STANDARD"
-    end
-
-    def full_s3_path
-      "/#{s3_path}/#{filename}"
-    end
-
-    def path
-      "/tmp/#{filename}"
-    end
-
-    def filename
-      "#{@time.utc.iso8601}.sql.gz"
-    end
-  end
-end
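With the `Backup` class and its hand-rolled AWS signing removed, the nearest 0.10.0 equivalent is to hand `Backhoe.dump` an upload URL directly and let the curl-based `target` do the PUT. A hedged sketch, assuming a presigned S3 PUT URL is obtained outside Backhoe (the helper below is hypothetical):

```ruby
require "backhoe"
require "time"

# Assumption: the presigned PUT URL is generated elsewhere (e.g. with an S3
# SDK); Backhoe 0.10.0 no longer knows anything about AWS credentials.
filename = "#{Time.now.utc.iso8601}.sql.gz"
presigned_put_url = obtain_presigned_put_url("my-bucket/backups/#{filename}") # hypothetical helper

# The .gz suffix keeps the gzip step from the old Backup flow, and the
# http(s) path streams the dump through `curl -X PUT` instead of a temp file.
Backhoe.dump presigned_put_url
```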