backhoe 0.8.3 → 0.9.0
- checksums.yaml +4 -4
- data/README.md +0 -3
- data/lib/backhoe/version.rb +1 -1
- data/lib/backhoe.rb +0 -5
- metadata +2 -3
- data/lib/backhoe/backup.rb +0 -79
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '018ae4becb6f27ba167be3ab17101cdbb2b08f42eed108141b89e5ee634a133d'
+  data.tar.gz: f760a73b1f983fb050fbf7b4be493c7a944455c8f76e623278840d44b601f2d8
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a96b4af2ae37f29fc611aa81a8194587b005d182ad5f7b298c84aab0a917a8e77acafea234acbfbc498000656ac3898451adbc7ceb986417c4ad6457c6469962
+  data.tar.gz: 81dda2749d1d4b53ac8b57d784942f35941c2c9757a5295630f754bc95f9dc816697d2ea9c3572819c4c8215e4c7cde17f27197638343ab136e20e062827cef8
data/README.md
CHANGED
@@ -15,9 +15,6 @@ Backhoe.dump "data.sql", skip_tables: [:comments], skip_columns: { users: [:pass
 Backhoe.load "data.sql" # loads db from db/data.sql
 Backhoe.load "data.sql.gz" # => can also load a gzipped sql file
 Backhoe.load "data.sql", drop_and_create: true # injects DROP and CREATE statements into the SQL invocation
-
-# Backup db to S3
-Backhoe.backup "bucket-name/folder", access_key: "abc123", secret_key: "def456" # => must specify AWS creds
 ```
 
 ## Development
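Since the built-in S3 backup is gone in 0.9.0, one possible replacement is to pair `Backhoe.dump` with a separate upload step. This is a minimal sketch, not an official Backhoe API: it assumes the aws-sdk-s3 gem is installed separately and reuses the placeholder bucket/credentials from the removed README example.

```ruby
require "backhoe"
require "aws-sdk-s3" # separate gem; Backhoe 0.9.0 no longer ships any S3 code
require "time"

# Dump to a gzipped SQL file, as the removed Backup class did internally.
path = "/tmp/#{Time.now.utc.iso8601}.sql.gz"
Backhoe.dump path

# Upload with the AWS SDK instead of the old hand-rolled signed PUT.
s3 = Aws::S3::Client.new(
  region: "us-east-1",         # placeholder region
  access_key_id: "abc123",     # placeholder creds from the old README example
  secret_access_key: "def456"
)
File.open(path, "rb") do |file|
  s3.put_object(bucket: "bucket-name", key: "folder/#{File.basename(path)}", body: file)
end
```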
data/lib/backhoe/version.rb
CHANGED
data/lib/backhoe.rb
CHANGED
@@ -1,7 +1,6 @@
 require "backhoe/version"
 require "backhoe/dump"
 require "backhoe/load"
-require "backhoe/backup"
 require "backhoe/database"
 require "active_record"
 
@@ -14,10 +13,6 @@ module Backhoe
     def load file_path, drop_and_create: false
       Load.new(Database.new, file_path, drop_and_create).call
     end
-
-    def backup s3_path, access_key:, secret_key:
-      Backup.new(s3_path, access_key, secret_key).call
-    end
   end
 end
 
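With `require "backhoe/backup"` and the `backup` method removed, the module-level API that remains is `dump` and `load`. A quick recap based on the README examples above:

```ruby
require "backhoe"

Backhoe.dump "data.sql", skip_tables: [:comments]  # still supported
Backhoe.load "data.sql", drop_and_create: true     # still supported

# Removed in 0.9.0 -- Backhoe no longer defines this method:
# Backhoe.backup "bucket-name/folder", access_key: "abc123", secret_key: "def456"
```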
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: backhoe
 version: !ruby/object:Gem::Version
-  version: 0.8.3
+  version: 0.9.0
 platform: ruby
 authors:
 - Micah Geisel
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-07-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord
@@ -146,7 +146,6 @@ files:
 - gemfiles/activerecord_7.0.gemfile
 - gemfiles/activerecord_7.1.gemfile
 - lib/backhoe.rb
-- lib/backhoe/backup.rb
 - lib/backhoe/database.rb
 - lib/backhoe/dump.rb
 - lib/backhoe/load.rb
data/lib/backhoe/backup.rb
DELETED
@@ -1,79 +0,0 @@
-require "net/http"
-require "openssl"
-require "base64"
-
-module Backhoe
-  class Backup < Struct.new(:s3_path, :access_key, :secret_key)
-    def call
-      @time = Time.now
-
-      Backhoe.dump path
-
-      uri = URI("https://s3.amazonaws.com/#{s3_path}/#{filename}")
-
-      request = Net::HTTP::Put.new(uri, {
-        "Content-Length": File.size(path).to_s,
-        "Content-Type": content_type,
-        "Date": date,
-        "Authorization": "AWS #{access_key}:#{signature}",
-        "x-amz-storage-class": "STANDARD",
-        "x-amz-acl": "private",
-      })
-      request.body_stream = File.open(path)
-
-      Net::HTTP.start(uri.hostname) do |http|
-        response = http.request(request)
-        response.value # raises if not success
-      end
-    end
-
-    private
-
-    def signature
-      digester = OpenSSL::Digest::SHA1.new
-      digest = OpenSSL::HMAC.digest(digester, secret_key, key)
-      Base64.strict_encode64(digest)
-    end
-
-    def key
-      [
-        "PUT",
-        "",
-        content_type,
-        date,
-        acl,
-        storage_type,
-        full_s3_path,
-      ].join("\n")
-    end
-
-    def content_type
-      "application/gzip"
-    end
-
-    def date
-      @time.rfc2822
-    end
-
-    def acl
-      "x-amz-acl:private"
-    end
-
-    def storage_type
-      "x-amz-storage-class:STANDARD"
-    end
-
-    def full_s3_path
-      "/#{s3_path}/#{filename}"
-    end
-
-    def path
-      "/tmp/#{filename}"
-    end
-
-    def filename
-      "#{@time.utc.iso8601}.sql.gz"
-    end
-  end
-end
-
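For context, the deleted class hand-rolled its Authorization header using the legacy AWS Signature Version 2 scheme: an HMAC-SHA1 over a newline-joined string-to-sign, Base64-encoded and prefixed with the access key. Below is a standalone sketch of that computation with placeholder values, for illustration only (modern S3 clients use Signature Version 4):

```ruby
require "openssl"
require "base64"
require "time"

access_key = "abc123"   # placeholder credentials
secret_key = "def456"
date       = Time.now.rfc2822
resource   = "/bucket-name/folder/backup.sql.gz"  # "/<s3_path>/<filename>"

# Same newline-joined string-to-sign that Backup#key built:
# verb, Content-MD5 (empty), Content-Type, Date, canonicalized x-amz headers, resource.
string_to_sign = [
  "PUT",
  "",
  "application/gzip",
  date,
  "x-amz-acl:private",
  "x-amz-storage-class:STANDARD",
  resource,
].join("\n")

signature = Base64.strict_encode64(
  OpenSSL::HMAC.digest(OpenSSL::Digest::SHA1.new, secret_key, string_to_sign)
)
authorization = "AWS #{access_key}:#{signature}"
```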