cknife 0.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/Gemfile +17 -0
- data/Gemfile.lock +86 -0
- data/LICENSE +21 -0
- data/README.md +169 -0
- data/Rakefile +38 -0
- data/VERSION +1 -0
- data/bin/cknifeaws +508 -0
- data/bin/cknifedub +66 -0
- data/bin/cknifenowtimestamp +3 -0
- data/bin/cknifewcdir +28 -0
- data/bin/cknifezerigo +101 -0
- data/cknife.gemspec +80 -0
- metadata +203 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: 6a8aeeb2e92a76ac079da5b63fe8e9403b451d6e
|
4
|
+
data.tar.gz: a74e4274ccc2b39ab451c06d3069ef649884d80f
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: edd89d9f659ef18a4c88b4b91b57f13252c6caa48d1443ac0a99ddbf5e8b5e183d5c8bfd66b7a3d7edcd60548d8c268996760da19638c63462faae89ac5febe0
|
7
|
+
data.tar.gz: 626d66f0e47119dae79626e6819196b75356d4b49a893edb04a2a0bc9867011ba32f4f2062223acdee678dc9c440c0f5e1ce8e5de8406a879854de5d76c8331a
|
data/Gemfile
ADDED
@@ -0,0 +1,17 @@
|
|
1
|
+
source "http://rubygems.org"
|
2
|
+
|
3
|
+
gem "rest-client"
|
4
|
+
gem "nokogiri"
|
5
|
+
gem "i18n"
|
6
|
+
gem "activesupport"
|
7
|
+
gem "thor"
|
8
|
+
gem "builder"
|
9
|
+
gem "fog", ">= 1.15.0"
|
10
|
+
gem "unf"
|
11
|
+
|
12
|
+
# Add dependencies to develop your gem here.
|
13
|
+
# Include everything needed to run rake, tests, features, etc.
|
14
|
+
group :development do
|
15
|
+
gem "bundler", "~> 1.0"
|
16
|
+
gem "jeweler", "~> 2.0.1"
|
17
|
+
end
|
data/Gemfile.lock
ADDED
@@ -0,0 +1,86 @@
|
|
1
|
+
GEM
|
2
|
+
remote: http://rubygems.org/
|
3
|
+
specs:
|
4
|
+
activesupport (3.0.7)
|
5
|
+
addressable (2.3.6)
|
6
|
+
builder (3.0.0)
|
7
|
+
descendants_tracker (0.0.4)
|
8
|
+
thread_safe (~> 0.3, >= 0.3.1)
|
9
|
+
excon (0.27.6)
|
10
|
+
faraday (0.9.0)
|
11
|
+
multipart-post (>= 1.2, < 3)
|
12
|
+
fog (1.17.0)
|
13
|
+
builder
|
14
|
+
excon (~> 0.27.0)
|
15
|
+
formatador (~> 0.2.0)
|
16
|
+
mime-types
|
17
|
+
multi_json (~> 1.0)
|
18
|
+
net-scp (~> 1.1)
|
19
|
+
net-ssh (>= 2.1.3)
|
20
|
+
nokogiri (~> 1.5)
|
21
|
+
ruby-hmac
|
22
|
+
formatador (0.2.4)
|
23
|
+
git (1.2.6)
|
24
|
+
github_api (0.11.3)
|
25
|
+
addressable (~> 2.3)
|
26
|
+
descendants_tracker (~> 0.0.1)
|
27
|
+
faraday (~> 0.8, < 0.10)
|
28
|
+
hashie (>= 1.2)
|
29
|
+
multi_json (>= 1.7.5, < 2.0)
|
30
|
+
nokogiri (~> 1.6.0)
|
31
|
+
oauth2
|
32
|
+
hashie (3.0.0)
|
33
|
+
highline (1.6.21)
|
34
|
+
i18n (0.6.0)
|
35
|
+
jeweler (2.0.1)
|
36
|
+
builder
|
37
|
+
bundler (>= 1.0)
|
38
|
+
git (>= 1.2.5)
|
39
|
+
github_api
|
40
|
+
highline (>= 1.6.15)
|
41
|
+
nokogiri (>= 1.5.10)
|
42
|
+
rake
|
43
|
+
rdoc
|
44
|
+
jwt (1.0.0)
|
45
|
+
mime-types (1.16)
|
46
|
+
mini_portile (0.5.2)
|
47
|
+
multi_json (1.8.2)
|
48
|
+
multi_xml (0.5.5)
|
49
|
+
multipart-post (2.0.0)
|
50
|
+
net-scp (1.1.2)
|
51
|
+
net-ssh (>= 2.6.5)
|
52
|
+
net-ssh (2.7.0)
|
53
|
+
nokogiri (1.6.0)
|
54
|
+
mini_portile (~> 0.5.0)
|
55
|
+
oauth2 (0.9.4)
|
56
|
+
faraday (>= 0.8, < 0.10)
|
57
|
+
jwt (~> 1.0)
|
58
|
+
multi_json (~> 1.3)
|
59
|
+
multi_xml (~> 0.5)
|
60
|
+
rack (~> 1.2)
|
61
|
+
rack (1.5.2)
|
62
|
+
rake (0.9.6)
|
63
|
+
rdoc (4.0.0)
|
64
|
+
rest-client (1.6.3)
|
65
|
+
mime-types (>= 1.16)
|
66
|
+
ruby-hmac (0.4.0)
|
67
|
+
thor (0.14.6)
|
68
|
+
thread_safe (0.3.4)
|
69
|
+
unf (0.1.3)
|
70
|
+
unf_ext
|
71
|
+
unf_ext (0.0.6)
|
72
|
+
|
73
|
+
PLATFORMS
|
74
|
+
ruby
|
75
|
+
|
76
|
+
DEPENDENCIES
|
77
|
+
activesupport
|
78
|
+
builder
|
79
|
+
bundler (~> 1.0)
|
80
|
+
fog (>= 1.15.0)
|
81
|
+
i18n
|
82
|
+
jeweler (~> 2.0.1)
|
83
|
+
nokogiri
|
84
|
+
rest-client
|
85
|
+
thor
|
86
|
+
unf
|
data/LICENSE
ADDED
@@ -0,0 +1,21 @@
|
|
1
|
+
The MIT License (MIT)
|
2
|
+
|
3
|
+
Copyright (c) 2013 The Mike De La Loza Company
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
7
|
+
in the Software without restriction, including without limitation the rights
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
10
|
+
furnished to do so, subject to the following conditions:
|
11
|
+
|
12
|
+
The above copyright notice and this permission notice shall be included in
|
13
|
+
all copies or substantial portions of the Software.
|
14
|
+
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
21
|
+
THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,169 @@
|
|
1
|
+
Cali Army Knife
|
2
|
+
==============
|
3
|
+
|
4
|
+
An Amazon Web Services S3 command line tool, and a few other command
|
5
|
+
line tools. Written in Ruby with Thor. It depends on the Fog gem for
|
6
|
+
all of its S3 operations.
|
7
|
+
|
8
|
+
Uses multipart uploads with a chunksize of 10 megabytes to keep RAM
|
9
|
+
usage down.
|
10
|
+
|
11
|
+
The premier feature of the tool is the "upsync" command, which can be
|
12
|
+
used to run a backups schedule with multiple classes of files
|
13
|
+
(partitioned by a glob pattern). **It is your responsibility to
|
14
|
+
generate one uniquely-named backup file per day**, as this tool does
|
15
|
+
not do that part for you.
|
16
|
+
|
17
|
+
If you *don't* use the `backups-retain` option, then its like a very
|
18
|
+
weak **rsync** that can upload from a local filesystem into a bucket.
|
19
|
+
Which is also pretty useful.
|
20
|
+
|
21
|
+
[Github Link](https://github.com/mikedll/cali-army-knife)
|
22
|
+
|
23
|
+
Examples:
|
24
|
+
|
25
|
+
# download entire my-photos bucke to CWD
|
26
|
+
> aws.rb download my-photos
|
27
|
+
|
28
|
+
# upload and sync /tmp/*.sql into my-frog-app-backups
|
29
|
+
# bucket. Treat the files as backup files, and keep one backup
|
30
|
+
# file for each of the last 5 months, 10 weeks, and 30 days.
|
31
|
+
> aws.rb upsync my-frog-app-backups ./tmp --glob "*.sql" --noprompt --backups-retain true --months-retain 5 --weeks-retain 10 --days-retain 30
|
32
|
+
|
33
|
+
# as above, but now do redis backup files (./tmp/*.rdb). these will not produce
|
34
|
+
# namespace collisions with the sql files, and thus the same bucket
|
35
|
+
# can be used to store backups for both.
|
36
|
+
> aws.rb upsync my-frog-app-backups ./tmp --glob "*.rdb" --noprompt --backups-retain true --months-retain 5 --weeks-retain 10 --days-retain 30
|
37
|
+
|
38
|
+
# DO NOT DO THIS INSTEAD OF THE ABOVE 2 COMMANDS, THINKING IT WILL
|
39
|
+
# TREAT .SQL AND .RDB FILES SEPARATELY. INSTEAD, YOU WILL LOSE
|
40
|
+
# SOME OF YOUR BACKUP FILES.
|
41
|
+
> aws.rb upsync my-frog-app-backups ./tmp --glob "*" --noprompt --backups-retain true --months-retain 5 --weeks-retain 10 --days-retain 30
|
42
|
+
|
43
|
+
# Dry run mode. Try one of the prior backups retain commands, but
|
44
|
+
# let's see what will happen, first.
|
45
|
+
> aws.rb upsync my-frog-app-backups ./tmp --glob "*.sql" --noprompt --backups-retain true --months-retain 5 --weeks-retain 10 --days-retain 30 --dry-run
|
46
|
+
|
47
|
+
|
48
|
+
# Getting Started / Installation
|
49
|
+
|
50
|
+
Clone this repo, `bundle` the Gemfile, and make the aws.rb invokable
|
51
|
+
either by adding it to your PATH or making a bash alias to it, using
|
52
|
+
an absolute path to it, or something. I dunno, I'm working on that.
|
53
|
+
|
54
|
+
# aws
|
55
|
+
|
56
|
+
Tasks:
|
57
|
+
aws.rb afew [BUCKET_NAME] # Show first 5 files in bucket
|
58
|
+
aws.rb create [BUCKET_NAME] # Create a bucket
|
59
|
+
aws.rb create_cloudfront [BUCKET_NAME] # Create a cloudfront distribution (a CDN)
|
60
|
+
aws.rb delete [BUCKET_NAME] # Destroy a bucket
|
61
|
+
aws.rb download [BUCKET_NAME] # Download all files in a bucket to CWD. Or one file.
|
62
|
+
aws.rb help [TASK] # Describe available tasks or one specific task
|
63
|
+
aws.rb list # Show all buckets
|
64
|
+
aws.rb list_cloudfront # List cloudfront distributions (CDNs)
|
65
|
+
aws.rb list_servers # Show all servers
|
66
|
+
aws.rb show [BUCKET_NAME] # Show info about bucket
|
67
|
+
aws.rb start_server [SERVER_ID] # Start a given EC2 server
|
68
|
+
aws.rb stop_server [SERVER_ID] # Stop a given EC2 server (does not terminate it)
|
69
|
+
aws.rb upsync [BUCKET_NAME] [DIRECTORY] # Push local files matching glob PATTERN into bucket. Ignore unchanged files.
|
70
|
+
|
71
|
+
## Synchronizing a local directory's files with an Amazon S3 Bucket
|
72
|
+
|
73
|
+
Usage:
|
74
|
+
aws.rb upsync [BUCKET_NAME] [DIRECTORY]
|
75
|
+
|
76
|
+
Options:
|
77
|
+
[--public]
|
78
|
+
[--region=REGION]
|
79
|
+
# Default: us-east-1
|
80
|
+
[--noprompt=NOPROMPT]
|
81
|
+
[--glob=GLOB]
|
82
|
+
# Default: **/*
|
83
|
+
[--backups-retain]
|
84
|
+
[--days-retain=N]
|
85
|
+
# Default: 30
|
86
|
+
[--months-retain=N]
|
87
|
+
# Default: 3
|
88
|
+
[--weeks-retain=N]
|
89
|
+
# Default: 5
|
90
|
+
[--dry-run]
|
91
|
+
|
92
|
+
The glob allows you to determine whether you want to recursively
|
93
|
+
upload an entire directory, or just a set of *.dat or *.sql files,
|
94
|
+
ignoring whatever else may be in the specified directory. This glob
|
95
|
+
pattern is appended to the directory you specify.
|
96
|
+
|
97
|
+
For determining whether to upload a file, first the mod time is used,
|
98
|
+
and if that match fails, an md5 checksum comparison is used.
|
99
|
+
|
100
|
+
The file's local modification time, from the file system from which it
|
101
|
+
was uploaded, is used to determined whether it qualifies for retention
|
102
|
+
in the backup program you specify.
|
103
|
+
|
104
|
+
This info is uploaded with the file to Amazon's S3 servers when the
|
105
|
+
file is uploaded, in the S3 file metadata. Without this, S3 uses a
|
106
|
+
modtime that is equal to when the file was last uploaded, which is not
|
107
|
+
comparable to the file's local mod time.
|
108
|
+
|
109
|
+
## Dumping an Amazon S3 bucket
|
110
|
+
|
111
|
+
Sometimes you want to download an entire S3 bucket to your local
|
112
|
+
directory - a set of photos, for example.
|
113
|
+
|
114
|
+
> aws.rb help download
|
115
|
+
Usage:
|
116
|
+
aws.rb download [BUCKET_NAME]
|
117
|
+
|
118
|
+
Options:
|
119
|
+
[--region=REGION]
|
120
|
+
# Default: us-east-1
|
121
|
+
[--one=ONE]
|
122
|
+
|
123
|
+
Download all files in a bucket to CWD. Or one file.
|
124
|
+
|
125
|
+
|
126
|
+
## Key and Secret Configuration
|
127
|
+
|
128
|
+
In order of priority, Setup your key and secret with:
|
129
|
+
|
130
|
+
- $CWD/cknife.yml
|
131
|
+
- $CWD/tmp/cknife.yml
|
132
|
+
- environment variables: `KEY`, `SECRET`
|
133
|
+
- environment variablse: `AMAZON_ACCESS_KEY_ID`, `AMAZON_SECRET_ACCESS_KEY`
|
134
|
+
|
135
|
+
The format of your cknife.yml must be like so:
|
136
|
+
|
137
|
+
---
|
138
|
+
key: AKIAblahblahb...
|
139
|
+
secret: 8xILhOsecretsecretsecretsecret...
|
140
|
+
|
141
|
+
|
142
|
+
# zerigo
|
143
|
+
|
144
|
+
These tasks can be used to manage your DNS via Zerigo. They changed
|
145
|
+
their rates drastically with little notice in January of 2014, so I
|
146
|
+
switched to DNS Simple and don't use this much anymore.
|
147
|
+
|
148
|
+
> zerigo
|
149
|
+
Tasks:
|
150
|
+
zerigo.rb create [HOST_NAME] # Create a host
|
151
|
+
zerigo.rb delete [ID] # Delete an entry by id
|
152
|
+
zerigo.rb help [TASK] # Describe available tasks or one specific task
|
153
|
+
zerigo.rb list # List available host names.
|
154
|
+
|
155
|
+
# dub
|
156
|
+
|
157
|
+
Like du, but sorts your output by size. This helps you determine
|
158
|
+
which directories are taking up the most space:
|
159
|
+
|
160
|
+
> dub
|
161
|
+
37.0G .
|
162
|
+
23.0G ./Personal
|
163
|
+
14.0G ./Library
|
164
|
+
673.0M ./Work
|
165
|
+
0.0B ./Colloquy Transcripts
|
166
|
+
|
167
|
+
## options
|
168
|
+
|
169
|
+
-c Enable colorized output.
|
data/Rakefile
ADDED
@@ -0,0 +1,38 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
|
3
|
+
require 'rubygems'
|
4
|
+
require 'bundler'
|
5
|
+
begin
|
6
|
+
Bundler.setup(:default, :development)
|
7
|
+
rescue Bundler::BundlerError => e
|
8
|
+
$stderr.puts e.message
|
9
|
+
$stderr.puts "Run `bundle install` to install missing gems"
|
10
|
+
exit e.status_code
|
11
|
+
end
|
12
|
+
require 'rake'
|
13
|
+
|
14
|
+
require 'jeweler'
|
15
|
+
Jeweler::Tasks.new do |gem|
|
16
|
+
# gem is a Gem::Specification... see http://guides.rubygems.org/specification-reference/ for more options
|
17
|
+
gem.name = "cknife"
|
18
|
+
gem.homepage = "http://github.com/mikedll/cknife"
|
19
|
+
gem.license = "MIT"
|
20
|
+
gem.summary = "Cali Army Knife"
|
21
|
+
gem.description = "An Amazon Web Services S3 command line tool, and a few other command line tools."
|
22
|
+
gem.email = "mikedll@mikedll.com"
|
23
|
+
gem.authors = ["Mike De La Loza"]
|
24
|
+
# dependencies defined in Gemfile
|
25
|
+
end
|
26
|
+
Jeweler::RubygemsDotOrgTasks.new
|
27
|
+
|
28
|
+
task :default => :spec
|
29
|
+
|
30
|
+
require 'rdoc/task'
|
31
|
+
Rake::RDocTask.new do |rdoc|
|
32
|
+
version = File.exist?('VERSION') ? File.read('VERSION') : ""
|
33
|
+
|
34
|
+
rdoc.rdoc_dir = 'rdoc'
|
35
|
+
rdoc.title = "cknife #{version}"
|
36
|
+
rdoc.rdoc_files.include('README*')
|
37
|
+
rdoc.rdoc_files.include('lib/**/*.rb')
|
38
|
+
end
|
data/VERSION
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
0.1.0
|
data/bin/cknifeaws
ADDED
@@ -0,0 +1,508 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
|
3
|
+
ENV['BUNDLE_GEMFILE'] = File.join( File.dirname( File.expand_path( __FILE__ ) ), "../Gemfile" )
|
4
|
+
|
5
|
+
require 'rubygems'
|
6
|
+
require 'bundler'
|
7
|
+
Bundler.require
|
8
|
+
|
9
|
+
require 'active_support/all'
|
10
|
+
require 'zlib'
|
11
|
+
require 'digest/md5'
|
12
|
+
|
13
|
+
class Aws < Thor
|
14
|
+
|
15
|
+
FILE_BUFFER_SIZE = 10.megabytes
|
16
|
+
LOCAL_MOD_KEY = "x-amz-meta-mtime"
|
17
|
+
EPSILON = 1.second
|
18
|
+
|
19
|
+
no_tasks do
|
20
|
+
|
21
|
+
def config
|
22
|
+
return @config if @config
|
23
|
+
|
24
|
+
@config = {
|
25
|
+
:key => ENV["KEY"] || ENV['AMAZON_ACCESS_KEY_ID'],
|
26
|
+
:secret => ENV["SECRET"] || ENV['AMAZON_SECRET_ACCESS_KEY']
|
27
|
+
}
|
28
|
+
|
29
|
+
config_file = nil
|
30
|
+
Pathname.new(Dir.getwd).tap do |here|
|
31
|
+
config_file = [["cknife.yml"], ["tmp", "cknife.yml"]].map { |args|
|
32
|
+
here.join(*args)
|
33
|
+
}.select { |path|
|
34
|
+
File.exists?(path)
|
35
|
+
}.first
|
36
|
+
end
|
37
|
+
|
38
|
+
if config_file
|
39
|
+
begin
|
40
|
+
@config.merge!(YAML.load(config_file.read).symbolize_keys!)
|
41
|
+
rescue
|
42
|
+
say ("Found, but could not parse config: #{config_file}")
|
43
|
+
end
|
44
|
+
end
|
45
|
+
|
46
|
+
@config
|
47
|
+
end
|
48
|
+
|
49
|
+
def fog_opts
|
50
|
+
opts = {
|
51
|
+
:provider => 'AWS',
|
52
|
+
:aws_access_key_id => config[:key],
|
53
|
+
:aws_secret_access_key => config[:secret]
|
54
|
+
}
|
55
|
+
opts.merge!({ :region => options[:region] }) if !options[:region].blank?
|
56
|
+
opts
|
57
|
+
end
|
58
|
+
|
59
|
+
def fog_storage
|
60
|
+
return @storage if @storage
|
61
|
+
@storage = Fog::Storage.new(fog_opts)
|
62
|
+
begin
|
63
|
+
@storage.directories.count # test login
|
64
|
+
rescue Excon::Errors::Forbidden => e
|
65
|
+
say("Received Forbidden error while accessing account info. Is your key/secret correct?")
|
66
|
+
raise SystemExit
|
67
|
+
end
|
68
|
+
@storage
|
69
|
+
end
|
70
|
+
|
71
|
+
def fog_compute
|
72
|
+
@compute ||= Fog::Compute.new(fog_opts)
|
73
|
+
end
|
74
|
+
|
75
|
+
def fog_cdn
|
76
|
+
@cdn ||= Fog::CDN.new(fog_opts)
|
77
|
+
end
|
78
|
+
|
79
|
+
def show_buckets
|
80
|
+
fog_storage.directories.sort { |a,b| a.key <=> b.key }.each { |b| puts "#{b.key}" }
|
81
|
+
end
|
82
|
+
|
83
|
+
def show_servers
|
84
|
+
fog_compute.servers.sort { |a,b| a.key_name <=> b.key_name }.each do |s|
|
85
|
+
puts "#{s.tags['Name']} (state: #{s.state}): id=#{s.id} keyname=#{s.key_name} dns=#{s.dns_name} flavor=#{s.flavor_id}"
|
86
|
+
end
|
87
|
+
end
|
88
|
+
|
89
|
+
def show_cdns
|
90
|
+
puts fog_cdn.get_distribution_list.body['DistributionSummary'].to_yaml
|
91
|
+
end
|
92
|
+
|
93
|
+
def with_bucket(bucket_name)
|
94
|
+
d = fog_storage.directories.select { |d| d.key == bucket_name }.first
|
95
|
+
if d.nil?
|
96
|
+
say ("Could not find bucket with name #{bucket_name}")
|
97
|
+
return
|
98
|
+
end
|
99
|
+
|
100
|
+
say ("Found bucket named #{bucket_name}")
|
101
|
+
yield d
|
102
|
+
end
|
103
|
+
|
104
|
+
def fog_key_for(target_root, file_path)
|
105
|
+
target_root_path_length ||= target_root.to_s.length + "/".length
|
106
|
+
relative = file_path[ target_root_path_length, file_path.length]
|
107
|
+
relative
|
108
|
+
end
|
109
|
+
|
110
|
+
def s3_download(s3_file)
|
111
|
+
dir_path = Pathname.new(s3_file.key).dirname
|
112
|
+
dir_path.mkpath
|
113
|
+
File.open(s3_file.key, "w") do |f|
|
114
|
+
f.write s3_file.body
|
115
|
+
end
|
116
|
+
end
|
117
|
+
|
118
|
+
|
119
|
+
def content_hash(file)
|
120
|
+
md5 = Digest::MD5.new
|
121
|
+
|
122
|
+
while !file.eof?
|
123
|
+
md5.update(file.read(FILE_BUFFER_SIZE))
|
124
|
+
end
|
125
|
+
|
126
|
+
md5.hexdigest
|
127
|
+
end
|
128
|
+
|
129
|
+
end
|
130
|
+
|
131
|
+
desc "list_servers", "Show all servers"
|
132
|
+
def list_servers
|
133
|
+
show_servers
|
134
|
+
end
|
135
|
+
|
136
|
+
desc "start_server [SERVER_ID]", "Start a given EC2 server"
|
137
|
+
def start_server(server_id)
|
138
|
+
s = fog_compute.servers.select { |s| s.id == server_id}.first
|
139
|
+
if s
|
140
|
+
say("found server. starting/resuming. #{s.id}")
|
141
|
+
s.start
|
142
|
+
show_servers
|
143
|
+
else
|
144
|
+
say("no server with that id found. nothing done.")
|
145
|
+
end
|
146
|
+
end
|
147
|
+
|
148
|
+
desc "stop_server [SERVER_ID]", "Stop a given EC2 server (does not terminate it)"
|
149
|
+
def stop_server(server_id)
|
150
|
+
s = fog_compute.servers.select { |s| s.id == server_id}.first
|
151
|
+
if s
|
152
|
+
say("found server. stopping. #{s.id}")
|
153
|
+
s.stop
|
154
|
+
else
|
155
|
+
say("no server with that id found. nothing done.")
|
156
|
+
end
|
157
|
+
end
|
158
|
+
|
159
|
+
desc "list_cloudfront", "List cloudfront distributions (CDNs)"
|
160
|
+
def list_cloudfront
|
161
|
+
show_cdns
|
162
|
+
end
|
163
|
+
|
164
|
+
desc "create_cloudfront [BUCKET_NAME]", "Create a cloudfront distribution (a CDN)"
|
165
|
+
def create_cloudfront(bucket_id)
|
166
|
+
fog_cdn.post_distribution({
|
167
|
+
'S3Origin' => {
|
168
|
+
'DNSName' => "#{bucket_id}.s3.amazonaws.com"
|
169
|
+
},
|
170
|
+
'Enabled' => true
|
171
|
+
})
|
172
|
+
|
173
|
+
show_cdns
|
174
|
+
end
|
175
|
+
|
176
|
+
desc "list", "Show all buckets"
|
177
|
+
method_options :region => "us-east-1"
|
178
|
+
def list
|
179
|
+
show_buckets
|
180
|
+
end
|
181
|
+
|
182
|
+
desc "afew [BUCKET_NAME]", "Show first 5 files in bucket"
|
183
|
+
method_options :count => "5"
|
184
|
+
method_options :glob => "**/*"
|
185
|
+
def afew(bucket_name)
|
186
|
+
d = fog_storage.directories.select { |d| d.key == bucket_name }.first
|
187
|
+
if d.nil?
|
188
|
+
say ("Found no bucket by name #{bucket_name}")
|
189
|
+
return
|
190
|
+
end
|
191
|
+
|
192
|
+
found = []
|
193
|
+
|
194
|
+
i = 0
|
195
|
+
d.files.each do |f|
|
196
|
+
if File.fnmatch(options[:glob], f.key)
|
197
|
+
found.push(d.files.head(f.key))
|
198
|
+
break if i >= options[:count].to_i
|
199
|
+
i += 1
|
200
|
+
end
|
201
|
+
end
|
202
|
+
|
203
|
+
unit_to_mult = {
|
204
|
+
'B' => 1,
|
205
|
+
'K' => 2**10,
|
206
|
+
'M' => 2**20,
|
207
|
+
'G' => 2**30
|
208
|
+
}
|
209
|
+
|
210
|
+
found.map { |f|
|
211
|
+
matching = unit_to_mult.keys.select { |k|
|
212
|
+
f.content_length >= unit_to_mult[k]
|
213
|
+
}.last
|
214
|
+
|
215
|
+
[f.key,
|
216
|
+
"#{(f.content_length.to_f / unit_to_mult[matching]).round(2)}#{matching}",
|
217
|
+
f.content_type,
|
218
|
+
f.last_modified
|
219
|
+
]
|
220
|
+
}.tap do |tabular|
|
221
|
+
print_table(tabular, :ident => 2)
|
222
|
+
end
|
223
|
+
|
224
|
+
end
|
225
|
+
|
226
|
+
desc "download [BUCKET_NAME]", "Download all files in a bucket to CWD. Or one file."
|
227
|
+
method_options :region => "us-east-1"
|
228
|
+
method_options :one => nil
|
229
|
+
def download(bucket_name)
|
230
|
+
with_bucket bucket_name do |d|
|
231
|
+
if options[:one].nil?
|
232
|
+
if yes?("Are you sure you want to download all files into the CWD?", :red)
|
233
|
+
d.files.each do |s3_file|
|
234
|
+
say("Creating path for and downloading #{s3_file.key}")
|
235
|
+
s3_download(s3_file)
|
236
|
+
end
|
237
|
+
else
|
238
|
+
say("No action taken.")
|
239
|
+
end
|
240
|
+
else
|
241
|
+
s3_file = d.files.get(options[:one])
|
242
|
+
if !s3_file.nil?
|
243
|
+
s3_download(s3_file)
|
244
|
+
else
|
245
|
+
say("Could not find #{options[:one]}. No action taken.")
|
246
|
+
end
|
247
|
+
end
|
248
|
+
end
|
249
|
+
end
|
250
|
+
|
251
|
+
desc "upsync [BUCKET_NAME] [DIRECTORY]", "Push local files matching glob PATTERN into bucket. Ignore unchanged files."
|
252
|
+
method_options :public => false
|
253
|
+
method_options :region => "us-east-1"
|
254
|
+
method_options :noprompt => nil
|
255
|
+
method_options :glob => "**/*"
|
256
|
+
method_options :backups_retain => false
|
257
|
+
method_options :days_retain => 30
|
258
|
+
method_options :months_retain => 3
|
259
|
+
method_options :weeks_retain => 5
|
260
|
+
method_options :dry_run => false
|
261
|
+
def upsync(bucket_name, directory)
|
262
|
+
|
263
|
+
say("This is a dry run.") if options[:dry_run]
|
264
|
+
|
265
|
+
if !File.exists?(directory) || !File.directory?(directory)
|
266
|
+
say("'#{directory} does not exist or is not a directory.")
|
267
|
+
return
|
268
|
+
end
|
269
|
+
|
270
|
+
target_root = Pathname.new(directory)
|
271
|
+
|
272
|
+
files = Dir.glob(target_root.join(options[:glob])).select { |f| !File.directory?(f) }.map(&:to_s)
|
273
|
+
if !options[:backups_retain] && files.count == 0
|
274
|
+
say("No files to upload and no backups retain requested.")
|
275
|
+
return
|
276
|
+
end
|
277
|
+
|
278
|
+
say("Found #{files.count} candidate file upload(s).")
|
279
|
+
|
280
|
+
spn = dn = sn = un = cn = 0
|
281
|
+
with_bucket bucket_name do |d|
|
282
|
+
|
283
|
+
# having a brain fart and cant get this to simplify
|
284
|
+
go = false
|
285
|
+
if options[:noprompt] != nil
|
286
|
+
go = true
|
287
|
+
else
|
288
|
+
go = yes?("Proceed?", :red)
|
289
|
+
end
|
290
|
+
|
291
|
+
if go
|
292
|
+
time_marks = []
|
293
|
+
immediate_successors = {}
|
294
|
+
if options[:backups_retain]
|
295
|
+
# inclusive lower bound, exclusive upper bound
|
296
|
+
time_marks = []
|
297
|
+
Time.now.beginning_of_day.tap do |start|
|
298
|
+
options[:days_retain].times do |i|
|
299
|
+
time_marks.push(start - i.days)
|
300
|
+
end
|
301
|
+
end
|
302
|
+
|
303
|
+
Time.now.beginning_of_week.tap do |start|
|
304
|
+
options[:weeks_retain].times do |i|
|
305
|
+
time_marks.push(start - i.weeks)
|
306
|
+
end
|
307
|
+
end
|
308
|
+
|
309
|
+
Time.now.beginning_of_month.tap do |start|
|
310
|
+
options[:months_retain].times do |i|
|
311
|
+
time_marks.push(start - i.months)
|
312
|
+
end
|
313
|
+
end
|
314
|
+
|
315
|
+
time_marks.each do |tm|
|
316
|
+
files.each do |to_upload|
|
317
|
+
File.open(to_upload) do |localfile|
|
318
|
+
if localfile.mtime >= tm && (immediate_successors[tm].nil? || localfile.mtime < immediate_successors[tm][:last_modified])
|
319
|
+
immediate_successors[tm] = { :local_path => to_upload, :last_modified => localfile.mtime }
|
320
|
+
end
|
321
|
+
end
|
322
|
+
end
|
323
|
+
end
|
324
|
+
end
|
325
|
+
|
326
|
+
# don't pointlessly upload large files if we already know we're going to delete them!
|
327
|
+
if options[:backups_retain]
|
328
|
+
immediate_successors.values.map { |h| h[:local_path] }.tap do |kept_files|
|
329
|
+
before_reject = files.count # blah...lame
|
330
|
+
files.reject! { |to_upload| !kept_files.include?(to_upload) }
|
331
|
+
sn += before_reject - files.count
|
332
|
+
|
333
|
+
say("Found #{files.count} file(s) that meet backups retention criteria for upload. Comparing against bucket...")
|
334
|
+
|
335
|
+
end
|
336
|
+
end
|
337
|
+
|
338
|
+
files.each do |to_upload|
|
339
|
+
say("#{to_upload} (no output if skipped)...")
|
340
|
+
k = fog_key_for(target_root, to_upload)
|
341
|
+
|
342
|
+
existing_head = d.files.head(k)
|
343
|
+
|
344
|
+
time_mismatch = false
|
345
|
+
content_hash_mistmatched = false
|
346
|
+
File.open(to_upload) do |localfile|
|
347
|
+
time_mismatch = !existing_head.nil? && (existing_head.metadata[LOCAL_MOD_KEY].nil? || (Time.parse(existing_head.metadata[LOCAL_MOD_KEY]) - localfile.mtime).abs > EPSILON)
|
348
|
+
if time_mismatch
|
349
|
+
content_hash_mistmatched = existing_head.etag != content_hash(localfile)
|
350
|
+
end
|
351
|
+
end
|
352
|
+
|
353
|
+
if existing_head && time_mismatch && content_hash_mistmatched
|
354
|
+
if !options[:dry_run]
|
355
|
+
File.open(to_upload) do |localfile|
|
356
|
+
existing_head.metadata = { LOCAL_MOD_KEY => localfile.mtime.to_s }
|
357
|
+
existing_head.body = localfile
|
358
|
+
existing_head.multipart_chunk_size = FILE_BUFFER_SIZE # creates multipart_save
|
359
|
+
existing_head.save
|
360
|
+
end
|
361
|
+
end
|
362
|
+
say("updated.")
|
363
|
+
un += 1
|
364
|
+
elsif existing_head && time_mismatch
|
365
|
+
if !options[:dry_run]
|
366
|
+
File.open(to_upload) do |localfile|
|
367
|
+
existing_head.metadata = { LOCAL_MOD_KEY => localfile.mtime.to_s }
|
368
|
+
existing_head.save
|
369
|
+
end
|
370
|
+
end
|
371
|
+
say("updated.")
|
372
|
+
un += 1
|
373
|
+
elsif existing_head.nil?
|
374
|
+
if !options[:dry_run]
|
375
|
+
File.open(to_upload) do |localfile|
|
376
|
+
file = d.files.create(
|
377
|
+
:key => k,
|
378
|
+
:public => options[:public],
|
379
|
+
:body => ""
|
380
|
+
)
|
381
|
+
file.metadata = { LOCAL_MOD_KEY => localfile.mtime.to_s }
|
382
|
+
file.multipart_chunk_size = FILE_BUFFER_SIZE # creates multipart_save
|
383
|
+
file.body = localfile
|
384
|
+
file.save
|
385
|
+
end
|
386
|
+
end
|
387
|
+
say("created.")
|
388
|
+
cn += 1
|
389
|
+
else
|
390
|
+
sn += 1
|
391
|
+
# skipped
|
392
|
+
end
|
393
|
+
end
|
394
|
+
|
395
|
+
|
396
|
+
if options[:backups_retain]
|
397
|
+
|
398
|
+
# This array of hashes is computed because we need to do
|
399
|
+
# nested for loops of M*N complexity, where M=time_marks
|
400
|
+
# and N=files. We also need to do an remote get call to
|
401
|
+
# fetch the metadata of all N remote files (d.files.each
|
402
|
+
# will not do this). so, for performance sanity, we cache
|
403
|
+
# all the meta data for all the N files.
|
404
|
+
|
405
|
+
file_keys_modtimes = []
|
406
|
+
d.files.each { |f|
|
407
|
+
if File.fnmatch(options[:glob], f.key)
|
408
|
+
existing_head = d.files.head(f.key)
|
409
|
+
md = existing_head.metadata
|
410
|
+
file_keys_modtimes.push({
|
411
|
+
:key => f.key,
|
412
|
+
:last_modified => md[LOCAL_MOD_KEY] ? Time.parse(md[LOCAL_MOD_KEY]) : f.last_modified,
|
413
|
+
:existing_head => existing_head
|
414
|
+
})
|
415
|
+
end
|
416
|
+
}
|
417
|
+
|
418
|
+
say("#{file_keys_modtimes.length} file(s) found to consider for remote retention or remote deletion.")
|
419
|
+
|
420
|
+
# this generates as many 'kept files' as there are time marks...which seems wrong.
|
421
|
+
immediate_successors = {}
|
422
|
+
time_marks.each do |tm|
|
423
|
+
file_keys_modtimes.each do |fkm|
|
424
|
+
if fkm[:last_modified] >= tm && (immediate_successors[tm].nil? || fkm[:last_modified] < immediate_successors[tm][:last_modified])
|
425
|
+
immediate_successors[tm] = fkm
|
426
|
+
end
|
427
|
+
end
|
428
|
+
end
|
429
|
+
|
430
|
+
immediate_successors.values.map { |v| v[:key] }.tap do |kept_keys|
|
431
|
+
file_keys_modtimes.each do |fkm|
|
432
|
+
if kept_keys.include?(fkm[:key])
|
433
|
+
say("Remote retained #{fkm[:key]}.")
|
434
|
+
spn += 1
|
435
|
+
else
|
436
|
+
fkm[:existing_head].destroy if !options[:dry_run]
|
437
|
+
say("Remote deleted #{fkm[:key]}.")
|
438
|
+
dn += 1
|
439
|
+
end
|
440
|
+
end
|
441
|
+
end
|
442
|
+
end
|
443
|
+
else
|
444
|
+
say ("No action taken.")
|
445
|
+
end
|
446
|
+
end
|
447
|
+
say("Done. #{cn} created. #{un} updated. #{sn} local skipped. #{dn} deleted remotely. #{spn} retained remotely.")
|
448
|
+
end
|
449
|
+
|
450
|
+
desc "delete [BUCKET_NAME]", "Destroy a bucket"
|
451
|
+
method_options :region => "us-east-1"
|
452
|
+
def delete(bucket_name)
|
453
|
+
d = fog_storage.directories.select { |d| d.key == bucket_name }.first
|
454
|
+
|
455
|
+
if d.nil?
|
456
|
+
say ("Found no bucket by name #{bucket_name}")
|
457
|
+
return
|
458
|
+
end
|
459
|
+
|
460
|
+
if d.files.length > 0
|
461
|
+
say "Bucket has #{d.files.length} files. Please empty before destroying."
|
462
|
+
return
|
463
|
+
end
|
464
|
+
|
465
|
+
if yes?("Are you sure you want to delete this bucket #{d.key}?", :red)
|
466
|
+
d.destroy
|
467
|
+
say "Destroyed bucket named #{bucket_name}."
|
468
|
+
show_buckets
|
469
|
+
else
|
470
|
+
say "No action taken."
|
471
|
+
end
|
472
|
+
|
473
|
+
end
|
474
|
+
|
475
|
+
desc "create [BUCKET_NAME]", "Create a bucket"
|
476
|
+
method_options :region => "us-east-1"
|
477
|
+
def create(bucket_name = nil)
|
478
|
+
if !bucket_name
|
479
|
+
puts "No bucket name given."
|
480
|
+
return
|
481
|
+
end
|
482
|
+
|
483
|
+
fog_storage.directories.create(
|
484
|
+
:key => bucket_name,
|
485
|
+
:location => options[:region]
|
486
|
+
)
|
487
|
+
|
488
|
+
puts "Created bucket #{bucket_name}."
|
489
|
+
show_buckets
|
490
|
+
end
|
491
|
+
|
492
|
+
desc "show [BUCKET_NAME]", "Show info about bucket"
|
493
|
+
method_options :region => "us-east-1"
|
494
|
+
def show(bucket_name = nil)
|
495
|
+
if !bucket_name
|
496
|
+
puts "No bucket name given."
|
497
|
+
return
|
498
|
+
end
|
499
|
+
|
500
|
+
with_bucket(bucket_name) do |d|
|
501
|
+
say "#{d}: "
|
502
|
+
say d.location
|
503
|
+
end
|
504
|
+
end
|
505
|
+
|
506
|
+
end
|
507
|
+
|
508
|
+
Aws.start
|
data/bin/cknifedub
ADDED
@@ -0,0 +1,66 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
|
3
|
+
color_highlighting = false
|
4
|
+
|
5
|
+
ARGV.each do |arg|
|
6
|
+
if arg == '-c'
|
7
|
+
color_highlighting = true
|
8
|
+
end
|
9
|
+
end
|
10
|
+
|
11
|
+
duout = `du -h -d 1`
|
12
|
+
|
13
|
+
comps = []
|
14
|
+
|
15
|
+
duout.split(/\n/).each do |l|
|
16
|
+
l =~ /\s*(\d+(\.\d+)?)(\w)\s+(.*)$/
|
17
|
+
size = $1
|
18
|
+
unit = $3
|
19
|
+
path = $4
|
20
|
+
comps << [size.to_f, unit, path]
|
21
|
+
end
|
22
|
+
|
23
|
+
unit_to_mult = {
|
24
|
+
'B' => 1,
|
25
|
+
'K' => 2**10,
|
26
|
+
'M' => 2**20,
|
27
|
+
'G' => 2**30
|
28
|
+
}
|
29
|
+
|
30
|
+
comps.sort! do |a, b|
|
31
|
+
(b[0] * unit_to_mult[b[1]]) <=> (a[0] * unit_to_mult[a[1]])
|
32
|
+
end
|
33
|
+
|
34
|
+
BLACK = "\033[30m"
|
35
|
+
RED = "\033[31m"
|
36
|
+
RED_BLINK = "\033[5;31m"
|
37
|
+
GREEN = "\033[32m"
|
38
|
+
YELLOW = "\033[33m"
|
39
|
+
BLUE = "\033[34m"
|
40
|
+
MAGENTA = "\033[35m"
|
41
|
+
CYAN = "\033[36m"
|
42
|
+
WHITE = "\033[37m"
|
43
|
+
RESET_COLOR = "\033[0m"
|
44
|
+
|
45
|
+
if color_highlighting
|
46
|
+
comps.each do |(size,unit,path)|
|
47
|
+
case unit
|
48
|
+
when 'G'
|
49
|
+
output_color = RED
|
50
|
+
when 'M'
|
51
|
+
output_color = YELLOW
|
52
|
+
when 'K'
|
53
|
+
output_color = CYAN
|
54
|
+
when 'B'
|
55
|
+
output_color = GREEN
|
56
|
+
else
|
57
|
+
output_color = BLACK
|
58
|
+
end
|
59
|
+
|
60
|
+
printf( "%s%10.1f%s %s%s\n",output_color, size, unit, path,RESET_COLOR)
|
61
|
+
end
|
62
|
+
else
|
63
|
+
comps.each do |(size,unit,path)|
|
64
|
+
printf( "%10.1f%s %s\n",size, unit, path)
|
65
|
+
end
|
66
|
+
end
|
data/bin/cknifewcdir
ADDED
@@ -0,0 +1,28 @@
|
|
1
|
+
#!/usr/bin/env ruby
#
# cknifewcdir: run `wc -l` on every file under the current directory whose
# path matches the pattern given as ARGV[0], print the counts sorted
# largest first, then a grand total.
#
# Usage: cknifewcdir '*pattern*'

require 'shellwords'

def run
  # Shell-escape the user-supplied pattern: the original interpolated
  # ARGV[0] raw into the backtick command, so metacharacters in the
  # argument could inject arbitrary shell commands.
  pattern = Shellwords.escape(ARGV[0].to_s)
  wccmd = `find . -ipath #{pattern} -exec wc -l {} \\;`

  comps = []
  total = 0

  wccmd.split(/\n/).each do |l|
    # A `wc -l` line looks like "  42 ./path". Skip lines that do not
    # match — the original read the stale $-globals after a failed match,
    # duplicating the previous line's values.
    m = l.match(/\s*(\d+)\s+(.*)$/)
    next unless m
    count = m[1].to_i
    comps << [count, m[2]]
    total += count
  end

  # Largest line counts first.
  comps.sort! { |a, b| b[0] <=> a[0] }

  comps.each do |(size, path)|
    printf("%d %s\n", size, path)
  end

  puts "Total: #{total}"
end

run
|
data/bin/cknifezerigo
ADDED
@@ -0,0 +1,101 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
|
3
|
+
ENV['BUNDLE_GEMFILE'] = File.join( File.dirname( File.expand_path( __FILE__ ) ), "Gemfile" )
|
4
|
+
|
5
|
+
require 'rubygems'
|
6
|
+
require 'bundler'
|
7
|
+
Bundler.require
|
8
|
+
|
9
|
+
require 'active_support/all'
|
10
|
+
|
11
|
+
# Thor CLI for managing DNS host records on the Zerigo API (v1.1).
# Credentials and the working domain come from the environment:
# ZERIGO_USERNAME, ZERIGO_PASSWORD, ZERIGO_DEFAULT_DOMAIN.
class Zerigo < Thor

  ENDPOINT = "https://ns.zerigo.com/api/1.1"
  USERNAME = ENV['ZERIGO_USERNAME']
  PASSWORD = ENV['ZERIGO_PASSWORD']
  DOMAIN = ENV['ZERIGO_DEFAULT_DOMAIN']


  no_tasks do
    # Memoized authenticated REST resource rooted at the API endpoint.
    def zerigo
      @zerigo ||= RestClient::Resource.new ENDPOINT, USERNAME, PASSWORD
    end

    # Print the <host> entries of an API response as an aligned table
    # (type, hostname, data, id).
    def show_hosts(response)
      fmt = "%6.5s %30.29s %40.39s %12.11s"

      puts sprintf(fmt,
                   "Type",
                   "Hostname",
                   "Data",
                   "ID")

      Nokogiri::XML( response.body ).css('host').each do |host|
        puts sprintf(fmt,
                     host.at_css('host-type').text,
                     host.at_css('hostname').text,
                     host.at_css('data').text,
                     host.at_css('id').text)
      end
    end

    # Look up (and memoize) the zone id whose domain matches DOMAIN.
    # Raises if no zone on the account matches.
    def zone
      if @zone.nil?
        zones = []
        response = zerigo['/zones.xml'].get
        # Positive 2xx check (the original used a double-negative
        # `unless ... !~`, which is equivalent but hard to read).
        if response.code.to_s =~ /2\d{2}/
          # Block param renamed from `zone` — it shadowed this method.
          Nokogiri::XML( response.body ).css('zone').each do |zone_node|
            zones << {
              :id => zone_node.css('id').first.text,
              :domain => zone_node.css('domain').first.text,
            }
          end
        end
        found = zones.select { |z| z[:domain] == DOMAIN }.first
        raise "Unable to find zone matching domain #{DOMAIN}" unless found

        @zone = found[:id]
      end
      @zone
    end
  end

  # Record types accepted by the --host_type option.
  module HostTypes
    CNAME = "CNAME"
    ARECORD = "A"
    MX = "MX"
  end

  desc "list", "List available host names."
  def list
    response = zerigo["/zones/#{zone}/hosts.xml"].get
    if response.code.to_s =~ /2\d{2}/
      show_hosts( response )
    end
  end

  desc "create [HOST_NAME]", "Create a host"
  method_options :data => "proxy.heroku.com"
  method_options :host_type => HostTypes::CNAME
  def create(hostname = "")
    new_host = { :data => options[:data], :host_type => options[:host_type], :hostname => hostname }

    response = zerigo["/zones/#{zone}/hosts.xml"].post new_host.to_xml(:root => "host"), :content_type => "application/xml"
    # 201 Created is the success status for a POSTed host.
    if response.code.to_s =~ /201/
      puts "Created a host."
      show_hosts( response )
    end
  end


  desc "delete [ID]", "Delete an entry by id"
  def delete(id)
    response = zerigo["/hosts/#{id}.xml"].delete
    if response.code.to_s =~ /200/
      # Message fixed: the original printed "Delete a host with id ..."
      # after the deletion had already happened.
      puts "Deleted host with id #{id}"
    end
  end

end

Zerigo.start
|
data/cknife.gemspec
ADDED
@@ -0,0 +1,80 @@
|
|
1
|
+
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: cknife 0.1.0 ruby lib

# NOTE(review): generated file — kept byte-identical; any change belongs in
# the Rakefile's Jeweler::Tasks block, not here.
Gem::Specification.new do |s|
  s.name = "cknife"
  s.version = "0.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib"]
  s.authors = ["Mike De La Loza"]
  s.date = "2014-06-06"
  s.description = "An Amazon Web Services S3 command line tool, and a few other command line tools."
  s.email = "mikedll@mikedll.com"
  # Each executable here must exist under bin/ (see s.files below).
  s.executables = ["cknifeaws", "cknifedub", "cknifenowtimestamp", "cknifewcdir", "cknifezerigo"]
  s.extra_rdoc_files = [
    "LICENSE",
    "README.md"
  ]
  # Files shipped in the built gem.
  s.files = [
    "Gemfile",
    "Gemfile.lock",
    "LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "bin/cknifeaws",
    "bin/cknifedub",
    "bin/cknifenowtimestamp",
    "bin/cknifewcdir",
    "bin/cknifezerigo",
    "cknife.gemspec"
  ]
  s.homepage = "http://github.com/mikedll/cknife"
  s.licenses = ["MIT"]
  s.rubygems_version = "2.2.2"
  s.summary = "Cali Army Knife"

  # Jeweler emits three dependency branches for compatibility with old
  # RubyGems versions; only the first runs on any modern RubyGems.
  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<rest-client>, [">= 0"])
      s.add_runtime_dependency(%q<nokogiri>, [">= 0"])
      s.add_runtime_dependency(%q<i18n>, [">= 0"])
      s.add_runtime_dependency(%q<activesupport>, [">= 0"])
      s.add_runtime_dependency(%q<thor>, [">= 0"])
      s.add_runtime_dependency(%q<builder>, [">= 0"])
      s.add_runtime_dependency(%q<fog>, [">= 1.15.0"])
      s.add_runtime_dependency(%q<unf>, [">= 0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 2.0.1"])
    else
      s.add_dependency(%q<rest-client>, [">= 0"])
      s.add_dependency(%q<nokogiri>, [">= 0"])
      s.add_dependency(%q<i18n>, [">= 0"])
      s.add_dependency(%q<activesupport>, [">= 0"])
      s.add_dependency(%q<thor>, [">= 0"])
      s.add_dependency(%q<builder>, [">= 0"])
      s.add_dependency(%q<fog>, [">= 1.15.0"])
      s.add_dependency(%q<unf>, [">= 0"])
      s.add_dependency(%q<bundler>, ["~> 1.0"])
      s.add_dependency(%q<jeweler>, ["~> 2.0.1"])
    end
  else
    s.add_dependency(%q<rest-client>, [">= 0"])
    s.add_dependency(%q<nokogiri>, [">= 0"])
    s.add_dependency(%q<i18n>, [">= 0"])
    s.add_dependency(%q<activesupport>, [">= 0"])
    s.add_dependency(%q<thor>, [">= 0"])
    s.add_dependency(%q<builder>, [">= 0"])
    s.add_dependency(%q<fog>, [">= 1.15.0"])
    s.add_dependency(%q<unf>, [">= 0"])
    s.add_dependency(%q<bundler>, ["~> 1.0"])
    s.add_dependency(%q<jeweler>, ["~> 2.0.1"])
  end
end
|
80
|
+
|
metadata
ADDED
@@ -0,0 +1,203 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: cknife
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Mike De La Loza
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2014-06-06 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: rest-client
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - '>='
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '0'
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - '>='
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '0'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: nokogiri
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - '>='
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '0'
|
34
|
+
type: :runtime
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - '>='
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '0'
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: i18n
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - '>='
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '0'
|
48
|
+
type: :runtime
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - '>='
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '0'
|
55
|
+
- !ruby/object:Gem::Dependency
|
56
|
+
name: activesupport
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - '>='
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
62
|
+
type: :runtime
|
63
|
+
prerelease: false
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - '>='
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '0'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: thor
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - '>='
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '0'
|
76
|
+
type: :runtime
|
77
|
+
prerelease: false
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - '>='
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '0'
|
83
|
+
- !ruby/object:Gem::Dependency
|
84
|
+
name: builder
|
85
|
+
requirement: !ruby/object:Gem::Requirement
|
86
|
+
requirements:
|
87
|
+
- - '>='
|
88
|
+
- !ruby/object:Gem::Version
|
89
|
+
version: '0'
|
90
|
+
type: :runtime
|
91
|
+
prerelease: false
|
92
|
+
version_requirements: !ruby/object:Gem::Requirement
|
93
|
+
requirements:
|
94
|
+
- - '>='
|
95
|
+
- !ruby/object:Gem::Version
|
96
|
+
version: '0'
|
97
|
+
- !ruby/object:Gem::Dependency
|
98
|
+
name: fog
|
99
|
+
requirement: !ruby/object:Gem::Requirement
|
100
|
+
requirements:
|
101
|
+
- - '>='
|
102
|
+
- !ruby/object:Gem::Version
|
103
|
+
version: 1.15.0
|
104
|
+
type: :runtime
|
105
|
+
prerelease: false
|
106
|
+
version_requirements: !ruby/object:Gem::Requirement
|
107
|
+
requirements:
|
108
|
+
- - '>='
|
109
|
+
- !ruby/object:Gem::Version
|
110
|
+
version: 1.15.0
|
111
|
+
- !ruby/object:Gem::Dependency
|
112
|
+
name: unf
|
113
|
+
requirement: !ruby/object:Gem::Requirement
|
114
|
+
requirements:
|
115
|
+
- - '>='
|
116
|
+
- !ruby/object:Gem::Version
|
117
|
+
version: '0'
|
118
|
+
type: :runtime
|
119
|
+
prerelease: false
|
120
|
+
version_requirements: !ruby/object:Gem::Requirement
|
121
|
+
requirements:
|
122
|
+
- - '>='
|
123
|
+
- !ruby/object:Gem::Version
|
124
|
+
version: '0'
|
125
|
+
- !ruby/object:Gem::Dependency
|
126
|
+
name: bundler
|
127
|
+
requirement: !ruby/object:Gem::Requirement
|
128
|
+
requirements:
|
129
|
+
- - ~>
|
130
|
+
- !ruby/object:Gem::Version
|
131
|
+
version: '1.0'
|
132
|
+
type: :development
|
133
|
+
prerelease: false
|
134
|
+
version_requirements: !ruby/object:Gem::Requirement
|
135
|
+
requirements:
|
136
|
+
- - ~>
|
137
|
+
- !ruby/object:Gem::Version
|
138
|
+
version: '1.0'
|
139
|
+
- !ruby/object:Gem::Dependency
|
140
|
+
name: jeweler
|
141
|
+
requirement: !ruby/object:Gem::Requirement
|
142
|
+
requirements:
|
143
|
+
- - ~>
|
144
|
+
- !ruby/object:Gem::Version
|
145
|
+
version: 2.0.1
|
146
|
+
type: :development
|
147
|
+
prerelease: false
|
148
|
+
version_requirements: !ruby/object:Gem::Requirement
|
149
|
+
requirements:
|
150
|
+
- - ~>
|
151
|
+
- !ruby/object:Gem::Version
|
152
|
+
version: 2.0.1
|
153
|
+
description: An Amazon Web Services S3 command line tool, and a few other command
|
154
|
+
line tools.
|
155
|
+
email: mikedll@mikedll.com
|
156
|
+
executables:
|
157
|
+
- cknifeaws
|
158
|
+
- cknifedub
|
159
|
+
- cknifenowtimestamp
|
160
|
+
- cknifewcdir
|
161
|
+
- cknifezerigo
|
162
|
+
extensions: []
|
163
|
+
extra_rdoc_files:
|
164
|
+
- LICENSE
|
165
|
+
- README.md
|
166
|
+
files:
|
167
|
+
- Gemfile
|
168
|
+
- Gemfile.lock
|
169
|
+
- LICENSE
|
170
|
+
- README.md
|
171
|
+
- Rakefile
|
172
|
+
- VERSION
|
173
|
+
- bin/cknifeaws
|
174
|
+
- bin/cknifedub
|
175
|
+
- bin/cknifenowtimestamp
|
176
|
+
- bin/cknifewcdir
|
177
|
+
- bin/cknifezerigo
|
178
|
+
- cknife.gemspec
|
179
|
+
homepage: http://github.com/mikedll/cknife
|
180
|
+
licenses:
|
181
|
+
- MIT
|
182
|
+
metadata: {}
|
183
|
+
post_install_message:
|
184
|
+
rdoc_options: []
|
185
|
+
require_paths:
|
186
|
+
- lib
|
187
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
188
|
+
requirements:
|
189
|
+
- - '>='
|
190
|
+
- !ruby/object:Gem::Version
|
191
|
+
version: '0'
|
192
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
193
|
+
requirements:
|
194
|
+
- - '>='
|
195
|
+
- !ruby/object:Gem::Version
|
196
|
+
version: '0'
|
197
|
+
requirements: []
|
198
|
+
rubyforge_project:
|
199
|
+
rubygems_version: 2.2.2
|
200
|
+
signing_key:
|
201
|
+
specification_version: 4
|
202
|
+
summary: Cali Army Knife
|
203
|
+
test_files: []
|