fdlcap 0.3.28
- data/LICENSE +20 -0
- data/README.rdoc +72 -0
- data/Rakefile +69 -0
- data/VERSION +1 -0
- data/bin/fdlcap +4 -0
- data/examples/deploy.rb +57 -0
- data/fdlcap.gemspec +98 -0
- data/features/fdlcap.feature +9 -0
- data/features/step_definitions/fdlcap_steps.rb +0 -0
- data/features/support/env.rb +6 -0
- data/lib/fdlcap/extensions/configuration.rb +34 -0
- data/lib/fdlcap/extensions/recipe_definition.rb +19 -0
- data/lib/fdlcap/recipes/autotagger.rb +51 -0
- data/lib/fdlcap/recipes/check_revision.rb +22 -0
- data/lib/fdlcap/recipes/craken.rb +5 -0
- data/lib/fdlcap/recipes/database.rb +204 -0
- data/lib/fdlcap/recipes/delayed_job.rb +28 -0
- data/lib/fdlcap/recipes/deploy.rb +26 -0
- data/lib/fdlcap/recipes/fdl_defaults.rb +16 -0
- data/lib/fdlcap/recipes/geminstaller.rb +53 -0
- data/lib/fdlcap/recipes/newrelic.rb +6 -0
- data/lib/fdlcap/recipes/nginx.rb +102 -0
- data/lib/fdlcap/recipes/passenger.rb +17 -0
- data/lib/fdlcap/recipes/performance.rb +78 -0
- data/lib/fdlcap/recipes/rake.rb +19 -0
- data/lib/fdlcap/recipes/rolling_restart.rb +31 -0
- data/lib/fdlcap/recipes/rsync.rb +42 -0
- data/lib/fdlcap/recipes/ruby_inline.rb +19 -0
- data/lib/fdlcap/recipes/sass.rb +20 -0
- data/lib/fdlcap/recipes/sinatra_passenger.rb +21 -0
- data/lib/fdlcap/recipes/slice.rb +59 -0
- data/lib/fdlcap/recipes/ssh.rb +42 -0
- data/lib/fdlcap/recipes/stages.rb +12 -0
- data/lib/fdlcap/recipes/symlink_configs.rb +7 -0
- data/lib/fdlcap/recipes/symlinks.rb +48 -0
- data/lib/fdlcap/recipes/thin.rb +119 -0
- data/lib/fdlcap/recipes/thinking_sphinx.rb +12 -0
- data/lib/fdlcap/recipes.rb +11 -0
- data/lib/fdlcap/templates/nginx.auth.conf.erb +9 -0
- data/lib/fdlcap/templates/nginx.conf.erb +57 -0
- data/lib/fdlcap/templates/nginx.vhost.conf.erb +85 -0
- data/lib/fdlcap.rb +3 -0
- data/test/fdlcap_test.rb +7 -0
- data/test/test_helper.rb +10 -0
- metadata +140 -0

data/lib/fdlcap/recipes/database.rb
@@ -0,0 +1,204 @@
Capistrano::Configuration.instance(:must_exist).load do
  set(:create_database_on_cold, true)

  set(:copy_compression, :gzip)

  set(:exclude_tables, [])

  # Returns the file extension used for the compression method in
  # question.
  def compression_extension
    case copy_compression
    when :gzip, :gz then "tar.gz"
    when :bzip2, :bz2 then "tar.bz2"
    when :zip then "zip"
    else raise ArgumentError, "invalid compression type #{copy_compression.inspect}"
    end
  end

  # Returns the command necessary to compress the given directory
  # into the given file. The command is returned as an array, where
  # the first element is the utility to be used to perform the compression.
  def compress(directory, file)
    case copy_compression
    when :gzip, :gz then ["tar", "czf", file, directory]
    when :bzip2, :bz2 then ["tar", "cjf", file, directory]
    when :zip then ["zip", "-qr", file, directory]
    else raise ArgumentError, "invalid compression type #{copy_compression.inspect}"
    end
  end

  # Returns the command necessary to decompress the given file,
  # relative to the current working directory. It must also
  # preserve the directory structure in the file. The command is returned
  # as an array, where the first element is the utility to be used to
  # perform the decompression.
  def decompress(file)
    case copy_compression
    when :gzip, :gz then ["tar", "xzf", file]
    when :bzip2, :bz2 then ["tar", "xjf", file]
    when :zip then ["unzip", "-q", file]
    else raise ArgumentError, "invalid compression type #{copy_compression.inspect}"
    end
  end

  class Capistrano::Configuration
    def execute(command, failure_message = "Command failed")
      puts "Executing: #{command}"
      system(command) || raise(failure_message)
    end
  end

  namespace :database do
    desc <<-DESC
      create the production database
    DESC
    task :create, :roles => :db do
      run "cd #{current_path} && rake db:create RAILS_ENV=#{rails_env} --trace"
    end

    desc "Push db remotely"
    task :push_db_remotely, :roles => :db do
      default_character_set = Object.const_defined?(:DEFAULT_CHARACTER_SET) ? Object::DEFAULT_CHARACTER_SET : "utf8"

      local_env = ENV['LOCAL_ENV'] || "development"

      all_db_info = YAML.load(File.read("config/database.yml"))
      local_db_info = all_db_info[local_env]
      remote_db_info = all_db_info[rails_env.to_s]
      raise "Missing database.yml entry for #{local_env}" unless local_db_info
      raise "Missing database.yml entry for #{rails_env.to_s}" unless remote_db_info

      puts %{
        ! WARNING !: The remote database '#{remote_db_info["database"]}'
        will be replaced with the contents of the local database '#{local_db_info["database"]}'.
        A dump of the remote db will be placed in your remote home directory just prior
        to it being replaced.

          1) current REMOTE_DB ===> backed up to dump file, in ~/
          2) LOCAL_DB ===> REMOTE_DB ...old REMOTE_DB contents are overwritten!!!

        Even so, this is a very significant and potentially destructive operation. Please step
        back and contemplate what you're about to do.

        If you're really sure you want to continue, type "REPLACE #{remote_db_info["database"].upcase}":
      }

      if ($stdin.gets.strip != "REPLACE #{remote_db_info["database"].upcase}")
        puts "No action taken, exiting"
        exit(1)
      else
        puts "You confirmed that you want to continue, here we go"
      end

      dump_file_name = "#{local_db_info["database"]}.sql"
      local_dump_file_gz_path = "/tmp/#{dump_file_name}.gz"

      execute "time mysqldump -e -q --single-transaction --default_character_set=#{default_character_set} \
        -u #{local_db_info["username"]} --password=#{local_db_info["password"]} \
        --database #{local_db_info["database"]} | gzip > #{local_dump_file_gz_path}"

      upload "#{local_dump_file_gz_path}", "#{dump_file_name}.gz", :via => :scp

      execute "echo ^G^G^G^G^G"

      run "gzip -df ~/#{dump_file_name}.gz"
      run "perl -pi -e 's|#{local_db_info["database"]}|#{remote_db_info["database"]}|g' ~/#{dump_file_name}"

      run "time mysqldump -e -q --single-transaction --default_character_set=#{default_character_set} \
        -u #{remote_db_info["username"]} --password=#{remote_db_info["password"]} \
        --database #{remote_db_info["database"]} | gzip > ~/#{remote_db_info["database"]}_#{Time.now.strftime("%Y-%m-%d_%H-%M-%S")}.sql.gz"

      remote_host = remote_db_info["host"] || "localhost"

      run "mysqladmin -u #{remote_db_info["username"]} --password=#{remote_db_info["password"]} -h #{remote_host} drop #{remote_db_info["database"]} -f"
      run "mysqladmin -u #{remote_db_info["username"]} --password=#{remote_db_info["password"]} -h #{remote_host} create #{remote_db_info["database"]} --default_character_set=#{default_character_set}"
      run "time mysql -u #{remote_db_info["username"]} --password=#{remote_db_info["password"]} -h #{remote_host} --database #{remote_db_info["database"]} --default_character_set=#{default_character_set} < ~/#{dump_file_name}"
      run "rm ~/#{dump_file_name}"
    end

    # ganked from pivotal
    desc "Pull db locally"
    task :pull_db_locally, :roles => :db do
      all_db_info = YAML.load(File.read("config/database.yml"))
      db_info = all_db_info[rails_env.to_s]
      raise "Missing database.yml entry for #{rails_env.to_s}" unless db_info

      database = db_info["database"]
      tables = ENV['TABLES'] ? ENV['TABLES'].split(',').join(' ') : ''
      dump_file = "/tmp/#{database}.sql"

      # look to see if a mysqldump process is already running
      run "ps aux | grep mysqldump | wc -l" do |channel, stream, data|
        if data.strip.to_i > 2
          puts "It appears that mysqldump is already running on the server - is another pull task being run? Aborting to avoid clobbering the dumpfile."
          exit 1
        end
      end

      db_dump_found = false
      run "if [ -f #{dump_file}.gz ]; then echo exists; else echo not_found; fi" do |channel, stream, data|
        puts "Result: #{channel[:server]} -> #{dump_file}.gz ( #{data} )"
        db_dump_found = (data.strip == 'exists')
        break if stream == :err
      end

      db_host = db_info["host"]
      host_arg = db_host ? " -h #{db_host}" : ""
      db_dump_cmd = "mysqldump -e -q --single-transaction \
        -u #{db_info["username"]} --password=#{db_info["password"]} \
        #{host_arg} #{db_info["database"]} #{tables} | gzip > #{dump_file}.gz"

      if db_dump_found
        run "ls -l #{dump_file}.gz"
        puts %{
          ! INFO !: The remote database dump '#{dump_file}.gz' already exists

          If you would like to use the existing database dump type "USE EXISTING":
        }

        if ($stdin.gets.strip != "USE EXISTING")
          puts "Ignoring existing file and regenerating #{dump_file}.gz"
          run db_dump_cmd
        else
          puts "You confirmed that you want to use the existing file [#{dump_file}.gz], here we go"
        end
      else
        run db_dump_cmd
      end

      get "#{dump_file}.gz", "#{dump_file}.gz"
      run "rm #{dump_file}.gz" unless ENV['LEAVE_ON_SERVER']

      target_env = ENV['LOCAL_ENV'] || "development"
      target_db_info = all_db_info[target_env]
      target_db_login = "-u #{target_db_info["username"]} --password=#{target_db_info["password"]}"
      target_db_login += " -h #{target_db_info["host"]}" if target_db_info["host"]

      gunzip_cmd = "gunzip -c #{dump_file}.gz"
      sed_cmd = "sed 's/#{db_info["database"]}/#{target_db_info["database"]}/g' > #{dump_file}"
      execute("#{gunzip_cmd} | #{sed_cmd}", "gunzip/sed of #{dump_file}.gz failed")
      unless ENV['TABLES']
        execute("mysqladmin #{target_db_login} drop #{target_db_info["database"]} -f", "mysqladmin drop failed")
        execute("mysqladmin #{target_db_login} create #{target_db_info["database"]}", "mysqladmin create failed")
      end
      execute("mysql #{target_db_login} --database #{target_db_info["database"]} < #{dump_file}", "mysql import failed")
      execute("rake db:migrate RAILS_ENV=#{target_env}", "migrate failed")
      execute("rm #{dump_file}", "rm of local unzipped #{dump_file} failed")
    end
  end

  namespace :deploy do
    task :cold do
      update
      database.create if create_database_on_cold
      migrate
      start
    end
  end
end
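
The settings and helpers defined at the top of this recipe are ordinary Capistrano variables, so a project can override them from its own deploy file; note that :exclude_tables is declared here but not referenced by the tasks in this file. Below is a minimal, hypothetical sketch of the project side; the require path and every value are assumptions, not taken from the gem's bundled examples/deploy.rb.

# config/deploy.rb -- hypothetical project-side configuration for the database recipe
require 'fdlcap'                    # assumed entry point that loads the bundled recipes

set :application, "myapp"           # placeholder values
set :rails_env,   "production"

set :copy_compression, :bzip2       # compression_extension would then return "tar.bz2"
set :exclude_tables,   %w(sessions) # declared by the recipe, though unused by these tasks as written

# Shell usage (not Ruby):
#   cap database:pull_db_locally LOCAL_ENV=development TABLES=users,posts
#   cap database:push_db_remotely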

data/lib/fdlcap/recipes/delayed_job.rb
@@ -0,0 +1,28 @@
Capistrano::Configuration.instance(:must_exist).load do
  define_recipe :delayed_job do |*args|
    options = args.empty? ? {} : args.first
    prefix = options[:prefix] || 'dj'
    set :dj_monit_prefix, prefix unless exists?(:dj_monit_prefix)

    namespace :delayed_job do
      desc "Start delayed_job"
      task :start, :only => { :delayed_job => true } do
        sudo "/usr/bin/monit start all -g #{dj_monit_prefix}_#{application}"
      end

      desc "Stop delayed_job"
      task :stop, :only => { :delayed_job => true } do
        sudo "/usr/bin/monit stop all -g #{dj_monit_prefix}_#{application}"
      end

      desc "Restart delayed_job"
      task :restart, :only => { :delayed_job => true } do
        sudo "/usr/bin/monit restart all -g #{dj_monit_prefix}_#{application}"
      end
    end

    after "deploy:restart", "delayed_job:restart"
    after "deploy:start", "delayed_job:start"
    after "deploy:stop", "delayed_job:stop"
  end
end
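
Because the recipe block takes |*args| and reads options[:prefix], a project presumably opts in with use_recipe and passes the prefix through; whether use_recipe forwards extra arguments is an assumption here, since its definition lives in extensions/recipe_definition.rb and is not shown. The role names below are invented for illustration.

# config/deploy.rb -- hypothetical opt-in to the delayed_job recipe
use_recipe :delayed_job, :prefix => 'worker'   # monit group becomes worker_<application>; default prefix is 'dj'

set :application, "myapp"

# The tasks are scoped :only => { :delayed_job => true }, so only flagged servers
# receive the monit start/stop/restart calls hooked after deploy:start/stop/restart.
role :app, "app1.example.com"
role :app, "jobs1.example.com", :delayed_job => true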

data/lib/fdlcap/recipes/deploy.rb
@@ -0,0 +1,26 @@
Capistrano::Configuration.instance(:must_exist).load do

  namespace :web do
    task :disable, :roles => :web, :except => { :no_release => true } do
      on_rollback { run "rm #{shared_path}/system/maintenance.html" }
      run "cp #{current_path}/public/maintenance.html #{shared_path}/system/maintenance.html"
    end
  end

  namespace :deploy do
    desc "Pull files from a remote server"
    task :download_file, :roles => :app, :except => { :no_release => true } do
      ENV['FILES'].split(',').each do |file|
        get "#{current_path}/#{file}", File.basename(file)
      end
    end
  end

  # Clean up old releases
  define_recipe :perform_cleanup do
    after "deploy", "deploy:cleanup"
    after "deploy:migrations", "deploy:cleanup"
    after "deploy:long", "deploy:cleanup"
  end

end

data/lib/fdlcap/recipes/fdl_defaults.rb
@@ -0,0 +1,16 @@
Capistrano::Configuration.instance(:must_exist).load do

  define_recipe :fdl_defaults do
    # defaults for fdl
    set :scm, :git
    set :keep_releases, 5
    set :deploy_via, :remote_cache

    use_recipe :symlinks
    use_recipe :geminstaller
    use_recipe :perform_cleanup
    use_recipe :symlink_configs
  end

end
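
Since fdl_defaults is itself just a recipe that sets a few values and pulls in the other recipes, enabling the whole default stack from a project is presumably a one-liner. A sketch, with the require path as an assumption:

# config/deploy.rb -- hypothetical one-step opt-in to the fdl defaults
require 'fdlcap'          # assumed entry point

use_recipe :fdl_defaults  # sets :scm, :keep_releases and :deploy_via, then pulls in
                          # :symlinks, :geminstaller, :perform_cleanup and :symlink_configs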

data/lib/fdlcap/recipes/geminstaller.rb
@@ -0,0 +1,53 @@
Capistrano::Configuration.instance(:must_exist).load do
  define_recipe :geminstaller do
    #
    # Tasks
    #
    namespace :geminstaller do
      desc <<-DESC
        install geminstaller
      DESC
      task :install, :only => { :geminstaller => true } do
        as = fetch(:runner, "app")
        via = fetch(:run_method, :sudo)
        invoke_command "gem install geminstaller", :via => via, :as => as
        invoke_command "gem source -a http://gems.github.com", :via => via, :as => as
        invoke_command "gem source -a http://gemcutter.org", :via => via, :as => as
      end

      desc <<-DESC
        run geminstaller rake task to install gems on the server
      DESC
      task :run, :only => { :geminstaller => true } do
        as = fetch(:runner, "app")
        via = fetch(:run_method, :sudo)
        use_geminstaller_sudo = fetch(:geminstaller_sudo, false)
        invoke_command "if ! gem source | grep -q 'http://gemcutter.org' ; then gem source -a 'http://gemcutter.org'; fi", :via => via, :as => as
        invoke_command "if ! gem source | grep -q 'http://gems.github.com' ; then gem source -a 'http://gems.github.com'; fi", :via => via, :as => as
        invoke_command "/usr/bin/geminstaller #{use_geminstaller_sudo ? '-s' : ''} -c #{current_path}/config/geminstaller.yml --geminstaller-output=all --rubygems-output=all", :via => via, :as => as
      end

      desc <<-DESC
        add geminstaller config to list of remote dependencies.
      DESC
      task :add_remote_gem_dependencies, :only => { :geminstaller => true } do
        CONFIG_PATH = File.join('config', 'geminstaller.yml')
        if File.exists?(CONFIG_PATH)
          gems = YAML.load(ERB.new(File.read(CONFIG_PATH)).result)['gems']
          gems.each do |gem|
            depend :remote, :gem, gem['name'], gem['version']
          end
        end
      end
    end

    #
    # Callbacks
    #
    before "deploy:check", "geminstaller:add_remote_gem_dependencies"
    after "deploy:setup", "geminstaller:install"
    after "geminstaller:install", "geminstaller:run"
    after "deploy:update", "geminstaller:run"
  end

end
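
All three geminstaller tasks are scoped with :only => { :geminstaller => true } and read config/geminstaller.yml from the application, so a project has to flag the relevant servers and may toggle :geminstaller_sudo. A hedged sketch; hostnames are placeholders:

# config/deploy.rb -- hypothetical geminstaller setup
use_recipe :geminstaller          # already included if you use :fdl_defaults

set :geminstaller_sudo, true      # passes -s to /usr/bin/geminstaller (the fetch default is false)

# Only servers flagged :geminstaller => true run geminstaller:install / geminstaller:run,
# which are hooked after deploy:setup and deploy:update above.
role :app, "app1.example.com", :geminstaller => true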

data/lib/fdlcap/recipes/nginx.rb
@@ -0,0 +1,102 @@
Capistrano::Configuration.instance(:must_exist).load do

  define_recipe :nginx do
    #
    # Configuration
    #

    # global vars
    set :can_configure_nginx, true
    set :nginx_user, 'nginx'
    set :nginx_processes, 4
    set :nginx_gzip_on, true
    set :nginx_gzip_xml_on, false

    # app specific vars
    set :nginx_server_names, "_"
    set :nginx_far_future, false
    set :nginx_default_app, true

    # http auth vars
    set :nginx_auth_ip_masks, ['192.168.0.0/254']
    set :nginx_http_auth_app, false
    set :nginx_auth_locations, []
    set :nginx_http_auth_users, []

    #
    # Tasks
    #

    namespace :nginx do

      %w( start stop restart reload ).each do |cmd|
        desc "#{cmd} your nginx servers"
        task cmd.to_sym, :roles => :app do
          default_run_options[:pty] = true
          sudo "nohup /etc/init.d/nginx #{cmd} > /dev/null"
        end
      end

      desc "Setup Nginx vhost config"
      task :vhost, :roles => :web do
        result = render_erb_template(File.join(File.dirname(__FILE__), '..', 'templates', 'nginx.vhost.conf.erb'))
        put result, "/tmp/nginx.vhost.conf"
        sudo "mkdir -p /etc/nginx/vhosts"
        sudo "cp /tmp/nginx.vhost.conf /etc/nginx/vhosts/#{application}.conf"
        inform "You must edit nginx.conf to include the vhost config file."
      end

      desc "Setup Nginx vhost auth config"
      task :vhost_auth, :roles => :web do
        result = render_erb_template(File.join(File.dirname(__FILE__), '..', 'templates', 'nginx.auth.conf.erb'))
        put result, "/tmp/nginx.auth.conf"
        sudo "mkdir -p /etc/nginx/vhosts"
        sudo "cp /tmp/nginx.auth.conf /etc/nginx/vhosts/#{application}.auth.conf"
      end

      desc "Setup htpasswd file for nginx auth"
      task :create_htpasswd, :roles => :web do
        sudo "mkdir -p /etc/nginx/conf"
        for user in nginx_http_auth_users
          run "cd /etc/nginx/conf && htpasswd -b htpasswd #{user['name']} #{user['password']}"
          # run <<-CMD
          #   cd /etc/nginx/conf;
          #   if [ ! -e /etc/nginx/conf/htpasswd ] ; then
          #     htpasswd -b -c htpasswd #{user['name']} #{user['password']};
          #   else
          #     htpasswd -b htpasswd #{user['name']} #{user['password']};
          #   fi
          # CMD
        end
      end

      desc "Setup nginx.conf"
      task :conf, :roles => :web do
        if can_configure_nginx
          result = render_erb_template(File.join(File.dirname(__FILE__), '..', 'templates', 'nginx.conf.erb'))
          put result, "/tmp/nginx.conf"
          sudo "cp /tmp/nginx.conf /etc/nginx/nginx.conf"
        else
          inform "Nginx configuration tasks have been disabled. Most likely you are deploying to EngineYard, which has its own nginx conf setup."
        end
      end

      desc "Setup Nginx vhost config and nginx.conf"
      task :configure, :roles => :web do
        if can_configure_nginx
          conf
          vhost
          vhost_auth if nginx_auth_locations.length > 0 || nginx_http_auth_app
          create_htpasswd if nginx_http_auth_users.length > 0
        else
          inform "Nginx configuration tasks have been disabled. Most likely you are deploying to EngineYard, which has its own nginx conf setup."
        end
      end

    end
  end
end
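
The recipe's set calls only provide defaults, so a project would normally override them and then run nginx:configure. The ordering below assumes the defaults are applied when use_recipe :nginx runs, so the project overrides come afterwards; hostnames and credentials are placeholders:

# config/deploy.rb -- hypothetical nginx recipe configuration
use_recipe :nginx

set :nginx_server_names, "example.com www.example.com"   # replaces the "_" catch-all
set :nginx_processes,    2
set :nginx_http_auth_app, true
set :nginx_http_auth_users, [{ 'name' => 'staging', 'password' => 'secret' }]  # string keys, as read by create_htpasswd

# Shell usage (not Ruby):
#   cap nginx:configure   # renders nginx.conf and the vhost, plus the auth config/htpasswd when enabled
#   cap nginx:reload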

data/lib/fdlcap/recipes/passenger.rb
@@ -0,0 +1,17 @@
Capistrano::Configuration.instance(:must_exist).load do
  define_recipe :passenger do
    namespace :deploy do
      task :start, :roles => :app do
        run "touch #{current_release}/tmp/restart.txt"
      end

      task :stop, :roles => :app do
        # Do nothing.
      end

      task :restart, :roles => :app do
        run "touch #{current_release}/tmp/restart.txt"
      end
    end
  end
end

data/lib/fdlcap/recipes/performance.rb
@@ -0,0 +1,78 @@
Capistrano::Configuration.instance(:must_exist).load do
  namespace :autoperf do
    desc "get nginx log for load test"
    task :fetch_log, :roles => :app do
      # Grab the last 1000 requests from the production Nginx log, and extract the request path (ex: /index)
      run "tail -n 1000 /var/log/nginx/#{application}.access.log | awk '{print $7}' > /tmp/requests.log"

      # Replace newlines with the null terminator (httperf format)
      run 'tr "\n" "\0" < /tmp/requests.log > /tmp/requests_httperf.log'

      download "/tmp/requests_httperf.log", "config/autoperf/requests_httperf.log", :via => :scp

      run "rm /tmp/requests_httperf.log"
    end

    task :run_test, :roles => :app do
      run "cd #{current_path} && script/autoperf -c config/autoperf/primary.conf" do |channel, stream, data|
        puts data if stream == :out
        if stream == :err
          puts "[Error: #{channel[:host]}] #{data}"
          break
        end
      end
    end
  end

  namespace :autobench do

    task :run_test, :roles => :app do
      options = ENV['OPTIONS'] || "--low_rate=1 --high_rate=20 --rate_step=2 --num_call=2 --num_conn=200"
      run "/usr/local/bin/autobench --single_host --host1=#{ENV['HOST']} --uri1=#{ENV['URL']} --file=/tmp/test.bench.txt #{options}"
      download "/tmp/test.bench.txt", "autobench/test.bench.txt", :via => :scp
      run "rm /tmp/test.bench.txt"
    end

  end

  namespace :install do

    task :setup do
      sudo "rm -rf src"
      run "mkdir -p src"
    end

    desc "Install httperf"
    task :httperf do
      setup

      cmd = [
        "cd src",
        "wget ftp://ftp.hpl.hp.com/pub/httperf/httperf-0.9.0.tar.gz",
        "tar xfz httperf-0.9.0.tar.gz",
        "cd httperf-0.9.0",
        "./configure --prefix=/usr/local",
        "make",
        "sudo make install"
      ].join(' && ')
      run cmd
      run 'rm src/httperf-0.9.0.tar.gz'
    end

    task :autobench do
      setup

      cmd = [
        "cd src",
        "wget http://www.xenoclast.org/autobench/downloads/autobench-2.1.2.tar.gz",
        "tar xfz autobench-2.1.2.tar.gz",
        "cd autobench-2.1.2",
        "make",
        "sudo make install"
      ].join(' && ')
      run cmd
      run 'rm src/autobench-2.1.2.tar.gz'
    end
  end

end

data/lib/fdlcap/recipes/rake.rb
@@ -0,0 +1,19 @@
Capistrano::Configuration.instance(:must_exist).load do
  desc "Execute an arbitrary rake task on slices with a specified role (ROLES=x,y,z TASK=p)"
  task :rake do
    task = ENV['TASK']
    run "cd #{current_path} && rake #{task} RAILS_ENV=#{rails_env}"
  end

  desc "Execute an arbitrary runner command on slices with a specified role (ROLES=x,y,z CMD=p)"
  task :runner do
    cmd = ENV['CMD']
    run "cd #{current_path} && script/runner -e #{rails_env} '#{cmd}'"
  end

  desc "Execute an arbitrary UNIX command on slices with a specified role (ROLES=x,y,z CMD=p)"
  task :command do
    cmd = ENV['CMD']
    run "cd #{current_path} && #{cmd}"
  end
end

data/lib/fdlcap/recipes/rolling_restart.rb
@@ -0,0 +1,31 @@
Capistrano::Configuration.instance(:must_exist).load do
  define_recipe :rolling_restart do

    # Define the rolling_restart task for mongrel
    namespace :mongrel do
      desc <<-DESC
        Do a rolling restart of mongrels, one app server at a time.
      DESC
      task :rolling_restart do
        find_servers(:roles => :app).each do |server|
          ENV['HOSTS'] = "#{server.host}:#{server.port}"
          nginx.stop
          puts "Waiting 10 seconds for mongrels to finish processing on #{ENV['HOSTS']}."
          sleep 10
          mongrel.restart
          puts "Waiting 30 seconds for mongrels to come back up on #{ENV['HOSTS']}."
          sleep 30
          nginx.start
        end
      end
    end

    # Use a rolling restart by default. Theoretically, if we're doing migrations we should be using deploy:long anyway.
    namespace :deploy do
      task :restart do
        mongrel.rolling_restart
      end
    end

  end
end

data/lib/fdlcap/recipes/rsync.rb
@@ -0,0 +1,42 @@
class Capistrano::Configuration
  def execute(command, failure_message = "Command failed")
    puts "Executing: #{command}"
    system(command) || raise(failure_message)
  end
end

Capistrano::Configuration.instance(:must_exist).load do
  namespace :rsync do
    desc <<-DESC
      use rsync to sync assets locally or between servers
    DESC
    task :pull_shared, :roles => :app do
      servers = find_servers :roles => :app, :except => { :no_release => true }
      server = servers.first
      if server
        symlink_dirs.each do |share|
          `echo '#{password}' | /usr/bin/pbcopy`
          execute("rsync -P -a -h -e 'ssh -p #{server.port || 22}' #{user}@#{server.host}:#{shared_path}/#{share}/* #{share}", "unable to rsync files")
        end
      else
        puts 'no server found'
      end
    end

    desc <<-DESC
      use rsync to sync assets locally or between servers
    DESC
    task :push_shared, :roles => :app do
      servers = find_servers :roles => :app, :except => { :no_release => true }
      server = servers.first
      if server
        symlink_dirs.each do |share|
          `echo '#{password}' | /usr/bin/pbcopy`
          execute("rsync -P -a -h -e 'ssh -p #{server.port || 22}' #{share}/* #{user}@#{server.host}:#{shared_path}/#{share}/", "unable to rsync files")
        end
      else
        puts 'no server found'
      end
    end
  end
end
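
Both tasks iterate over symlink_dirs, which is expected to be provided elsewhere (presumably by the symlinks recipe, not shown here), and they shell out to /usr/bin/pbcopy, so they assume a macOS workstation. A sketch of the project side under those assumptions:

# config/deploy.rb -- hypothetical setup for rsync:pull_shared / rsync:push_shared
set :symlink_dirs, %w(system uploads)  # shared/<dir> paths to sync; the symlinks recipe (not shown) may already define this
set :user, "deploy"                    # interpolated into the rsync ssh target

# Shell usage from a macOS workstation (the tasks copy :password to the clipboard via pbcopy):
#   cap rsync:pull_shared
#   cap rsync:push_shared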

data/lib/fdlcap/recipes/ruby_inline.rb
@@ -0,0 +1,19 @@
Capistrano::Configuration.instance(:must_exist).load do
  define_recipe :ruby_inline do

    #
    # Tasks
    #
    task :ruby_inline, :roles => :app, :except => { :no_release => true, :no_symlink => true } do
      run "mkdir -p #{release_path}/tmp/ruby_inline/"
      run "mkdir -p #{release_path}/tmp/ruby_inline/.ruby_inline"
      sudo "chmod 755 #{release_path}/tmp/ruby_inline/"
      sudo "chmod 755 #{release_path}/tmp/ruby_inline/.ruby_inline"
    end

    #
    # Callbacks
    #
    after "deploy:symlink_configs", "ruby_inline"
  end
end

data/lib/fdlcap/recipes/sass.rb
@@ -0,0 +1,20 @@
Capistrano::Configuration.instance(:must_exist).load do
  define_recipe :sass do
    #
    # Tasks
    #
    namespace :sass do
      desc 'Updates the stylesheets generated by Sass'
      task :update, :roles => :app, :except => { :no_release => true } do
        invoke_command "cd #{latest_release}; RAILS_ENV=#{rails_env} rake sass:update"
      end
    end

    #
    # Callbacks
    #

    # Generate all the stylesheets manually (from their Sass templates) before each restart.
    before 'deploy:restart', 'sass:update'
  end
end