backup 3.0.21 → 3.0.22
- data/Gemfile.lock +3 -1
- data/README.md +4 -3
- data/lib/backup.rb +8 -4
- data/lib/backup/config.rb +1 -1
- data/lib/backup/configuration/syncer/cloud.rb +23 -0
- data/lib/backup/configuration/syncer/cloud_files.rb +30 -0
- data/lib/backup/configuration/syncer/s3.rb +5 -11
- data/lib/backup/dependency.rb +6 -0
- data/lib/backup/notifier/twitter.rb +1 -1
- data/lib/backup/syncer/base.rb +25 -0
- data/lib/backup/syncer/cloud.rb +187 -0
- data/lib/backup/syncer/cloud_files.rb +56 -0
- data/lib/backup/syncer/rsync/base.rb +0 -26
- data/lib/backup/syncer/s3.rb +21 -102
- data/lib/backup/version.rb +1 -1
- data/spec/cli/utility_spec.rb +2 -2
- data/spec/configuration/syncer/cloud_files_spec.rb +44 -0
- data/spec/configuration/syncer/s3_spec.rb +0 -4
- data/spec/notifier/twitter_spec.rb +3 -3
- data/spec/syncer/cloud_files_spec.rb +192 -0
- data/spec/syncer/s3_spec.rb +155 -191
- data/templates/cli/utility/archive +20 -8
- data/templates/cli/utility/database/mongodb +3 -3
- data/templates/cli/utility/database/mysql +4 -4
- data/templates/cli/utility/database/postgresql +4 -4
- data/templates/cli/utility/database/redis +1 -1
- data/templates/cli/utility/encryptor/openssl +2 -2
- data/templates/cli/utility/notifier/campfire +3 -3
- data/templates/cli/utility/notifier/hipchat +6 -6
- data/templates/cli/utility/notifier/mail +7 -7
- data/templates/cli/utility/notifier/presently +4 -4
- data/templates/cli/utility/notifier/prowl +2 -2
- data/templates/cli/utility/notifier/twitter +4 -4
- data/templates/cli/utility/storage/cloud_files +22 -0
- data/templates/cli/utility/storage/dropbox +15 -10
- data/templates/cli/utility/storage/ftp +4 -4
- data/templates/cli/utility/storage/local +1 -1
- data/templates/cli/utility/storage/ninefold +3 -3
- data/templates/cli/utility/storage/rsync +4 -4
- data/templates/cli/utility/storage/s3 +6 -6
- data/templates/cli/utility/storage/scp +4 -4
- data/templates/cli/utility/storage/sftp +4 -4
- data/templates/cli/utility/syncer/cloud_files +48 -0
- data/templates/cli/utility/syncer/s3 +31 -1
- metadata +69 -39
- data/templates/cli/utility/storage/cloudfiles +0 -12
data/lib/backup/syncer/rsync/base.rb
CHANGED
@@ -4,19 +4,6 @@ module Backup
   module Syncer
     module RSync
       class Base < Syncer::Base
-
-        ##
-        # Path to store the synced files/directories to
-        attr_accessor :path
-
-        ##
-        # Directories to sync
-        attr_writer :directories
-
-        ##
-        # Flag for mirroring the files/directories
-        attr_accessor :mirror
-
         ##
         # Additional options for the rsync cli
         attr_accessor :additional_options
@@ -33,19 +20,6 @@ module Backup
           @additional_options ||= Array.new
         end
 
-        ##
-        # Syntactical suger for the DSL for adding directories
-        def directories(&block)
-          return @directories unless block_given?
-          instance_eval(&block)
-        end
-
-        ##
-        # Adds a path to the @directories array
-        def add(path)
-          @directories << path
-        end
-
         private
 
         ##
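This hunk removes the path, directories, and mirror accessors and the directories/add DSL helpers from Syncer::RSync::Base; per the file list above they now live in the shared data/lib/backup/syncer/base.rb. For reference, a minimal sketch of how that DSL reads in a Backup model, assuming the usual sync_with block from the generated templates (the syncer name and sample paths are illustrative, not taken from this diff):

  sync_with RSync::Local do |rsync|
    # path, mirror and the directories DSL now come from Syncer::Base
    rsync.path   = '~/backups'
    rsync.mirror = true

    rsync.directories do
      # the block is instance_eval'd, so add is called directly
      add '/var/apps/my_app/public/uploads'
    end
  end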
data/lib/backup/syncer/s3.rb
CHANGED
@@ -2,127 +2,46 @@
 
 module Backup
   module Syncer
-    class S3 <
+    class S3 < Cloud
 
       ##
       # Amazon Simple Storage Service (S3) Credentials
       attr_accessor :access_key_id, :secret_access_key
 
       ##
-      #
-      attr_accessor :bucket
+      # The S3 bucket to store files to
+      attr_accessor :bucket
 
       ##
-      #
-
-
-      ##
-      # Flag to enable mirroring
-      attr_accessor :mirror
-
-      ##
-      # Additional options for the s3sync cli
-      attr_accessor :additional_options
-
-      ##
-      # Instantiates a new S3 Syncer object and sets the default configuration
-      # specified in the Backup::Configuration::Syncer::S3.
-      # Then it sets the object defaults if particular properties weren't set.
-      # Finally it'll evaluate the users configuration file and overwrite
-      # anything that's been defined
-      def initialize(&block)
-        load_defaults!
-
-        @path ||= 'backups'
-        @directories = Array.new
-        @mirror ||= false
-        @additional_options ||= []
-
-        instance_eval(&block) if block_given?
-      end
-
-      ##
-      # Sets the Amazon S3 credentials for S3Sync, performs the S3Sync
-      # operation, then unsets the credentials (back to nil values)
-      def perform!
-        set_environment_variables!
-
-        @directories.each do |directory|
-          Logger.message("#{ syncer_name } started syncing '#{ directory }'.")
-          Logger.silent(
-            run("#{ utility(:s3sync) } #{ options } " +
-                "'#{ File.expand_path(directory) }' '#{ bucket }:#{ dest_path }'")
-          )
-        end
-
-        unset_environment_variables!
-      end
-
-      ##
-      # Syntactical suger for the DSL for adding directories
-      def directories(&block)
-        return @directories unless block_given?
-        instance_eval(&block)
-      end
-
-      ##
-      # Adds a path to the @directories array
-      def add(path)
-        @directories << path
-      end
+      # The AWS region of the specified S3 bucket
+      attr_accessor :region
 
       private
 
       ##
-      #
-      def
-      @
-
-
-
-
-
-      def options
-        ([verbose_option, recursive_option, mirror_option] +
-         additional_options).compact.join("\s")
-      end
-
-      ##
-      # Returns S3Sync syntax for enabling mirroring
-      def mirror_option
-        '--delete' if @mirror
-      end
-
-      ##
-      # Returns S3Sync syntax for syncing recursively
-      def recursive_option
-        '--recursive'
+      # Established and creates a new Fog storage object for S3.
+      def connection
+        @connection ||= Fog::Storage.new(
+          :provider => provider,
+          :aws_access_key_id => access_key_id,
+          :aws_secret_access_key => secret_access_key,
+          :region => region
+        )
       end
 
       ##
-      #
-
-
+      # Creates a new @repository_object (bucket). Fetches it from S3
+      # if it already exists, otherwise it will create it first and fetch use that instead.
+      def repository_object
+        @repository_object ||= connection.directories.get(bucket) ||
+          connection.directories.create(:key => bucket, :location => region)
       end
 
       ##
-      #
-
-
-      def set_environment_variables!
-        ENV['AWS_ACCESS_KEY_ID'] = access_key_id
-        ENV['AWS_SECRET_ACCESS_KEY'] = secret_access_key
-        ENV['AWS_CALLING_FORMAT'] = 'SUBDOMAIN'
+      # This is the provider that Fog uses for the Cloud Files
+      def provider
+        "AWS"
       end
-
-      ##
-      # Sets the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY back to nil
-      def unset_environment_variables!
-        ENV['AWS_ACCESS_KEY_ID'] = nil
-        ENV['AWS_SECRET_ACCESS_KEY'] = nil
-        ENV['AWS_CALLING_FORMAT'] = nil
-      end
-
     end
   end
 end
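With this refactor the S3 syncer subclasses the new Cloud base and opens a Fog::Storage connection (provider "AWS" plus a region) instead of exporting AWS_* environment variables and shelling out to s3sync. A rough sketch of the resulting configuration block, using only the accessors visible in this diff together with the shared path/mirror/directories settings; the sync_with form and the sample values are illustrative rather than copied from this release:

  sync_with S3 do |s3|
    s3.access_key_id     = 'my_access_key_id'
    s3.secret_access_key = 'my_secret_access_key'
    s3.bucket            = 'my-bucket'
    s3.region            = 'us-east-1'   # used for both the Fog connection and bucket creation
    s3.path              = 'backups'
    s3.mirror            = true

    s3.directories do
      add '/var/apps/my_app/public/uploads'
    end
  end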
data/lib/backup/version.rb
CHANGED
data/spec/cli/utility_spec.rb
CHANGED
@@ -187,8 +187,8 @@ describe 'Backup::CLI::Utility' do
       --trigger=TRIGGER
       [--config-path=CONFIG_PATH]  # Path to your Backup configuration directory
       [--databases=DATABASES]      # (mongodb, mysql, postgresql, redis, riak)
-      [--storages=STORAGES]        # (
-      [--syncers=SYNCERS]          # (rsync_local, rsync_pull, rsync_push, s3)
+      [--storages=STORAGES]        # (cloud_files, dropbox, ftp, local, ninefold, rsync, s3, scp, sftp)
+      [--syncers=SYNCERS]          # (cloud_files, rsync_local, rsync_pull, rsync_push, s3)
       [--encryptors=ENCRYPTORS]    # (gpg, openssl)
       [--compressors=COMPRESSORS]  # (bzip2, gzip, lzma, pbzip2)
       [--notifiers=NOTIFIERS]      # (campfire, hipchat, mail, presently, prowl, twitter)
data/spec/configuration/syncer/cloud_files_spec.rb
ADDED
@@ -0,0 +1,44 @@
+# encoding: utf-8
+
+require File.expand_path('../../../spec_helper.rb', __FILE__)
+
+describe Backup::Configuration::Syncer::CloudFiles do
+  before do
+    Backup::Configuration::Syncer::CloudFiles.defaults do |cf|
+      cf.username   = 'my-username'
+      cf.api_key    = 'my-api-key'
+      cf.container  = 'my-container'
+      cf.auth_url   = 'my-auth-url'
+      cf.servicenet = true
+      cf.path       = '/backups/'
+      cf.mirror     = true
+    end
+  end
+  after { Backup::Configuration::Syncer::CloudFiles.clear_defaults! }
+
+  it 'should set the default cloud files configuration' do
+    cf = Backup::Configuration::Syncer::CloudFiles
+    cf.username.should   == 'my-username'
+    cf.api_key.should    == 'my-api-key'
+    cf.container.should  == 'my-container'
+    cf.auth_url.should   == 'my-auth-url'
+    cf.servicenet.should == true
+    cf.path.should       == '/backups/'
+    cf.mirror.should     == true
+  end
+
+  describe '#clear_defaults!' do
+    it 'should clear all the defaults, resetting them to nil' do
+      Backup::Configuration::Syncer::CloudFiles.clear_defaults!
+
+      cf = Backup::Configuration::Syncer::CloudFiles
+      cf.username.should   == nil
+      cf.api_key.should    == nil
+      cf.container.should  == nil
+      cf.auth_url.should   == nil
+      cf.servicenet.should == nil
+      cf.path.should       == nil
+      cf.mirror.should     == nil
+    end
+  end
+end
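The defaults exercised above cover the full setting surface of the new Cloud Files syncer (Rackspace credentials, container, auth URL, ServiceNet toggle, remote path and mirroring). A minimal model-level sketch built from those same settings; the sync_with form and the values are illustrative, not taken from this release:

  sync_with CloudFiles do |cf|
    cf.username   = 'my-username'
    cf.api_key    = 'my-api-key'
    cf.container  = 'my-container'
    cf.auth_url   = 'my-auth-url'
    cf.servicenet = true          # route traffic over Rackspace ServiceNet
    cf.path       = '/backups/'
    cf.mirror     = true

    cf.directories do
      add '/var/apps/my_app/public/uploads'
    end
  end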
data/spec/configuration/syncer/s3_spec.rb
CHANGED
@@ -9,9 +9,7 @@ describe Backup::Configuration::Syncer::S3 do
       s3.secret_access_key = 'my_secret_access_key'
       s3.bucket = 'my-bucket'
       s3.path = '/backups/'
-      #s3.directories = 'cannot_have_a_default_value'
       s3.mirror = true
-      s3.additional_options = ['--exclude="*.rb"']
     end
   end
   after { Backup::Configuration::Syncer::S3.clear_defaults! }
@@ -23,7 +21,6 @@ describe Backup::Configuration::Syncer::S3 do
     s3.bucket.should == 'my-bucket'
     s3.path.should == '/backups/'
     s3.mirror.should == true
-    s3.additional_options.should == ['--exclude="*.rb"']
   end
 
   describe '#clear_defaults!' do
@@ -36,7 +33,6 @@ describe Backup::Configuration::Syncer::S3 do
       s3.bucket.should == nil
      s3.path.should == nil
      s3.mirror.should == nil
-     s3.additional_options.should == nil
     end
   end
 end
data/spec/notifier/twitter_spec.rb
CHANGED
@@ -88,7 +88,7 @@ describe Backup::Notifier::Twitter do
   context 'when status is :success' do
     it 'should send Success message' do
       notifier.expects(:send_message).with(
-
+        "[Backup::Success] test label (test_trigger) (@ #{notifier.instance_variable_get("@model").time})"
       )
       notifier.send(:notify!, :success)
     end
@@ -97,7 +97,7 @@ describe Backup::Notifier::Twitter do
   context 'when status is :warning' do
     it 'should send Warning message' do
       notifier.expects(:send_message).with(
-
+        "[Backup::Warning] test label (test_trigger) (@ #{notifier.instance_variable_get("@model").time})"
       )
       notifier.send(:notify!, :warning)
     end
@@ -106,7 +106,7 @@ describe Backup::Notifier::Twitter do
   context 'when status is :failure' do
     it 'should send Failure message' do
       notifier.expects(:send_message).with(
-
+        "[Backup::Failure] test label (test_trigger) (@ #{notifier.instance_variable_get("@model").time})"
      )
      notifier.send(:notify!, :failure)
    end
data/spec/syncer/cloud_files_spec.rb
ADDED
@@ -0,0 +1,192 @@
+# encoding: utf-8
+require File.expand_path('../../spec_helper.rb', __FILE__)
+
+class Parallel; end
+
+describe Backup::Syncer::CloudFiles do
+  describe '#perform!' do
+    let(:syncer)     { Backup::Syncer::CloudFiles.new }
+    let(:connection) { stub('connection',
+      :directories => stub('directories', :get => container)) }
+    let(:container)  { stub('container', :files => files) }
+    let(:files)      { [] }
+    let(:content)    { stub('content') }
+
+    before :each do
+      Fog::Storage.stubs(:new).returns connection
+      File.stubs(:open).returns content
+      File.stubs(:exist?).returns true
+      files.stubs(:create).returns true
+
+      syncer.directories << 'tmp'
+      syncer.path = 'storage'
+      Backup::Syncer::S3::SyncContext.any_instance.
+        stubs(:`).returns 'MD5(tmp/foo)= 123abcdef'
+    end
+
+    it "respects the concurrency_type setting with threads" do
+      syncer.concurrency_type = :threads
+
+      Parallel.expects(:each).with(anything, {:in_threads => 2}, anything)
+
+      syncer.perform!
+    end
+
+    it "respects the parallel thread count" do
+      syncer.concurrency_type  = :threads
+      syncer.concurrency_level = 10
+
+      Parallel.expects(:each).with(anything, {:in_threads => 10}, anything)
+
+      syncer.perform!
+    end
+
+    it "respects the concurrency_type setting with processors" do
+      syncer.concurrency_type = :processes
+
+      Parallel.expects(:each).with(anything, {:in_processes => 2}, anything)
+
+      syncer.perform!
+    end
+
+    it "respects the parallel thread count" do
+      syncer.concurrency_type  = :processes
+      syncer.concurrency_level = 10
+
+      Parallel.expects(:each).with(anything, {:in_processes => 10}, anything)
+
+      syncer.perform!
+    end
+
+    context 'file exists locally' do
+      it "uploads a file if it does not exist remotely" do
+        files.expects(:create).with(:key => 'storage/tmp/foo', :body => content)
+
+        syncer.perform!
+      end
+
+      it "uploads a file if it exists remotely with a different MD5" do
+        files << stub('file', :key => 'storage/tmp/foo', :etag => 'abcdef123')
+
+        files.expects(:create).with(:key => 'storage/tmp/foo', :body => content)
+
+        syncer.perform!
+      end
+
+      it "does nothing if the file exists remotely with the same MD5" do
+        files << stub('file', :key => 'storage/tmp/foo', :etag => '123abcdef')
+
+        files.expects(:create).never
+
+        syncer.perform!
+      end
+
+      it "skips the file if it no longer exists locally" do
+        File.stubs(:exist?).returns false
+
+        files.expects(:create).never
+
+        syncer.perform!
+      end
+
+      it "respects the given path" do
+        syncer.path = 'box'
+
+        files.expects(:create).with(:key => 'box/tmp/foo', :body => content)
+
+        syncer.perform!
+      end
+
+      it "uploads the content of the local file" do
+        File.expects(:open).with('tmp/foo').returns content
+
+        syncer.perform!
+      end
+
+      it "creates the connection with the provided credentials" do
+        syncer.api_key    = 'my-key'
+        syncer.username   = 'my-name'
+        syncer.auth_url   = 'my-auth'
+        syncer.servicenet = 'my-servicenet'
+
+        Fog::Storage.expects(:new).with(
+          :provider             => 'Rackspace',
+          :rackspace_api_key    => 'my-key',
+          :rackspace_username   => 'my-name',
+          :rackspace_auth_url   => 'my-auth',
+          :rackspace_servicenet => 'my-servicenet'
+        ).returns connection
+
+        syncer.perform!
+      end
+
+      it "uses the container with the given name" do
+        syncer.container = 'leaky'
+
+        connection.directories.expects(:get).with('leaky').returns(container)
+
+        syncer.perform!
+      end
+
+      it "creates the container if one does not exist" do
+        syncer.container = 'leaky'
+        connection.directories.stubs(:get).returns nil
+
+        connection.directories.expects(:create).
+          with(:key => 'leaky').returns(container)
+
+        syncer.perform!
+      end
+
+      it "iterates over each directory" do
+        syncer.directories << 'files'
+
+        Backup::Syncer::CloudFiles::SyncContext.any_instance.expects(:`).
+          with('find tmp -print0 | xargs -0 openssl md5 2> /dev/null').
+          returns 'MD5(tmp/foo)= 123abcdef'
+        Backup::Syncer::CloudFiles::SyncContext.any_instance.expects(:`).
+          with('find files -print0 | xargs -0 openssl md5 2> /dev/null').
+          returns 'MD5(tmp/foo)= 123abcdef'
+
+        syncer.perform!
+      end
+    end
+
+    context 'file does not exist locally' do
+      let(:file) { stub('file', :key => 'storage/tmp/foo',
+                        :etag => '123abcdef') }
+
+      before :each do
+        Backup::Syncer::CloudFiles::SyncContext.any_instance.
+          stubs(:`).returns ''
+        files << file
+        File.stubs(:exist?).returns false
+      end
+
+      it "removes the remote file when mirroring is turned on" do
+        syncer.mirror = true
+
+        file.expects(:destroy).once
+
+        syncer.perform!
+      end
+
+      it "leaves the remote file when mirroring is turned off" do
+        syncer.mirror = false
+
+        file.expects(:destroy).never
+
+        syncer.perform!
+      end
+
+      it "does not remove files not under one of the specified directories" do
+        file.stubs(:key).returns 'unsynced/tmp/foo'
+        syncer.mirror = true
+
+        file.expects(:destroy).never
+
+        syncer.perform!
+      end
+    end
+  end
+end
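The spec above also pins down the concurrency knobs the new cloud syncers expose: concurrency_type may be :threads or :processes (dispatched through the Parallel gem) and concurrency_level sets the worker count, defaulting to 2 in these examples. A hedged snippet showing how those settings would sit in a syncer block (the sync_with form is assumed from the generated templates):

  sync_with CloudFiles do |cf|
    # upload with 10 worker threads instead of the default 2
    cf.concurrency_type  = :threads
    cf.concurrency_level = 10
  end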