refinery 0.12.2 → 1.0
This diff compares the content of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
- data/lib/refinery.rb +1 -99
- data/refinery.gemspec +16 -117
- metadata +39 -118
- data/.gitignore +0 -6
- data/CHANGELOG +0 -2
- data/LICENSE +0 -21
- data/README.rdoc +0 -58
- data/README.textile +0 -58
- data/Rakefile +0 -43
- data/VERSION +0 -1
- data/bin/epub +0 -64
- data/bin/monitor +0 -47
- data/bin/pubnow +0 -61
- data/bin/refinery +0 -64
- data/config/config.example.yml +0 -21
- data/lib/refinery/beanstalk_queue.rb +0 -36
- data/lib/refinery/beanstalk_queue_provider.rb +0 -18
- data/lib/refinery/config.rb +0 -48
- data/lib/refinery/configurable.rb +0 -15
- data/lib/refinery/daemon.rb +0 -148
- data/lib/refinery/event_publisher.rb +0 -131
- data/lib/refinery/heartbeat.rb +0 -33
- data/lib/refinery/loggable.rb +0 -9
- data/lib/refinery/monitor.rb +0 -113
- data/lib/refinery/processor.rb +0 -55
- data/lib/refinery/publisher.rb +0 -42
- data/lib/refinery/queueable.rb +0 -48
- data/lib/refinery/server.rb +0 -88
- data/lib/refinery/statistics.rb +0 -61
- data/lib/refinery/stats_server.rb +0 -135
- data/lib/refinery/utilities.rb +0 -33
- data/lib/refinery/validations.rb +0 -48
- data/lib/refinery/worker.rb +0 -65
- data/logs/README +0 -1
- data/publishers/error.rb +0 -6
- data/publishers/sample.rb +0 -6
- data/publishers/sleep.rb +0 -5
- data/test/config.yml +0 -10
- data/test/test_helper.rb +0 -21
- data/test/unit/config_test.rb +0 -42
- data/test/unit/configurable_test.rb +0 -13
- data/test/unit/daemon_test.rb +0 -63
- data/test/unit/event_publisher_test.rb +0 -12
- data/test/unit/heartbeat_test.rb +0 -25
- data/test/unit/loggable_test.rb +0 -12
- data/test/unit/processor_test.rb +0 -34
- data/test/unit/publisher_test.rb +0 -13
- data/test/unit/queueable_test.rb +0 -26
- data/test/unit/server_test.rb +0 -34
- data/test/unit/statistics_test.rb +0 -44
- data/test/unit/utilities_test.rb +0 -25
- data/test/unit/validations_test.rb +0 -37
- data/test/unit/worker_test.rb +0 -44
- data/workers/error.rb +0 -8
- data/workers/sample.rb +0 -8
- data/workers/sleep.rb +0 -7
data/lib/refinery/beanstalk_queue_provider.rb
DELETED
@@ -1,18 +0,0 @@
-module Refinery #:nodoc:
-  # A queue provider for beanstalk
-  class BeanstalkQueueProvider
-    include Refinery::Loggable
-
-    attr_reader :queues
-
-    # Initialize the queue provider
-    def initialize(hosts=nil)
-      @hosts = hosts
-      @queues = {}
-    end
-    # Get the named queue
-    def queue(name)
-      queues[name] ||= Refinery::BeanstalkQueue.new(name, @hosts)
-    end
-  end
-end
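A brief usage sketch of the removed provider, assuming a local beanstalkd instance; the host list and queue name are illustrative, and Refinery::BeanstalkQueue itself lived in the separately removed beanstalk_queue.rb.

    # Hypothetical usage; host and queue name are assumptions.
    provider = Refinery::BeanstalkQueueProvider.new(['localhost:11300'])
    queue = provider.queue('sample_waiting')          # creates and memoizes a BeanstalkQueue
    provider.queue('sample_waiting').equal?(queue)    # => true, same cached instance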
data/lib/refinery/config.rb
DELETED
@@ -1,48 +0,0 @@
-module Refinery #:nodoc:
-  # Configuration class.
-  class Config
-    # Get a shared configuration
-    def self.default
-      @default ||= new({
-        'aws' => {
-          'credentials' => {}
-        },
-        'processors' => {}
-      })
-    end
-
-    # Initialize the config with the given data
-    def initialize(data={})
-      @data = data
-    end
-
-    # Get the configuration value
-    def [](key)
-      data[key.to_s]
-    end
-
-    # Set the configuration value
-    def []=(key, value)
-      data[key.to_s] = value
-    end
-
-    # Load configuration from a YAML file
-    def load_file(file)
-      @file = file
-      @data = YAML::load_file(@file)
-      @last_load = File.mtime(@file)
-    end
-
-    # Refresh the configuration from the YAML file if necessary.
-    def refresh
-      if File.mtime(@file) != @last_load
-        @data = YAML::load_file(@file)
-      end
-    end
-
-    private
-    def data
-      @data ||= {}
-    end
-  end
-end
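A short sketch of how the removed Config class was used, based only on the methods shown above; the file path is an assumption (the gem shipped config/config.example.yml as its example).

    config = Refinery::Config.new
    config.load_file('config/config.example.yml')   # assumed path
    config[:prefix]          # keys are stringified, so :prefix and 'prefix' are equivalent
    config['processors']     # per-processor settings hash
    config.refresh           # reloads only if the file mtime has changed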
data/lib/refinery/configurable.rb
DELETED
@@ -1,15 +0,0 @@
-module Refinery #:nodoc:
-  # Include this module to get access to a shared configuration
-  module Configurable
-    # Get the configuration. If the config is nil then this will use
-    # the default shared configuration.
-    def config
-      @config ||= Refinery::Config.default
-    end
-
-    # Set the configuration.
-    def config=(config)
-      @config = config
-    end
-  end
-end
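A minimal sketch of the mix-in contract above; ReportingTask is a hypothetical class used only for illustration.

    class ReportingTask
      include Refinery::Configurable
    end

    task = ReportingTask.new
    task.config                                               # falls back to Refinery::Config.default
    task.config = Refinery::Config.new('prefix' => 'test_')   # per-instance override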
data/lib/refinery/daemon.rb
DELETED
@@ -1,148 +0,0 @@
-module Refinery #:nodoc:
-  # A daemon provides a thread to run workers in.
-  class Daemon < Thread
-    include Refinery::Loggable
-    include Refinery::Configurable
-    include Refinery::Utilities
-    include Refinery::Queueable
-
-    RUNNING = 'running'
-    STOPPED = 'stopped'
-
-    # The name of the daemon
-    attr_reader :name
-    # The settings for the daemon
-    attr_reader :settings
-    # The base queue name
-    attr_reader :queue_name
-
-    # Stop the daemon
-    def stop
-      self.state = STOPPED
-    end
-
-    # Return the daemon state
-    def state
-      @state ||= RUNNING
-    end
-
-    # Set the daemon state.
-    def state=(state)
-      @state = state
-    end
-    protected :state
-
-    # Return true if the daemon state is running.
-    def running?
-      state == RUNNING
-    end
-
-    # Initialize the daemon.
-    #
-    # * <tt>processor</tt>: The processor instance
-    # * <tt>name</tt>: The processor name
-    # * <tt>waiting_queue</tt>: The waiting queue that provides messages to be processed
-    # * <tt>error_queue</tt>: The queue where errors are posted.
-    # * <tt>done_queue</tt>: The queue for messages that have been processed.
-    # * <tt>settings</tt>: The settings hash from the config.
-    #
-    # The settings hash may contain the following options:
-    # * <tt>visibility</tt>: The time in seconds that the message is hidden
-    #   in the queue.
-    def initialize(processor, name, queue_prefix='', settings={})
-      logger.debug "Starting daemon"
-
-      @processor = processor
-      @name = name
-      @settings = settings
-
-      queue_name = settings['queue'] || name
-      queue_name = "#{queue_prefix}#{queue_name}"
-      logger.debug "Using queue #{queue_name}"
-      @queue_name = queue_name
-
-      super do
-        begin
-          execute
-        rescue Exception => e
-          logger.error e
-        end
-      end
-    end
-
-    private
-    def execute
-      logger.debug "Running daemon thread: #{name} (settings: #{settings.inspect})"
-
-      if defined?(java.lang.Thread)
-        java.lang.Thread.current_thread.name = "#{name} Daemon"
-      end
-
-      while(running?)
-        #logger.debug "Checking #{queue_name}_waiting"
-        with_queue("#{queue_name}_waiting") do |waiting_queue|
-          while (message = waiting_queue.receive(settings['visibility']))
-            worker = load_worker_class(name).new(self)
-            begin
-              result, run_time = worker.run(decode_message(message.body))
-              if result
-                with_queue("#{queue_name}_done") do |done_queue|
-                  done_message = {
-                    'host_info' => host_info,
-                    'original' => message.body,
-                    'run_time' => run_time
-                  }
-                  logger.debug "Sending 'done' message to #{done_queue.name}"
-                  done_queue.send_message(encode_message(done_message))
-                end
-
-                logger.debug "Deleting message from queue"
-                message.delete()
-              end
-            rescue Exception => e
-              with_queue("#{queue_name}_error") do |error_queue|
-                error_message = {
-                  'error' => {
-                    'message' => e.message,
-                    'class' => e.class.name,
-                    'backtrace' => e.backtrace
-                  },
-                  'host_info' => host_info,
-                  'original' => message.body
-                }
-                logger.error "Sending 'error' message to #{error_queue.name}: #{e.message}"
-                error_queue.send_message(encode_message(error_message))
-              end
-              message.delete()
-            end
-          end
-          sleep(settings['sleep'] || 5)
-        end
-      end
-      logger.debug "Exiting daemon thread"
-    end
-
-    # A hash of worker classes
-    def workers
-      @workers ||= {}
-    end
-
-    private
-    # Load the appropriate worker class
-    def load_worker_class(name)
-      source_file = "#{@processor.server.workers_directory}/#{name}.rb"
-      if File.exist?(source_file)
-        modified_at = File.mtime(source_file)
-        if workers[name] != modified_at
-          logger.debug "Loading #{source_file}"
-          load(source_file)
-          workers[name] = modified_at
-        end
-      else
-        raise SourceFileNotFound, "Source file not found: #{source_file}"
-      end
-
-      Object.const_get(camelize("#{name}_worker"))
-    end
-  end
-end
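A stand-in worker sketch that satisfies only the interface Daemon#execute calls above: the class is resolved by camelizing "#{name}_worker", constructed with the daemon, and its run method returns a result plus a run time. The real base class lived in the removed lib/refinery/worker.rb and is not reproduced in this hunk; SampleWorker and its body are assumptions.

    # workers/sample.rb (hypothetical) -- resolved as SampleWorker for the 'sample' processor
    class SampleWorker
      def initialize(daemon)
        @daemon = daemon
      end

      # Receives the decoded payload hash; returns [result, run_time] as the daemon expects.
      def run(payload)
        started = Time.now
        # ... process the payload here ...
        [true, Time.now - started]
      end
    end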
data/lib/refinery/event_publisher.rb
DELETED
@@ -1,131 +0,0 @@
-module Refinery #:nodoc:
-  # Publish events.
-  class EventPublisher
-    include Refinery::Loggable
-    include Refinery::Configurable
-    include Refinery::Queueable
-    include Refinery::Utilities
-
-    STARTING = 'starting' #:nodoc:
-    RUNNING = 'running' #:nodoc:
-    STOPPED = 'stopped' #:nodoc:
-
-    attr_accessor :publishers_directory
-
-    # Initialize the event publisher
-    #
-    # Options:
-    # * <tt>:debug</tt>: Set to true to enable debug logging
-    # * <tt>:config</tt>: Provide a file path to load that config
-    def initialize(options={})
-      logger.level = Logger::INFO if options[:verbose]
-      logger.level = Logger::DEBUG if options[:debug]
-      config.load_file(options[:config]) if options[:config]
-      self.publishers_directory = options[:publishers] if options[:publishers]
-    end
-
-    # Get the event publisher state
-    def state
-      @state ||= STARTING
-    end
-
-    # Return true if the event publisher is running
-    def running?
-      state == RUNNING
-    end
-
-    # The directory where publishers are found. Defaults to ./publishers
-    def publishers_directory
-      @publishers_directory ||= './publishers'
-    end
-
-    # A hash of all publisher classes mapped to last modified timestamps.
-    def publishers
-      @publishers ||= {}
-    end
-
-    # Run the specified publisher once and return
-    def run_once(key)
-      prefix = config['prefix'] || ''
-      settings = config['processors'][key]
-      raise RuntimeError, "No processor configuration found for #{key}" unless settings
-      queue_name = settings['queue'] || key
-      queue_name = "#{prefix}#{queue_name}"
-      waiting_queue_name = "#{queue_name}_waiting"
-      logger.debug "Using queue #{waiting_queue_name}"
-      load_publisher_class(key).new(waiting_queue_name).execute
-    end
-
-    # Run the event publisher
-    def run
-      @state = RUNNING
-      logger.info "Starting event publisher"
-      config['processors'].each do |key, settings|
-        run_publisher(key, settings)
-      end
-
-      begin
-        threads.each { |thread| thread.join }
-      rescue Interrupt => e
-      end
-
-      logger.info "Exiting event publisher"
-    end
-
-    private
-    # An array of threads, one for each publisher instance
-    def threads
-      @threads ||= []
-    end
-
-    # Run the publisher for the given key
-    def run_publisher(key, settings)
-      if File.exists?(source_file(key))
-        prefix = config['prefix'] || ''
-        logger.info "Creating publisher for #{key}"
-        queue_name = settings['queue'] || key
-        queue_name = "#{prefix}#{queue_name}"
-        waiting_queue_name = "#{queue_name}_waiting"
-        logger.debug "Using queue #{waiting_queue_name}"
-
-        threads << Thread.new(waiting_queue_name, settings) do |waiting_queue_name, settings|
-          while(running?)
-            begin
-              load_publisher_class(key).new(waiting_queue_name).execute
-            rescue Exception => e
-              logger.error "Error running publisher: #{e.message}"
-            end
-
-            delay = settings['publishers']['delay'] || 60
-            logger.debug "Sleeping #{delay} seconds"
-            sleep delay
-
-          end
-        end
-      else
-        logger.warn "No publisher found for #{key}"
-      end
-    end
-
-    def source_file(key)
-      source_file = "#{publishers_directory}/#{key}.rb"
-    end
-
-    def load_publisher_class(key)
-      source_file = source_file(key)
-      if File.exist?(source_file)
-        modified_at = File.mtime(source_file)
-        if publishers[key] != modified_at
-          logger.debug "Loading #{source_file}"
-          load(source_file)
-          publishers[key] = modified_at
-        end
-      else
-        raise SourceFileNotFound, "Source file not found: #{source_file}"
-      end
-
-      Object.const_get(camelize("#{key}_publisher"))
-    end
-
-  end
-end
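A stand-in publisher sketch matching only what run_publisher and run_once call above: a file named publishers/sample.rb defining SamplePublisher (camelized "sample_publisher"), constructed with the waiting queue name and responding to execute. The removed lib/refinery/publisher.rb defined the real base class; this body is an assumption.

    # publishers/sample.rb (hypothetical)
    class SamplePublisher
      def initialize(waiting_queue_name)
        @waiting_queue_name = waiting_queue_name
      end

      def execute
        # ... enqueue one or more work messages onto @waiting_queue_name ...
      end
    end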
data/lib/refinery/heartbeat.rb
DELETED
@@ -1,33 +0,0 @@
-module Refinery #:nodoc:
-  # A heartbeat publisher that indicates a server is alive.
-  class Heartbeat
-    include Refinery::Loggable
-    include Refinery::Configurable
-    include Refinery::Queueable
-    include Refinery::Utilities
-
-    # Initialize the heartbeat for the given server.
-    def initialize(server)
-      queue_prefix = config['prefix'] || ''
-      @server = server
-      @thread = Thread.new do
-        if defined?(java.lang.Thread)
-          java.lang.Thread.current_thread.name = 'Heartbeat'
-        end
-
-        loop do
-          with_queue("#{queue_prefix}heartbeat") do |heartbeat_queue|
-            logger.debug "Send heartbeat"
-            message = {
-              'host_info' => host_info,
-              'timestamp' => Time.now.utc,
-              'running_daemons' => @server.daemons.length
-            }
-            heartbeat_queue.send_message(Base64.encode64(message.to_json))
-            sleep(60)
-          end
-        end
-      end
-    end
-  end
-end
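A round-trip sketch of the heartbeat payload, using the Base64-plus-JSON encoding shown in Heartbeat#initialize above; the host_info shape is an assumption, since the host_info helper lived in the removed utilities.rb.

    require 'base64'
    require 'json'

    payload = {
      'host_info'       => { 'hostname' => 'worker-1' },  # assumed shape
      'timestamp'       => Time.now.utc,
      'running_daemons' => 2
    }
    body = Base64.encode64(payload.to_json)               # what Heartbeat sends

    JSON.parse(Base64.decode64(body))['running_daemons']  # => 2 on the consuming side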
data/lib/refinery/loggable.rb
DELETED
data/lib/refinery/monitor.rb
DELETED
@@ -1,113 +0,0 @@
-module Refinery #:nodoc:
-  # The monitor is responsible for monitoring the health of the various
-  # components of refinery.
-  class Monitor
-    include Refinery::Loggable
-    include Refinery::Configurable
-    include Refinery::Queueable
-    include Refinery::Utilities
-
-    # Initialize the monitor.
-    #
-    # Options:
-    # * <tt>:verbose</tt>: Enable INFO level logging
-    # * <tt>:debug</tt>: Enable DEBUG level logging
-    # * <tt>:config</tt>: The config file
-    def initialize(options)
-      logger.level = Logger::INFO if options[:verbose]
-      logger.level = Logger::DEBUG if options[:debug]
-      config.load_file(options[:config]) if options[:config]
-      @queue_prefix = config['prefix'] || ''
-    end
-
-    # Execute the monitor. The monitor will start one heartbeat
-    # monitor thread and one thread for each done queue and error
-    # queue as specified in the configuration.
-    def run
-      logger.info "Starting up monitor"
-      heartbeat_monitor_thread = run_heartbeat_monitor
-      done_monitor_threads = run_done_monitors
-      error_monitor_threads = run_error_monitors
-
-      logger.info "Monitor running"
-
-      Refinery::StatsServer.new.run
-
-      begin
-        heartbeat_monitor_thread.join
-        done_monitor_threads.each { |t| t.join }
-        error_monitor_threads.each { |t| t.join }
-      rescue Interrupt => e
-      end
-
-      logger.info "Monitor is exiting"
-    end
-
-    private
-    def statistics
-      @statistics ||= Refinery::Statistics.new
-    end
-
-    def run_heartbeat_monitor
-      logger.info "Starting #{@queue_prefix}heartbeat monitor"
-      Thread.new("#{@queue_prefix}heartbeat") do |heartbeat_queue_name|
-        loop do
-          with_queue(heartbeat_queue_name) do |heartbeat_queue|
-            while (message = heartbeat_queue.receive)
-              logger.debug "#{heartbeat_queue.name}: #{decode_message(message.body).inspect}"
-              message.delete()
-            end
-          end
-          sleep(5)
-        end
-      end
-    end
-
-    def run_done_monitors
-      config['processors'].collect do |key, settings|
-        queue_name = settings['queue'] || key
-        done_queue_name = "#{@queue_prefix}#{queue_name}_done"
-        logger.debug "Starting monitor for queue #{done_queue_name}"
-        Thread.new(done_queue_name) do |done_queue_name|
-          loop do
-            with_queue(done_queue_name) do |done_queue|
-              while (message = done_queue.receive)
-                done_message = decode_message(message.body)
-                logger.debug "#{done_queue.name}: #{done_message.pretty_inspect}"
-                processed = decode_message(done_message['original'])
-                logger.info "Done: #{queue_name} #{processed.inspect}"
-                message.delete()
-                statistics.record_done(done_message)
-              end
-              sleep(5)
-            end
-          end
-        end
-      end
-    end
-
-    def run_error_monitors
-      config['processors'].collect do |key, settings|
-        queue_name = settings['queue'] || key
-        error_queue_name = "#{@queue_prefix}#{queue_name}_error"
-        logger.info "Starting error monitor for queue #{error_queue_name}"
-        Thread.new(error_queue_name) do |error_queue_name|
-          loop do
-            with_queue(error_queue_name) do |error_queue|
-              while (message = error_queue.receive)
-                error_message = decode_message(message.body)
-                logger.debug "#{error_queue.name}: #{error_message.pretty_inspect}"
-                processed = decode_message(error_message['original'])
-                logger.info "Error: #{queue_name} #{processed.inspect}"
-                message.delete()
-                statistics.record_error(error_message)
-              end
-            end
-            sleep(5)
-          end
-        end
-      end
-    end
-
-  end
-end
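For reference, a sketch of the configuration shape the monitor, daemon, and event publisher read, expressed as the Ruby hash Config would yield after loading YAML. Only the keys the deleted code above actually reads are shown, and the values are assumptions; the authoritative example was the removed config/config.example.yml.

    {
      'prefix' => 'myapp_',                    # assumed prefix; prepended to every queue name
      'processors' => {
        'sample' => {
          'queue'      => 'sample',            # optional; defaults to the processor key
          'visibility' => 90,                  # seconds a received message stays hidden
          'sleep'      => 5,                   # daemon poll interval between queue checks
          'publishers' => { 'delay' => 60 }    # seconds between publisher executions
        }
      }
    }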