rest-ftp-daemon 0.300.3 → 0.302.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile.lock +12 -12
- data/README.md +12 -3
- data/bin/rest-ftp-daemon +14 -15
- data/config.ru +2 -8
- data/defaults.yml +83 -59
- data/lib/rest-ftp-daemon.rb +4 -8
- data/lib/rest-ftp-daemon/api/config.rb +3 -1
- data/lib/rest-ftp-daemon/api/dashboard.rb +1 -0
- data/lib/rest-ftp-daemon/api/debug.rb +2 -0
- data/lib/rest-ftp-daemon/api/job_presenter.rb +2 -0
- data/lib/rest-ftp-daemon/api/jobs.rb +2 -0
- data/lib/rest-ftp-daemon/api/root.rb +2 -0
- data/lib/rest-ftp-daemon/api/status.rb +1 -0
- data/lib/rest-ftp-daemon/constants.rb +5 -5
- data/lib/rest-ftp-daemon/job.rb +37 -37
- data/lib/rest-ftp-daemon/logger_pool.rb +39 -16
- data/lib/rest-ftp-daemon/notification.rb +26 -19
- data/lib/rest-ftp-daemon/remote.rb +16 -15
- data/lib/rest-ftp-daemon/remote_ftp.rb +2 -7
- data/lib/rest-ftp-daemon/remote_sftp.rb +1 -4
- data/lib/rest-ftp-daemon/worker_pool.rb +5 -15
- data/lib/rest-ftp-daemon/{worker_conchita.rb → workers/conchita.rb} +20 -22
- data/lib/rest-ftp-daemon/{worker_reporter.rb → workers/reporter.rb} +23 -30
- data/lib/rest-ftp-daemon/{worker_job.rb → workers/transfer.rb} +25 -35
- data/lib/shared/conf.rb +47 -41
- data/lib/{rest-ftp-daemon/worker.rb → shared/worker_base.rb} +42 -28
- data/rest-ftp-daemon.gemspec +5 -3
- data/spec/rest-ftp-daemon/features/dashboard_spec.rb +5 -5
- data/spec/spec_helper.rb +2 -2
- metadata +36 -24
- data/rest-ftp-daemon.sample.yml +0 -71
- data/spec/support/config.yml +0 -25

data/lib/rest-ftp-daemon/remote_sftp.rb CHANGED

@@ -9,9 +9,6 @@ module RestFtpDaemon
       # Call super
       super

-      # Use debug ?
-      @debug = (Conf.at :debug, :sftp) == true
-
       # Announce object
       log_debug "RemoteSFTP.initialize"
     end
@@ -22,7 +19,7 @@ module RestFtpDaemon
       log_debug "RemoteSFTP.connect [#{@url.user}]@[#{@url.host}]:[#{@url.port}]"

       # Debug level
-      verbosity =
+      verbosity = @debug ? Logger::INFO : false

       # Connect remote server
       @sftp = Net::SFTP.start(@url.host.to_s, @url.user.to_s,
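
The hunk above only shows how the verbosity value is computed before the connection is opened. As a rough illustration (not the gem's code), a boolean debug flag can be mapped onto the `:verbose` option that Net::SFTP hands through to Net::SSH; the host, user, password and file paths below are placeholders.

```ruby
require "net/sftp"
require "logger"

# Illustrative sketch only: map a debug flag to Net::SFTP/Net::SSH verbosity,
# in the spirit of the `verbosity = @debug ? Logger::INFO : false` line above.
debug     = true
verbosity = debug ? Logger::INFO : nil

options = { port: 22, password: "secret" }   # hypothetical connection options
options[:verbose] = verbosity if verbosity   # :verbose is forwarded to Net::SSH

Net::SFTP.start("sftp.example.org", "deploy", options) do |sftp|
  sftp.upload!("local/file.bin", "/remote/file.bin")
end
```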

data/lib/rest-ftp-daemon/worker_pool.rb CHANGED

@@ -59,14 +59,14 @@ module RestFtpDaemon

     def create_threads
       # Read configuration or initialize with empty hash
-      pools = Conf[:pools]
+      pools = Conf.at[:pools]
       pools = {} unless pools.is_a? Hash

       # Minimum one worker on DEFAULT_POOL
       if !(pools.is_a? Hash)
         log_error "create_threads: one JobWorker is the minimum (#{pools.inspect}"
       end
-      log_info "WorkerPool creating workers
+      log_info "WorkerPool creating all workers with #{pools.to_hash.inspect}"

       # Start ConchitaWorker and ReporterWorker
       create_thread :conchita, ConchitaWorker
@@ -77,7 +77,7 @@ module RestFtpDaemon
       pools.each do |pool, count|
         count.times do
           wid = next_worker_id
-
+          create_thread(wid, TransferWorker, pool)
         end
       end

@@ -85,23 +85,13 @@ module RestFtpDaemon
       log_error "EXCEPTION: #{ex.message}", ex.backtrace
     end

-    # def create_worker_thread wid, pool
-    # Thread.new wid do
-    # begin
-    # worker = JobWorker.new wid, pool
-    # #log_info "JobWorker [#{wid}][#{pool}]: #{worker}"
-    # rescue StandardError => ex
-    # log_error "JobWorker EXCEPTION: #{ex.message} #{e.backtrace}"
-    # end
-    # end
-    # end
-
     def create_thread wid, klass, pool = nil
       # Spawn thread and add it to my index
-      log_info "spawning #{klass.name} [#{wid}] [#{pool}]"
+      log_info "spawning #{klass.name} wid[#{wid}] pool[#{pool}]"
       @workers[wid] = Thread.new do
         begin
           worker = klass.new wid, pool
+          sleep 0.1
         rescue StandardError => ex
           log_error "#{klass.name} EXCEPTION: #{ex.message}"
         end
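
For readers following the pool logic, here is a minimal, self-contained sketch of the same pattern: one thread is spawned per configured worker, driven by a pool-name => count hash. The method and variable names are placeholders, not the gem's API.

```ruby
# Minimal sketch: spawn one thread per configured worker.
# The pools hash mirrors the :pools configuration section.
pools   = { "default" => 2, "bigfiles" => 1 }
workers = {}

def spawn_worker(workers, wid, pool)
  workers[wid] = Thread.new do
    begin
      # A real pool would instantiate a worker class here, e.g. klass.new(wid, pool)
      puts "worker #{wid} started for pool #{pool}"
      sleep 0.1
    rescue StandardError => ex
      warn "worker #{wid} crashed: #{ex.message}"
    end
  end
end

wid = 0
pools.each do |pool, count|
  count.times { spawn_worker(workers, wid += 1, pool) }
end
workers.each_value(&:join)
```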

data/lib/rest-ftp-daemon/{worker_conchita.rb → workers/conchita.rb} RENAMED

@@ -1,35 +1,33 @@
 module RestFtpDaemon

   # Worker used to clean up the queue deleting expired jobs
-  class ConchitaWorker <
+  class ConchitaWorker < Shared::WorkerBase

-
-      # Call dady and load my conf
-      super
+  protected

-
-
-
-    end
+    def worker_init
+      # Load corker conf
+      config_section :conchita

-
+      # Check that everything is OK
+      return "invalid timer" unless @config[:timer].to_i > 0
+      return false
+    end

-
-
-
-
-
-    # ]
-    # end
+    def worker_after
+      # Sleep for a few seconds
+      worker_status WORKER_STATUS_WAITING
+      sleep @config[:timer]
+    end

-    def
+    def worker_process
       # Announce we are working
       worker_status WORKER_STATUS_CLEANING

       # Cleanup queues according to configured max-age
-      $queue.expire JOB_STATUS_FINISHED, maxage(JOB_STATUS_FINISHED), @debug
-      $queue.expire JOB_STATUS_FAILED, maxage(JOB_STATUS_FAILED), @debug
-      $queue.expire JOB_STATUS_QUEUED, maxage(JOB_STATUS_QUEUED), @debug
+      $queue.expire JOB_STATUS_FINISHED, maxage(JOB_STATUS_FINISHED), @config[:debug]
+      $queue.expire JOB_STATUS_FAILED, maxage(JOB_STATUS_FAILED), @config[:debug]
+      $queue.expire JOB_STATUS_QUEUED, maxage(JOB_STATUS_QUEUED), @config[:debug]

       # Force garbage collector
       GC.start if @config["garbage_collector"]
@@ -37,10 +35,10 @@ module RestFtpDaemon
     rescue StandardError => e
       log_error "EXCEPTION: #{e.inspect}"
       sleep 1
-    else
-      wait_according_to_config
     end

+  private
+
     def maxage status
       @config["clean_#{status}"] || 0
     end
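
The renamed workers now implement three hooks (worker_init, worker_after, worker_process) and inherit the run loop from shared/worker_base.rb, which is not shown in this excerpt. The sketch below is a guess at that lifecycle, with hypothetical class names, meant only to show how the hooks fit together.

```ruby
# Hypothetical sketch of the worker lifecycle implied by the hooks above;
# this is not the gem's WorkerBase, just an illustration of the calling order.
class TinyWorkerBase
  def initialize(config)
    @config = config
    error = worker_init               # a message means bad config, false means OK
    raise ArgumentError, error if error
  end

  def run(cycles = 3)
    cycles.times do
      worker_process                  # do one unit of work
      worker_after                    # then report status / sleep
    end
  end
end

class TinyCleaner < TinyWorkerBase
  def worker_init
    return "invalid timer" unless @config[:timer].to_i > 0
    false
  end

  def worker_process
    puts "cleaning expired jobs"
  end

  def worker_after
    sleep @config[:timer]
  end
end

TinyCleaner.new(timer: 1).run
```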

data/lib/rest-ftp-daemon/{worker_reporter.rb → workers/reporter.rb} RENAMED

@@ -1,28 +1,26 @@
 module RestFtpDaemon

-  # Worker used to
-  class ReporterWorker <
+  # Worker used to report metrics to various services
+  class ReporterWorker < Shared::WorkerBase

-
-      # Call dady and load my conf
-      super
+  protected

-
-
-
-    end
+    def worker_init
+      # Load corker conf
+      config_section :reporter

-
+      # Check that everything is OK
+      return "invalid timer" unless @config[:timer].to_i > 0
+      return false
+    end

-
-
-
-
-
-    # ]
-    # end
+    def worker_after
+      # Sleep for a few seconds
+      worker_status WORKER_STATUS_WAITING
+      sleep @config[:timer]
+    end

-    def
+    def worker_process
       # Announce we are working
       worker_status WORKER_STATUS_REPORTING

@@ -32,23 +30,14 @@ module RestFtpDaemon
     rescue StandardError => e
       log_error "EXCEPTION: #{e.inspect}"
       sleep 1
-    else
-      wait_according_to_config
-    end
-
-    def maxage status
-      @config["clean_#{status}"] || 0
     end

   private

     def do_metrics
-      # Get common metrics
-      log_info "collecting metrics"
+      # Get common metrics and dump them to logs
       metrics = Metrics.sample
-
-      # Dump metrics to logs
-      log_debug "collected metrics", metrics
+      log_info "collected metrics", metrics

       # Transpose metrics to NewRelic metrics
       report_newrelic(metrics) if Conf.newrelic_enabled?
@@ -63,7 +52,11 @@ module RestFtpDaemon
           metrics_newrelic[name] = value
         end
       end
-
+      if @config[:debug]
+        log_debug "reported following metrics to NewRelic", metrics_newrelic
+      else
+        log_debug "reported all metrics to NewRelic"
+      end
     end

   end
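
The report_newrelic hunk only shows the tail of the transposition loop. As a hedged illustration (metric names, the "Custom/" prefix and the debug flag handling below are placeholders, not the gem's code), flattening a nested metrics hash and gating the detailed log output on a debug setting could look like this:

```ruby
# Illustrative only: flatten nested metrics and log the detail only in debug mode.
metrics = { jobs: { queued: 3, failed: 1 }, system: { memory: 123_456 } }
debug   = true

metrics_flat = {}
metrics.each do |group, values|
  values.each do |name, value|
    metrics_flat["Custom/#{group}/#{name}"] = value
  end
end

if debug
  puts "reported following metrics: #{metrics_flat.inspect}"
else
  puts "reported all metrics"
end
```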

data/lib/rest-ftp-daemon/{worker_job.rb → workers/transfer.rb} RENAMED

@@ -1,42 +1,35 @@
 module RestFtpDaemon

   # Worker used to process Jobs
-  class
+  class TransferWorker < Shared::WorkerBase

-
-      # Call dady and load my conf
-      super
-
-      # Timeout and retry config
-      @timeout = (Conf.at(:transfer, :timeout) rescue nil)
-      @retry = (Conf.at(:retry) rescue {})
+  protected

-
-
-
-      @
-      @retry_delay = Conf.at(:retry, :delay)
+    def worker_init
+      # Load standard config
+      config_section :transfer
+      @endpoints = Conf[:endpoints]

+      # Timeout and retry config
+      return "invalid timeout" unless @config[:timeout].to_i > 0

-      #
-      log_info "JobWorker
-
-
-        timeout: @timeout
+      # Log that
+      log_info "JobWorker worker_init", {
+        pool: @pool,
+        timeout: @config[:timeout]
       }

-      start
-    end
-
-  protected

+      return false
+    end

-    def
-      #
+    def worker_after
+      # Clean worker status
+      worker_jid nil
     end

   private

-    def
+    def worker_process
       # Wait for a job to be available in the queue
       worker_status WORKER_STATUS_WAITING
       job = $queue.pop @pool
@@ -52,18 +45,18 @@ module RestFtpDaemon
       if !job.error
         #log_info "job succeeded"

-      elsif !(@
+      elsif !(@config[:retry_on].is_a?(Enumerable) && @config[:retry_on].include?(job.error))
        log_error "not retrying: error not eligible"

-      elsif @
-        log_error "not retrying: max_age reached (#{@
+      elsif @config[:retry_for] && (job.age >= @config[:retry_for])
+        log_error "not retrying: max_age reached (#{@config[:retry_for]} s)"

-      elsif @
-        log_error "not retrying: max_runs reached (#{@
+      elsif @config[:retry_max] && (job.runs >= @config[:retry_max])
+        log_error "not retrying: max_runs reached (#{@config[:retry_max]} tries)"

       else
         # Delay cannot be negative, and will be 1s minimum
-        retry_after = [@
+        retry_after = [@config[:retry_after] || DEFAULT_RETRY_AFTER, 1].max
        log_info "retrying job: waiting for #{retry_after} seconds"

         # Wait !
@@ -74,9 +67,6 @@ module RestFtpDaemon
         $queue.requeue job
       end

-      # Clean worker status
-      worker_jid nil
-
     rescue StandardError => ex
       log_error "WORKER UNHANDLED EXCEPTION: #{ex.message}", ex.backtrace
       worker_status WORKER_STATUS_CRASHED
@@ -89,7 +79,7 @@ module RestFtpDaemon
       job.wid = Thread.current.thread_variable_get :wid

       # Processs this job protected by a timeout
-      Timeout.timeout(@timeout, RestFtpDaemon::JobTimeout) do
+      Timeout.timeout(@config[:timeout], RestFtpDaemon::JobTimeout) do
         job.process
       end

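
The retry branches above now read everything from the worker's config section. Here is a standalone sketch of that decision, using the same config keys (:retry_on, :retry_for, :retry_max, :retry_after); DEFAULT_RETRY_AFTER and the job hash are stand-ins, not the gem's actual constants or classes.

```ruby
# Sketch of the retry-eligibility decision shown in the diff above.
DEFAULT_RETRY_AFTER = 10

def retry_delay(job, config)
  retry_on = config[:retry_on]

  if !job[:error]
    nil                                                     # job succeeded, nothing to retry
  elsif !(retry_on.is_a?(Enumerable) && retry_on.include?(job[:error]))
    nil                                                     # error not eligible for retry
  elsif config[:retry_for] && job[:age] >= config[:retry_for]
    nil                                                     # max age reached
  elsif config[:retry_max] && job[:runs] >= config[:retry_max]
    nil                                                     # max runs reached
  else
    [config[:retry_after] || DEFAULT_RETRY_AFTER, 1].max    # wait at least 1 s before requeueing
  end
end

config = { retry_on: [:conn_reset], retry_for: 3600, retry_max: 5, retry_after: 10 }
job    = { error: :conn_reset, age: 120, runs: 1 }
puts retry_delay(job, config)   # => 10
```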

data/lib/shared/conf.rb CHANGED

@@ -10,6 +10,7 @@ module Shared

   class Conf
     extend Chamber
+    PIDFILE_DIR = "/tmp/"

     class << self
       attr_accessor :app_env
@@ -21,7 +22,6 @@
       attr_reader :app_spec
       attr_reader :files
       attr_reader :host
-
     end

     def self.init app_root
@@ -51,11 +51,12 @@
       fail ConfigMissingParameter, "gemspec: missing version" unless @app_ver

       # Now we know app_name, initalize app_libs
-      @app_libs = File.expand_path(
+      @app_libs = File.expand_path("lib/#{@app_name}/", @app_root)

       # Add other config files
-      add_default_config
-
+      #add_default_config
+      add_config generate(:config_defaults)
+      add_config generate(:config_etc)

       # Return something
       return @app_name
@@ -64,17 +65,20 @@
     def self.prepare args = {}
       ensure_init

-      # Add extra config file
-
-
-      # Load configuration files
-      load_files
+      # Add extra config file and load them all
+      add_config args[:config]
+      reload!

       # Set Rack env
       ENV["RACK_ENV"] = @app_env.to_s

+      # Set up encodings
+      Encoding.default_internal = "utf-8"
+      Encoding.default_external = "utf-8"
+
       # Init New Relic
-
+      newrelic_logfile = File.expand_path(Conf[:logs][:newrelic], Conf[:logs][:path])
+      prepare_newrelic self[:newrelic], newrelic_logfile

       # Try to access any key to force parsing of the files
       self[:dummy]
@@ -104,53 +108,55 @@

     def self.newrelic_enabled?
       ensure_init
-
+      self[:newrelic] && self[:newrelic][:licence]
     end

     # Defaults generators
-    def self.
-      ensure_init
-      "/tmp/#{@app_name}-#{@host}-#{self[:port]}.pid"
-    end
-    def self.gen_config_etc
+    def self.generate what
       ensure_init
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      return case what
+
+      when :user_agent
+        "#{@app_name}/#{@app_ver}" if @app_name && @app_ver
+
+      when :config_defaults
+        "#{@app_root}/defaults.yml" if @app_root
+
+      when :config_etc
+        "/etc/#{@app_name}.yml" if @app_name
+
+      when :process_name
+        parts = [@app_name, @app_env]
+        parts << self[:port] if self[:port]
+        parts.join('-')
+
+      when :pidfile
+        process_name = self.generate(:process_name)
+        File.expand_path "#{process_name}.pid", PIDFILE_DIR
+
+      when :config_message
+        config_defaults = self.generate(:config_defaults)
+        config_etc = self.generate(:config_etc)
+
+        "A default configuration is available (#{config_defaults}) and can be copied to the default location (#{config_etc}): \n sudo cp #{config_defaults} #{config_etc}"
+
+      end
     end

+
     protected

     def self.load_files
       load files: @files, namespaces: { environment: @app_env }
     end

-    def self.
-      @files <<
-    end
-
-    def self.add_etc_config
-      @files << File.expand_path("/etc/#{@app_name}.yml") if @app_name
-    end
-
-    def self.add_extra_config path
-      @files << File.expand_path(path) if path
+    def self.add_config path
+      @files << File.expand_path(path) if path && File.readable?(path)
     end

     def self.prepare_newrelic section, logfile
       # Disable NewRelic if no config present
-      unless
+      unless self.newrelic_enabled?
         ENV["NEWRELIC_AGENT_ENABLED"] = "false"
         return
       end