content_server 1.0.2 → 1.0.3
- data/bin/backup_server +3 -0
- data/bin/content_server +3 -0
- data/lib/content_server/backup_server.rb +88 -11
- data/lib/content_server/content_server.rb +88 -10
- data/lib/content_server/queue_copy.rb +20 -7
- data/lib/content_server/queue_indexer.rb +8 -10
- data/lib/content_server/version.rb +1 -1
- data/lib/content_server.rb +5 -0
- metadata +18 -18
data/bin/backup_server
CHANGED
@@ -23,11 +23,14 @@ begin
   RunInBackground.run { ContentServer.run_backup_server }
 rescue SystemExit, SignalException => exc
   # TODO (genadyp) do we need to trap signals by types?
+  puts("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
+       "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
   Log.error("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
             "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
   Log.flush
   exit
 rescue Exception => exc
+  puts("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
   Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
   if retries > 0
     Log.debug1("Restarting (retries:#{retries}).")
data/bin/content_server
CHANGED
@@ -20,11 +20,14 @@ begin
   RunInBackground.run { ContentServer.run_content_server }
 rescue SystemExit, SignalException => exc
   # TODO (genadyp) do we need to trap signals by types?
+  puts("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
+       "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
   Log.error("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
             "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
   Log.flush
   exit
 rescue Exception => exc
+  puts("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
   Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
   if retries > 0
     Log.debug1("Restarting (retries:#{retries}).")
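Note: both executables above now print the failure to stdout in addition to logging it, so a crash stays visible even if the log is never flushed. Below is a minimal standalone sketch of this top-level wrapper pattern; `run_server`, `logger`, and `retries` are illustrative stand-ins, not the gem's `RunInBackground`/`Log` API.

    require 'logger'

    # Hypothetical stand-ins for the gem's RunInBackground.run and Log.
    logger = Logger.new('server.log')

    def run_server
      sleep 1  # placeholder for the real server loop
    end

    retries = 3
    begin
      run_server
    rescue SystemExit, SignalException => exc
      # Echo to stdout as well as the log, so an interrupted run is visible on the console.
      puts("Interrupt or Exit: #{exc.class}, stopping process.")
      logger.error("Interrupt or Exit: #{exc.class}, stopping process.")
      exit
    rescue Exception => exc
      puts("Exception: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
      logger.error("Exception: #{exc.class}:#{exc.message}")
      if retries > 0
        retries -= 1
        retry
      end
    end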
data/lib/content_server/backup_server.rb
CHANGED
@@ -31,8 +31,16 @@ module ContentServer
     Thread.abort_on_exception = true
     all_threads = []

-
-
+    # create general tmp dir
+    FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
+    # init tmp content data file
+    tmp_content_data_file = Params['tmp_path'] + '/backup.data'
+
+    if Params['enable_monitoring']
+      Log.info("Initializing monitoring of process params on port:#{Params['process_monitoring_web_port']}")
+      Params['process_vars'] = ThreadSafeHash::ThreadSafeHash.new
+      Params['process_vars'].set('server_name', 'backup_server')
+    end

     # # # # # # # # # # # #
     # Initialize/Start monitoring
@@ -61,7 +69,7 @@ module ContentServer
     # # # # # # # # # # # # # # # # # # # # # # # # # # #
     # Initialize/Start backup server content data sender
     Log.debug1('Start backup server content data sender')
-
+    local_dynamic_content_data = ContentData::DynamicContentData.new
     #content_data_sender = ContentDataSender.new(
     #  Params['remote_server'],
     #  Params['remote_listening_port'])
@@ -71,7 +79,24 @@ module ContentServer
         Log.debug1 'Waiting on local server content data queue.'
         cd = local_server_content_data_queue.pop
         # content_data_sender.send_content_data(cd)
-
+        local_dynamic_content_data.update(cd)
+      end
+    end
+
+    # # # # # # # # # # # # # # # # # # # # # # # #
+    # Start dump local content data to file thread
+    Log.debug1('Start dump local content data to file thread')
+    all_threads << Thread.new do
+      last_data_flush_time = nil
+      while true do
+        if last_data_flush_time.nil? || last_data_flush_time + Params['data_flush_delay'] < Time.now.to_i
+          Log.info "Writing local content data to #{Params['local_content_data_path']}."
+          local_dynamic_content_data.last_content_data.to_file(tmp_content_data_file)
+          sleep(0.1)  # Added to prevent mv access issue
+          ::FileUtils.mv(tmp_content_data_file, Params['local_content_data_path'])
+          last_data_flush_time = Time.now.to_i
+        end
+        sleep(1)
       end
     end
     Params['backup_destination_folder'] = File.expand_path(Params['monitoring_paths'][0]['path'])
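Note: the new dump thread above writes the current content data to a file under `tmp_path` and then renames it over `local_content_data_path`, so readers never observe a half-written file; the short sleep was added to work around an mv access issue. Below is a minimal stdlib-only sketch of the same write-to-tmp-then-rename flush loop; the paths, the JSON payload, and `flush_delay` are illustrative stand-ins for the gem's `ContentData#to_file` and `Params['data_flush_delay']`.

    require 'fileutils'
    require 'json'

    # Hypothetical stand-ins: a plain Hash snapshot and local paths.
    snapshot    = { 'files' => ['a.txt', 'b.txt'] }
    tmp_path    = File.expand_path('~/.bbfs/tmp/content.data.tmp')
    final_path  = File.expand_path('~/.bbfs/content.data')
    flush_delay = 300  # seconds, mirrors the 'data_flush_delay' parameter

    FileUtils.mkdir_p(File.dirname(tmp_path))
    FileUtils.mkdir_p(File.dirname(final_path))

    flusher = Thread.new do
      last_flush = nil
      loop do
        if last_flush.nil? || last_flush + flush_delay < Time.now.to_i
          # Write the full snapshot to a temporary file first...
          File.write(tmp_path, JSON.pretty_generate(snapshot))
          # ...then rename it into place, so readers only ever see a complete file.
          FileUtils.mv(tmp_path, final_path)
          last_flush = Time.now.to_i
        end
        sleep(1)
      end
    end

    sleep(2)  # give the flusher a moment in this sketch; a real server would join the thread
    puts File.read(final_path)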
@@ -85,8 +110,7 @@ module ContentServer

     file_copy_client = FileCopyClient.new(Params['content_server_hostname'],
                                           Params['content_server_files_port'],
-
-                                          @process_variables)
+                                          local_dynamic_content_data)
     all_threads.concat(file_copy_client.threads)

     # Each
@@ -94,7 +118,7 @@ module ContentServer
     all_threads << Thread.new do
       loop do
         sleep(Params['backup_check_delay'])
-        local_cd =
+        local_cd = local_dynamic_content_data.last_content_data()
         remote_cd = content_server_dynamic_content_data.last_content_data()
         diff = ContentData::ContentData.remove(local_cd, remote_cd)
         #file_copy_client.request_copy(diff) unless diff.empty?
@@ -111,11 +135,64 @@ module ContentServer
       end
     end

+    # # # # # # # # # # # # # # # # # # # # # # # #
+    # Start process vars thread
     if Params['enable_monitoring']
-
-
-
-
+      monitoring_info = MonitoringInfo::MonitoringInfo.new()
+      all_threads << Thread.new do
+        last_data_flush_time = nil
+        mutex = Mutex.new
+        while true do
+          if last_data_flush_time.nil? || last_data_flush_time + Params['process_vars_delay'] < Time.now
+            Params['process_vars'].set('time', Time.now)
+            Log.info("process_vars:monitoring queue size:#{monitoring_events.size}")
+            Params['process_vars'].set('monitoring queue', monitoring_events.size)
+            Log.info("process_vars:content data queue size:#{monitoring_events.size}")
+            Params['process_vars'].set('content data queue', local_server_content_data_queue.size)
+            #enable following line to see full list of object:count
+            #obj_array = ''
+            total_obj_count = 0
+            string_count = 0
+            file_count = 0
+            dir_count = 0
+            content_count = 0
+            index_agent_count = 0
+            indexer_patterns_count = 0
+            mutex.synchronize do
+              ObjectSpace.each_object(Class) {|obj|
+                obj_count_per_class = ObjectSpace.each_object(obj).count
+                #enable following line to see full list of object:count
+                #obj_array = "#{obj_array} * #{obj.name}:#{obj_count_per_class}"
+                total_obj_count = total_obj_count + obj_count_per_class
+              }
+              string_count = ObjectSpace.each_object(String).count
+              file_count = ObjectSpace.each_object(::FileMonitoring::FileStat).count
+              dir_count = ObjectSpace.each_object(::FileMonitoring::DirStat).count
+              content_count = ObjectSpace.each_object(::ContentData::ContentData).count
+              index_agent_count = ObjectSpace.each_object(::FileIndexing::IndexAgent).count
+              indexer_patterns_count = ObjectSpace.each_object(::FileIndexing::IndexerPatterns).count
+            end
+            #enable following line to see full list of object:count
+            #Params['process_vars'].set('Live objs full', obj_array)
+            Log.info("process_vars:Live objs cnt:#{total_obj_count}")
+            Log.info("process_vars:Live String obj cnt:#{string_count}")
+            Log.info("process_vars:Live File obj cnt:#{file_count}")
+            Log.info("process_vars:Live Dir obj cnt:#{dir_count}")
+            Log.info("process_vars:Live Content data obj cnt:#{content_count}")
+            Log.info("process_vars:Live index agent obj cnt:#{index_agent_count}")
+            Log.info("process_vars:Live indexer patterns obj cnt:#{indexer_patterns_count}")
+            Params['process_vars'].set('Live objs cnt', total_obj_count)
+            Params['process_vars'].set('Live String obj cnt', string_count)
+            Params['process_vars'].set('Live File obj cnt', file_count)
+            Params['process_vars'].set('Live Dir obj cnt', dir_count)
+            Params['process_vars'].set('Live Content data obj cnt', content_count)
+            Params['process_vars'].set('Live index agent obj cnt', index_agent_count)
+            Params['process_vars'].set('Live indexer patterns obj cnt', indexer_patterns_count)
+            last_data_flush_time = Time.now
+          end
+          sleep(0.3)
+        end
+      end
     end

     all_threads.each { |t| t.abort_on_exception = true }
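Note: when monitoring is enabled, the process-vars thread above periodically samples queue sizes and live-object counts via ObjectSpace and publishes them through `Params['process_vars']`. Below is a minimal standalone sketch of that sampling idea; it prints to stdout instead of using the gem's Params/Log, the class list and interval are illustrative, and the `ENABLE_MONITORING` environment flag stands in for `Params['enable_monitoring']`.

    # Periodically sample live-object counts with ObjectSpace (illustrative only).
    def sample_live_objects
      total = 0
      ObjectSpace.each_object(Class) do |klass|
        total += ObjectSpace.each_object(klass).count
      end
      {
        'Live objs cnt'       => total,
        'Live String obj cnt' => ObjectSpace.each_object(String).count,
        'Live Array obj cnt'  => ObjectSpace.each_object(Array).count
      }
    end

    if ENV['ENABLE_MONITORING']  # stands in for Params['enable_monitoring']
      Thread.new do
        loop do
          sample_live_objects.each { |name, count| puts "process_vars:#{name}:#{count}" }
          sleep(10)  # stands in for Params['process_vars_delay']
        end
      end
    end

    # One-off sample so the sketch prints something when run directly:
    sample_live_objects.each { |name, count| puts "process_vars:#{name}:#{count}" }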
data/lib/content_server/content_server.rb
CHANGED
@@ -13,7 +13,6 @@ require 'log'
 require 'networking/tcp'
 require 'params'
 require 'process_monitoring/thread_safe_hash'
-require 'process_monitoring/monitoring'
 require 'process_monitoring/monitoring_info'

 # Content server. Monitors files, index local files, listen to backup server content,
@@ -22,13 +21,21 @@ module ContentServer
   # Content server specific flags.
   Params.integer('local_files_port', 4444, 'Remote port in backup server to copy files.')
   Params.integer('local_content_data_port', 3333, 'Listen to incoming content data requests.')
+  Params.path('tmp_path', '~/.bbfs/tmp', 'tmp path for temporary files')

   def run_content_server
     Log.info('Content server start')
     all_threads = []
+    # create general tmp dir
+    FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
+    # init tmp content data file
+    tmp_content_data_file = Params['tmp_path'] + '/content.data'

-
-
+    if Params['enable_monitoring']
+      Log.info("Initializing monitoring of process params on port:#{Params['process_monitoring_web_port']}")
+      Params['process_vars'] = ThreadSafeHash::ThreadSafeHash.new
+      Params['process_vars'].set('server_name', 'content_server')
+    end

     # # # # # # # # # # # #
     # Initialize/Start monitoring
@@ -57,9 +64,8 @@ module ContentServer
     # # # # # # # # # # # # # # # # # # # # # #
     # Initialize/Start content data comparator
     Log.debug1('Start content data comparator')
-    copy_files_events = Queue.new
     local_dynamic_content_data = ContentData::DynamicContentData.new
-    all_threads << Thread.new do
+    all_threads << Thread.new do  # TODO(kolman): Seems like redundant, check how to update dynamic directly.
       while true do
         # Note: This thread should be the only consumer of local_server_content_data_queue
         Log.debug1 'Waiting on local server content data.'
@@ -68,6 +74,23 @@ module ContentServer
       end
     end

+    # # # # # # # # # # # # # # # # # # # # # # # #
+    # Start dump local content data to file thread
+    Log.debug1('Start dump local content data to file thread')
+    all_threads << Thread.new do
+      last_data_flush_time = nil
+      while true do
+        if last_data_flush_time.nil? || last_data_flush_time + Params['data_flush_delay'] < Time.now.to_i
+          Log.info "Writing local content data to #{Params['local_content_data_path']}."
+          local_dynamic_content_data.last_content_data.to_file(tmp_content_data_file)
+          sleep(0.1)  # Added to prevent mv access issue
+          ::FileUtils.mv(tmp_content_data_file, Params['local_content_data_path'])
+          last_data_flush_time = Time.now.to_i
+        end
+        sleep(1)
+      end
+    end
+
     remote_content_client = RemoteContentServer.new(local_dynamic_content_data,
                                                     Params['local_content_data_port'])
     all_threads << remote_content_client.tcp_thread
@@ -75,16 +98,71 @@ module ContentServer
     # # # # # # # # # # # # # # # #
     # Start copying files on demand
     Log.debug1('Start copy data on demand')
+    copy_files_events = Queue.new  # TODO(kolman): Remove this initialization and merge to FileCopyServer.
     copy_server = FileCopyServer.new(copy_files_events, Params['local_files_port'])
     all_threads.concat(copy_server.run())

+    # # # # # # # # # # # # # # # # # # # # # # # #
+    # Start process vars thread
     if Params['enable_monitoring']
-
-
-
-
+      monitoring_info = MonitoringInfo::MonitoringInfo.new()
+      all_threads << Thread.new do
+        last_data_flush_time = nil
+        mutex = Mutex.new
+        while true do
+          if last_data_flush_time.nil? || last_data_flush_time + Params['process_vars_delay'] < Time.now
+            Params['process_vars'].set('time', Time.now)
+            Log.info("process_vars:monitoring queue size:#{monitoring_events.size}")
+            Params['process_vars'].set('monitoring queue', monitoring_events.size)
+            Log.info("process_vars:content data queue size:#{monitoring_events.size}")
+            Params['process_vars'].set('content data queue', local_server_content_data_queue.size)
+            Log.info("process_vars:copy files events queue size:#{copy_files_events.size}")
+            Params['process_vars'].set('copy files events queue', copy_files_events.size)
+            #enable following line to see full list of object:count
+            #obj_array = ''
+            total_obj_count = 0
+            string_count = 0
+            file_count = 0
+            dir_count = 0
+            content_count = 0
+            index_agent_count = 0
+            indexer_patterns_count = 0
+            mutex.synchronize do
+              ObjectSpace.each_object(Class) {|obj|
+                obj_count_per_class = ObjectSpace.each_object(obj).count
+                #enable following line to see full list of object:count
+                #obj_array = "#{obj_array} * #{obj.name}:#{obj_count_per_class}"
+                total_obj_count = total_obj_count + obj_count_per_class
+              }
+              string_count = ObjectSpace.each_object(String).count
+              file_count = ObjectSpace.each_object(::FileMonitoring::FileStat).count
+              dir_count = ObjectSpace.each_object(::FileMonitoring::DirStat).count
+              content_count = ObjectSpace.each_object(::ContentData::ContentData).count
+              index_agent_count = ObjectSpace.each_object(::FileIndexing::IndexAgent).count
+              indexer_patterns_count = ObjectSpace.each_object(::FileIndexing::IndexerPatterns).count
+            end
+            #enable following line to see full list of object:count
+            #Params['process_vars'].set('Live objs full', obj_array)
+            Log.info("process_vars:Live objs cnt:#{total_obj_count}")
+            Log.info("process_vars:Live String obj cnt:#{string_count}")
+            Log.info("process_vars:Live File obj cnt:#{file_count}")
+            Log.info("process_vars:Live Dir obj cnt:#{dir_count}")
+            Log.info("process_vars:Live Content data obj cnt:#{content_count}")
+            Log.info("process_vars:Live index agent obj cnt:#{index_agent_count}")
+            Log.info("process_vars:Live indexer patterns obj cnt:#{indexer_patterns_count}")
+            Params['process_vars'].set('Live objs cnt', total_obj_count)
+            Params['process_vars'].set('Live String obj cnt', string_count)
+            Params['process_vars'].set('Live File obj cnt', file_count)
+            Params['process_vars'].set('Live Dir obj cnt', dir_count)
+            Params['process_vars'].set('Live Content data obj cnt', content_count)
+            Params['process_vars'].set('Live index agent obj cnt', index_agent_count)
+            Params['process_vars'].set('Live indexer patterns obj cnt', indexer_patterns_count)
+            last_data_flush_time = Time.now
+          end
+          sleep(0.3)
+        end
+      end
     end
-
     # Finalize server threads.
     all_threads.each { |t| t.abort_on_exception = true }
     all_threads.each { |t| t.join }
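Note: in both servers the comparator thread is the sole consumer of the content-data queue; it just pushes every update into a `DynamicContentData` object that the dump thread, the remote-content server, and the backup comparator read from. Below is a small sketch of that "single consumer refreshes a shared latest-value holder" pattern, with a hypothetical `LatestValue` class standing in for `ContentData::DynamicContentData`.

    # Hypothetical stand-in for ContentData::DynamicContentData:
    # holds the most recent value behind a mutex.
    class LatestValue
      def initialize
        @mutex = Mutex.new
        @value = nil
      end

      def update(new_value)
        @mutex.synchronize { @value = new_value }
      end

      def last
        @mutex.synchronize { @value }
      end
    end

    updates_queue = Queue.new
    latest = LatestValue.new

    # Single consumer: drains the queue and refreshes the shared snapshot.
    consumer = Thread.new do
      loop { latest.update(updates_queue.pop) }
    end

    # Producers and readers elsewhere:
    updates_queue.push('content data v1')
    updates_queue.push('content data v2')
    sleep(0.1)
    puts latest.last  # => "content data v2" (once the consumer has drained the queue)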
data/lib/content_server/queue_copy.rb
CHANGED
@@ -4,6 +4,7 @@ require 'content_server/file_streamer'
 require 'file_indexing/index_agent'
 require 'log'
 require 'networking/tcp'
+require 'params'


 module ContentServer
@@ -119,8 +120,9 @@ module ContentServer
   end # class QueueCopy

   class FileCopyClient
-    def initialize(host, port, dynamic_content_data
+    def initialize(host, port, dynamic_content_data)
       @local_queue = Queue.new
+      start_process_var_monitoring
       @dynamic_content_data = dynamic_content_data
       @tcp_client = Networking::TCPClient.new(host, port, method(:handle_message))
       @file_receiver = FileReceiver.new(method(:done_copy),
@@ -132,10 +134,25 @@ module ContentServer
         end
       end
       @local_thread.abort_on_exception = true
-      @process_variables = process_variables
       Log.debug3("initialize FileCopyClient host:#{host} port:#{port}")
     end

+    def start_process_var_monitoring
+      if Params['enable_monitoring']
+        @process_var_thread = Thread.new do
+          last_data_flush_time = nil
+          while true do
+            if last_data_flush_time.nil? || last_data_flush_time + Params['process_vars_delay'] < Time.now
+              Log.info("process_vars:FileCopyClient queue size:#{@local_queue.size}")
+              Params['process_vars'].set('File Copy Client queue', @local_queue.size)
+              last_data_flush_time = Time.now
+            end
+            sleep(0.3)
+          end
+        end
+      end
+    end
+
     def threads
       ret = [@local_thread]
       ret << @tcp_client.tcp_thread if @tcp_client != nil
@@ -155,14 +172,10 @@ module ContentServer
     end

     def done_copy(local_file_checksum, local_path)
-
+      Params['process_vars'].inc('num_files_received')
       Log.debug1("Done copy file: #{local_path}, #{local_file_checksum}")
     end

-    def add_process_variables_info()
-      @process_variables.inc('num_files_received')
-    end
-
     def handle_message(message)
       Log.debug3('QueueFileReceiver handle message')
       @local_queue.push(message)
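Note: `FileCopyClient#initialize` now takes the shared dynamic content data instead of a separate process-variables hash, and spawns its own queue-size monitoring thread only when monitoring is enabled. The sketch below shows that shape with hypothetical names (`CONFIG`, `SimpleCopyClient`); it is not the gem's actual class.

    # Hypothetical config flag and client, illustrating the constructor shape only.
    CONFIG = { enable_monitoring: true, process_vars_delay: 10 }

    class SimpleCopyClient
      def initialize(host, port, dynamic_content_data)
        @host = host
        @port = port
        @dynamic_content_data = dynamic_content_data
        @local_queue = Queue.new
        start_process_var_monitoring
      end

      def handle_message(message)
        @local_queue.push(message)
      end

      private

      # Only spawn the gauge-reporting thread when monitoring is turned on.
      def start_process_var_monitoring
        return unless CONFIG[:enable_monitoring]
        @process_var_thread = Thread.new do
          loop do
            puts "process_vars:FileCopyClient queue size:#{@local_queue.size}"
            sleep(CONFIG[:process_vars_delay])
          end
        end
      end
    end

    client = SimpleCopyClient.new('localhost', 4444, {})
    client.handle_message('copy request')
    sleep(0.2)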
data/lib/content_server/queue_indexer.rb
CHANGED
@@ -1,10 +1,10 @@
 require 'file_indexing/index_agent'
 require 'file_indexing/indexer_patterns'
 require 'log'
+require 'params'

 module ContentServer

-  Params.integer('data_flush_delay', 300, 'Number of seconds to delay content data file flush to disk.')

   # Simple indexer, gets inputs events (files to index) and outputs
   # content data updates into output queue.
@@ -14,7 +14,6 @@ module ContentServer
       @input_queue = input_queue
       @output_queue = output_queue
       @content_data_path = content_data_path
-      @last_data_flush_time = nil
     end

     def run
@@ -40,14 +39,14 @@ module ContentServer
       thread = Thread.new do
         while true do
           Log.debug1 'Waiting on index input queue.'
-          state, is_dir, path = @input_queue.pop
-          Log.debug1 "event:
+          (state, is_dir, path) = @input_queue.pop
+          Log.debug1 "index event: state:#{state}, dir?#{is_dir}, path:#{path}."

           # index files and add to copy queue
           # delete directory with it's sub files
           # delete file
           if state == FileMonitoring::FileStatEnum::STABLE && !is_dir
-            Log.
+            Log.debug1 "Indexing content #{path}."
             index_agent = FileIndexing::IndexAgent.new
             indexer_patterns = FileIndexing::IndexerPatterns.new
             indexer_patterns.add_pattern(path)
@@ -56,6 +55,9 @@ module ContentServer
             if !index_agent.failed_files.empty?
               Log.debug1("indexed content #{index_agent.indexed_content}.")
               server_content_data.merge index_agent.indexed_content
+              if Params['enable_monitoring']
+                Params['process_vars'].inc('indexed_files')
+              end
           elsif ((state == FileMonitoring::FileStatEnum::NON_EXISTING ||
               state == FileMonitoring::FileStatEnum::CHANGED) && !is_dir)
             # If file content changed, we should remove old instance.
@@ -87,11 +89,7 @@ module ContentServer
           else
             Log.debug1("This case should not be handled: #{state}, #{is_dir}, #{path}.")
           end
-
-          Log.debug1 "Writing server content data to #{@content_data_path}."
-          server_content_data.to_file(@content_data_path)
-          @last_data_flush_time = Time.now.to_i
-          end
+
           Log.debug1 'Adding server content data to queue.'
           @output_queue.push(ContentData::ContentData.new(server_content_data))
         end # while true do
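Note: the indexer now destructures `(state, is_dir, path)` events from its input queue and, when monitoring is enabled, bumps an `indexed_files` counter; the periodic flush to disk moved out of this class into the servers' dump threads. Below is a small sketch of that queue-driven event loop, with hypothetical states in place of `FileMonitoring::FileStatEnum`.

    # Pop (state, is_dir, path) tuples from a queue and dispatch on the state.
    # States and handlers are hypothetical placeholders.
    STABLE       = :stable
    NON_EXISTING = :non_existing

    events = Queue.new
    indexed_files = 0

    indexer = Thread.new do
      loop do
        (state, is_dir, path) = events.pop
        puts "index event: state:#{state}, dir?#{is_dir}, path:#{path}."
        if state == STABLE && !is_dir
          indexed_files += 1  # stands in for Params['process_vars'].inc('indexed_files')
        elsif state == NON_EXISTING && !is_dir
          puts "Removing #{path} from index."
        end
      end
    end

    events.push([STABLE, false, '/tmp/a.txt'])
    events.push([NON_EXISTING, false, '/tmp/b.txt'])
    sleep(0.2)
    puts "indexed_files=#{indexed_files}"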
data/lib/content_server.rb
CHANGED
@@ -7,7 +7,12 @@ module ContentServer

   # Monitoring
   Params.boolean('enable_monitoring', false, 'Whether to enable process monitoring or not.')
+  Params.global('process_vars', nil, 'container of process variables reflected to http')
+  Params.integer('process_vars_delay', 10, 'pulling time of variables')

   # Handling thread exceptions.
   Params.boolean('abort_on_exception', true, 'Any exception in any thread will abort the run.')
+
+  Params.integer('data_flush_delay', 300, 'Number of seconds to delay content data file flush to disk.')
+
 end # module ContentServer
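Note: `data_flush_delay` moves here from queue_indexer.rb, alongside the new `process_vars` and `process_vars_delay` definitions, so the monitoring and flush knobs are declared in one place and looked up by name at runtime. The sketch below is a hypothetical mini-registry illustrating only that declare-then-look-up usage shape; it is not the `params` gem's implementation.

    # Hypothetical mini-registry: declare named parameters with defaults,
    # read (or override) them by name later.
    module MiniParams
      @defs = {}

      def self.integer(name, default, desc)
        @defs[name] = default
      end

      def self.boolean(name, default, desc)
        @defs[name] = default
      end

      def self.[](name)
        @defs.fetch(name)
      end

      def self.[]=(name, value)
        @defs[name] = value
      end
    end

    MiniParams.boolean('enable_monitoring', false, 'Whether to enable process monitoring or not.')
    MiniParams.integer('process_vars_delay', 10, 'pulling time of variables')
    MiniParams.integer('data_flush_delay', 300, 'Number of seconds to delay content data file flush to disk.')

    puts MiniParams['data_flush_delay']   # => 300
    MiniParams['enable_monitoring'] = true
    puts MiniParams['enable_monitoring']  # => true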
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: content_server
 version: !ruby/object:Gem::Version
-  version: 1.0.
+  version: 1.0.3
 prerelease:
 platform: ruby
 authors:
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-
+date: 2013-06-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: content_data
@@ -50,7 +50,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.3
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -58,7 +58,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.3
 - !ruby/object:Gem::Dependency
   name: log
   requirement: !ruby/object:Gem::Requirement
@@ -66,7 +66,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -74,7 +74,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
 - !ruby/object:Gem::Dependency
   name: networking
   requirement: !ruby/object:Gem::Requirement
@@ -98,7 +98,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -106,7 +106,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
 - !ruby/object:Gem::Dependency
   name: process_monitoring
   requirement: !ruby/object:Gem::Requirement
@@ -114,7 +114,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -122,7 +122,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -194,7 +194,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.3
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -202,7 +202,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.3
 - !ruby/object:Gem::Dependency
   name: log
   requirement: !ruby/object:Gem::Requirement
@@ -210,7 +210,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -218,7 +218,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
 - !ruby/object:Gem::Dependency
   name: networking
   requirement: !ruby/object:Gem::Requirement
@@ -242,7 +242,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -250,7 +250,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
 - !ruby/object:Gem::Dependency
   name: process_monitoring
   requirement: !ruby/object:Gem::Requirement
@@ -258,7 +258,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -266,7 +266,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.2
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement