content_server 1.0.3 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -25,12 +25,23 @@ rescue SystemExit, SignalException => exc
  # TODO (genadyp) do we need to trap signals by types?
  puts("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
  "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
+ puts("Force writing local content data to #{Params['local_content_data_path']}.")
+ ContentServer::local_dynamic_content_data.last_content_data.to_file(ContentServer::tmp_content_data_file)
+ sleep(0.1) # Added to prevent mv access issue
+ ::FileUtils.mv(ContentServer::tmp_content_data_file, Params['local_content_data_path'])
+
+
  Log.error("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
  "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
  Log.flush
  exit
  rescue Exception => exc
  puts("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
+ puts("Force writing local content data to #{Params['local_content_data_path']}.")
+ ContentServer::local_dynamic_content_data.last_content_data.to_file(ContentServer::tmp_content_data_file)
+ sleep(0.1) # Added to prevent mv access issue
+ ::FileUtils.mv(ContentServer::tmp_content_data_file, Params['local_content_data_path'])
+
  Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
  if retries > 0
  Log.debug1("Restarting (retries:#{retries}).")
@@ -20,14 +20,25 @@ begin
  RunInBackground.run { ContentServer.run_content_server }
  rescue SystemExit, SignalException => exc
  # TODO (genadyp) do we need to trap signals by types?
+ #force dump of content data to file
  puts("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
  "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
+ puts("Force writing local content data to #{Params['local_content_data_path']}.")
+ ContentServer::local_dynamic_content_data.last_content_data.to_file(ContentServer::tmp_content_data_file)
+ sleep(0.1) # Added to prevent mv access issue
+ ::FileUtils.mv(ContentServer::tmp_content_data_file, Params['local_content_data_path'])
+
  Log.error("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
  "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
  Log.flush
  exit
  rescue Exception => exc
  puts("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
+ puts("Force writing local content data to #{Params['local_content_data_path']}.")
+ ContentServer::local_dynamic_content_data.last_content_data.to_file(ContentServer::tmp_content_data_file)
+ sleep(0.1) # Added to prevent mv access issue
+ ::FileUtils.mv(ContentServer::tmp_content_data_file, Params['local_content_data_path'])
+
  Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n#{exc.backtrace.join("\n")}")
  if retries > 0
  Log.debug1("Restarting (retries:#{retries}).")
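Both launcher scripts above gain the same shutdown path: on SystemExit/SignalException, or any other Exception, the in-memory content data is serialized to a temporary file and then moved over Params['local_content_data_path']. A minimal standalone sketch of that write-then-rename idea, assuming only an object with a to_file method (the helper name below is illustrative, not part of the gem):

    require 'fileutils'

    # Write a full snapshot to a side file, then rename it over the real path,
    # so readers never observe a half-written content data file.
    def flush_content_data(content_data, tmp_path, final_path)
      content_data.to_file(tmp_path)
      sleep(0.1)  # same workaround as the diff: give the handle a moment before mv
      FileUtils.mv(tmp_path, final_path)
    end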
@@ -7,8 +7,7 @@ module ContentServer

  # Monitoring
  Params.boolean('enable_monitoring', false, 'Whether to enable process monitoring or not.')
- Params.global('process_vars', nil, 'container of process variables reflected to http')
- Params.integer('process_vars_delay', 10, 'pulling time of variables')
+ Params.integer('process_vars_delay', 3, 'pulling time of variables')

  # Handling thread exceptions.
  Params.boolean('abort_on_exception', true, 'Any exception in any thread will abort the run.')
@@ -9,10 +9,10 @@ require 'content_server/queue_copy'
  require 'content_server/remote_content'
  require 'file_indexing'
  require 'file_monitoring'
+ require 'content_server/globals'
  require 'log'
  require 'networking/tcp'
  require 'params'
- require 'process_monitoring/thread_safe_hash'
  require 'process_monitoring/monitoring'
  require 'process_monitoring/monitoring_info'

@@ -23,8 +23,18 @@ module ContentServer
  Params.string('content_server_hostname', nil, 'IP or DNS of backup server.')
  Params.integer('content_server_data_port', 3333, 'Port to copy content data from.')
  Params.integer('content_server_files_port', 4444, 'Listening port in backup server for files')
-
  Params.integer('backup_check_delay', 5, 'Delay in seconds between two content vs backup checks.')
+ Params.complex('backup_destination_folder',
+ [{'path'=>File.expand_path(''), 'scan_period'=>300, 'stable_state'=>1}],
+ 'Backup server destination folder, default is the relative local folder.')
+
+ def self.tmp_content_data_file
+ @@tmp_content_data_file
+ end
+
+ def self.local_dynamic_content_data
+ @@local_dynamic_content_data
+ end

  def run_backup_server
  Log.info('Start backup server')
@@ -34,22 +44,34 @@ module ContentServer
  # create general tmp dir
  FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
  # init tmp content data file
- tmp_content_data_file = Params['tmp_path'] + '/backup.data'
+ @@tmp_content_data_file = File.join(Params['tmp_path'], 'backup.data')

  if Params['enable_monitoring']
  Log.info("Initializing monitoring of process params on port:#{Params['process_monitoring_web_port']}")
- Params['process_vars'] = ThreadSafeHash::ThreadSafeHash.new
- Params['process_vars'].set('server_name', 'backup_server')
+ ::ContentServer::Globals.process_vars.set('server_name', 'backup_server')
  end

  # # # # # # # # # # # #
- # Initialize/Start monitoring
+ # Initialize/start monitoring and destination folder
+ Params['backup_destination_folder'][0]['path']=File.expand_path(Params['backup_destination_folder'][0]['path'])
+ Log.info("backup_destination_folder is:#{Params['backup_destination_folder'][0]['path']}")
+ #adding destination folder to monitoring paths
+ Params['monitoring_paths'] << Params['backup_destination_folder'][0]
  Log.info('Start monitoring following directories:')
- Params['monitoring_paths'].each {|path|
+ Params['monitoring_paths'].each { |path|
  Log.info(" Path:'#{path['path']}'")
  }
+
+ # Read here for initial content data that exist from previous system run
+ initial_content_data = ContentData::ContentData.new
+ content_data_path = Params['local_content_data_path']
+ initial_content_data.from_file(content_data_path) if File.exists?(content_data_path)
+ # Update local dynamic content with existing content
+ @@local_dynamic_content_data = ContentData::DynamicContentData.new
+ @@local_dynamic_content_data.update(initial_content_data)
+
  monitoring_events = Queue.new
- fm = FileMonitoring::FileMonitoring.new
+ fm = FileMonitoring::FileMonitoring.new(@@local_dynamic_content_data)
  fm.set_event_queue(monitoring_events)
  # Start monitoring and writing changes to queue
  all_threads << Thread.new do
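The backup server now seeds its dynamic content data from the previous run before monitoring starts. A condensed sketch of that warm-start step, using the same classes that appear in the hunk above:

    # Reload whatever was indexed in the previous run, if the file exists.
    initial_content_data = ContentData::ContentData.new
    content_data_path = Params['local_content_data_path']
    initial_content_data.from_file(content_data_path) if File.exists?(content_data_path)

    # Seed the shared dynamic view; monitoring and indexing start from known state.
    local_dynamic_content_data = ContentData::DynamicContentData.new
    local_dynamic_content_data.update(initial_content_data)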
@@ -59,30 +81,10 @@ module ContentServer
  # # # # # # # # # # # # # #
  # Initialize/Start local indexer
  Log.debug1('Start indexer')
- local_server_content_data_queue = Queue.new
- queue_indexer = QueueIndexer.new(monitoring_events,
- local_server_content_data_queue,
- Params['local_content_data_path'])
+ queue_indexer = QueueIndexer.new(monitoring_events, @@local_dynamic_content_data)
  # Start indexing on demand and write changes to queue
  all_threads << queue_indexer.run

- # # # # # # # # # # # # # # # # # # # # # # # # # # #
- # Initialize/Start backup server content data sender
- Log.debug1('Start backup server content data sender')
- local_dynamic_content_data = ContentData::DynamicContentData.new
- #content_data_sender = ContentDataSender.new(
- # Params['remote_server'],
- # Params['remote_listening_port'])
- # Start sending to backup server
- all_threads << Thread.new do
- while true do
- Log.debug1 'Waiting on local server content data queue.'
- cd = local_server_content_data_queue.pop
- # content_data_sender.send_content_data(cd)
- local_dynamic_content_data.update(cd)
- end
- end
-
  # # # # # # # # # # # # # # # # # # # # # # # #
  # Start dump local content data to file thread
  Log.debug1('Start dump local content data to file thread')
@@ -91,26 +93,24 @@ module ContentServer
  while true do
  if last_data_flush_time.nil? || last_data_flush_time + Params['data_flush_delay'] < Time.now.to_i
  Log.info "Writing local content data to #{Params['local_content_data_path']}."
- local_dynamic_content_data.last_content_data.to_file(tmp_content_data_file)
+ @@local_dynamic_content_data.last_content_data.to_file(@@tmp_content_data_file)
  sleep(0.1) # Added to prevent mv access issue
- ::FileUtils.mv(tmp_content_data_file, Params['local_content_data_path'])
+ ::FileUtils.mv(@@tmp_content_data_file, Params['local_content_data_path'])
  last_data_flush_time = Time.now.to_i
  end
  sleep(1)
  end
  end
- Params['backup_destination_folder'] = File.expand_path(Params['monitoring_paths'][0]['path'])
- Log.info("backup_destination_folder is:#{Params['backup_destination_folder']}")
  content_server_dynamic_content_data = ContentData::DynamicContentData.new
  remote_content = ContentServer::RemoteContentClient.new(content_server_dynamic_content_data,
  Params['content_server_hostname'],
  Params['content_server_data_port'],
- Params['backup_destination_folder'])
+ Params['backup_destination_folder'][0]['path'])
  all_threads.concat(remote_content.run())

  file_copy_client = FileCopyClient.new(Params['content_server_hostname'],
  Params['content_server_files_port'],
- local_dynamic_content_data)
+ @@local_dynamic_content_data)
  all_threads.concat(file_copy_client.threads)

  # Each
@@ -118,16 +118,17 @@ module ContentServer
  all_threads << Thread.new do
  loop do
  sleep(Params['backup_check_delay'])
- local_cd = local_dynamic_content_data.last_content_data()
+ local_cd = @@local_dynamic_content_data.last_content_data()
  remote_cd = content_server_dynamic_content_data.last_content_data()
- diff = ContentData::ContentData.remove(local_cd, remote_cd)
- #file_copy_client.request_copy(diff) unless diff.empty?
- if !diff.empty?
- Log.info('Start sync check. Backup and remote contents need a sync:')
- Log.debug2("Backup content:\n#{local_cd}")
- Log.debug2("Remote content:\n#{remote_cd}")
+ diff = ContentData.remove(local_cd, remote_cd)
+
+ Log.debug2("Backup content:\n#{local_cd}")
+ Log.debug2("Remote content:\n#{remote_cd}")
+ Log.debug2("Diff content:\n#{diff}")
+
+ unless diff.nil? || diff.empty?
+ Log.info('Start sync check. Backup and remote contents need a sync, requesting copy files:')
  Log.info("Missing contents:\n#{diff}")
- Log.info('Requesting copy files')
  file_copy_client.request_copy(diff)
  else
  Log.info("Start sync check. Local and remote contents are equal. No sync required.")
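The sync-check thread above compares the backup's own content data with the remote snapshot and only requests copies for the difference. A condensed sketch (names as in the diff; the exact semantics of ContentData.remove are assumed from the surrounding log messages, i.e. it yields contents present remotely but still missing locally):

    loop do
      sleep(Params['backup_check_delay'])
      local_cd  = local_dynamic_content_data.last_content_data
      remote_cd = content_server_dynamic_content_data.last_content_data
      missing   = ContentData.remove(local_cd, remote_cd)  # assumed: remote minus local
      file_copy_client.request_copy(missing) unless missing.nil? || missing.empty?
    end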
@@ -140,57 +141,40 @@ module ContentServer
  if Params['enable_monitoring']
  monitoring_info = MonitoringInfo::MonitoringInfo.new()
  all_threads << Thread.new do
- last_data_flush_time = nil
  mutex = Mutex.new
  while true do
- if last_data_flush_time.nil? || last_data_flush_time + Params['process_vars_delay'] < Time.now
- Params['process_vars'].set('time', Time.now)
- Log.info("process_vars:monitoring queue size:#{monitoring_events.size}")
- Params['process_vars'].set('monitoring queue', monitoring_events.size)
- Log.info("process_vars:content data queue size:#{monitoring_events.size}")
- Params['process_vars'].set('content data queue', local_server_content_data_queue.size)
- #enable following line to see full list of object:count
- #obj_array = ''
- total_obj_count = 0
- string_count = 0
- file_count = 0
- dir_count = 0
- content_count = 0
- index_agent_count = 0
- indexer_patterns_count = 0
- mutex.synchronize do
- ObjectSpace.each_object(Class) {|obj|
- obj_count_per_class = ObjectSpace.each_object(obj).count
- #enable following line to see full list of object:count
- #obj_array = "#{obj_array} * #{obj.name}:#{obj_count_per_class}"
- total_obj_count = total_obj_count + obj_count_per_class
- }
- string_count = ObjectSpace.each_object(String).count
- file_count = ObjectSpace.each_object(::FileMonitoring::FileStat).count
- dir_count = ObjectSpace.each_object(::FileMonitoring::DirStat).count
- content_count = ObjectSpace.each_object(::ContentData::ContentData).count
- index_agent_count = ObjectSpace.each_object(::FileIndexing::IndexAgent).count
- indexer_patterns_count = ObjectSpace.each_object(::FileIndexing::IndexerPatterns).count
- end
- #enable following line to see full list of object:count
- #Params['process_vars'].set('Live objs full', obj_array)
- Log.info("process_vars:Live objs cnt:#{total_obj_count}")
- Log.info("process_vars:Live String obj cnt:#{string_count}")
- Log.info("process_vars:Live File obj cnt:#{file_count}")
- Log.info("process_vars:Live Dir obj cnt:#{dir_count}")
- Log.info("process_vars:Live Content data obj cnt:#{content_count}")
- Log.info("process_vars:Live index agent obj cnt:#{index_agent_count}")
- Log.info("process_vars:Live indexer patterns obj cnt:#{indexer_patterns_count}")
- Params['process_vars'].set('Live objs cnt', total_obj_count)
- Params['process_vars'].set('Live String obj cnt', string_count)
- Params['process_vars'].set('Live File obj cnt', file_count)
- Params['process_vars'].set('Live Dir obj cnt', dir_count)
- Params['process_vars'].set('Live Content data obj cnt', content_count)
- Params['process_vars'].set('Live index agent obj cnt', index_agent_count)
- Params['process_vars'].set('Live indexer patterns obj cnt', indexer_patterns_count)
- last_data_flush_time = Time.now
+ sleep(Params['process_vars_delay'])
+ ::ContentServer::Globals.process_vars.set('time', Time.now)
+ Log.debug3("process_vars:monitoring queue size:#{monitoring_events.size}")
+ ::ContentServer::Globals.process_vars.set('monitoring queue', monitoring_events.size)
+ #enable following line to see full list of object:count
+ #obj_array = ''
+ total_obj_count = 0
+ string_count = 0
+ file_count = 0
+ dir_count = 0
+ content_count = 0
+ mutex.synchronize do
+ ObjectSpace.each_object(Class) {|obj|
+ obj_count_per_class = ObjectSpace.each_object(obj).count
+ #enable following line to see full list of object:count
+ #obj_array = "#{obj_array} * #{obj.name}:#{obj_count_per_class}"
+ total_obj_count = total_obj_count + obj_count_per_class
+ }
+ string_count = ObjectSpace.each_object(String).count
+ file_count = ObjectSpace.each_object(::FileMonitoring::FileStat).count
+ dir_count = ObjectSpace.each_object(::FileMonitoring::DirStat).count
+ content_count = ObjectSpace.each_object(::ContentData::ContentData).count
  end
- sleep(0.3)
+ #enable following line to see full list of object:count
+ #::ContentServer::Globals.process_vars.set('Live objs full', obj_array)
+ Log.debug3("process_vars:Live objs cnt:#{total_obj_count}")
+ ::ContentServer::Globals.process_vars.set('Live objs cnt', total_obj_count)
+ Log.debug3("process_vars:Live String obj cnt:#{string_count}")
+ Log.debug3("process_vars:Live File obj cnt:#{file_count}")
+ Log.debug3("process_vars:Live Dir obj cnt:#{dir_count}")
+ Log.debug3("process_vars:Live Content data obj cnt:#{content_count}")
+ ::ContentServer::Globals.process_vars.set('Live String obj cnt', string_count)
  end
  end
  end
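The monitoring thread above also changes shape: instead of polling every 0.3 s and tracking its own last-flush timestamp, it sleeps Params['process_vars_delay'] (now 3 s by default) once per cycle and publishes its gauges through the shared ::ContentServer::Globals.process_vars hash rather than Params['process_vars']. The reporting reduces to this pattern (variable names as in the hunk):

    loop do
      sleep(Params['process_vars_delay'])
      ::ContentServer::Globals.process_vars.set('time', Time.now)
      ::ContentServer::Globals.process_vars.set('monitoring queue', monitoring_events.size)
      # ObjectSpace counters are collected under the mutex and published the same way.
    end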
@@ -9,32 +9,41 @@ require 'content_server/queue_copy'
  require 'content_server/remote_content'
  require 'file_indexing'
  require 'file_monitoring'
+ require 'content_server/globals'
  require 'log'
  require 'networking/tcp'
  require 'params'
- require 'process_monitoring/thread_safe_hash'
  require 'process_monitoring/monitoring_info'

  # Content server. Monitors files, index local files, listen to backup server content,
  # copy changes and new files to backup server.
  module ContentServer
  # Content server specific flags.
- Params.integer('local_files_port', 4444, 'Remote port in backup server to copy files.')
- Params.integer('local_content_data_port', 3333, 'Listen to incoming content data requests.')
+ Params.integer('local_files_port', 4444, 'Remote port in backup server to copy files')
+ Params.integer('local_content_data_port', 3333, 'Listen to incoming content data requests')
+ Params.string('local_server_name', `hostname`.strip, 'local server name')
  Params.path('tmp_path', '~/.bbfs/tmp', 'tmp path for temporary files')

+ def self.tmp_content_data_file
+ @@tmp_content_data_file
+ end
+
+ def self.local_dynamic_content_data
+ @@local_dynamic_content_data
+ end
+
  def run_content_server
  Log.info('Content server start')
  all_threads = []
+
  # create general tmp dir
  FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
  # init tmp content data file
- tmp_content_data_file = Params['tmp_path'] + '/content.data'
+ @@tmp_content_data_file = File.join(Params['tmp_path'], 'content.data')

  if Params['enable_monitoring']
  Log.info("Initializing monitoring of process params on port:#{Params['process_monitoring_web_port']}")
- Params['process_vars'] = ThreadSafeHash::ThreadSafeHash.new
- Params['process_vars'].set('server_name', 'content_server')
+ ::ContentServer::Globals.process_vars.set('server_name', 'content_server')
  end

  # # # # # # # # # # # #
@@ -44,7 +53,17 @@ module ContentServer
  Log.info(" Path:'#{path['path']}'")
  }
  monitoring_events = Queue.new
- fm = FileMonitoring::FileMonitoring.new
+
+ # Read here for initial content data that exist from previous system run
+ initial_content_data = ContentData::ContentData.new
+ content_data_path = Params['local_content_data_path']
+ initial_content_data.from_file(content_data_path) if File.exists?(content_data_path)
+ # Update local dynamic content with existing content
+ @@local_dynamic_content_data = ContentData::DynamicContentData.new
+ @@local_dynamic_content_data.update(initial_content_data)
+
+ #Start files monitor taking into consideration existing content data
+ fm = FileMonitoring::FileMonitoring.new(@@local_dynamic_content_data)
  fm.set_event_queue(monitoring_events)
  # Start monitoring and writing changes to queue
  all_threads << Thread.new do
@@ -54,26 +73,10 @@ module ContentServer
  # # # # # # # # # # # # # #
  # Initialize/Start local indexer
  Log.debug1('Start indexer')
- local_server_content_data_queue = Queue.new
- queue_indexer = QueueIndexer.new(monitoring_events,
- local_server_content_data_queue,
- Params['local_content_data_path'])
+ queue_indexer = QueueIndexer.new(monitoring_events, @@local_dynamic_content_data)
  # Start indexing on demand and write changes to queue
  all_threads << queue_indexer.run

- # # # # # # # # # # # # # # # # # # # # # #
- # Initialize/Start content data comparator
- Log.debug1('Start content data comparator')
- local_dynamic_content_data = ContentData::DynamicContentData.new
- all_threads << Thread.new do # TODO(kolman): Seems like redundant, check how to update dynamic directly.
- while true do
- # Note: This thread should be the only consumer of local_server_content_data_queue
- Log.debug1 'Waiting on local server content data.'
- local_server_content_data = local_server_content_data_queue.pop
- local_dynamic_content_data.update(local_server_content_data)
- end
- end
-
  # # # # # # # # # # # # # # # # # # # # # # # #
  # Start dump local content data to file thread
  Log.debug1('Start dump local content data to file thread')
@@ -82,16 +85,16 @@ module ContentServer
  while true do
  if last_data_flush_time.nil? || last_data_flush_time + Params['data_flush_delay'] < Time.now.to_i
  Log.info "Writing local content data to #{Params['local_content_data_path']}."
- local_dynamic_content_data.last_content_data.to_file(tmp_content_data_file)
+ @@local_dynamic_content_data.last_content_data.to_file(@@tmp_content_data_file)
  sleep(0.1) # Added to prevent mv access issue
- ::FileUtils.mv(tmp_content_data_file, Params['local_content_data_path'])
+ ::FileUtils.mv(@@tmp_content_data_file, Params['local_content_data_path'])
  last_data_flush_time = Time.now.to_i
  end
  sleep(1)
  end
  end

- remote_content_client = RemoteContentServer.new(local_dynamic_content_data,
+ remote_content_client = RemoteContentServer.new(@@local_dynamic_content_data,
  Params['local_content_data_port'])
  all_threads << remote_content_client.tcp_thread

@@ -110,56 +113,43 @@ module ContentServer
  last_data_flush_time = nil
  mutex = Mutex.new
  while true do
- if last_data_flush_time.nil? || last_data_flush_time + Params['process_vars_delay'] < Time.now
- Params['process_vars'].set('time', Time.now)
- Log.info("process_vars:monitoring queue size:#{monitoring_events.size}")
- Params['process_vars'].set('monitoring queue', monitoring_events.size)
- Log.info("process_vars:content data queue size:#{monitoring_events.size}")
- Params['process_vars'].set('content data queue', local_server_content_data_queue.size)
- Log.info("process_vars:copy files events queue size:#{copy_files_events.size}")
- Params['process_vars'].set('copy files events queue', copy_files_events.size)
- #enable following line to see full list of object:count
- #obj_array = ''
- total_obj_count = 0
- string_count = 0
- file_count = 0
- dir_count = 0
- content_count = 0
- index_agent_count = 0
- indexer_patterns_count = 0
- mutex.synchronize do
- ObjectSpace.each_object(Class) {|obj|
- obj_count_per_class = ObjectSpace.each_object(obj).count
- #enable following line to see full list of object:count
- #obj_array = "#{obj_array} * #{obj.name}:#{obj_count_per_class}"
- total_obj_count = total_obj_count + obj_count_per_class
- }
- string_count = ObjectSpace.each_object(String).count
- file_count = ObjectSpace.each_object(::FileMonitoring::FileStat).count
- dir_count = ObjectSpace.each_object(::FileMonitoring::DirStat).count
- content_count = ObjectSpace.each_object(::ContentData::ContentData).count
- index_agent_count = ObjectSpace.each_object(::FileIndexing::IndexAgent).count
- indexer_patterns_count = ObjectSpace.each_object(::FileIndexing::IndexerPatterns).count
- end
- #enable following line to see full list of object:count
- #Params['process_vars'].set('Live objs full', obj_array)
- Log.info("process_vars:Live objs cnt:#{total_obj_count}")
- Log.info("process_vars:Live String obj cnt:#{string_count}")
- Log.info("process_vars:Live File obj cnt:#{file_count}")
- Log.info("process_vars:Live Dir obj cnt:#{dir_count}")
- Log.info("process_vars:Live Content data obj cnt:#{content_count}")
- Log.info("process_vars:Live index agent obj cnt:#{index_agent_count}")
- Log.info("process_vars:Live indexer patterns obj cnt:#{indexer_patterns_count}")
- Params['process_vars'].set('Live objs cnt', total_obj_count)
- Params['process_vars'].set('Live String obj cnt', string_count)
- Params['process_vars'].set('Live File obj cnt', file_count)
- Params['process_vars'].set('Live Dir obj cnt', dir_count)
- Params['process_vars'].set('Live Content data obj cnt', content_count)
- Params['process_vars'].set('Live index agent obj cnt', index_agent_count)
- Params['process_vars'].set('Live indexer patterns obj cnt', indexer_patterns_count)
- last_data_flush_time = Time.now
+ sleep(Params['process_vars_delay'])
+ ::ContentServer::Globals.process_vars.set('time', Time.now)
+ Log.debug3("process_vars:monitoring queue size:#{monitoring_events.size}")
+ ::ContentServer::Globals.process_vars.set('monitoring queue', monitoring_events.size)
+ Log.debug3("process_vars:copy files events queue size:#{copy_files_events.size}")
+ ::ContentServer::Globals.process_vars.set('copy files events queue', copy_files_events.size)
+ #enable following line to see full list of object:count
+ #obj_array = ''
+ total_obj_count = 0
+ string_count = 0
+ file_count = 0
+ dir_count = 0
+ content_count = 0
+ mutex.synchronize do
+ ObjectSpace.each_object(Class) {|obj|
+ obj_count_per_class = ObjectSpace.each_object(obj).count
+ #enable following line to see full list of object:count
+ #obj_array = "#{obj_array} * #{obj.name}:#{obj_count_per_class}"
+ total_obj_count = total_obj_count + obj_count_per_class
+ }
+ string_count = ObjectSpace.each_object(String).count
+ file_count = ObjectSpace.each_object(::FileMonitoring::FileStat).count
+ dir_count = ObjectSpace.each_object(::FileMonitoring::DirStat).count
+ content_count = ObjectSpace.each_object(::ContentData::ContentData).count
  end
- sleep(0.3)
+ #enable following line to see full list of object:count
+ #process_vars.set('Live objs full', obj_array)
+ Log.debug3("process_vars:Live objs cnt:#{total_obj_count}")
+ Log.debug3("process_vars:Live String obj cnt:#{string_count}")
+ Log.debug3("process_vars:Live File obj cnt:#{file_count}")
+ Log.debug3("process_vars:Live Dir obj cnt:#{dir_count}")
+ Log.debug3("process_vars:Live Content data obj cnt:#{content_count}")
+ ::ContentServer::Globals.process_vars.set('Live objs cnt', total_obj_count)
+ ::ContentServer::Globals.process_vars.set('Live String obj cnt', string_count)
+ ::ContentServer::Globals.process_vars.set('Live File obj cnt', file_count)
+ ::ContentServer::Globals.process_vars.set('Live Dir obj cnt', dir_count)
+ ::ContentServer::Globals.process_vars.set('Live Content data obj cnt', content_count)
  end
  end
  end
@@ -2,6 +2,7 @@ require 'tempfile'
  require 'thread'

  require 'file_indexing/index_agent'
+ require 'content_server/globals'
  require 'log'
  require 'params'

@@ -12,9 +13,6 @@ module ContentServer
  'Max number of content bytes to send in one chunk.')
  Params.integer('file_streaming_timeout', 5*60,
  'If no action is taken on a file streamer, abort copy.')
- Params.path('backup_destination_folder', '',
- 'Backup server destination folder, default is the relative local folder.')
-
  class Stream
  attr_reader :checksum, :path, :tmp_path, :file, :size
  def initialize(checksum, path, file, size)
@@ -59,24 +57,40 @@ module ContentServer

  def copy_another_chuck(checksum)
  @stream_queue << [:COPY_CHUNK, checksum]
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Streamer queue', @stream_queue.size)
+ end
  end

  def start_streaming(checksum, path)
  @stream_queue << [:NEW_STREAM, [checksum, path]]
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Streamer queue', @stream_queue.size)
+ end
  end

  def abort_streaming(checksum)
  @stream_queue << [:ABORT_STREAM, checksum]
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Streamer queue', @stream_queue.size)
+ end
  end

  def reset_streaming(checksum, new_offset)
  @stream_queue << [:RESET_STREAM, [checksum, new_offset]]
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Streamer queue', @stream_queue.size)
+ end
  end

  def run
  return Thread.new do
  loop {
- checksum = handle(@stream_queue.pop)
+ stream_pop = @stream_queue.pop
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Streamer queue', @stream_queue.size)
+ end
+ checksum = handle(stream_pop)
  }
  end
  end
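Every enqueue and dequeue on the streamer's internal queue now reports the queue depth when monitoring is enabled. The same three lines recur after each queue operation; a small helper like the following (hypothetical, not part of the gem) captures the intent:

    # Publish the current stream-queue depth as a process variable.
    def report_stream_queue_size(queue)
      return unless Params['enable_monitoring']
      ::ContentServer::Globals.process_vars.set('File Streamer queue', queue.size)
    end

    # e.g. @stream_queue << [:COPY_CHUNK, checksum]
    #      report_stream_queue_size(@stream_queue)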
@@ -87,6 +101,9 @@ module ContentServer
  checksum, path = content
  reset_stream(checksum, path, 0)
  @stream_queue << [:COPY_CHUNK, checksum] if @streams.key?(checksum)
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Streamer queue', @stream_queue.size)
+ end
  elsif type == :ABORT_STREAM
  checksum = content
  Stream.close_delete_stream(checksum, @streams)
@@ -94,6 +111,9 @@ module ContentServer
  checksum, new_offset = content
  reset_stream(checksum, nil, new_offset)
  @stream_queue << [:COPY_CHUNK, checksum] if @streams.key?(checksum)
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Streamer queue', @stream_queue.size)
+ end
  elsif type == :COPY_CHUNK
  checksum = content
  if @streams.key?(checksum)
@@ -191,11 +211,12 @@ module ContentServer

  # open new stream
  def handle_new_stream(file_checksum, file_size)
+ Log.info("enter handle_new_stream")
  # final destination path
  tmp_path = FileReceiver.destination_filename(
- File.join(Params['backup_destination_folder'], 'tmp'),
+ File.join(Params['backup_destination_folder'][0]['path'], 'tmp'),
  file_checksum)
- path = FileReceiver.destination_filename(Params['backup_destination_folder'],
+ path = FileReceiver.destination_filename(Params['backup_destination_folder'][0]['path'],
  file_checksum)
  if File.exists?(path)
  Log.warning("File already exists (#{path}) not writing.")
@@ -204,7 +225,7 @@ module ContentServer
  # The file will be moved from tmp location once the transfer will be done
  # system will use the checksum and some more unique key for tmp file name
  FileUtils.makedirs(File.dirname(tmp_path)) unless File.directory?(File.dirname(tmp_path))
- tmp_file = file = File.new(tmp_path, 'wb')
+ tmp_file = File.new(tmp_path, 'wb')
  @streams[file_checksum] = Stream.new(file_checksum, tmp_path, tmp_file, file_size)
  end
  end
@@ -234,7 +255,7 @@ module ContentServer
  # Should always be true, unless file creation failed.
  if @streams.key?(file_checksum)
  # Make the directory if does not exists.
- path = FileReceiver.destination_filename(Params['backup_destination_folder'],
+ path = FileReceiver.destination_filename(Params['backup_destination_folder'][0]['path'],
  file_checksum)
  Log.debug1("Moving tmp file #{@streams[file_checksum].path} to #{path}")
  Log.debug1("Creating directory: #{path}")
@@ -0,0 +1,10 @@
+ require 'process_monitoring/thread_safe_hash'
+
+ module ContentServer
+ class Globals
+ @@process_vars = ThreadSafeHash::ThreadSafeHash.new
+ def self.process_vars
+ @@process_vars
+ end
+ end
+ end
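The new lib/content_server/globals.rb above replaces the old Params['process_vars'] slot with a process-wide ThreadSafeHash reachable from any thread. Usage, as seen elsewhere in this release:

    require 'content_server/globals'

    # Any thread can publish or bump a metric without going through Params.
    ::ContentServer::Globals.process_vars.set('server_name', 'content_server')
    ::ContentServer::Globals.process_vars.inc('indexed_files')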
@@ -2,6 +2,7 @@ require 'thread'
  require 'content_server/file_streamer'
  require 'file_indexing/index_agent'
+ require 'content_server/globals'
  require 'log'
  require 'networking/tcp'
  require 'params'
@@ -55,12 +56,11 @@ module ContentServer
  if message_type == :COPY_MESSAGE
  Log.debug1 "Copy files event: #{message_content}"
  # Prepare source,dest map for copy.
- message_content.instances.each { |key, instance|
- # If not already sending.
- if !@copy_prepare.key?(instance.checksum) || !@copy_prepare[instance.checksum][1]
- @copy_prepare[instance.checksum] = [instance.full_path, false]
- Log.debug1("Sending ack for: #{instance.checksum}")
- @backup_tcp.send_obj([:ACK_MESSAGE, [instance.checksum, Time.now.to_i]])
+ message_content.each_instance { |checksum, size, content_mod_time, instance_mod_time, server, path|
+ if !@copy_prepare.key?(checksum) || !@copy_prepare[checksum][1]
+ @copy_prepare[checksum] = [path, false]
+ Log.debug1("Sending ack for: #{checksum}")
+ @backup_tcp.send_obj([:ACK_MESSAGE, [checksum, Time.now.to_i]])
  end
  }
  elsif message_type == :ACK_MESSAGE
@@ -72,19 +72,23 @@ module ContentServer
  "now: #{Time.now.to_i}")

  # Copy file if ack (does not exists on backup and not too much time passed)
- if ack && (Time.now.to_i - timestamp < Params['ack_timeout'])
- if !@copy_prepare.key?(checksum) || @copy_prepare[checksum][1]
- Log.warning("File was aborted, copied, or started copy just now: #{checksum}")
+ if ack
+ if (Time.now.to_i - timestamp < Params['ack_timeout'])
+ if !@copy_prepare.key?(checksum) || @copy_prepare[checksum][1]
+ Log.warning("File was aborted, copied, or started copy just now: #{checksum}")
+ else
+ path = @copy_prepare[checksum][0]
+ Log.info "Streaming to backup server. content: #{checksum} path:#{path}."
+ @file_streamer.start_streaming(checksum, path)
+ # Ack received, setting prepare to true
+ @copy_prepare[checksum][1] = true
+ end
  else
- path = @copy_prepare[checksum][0]
- Log.info "Streaming to backup server. content: #{checksum} path:#{path}."
- @file_streamer.start_streaming(checksum, path)
- # Ack received, setting prepare to true
- @copy_prepare[checksum][1] = true
+ Log.debug1("Ack timed out span: #{Time.now.to_i - timestamp} > " \
+ "timeout: #{Params['ack_timeout']}")
  end
  else
- Log.debug1("Ack timed out span: #{Time.now.to_i - timestamp} > " \
- "timeout: #{Params['ack_timeout']}")
+ Log.debug1('Ack is false');
  end
  elsif message_type == :COPY_CHUNK_FROM_REMOTE
  checksum = message_content
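The ack handling above is restructured so the timeout check is nested under the ack flag: a false ack is now logged on its own branch instead of being folded into the timeout message. The control flow reduces to the following shape (the helper name stands in for the unchanged inner branch and is illustrative only):

    if ack
      if Time.now.to_i - timestamp < Params['ack_timeout']
        start_streaming_unless_already_prepared(checksum)  # inner branch shown in the hunk
      else
        Log.debug1("Ack timed out span: #{Time.now.to_i - timestamp} > timeout: #{Params['ack_timeout']}")
      end
    else
      Log.debug1('Ack is false')
    end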
@@ -122,7 +126,6 @@ module ContentServer
  class FileCopyClient
  def initialize(host, port, dynamic_content_data)
  @local_queue = Queue.new
- start_process_var_monitoring
  @dynamic_content_data = dynamic_content_data
  @tcp_client = Networking::TCPClient.new(host, port, method(:handle_message))
  @file_receiver = FileReceiver.new(method(:done_copy),
@@ -130,6 +133,10 @@ module ContentServer
  method(:reset_copy))
  @local_thread = Thread.new do
  loop do
+ pop_queue = @local_queue.pop
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Copy Client queue', @local_queue.size)
+ end
  handle(@local_queue.pop)
  end
  end
@@ -137,22 +144,6 @@ module ContentServer
  Log.debug3("initialize FileCopyClient host:#{host} port:#{port}")
  end

- def start_process_var_monitoring
- if Params['enable_monitoring']
- @process_var_thread = Thread.new do
- last_data_flush_time = nil
- while true do
- if last_data_flush_time.nil? || last_data_flush_time + Params['process_vars_delay'] < Time.now
- Log.info("process_vars:FileCopyClient queue size:#{@local_queue.size}")
- Params['process_vars'].set('File Copy Client queue', @local_queue.size)
- last_data_flush_time = Time.now
- end
- sleep(0.3)
- end
- end
- end
- end
-
  def threads
  ret = [@local_thread]
  ret << @tcp_client.tcp_thread if @tcp_client != nil
@@ -172,13 +163,18 @@ module ContentServer
  end

  def done_copy(local_file_checksum, local_path)
- Params['process_vars'].inc('num_files_received')
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.inc('num_files_received')
+ end
  Log.debug1("Done copy file: #{local_path}, #{local_file_checksum}")
  end

  def handle_message(message)
  Log.debug3('QueueFileReceiver handle message')
  @local_queue.push(message)
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.set('File Copy Client queue', @local_queue.size)
+ end
  end

  # This is a function which receives the messages (file or ack) and return answer in case
@@ -186,8 +182,8 @@ module ContentServer
  def handle(message)
  message_type, message_content = message
  if message_type == :SEND_COPY_MESSAGE
- Log.debug1("Requesting file (content data) to copy.")
- Log.debug2("File requested: #{message_content.to_s}")
+ Log.debug1("Requesting files to copy.")
+ Log.debug2("Files requested: #{message_content.to_s}")
  bytes_written = @tcp_client.send_obj([:COPY_MESSAGE, message_content])
  Log.debug2("Sending copy message succeeded? bytes_written: #{bytes_written}.")
  elsif message_type == :COPY_CHUNK
@@ -1,110 +1,89 @@
  require 'file_indexing/index_agent'
  require 'file_indexing/indexer_patterns'
+ require 'content_server/globals'
  require 'log'
  require 'params'

  module ContentServer

-
  # Simple indexer, gets inputs events (files to index) and outputs
  # content data updates into output queue.
  class QueueIndexer

- def initialize(input_queue, output_queue, content_data_path)
+ def initialize(input_queue, local_dynamic_content_data)
  @input_queue = input_queue
- @output_queue = output_queue
- @content_data_path = content_data_path
+ @local_dynamic_content_data = local_dynamic_content_data
  end

+ # index files and add to copy queue
+ # delete directory with it's sub files
+ # delete file
  def run
- server_content_data = ContentData::ContentData.new
- # Shallow check content data files.
- tmp_content_data = ContentData::ContentData.new
- tmp_content_data.from_file(@content_data_path) if File.exists?(@content_data_path)
- tmp_content_data.instances.each_value do |instance|
- # Skipp instances (files) which did not pass the shallow check.
- Log.debug1('Shallow checking content data:')
- if shallow_check(instance)
- Log.debug1("exists: #{instance.full_path}")
- server_content_data.add_content(tmp_content_data.contents[instance.checksum])
- server_content_data.add_instance(instance)
- else
- Log.debug1("changed: #{instance.full_path}")
- # Add non existing and changed files to index queue.
- @input_queue.push([FileMonitoring::FileStatEnum::STABLE, instance.full_path])
- end
- end
-
  # Start indexing on demand and write changes to queue
  thread = Thread.new do
  while true do
  Log.debug1 'Waiting on index input queue.'
- (state, is_dir, path) = @input_queue.pop
- Log.debug1 "index event: state:#{state}, dir?#{is_dir}, path:#{path}."
-
- # index files and add to copy queue
- # delete directory with it's sub files
- # delete file
+ (state, is_dir, path, mtime, size) = @input_queue.pop
+ Log.debug1 "index event: state:#{state}, dir?#{is_dir}, path:#{path}, mtime:#{mtime}, size:#{size}."
  if state == FileMonitoring::FileStatEnum::STABLE && !is_dir
- Log.debug1 "Indexing content #{path}."
- index_agent = FileIndexing::IndexAgent.new
- indexer_patterns = FileIndexing::IndexerPatterns.new
- indexer_patterns.add_pattern(path)
- index_agent.index(indexer_patterns, server_content_data)
- Log.debug1("Failed files: #{index_agent.failed_files.to_a.join(',')}.") \
- if !index_agent.failed_files.empty?
- Log.debug1("indexed content #{index_agent.indexed_content}.")
- server_content_data.merge index_agent.indexed_content
- if Params['enable_monitoring']
- Params['process_vars'].inc('indexed_files')
+ # Calculating checksum
+ instance_stats = @local_dynamic_content_data.stats_by_location([Params['local_server_name'], path])
+ Log.debug1("instance !#{instance_stats}! mtime: #{mtime.to_i}, size: #{size}")
+ if instance_stats.nil? || mtime.to_i != instance_stats[1] || size != instance_stats[0]
+ Log.info "Indexing file:'#{path}'."
+ checksum = calc_SHA1(path)
+ if Params['enable_monitoring']
+ ::ContentServer::Globals.process_vars.inc('indexed_files')
+ end
+ Log.debug1("Index info:checksum:#{checksum} size:#{size} time:#{mtime.to_i}")
+ Log.debug1('Adding index to content data. put in queue for dynamic update.')
+ @local_dynamic_content_data.add_instance(checksum, size, Params['local_server_name'], path, mtime.to_i)
+ else
+ Log.info("Skip file #{path} indexing (shallow check passed)")
  end
  elsif ((state == FileMonitoring::FileStatEnum::NON_EXISTING ||
  state == FileMonitoring::FileStatEnum::CHANGED) && !is_dir)
- # If file content changed, we should remove old instance.
- key = FileIndexing::IndexAgent.global_path(path)
- # Check if deleted file exists at content data.
- Log.debug1("Instance to remove: #{key}")
- if server_content_data.instances.key?(key)
- instance_to_remove = server_content_data.instances[key]
- # Remove file from content data only if it does not pass the shallow check, i.e.,
- # content has changed/removed.
- if !shallow_check(instance_to_remove)
- content_to_remove = server_content_data.contents[instance_to_remove.checksum]
- # Remove the deleted instance.
- content_data_to_remove = ContentData::ContentData.new
- content_data_to_remove.add_content(content_to_remove)
- content_data_to_remove.add_instance(instance_to_remove)
- # Remove the file.
- server_content_data = ContentData::ContentData.remove_instances(
- content_data_to_remove, server_content_data)
- end
- end
+ Log.debug2("NonExisting/Changed (file): #{path}")
+ # Remove file but only when non-existing.
+ Log.debug1("File to remove: #{path}")
+ @local_dynamic_content_data.remove_instance([Params['local_server_name'],path])
  elsif state == FileMonitoring::FileStatEnum::NON_EXISTING && is_dir
- Log.debug1("NonExisting/Changed: #{path}")
+ Log.debug2("NonExisting/Changed (dir): #{path}")
  # Remove directory but only when non-existing.
  Log.debug1("Directory to remove: #{path}")
- global_dir = FileIndexing::IndexAgent.global_path(path)
- server_content_data = ContentData::ContentData.remove_directory(
- server_content_data, global_dir)
+ @local_dynamic_content_data.remove_directory(path, Params['local_server_name'])
  else
  Log.debug1("This case should not be handled: #{state}, #{is_dir}, #{path}.")
  end
-
- Log.debug1 'Adding server content data to queue.'
- @output_queue.push(ContentData::ContentData.new(server_content_data))
  end # while true do
  end # Thread.new do
  thread
  end # def run

+ # Opens file and calculates SHA1 of it's content, returns SHA1
+ def calc_SHA1(path)
+ begin
+ digest = Digest::SHA1.new
+ File.open(path, 'rb') { |f|
+ while buffer = f.read(65536) do
+ digest << buffer
+ end
+ }
+ rescue
+ Log.warning("Monitored path'#{path}' does not exist. Probably file changed")
+ end
+ return digest.hexdigest.downcase
+ end
+
+ # Remove when not needed.
  # Check file existence, check it's size and modification date.
  # If something wrong reindex the file and update content data.
- def shallow_check(instance)
- shallow_instance = FileIndexing::IndexAgent.create_shallow_instance(instance.full_path)
- return false unless shallow_instance
- return (shallow_instance.size == instance.size &&
- shallow_instance.modification_time == instance.modification_time)
- end
+ #def shallow_check(file_name, file_size, file_mod_time)
+ # shallow_instance = FileIndexing::IndexAgent.create_shallow_instance(file_name)
+ # return false unless shallow_instance
+ # return (shallow_instance[0] == file_size &&
+ # shallow_instance[2] == file_mod_time)
+ #end

  end # class QueueIndexer
  end
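QueueIndexer no longer rebuilds a ContentData snapshot per event; it compares the event's mtime/size against the stats already held in the dynamic content data and, only when they differ, hashes the file with the new calc_SHA1, which streams the file through Digest::SHA1 in 64 KiB chunks. A quick way to sanity-check that chunked digest against Ruby's one-shot API (illustrative only; `path` is a placeholder for any readable file):

    require 'digest'

    path = 'some_file.bin'  # placeholder input
    chunked = Digest::SHA1.new
    File.open(path, 'rb') { |f| while (buf = f.read(65536)) do chunked << buf end }
    raise 'mismatch' unless chunked.hexdigest.downcase == Digest::SHA1.file(path).hexdigest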
@@ -25,7 +25,6 @@ module ContentServer
  def receive_content(message)
  Log.debug1("Backup server received Remote content data:#{message.to_s}")
  Log.info("Backup server received Remote content data")
- ref = @dynamic_content_data.last_content_data
  @dynamic_content_data.update(message)
  @last_fetch_timestamp = Time.now.to_i

@@ -1,3 +1,3 @@
  module ContentServer
- VERSION = "1.0.3"
+ VERSION = "1.1.0"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: content_server
  version: !ruby/object:Gem::Version
- version: 1.0.3
+ version: 1.1.0
  prerelease:
  platform: ruby
  authors:
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2013-06-12 00:00:00.000000000 Z
+ date: 2013-06-24 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: content_data
@@ -18,7 +18,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -26,7 +26,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: file_indexing
  requirement: !ruby/object:Gem::Requirement
@@ -34,7 +34,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -42,7 +42,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: file_monitoring
  requirement: !ruby/object:Gem::Requirement
@@ -50,7 +50,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.3
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -58,7 +58,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.3
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: log
  requirement: !ruby/object:Gem::Requirement
@@ -66,7 +66,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -74,7 +74,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: networking
  requirement: !ruby/object:Gem::Requirement
@@ -82,7 +82,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -90,7 +90,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: params
  requirement: !ruby/object:Gem::Requirement
@@ -98,7 +98,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -106,7 +106,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: process_monitoring
  requirement: !ruby/object:Gem::Requirement
@@ -114,7 +114,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -122,7 +122,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
@@ -146,7 +146,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -154,7 +154,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: content_data
  requirement: !ruby/object:Gem::Requirement
@@ -162,7 +162,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -170,7 +170,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: file_indexing
  requirement: !ruby/object:Gem::Requirement
@@ -178,7 +178,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -186,7 +186,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: file_monitoring
  requirement: !ruby/object:Gem::Requirement
@@ -194,7 +194,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.3
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -202,7 +202,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.3
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: log
  requirement: !ruby/object:Gem::Requirement
@@ -210,7 +210,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -218,7 +218,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: networking
  requirement: !ruby/object:Gem::Requirement
@@ -226,7 +226,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -234,7 +234,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: params
  requirement: !ruby/object:Gem::Requirement
@@ -242,7 +242,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -250,7 +250,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: process_monitoring
  requirement: !ruby/object:Gem::Requirement
@@ -258,7 +258,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -266,7 +266,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.1.0
  - !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
@@ -290,7 +290,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -298,7 +298,7 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  description: Monitor and Index a directory and back it up to backup server.
  email: bbfsdev@gmail.com
  executables:
@@ -310,6 +310,7 @@ files:
  - lib/content_server.rb
  - lib/content_server/file_streamer.rb
  - lib/content_server/queue_copy.rb
+ - lib/content_server/globals.rb
  - lib/content_server/content_server.rb
  - lib/content_server/content_receiver.rb
  - lib/content_server/remote_content.rb