content_server 0.0.10 → 1.0.0

--- a/bin/backup_server
+++ b/bin/backup_server
@@ -7,21 +7,33 @@
 
 # NOTE this file mainly is a copy of content_server
 
-begin
-  require 'yaml'
-  require 'params'
-  require 'run_in_background'
-  require 'content_server'
-rescue LoadError
-  require 'rubygems'
-  require 'yaml'
-  require 'params'
-  require 'run_in_background'
-  require 'content_server'
-end
-include BBFS
+require 'log'
+require 'params'
+require 'run_in_background'
+require 'content_server'
 
 Params.init ARGV
 Log.init
 
-RunInBackground.run { ContentServer.run_backup_server }
+Thread.abort_on_exception = true # TODO (genadyp) should be treated globally? by param for example.
+
+retries = 0 # number of retries to run a server
+
+begin
+  RunInBackground.run { ContentServer.run_backup_server }
+rescue SystemExit, SignalException => exc
+  # TODO (genadyp) do we need to trap signals by types?
+  Log.error("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
+            "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
+  Log.flush
+  exit
+rescue Exception => exc
+  Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}")
+  if retries > 0
+    Log.info("Restarting (retries:#{retries}).\nBacktrace:\n#{exc.backtrace.join("\n")}")
+  else
+    Log.info("Exiting...\nBacktrace:\n#{exc.backtrace.join("\n")}")
+    Log.flush
+  end
+  retries -= 1
+end while retries >= 0
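
Note: both binaries now share this wrapper. A minimal standalone sketch of the
begin ... end while retry pattern follows; the always-raising body is a
hypothetical stand-in for RunInBackground.run, and with the shipped default of
retries = 0 the body runs exactly once before any exception falls through to exit.

    retries = 2
    begin
      raise 'server crashed'      # stand-in for RunInBackground.run { ... }
    rescue SystemExit, SignalException
      exit                        # interrupt/exit requests are never retried
    rescue Exception => exc
      if retries > 0
        puts "Restarting (retries:#{retries}) after: #{exc.message}"
      else
        puts "Exiting after: #{exc.message}"
      end
      retries -= 1
    end while retries >= 0        # post-test loop: body runs 1 + retries times
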
--- a/bin/content_server
+++ b/bin/content_server
@@ -4,31 +4,33 @@
 # The files in those directories are indexed (calculating their SHA1).
 # Each unique content is backed up to the remote (backup) server.
 
-begin
-  print "1"
-  require 'yaml'
-  print "2"
-  require 'params'
-  print "3"
-  require 'run_in_background'
-  print "4"
-  require 'content_server'
-  print "5"
-rescue LoadError
-  print "6"
-  require 'rubygems'
-  print "7"
-  require 'yaml'
-  print "8"
-  require 'params'
-  print "9"
-  require 'run_in_background'
-  print "0"
-  require 'content_server'
-end
-include BBFS
+require 'log'
+require 'params'
+require 'run_in_background'
+require 'content_server'
 
 Params.init ARGV
 Log.init
 
-RunInBackground.run { ContentServer.run }
+Thread.abort_on_exception = true # TODO (genadyp) should be treated globally? by param for example.
+
+retries = 0 # number of retries to run a server
+
+begin
+  RunInBackground.run { ContentServer.run_content_server }
+rescue SystemExit, SignalException => exc
+  # TODO (genadyp) do we need to trap signals by types?
+  Log.error("Interrupt or Exit happened in #{Params['service_name']} server: #{exc.class}, " +
+            "stopping process.\nBacktrace:\n#{exc.backtrace.join("\n")}")
+  Log.flush
+  exit
+rescue Exception => exc
+  Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}")
+  if retries > 0
+    Log.info("Restarting (retries:#{retries}).\nBacktrace:\n#{exc.backtrace.join("\n")}")
+  else
+    Log.info("Exiting...\nBacktrace:\n#{exc.backtrace.join("\n")}")
+    Log.flush
+  end
+  retries -= 1
+end while retries >= 0
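
Note: the Params.init ARGV / Log.init pair is the whole bootstrap in both
binaries; flags are declared by the libraries at require time and the command
line only overrides them. A hypothetical sketch of that flow; the
--service_name=... syntax is an assumption about the params gem's CLI format,
not something this diff shows, and the declaration here is illustrative.

    require 'params'

    # Declared once at load time, with a default and a description.
    Params.string('service_name', 'content_server', 'Name used in log messages.')

    Params.init ARGV   # e.g. ruby my_server.rb --service_name=backup_server (assumed syntax)
    puts Params['service_name']
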
--- a/lib/content_server.rb
+++ b/lib/content_server.rb
@@ -1,189 +1,13 @@
-require 'fileutils'
-require 'set'
-require 'thread'
+require 'content_server/content_server'
+require 'content_server/backup_server'
 
-require 'content_data'
-require 'content_server/content_receiver'
-require 'content_server/queue_indexer'
-require 'content_server/queue_copy'
-require 'content_server/remote_content'
-require 'file_indexing'
-require 'file_monitoring'
-require 'log'
-require 'networking/tcp'
-require 'params'
+module ContentServer
+  # Flags combined for content and backup server.
+  Params.path('local_content_data_path', '', 'ContentData file path.')
 
+  # Monitoring
+  Params.boolean('enable_monitoring', false, 'Whether to enable process monitoring or not.')
 
-
-# Content server. Monitors files, index local files, listen to backup server content,
-# copy changes and new files to backup server.
-module BBFS
-  module ContentServer
-    Params.string('remote_server', 'localhost', 'IP or DNS of backup server.')
-    Params.string('backup_username', nil, 'Backup server username.')
-    Params.string('backup_password', nil, 'Backup server password.')
-    Params.integer('backup_file_listening_port', 4444, 'Listening port in backup server for files')
-    Params.string('content_data_path', File.expand_path('~/.bbfs/var/content.data'),
-                  'ContentData file path.')
-    Params.string('monitoring_config_path', File.expand_path('~/.bbfs/etc/file_monitoring.yml'),
-                  'Configuration file for monitoring.')
-    Params.integer('remote_content_port', 3333, 'Default port for remote content copy.')
-    Params.integer('backup_check_delay', 5, 'Time between two content vs backup checks.')
-
-    def run
-      all_threads = []
-
-      # # # # # # # # # # # #
-      # Initialize/Start monitoring
-      monitoring_events = Queue.new
-      fm = FileMonitoring::FileMonitoring.new
-      fm.set_config_path(Params['monitoring_config_path'])
-      fm.set_event_queue(monitoring_events)
-      # Start monitoring and writing changes to queue
-      all_threads << Thread.new do
-        fm.monitor_files
-      end
-
-      # # # # # # # # # # # # # # # # # # # # # # # # #
-      # Initialize/Start backup server content data listener
-      #backup_server_content_data = nil
-      #backup_server_content_data_queue = Queue.new
-      #content_data_receiver = ContentDataReceiver.new(
-      #  backup_server_content_data_queue,
-      #  Params['remote_listening_port'])
-      # Start listening to backup server
-      #all_threads << Thread.new do
-      #  content_data_receiver.run
-      #end
-
-      # # # # # # # # # # # # # #
-      # Initialize/Start local indexer
-      local_server_content_data_queue = Queue.new
-      queue_indexer = QueueIndexer.new(monitoring_events,
-                                       local_server_content_data_queue,
-                                       Params['content_data_path'])
-      # Start indexing on demand and write changes to queue
-      all_threads << queue_indexer.run
-
-      # # # # # # # # # # # # # # # # # # # # # # #
-      # Initialize/Start content data comparator
-      copy_files_events = Queue.new
-      local_dynamic_content_data = ContentData::DynamicContentData.new
-      all_threads << Thread.new do
-        # backup_server_content_data = ContentData::ContentData.new
-        # local_server_content_data = nil
-        while true do
-
-          # Note: This thread should be the only consumer of local_server_content_data_queue
-          Log.info 'Waiting on local server content data.'
-          local_server_content_data = local_server_content_data_queue.pop
-          local_dynamic_content_data.update(local_server_content_data)
-          #
-          # # Note: This thread should be the only consumer of backup_server_content_data_queue
-          # # Note: The server will wait in the first time on pop until backup sends it's content data
-          # while backup_server_content_data_queue.size > 0
-          #   Log.info 'Waiting on backup server content data.'
-          #   backup_server_content_data = backup_server_content_data_queue.pop
-          # end
-
-          # Log.info 'Updating file copy queue.'
-          # Log.debug1 "local_server_content_data #{local_server_content_data}."
-          # Log.debug1 "backup_server_content_data #{backup_server_content_data}."
-          # # Remove backup content data from local server
-          # content_to_copy = ContentData::ContentData.remove(backup_server_content_data, local_server_content_data)
-          # content_to_copy = local_server_content_data
-          # # Add copy instruction in case content is not empty
-          # Log.debug1 "Content to copy: #{content_to_copy}"
-          # copy_files_events.push([:COPY_MESSAGE, content_to_copy]) unless content_to_copy.empty?
-        end
-      end
-
-      remote_content_client = RemoteContentClient.new(local_dynamic_content_data,
-                                                      Params['remote_content_port'])
-      all_threads << remote_content_client.tcp_thread
-
-      # # # # # # # # # # # # # # # #
-      # Start copying files on demand
-      copy_server = FileCopyServer.new(copy_files_events, Params['backup_file_listening_port'])
-      all_threads.concat(copy_server.run())
-
-      # Finalize server threads.
-      all_threads.each { |t| t.abort_on_exception = true }
-      all_threads.each { |t| t.join }
-      # Should never reach this line.
-    end
-    module_function :run
-
-    def run_backup_server
-      all_threads = []
-
-      # # # # # # # # # # # #
-      # Initialize/Start monitoring
-      monitoring_events = Queue.new
-      fm = FileMonitoring::FileMonitoring.new
-      fm.set_config_path(Params['monitoring_config_path'])
-      fm.set_event_queue(monitoring_events)
-      # Start monitoring and writing changes to queue
-      all_threads << Thread.new do
-        fm.monitor_files
-      end
-
-      # # # # # # # # # # # # # #
-      # Initialize/Start local indexer
-      local_server_content_data_queue = Queue.new
-      queue_indexer = QueueIndexer.new(monitoring_events,
-                                       local_server_content_data_queue,
-                                       Params['content_data_path'])
-      # Start indexing on demand and write changes to queue
-      all_threads << queue_indexer.run
-
-      # # # # # # # # # # # # # # # # # # # # # # # # # # #
-      # Initialize/Start backup server content data sender
-      dynamic_content_data = ContentData::DynamicContentData.new
-      #content_data_sender = ContentDataSender.new(
-      #  Params['remote_server'],
-      #  Params['remote_listening_port'])
-      # Start sending to backup server
-      all_threads << Thread.new do
-        while true do
-          Log.info 'Waiting on local server content data queue.'
-          cd = local_server_content_data_queue.pop
-          # content_data_sender.send_content_data(cd)
-          dynamic_content_data.update(cd)
-        end
-      end
-
-      content_server_dynamic_content_data = ContentData::DynamicContentData.new
-      remote_content = ContentServer::RemoteContent.new(content_server_dynamic_content_data,
-                                                        Params['remote_server'],
-                                                        Params['remote_content_port'],
-                                                        Params['backup_destination_folder'])
-      all_threads.concat(remote_content.run())
-
-      file_copy_client = FileCopyClient.new(Params['remote_server'],
-                                            Params['backup_file_listening_port'],
-                                            dynamic_content_data)
-      all_threads.concat(file_copy_client.threads)
-
-      # Each
-      all_threads << Thread.new do
-        loop do
-          sleep(Params['backup_check_delay'])
-          local_cd = dynamic_content_data.last_content_data()
-          remote_cd = content_server_dynamic_content_data.last_content_data()
-          diff = ContentData::ContentData.remove(local_cd, remote_cd)
-          Log.debug2("Files to send? #{!diff.empty?}")
-          file_copy_client.request_copy(diff) unless diff.empty?
-        end
-      end
-
-
-      all_threads.each { |t| t.abort_on_exception = true }
-      all_threads.each { |t| t.join }
-      # Should never reach this line.
-    end
-    module_function :run_backup_server
-
-  end # module ContentServer
-end # module BBFS
-
+  # Handling thread exceptions.
+  Params.boolean('abort_on_exception', true, 'Any exception in any thread will abort the run.')
+end # module ContentServer
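
Note: the new abort_on_exception flag names a real Ruby switch. With
Thread.abort_on_exception = true, an unhandled exception in any thread is
re-raised in the main thread instead of being deferred until join. A
standalone illustration in plain Ruby, independent of the gem code:

    Thread.abort_on_exception = true
    begin
      Thread.new { raise 'worker failed' }
      sleep 1   # the worker's exception interrupts this sleep in the main thread
    rescue RuntimeError => exc
      puts "main thread saw: #{exc.message}"
    end
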
--- /dev/null
+++ b/lib/content_server/backup_server.rb
@@ -0,0 +1,120 @@
+require 'fileutils'
+require 'set'
+require 'thread'
+
+require 'content_data'
+require 'content_server/content_receiver'
+require 'content_server/queue_indexer'
+require 'content_server/queue_copy'
+require 'content_server/remote_content'
+require 'file_indexing'
+require 'file_monitoring'
+require 'log'
+require 'networking/tcp'
+require 'params'
+require 'process_monitoring/thread_safe_hash'
+require 'process_monitoring/monitoring'
+require 'process_monitoring/monitoring_info'
+
+# Content server. Monitors files, index local files, listen to backup server content,
+# copy changes and new files to backup server.
+module ContentServer
+  # Backup server specific flags
+  Params.string('content_server_hostname', nil, 'IP or DNS of backup server.')
+  Params.integer('content_server_data_port', 3333, 'Port to copy content data from.')
+  Params.integer('content_server_files_port', 4444, 'Listening port in backup server for files')
+
+  Params.integer('backup_check_delay', 5, 'Delay in seconds between two content vs backup checks.')
+
+  def run_backup_server
+    Thread.abort_on_exception = true
+    all_threads = []
+
+    @process_variables = ThreadSafeHash::ThreadSafeHash.new
+    @process_variables.set('server_name', 'backup_server')
+
+    # # # # # # # # # # # #
+    # Initialize/Start monitoring
+    monitoring_events = Queue.new
+    fm = FileMonitoring::FileMonitoring.new
+    fm.set_event_queue(monitoring_events)
+    # Start monitoring and writing changes to queue
+    all_threads << Thread.new do
+      fm.monitor_files
+    end
+
+    # # # # # # # # # # # # # #
+    # Initialize/Start local indexer
+    local_server_content_data_queue = Queue.new
+    queue_indexer = QueueIndexer.new(monitoring_events,
+                                     local_server_content_data_queue,
+                                     Params['local_content_data_path'])
+    # Start indexing on demand and write changes to queue
+    all_threads << queue_indexer.run
+
+    # # # # # # # # # # # # # # # # # # # # # # # # # # #
+    # Initialize/Start backup server content data sender
+    dynamic_content_data = ContentData::DynamicContentData.new
+    #content_data_sender = ContentDataSender.new(
+    #  Params['remote_server'],
+    #  Params['remote_listening_port'])
+    # Start sending to backup server
+    all_threads << Thread.new do
+      while true do
+        Log.info 'Waiting on local server content data queue.'
+        cd = local_server_content_data_queue.pop
+        # content_data_sender.send_content_data(cd)
+        dynamic_content_data.update(cd)
+      end
+    end
+
+    Params['backup_destination_folder'] = File.expand_path(Params['monitoring_paths'][0]['path'])
+    content_server_dynamic_content_data = ContentData::DynamicContentData.new
+    remote_content = ContentServer::RemoteContentClient.new(content_server_dynamic_content_data,
+                                                            Params['content_server_hostname'],
+                                                            Params['content_server_data_port'],
+                                                            Params['backup_destination_folder'])
+    all_threads.concat(remote_content.run())
+
+    file_copy_client = FileCopyClient.new(Params['content_server_hostname'],
+                                          Params['content_server_files_port'],
+                                          dynamic_content_data,
+                                          @process_variables)
+    all_threads.concat(file_copy_client.threads)
+
+    # Each
+    all_threads << Thread.new do
+      loop do
+        sleep(Params['backup_check_delay'])
+        local_cd = dynamic_content_data.last_content_data()
+        remote_cd = content_server_dynamic_content_data.last_content_data()
+        diff = ContentData::ContentData.remove(local_cd, remote_cd)
+        Log.debug2("Files to send? #{!diff.empty?}")
+        #file_copy_client.request_copy(diff) unless diff.empty?
+        if !diff.empty?
+          Log.info('Backup and remote contents need a sync:')
+          Log.info("Backup content:\n#{local_cd}")
+          Log.info("Remote content:\n#{remote_cd}")
+          Log.info("Missing contents:\n#{diff}")
+          Log.info('Requesting a copy')
+          file_copy_client.request_copy(diff)
+        end
+      end
+    end
+
+    if Params['enable_monitoring']
+      mon = Monitoring::Monitoring.new(@process_variables)
+      Log.add_consumer(mon)
+      all_threads << mon.thread
+      monitoring_info = MonitoringInfo::MonitoringInfo.new(@process_variables)
+    end
+
+    all_threads.each { |t| t.abort_on_exception = true }
+    all_threads.each { |t| t.join }
+    # Should never reach this line.
+  end
+  module_function :run_backup_server
+
+end # module ContentServer
+
+
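
Note: the periodic check at the bottom of run_backup_server is essentially a
set difference. Judging by the old commented-out line "Remove backup content
data from local server", ContentData.remove(a, b) appears to yield b's
contents minus a's. A toy model of the sync loop, with plain Sets standing in
for ContentData:

    require 'set'

    backup = Set['sha1-a']             # contents already on the backup server
    remote = Set['sha1-a', 'sha1-b']   # contents reported by the content server
    loop do
      missing = remote - backup        # ~ ContentData.remove(local_cd, remote_cd)
      break if missing.empty?
      puts "requesting copy of: #{missing.to_a.join(', ')}"
      backup.merge(missing)            # stand-in for the requested copy completing
    end
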
--- a/lib/content_server/content_receiver.rb
+++ b/lib/content_server/content_receiver.rb
@@ -2,61 +2,60 @@ require 'log'
 require 'params'
 require 'socket'
 
-module BBFS
-  module ContentServer
+module ContentServer
 
-    class ContentDataReceiver
-      def initialize queue, port
-        @queue = queue
-        @port = port
-      end
+  class ContentDataReceiver
+    def initialize queue, port
+      @queue = queue
+      @port = port
+    end
 
-      def run
-        Socket.tcp_server_loop(@port) do |sock, client_addrinfo|
-          while size_of_data = sock.read(4)
-            size_of_data = size_of_data.unpack("l")[0]
-            Log.debug3 "Size of data: #{size_of_data}"
-            data = sock.read(size_of_data)
-            #Log.debug3 "Data received: #{data}"
-            unmarshaled_data = Marshal.load(data)
-            #Log.debug3 "Unmarshaled data: #{unmarshaled_data}"
-            @queue.push unmarshaled_data
-            Log.debug3 "Socket closed? #{sock.closed?}."
-            break if sock.closed?
-            Log.debug1 'Waiting on sock.read'
-          end
-          Log.debug1 'Exited, socket closed or read returned nil.'
+    def run
+      Socket.tcp_server_loop(@port) do |sock, client_addrinfo|
+        while size_of_data = sock.read(4)
+          size_of_data = size_of_data.unpack("l")[0]
+          Log.debug3 "Size of data: #{size_of_data}"
+          data = sock.read(size_of_data)
+          #Log.debug3 "Data received: #{data}"
+          unmarshaled_data = Marshal.load(data)
+          #Log.debug3 "Unmarshaled data: #{unmarshaled_data}"
+          @queue.push unmarshaled_data
+          Log.debug3 "Socket closed? #{sock.closed?}."
+          break if sock.closed?
+          Log.debug1 'Waiting on sock.read'
         end
+        Log.debug1 'Exited, socket closed or read returned nil.'
       end
     end
+  end
 
-    class ContentDataSender
+  class ContentDataSender
 
-      def initialize host, port
-        @host = host
-        @port = port
-        open_socket
-      end
+    def initialize host, port
+      @host = host
+      @port = port
+      open_socket
+    end
 
-      def open_socket
-        Log.debug1 "Connecting to content server #{@host}:#{@port}."
-        @tcp_socket = TCPSocket.new(@host, @port)
-      end
+    def open_socket
+      Log.debug1 "Connecting to content server #{@host}:#{@port}."
+      @tcp_socket = TCPSocket.new(@host, @port)
+    end
 
-      def send_content_data content_data
-        open_socket if @tcp_socket.closed?
-        #Log.debug3 "Data to send: #{content_data}"
-        marshal_data = Marshal.dump(content_data)
-        Log.debug3 "Marshaled size: #{marshal_data.length}."
-        data_size = [marshal_data.length].pack("l")
-        #Log.debug3 "Marshaled data: #{marshal_data}."
-        if data_size.nil? || marshal_data.nil?
-          Log.debug3 'Send data is nil!!!!!!!!'
+    def send_content_data content_data
+      open_socket if @tcp_socket.closed?
+      #Log.debug3 "Data to send: #{content_data}"
+      marshal_data = Marshal.dump(content_data)
+      Log.debug3 "Marshaled size: #{marshal_data.length}."
+      data_size = [marshal_data.length].pack("l")
+      #Log.debug3 "Marshaled data: #{marshal_data}."
+      if data_size.nil? || marshal_data.nil?
+        Log.debug3 'Send data is nil!!!!!!!!'
       end
+      @tcp_socket.write data_size
+      @tcp_socket.write marshal_data
     end
-
   end
+
 end
+
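
Note: for reference, the 4-byte length-prefixed Marshal framing these two
classes speak, as a self-contained round-trip. A plain hash stands in for a
ContentData instance; pack("l") is a 32-bit signed native-endian field, so
both ends must share endianness.

    require 'stringio'

    payload = { 'sha1' => 'abc', 'size' => 42 }       # stand-in for content data
    marshaled = Marshal.dump(payload)
    frame = [marshaled.length].pack('l') + marshaled  # what send_content_data writes

    io = StringIO.new(frame)                          # what run reads off the socket
    size = io.read(4).unpack('l')[0]
    decoded = Marshal.load(io.read(size))
    raise 'round-trip failed' unless decoded == payload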