content_server 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,10 @@
+ #!/usr/bin/env ruby
+
+ begin
+   require 'content_server'
+ rescue LoadError
+   require 'rubygems'
+   require 'content_server'
+ end
+
+ BBFS::ContentServer.run_backup_server
@@ -0,0 +1,10 @@
+ #!/usr/bin/env ruby
+
+ begin
+   require 'content_server'
+ rescue LoadError
+   require 'rubygems'
+   require 'content_server'
+ end
+
+ BBFS::ContentServer.run
@@ -0,0 +1,169 @@
+ require 'content_data'
+ require 'file_copy/copy'
+ require 'file_monitoring'
+ require 'thread'
+ require 'parameters'
+ require 'set'
+ require_relative 'content_server/content_receiver'
+ require_relative 'content_server/queue_indexer'
+
+ # Content server. Monitors files, indexes local files, listens to the backup server's
+ # content data, and copies changed and new files to the backup server.
+ module BBFS
+   module ContentServer
+     VERSION = '0.0.1'
+
+     PARAMS.parameter('remote_server', 'localhost', 'IP or DNS of backup server.')
+     PARAMS.parameter('remote_listening_port', 3333, 'Listening port for backup server content data.')
+     PARAMS.parameter('backup_username', nil, 'Backup server username.')
+     PARAMS.parameter('backup_password', nil, 'Backup server password.')
+     PARAMS.parameter('backup_destination_folder', File.expand_path('~/.bbfs/data'),
+                      'Backup server destination folder.')
+     PARAMS.parameter('content_data_path', File.expand_path('~/.bbfs/var/content.data'),
+                      'ContentData file path.')
+     PARAMS.parameter('monitoring_config_path', File.expand_path('~/.bbfs/etc/file_monitoring.yml'),
+                      'Configuration file for monitoring.')
+
+     def run
+       all_threads = []
+
+       # # # # # # # # # # # #
+       # Initialize/Start monitoring
+       monitoring_events = Queue.new
+       fm = FileMonitoring::FileMonitoring.new
+       fm.set_config_path(PARAMS.monitoring_config_path)
+       fm.set_event_queue(monitoring_events)
+       # Start monitoring and writing changes to queue.
+       all_threads << Thread.new do
+         fm.monitor_files
+       end
+
+       # # # # # # # # # # # # # # # # # # # # # # # # #
+       # Initialize/Start backup server content data listener
+       backup_server_content_data = nil
+       backup_server_content_data_queue = Queue.new
+       content_data_receiver = ContentDataReceiver.new(
+           backup_server_content_data_queue,
+           PARAMS.remote_server,
+           PARAMS.remote_listening_port)
+       # Start listening to backup server.
+       all_threads << Thread.new do
+         content_data_receiver.start_server
+       end
+
+       # # # # # # # # # # # # # #
+       # Initialize/Start local indexer
+       copy_files_events = Queue.new
+       local_server_content_data_queue = Queue.new
+       queue_indexer = QueueIndexer.new(monitoring_events,
+                                        local_server_content_data_queue,
+                                        PARAMS.content_data_path)
+       # Start indexing on demand and write changes to queue.
+       all_threads << queue_indexer.run
+
+       # # # # # # # # # # # # # # # # # # # # # # #
+       # Initialize/Start content data comparator
+       all_threads << Thread.new do
+         backup_server_content_data = nil
+         local_server_content_data = nil
+         while true do
+
+           # Note: This thread should be the only consumer of local_server_content_data_queue.
+           # Note: On the first iteration, pop blocks until the local server sends its content data.
+           while !local_server_content_data || local_server_content_data_queue.size > 1
+             local_server_content_data = local_server_content_data_queue.pop
+           end
+
+           # Note: This thread should be the only consumer of backup_server_content_data_queue.
+           # Note: On the first iteration, pop blocks until the backup server sends its content data.
+           while !backup_server_content_data || backup_server_content_data_queue.size > 1
+             backup_server_content_data = backup_server_content_data_queue.pop
+           end
+
+           # Remove the backup server's content data from the local server's content data.
+           content_to_copy = ContentData.remove(backup_server_content_data, local_server_content_data)
+           # Add a copy instruction when the remaining content is not empty.
+           copy_files_events.push(content_to_copy) unless content_to_copy.empty?
+         end
+       end
+
+
+       # # # # # # # # # # # # # # # #
+       # Start copying files on demand
+       all_threads << Thread.new do
+         while true do
+           copy_event = copy_files_events.pop
+
+           # Prepare source => destination map for the copy.
+           used_contents = Set.new
+           files_map = Hash.new
+           copy_event.instances.each { |instance|
+             # Add the instance only if its content has not been added yet.
+             if !used_contents.include?(instance.checksum)
+               files_map[instance.full_path] = destination_filename(
+                   PARAMS.backup_destination_folder,
+                   instance.checksum)
+               used_contents.add(instance.checksum)
+             end
+           }
+
+           # Copy files; waits until the files have finished copying.
+           FileCopy::sftp_copy(PARAMS.backup_username,
+                               PARAMS.backup_password,
+                               PARAMS.remote_server,
+                               files_map)
+         end
+       end
+
+       all_threads.each { |t| t.join }
+       # Should never reach this line.
+     end
+     module_function :run
+
+     # Creates the destination filename on the backup server; input is a base folder and a SHA1.
+     # For example: folder: /mnt/hd1/bbbackup, sha1: d0be2dc421be4fcd0172e5afceea3970e2f3d940
+     # dest filename: /mnt/hd1/bbbackup/d0/be/2d/d0be2dc421be4fcd0172e5afceea3970e2f3d940
+     def destination_filename(folder, sha1)
+       File.join(folder, sha1[0,2], sha1[2,2], sha1[4,2], sha1)
+     end
+
+     def run_backup_server
+       all_threads = []
+
+       # # # # # # # # # # # #
+       # Initialize/Start monitoring
+       monitoring_events = Queue.new
+       fm = FileMonitoring::FileMonitoring.new
+       fm.set_config_path(PARAMS.monitoring_config_path)
+       fm.set_event_queue(monitoring_events)
+       # Start monitoring and writing changes to queue.
+       all_threads << Thread.new do
+         fm.monitor_files
+       end
+
+       # # # # # # # # # # # # # #
+       # Initialize/Start local indexer
+       copy_files_events = Queue.new
+       local_server_content_data_queue = Queue.new
+       queue_indexer = QueueIndexer.new(monitoring_events,
+                                        local_server_content_data_queue,
+                                        PARAMS.content_data_path)
+       # Start indexing on demand and write changes to queue.
+       all_threads << queue_indexer.run
+
+       # # # # # # # # # # # # # # # # # # # # # # # # # # #
+       # Initialize/Start backup server content data sender
+       content_data_sender = ContentDataSender.new(
+           PARAMS.remote_server,
+           PARAMS.remote_listening_port)
+       # Start sending to backup server.
+       all_threads << Thread.new do
+         content_data_sender.connect
+         while true do
+           content_data_sender.send_content_data(local_server_content_data_queue.pop)
+         end
+       end
+     end
+
+   end # module ContentServer
+ end # module BBFS
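
The destination_filename helper above shards backed-up files by checksum: the first three byte pairs of the SHA1 become nested directories, so no single folder accumulates every file. A standalone sketch of the same layout rule (the folder and file contents here are made-up illustration values, not part of the gem):

require 'digest/sha1'

# Same path-sharding rule as destination_filename, reimplemented outside the
# module purely for illustration.
def backup_path(folder, sha1)
  File.join(folder, sha1[0, 2], sha1[2, 2], sha1[4, 2], sha1)
end

sha1 = Digest::SHA1.hexdigest('hello')
puts backup_path('/mnt/hd1/bbbackup', sha1)
# => /mnt/hd1/bbbackup/aa/f4/c6/aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d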
@@ -0,0 +1,41 @@
+ require 'eventmachine'
+
+ module BBFS
+   module ContentServer
+
+     class ContentDataReceiver
+       def initialize queue, host, port
+         @queue = queue
+         @host = host
+         @port = port
+       end
+
+       def receive_data(data)
+         @queue.push(Marshal.load(data))
+       end
+
+       def start_server
+         EventMachine::start_server @host, @port, self
+         puts "Started ContentDataServer on #{@host}:#{@port}..."
+       end
+     end
+
+     class ContentDataSender
+       def initialize host, port
+         @host = host
+         @port = port
+       end
+
+       def send_content_data content_data
+         send_data(Marshal.dump(content_data))
+       end
+
+       def connect
+         EventMachine.run {
+           EventMachine.connect @host, @port, self
+         }
+       end
+     end
+
+   end
+ end
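
ContentDataReceiver and ContentDataSender exchange whole content-data objects as Marshal blobs over TCP via eventmachine. A minimal receiving-side sketch using eventmachine's standard Connection API (the handler class name and the no-framing assumption are illustrative, not taken from the gem):

require 'eventmachine'
require 'thread'

# EventMachine instantiates the handler class per connection and forwards the
# extra start_server arguments to #initialize.
class MarshalReceiver < EventMachine::Connection
  def initialize(queue)
    @queue = queue
  end

  def receive_data(data)
    # Assumption: each read delivers exactly one complete Marshal blob.
    @queue.push(Marshal.load(data))
  end
end

incoming = Queue.new
EventMachine.run do
  EventMachine.start_server('0.0.0.0', 3333, MarshalReceiver, incoming)
  puts 'Listening for content data on port 3333'
end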
@@ -0,0 +1,39 @@
+ module BBFS
+   module ContentServer
+
+     # Simple indexer: gets input events (files to index) and pushes
+     # content data updates into the output queue.
+     class QueueIndexer
+
+       def initialize input_queue, output_queue, content_data_path
+         @input_queue = input_queue
+         @output_queue = output_queue
+         @content_data_path = content_data_path
+       end
+
+       def run
+         server_content_data = ContentData::ContentData.new
+         server_content_data.from_file(@content_data_path)
+         # Start indexing on demand and write changes to queue.
+         thread = Thread.new do
+           while true do
+             event = @input_queue.pop
+             # Index files and add to copy queue.
+             if (event[0] == FileStatEnum::CHANGED || event[0] == FileStatEnum::NEW)
+               index_agent = IndexAgent.new
+               indexer_patterns = IndexerPatterns.new
+               indexer_patterns.add_pattern(event[1])
+               index_agent.index(indexer_patterns)
+               server_content_data.merge(index_agent.indexed_content)
+               # TODO(kolman) Don't write to file each change?
+               server_content_data.to_file(@content_data_path)
+               @output_queue.push(server_content_data)
+             end
+           end # while true do
+         end # Thread.new do
+         thread
+       end # def run
+
+     end # class QueueIndexer
+   end
+ end
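
QueueIndexer consumes file-monitoring events shaped like [state, path] and, after indexing, republishes the merged content data on its output queue. A rough usage sketch, assuming an existing content.data file and that FileStatEnum resolves through the file_monitoring gem (the paths here are made up):

require 'thread'

monitoring_events    = Queue.new
content_data_updates = Queue.new

indexer = BBFS::ContentServer::QueueIndexer.new(monitoring_events,
                                                content_data_updates,
                                                '/tmp/content.data')
indexer_thread = indexer.run

# Events are [state, path] pairs, as produced by the file_monitoring gem.
monitoring_events.push([FileStatEnum::NEW, '/tmp/monitored/some_file.txt'])

latest_content_data = content_data_updates.pop  # blocks until indexing finishes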
@@ -0,0 +1,25 @@
+ require_relative '../../lib/file_copy/copy.rb'
+
+ module BBFS
+   module ContentServer
+     module Spec
+
+       describe 'Backup Listener' do
+
+       end
+
+       describe 'Local file monitor' do
+
+       end
+
+       describe 'Local file indexer' do
+
+       end
+
+       describe 'File copier' do
+
+       end
+
+     end
+   end
+ end
metadata ADDED
@@ -0,0 +1,108 @@
+ --- !ruby/object:Gem::Specification
+ name: content_server
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+   prerelease:
+ platform: ruby
+ authors:
+ - Gena Petelko, Kolman Vornovitsky
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2012-04-08 00:00:00.000000000Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: file_monitoring
+   requirement: &70305881380440 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: *70305881380440
+ - !ruby/object:Gem::Dependency
+   name: file_indexing
+   requirement: &70305881379880 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: *70305881379880
+ - !ruby/object:Gem::Dependency
+   name: parameters
+   requirement: &70305881379340 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: *70305881379340
+ - !ruby/object:Gem::Dependency
+   name: file_copy
+   requirement: &70305881378800 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: *70305881378800
+ - !ruby/object:Gem::Dependency
+   name: content_data
+   requirement: &70305881378380 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: *70305881378380
+ description: Monitor and Index a directory and back it up to backup server.
+ email: kolmanv@gmail.com
+ executables:
+ - content_server
+ - backup_server
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/content_server.rb
+ - lib/content_server/content_receiver.rb
+ - lib/content_server/queue_indexer.rb
+ - test/content_server/content_server_spec.rb
+ - bin/content_server
+ - bin/backup_server
+ homepage: http://github.com/kolmanv/bbfs
+ licenses: []
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 1.8.15
+ signing_key:
+ specification_version: 3
+ summary: Servers for backing up content.
+ test_files:
+ - test/content_server/content_server_spec.rb
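
The metadata above is the YAML that RubyGems generated from the gem's .gemspec. An approximately equivalent specification, reconstructed from that metadata (the original .gemspec file is not included in this diff), would look like:

Gem::Specification.new do |s|
  s.name        = 'content_server'
  s.version     = '0.0.1'
  s.date        = '2012-04-08'
  s.authors     = ['Gena Petelko, Kolman Vornovitsky']
  s.email       = 'kolmanv@gmail.com'
  s.summary     = 'Servers for backing up content.'
  s.description = 'Monitor and Index a directory and back it up to backup server.'
  s.homepage    = 'http://github.com/kolmanv/bbfs'
  s.files       = ['lib/content_server.rb',
                   'lib/content_server/content_receiver.rb',
                   'lib/content_server/queue_indexer.rb']
  s.executables = ['content_server', 'backup_server']
  s.test_files  = ['test/content_server/content_server_spec.rb']
  s.add_runtime_dependency 'file_monitoring'
  s.add_runtime_dependency 'file_indexing'
  s.add_runtime_dependency 'parameters'
  s.add_runtime_dependency 'file_copy'
  s.add_runtime_dependency 'content_data'
end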