content_server 0.0.1 → 0.0.2

data/bin/backup_server CHANGED
@@ -1,5 +1,10 @@
  #!/usr/bin/env ruby
 
+ # Runs the backup server. This server monitors a set of directories (globs/patterns).
+ # The files in those directories are indexed (their SHA1 checksums are calculated).
+ # A remote server copies new/changed files to the backup server. The backup server
+ # indexes those new files too and sends the content data (their SHA1) back to the original server.
+
  begin
    require 'content_server'
  rescue LoadError
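
The header comment describes the core indexing idea: walk the monitored patterns and key every file by its SHA1. A minimal sketch of that idea, assuming a simple glob pattern; index_files and its checksum-to-path Hash are illustrative, not the gem's IndexAgent API:

    require 'digest'

    # Hypothetical helper: map each file matched by a glob to its SHA1.
    def index_files(pattern)
      Dir.glob(pattern).select { |path| File.file?(path) }
         .each_with_object({}) do |path, index|
        index[Digest::SHA1.file(path).hexdigest] = path  # checksum => one known location
      end
    end

    p index_files('/tmp/**/*')  # e.g. {"2fd4e1c6..." => "/tmp/a.txt", ...}
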
data/bin/content_server CHANGED
@@ -1,5 +1,9 @@
  #!/usr/bin/env ruby
 
+ # Runs the content server. This server monitors a set of directories (globs/patterns).
+ # The files in those directories are indexed (their SHA1 checksums are calculated).
+ # Each unique content is backed up to the remote (backup) server.
+
  begin
    require 'content_server'
  rescue LoadError
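
"Each unique content is backed up" reduces to a set difference on checksums. A minimal sketch under assumed data shapes (a checksum-to-path Hash locally and a Set of checksums the backup side has already reported; neither is the ContentData API):

    require 'set'

    local  = { 'aa11' => '/data/a.txt', 'bb22' => '/data/b.txt' }
    backup = Set.new(['aa11'])

    # Only contents unknown to the backup server need copying.
    to_copy = local.reject { |checksum, _path| backup.include?(checksum) }
    p to_copy  # => {"bb22"=>"/data/b.txt"}
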
data/lib/content_server/content_receiver.rb CHANGED
@@ -1,39 +1,44 @@
- require 'eventmachine'
+ require 'socket'
 
  module BBFS
    module ContentServer
 
      class ContentDataReceiver
-       def initialize queue, host, port
+       def initialize queue, port
          @queue = queue
-         @host = host
          @port = port
        end
 
-       def receive_data(data)
-         @queue.push(Marshal.load(data))
-       end
-
-       def start_server
-         EventMachine::start_server @host, @port, self
-         puts "Started ContentDataServer on #{@host}:#{@port}..."
+       def run
+         Socket.tcp_server_loop(@port) do |sock, client_addrinfo|
+           p 'Waiting on sock.gets.'
+           size_of_data = sock.read(4).unpack("l")[0]
+           p "Size of data: #{size_of_data}"
+           size_of_data = size_of_data.to_i
+           p "Size of data: #{size_of_data}"
+           data = sock.read(size_of_data)
+           p "Data received: #{data}"
+           unmarshaled_data = Marshal.load(data)
+           p "Unmarshaled data: #{unmarshaled_data}"
+           @queue.push unmarshaled_data
+         end
        end
      end
 
      class ContentDataSender
        def initialize host, port
-         @host = host
-         @port = port
+         p "Connecting to content server #{host}:#{port}."
+         @tcp_socket = TCPSocket.open(host, port)
        end
 
        def send_content_data content_data
-         send_data(Marshal.dump(content_data))
-       end
-
-       def connect
-         EventMachine.run {
-           EventMachine.connect @host, @port, self
-         }
+         p "Data to send: #{content_data}"
+         marshal_data = Marshal.dump(content_data)
+         p "Marshaled size: #{marshal_data.length}."
+         data_size = [marshal_data.length].pack("l")
+         p "Marshaled data: #{marshal_data}."
+         @tcp_socket.write data_size
+         @tcp_socket.write marshal_data
        end
      end
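
The rewrite drops EventMachine for a blocking protocol: each message is a 4-byte size header followed by a Marshal payload. A self-contained sketch of that framing (the helper names are ours, not the gem's); note that pack("l") is a native-endian 32-bit signed integer, so sender and receiver must share byte order, while pack("N") would be the portable network-order choice:

    require 'socket'

    # Write one length-prefixed Marshal frame (mirrors ContentDataSender).
    def send_framed(sock, obj)
      payload = Marshal.dump(obj)
      sock.write([payload.bytesize].pack('l'))  # 4-byte native-endian size header
      sock.write(payload)
    end

    # Read one frame back (mirrors the read side of ContentDataReceiver#run).
    def read_framed(sock)
      size = sock.read(4).unpack('l').first
      Marshal.load(sock.read(size))
    end

    a, b = UNIXSocket.pair  # in-process stand-in for the TCP connection
    send_framed(a, { 'checksum' => 'aa11' })
    p read_framed(b)  # => {"checksum"=>"aa11"}
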
data/lib/content_server/queue_indexer.rb CHANGED
@@ -1,3 +1,6 @@
+ require 'file_indexing/index_agent'
+ require 'file_indexing/indexer_patterns'
+
  module BBFS
    module ContentServer
 
@@ -13,22 +16,28 @@ module BBFS
 
      def run
        server_content_data = ContentData::ContentData.new
-       server_content_data.from_file(@content_data_path)
+       server_content_data.from_file(@content_data_path) rescue Errno::ENOENT
        # Start indexing on demand and write changes to queue
        thread = Thread.new do
          while true do
-           event = input_queue.pop
+           p 'Waiting on index input queue.'
+           event = @input_queue.pop
+           p "event: #{event}"
            # index files and add to copy queue
-           if (event[0] == FileStatEnum.CHANGED || event[0] == FileStatEnum.NEW)
-             index_agent = IndexAgent.new
-             indexer_patterns = IndexerPatterns.new
+           if event[0] == FileMonitoring::FileStatEnum::CHANGED || event[0] == FileMonitoring::FileStatEnum::NEW
+             p "Indexing content #{event[1]}."
+             index_agent = FileIndexing::IndexAgent.new
+             indexer_patterns = FileIndexing::IndexerPatterns.new
              indexer_patterns.add_pattern(event[1])
-             index_agent.index(indexer_patterns)
-             server_content_data.merge(index_agent.indexed_content)
-             # TODO(kolman) Don't write to file each change?
+             index_agent.index(indexer_patterns, server_content_data)
+             server_content_data.merge index_agent.indexed_content
+             # TODO(kolman): Don't write to file each change?
+             p "Writing server content data to #{@content_data_path}."
              server_content_data.to_file(@content_data_path)
-             output_queue.push(server_content_data)
+             p 'Adding server content data to queue.'
+             @output_queue.push(server_content_data)
            end
+           #p 'End of if.'
          end # while true do
        end # Thread.new do
        thread
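
Two notes on this hunk. First, the modifier rescue in `from_file(@content_data_path) rescue Errno::ENOENT` does not narrow anything: a modifier rescue catches any StandardError, and the constant is merely the expression's value when it fires; the apparent intent is to tolerate a missing content data file on the first run. Second, the loop is a standard blocking-queue worker, sketched self-contained below (the queues and event tuples are illustrative):

    require 'thread'

    input_queue  = Queue.new
    output_queue = Queue.new

    # Queue#pop blocks, so the worker sleeps until a monitoring event arrives.
    worker = Thread.new do
      loop do
        state, path = input_queue.pop
        output_queue.push([state, path])  # stand-in for the indexing work
      end
    end

    input_queue.push([:NEW, '/tmp/example.txt'])
    p output_queue.pop  # => [:NEW, "/tmp/example.txt"]
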
data/lib/content_server.rb CHANGED
@@ -1,8 +1,10 @@
  require 'content_data'
- require 'file_copy/copy'
+ require 'file_copy'
+ require 'file_indexing'
  require 'file_monitoring'
+ require 'params'
+ require 'set'
  require 'thread'
- require 'parameters'
 
  require_relative 'content_server/content_receiver'
  require_relative 'content_server/queue_indexer'
@@ -17,7 +19,7 @@ module BBFS
    PARAMS.parameter('remote_listening_port', 3333, 'Listening port for backup server content data.')
    PARAMS.parameter('backup_username', nil, 'Backup server username.')
    PARAMS.parameter('backup_password', nil, 'Backup server password.')
-   PARAMS.parameter('backup_destination_folder', File.expand_path('~/.bbfs/data'),
+   PARAMS.parameter('backup_destination_folder', File.expand_path('~/backup_data'),
                     'Backup server destination folder.')
    PARAMS.parameter('content_data_path', File.expand_path('~/.bbfs/var/content.data'),
                     'ContentData file path.')
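
Judging only from the calls visible in this diff, PARAMS.parameter declares a named setting with a default and a description, and the value later reads back as a method on PARAMS. A hedged usage sketch (the accessor behavior is assumed from the usage further down, not from the params gem's documentation):

    require 'params'  # the BBFS params gem added to the requires above

    PARAMS.parameter('remote_listening_port', 3333,
                     'Listening port for backup server content data.')

    p PARAMS.remote_listening_port  # => 3333 unless overridden
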
@@ -44,18 +46,16 @@ module BBFS
      backup_server_content_data_queue = Queue.new
      content_data_receiver = ContentDataReceiver.new(
          backup_server_content_data_queue,
-         PARAMS.backup_server,
          PARAMS.remote_listening_port)
      # Start listening to backup server
      all_threads << Thread.new do
-       content_data_receiver.start_server
+       content_data_receiver.run
      end
 
      # # # # # # # # # # # # # #
      # Initialize/Start local indexer
-     copy_files_events = Queue.new
      local_server_content_data_queue = Queue.new
-     queue_indexer = QueueIndexer.new(copy_files_events,
+     queue_indexer = QueueIndexer.new(monitoring_events,
                                       local_server_content_data_queue,
                                       PARAMS.content_data_path)
      # Start indexing on demand and write changes to queue
@@ -63,27 +63,31 @@ module BBFS
 
      # # # # # # # # # # # # # # # # # # # # # #
      # Initialize/Start content data comparator
+     copy_files_events = Queue.new
      all_threads << Thread.new do
-       backup_server_content = nil
-       local_server_content = nil
+       backup_server_content_data = ContentData::ContentData.new
+       local_server_content_data = nil
        while true do
 
          # Note: This thread should be the only consumer of local_server_content_data_queue
-         # Note: The server will wait in the first time on pop until local sends it's content data
-         while !local_server_content || local_server_content_data_queue.size > 1
-           local_server_content_data = local_server_content_data_queue.pop
-         end
+         p 'Waiting on local server content data.'
+         local_server_content_data = local_server_content_data_queue.pop
 
          # Note: This thread should be the only consumer of backup_server_content_data_queue
          # Note: The server will wait the first time on pop until backup sends its content data
-         while !backup_server_content || backup_server_content_data_queue.size > 1
+         while backup_server_content_data_queue.size > 0
+           p 'Waiting on backup server content data.'
            backup_server_content_data = backup_server_content_data_queue.pop
          end
 
+         p 'Updating file copy queue.'
+         p "local_server_content_data #{local_server_content_data}."
+         p "backup_server_content_data #{backup_server_content_data}."
          # Remove backup content data from local server
-         content_to_copy = ContentData.remove(backup_server_content_data, local_server_content)
+         content_to_copy = ContentData::ContentData.remove(backup_server_content_data, local_server_content_data)
          # Add copy instruction in case content is not empty
-         output_queue.push(content_to_copy) unless content_to_copy.empty?
+         p "Content to copy: #{content_to_copy}"
+         copy_files_events.push(content_to_copy) unless content_to_copy.empty?
        end
      end
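
The comparator now blocks on the local queue but drains the backup queue, keeping only the newest snapshot when several updates have piled up. A minimal sketch of that drain-to-latest pattern (names illustrative):

    require 'thread'

    # Keep `current` unless newer snapshots are queued; then take the last one.
    def latest_snapshot(queue, current)
      current = queue.pop while queue.size > 0
      current
    end

    q = Queue.new
    q.push(:stale)
    q.push(:fresh)
    p latest_snapshot(q, :initial)  # => :fresh
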
@@ -92,14 +96,19 @@ module BBFS
      # Start copying files on demand
      all_threads << Thread.new do
        while true do
+         p 'Waiting on copy files events.'
          copy_event = copy_files_events.pop
 
+         p "Copy file event: #{copy_event}"
+
          # Prepare source,dest map for copy.
          used_contents = Set.new
          files_map = Hash.new
-         copy_event.instances.each { |instance|
+         p "Instances: #{copy_event.instances}"
+         copy_event.instances.each { |key, instance|
+           p "Instance: #{instance}"
            # Add instance only if such content has not been added yet.
-           if !used_contents.has_key?(instance.checksum)
+           if !used_contents.member?(instance.checksum)
              files_map[instance.full_path] = destination_filename(
                  PARAMS.backup_destination_folder,
                  instance.checksum)
@@ -107,14 +116,16 @@ module BBFS
            end
          }
 
+         p "Copying files: #{files_map}."
          # Copy files, waits until files are finished copying.
          FileCopy::sftp_copy(PARAMS.backup_username,
                              PARAMS.backup_password,
-                             PARAMS.backup_server,
+                             PARAMS.remote_server,
                              files_map)
        end
      end
 
+     all_threads.each { |t| t.abort_on_exception = true }
      all_threads.each { |t| t.join }
      # Should never reach this line.
    end
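
FileCopy::sftp_copy is the gem's own helper; only its call shape (username, password, server, source-to-destination map) is visible here. For illustration, a comparable loop with the net-sftp gem might look as follows; this is an assumption about the underlying mechanism, not the gem's actual implementation:

    require 'net/sftp'  # assumed dependency for this sketch only

    def sftp_copy(username, password, server, files_map)
      Net::SFTP.start(server, username, :password => password) do |sftp|
        files_map.each do |local_path, remote_path|
          sftp.upload!(local_path, remote_path)  # blocks until this file is copied
        end
      end
    end
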
@@ -126,6 +137,7 @@ module BBFS
    def destination_filename(folder, sha1)
      File.join(folder, sha1[0,2], sha1[2,2], sha1[4,2], sha1)
    end
+   module_function :destination_filename
 
    def run_backup_server
      all_threads = []
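
destination_filename shards backed-up content by the leading hex pairs of its SHA1, so no single directory grows huge. A worked example with the SHA1 of "The quick brown fox jumps over the lazy dog":

    sha1 = '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12'
    File.join('~/backup_data', sha1[0,2], sha1[2,2], sha1[4,2], sha1)
    # => "~/backup_data/2f/d4/e1/2fd4e1c67a2d28fced849ee1bb76e7391b93eb12"
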
@@ -143,9 +155,8 @@ module BBFS
 
      # # # # # # # # # # # # # #
      # Initialize/Start local indexer
-     copy_files_events = Queue.new
      local_server_content_data_queue = Queue.new
-     queue_indexer = QueueIndexer.new(copy_files_events,
+     queue_indexer = QueueIndexer.new(monitoring_events,
                                       local_server_content_data_queue,
                                       PARAMS.content_data_path)
      # Start indexing on demand and write changes to queue
@@ -158,12 +169,17 @@ module BBFS
          PARAMS.remote_listening_port)
      # Start sending to backup server
      all_threads << Thread.new do
-       content_data_sender.connect
        while true do
+         p 'Waiting on local server content data queue.'
          content_data_sender.send_content_data(local_server_content_data_queue.pop)
        end
      end
+
+     all_threads.each { |t| t.abort_on_exception = true }
+     all_threads.each { |t| t.join }
+     # Should never reach this line.
    end
+   module_function :run_backup_server
 
  end # module ContentServer
  end # module BBFS
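
Both entry points now set abort_on_exception before joining. Without it, an exception in a worker thread surfaces only when that thread is joined; with it, the failure takes down the whole process immediately, which is the right behavior for threads meant to run forever. A small demonstration:

    worker = Thread.new do
      sleep 0.1
      raise 'indexer died'  # kills the process once the flag is set
    end
    worker.abort_on_exception = true

    sleep  # main thread is interrupted by RuntimeError: indexer died
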
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: content_server
  version: !ruby/object:Gem::Version
-   version: 0.0.1
+   version: 0.0.2
  prerelease:
  platform: ruby
  authors:
@@ -9,11 +9,11 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-04-08 00:00:00.000000000Z
+ date: 2012-04-11 00:00:00.000000000Z
  dependencies:
  - !ruby/object:Gem::Dependency
-   name: file_monitoring
-   requirement: &70305881380440 !ruby/object:Gem::Requirement
+   name: content_data
+   requirement: &70119917644580 !ruby/object:Gem::Requirement
      none: false
      requirements:
      - - ! '>='
@@ -21,10 +21,10 @@ dependencies:
          version: '0'
    type: :runtime
    prerelease: false
-   version_requirements: *70305881380440
+   version_requirements: *70119917644580
  - !ruby/object:Gem::Dependency
-   name: file_indexing
-   requirement: &70305881379880 !ruby/object:Gem::Requirement
+   name: eventmachine
+   requirement: &70119917644120 !ruby/object:Gem::Requirement
      none: false
      requirements:
      - - ! '>='
@@ -32,10 +32,10 @@ dependencies:
          version: '0'
    type: :runtime
    prerelease: false
-   version_requirements: *70305881379880
+   version_requirements: *70119917644120
  - !ruby/object:Gem::Dependency
-   name: parameters
-   requirement: &70305881379340 !ruby/object:Gem::Requirement
+   name: file_copy
+   requirement: &70119917643700 !ruby/object:Gem::Requirement
      none: false
      requirements:
      - - ! '>='
@@ -43,10 +43,10 @@ dependencies:
          version: '0'
    type: :runtime
    prerelease: false
-   version_requirements: *70305881379340
+   version_requirements: *70119917643700
  - !ruby/object:Gem::Dependency
-   name: file_copy
-   requirement: &70305881378800 !ruby/object:Gem::Requirement
+   name: file_indexing
+   requirement: &70119917643240 !ruby/object:Gem::Requirement
      none: false
      requirements:
      - - ! '>='
@@ -54,10 +54,21 @@ dependencies:
          version: '0'
    type: :runtime
    prerelease: false
-   version_requirements: *70305881378800
+   version_requirements: *70119917643240
  - !ruby/object:Gem::Dependency
-   name: content_data
-   requirement: &70305881378380 !ruby/object:Gem::Requirement
+   name: file_monitoring
+   requirement: &70119917642820 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: *70119917642820
+ - !ruby/object:Gem::Dependency
+   name: params
+   requirement: &70119917574700 !ruby/object:Gem::Requirement
      none: false
      requirements:
      - - ! '>='
@@ -65,7 +76,7 @@ dependencies:
          version: '0'
    type: :runtime
    prerelease: false
-   version_requirements: *70305881378380
+   version_requirements: *70119917574700
  description: Monitor and Index a directory and back it up to backup server.
  email: kolmanv@gmail.com
  executables: