content_server 0.0.2 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/bin/backup_server CHANGED
@@ -5,11 +5,23 @@
  # A remote server copies new/changed files in the backup server. The backup server
  # index those new files too and send the content data (their SHA1 to the original server).
 
+ # NOTE this file mainly is a copy of content_server
+
  begin
+ require 'yaml'
+ require 'params'
+ require 'run_in_background'
  require 'content_server'
  rescue LoadError
  require 'rubygems'
+ require 'yaml'
+ require 'params'
+ require 'run_in_background'
  require 'content_server'
  end
+ include BBFS
+
+ Params.init ARGV
+ Log.init
 
- BBFS::ContentServer.run_backup_server
+ RunInBackground.run { ContentServer.run_backup_server }
data/bin/content_server CHANGED
@@ -5,10 +5,21 @@
  # Each unique content is backed up to the remote (backup) server.
 
  begin
+ require 'yaml'
+ require 'params'
+ require 'run_in_background'
  require 'content_server'
- rescue LoadError
+ rescue LoadError => e
+ print e.to_s
  require 'rubygems'
+ require 'yaml'
+ require 'params'
+ require 'run_in_background'
  require 'content_server'
  end
+ include BBFS
 
- BBFS::ContentServer.run
+ Params.init ARGV
+ Log.init
+
+ RunInBackground.run { ContentServer.run }
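Note: both executables now share the same boot sequence: initialize Params from the command line, bring up logging, then hand the long-running entry point to RunInBackground so the server can run as a background service. A minimal sketch of that pattern, boiled down from the two scripts above (the LoadError/rubygems fallback omitted for brevity):

    #!/usr/bin/env ruby
    require 'params'
    require 'run_in_background'
    require 'content_server'   # also pulls in the bbfs 'log' gem
    include BBFS

    Params.init ARGV   # parse command-line parameter overrides
    Log.init           # logging must be up before any Log.* call
    # Wrap the entry point so RunInBackground can daemonize it.
    RunInBackground.run { ContentServer.run }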
lib/content_server.rb CHANGED
@@ -1,30 +1,34 @@
+ require 'fileutils'
+ require 'set'
+ require 'thread'
+
  require 'content_data'
- require 'file_copy'
+ require 'content_server/content_receiver'
+ require 'content_server/queue_indexer'
+ require 'content_server/queue_copy'
+ require 'content_server/remote_content'
  require 'file_indexing'
  require 'file_monitoring'
+ require 'log'
+ require 'networking/tcp'
  require 'params'
- require 'set'
- require 'thread'
 
- require_relative 'content_server/content_receiver'
- require_relative 'content_server/queue_indexer'
+
 
  # Content server. Monitors files, index local files, listen to backup server content,
  # copy changes and new files to backup server.
  module BBFS
  module ContentServer
- VERSION = '0.0.1'
-
- PARAMS.parameter('remote_server', 'localhost', 'IP or DNS of backup server.')
- PARAMS.parameter('remote_listening_port', 3333, 'Listening port for backup server content data.')
- PARAMS.parameter('backup_username', nil, 'Backup server username.')
- PARAMS.parameter('backup_password', nil, 'Backup server password.')
- PARAMS.parameter('backup_destination_folder', File.expand_path('~/backup_data'),
- 'Backup server destination folder.')
- PARAMS.parameter('content_data_path', File.expand_path('~/.bbfs/var/content.data'),
+ Params.string('remote_server', 'localhost', 'IP or DNS of backup server.')
+ Params.string('backup_username', nil, 'Backup server username.')
+ Params.string('backup_password', nil, 'Backup server password.')
+ Params.integer('backup_file_listening_port', 4444, 'Listening port in backup server for files')
+ Params.string('content_data_path', File.expand_path('~/.bbfs/var/content.data'),
  'ContentData file path.')
- PARAMS.parameter('monitoring_config_path', File.expand_path('~/.bbfs/etc/file_monitoring.yml'),
+ Params.string('monitoring_config_path', File.expand_path('~/.bbfs/etc/file_monitoring.yml'),
  'Configuration file for monitoring.')
+ Params.integer('remote_content_port', 3333, 'Default port for remote content copy.')
+ Params.integer('backup_check_delay', 5, 'Time between two content vs backup checks.')
 
  def run
  all_threads = []
@@ -33,7 +37,7 @@ module BBFS
  # Initialize/Start monitoring
  monitoring_events = Queue.new
  fm = FileMonitoring::FileMonitoring.new
- fm.set_config_path(PARAMS.monitoring_config_path)
+ fm.set_config_path(Params['monitoring_config_path'])
  fm.set_event_queue(monitoring_events)
  # Start monitoring and writing changes to queue
  all_threads << Thread.new do
@@ -42,103 +46,74 @@ module BBFS
  # # # # # # # # # # # # # # # # # # # # # # # # #
  # Initialize/Start backup server content data listener
- backup_server_content_data = nil
- backup_server_content_data_queue = Queue.new
- content_data_receiver = ContentDataReceiver.new(
- backup_server_content_data_queue,
- PARAMS.remote_listening_port)
+ #backup_server_content_data = nil
+ #backup_server_content_data_queue = Queue.new
+ #content_data_receiver = ContentDataReceiver.new(
+ # backup_server_content_data_queue,
+ # Params['remote_listening_port'])
  # Start listening to backup server
- all_threads << Thread.new do
- content_data_receiver.run
- end
+ #all_threads << Thread.new do
+ # content_data_receiver.run
+ #end
 
  # # # # # # # # # # # # # #
  # Initialize/Start local indexer
  local_server_content_data_queue = Queue.new
  queue_indexer = QueueIndexer.new(monitoring_events,
  local_server_content_data_queue,
- PARAMS.content_data_path)
+ Params['content_data_path'])
  # Start indexing on demand and write changes to queue
  all_threads << queue_indexer.run
 
  # # # # # # # # # # # # # # # # # # # # # #
  # Initialize/Start content data comparator
  copy_files_events = Queue.new
+ local_dynamic_content_data = ContentData::DynamicContentData.new
  all_threads << Thread.new do
- backup_server_content_data = ContentData::ContentData.new
- local_server_content_data = nil
+ # backup_server_content_data = ContentData::ContentData.new
+ # local_server_content_data = nil
  while true do
 
  # Note: This thread should be the only consumer of local_server_content_data_queue
- p 'Waiting on local server content data.'
+ Log.info 'Waiting on local server content data.'
  local_server_content_data = local_server_content_data_queue.pop
-
- # Note: This thread should be the only consumer of backup_server_content_data_queue
- # Note: The server will wait in the first time on pop until backup sends it's content data
- while backup_server_content_data_queue.size > 0
- p 'Waiting on backup server content data.'
- backup_server_content_data = backup_server_content_data_queue.pop
- end
-
- p 'Updating file copy queue.'
- p "local_server_content_data #{local_server_content_data}."
- p "backup_server_content_data #{backup_server_content_data}."
- # Remove backup content data from local server
- content_to_copy = ContentData::ContentData.remove(backup_server_content_data, local_server_content_data)
- # Add copy instruction in case content is not empty
- p "Content to copy: #{content_to_copy}"
- copy_files_events.push(content_to_copy) unless content_to_copy.empty?
+ local_dynamic_content_data.update(local_server_content_data)
+ #
+ # # Note: This thread should be the only consumer of backup_server_content_data_queue
+ # # Note: The server will wait in the first time on pop until backup sends it's content data
+ # while backup_server_content_data_queue.size > 0
+ # Log.info 'Waiting on backup server content data.'
+ # backup_server_content_data = backup_server_content_data_queue.pop
+ # end
+
+ # Log.info 'Updating file copy queue.'
+ # Log.debug1 "local_server_content_data #{local_server_content_data}."
+ # Log.debug1 "backup_server_content_data #{backup_server_content_data}."
+ # # Remove backup content data from local server
+ # content_to_copy = ContentData::ContentData.remove(backup_server_content_data, local_server_content_data)
+ # content_to_copy = local_server_content_data
+ # # Add copy instruction in case content is not empty
+ # Log.debug1 "Content to copy: #{content_to_copy}"
+ # copy_files_events.push([:COPY_MESSAGE, content_to_copy]) unless content_to_copy.empty?
  end
  end
 
+ remote_content_client = RemoteContentClient.new(local_dynamic_content_data,
+ Params['remote_content_port'])
+ all_threads << remote_content_client.tcp_thread
 
  # # # # # # # # # # # # # # # #
  # Start copying files on demand
- all_threads << Thread.new do
- while true do
- p 'Waiting on copy files events.'
- copy_event = copy_files_events.pop
-
- p "Copy file event: #{copy_event}"
-
- # Prepare source,dest map for copy.
- used_contents = Set.new
- files_map = Hash.new
- p "Instances: #{copy_event.instances}"
- copy_event.instances.each { |key, instance|
- p "Instance: #{instance}"
- # Add instance only if such content has not added yet.
- if !used_contents.member?(instance.checksum)
- files_map[instance.full_path] = destination_filename(
- PARAMS.backup_destination_folder,
- instance.checksum)
- used_contents.add(instance.checksum)
- end
- }
-
- p "Copying files: #{files_map}."
- # Copy files, waits until files are finished copying.
- FileCopy::sftp_copy(PARAMS.backup_username,
- PARAMS.backup_password,
- PARAMS.remote_server,
- files_map)
- end
- end
+ copy_server = FileCopyServer.new(copy_files_events, Params['backup_file_listening_port'])
+ all_threads.concat(copy_server.run())
 
+ # Finalize server threads.
  all_threads.each { |t| t.abort_on_exception = true }
  all_threads.each { |t| t.join }
  # Should never reach this line.
  end
  module_function :run
 
- # Creates destination filename for backup server, input is base folder and sha1.
- # for example: folder:/mnt/hd1/bbbackup, sha1:d0be2dc421be4fcd0172e5afceea3970e2f3d940
- # dest filename: /mnt/hd1/bbbackup/d0/be/2d/d0be2dc421be4fcd0172e5afceea3970e2f3d940
- def destination_filename(folder, sha1)
- File.join(folder, sha1[0,2], sha1[2,2], sha1[4,2], sha1)
- end
- module_function :destination_filename
-
  def run_backup_server
  all_threads = []
 
@@ -146,7 +121,7 @@ module BBFS
  # Initialize/Start monitoring
  monitoring_events = Queue.new
  fm = FileMonitoring::FileMonitoring.new
- fm.set_config_path(PARAMS.monitoring_config_path)
+ fm.set_config_path(Params['monitoring_config_path'])
  fm.set_event_queue(monitoring_events)
  # Start monitoring and writing changes to queue
  all_threads << Thread.new do
@@ -158,23 +133,51 @@ module BBFS
  local_server_content_data_queue = Queue.new
  queue_indexer = QueueIndexer.new(monitoring_events,
  local_server_content_data_queue,
- PARAMS.content_data_path)
+ Params['content_data_path'])
  # Start indexing on demand and write changes to queue
  all_threads << queue_indexer.run
 
  # # # # # # # # # # # # # # # # # # # # # # # # # # #
  # Initialize/Start backup server content data sender
- content_data_sender = ContentDataSender.new(
- PARAMS.remote_server,
- PARAMS.remote_listening_port)
+ dynamic_content_data = ContentData::DynamicContentData.new
+ #content_data_sender = ContentDataSender.new(
+ # Params['remote_server'],
+ # Params['remote_listening_port'])
  # Start sending to backup server
  all_threads << Thread.new do
  while true do
- p 'Waiting on local server content data queue.'
- content_data_sender.send_content_data(local_server_content_data_queue.pop)
+ Log.info 'Waiting on local server content data queue.'
+ cd = local_server_content_data_queue.pop
+ # content_data_sender.send_content_data(cd)
+ dynamic_content_data.update(cd)
  end
  end
 
+ content_server_dynamic_content_data = ContentData::DynamicContentData.new
+ remote_content = ContentServer::RemoteContent.new(content_server_dynamic_content_data,
+ Params['remote_server'],
+ Params['remote_content_port'],
+ Params['backup_destination_folder'])
+ all_threads.concat(remote_content.run())
+
+ file_copy_client = FileCopyClient.new(Params['remote_server'],
+ Params['backup_file_listening_port'],
+ dynamic_content_data)
+ all_threads.concat(file_copy_client.threads)
+
+ # Each
+ all_threads << Thread.new do
+ loop do
+ sleep(Params['backup_check_delay'])
+ local_cd = dynamic_content_data.last_content_data()
+ remote_cd = content_server_dynamic_content_data.last_content_data()
+ diff = ContentData::ContentData.remove(local_cd, remote_cd)
+ Log.debug2("Files to send? #{!diff.empty?}")
+ file_copy_client.request_copy(diff) unless diff.empty?
+ end
+ end
+
+
  all_threads.each { |t| t.abort_on_exception = true }
  all_threads.each { |t| t.join }
  # Should never reach this line.
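Note: the new backup flow above replaces push-based content exchange with periodic reconciliation: every backup_check_delay seconds the backup diffs its own index against the content server's snapshot and requests whatever is missing. Going by the 0.0.2 comment ("Remove backup content data from local server"), ContentData.remove(base, other) appears to yield the entries of other that are absent from base. A plain-Ruby analogue of that reconciliation step (hypothetical data, not the real ContentData API):

    require 'set'

    # remove(base, other): keep entries of `other` whose checksum is not in `base`.
    def remove_analogue(base_checksums, other)
      other.reject { |checksum, _path| base_checksums.include?(checksum) }
    end

    backup_has = Set['d0be2dc421be4fcd0172e5afceea3970e2f3d940']
    server_has = { 'd0be2dc421be4fcd0172e5afceea3970e2f3d940' => '/data/a.txt',
                   '0a4d55a8d778e5022fab701977c5d840bbc486d0' => '/data/b.txt' }
    to_copy = remove_analogue(backup_has, server_has)
    # => only /data/b.txt's content is requested from the content server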
@@ -183,3 +186,4 @@ module BBFS
 
  end # module ContentServer
  end # module BBFS
+
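Both run and run_backup_server now funnel state through ContentData::DynamicContentData, which, as used in this file, is a holder for the most recent ContentData snapshot shared between one updater thread and several reader threads. A minimal stand-in for the subset of its interface used here (update/last_content_data), assuming it is meant to be thread-safe:

    require 'thread'

    # Hypothetical analogue of ContentData::DynamicContentData as used above.
    class LatestSnapshot
      def initialize
        @mutex = Mutex.new
        @data  = nil
      end

      # Called by the indexer/receiver thread with a fresh snapshot.
      def update(data)
        @mutex.synchronize { @data = data }
      end

      # Called by comparator/copy threads; returns the newest snapshot.
      def last_content_data
        @mutex.synchronize { @data }
      end
    end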
lib/content_server/content_receiver.rb CHANGED
@@ -1,3 +1,5 @@
+ require 'log'
+ require 'params'
  require 'socket'
 
  module BBFS
@@ -11,32 +13,46 @@ module BBFS
 
  def run
  Socket.tcp_server_loop(@port) do |sock, client_addrinfo|
- p 'Waiting on sock.gets.'
- size_of_data = sock.read(4).unpack("l")[0]
- p "Size of data: #{size_of_data}"
- size_of_data = size_of_data.to_i
- p "Size of data: #{size_of_data}"
- data = sock.read(size_of_data)
- p "Data received: #{data}"
- unmarshaled_data = Marshal.load(data)
- p "Unmarshaled data: #{unmarshaled_data}"
- @queue.push unmarshaled_data
+ while size_of_data = sock.read(4)
+ size_of_data = size_of_data.unpack("l")[0]
+ Log.debug3 "Size of data: #{size_of_data}"
+ data = sock.read(size_of_data)
+ #Log.debug3 "Data received: #{data}"
+ unmarshaled_data = Marshal.load(data)
+ #Log.debug3 "Unmarshaled data: #{unmarshaled_data}"
+ @queue.push unmarshaled_data
+ Log.debug3 "Socket closed? #{sock.closed?}."
+ break if sock.closed?
+ Log.debug1 'Waiting on sock.read'
+ end
+ Log.debug1 'Exited, socket closed or read returned nil.'
  end
  end
  end
 
  class ContentDataSender
+
  def initialize host, port
- p "Connecting to content server #{host}:#{port}."
- @tcp_socket = TCPSocket.open(host, port)
+ @host = host
+ @port = port
+ open_socket
+ end
+
+ def open_socket
+ Log.debug1 "Connecting to content server #{@host}:#{@port}."
+ @tcp_socket = TCPSocket.new(@host, @port)
  end
 
  def send_content_data content_data
- p "Data to send: #{content_data}"
+ open_socket if @tcp_socket.closed?
+ #Log.debug3 "Data to send: #{content_data}"
  marshal_data = Marshal.dump(content_data)
- p "Marshaled size: #{marshal_data.length}."
+ Log.debug3 "Marshaled size: #{marshal_data.length}."
  data_size = [marshal_data.length].pack("l")
- p "Marshaled data: #{marshal_data}."
+ #Log.debug3 "Marshaled data: #{marshal_data}."
+ if data_size.nil? || marshal_data.nil?
+ Log.debug3 'Send data is nil!!!!!!!!'
+ end
  @tcp_socket.write data_size
  @tcp_socket.write marshal_data
  end
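Both ends of this connection speak the same wire format: a 4-byte length header packed with Array#pack("l"), followed by a Marshal dump of the payload. A self-contained round trip of that framing over a StringIO, no sockets needed:

    require 'stringio'

    payload = Marshal.dump({ 'd0be2dc4' => '/data/a.txt' })
    wire    = [payload.length].pack('l') + payload  # size header + body

    io       = StringIO.new(wire)
    size     = io.read(4).unpack('l')[0]   # mirrors ContentDataReceiver#run
    restored = Marshal.load(io.read(size))
    restored == { 'd0be2dc4' => '/data/a.txt' }  # => true

Note that pack('l') is native-endian, so this framing only works while both machines share byte order; a portable protocol would pack with 'N' or 'l<'.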
lib/content_server/file_streamer.rb ADDED
@@ -0,0 +1,191 @@
+ require 'thread'
+
+ require 'file_indexing/index_agent'
+ require 'log'
+
+ module BBFS
+ module ContentServer
+
+ Params.integer('streaming_chunk_size', 64*1024,
+ 'Max number of content bytes to send in one chunk.')
+ Params.integer('file_streaming_timeout', 5*60,
+ 'If no action is taken on a file streamer, abort copy.')
+ Params.string('backup_destination_folder', '',
+ 'Backup server destination folder, default is the relative local folder.')
+
+ class Stream
+ attr_reader :checksum, :path, :file, :size
+ def initialize(checksum, path, file, size)
+ @checksum = checksum
+ @path = path
+ @file = file
+ @size = size
+ end
+
+ def self.close_delete_stream(checksum, streams_hash)
+ if streams_hash.key?(checksum)
+ Log.info("close_delete_stream #{streams_hash[checksum].file}")
+ begin
+ streams_hash[checksum].file.close()
+ rescue IOError => e
+ Log.warning("While closing stream, could not close file #{streams_hash[checksum].path}." \
+ " #{e.to_s}")
+ end
+ streams_hash.delete(checksum)
+ end
+ end
+
+ end
+
+ class FileStreamer
+ attr_reader :thread
+
+ :NEW_STREAM
+ :ABORT_STREAM
+ :COPY_CHUNK
+
+ def initialize(send_chunk_clb, abort_streaming_clb=nil)
+ @send_chunk_clb = send_chunk_clb
+ @abort_streaming_clb = abort_streaming_clb
+ @stream_queue = Queue.new
+
+ # Used from internal thread only.
+ @streams = {}
+ @thread = run
+ end
+
+ def start_streaming(checksum, path)
+ @stream_queue << [:NEW_STREAM, [checksum, path]]
+ end
+
+ def abort_streaming(checksum)
+ @stream_queue << [:ABORT_STREAM, checksum]
+ end
+
+ def run
+ return Thread.new do
+ loop {
+ checksum = handle(@stream_queue.pop)
+ }
+ end
+ end
+
+ def handle(message)
+ type, content = message
+ if type == :NEW_STREAM
+ checksum, path = content
+ if !@streams.key? checksum
+ begin
+ file = File.new(path, 'rb')
+ Log.info("File streamer: #{file.to_s}.")
+ rescue IOError => e
+ Log.warning("Could not stream local file #{path}. #{e.to_s}")
+ end
+ @streams[checksum] = Stream.new(checksum, path, file, file.size)
+ @stream_queue << [:COPY_CHUNK, checksum]
+ end
+ elsif type == :ABORT_STREAM
+ Stream.close_delete_stream(content, @streams)
+ elsif type == :COPY_CHUNK
+ checksum = content
+ if @streams.key?(checksum)
+ chunk = @streams[checksum].file.read(Params['streaming_chunk_size'])
+ if chunk.nil?
+ # No more to read, send end of file.
+ @send_chunk_clb.call(checksum, @streams[checksum].size, nil, nil)
+ Stream.close_delete_stream(checksum, @streams)
+ else
+ chunk_checksum = FileIndexing::IndexAgent.get_content_checksum(chunk)
+ @send_chunk_clb.call(checksum, @streams[checksum].size, chunk, chunk_checksum)
+ @stream_queue << [:COPY_CHUNK, checksum]
+ end
+ else
+ Log.info("No checksum found to copy chunk. #{checksum}.")
+ end
+ end
+
+ end
+ end
+
+ # Start implementing as dummy, no self thread for now.
+ # Later when we need it to response and send aborts, timeouts, ect, it will
+ # need self thread.
+ class FileReceiver
+
+ def initialize(file_done_clb=nil, file_abort_clb=nil)
+ @file_done_clb = file_done_clb
+ @file_abort_clb = file_abort_clb
+ @streams = {}
+ end
+
+ def receive_chunk(file_checksum, file_size, content, content_checksum)
+ if !content.nil? && !content_checksum.nil?
+ received_content_checksum = FileIndexing::IndexAgent.get_content_checksum(content)
+ comment = "Calculated received chunk with content checksum #{received_content_checksum}" \
+ " vs message content checksum #{content_checksum}, " \
+ "file checksum #{file_checksum}"
+ Log.debug1(comment) if content_checksum == received_content_checksum
+ Log.error(comment) if content_checksum != received_content_checksum
+
+ if !@streams.key?(file_checksum)
+ path = FileReceiver.destination_filename(Params['backup_destination_folder'],
+ file_checksum)
+ if File.exists?(path)
+ Log.warning("File already exists (#{path}) not writing.")
+ @file_abort_clb.call(file_checksum) unless @file_abort_clb.nil?
+ else
+ begin
+ # Make the directory if does not exists.
+ Log.debug1("Writing to: #{path}")
+ Log.debug1("Creating directory: #{File.dirname(path)}")
+ FileUtils.makedirs(File.dirname(path))
+ file = File.new(path, 'wb')
+ @streams[file_checksum] = Stream.new(file_checksum, path, file, file_size)
+ rescue IOError => e
+ Log.warning("Could not stream write to local file #{path}. #{e.to_s}")
+ end
+ end
+ end
+ # We still check @streams has the key, because file can fail to open.
+ if @streams.key?(file_checksum)
+ FileReceiver.write_string_to_file(content, @streams[file_checksum].file)
+ Log.info("Written already #{@streams[file_checksum].file.size} bytes, " \
+ "out of #{file_size} (#{100.0*@streams[file_checksum].file.size/file_size}%)")
+ end
+ elsif content.nil? && content_checksum.nil?
+ if @streams.key?(file_checksum)
+ # Check written file checksum!
+ local_path = @streams[file_checksum].path
+ Stream.close_delete_stream(file_checksum, @streams)
+
+ local_file_checksum = FileIndexing::IndexAgent.get_checksum(local_path)
+ message = "Local checksum (#{local_file_checksum}) received checksum (#{file_checksum})."
+ Log.info(message) ? local_file_checksum == file_checksum : Log.error(message)
+ Log.info("File fully received #{local_path}")
+ @file_done_clb.call(local_file_checksum, local_path) unless @file_done_clb.nil?
+ end
+ else
+ Log.warning("Unexpected receive chuck message. file_checksum:#{file_checksum}, " \
+ "content.nil?:#{content.nil?}, content_checksum:#{content_checksum}")
+ end
+ end
+
+ def self.write_string_to_file(str, file)
+ Log.info("writing to file: #{file.to_s}.")
+ bytes_to_write = str.bytesize
+ while bytes_to_write > 0
+ bytes_to_write -= file.write(str)
+ end
+ end
+
+ # Creates destination filename for backup server, input is base folder and sha1.
+ # for example: folder:/mnt/hd1/bbbackup, sha1:d0be2dc421be4fcd0172e5afceea3970e2f3d940
+ # dest filename: /mnt/hd1/bbbackup/d0/be/2d/d0be2dc421be4fcd0172e5afceea3970e2f3d940
+ def self.destination_filename(folder, sha1)
+ File.join(folder, sha1[0,2], sha1[2,2], sha1)
+ end
+
+ end
+
+ end
+ end
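FileReceiver.destination_filename shards backed-up files by checksum prefix so no single directory accumulates too many entries. Note a small drift between comment and code: the inherited comment still shows the three-level layout from 0.0.2, while the code as added here nests only two levels and resolves like this:

    sha1 = 'd0be2dc421be4fcd0172e5afceea3970e2f3d940'
    File.join('/mnt/hd1/bbbackup', sha1[0, 2], sha1[2, 2], sha1)
    # => "/mnt/hd1/bbbackup/d0/be/d0be2dc421be4fcd0172e5afceea3970e2f3d940"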
lib/content_server/queue_copy.rb ADDED
@@ -0,0 +1,172 @@
+ require 'thread'
+
+ require 'content_server/file_streamer'
+ require 'file_indexing/index_agent'
+ require 'log'
+ require 'networking/tcp'
+
+ module BBFS
+ module ContentServer
+ Params.integer('ack_timeout', 5, 'Timeout of ack from backup server in seconds.')
+
+ # Copy message types.
+ :ACK_MESSAGE
+ :COPY_MESSAGE
+ :SEND_COPY_MESSAGE
+ :COPY_CHUNK
+ :ABORT_COPY
+
+ # Simple copier, gets inputs events (files to copy), requests ack from backup to copy
+ # then copies one file.
+ class FileCopyServer
+ def initialize(copy_input_queue, port)
+ # Local simple tcp connection.
+ @backup_tcp = Networking::TCPServer.new(port, method(:receive_message))
+ @copy_input_queue = copy_input_queue
+ # Stores for each checksum, the file source path.
+ # TODO(kolman): If there are items in copy_prepare which timeout (don't get ack),
+ # resend the ack request.
+ @copy_prepare = {}
+ @file_streamer = FileStreamer.new(method(:send_chunk))
+ end
+
+ def send_chunk(*arg)
+ @copy_input_queue.push([:COPY_CHUNK, arg])
+ end
+
+ def receive_message(addr_info, message)
+ # Add ack message to copy queue.
+ Log.info("message received: #{message}")
+ @copy_input_queue.push(message)
+ end
+
+ def run()
+ threads = []
+ threads << @backup_tcp.tcp_thread if @backup_tcp != nil
+ threads << Thread.new do
+ while true do
+ Log.info 'Waiting on copy files events.'
+ message_type, message_content = @copy_input_queue.pop
+
+ if message_type == :COPY_MESSAGE
+ Log.info "Copy file event: #{message_content}"
+ # Prepare source,dest map for copy.
+ message_content.instances.each { |key, instance|
+ @copy_prepare[instance.checksum] = instance.full_path
+ Log.info("Sending ack for: #{instance.checksum}")
+ @backup_tcp.send_obj([:ACK_MESSAGE, [instance.checksum, Time.now.to_i]])
+ }
+ elsif message_type == :ACK_MESSAGE
+ # Received ack from backup, copy file if all is good.
+ # The timestamp is of local content server! not backup server!
+ timestamp, ack, checksum = message_content
+
+ Log.info("Ack (#{ack}) received for: #{checksum}, timestamp: #{timestamp} " \
+ "now: #{Time.now.to_i}")
+
+ # Copy file if ack (does not exists on backup and not too much time passed)
+ if ack && (Time.now.to_i - timestamp < Params['ack_timeout'])
+ if !@copy_prepare.key?(checksum)
+ Log.warning("Ack was already received:#{checksum}")
+ else
+ path = @copy_prepare[checksum]
+ Log.info "Streaming file: #{checksum} #{path}."
+ @file_streamer.start_streaming(checksum, path)
+ end
+ else
+ Log.debug1("Ack timed out span: #{Time.now.to_i - timestamp} > " \
+ "timeout: #{Params['ack_timeout']}")
+ end
+ elsif message_type == :COPY_CHUNK
+ file_checksum, file_size, content, content_checksum = message_content
+ Log.info "Send chunk for file #{file_checksum}."
+ @backup_tcp.send_obj([:COPY_CHUNK, message_content])
+ if content.nil? and content_checksum.nil?
+ @copy_prepare.delete(file_checksum)
+ end
+ elsif message_type == :ABORT_COPY
+ Log.info("Aborting file copy: #{message_content}")
+ if @copy_prepare.key?(message_content)
+ Log.info("Aborting: #{@copy_prepare[message_content]}")
+ @copy_prepare.delete(message_content)
+ end
+ @file_streamer.abort_streaming(message_content)
+ else
+ Log.error("Copy event not supported: #{message_type}")
+ end # handle messages here
+ end
+ end
+ end
+ end # class QueueCopy
+
+ class FileCopyClient
+ def initialize(host, port, dynamic_content_data)
+ @local_queue = Queue.new
+ @dynamic_content_data = dynamic_content_data
+ @tcp_server = Networking::TCPClient.new(host, port, method(:handle_message))
+ @file_receiver = FileReceiver.new(method(:done_copy), method(:abort_copy))
+ @local_thread = Thread.new do
+ loop do
+ handle(@local_queue.pop)
+ end
+ end
+ end
+
+ def threads
+ ret = [@local_thread]
+ ret << @tcp_server.tcp_thread if @tcp_server != nil
+ return ret
+ end
+
+ def request_copy(content_data)
+ handle_message([:SEND_COPY_MESSAGE, content_data])
+ end
+
+ def abort_copy(checksum)
+ handle_message([:ABORT_COPY, checksum])
+ end
+
+ def done_copy(local_file_checksum, local_path)
+ Log.info("Done copy file: #{local_path}, #{local_file_checksum}")
+ end
+
+ def handle_message(message)
+ Log.debug2('QueueFileReceiver handle message')
+ @local_queue.push(message)
+ end
+
+ # This is a function which receives the messages (file or ack) and return answer in case
+ # of ack. Note that it is being executed from the class thread only!
+ def handle(message)
+ message_type, message_content = message
+ if message_type == :SEND_COPY_MESSAGE
+ Log.debug1("Requesting file (content data) to copy.")
+ Log.debug3("File requested: #{message_content.to_s}")
+ bytes_written = @tcp_server.send_obj([:COPY_MESSAGE, message_content])
+ Log.debug1("Sending copy message succeeded? bytes_written: #{bytes_written}.")
+ elsif message_type == :COPY_CHUNK
+ @file_receiver.receive_chunk(*message_content)
+ elsif message_type == :ACK_MESSAGE
+ checksum, timestamp = message_content
+ # Here we should check file existence
+ Log.debug1("Returning ack for: #{checksum}, timestamp: #{timestamp}")
+ Log.debug1("Ack: #{!@dynamic_content_data.exists?(checksum)}")
+ @tcp_server.send_obj([:ACK_MESSAGE, [timestamp,
+ !@dynamic_content_data.exists?(checksum),
+ checksum]])
+ elsif message_type == :ABORT_COPY
+ @tcp_server.send_obj([:ABORT_COPY, message_content])
+ else
+ Log.error("Unexpected message type: #{message_type}")
+ end
+ end
+
+ # Creates destination filename for backup server, input is base folder and sha1.
+ # for example: folder:/mnt/hd1/bbbackup, sha1:d0be2dc421be4fcd0172e5afceea3970e2f3d940
+ # dest filename: /mnt/hd1/bbbackup/d0/be/2d/d0be2dc421be4fcd0172e5afceea3970e2f3d940
+ def self.destination_filename(folder, sha1)
+ File.join(folder, sha1[0,2], sha1[2,2], sha1)
+ end
+ end # class QueueFileReceiver
+ end
+ end
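Pieced together from the two handlers above, one successful file transfer exchanges these messages (direction annotations are editorial, not part of the protocol):

    # backup -> server : [:COPY_MESSAGE, content_data]                request_copy
    # server -> backup : [:ACK_MESSAGE, [checksum, timestamp]]        per instance: "need this?"
    # backup -> server : [:ACK_MESSAGE, [timestamp, true, checksum]]  true == content is missing
    # server -> backup : [:COPY_CHUNK, [checksum, size, chunk, chunk_checksum]]  repeated
    # server -> backup : [:COPY_CHUNK, [checksum, size, nil, nil]]    nil chunk marks end of file
    # backup -> server : [:ABORT_COPY, checksum]                      e.g. file already on disk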
lib/content_server/queue_indexer.rb CHANGED
@@ -1,5 +1,6 @@
  require 'file_indexing/index_agent'
  require 'file_indexing/indexer_patterns'
+ require 'log'
 
  module BBFS
  module ContentServer
@@ -8,7 +9,7 @@ module BBFS
  # content data updates into output queue.
  class QueueIndexer
 
- def initialize input_queue, output_queue, content_data_path
+ def initialize(input_queue, output_queue, content_data_path)
  @input_queue = input_queue
  @output_queue = output_queue
  @content_data_path = content_data_path
@@ -16,33 +17,94 @@ module BBFS
 
  def run
  server_content_data = ContentData::ContentData.new
- server_content_data.from_file(@content_data_path) rescue Errno::ENOENT
+ # Shallow check content data files.
+ tmp_content_data = ContentData::ContentData.new
+ tmp_content_data.from_file(@content_data_path) if File.exists?(@content_data_path)
+ tmp_content_data.instances.each_value do |instance|
+ # Skipp instances (files) which did not pass the shallow check.
+ Log.info('Shallow checking content data:')
+ if shallow_check(instance)
+ Log.info("exists: #{instance.full_path}")
+ server_content_data.add_content(tmp_content_data.contents[instance.checksum])
+ server_content_data.add_instance(instance)
+ else
+ Log.info("changed: #{instance.full_path}")
+ # Add non existing and changed files to index queue.
+ @input_queue.push([FileMonitoring::FileStatEnum::STABLE, instance.full_path])
+ end
+ end
+
  # Start indexing on demand and write changes to queue
  thread = Thread.new do
  while true do
- p 'Waiting on index input queue.'
- event = @input_queue.pop
- p "event: #{event}"
+ Log.info 'Waiting on index input queue.'
+ state, is_dir, path = @input_queue.pop
+ Log.info "event: #{state}, #{is_dir}, #{path}."
+
  # index files and add to copy queue
- if event[0] == FileMonitoring::FileStatEnum::CHANGED || event[0] == FileMonitoring::FileStatEnum::NEW
- p "Indexing content #{event[1]}."
+ # delete directory with it's sub files
+ # delete file
+ if state == FileMonitoring::FileStatEnum::STABLE && !is_dir
+ Log.info "Indexing content #{path}."
  index_agent = FileIndexing::IndexAgent.new
  indexer_patterns = FileIndexing::IndexerPatterns.new
- indexer_patterns.add_pattern(event[1])
+ indexer_patterns.add_pattern(path)
  index_agent.index(indexer_patterns, server_content_data)
+ Log.info("Failed files: #{index_agent.failed_files.to_a.join(',')}.") \
+ if !index_agent.failed_files.empty?
+ Log.info("indexed content #{index_agent.indexed_content}.")
  server_content_data.merge index_agent.indexed_content
- # TODO(kolman): Don't write to file each change?
- p "Writing server content data to #{@content_data_path}."
- server_content_data.to_file(@content_data_path)
- p 'Adding server content data to queue.'
- @output_queue.push(server_content_data)
+ elsif ((state == FileMonitoring::FileStatEnum::NON_EXISTING ||
+ state == FileMonitoring::FileStatEnum::CHANGED) && !is_dir)
+ # If file content changed, we should remove old instance.
+ key = FileIndexing::IndexAgent.global_path(path)
+ # Check if deleted file exists at content data.
+ Log.info("Instance to remove: #{key}")
+ if server_content_data.instances.key?(key)
+ instance_to_remove = server_content_data.instances[key]
+ # Remove file from content data only if it does not pass the shallow check, i.e.,
+ # content has changed/removed.
+ if !shallow_check(instance_to_remove)
+ content_to_remove = server_content_data.contents[instance_to_remove.checksum]
+ # Remove the deleted instance.
+ content_data_to_remove = ContentData::ContentData.new
+ content_data_to_remove.add_content(content_to_remove)
+ content_data_to_remove.add_instance(instance_to_remove)
+ # Remove the file.
+ server_content_data = ContentData::ContentData.remove_instances(
+ content_data_to_remove, server_content_data)
+ end
+ end
+ elsif state == FileMonitoring::FileStatEnum::NON_EXISTING && is_dir
+ Log.info("NonExisting/Changed: #{path}")
+ # Remove directory but only when non-existing.
+ Log.info("Directory to remove: #{path}")
+ global_dir = FileIndexing::IndexAgent.global_path(path)
+ server_content_data = ContentData::ContentData.remove_directory(
+ server_content_data, global_dir)
+ else
+ Log.info("This case should not be handled: #{state}, #{is_dir}, #{path}.")
  end
- #p 'End of if.'
+ # TODO(kolman): Don't write to file each change?
+ Log.info "Writing server content data to #{@content_data_path}."
+ server_content_data.to_file(@content_data_path)
+
+ Log.info 'Adding server content data to queue.'
+ @output_queue.push(ContentData::ContentData.new(server_content_data))
  end # while true do
  end # Thread.new do
  thread
  end # def run
 
+ # Check file existence, check it's size and modification date.
+ # If something wrong reindex the file and update content data.
+ def shallow_check(instance)
+ shallow_instance = FileIndexing::IndexAgent.create_shallow_instance(instance.full_path)
+ return false unless shallow_instance
+ return (shallow_instance.size == instance.size &&
+ shallow_instance.modification_time == instance.modification_time)
+ end
+
  end # class QueueIndexer
  end
  end
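The shallow check is what lets the indexer trust most of a previously saved index at startup instead of rehashing every file: an instance passes if the file still exists with the same size and modification time. An equivalent standalone check using File::Stat (a sketch, not the actual IndexAgent implementation):

    def shallow_unchanged?(path, expected_size, expected_mtime)
      stat = File.stat(path)
      stat.size == expected_size && stat.mtime == expected_mtime
    rescue Errno::ENOENT
      false  # a missing file always fails the shallow check
    end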
lib/content_server/remote_content.rb ADDED
@@ -0,0 +1,96 @@
+ require 'thread'
+
+ require 'content_data/dynamic_content_data'
+ require 'log'
+ require 'networking/tcp'
+ require 'params'
+
+ module BBFS
+ module ContentServer
+
+ Params.integer('remote_content_timeout', 10, 'Remote content desired freshness in seconds.')
+ Params.integer('max_content_timeout', 60*60, 'Remote content force refresh in seconds.')
+
+ # TODO(kolman): Use only one tcp/ip socket by utilizing one NQueue for many queues!
+ class RemoteContent
+ def initialize(dynamic_content_data, host, port, local_backup_folder)
+ @dynamic_content_data = dynamic_content_data
+ @remote_tcp = Networking::TCPClient.new(host, port, method(:receive_content))
+ @last_update_timestamp = nil
+ @content_server_content_data_path = File.join(local_backup_folder, 'remote',
+ host + '_' + port.to_s)
+ FileUtils.makedirs(@content_server_content_data_path)
+ end
+
+ def receive_content(message)
+ Log.debug1("Remote content data received: #{message.to_s}")
+ ref = @dynamic_content_data.last_content_data
+ @dynamic_content_data.update(message)
+
+ max_time_span = Params['max_content_timeout']
+ if !@last_update_timestamp.nil?
+ max_time_span = Time.now.to_i - @last_update_timestamp
+ end
+
+ @last_update_timestamp = Time.now.to_i
+
+ if ref != message || max_time_span >= Params['max_content_timeout']
+ Log.debug2("Remote content data changed or max time span is large, writing.")
+ Log.debug3("max_time_span: #{max_time_span}")
+ write_to = File.join(@content_server_content_data_path,
+ @last_update_timestamp.to_s + '.cd')
+ count = File.open(write_to, 'wb') { |f| f.write(message.to_s) }
+ else
+ Log.debug2("No need to write remote content data, it has not changed.")
+ end
+ end
+
+ def run()
+ threads = []
+ threads << @remote_tcp.tcp_thread if @remote_tcp != nil
+ threads << Thread.new do
+ loop do
+ # if need content data
+ if @last_update_timestamp.nil?
+ sleep_time_span = Params['remote_content_timeout']
+ else
+ sleep_time_span = Time.now.to_i - @last_update_timestamp
+ end
+
+ if sleep_time_span >= Params['remote_content_timeout']
+ # Send ping!
+ Log.debug2('Sending remote contend request.')
+ bytes_written = @remote_tcp.send_obj(nil)
+ Log.debug3("Bytes written #{bytes_written}.")
+ end
+
+ sleep_time_span = Time.now.to_i - @last_update_timestamp \
+ unless @last_update_timestamp.nil?
+ Log.debug2("sleep_time_span: #{sleep_time_span}")
+ sleep(sleep_time_span) if sleep_time_span > 0
+ end
+ end
+ end
+
+ class RemoteContentClient
+ def initialize(dynamic_content_data, port)
+ @dynamic_content_data = dynamic_content_data
+ @tcp_server = Networking::TCPServer.new(port, method(:content_requested))
+ end
+
+ def content_requested(addr_info, message)
+ # Send response.
+ Log.debug1('Local content data requested.')
+ @tcp_server.send_obj(@dynamic_content_data.last_content_data)
+ end
+
+ def tcp_thread
+ return @tcp_server.tcp_thread if @tcp_server != nil
+ nil
+ end
+
+ end
+
+ end
+ end
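The remote-content exchange is deliberately minimal: whenever its snapshot is older than remote_content_timeout seconds, RemoteContent sends a nil object as a ping, and RemoteContentClient answers with the content server's latest ContentData. On the wire (using the same length-prefixed Marshal framing sketched earlier) that reduces to:

    # backup -> content server : nil                  send_obj(nil) acts as "please refresh"
    # content server -> backup : <ContentData dump>   answered by content_requested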
lib/content_server/version.rb ADDED
@@ -0,0 +1,5 @@
+ module BBFS
+ module ContentServer
+ VERSION = "0.0.8"
+ end
+ end
spec/content_server/content_server_spec.rb CHANGED
@@ -1,3 +1,5 @@
+ require 'rspec'
+
  require_relative '../../lib/file_copy/copy.rb'
 
  module BBFS
spec/content_server/file_streamer_spec.rb ADDED
@@ -0,0 +1,53 @@
+ require 'log'
+ require 'rspec'
+ require 'stringio'
+
+ require_relative '../../lib/content_server/file_streamer'
+
+ # Uncomment to debug spec.
+ #BBFS::Params['log_write_to_console'] = true
+ BBFS::Params['log_write_to_file'] = false
+ BBFS::Params['log_debug_level'] = 0
+ BBFS::Params['streaming_chunk_size'] = 5
+ BBFS::Params.init ARGV
+ BBFS::Log.init
+ # Have to be set to test chunking mechanism.
+
+ module BBFS
+ module ContentServer
+ module Spec
+ describe 'FileStreamer' do
+ it 'should copy one file chunk by chunks and validate content' do
+ Log.info('#0 start')
+ orig_file = StringIO.new('Some content. Some content. Some content. Some content.')
+ Log.info("orig_file #{orig_file.to_s}.")
+ dest_file = StringIO.new
+ Log.info("dest_file #{dest_file.to_s}.")
+ streamer = nil
+ done = lambda{ |checksum, filename|
+ Log.info('#4 streaming done, check content ok.')
+ dest_file.string().should eq(orig_file.string())
+
+ Log.info('#5 exiting streamer thread.')
+ streamer.thread.exit
+ }
+ receiver = BBFS::ContentServer::FileReceiver.new(done)
+ send_chunk = lambda { |*args|
+ receiver.receive_chunk(*args)
+ }
+ Log.info('#2 start streaming.')
+ # This is for 1) FileStreamer :NEW_STREAM and 2) FileReceiver receive_chunk.
+ ::File.stub(:new).and_return(orig_file, dest_file)
+ # This is for Index agent 'get_checksum' which opens file, read content and validates
+ # checksum.
+ ::File.stub(:open).and_return(dest_file)
+
+ streamer = BBFS::ContentServer::FileStreamer.new(send_chunk)
+ Log.info('#3 start streaming.')
+ streamer.start_streaming('da39a3ee5e6b4b0d3255bfef95601890afd80709', 'dummy')
+ streamer.thread.join()
+ end
+ end
+ end
+ end
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: content_server
  version: !ruby/object:Gem::Version
- version: 0.0.2
+ version: 0.0.8
  prerelease:
  platform: ruby
  authors:
@@ -9,11 +9,11 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-04-11 00:00:00.000000000Z
+ date: 2012-09-02 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: content_data
- requirement: &70119917644580 !ruby/object:Gem::Requirement
+ requirement: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -21,10 +21,15 @@ dependencies:
  version: '0'
  type: :runtime
  prerelease: false
- version_requirements: *70119917644580
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
- name: eventmachine
- requirement: &70119917644120 !ruby/object:Gem::Requirement
+ name: file_indexing
+ requirement: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -32,10 +37,15 @@ dependencies:
  version: '0'
  type: :runtime
  prerelease: false
- version_requirements: *70119917644120
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
- name: file_copy
- requirement: &70119917643700 !ruby/object:Gem::Requirement
+ name: file_monitoring
+ requirement: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -43,10 +53,15 @@ dependencies:
  version: '0'
  type: :runtime
  prerelease: false
- version_requirements: *70119917643700
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
- name: file_indexing
- requirement: &70119917643240 !ruby/object:Gem::Requirement
+ name: log
+ requirement: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -54,10 +69,15 @@ dependencies:
  version: '0'
  type: :runtime
  prerelease: false
- version_requirements: *70119917643240
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
- name: file_monitoring
- requirement: &70119917642820 !ruby/object:Gem::Requirement
+ name: networking
+ requirement: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -65,10 +85,15 @@ dependencies:
  version: '0'
  type: :runtime
  prerelease: false
- version_requirements: *70119917642820
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: params
- requirement: &70119917574700 !ruby/object:Gem::Requirement
+ requirement: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -76,7 +101,28 @@ dependencies:
  version: '0'
  type: :runtime
  prerelease: false
- version_requirements: *70119917574700
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
+ - !ruby/object:Gem::Dependency
+ name: run_in_background
+ requirement: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  description: Monitor and Index a directory and back it up to backup server.
  email: kolmanv@gmail.com
  executables:
@@ -87,8 +133,13 @@ extra_rdoc_files: []
  files:
  - lib/content_server.rb
  - lib/content_server/content_receiver.rb
+ - lib/content_server/file_streamer.rb
+ - lib/content_server/queue_copy.rb
  - lib/content_server/queue_indexer.rb
- - test/content_server/content_server_spec.rb
+ - lib/content_server/remote_content.rb
+ - lib/content_server/version.rb
+ - spec/content_server/content_server_spec.rb
+ - spec/content_server/file_streamer_spec.rb
  - bin/content_server
  - bin/backup_server
  homepage: http://github.com/kolmanv/bbfs
@@ -111,9 +162,10 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 1.8.15
+ rubygems_version: 1.8.23
  signing_key:
  specification_version: 3
  summary: Servers for backing up content.
  test_files:
- - test/content_server/content_server_spec.rb
+ - spec/content_server/content_server_spec.rb
+ - spec/content_server/file_streamer_spec.rb