content_server 0.0.10 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,89 @@
+ require 'fileutils'
+ require 'set'
+ require 'thread'
+
+ require 'content_data'
+ require 'content_server/content_receiver'
+ require 'content_server/queue_indexer'
+ require 'content_server/queue_copy'
+ require 'content_server/remote_content'
+ require 'file_indexing'
+ require 'file_monitoring'
+ require 'log'
+ require 'networking/tcp'
+ require 'params'
+ require 'process_monitoring/thread_safe_hash'
+ require 'process_monitoring/monitoring'
+ require 'process_monitoring/monitoring_info'
+
+ # Content server. Monitors files, indexes local files, listens to backup server content,
+ # and copies changes and new files to the backup server.
+ module ContentServer
+ # Content server specific flags.
+ Params.integer('local_files_port', 4444, 'Remote port in backup server to copy files.')
+ Params.integer('local_content_data_port', 3333, 'Listen to incoming content data requests.')
+
+ def run_content_server
+ all_threads = []
+
+ @process_variables = ThreadSafeHash::ThreadSafeHash.new
+ @process_variables.set('server_name', 'content_server')
+
+ # # # # # # # # # # # #
+ # Initialize/Start monitoring
+ monitoring_events = Queue.new
+ fm = FileMonitoring::FileMonitoring.new
+ fm.set_event_queue(monitoring_events)
+ # Start monitoring and writing changes to queue
+ all_threads << Thread.new do
+ fm.monitor_files
+ end
+
+ # # # # # # # # # # # # # #
+ # Initialize/Start local indexer
+ local_server_content_data_queue = Queue.new
+ queue_indexer = QueueIndexer.new(monitoring_events,
+ local_server_content_data_queue,
+ Params['local_content_data_path'])
+ # Start indexing on demand and write changes to queue
+ all_threads << queue_indexer.run
+
+ # # # # # # # # # # # # # # # # # # # # # #
+ # Initialize/Start content data comparator
+ copy_files_events = Queue.new
+ local_dynamic_content_data = ContentData::DynamicContentData.new
+ all_threads << Thread.new do
+ while true do
+ # Note: This thread should be the only consumer of local_server_content_data_queue
+ Log.info 'Waiting on local server content data.'
+ local_server_content_data = local_server_content_data_queue.pop
+ local_dynamic_content_data.update(local_server_content_data)
+ end
+ end
+
+ remote_content_client = RemoteContentServer.new(local_dynamic_content_data,
+ Params['local_content_data_port'])
+ all_threads << remote_content_client.tcp_thread
+
+ # # # # # # # # # # # # # # # #
+ # Start copying files on demand
+ copy_server = FileCopyServer.new(copy_files_events, Params['local_files_port'])
+ all_threads.concat(copy_server.run())
+
+ if Params['enable_monitoring']
+ mon = Monitoring::Monitoring.new(@process_variables)
+ Log.add_consumer(mon)
+ all_threads << mon.thread
+ monitoring_info = MonitoringInfo::MonitoringInfo.new(@process_variables)
+ end
+
+ # Finalize server threads.
+ all_threads.each { |t| t.abort_on_exception = true }
+ all_threads.each { |t| t.join }
+ # Should never reach this line.
+ end
+ module_function :run_content_server
+
+ end # module ContentServer
+
+
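
For orientation: the new entry point above wires four stages together through in-process queues. File monitoring pushes events into monitoring_events, the QueueIndexer turns them into content data snapshots on local_server_content_data_queue, a comparator thread folds those snapshots into a DynamicContentData served over TCP, and FileCopyServer copies requested files. A minimal launcher sketch follows; it assumes the gem is installed and that the Params values read above (such as 'local_content_data_path' and 'enable_monitoring') are configured elsewhere, since this diff does not show the gem's executables:

    # Hypothetical launcher, not part of this diff.
    require 'content_server'

    # Blocks forever: run_content_server sets abort_on_exception on its
    # worker threads and then joins every one of them.
    ContentServer.run_content_server
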
@@ -3,280 +3,285 @@ require 'thread'
 
  require 'file_indexing/index_agent'
  require 'log'
+ require 'params'
 
- module BBFS
- module ContentServer
-
- Params.integer('streaming_chunk_size', 2*1024*1024,
- 'Max number of content bytes to send in one chunk.')
- Params.integer('file_streaming_timeout', 5*60,
- 'If no action is taken on a file streamer, abort copy.')
- Params.string('backup_destination_folder', '',
- 'Backup server destination folder, default is the relative local folder.')
-
- class Stream
- attr_reader :checksum, :path, :tmp_path, :file, :size
- def initialize(checksum, path, file, size)
- @checksum = checksum
- @path = path
- @file = file
- @size = size
- end
 
- def self.close_delete_stream(checksum, streams_hash)
- if streams_hash.key?(checksum)
- Log.info("close_delete_stream #{streams_hash[checksum].file}")
- begin
- streams_hash[checksum].file.close()
- rescue IOError => e
- Log.warning("While closing stream, could not close file #{streams_hash[checksum].path}." \
+ module ContentServer
+
+ Params.integer('streaming_chunk_size', 2*1024*1024,
+ 'Max number of content bytes to send in one chunk.')
+ Params.integer('file_streaming_timeout', 5*60,
+ 'If no action is taken on a file streamer, abort copy.')
+ Params.path('backup_destination_folder', '',
+ 'Backup server destination folder, default is the relative local folder.')
+
+ class Stream
+ attr_reader :checksum, :path, :tmp_path, :file, :size
+ def initialize(checksum, path, file, size)
+ @checksum = checksum
+ @path = path
+ @file = file
+ @size = size
+ end
+
+ def self.close_delete_stream(checksum, streams_hash)
+ if streams_hash.key?(checksum)
+ Log.debug1("close_delete_stream #{streams_hash[checksum].file}")
+ begin
+ streams_hash[checksum].file.close()
+ rescue IOError => e
+ Log.warning("While closing stream, could not close file #{streams_hash[checksum].path}." \
  " #{e.to_s}")
- end
- streams_hash.delete(checksum)
  end
+ streams_hash.delete(checksum)
  end
-
  end
 
- class FileStreamer
- attr_reader :thread
+ end
 
- :NEW_STREAM
- :ABORT_STREAM
- :RESET_STREAM
- :COPY_CHUNK
+ class FileStreamer
+ attr_reader :thread
 
- def initialize(send_chunk_clb, abort_streaming_clb=nil)
- @send_chunk_clb = send_chunk_clb
- @abort_streaming_clb = abort_streaming_clb
- @stream_queue = Queue.new
+ :NEW_STREAM
+ :ABORT_STREAM
+ :RESET_STREAM
+ :COPY_CHUNK
 
- # Used from internal thread only.
- @streams = {}
- @thread = run
- end
+ def initialize(send_chunk_clb, abort_streaming_clb=nil)
+ @send_chunk_clb = send_chunk_clb
+ @abort_streaming_clb = abort_streaming_clb
+ @stream_queue = Queue.new
 
- def copy_another_chuck(checksum)
- @stream_queue << [:COPY_CHUNK, checksum]
- end
+ # Used from internal thread only.
+ @streams = {}
+ @thread = run
+ end
 
- def start_streaming(checksum, path)
- @stream_queue << [:NEW_STREAM, [checksum, path]]
- end
+ def copy_another_chuck(checksum)
+ @stream_queue << [:COPY_CHUNK, checksum]
+ end
 
- def abort_streaming(checksum)
- @stream_queue << [:ABORT_STREAM, checksum]
- end
+ def start_streaming(checksum, path)
+ @stream_queue << [:NEW_STREAM, [checksum, path]]
+ end
 
- def reset_streaming(checksum, new_offset)
- @stream_queue << [:RESET_STREAM, [checksum, new_offset]]
- end
+ def abort_streaming(checksum)
+ @stream_queue << [:ABORT_STREAM, checksum]
+ end
 
- def run
- return Thread.new do
- loop {
- checksum = handle(@stream_queue.pop)
- }
- end
+ def reset_streaming(checksum, new_offset)
+ @stream_queue << [:RESET_STREAM, [checksum, new_offset]]
+ end
+
+ def run
+ return Thread.new do
+ loop {
+ checksum = handle(@stream_queue.pop)
+ }
  end
+ end
 
- def handle(message)
- type, content = message
- if type == :NEW_STREAM
- checksum, path = content
- reset_stream(checksum, path, 0)
- @stream_queue << [:COPY_CHUNK, checksum] if @streams.key?(checksum)
- elsif type == :ABORT_STREAM
- checksum = content
- Stream.close_delete_stream(checksum, @streams)
- elsif type == :RESET_STREAM
- checksum, new_offset = content
- reset_stream(checksum, nil, new_offset)
- @stream_queue << [:COPY_CHUNK, checksum] if @streams.key?(checksum)
- elsif type == :COPY_CHUNK
- checksum = content
- if @streams.key?(checksum)
- offset = @streams[checksum].file.pos
- Log.debug1("Sending chunk for #{checksum}, offset #{offset}.")
- chunk = @streams[checksum].file.read(Params['streaming_chunk_size'])
- if chunk.nil?
- # No more to read, send end of file.
- @send_chunk_clb.call(checksum, offset, @streams[checksum].size, nil, nil)
- Stream.close_delete_stream(checksum, @streams)
- else
- chunk_checksum = FileIndexing::IndexAgent.get_content_checksum(chunk)
- @send_chunk_clb.call(checksum, offset, @streams[checksum].size, chunk, chunk_checksum)
- end
+ def handle(message)
+ type, content = message
+ if type == :NEW_STREAM
+ checksum, path = content
+ reset_stream(checksum, path, 0)
+ @stream_queue << [:COPY_CHUNK, checksum] if @streams.key?(checksum)
+ elsif type == :ABORT_STREAM
+ checksum = content
+ Stream.close_delete_stream(checksum, @streams)
+ elsif type == :RESET_STREAM
+ checksum, new_offset = content
+ reset_stream(checksum, nil, new_offset)
+ @stream_queue << [:COPY_CHUNK, checksum] if @streams.key?(checksum)
+ elsif type == :COPY_CHUNK
+ checksum = content
+ if @streams.key?(checksum)
+ offset = @streams[checksum].file.pos
+ Log.debug1("Sending chunk for #{checksum}, offset #{offset}.")
+ chunk = @streams[checksum].file.read(Params['streaming_chunk_size'])
+ if chunk.nil?
+ # No more to read, send end of file.
+ @send_chunk_clb.call(checksum, offset, @streams[checksum].size, nil, nil)
+ Stream.close_delete_stream(checksum, @streams)
  else
- Log.info("No checksum found to copy chunk. #{checksum}.")
+ chunk_checksum = FileIndexing::IndexAgent.get_content_checksum(chunk)
+ @send_chunk_clb.call(checksum, offset, @streams[checksum].size, chunk, chunk_checksum)
  end
+ else
+ Log.info("No checksum found to copy chunk. #{checksum}.")
  end
-
  end
 
- def reset_stream(checksum, path, offset)
- if !@streams.key? checksum
- begin
- file = File.new(path, 'rb')
- if offset > 0
- file.seek(offset)
- end
- Log.info("File streamer: #{file.to_s}.")
- rescue IOError => e
- Log.warning("Could not stream local file #{path}. #{e.to_s}")
+ end
+
+ def reset_stream(checksum, path, offset)
+ if !@streams.key? checksum
+ begin
+ file = File.new(path, 'rb')
+ if offset > 0
+ file.seek(offset)
  end
- @streams[checksum] = Stream.new(checksum, path, file, file.size)
- else
- @streams[checksum].file.seek(offset)
+ Log.debug1("File streamer: #{file.to_s}.")
+ rescue IOError => e
+ Log.warning("Could not stream local file #{path}. #{e.to_s}")
  end
+ @streams[checksum] = Stream.new(checksum, path, file, file.size)
+ else
+ @streams[checksum].file.seek(offset)
  end
  end
+ end
 
- # Start implementing as dummy, no self thread for now.
- # Later when we need it to response and send aborts, timeouts, ect, it will
- # need self thread.
- class FileReceiver
+ # Start by implementing as a dummy, with no thread of its own for now.
+ # Later, when it needs to respond and send aborts, timeouts, etc., it
+ # will need its own thread.
+ class FileReceiver
 
- def initialize(file_done_clb=nil, file_abort_clb=nil, reset_copy=nil)
- @file_done_clb = file_done_clb
- @file_abort_clb = file_abort_clb
- @reset_copy = reset_copy
- @streams = {}
- end
+ def initialize(file_done_clb=nil, file_abort_clb=nil, reset_copy=nil)
+ @file_done_clb = file_done_clb
+ @file_abort_clb = file_abort_clb
+ @reset_copy = reset_copy
+ @streams = {}
+ end
 
- def receive_chunk(file_checksum, offset, file_size, content, content_checksum)
- # If standard chunk copy.
- if !content.nil? && !content_checksum.nil?
- received_content_checksum = FileIndexing::IndexAgent.get_content_checksum(content)
- comment = "Calculated received chunk with content checksum #{received_content_checksum}" \
+ def receive_chunk(file_checksum, offset, file_size, content, content_checksum)
+ # If standard chunk copy.
+ if !content.nil? && !content_checksum.nil?
+ received_content_checksum = FileIndexing::IndexAgent.get_content_checksum(content)
+ comment = "Calculated received chunk with content checksum #{received_content_checksum}" \
  " vs message content checksum #{content_checksum}, " \
  "file checksum #{file_checksum}"
- Log.debug1(comment) if content_checksum == received_content_checksum
- # TODO should be here a kind of abort?
- if content_checksum != received_content_checksum
- Log.warning(comment)
- new_offset = 0
- if @streams.key?(file_checksum)
- new_offset = @streams[file_checksum].file.pos
- end
- @reset_copy.call(file_checksum, new_offset) unless @reset_copy.nil?
- return false
- end
-
- if !@streams.key?(file_checksum)
- handle_new_stream(file_checksum, file_size)
- end
- # We still check @streams has the key, because file can fail to open.
+ Log.debug1(comment) if content_checksum == received_content_checksum
+ # TODO: should there be a kind of abort here?
+ if content_checksum != received_content_checksum
+ Log.warning(comment)
+ new_offset = 0
  if @streams.key?(file_checksum)
- return handle_new_chunk(file_checksum, offset, content)
- else
- Log.warning('Cannot handle chunk, stream does not exists, sending abort.')
- @file_abort_clb.call(file_checksum) unless @file_abort_clb.nil?
- return false
+ new_offset = @streams[file_checksum].file.pos
  end
- # If last chunk copy.
- elsif content.nil? && content_checksum.nil?
- handle_last_chunk(file_checksum)
+ @reset_copy.call(file_checksum, new_offset) unless @reset_copy.nil?
  return false
+ end
+
+ if !@streams.key?(file_checksum)
+ handle_new_stream(file_checksum, file_size)
+ end
+ # We still check @streams has the key, because file can fail to open.
+ if @streams.key?(file_checksum)
+ return handle_new_chunk(file_checksum, offset, content)
  else
- Log.warning("Unexpected receive chuck message. file_checksum:#{file_checksum}, " \
- "content.nil?:#{content.nil?}, content_checksum:#{content_checksum}")
+ Log.warning('Cannot handle chunk, stream does not exist, sending abort.')
+ @file_abort_clb.call(file_checksum) unless @file_abort_clb.nil?
  return false
  end
+ # If last chunk copy.
+ elsif content.nil? && content_checksum.nil?
+ # Handle the case of backing up an empty file.
+ handle_new_stream(file_checksum, 0) if !@streams.key?(file_checksum)
+ # Finalize the file copy.
+ handle_last_chunk(file_checksum)
+ return false
+ else
+ Log.warning("Unexpected receive chunk message. file_checksum:#{file_checksum}, " \
+ "content.nil?:#{content.nil?}, content_checksum:#{content_checksum}")
+ return false
  end
+ end
 
- # open new stream
- def handle_new_stream(file_checksum, file_size)
- # final destination path
- tmp_path = FileReceiver.destination_filename(
- File.join(Params['backup_destination_folder'], 'tmp'),
- file_checksum)
- path = FileReceiver.destination_filename(Params['backup_destination_folder'],
- file_checksum)
- if File.exists?(path)
- Log.warning("File already exists (#{path}) not writing.")
- @file_abort_clb.call(file_checksum) unless @file_abort_clb.nil?
- else
- # the file will be moved from tmp location once the transfer will be done
- # system will use the checksum and some more unique key for tmp file name
- FileUtils.makedirs(File.dirname(tmp_path)) unless File.directory?(File.dirname(tmp_path))
- tmp_file = file = File.new(tmp_path, 'wb')
- @streams[file_checksum] = Stream.new(file_checksum, tmp_path, tmp_file, file_size)
- end
+ # open new stream
+ def handle_new_stream(file_checksum, file_size)
+ # final destination path
+ tmp_path = FileReceiver.destination_filename(
+ File.join(Params['backup_destination_folder'], 'tmp'),
+ file_checksum)
+ path = FileReceiver.destination_filename(Params['backup_destination_folder'],
+ file_checksum)
+ if File.exists?(path)
+ Log.warning("File already exists (#{path}) not writing.")
+ @file_abort_clb.call(file_checksum) unless @file_abort_clb.nil?
+ else
+ # The file will be moved from the tmp location once the transfer is done;
+ # the system uses the checksum plus a unique key for the tmp file name.
+ FileUtils.makedirs(File.dirname(tmp_path)) unless File.directory?(File.dirname(tmp_path))
+ tmp_file = file = File.new(tmp_path, 'wb')
+ @streams[file_checksum] = Stream.new(file_checksum, tmp_path, tmp_file, file_size)
  end
+ end
 
- # write chunk to temp file
- def handle_new_chunk(file_checksum, offset, content)
- if offset == @streams[file_checksum].file.pos
- FileReceiver.write_string_to_file(content, @streams[file_checksum].file)
- Log.info("Written already #{@streams[file_checksum].file.pos} bytes, " \
+ # write chunk to temp file
+ def handle_new_chunk(file_checksum, offset, content)
+ if offset == @streams[file_checksum].file.pos
+ FileReceiver.write_string_to_file(content, @streams[file_checksum].file)
+ Log.info("Written already #{@streams[file_checksum].file.pos} bytes, " \
  "out of #{@streams[file_checksum].size} " \
  "(#{100.0*@streams[file_checksum].file.size/@streams[file_checksum].size}%)")
- return true
- else
- # Offset is wrong, send reset/resume copy from correct offset.
- Log.warning("Received chunk with incorrect offset #{offset}, should " \
+ return true
+ else
+ # Offset is wrong, send reset/resume copy from correct offset.
+ Log.warning("Received chunk with incorrect offset #{offset}, should " \
  "be #{@streams[file_checksum].file.pos}, file_checksum:#{file_checksum}")
- @reset_copy.call(file_checksum, @streams[file_checksum].file.pos) unless @reset_copy.nil?
- return false
- end
+ @reset_copy.call(file_checksum, @streams[file_checksum].file.pos) unless @reset_copy.nil?
+ return false
  end
+ end
 
- # copy file to permanent location
- # close stream
- # remove temp file
- # check written file
- def handle_last_chunk(file_checksum)
- if @streams.key?(file_checksum)
- # Make the directory if does not exists.
- path = FileReceiver.destination_filename(Params['backup_destination_folder'],
- file_checksum)
- Log.debug1("Moving tmp file #{@streams[file_checksum].path} to #{path}")
- Log.debug1("Creating directory: #{path}")
- file_dir = File.dirname(path)
- FileUtils.makedirs(file_dir) unless File.directory?(file_dir)
- # Move tmp file to permanent location.
- tmp_file_path = @streams[file_checksum].path
- Stream.close_delete_stream(file_checksum, @streams) # temp file will be closed here
-
- local_file_checksum = FileIndexing::IndexAgent.get_checksum(tmp_file_path)
- message = "Local checksum (#{local_file_checksum}) received checksum (#{file_checksum})."
- if local_file_checksum == file_checksum
- Log.info(message)
- begin
- File.rename(tmp_file_path, path)
- Log.info("End move tmp file to permanent location #{path}.")
- @file_done_clb.call(local_file_checksum, path) unless @file_done_clb.nil?
- rescue IOError => e
- Log.warning("Could not move tmp file to permanent file #{path}. #{e.to_s}")
- end
- else
- Log.error(message)
- Log.debug1("Deleting tmp file: #{tmp_file_path}")
- File.delete(tmp_file_path)
+ # copy file to permanent location
+ # close stream
+ # remove temp file
+ # check written file
+ def handle_last_chunk(file_checksum)
+ # Should always be true, unless file creation failed.
+ if @streams.key?(file_checksum)
236
+ # Make the directory if does not exists.
237
+ path = FileReceiver.destination_filename(Params['backup_destination_folder'],
238
+ file_checksum)
239
+ Log.debug1("Moving tmp file #{@streams[file_checksum].path} to #{path}")
240
+ Log.debug1("Creating directory: #{path}")
241
+ file_dir = File.dirname(path)
242
+ FileUtils.makedirs(file_dir) unless File.directory?(file_dir)
243
+ # Move tmp file to permanent location.
244
+ tmp_file_path = @streams[file_checksum].path
245
+ Stream.close_delete_stream(file_checksum, @streams) # temp file will be closed here
246
+
247
+ local_file_checksum = FileIndexing::IndexAgent.get_checksum(tmp_file_path)
248
+ message = "Local checksum (#{local_file_checksum}) received checksum (#{file_checksum})."
249
+ if local_file_checksum == file_checksum
250
+ Log.info(message)
251
+ begin
252
+ File.rename(tmp_file_path, path)
253
+ Log.info("End move tmp file to permanent location #{path}.")
254
+ @file_done_clb.call(local_file_checksum, path) unless @file_done_clb.nil?
255
+ rescue IOError => e
256
+ Log.warning("Could not move tmp file to permanent file #{path}. #{e.to_s}")
257
257
  end
258
258
  else
259
- Log.error("Handling last chunk and tmp stream does not exists.")
260
- end
261
- end
262
-
263
- def self.write_string_to_file(str, file)
264
- bytes_to_write = str.bytesize
265
- Log.info("writing to file: #{file.to_s}, #{bytes_to_write} bytes.")
266
- while bytes_to_write > 0
267
- bytes_to_write -= file.write(str)
259
+ Log.error(message)
260
+ Log.debug1("Deleting tmp file: #{tmp_file_path}")
261
+ File.delete(tmp_file_path)
268
262
  end
263
+ else
+ Log.error("Handling last chunk and tmp stream does not exist.")
  end
+ end
 
- # Creates destination filename for backup server, input is base folder and sha1.
- # for example: folder:/mnt/hd1/bbbackup, sha1:d0be2dc421be4fcd0172e5afceea3970e2f3d940
- # dest filename: /mnt/hd1/bbbackup/d0/be/2d/d0be2dc421be4fcd0172e5afceea3970e2f3d940
- def self.destination_filename(folder, sha1)
- File.join(folder, sha1[0,2], sha1[2,2], sha1)
+ def self.write_string_to_file(str, file)
+ bytes_to_write = str.bytesize
+ Log.info("writing to file: #{file.to_s}, #{bytes_to_write} bytes.")
+ while bytes_to_write > 0
+ bytes_to_write -= file.write(str)
  end
+ end
 
- private :handle_new_stream, :handle_new_chunk, :handle_last_chunk
+ # Creates destination filename for backup server, input is base folder and sha1.
+ # for example: folder:/mnt/hd1/bbbackup, sha1:d0be2dc421be4fcd0172e5afceea3970e2f3d940
+ # dest filename: /mnt/hd1/bbbackup/d0/be/2d/d0be2dc421be4fcd0172e5afceea3970e2f3d940
+ def self.destination_filename(folder, sha1)
+ File.join(folder, sha1[0,2], sha1[2,2], sha1)
  end
 
+ private :handle_new_stream, :handle_new_chunk, :handle_last_chunk
  end
+
  end
+
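
The FileStreamer and FileReceiver above talk only through callbacks, so the chunk protocol can be exercised without the TCP layer. A loopback sketch (hypothetical, for illustration only; in the real server chunks travel via networking/tcp, and Params['backup_destination_folder'] must point at a writable backup root):

    require 'content_server/queue_copy'  # assumed require path for this file

    path = '/path/to/some/file'          # placeholder input file
    checksum = FileIndexing::IndexAgent.get_checksum(path)

    streamer = nil  # declared first so the receiver's callbacks can capture it
    receiver = ContentServer::FileReceiver.new(
        lambda { |sum, dest| Log.info("backup done: #{sum} -> #{dest}") },  # file_done_clb
        lambda { |sum| streamer.abort_streaming(sum) },                     # file_abort_clb
        lambda { |sum, offset| streamer.reset_streaming(sum, offset) })     # reset_copy

    # send_chunk_clb: hand each chunk straight to the receiver and, while it
    # keeps returning true, request the next chunk (copy_another_chuck is the
    # method's actual, misspelled name).
    streamer = ContentServer::FileStreamer.new(
        lambda do |sum, offset, size, chunk, chunk_checksum|
          more = receiver.receive_chunk(sum, offset, size, chunk, chunk_checksum)
          streamer.copy_another_chuck(sum) if more
        end)

    streamer.start_streaming(checksum, path)

The destination layout produced by destination_filename shards by SHA1 prefix, per its own comment: base folder /mnt/hd1/bbbackup plus checksum d0be2dc421be4fcd0172e5afceea3970e2f3d940 yields /mnt/hd1/bbbackup/d0/be/2d/d0be2dc421be4fcd0172e5afceea3970e2f3d940, which keeps directory fan-out bounded as the store grows.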