conveyor 0.2.2 → 0.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data.tar.gz.sig CHANGED
Binary file
@@ -1,3 +1,18 @@
+ == 0.2.3 / 2008-10-21
+ * update channel reading code for base-36
+ * adding logging to the server startup process
+ * new 2-level indexing scheme. only 10% of index items are held in memory
+ * 10x reduction in memory usage
+ * 1000x speedup in startup time
+ * added rack dependency
+ * index rebuilding script
+ * time the startup
+ * some performance improvements for index loading
+ * initial block caching strategy for lookups by id
+ * cache rejecting + dealing with an edge case on nearest_after
+ * refuse to open an index that isn't in sequential id order
+ * log http response code when channel already exists
+
  == 0.2.2 / 2008-04-10
  * make sure that an empty channel still has @last_id set
  * switch back to mongrel. too many problems with event machine on solaris
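The base-36 entry above refers to the index header encoding used throughout the diff below: every integer field of an index line is written with Ruby's Integer#to_s(36) and parsed back with String#to_i(36). A minimal round-trip sketch; the timestamp is a sample value taken from the gem's own test fixtures:

    # Base-36 round trip for index header fields (sample value from
    # the test fixtures further down; any Integer works the same way).
    1199826280.to_s(36)   # => "jucfvs"
    "jucfvs".to_i(36)     # => 1199826280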
@@ -4,6 +4,7 @@ Manifest.txt
  README.txt
  Rakefile
  bin/conveyor
+ bin/conveyor-rebuild-index
  bin/conveyor-upgrade
  docs/file-formats.rdoc
  docs/protocol.rdoc
data/Rakefile CHANGED
@@ -12,6 +12,7 @@ Hoe.new('conveyor', Conveyor::VERSION) do |p|
  p.extra_deps << ['mongrel']
  p.extra_deps << ['json']
  p.extra_deps << ['daemons']
+ p.extra_deps << ['rack']
  end

  # vim: syntax=Ruby
@@ -0,0 +1,14 @@
+ #!/usr/bin/env ruby -KU
+
+ $: << 'lib'
+
+ require 'conveyor/server'
+ require 'conveyor/upgrader'
+
+ if ARGV.first
+ FileUtils.mv(File.join(ARGV.first, 'index'), File.join(ARGV.first, 'index.bak'))
+ FileUtils.touch(File.join(ARGV.first, 'index'))
+ u = Conveyor::Channel.new ARGV.first
+
+ u.rebuild_index
+ end
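This new script (bin/conveyor-rebuild-index, per the Manifest.txt change above) takes a channel directory as its only argument: it moves the existing index to index.bak, creates a fresh empty index, and has Channel#rebuild_index regenerate it from the data files, e.g. bin/conveyor-rebuild-index /path/to/channel (the path here is a placeholder, not from the source).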
@@ -1,4 +1,4 @@
  module Conveyor
- VERSION = '0.2.2'
+ VERSION = '0.2.3'
  QUALITY = 'alpha'
  end
@@ -11,21 +11,42 @@ module Conveyor
  # Base implementation for channels. Not useful to instantiate directly.
  class BaseChannel

- NAME_PATTERN = %r{\A[a-zA-Z\-0-9\_]+\Z}
- BUCKET_SIZE = 100_000
- FORMAT_VERSION = 1
+ NAME_PATTERN = %r{\A[a-zA-Z\-0-9\_]+\Z}
+ BUCKET_SIZE = 100_000
+ FORMAT_VERSION = 1
+ BLOCK_SIZE = 1000
+ CACHE_SIZE = 100

  module Flags
  GZIP = 1
  end

+ def self.parse_headers str, index_file=false
+ id, time, offset, length, hash, flags, file = str.split ' '
+ {
+ :id => id.to_i(36),
+ :time => time.to_i(36),
+ :offset => offset.to_i(36),
+ :length => length.to_i(36),
+ :hash => hash,
+ :flags => flags.to_i(36),
+ :file => (index_file ? file.to_i(36) : nil)
+ }
+ end
+
+ def self.valid_channel_name? name
+ !!name.match(NAME_PATTERN)
+ end
+
  def initialize directory
- @directory = directory
- @data_files = []
- @file_mutexes = []
- @index = []
- @iterator = 1
- @id_lock = Mutex.new
+ @directory = directory
+ @data_files = []
+ @file_mutexes = []
+ @iterator = 1 #TODO: move to Channel.rb
+ @id_lock = Mutex.new
+ @index_file_lock = Mutex.new
+ @block_cache = {}
+ @block_last_used = {}

  if File.exists?(@directory)
  if !File.directory?(@directory)
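The new class-level parse_headers splits an index line on spaces and base-36-decodes the integer fields; the trailing file field is only present in lines read from the index file. For illustration, decoding the fixture line used in test_parse_headers further down this diff:

    # Decode a data-file header line (no trailing file field).
    Channel.parse_headers "1 jucfvs 0 3 acbd18db4cc2f85cedef654fccc4a4d8 0\n"
    # => {:id => 1, :time => 1199826280, :offset => 0, :length => 3,
    #     :hash => "acbd18db4cc2f85cedef654fccc4a4d8", :flags => 0, :file => nil}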
@@ -44,27 +65,6 @@ module Conveyor
  "<#{self.class} dir:'#{@directory.to_s}' last_id:#{@last_id} iterator:#{@iterator}>"
  end

- def pick_bucket i
- (i / BUCKET_SIZE).to_i
- end
-
- def bucket_file i
- unless @data_files[i]
- @data_files[i] = File.open(File.join(@directory, i.to_s), 'a+')
- @data_files[i].sync = true
- @file_mutexes[i] = Mutex.new
- end
- @file_mutexes[i].synchronize do
- yield @data_files[i]
- end
- end
-
- def id_lock
- @id_lock.synchronize do
- yield
- end
- end
-
  def commit data, time=nil
  l = nil
  gzip = data.length >= 256
@@ -84,7 +84,7 @@ module Conveyor

  id_lock do
  i = @last_id + 1
- t = time || Time.now
+ t = time || Time.now.to_i
  b = pick_bucket(i)
  flags = 0
  flags = flags | Flags::GZIP if gzip
@@ -99,20 +99,33 @@ module Conveyor
  end

  @last_id = i
- @index_file.write "#{header} #{b.to_s(36)}\n"
- @index << {:id => i, :time => t, :offset => o, :length => l, :hash => h, :file => b}
+ index_offset = nil
+ index_file_lock do
+ @index_file.seek(0, IO::SEEK_END)
+ index_offset = @index_file.pos
+ @index_file.write "#{header} #{b.to_s(36)}\n"
+ end
+ block_num = block_num(i)
+ if !@blocks[block_num]
+ @blocks << {:offset => index_offset}
+ end
+ if @block_cache.key?(block_num)
+ @block_cache[block_num] << {:id => i, :time => t, :offset => o, :length => l, :hash => h, :file => b}
+ end
  i
  end
  end

  def get id, stream=false
  return nil unless id <= @last_id && id > 0
- i = @index[id-1]
+
+ index_entry = search_index(id)
+
  headers, content, compressed_content, g = nil
- bucket_file(i[:file]) do |f|
- f.seek i[:offset]
- headers = parse_headers(f.readline.strip)
- compressed_content = f.read(i[:length])
+ bucket_file(index_entry[:file]) do |f|
+ f.seek index_entry[:offset]
+ headers = self.class.parse_headers(f.readline.strip)
+ compressed_content = f.read(index_entry[:length])
  end
  io = StringIO.new(compressed_content)
  if (headers[:flags] & Flags::GZIP) != 0
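The in-memory @index array is gone; a get now resolves its index entry in two steps: @blocks maps a block number to a byte offset in the index file, and search_index (later in this diff) parses and caches just that block. The arithmetic, sketched with a hypothetical id:

    # Two-level lookup arithmetic (mirrors block_num and search_index
    # below); the id 4242 is just an example value.
    BLOCK_SIZE = 1000
    id = 4242
    block = (id - 1) / BLOCK_SIZE            # => 4
    pos   = id - 1 - (block * BLOCK_SIZE)    # => 241, the entry's slot in its cached block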
@@ -128,48 +141,89 @@ module Conveyor
  end

  def get_nearest_after_timestamp timestamp, stream=false
- # i = binary search to find nearest item at or after timestamp
  i = nearest_after(timestamp)
  get(i) if i
  end

- def self.parse_headers str, index_file=false
- pattern = '\A([a-z\d]+) ([a-z\d]+) ([a-z\d]+) ([a-z\d]+) ([a-f0-9]+) ([a-z\d]+)'
- pattern += ' (\d+)' if index_file
- pattern += '\Z'
- m = str.match(Regexp.new(pattern))
+ def delete!
+ FileUtils.rm_r(@directory)
+ @data_files =[]
+ @last_id = 0
+ @blocks = []
+ end
+
+ def rebuild_index
+ files = Dir.glob(@directory + '/' + '[0-9]*')
+ files = files.map{|f| [f, f.split('/').last.to_i]}
+ files.sort!{|a,b| a[1] <=> b[1]}
+ files.each do |(f, b)|
+ File.open(f, 'r') do |file|
+ puts "reading #{f}"
+ while line = file.gets
+ headers = self.class.parse_headers(line.strip)
+ content = file.read(headers[:length])
+ file.read(1)
+ index_offset = nil
+ header = "#{headers[:id].to_s(36)} #{headers[:time].to_s(36)} #{headers[:offset].to_s(36)} #{headers[:length].to_s(36)} #{headers[:hash]} #{headers[:flags].to_s(36)}"
+ index_file_lock do
+ @index_file.seek(0, IO::SEEK_END)
+ index_offset = @index_file.pos
+ @index_file.write "#{header} #{b.to_s(36)}\n"
+ end
+ end
+ end
+ end
+ end
+
+ def status
  {
- :id => m.captures[0].to_i(36),
- :time => m.captures[1].to_i(36),
- :offset => m.captures[2].to_i(36),
- :length => m.captures[3].to_i(36),
- :hash => m.captures[4],
- :flags => m.captures[5].to_i(36),
- :file => (index_file ? m.captures[6].to_i(36) : nil)
+ :directory => @directory,
+ :data_files => @data_files.collect{|f| {:path => f.path, :bytes => File.size(f.path)}},
+ :iterator => @iterator,
+ :block_cache_keys => @block_cache.keys,
+ :last_id => @last_id,
+ :blocks => @blocks.length,
  }
  end

- def parse_headers str, index_file=false
- self.class.parse_headers str, index_file
+ protected
+
+ def block_num i
+ ((i-1) / BLOCK_SIZE)
  end
-
- def self.valid_channel_name? name
- !!name.match(NAME_PATTERN)
+
+ def pick_bucket i
+ (i / BUCKET_SIZE).to_i
  end

- def delete!
- FileUtils.rm_r(@directory)
- @index = []
- @data_files =[]
- @last_id = 0
+ def bucket_file i
+ unless @data_files[i]
+ @data_files[i] = File.open(File.join(@directory, i.to_s), 'a+')
+ @data_files[i].sync = true
+ @file_mutexes[i] = Mutex.new
+ end
+ @file_mutexes[i].synchronize do
+ yield @data_files[i]
+ end
  end

- protected
+ def id_lock
+ @id_lock.synchronize do
+ yield
+ end
+ end
+
+ def index_file_lock
+ @index_file_lock.synchronize do
+ yield
+ end
+ end

  def setup_channel
- @index_file = File.open(index_path, 'a')
+ @index_file = File.open(index_path, 'a+')
  @last_id = 0
  @version = FORMAT_VERSION
+ @blocks = []
  File.open(version_path, 'w+'){|f| f.write(@version.to_s)}
  end

@@ -185,16 +239,21 @@ module Conveyor
  end

  @index_file = File.open(index_path, 'r+')
-
+ @blocks = []
  @last_id = 0
- @index_file.each_line do |line|
- @index << parse_headers(line.strip, true)
- @last_id = @index.last[:id]
+ index_offset = 0
+ while line = @index_file.gets
+ entry = self.class.parse_headers(line.strip, true)
+ raise "corrupt index. try rebuilding." unless entry[:id] == @last_id + 1
+ if entry[:id] % BLOCK_SIZE == 1
+ @blocks << {:offset => index_offset}
+ end
+ @last_id = entry[:id]
+ index_offset = @index_file.pos
  end
  @index_file.seek(0, IO::SEEK_END)
  end

-
  def index_path
  File.join(@directory, 'index')
  end
@@ -203,21 +262,57 @@ module Conveyor
  File.join(@directory, 'version')
  end

- def nearest_after(timestamp)
- low = 0
- high = @index.length
- while low < high
- mid = (low + high) / 2
- if (@index[mid][:time].to_i > timestamp)
- high = mid - 1
- elsif (@index[mid][:time].to_i < timestamp)
- low = mid + 1
+ def cache_block block_num
+ if @block_cache.length > CACHE_SIZE
+ reject = @block_last_used.sort{|a,b| a[1] <=> b[1]}.last.first
+ @block_cache.delete(reject)
+ puts "rejected #{reject}"
+ end
+ a = []
+
+ buf = ''
+ block_start = @blocks[block_num][:offset]
+ block_end = @blocks[block_num + 1] ? @blocks[block_num + 1][:offset] : nil
+ index_file_lock do
+ @index_file.seek(block_start)
+ if block_end
+ buf = @index_file.read(block_end - block_start)
  else
- return mid
+ buf = @index_file.read
+ end
+ @index_file.seek(0, IO::SEEK_END)
+ end
+ buf.split(/\n/).each do |line|
+ a << self.class.parse_headers(line.strip, true)
+ end
+ @block_cache[block_num] = a
+ end
+
+ def search_index id
+ block_num = block_num(id)
+
+ if !@block_cache.has_key?(block_num)
+ cache_block(block_num)
+ end
+ @block_last_used[block_num] = Time.now.to_i
+ entry = @block_cache[block_num][id - 1 - (block_num * BLOCK_SIZE)]
+ end
+
+ def nearest_after(timestamp)
+ i = 0
+ while (i < @blocks.length - 1) && timestamp < @blocks[i+1][:time]
+ i += 1
+ end
+ cache_block(i) if !@block_cache.has_key?(i)
+ @block_last_used[i] = Time.now.to_i
+ @block_cache[i].each do |entry|
+ if entry[:time] > timestamp
+ return entry[:id]
  end
  end
- if timestamp <= @index[mid][:time].to_i
- @index[mid][:id]
+ if @blocks[i+1]
+ cache_block(i+1)
+ @block_cache[i+1].first[:id]
  else
  nil
  end
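Taken together, BLOCK_SIZE and CACHE_SIZE bound the resident index: at most CACHE_SIZE blocks of BLOCK_SIZE parsed entries each live in @block_cache at once. A worked example of the changelog's 10% figure; the one-million-entry channel size is an assumption for illustration, not a number from the source:

    # Rough upper bound on index entries held in memory.
    BLOCK_SIZE = 1000          # index entries per block
    CACHE_SIZE = 100           # blocks kept in @block_cache
    BLOCK_SIZE * CACHE_SIZE    # => 100_000 entries resident at most
    # For a channel of 1_000_000 entries that is 10% of the index,
    # matching the History.txt entry above.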
@@ -85,15 +85,9 @@ module Conveyor
  end

  def status
- {
- :directory => @directory,
- :index => {
- :size => @index.length
- },
- :data_files => @data_files.collect{|f| {:path => f.path, :bytes => File.size(f.path)}},
- :iterator => {:position => @iterator},
- :iterator_groups => @group_iterators.inject({}){|m,(k,v)| m[k] = v; m}
- }
+ super.merge({
+ :iterator_groups => @group_iterators.inject({}){|m,(k,v)| m[k] = v; m},
+ })
  end

  def rewind *opts
@@ -146,7 +140,8 @@ module Conveyor

  def load_channel
  super
- @iterator_file = File.open(iterator_path, 'r+')
+ @iterator_file = File.open(iterator_path, 'a+')
+ @iterator_file.seek(0)
  @iterator_file.each_line do |line|
  @iterator = line.to_i(36)
  end
@@ -21,13 +21,19 @@ module Conveyor
  @logger = Logger.new '/dev/null'
  end

+ t0 = Time.now
+ i "reading data"
+
  @channels = {}
  Dir.entries(@data_directory).each do |e|
  if !['.', '..'].include?(e) && File.directory?(File.join(@data_directory, e)) && Channel.valid_channel_name?(e)
+ i "initializing channel '#{e}'"
  @channels[e] = Channel.new(File.join(@data_directory, e))
  end
  end

+ i "done reading data (took #{Time.now - t0} sec.)"
+
  @requests = 0
  end

@@ -50,7 +56,7 @@ module Conveyor
  i "#{env["REMOTE_ADDR"]} PUT #{env["REQUEST_PATH"]} 201"
  [201, {}, "created channel #{m.captures[0]}"]
  else
- i "#{env["REMOTE_ADDR"]} PUT #{env["REQUEST_PATH"]}"
+ i "#{env["REMOTE_ADDR"]} PUT #{env["REQUEST_PATH"]} 202"
  [202, {}, "channel already exists. didn't do anything"]
  end
  else
@@ -21,15 +21,12 @@ class TestConveyorChannel < Test::Unit::TestCase
  end

  def test_parse_headers
- FileUtils.rm_r '/tmp/foo' rescue nil
-
- i = Channel.new '/tmp/foo'
  [
  ["1 jucfvs 0 3 acbd18db4cc2f85cedef654fccc4a4d8 0\n",
  {:id => 1, :time => 1199826280, :offset => 0, :length => 3, :hash => "acbd18db4cc2f85cedef654fccc4a4d8", :file => nil, :flags => 0}
  ]
  ].each do |(str, ret)|
- assert_equal ret, i.parse_headers(str)
+ assert_equal ret, Channel.parse_headers(str)
  end

  [
@@ -37,10 +34,8 @@ class TestConveyorChannel < Test::Unit::TestCase
  {:id => 2, :time => 1199826280, :offset => 0, :length => 3, :hash => "acbd18db4cc2f85cedef654fccc4a4d8", :file => 1, :flags => 0}
  ]
  ].each do |(str, ret)|
- assert_equal ret, i.parse_headers(str, true)
+ assert_equal ret, Channel.parse_headers(str, true)
  end
-
- FileUtils.rm_r '/tmp/foo'
  end

  def test_init_existing
@@ -70,7 +65,7 @@ class TestConveyorChannel < Test::Unit::TestCase
  assert_equal 'bar', c.get_next[1]
  assert_equal 'bam', c.get_next[1]
  assert_equal nil, c.get_next
- assert_equal 4, c.status[:iterator][:position]
+ assert_equal 4, c.status[:iterator]
  end

  def test_get_next_interupted
@@ -138,12 +133,14 @@ class TestConveyorChannel < Test::Unit::TestCase

  status = {
  :directory => '/tmp/bar',
- :index => {:size => 3},
+ :blocks => 1,
  :data_files => [
  {:path => '/tmp/bar/0', :bytes => 158}
  ],
- :iterator => {:position => 1},
- :iterator_groups => {}
+ :iterator => 1,
+ :iterator_groups => {},
+ :last_id => 3,
+ :block_cache_keys => []
  }

  assert_equal(status, c.status)
@@ -261,8 +258,10 @@ class TestConveyorChannel < Test::Unit::TestCase
  assert_equal nil, c.get_nearest_after_timestamp(2**32)

  t0 = Time.now.to_i
+ sleep 1 # we only have second-level granularity on time
+
  10.times{|i| c.post((10 + i).to_s)}
- assert_equal '9', c.get_nearest_after_timestamp(t0)[1]
+ assert_equal '10', c.get_nearest_after_timestamp(t0)[1]
  end

  def test_rewind_to_timestamp
@@ -5,11 +5,13 @@ require 'priority_queue'

  class TestReplicatedChannel < Test::Unit::TestCase
  def test_basic
- FileUtils.rm_r '/tmp/foo' rescue nil
- FileUtils.rm_r '/tmp/bar' rescue nil
+ chan1 = '/tmp/test_basic1'
+ chan2 = '/tmp/test_basic2'
+ FileUtils.rm_r chan1 rescue nil
+ FileUtils.rm_r chan2 rescue nil

- c1 = Conveyor::ReplicatedChannel.new '/tmp/foo'
- c2 = Conveyor::ReplicatedChannel.new '/tmp/bar'
+ c1 = Conveyor::ReplicatedChannel.new chan1
+ c2 = Conveyor::ReplicatedChannel.new chan2

  c1.peers << c2
  c2.peers << c1
@@ -34,8 +36,8 @@ class TestReplicatedChannel < Test::Unit::TestCase
  assert_equal c1.get(1), c2.get(1)
  assert_equal c1.get(2), c2.get(2)

- FileUtils.rm_r '/tmp/foo' rescue nil
- FileUtils.rm_r '/tmp/bar' rescue nil
+ FileUtils.rm_r chan1 rescue nil
+ FileUtils.rm_r chan2 rescue nil
  end

  def test_more
@@ -19,6 +19,7 @@ class TestConveyorServer < Test::Unit::TestCase

  Rack::Handler::Mongrel.run(app, :Host => '0.0.0.0', :Port => 8011)
  end
+ sleep 1

  def test_channels
  Net::HTTP.start("localhost", 8011) do |h|
@@ -117,10 +118,12 @@ class TestConveyorServer < Test::Unit::TestCase
  assert_kind_of Net::HTTPOK, req
  json = {
  "iterator_groups" => {},
- "index"=>{"size"=>1},
+ "blocks" => 1,
  "directory"=>"/tmp/asdf/#{chan}",
  "data_files"=>[{"path"=>"/tmp/asdf/#{chan}/0","bytes"=>122}],
- "iterator"=>{"position"=>1}
+ "iterator"=>1,
+ "last_id" => 1,
+ "block_cache_keys" => []
  }
  assert_equal json, JSON::parse(req.body)

@@ -287,8 +290,9 @@ class TestConveyorServer < Test::Unit::TestCase
  assert_equal '', c.get_nearest_after_timestamp(2**32)

  t0 = Time.now.to_i
+ sleep 1
  10.times{|i| c.post((10 + i).to_s)}
- assert_equal '9', c.get_nearest_after_timestamp(t0)
+ assert_equal '10', c.get_nearest_after_timestamp(t0)
  end

  def test_rewind_to_timestamp
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: conveyor
  version: !ruby/object:Gem::Version
- version: 0.2.2
+ version: 0.2.3
  platform: ruby
  authors:
  - Ryan King
@@ -30,11 +30,12 @@ cert_chain:
  Zls3y84CmyAEGg==
  -----END CERTIFICATE-----

- date: 2008-04-10 00:00:00 -07:00
+ date: 2008-10-21 00:00:00 -07:00
  default_executable:
  dependencies:
  - !ruby/object:Gem::Dependency
  name: mongrel
+ type: :runtime
  version_requirement:
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
@@ -44,6 +45,7 @@ dependencies:
  version:
  - !ruby/object:Gem::Dependency
  name: json
+ type: :runtime
  version_requirement:
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
@@ -53,6 +55,17 @@ dependencies:
  version:
  - !ruby/object:Gem::Dependency
  name: daemons
+ type: :runtime
+ version_requirement:
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: "0"
+ version:
+ - !ruby/object:Gem::Dependency
+ name: rack
+ type: :runtime
  version_requirement:
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
@@ -62,17 +75,19 @@ dependencies:
  version:
  - !ruby/object:Gem::Dependency
  name: hoe
+ type: :development
  version_requirement:
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.5.1
+ version: 1.7.0
  version:
  description: "* Like TiVo for your data * A distributed rewindable multi-queue"
  email: ryan@theryanking.com
  executables:
  - conveyor
+ - conveyor-rebuild-index
  - conveyor-upgrade
  extensions: []

@@ -87,6 +102,7 @@ files:
  - README.txt
  - Rakefile
  - bin/conveyor
+ - bin/conveyor-rebuild-index
  - bin/conveyor-upgrade
  - docs/file-formats.rdoc
  - docs/protocol.rdoc
@@ -125,7 +141,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  requirements: []

  rubyforge_project: conveyor
- rubygems_version: 1.1.0
+ rubygems_version: 1.2.0
  signing_key:
  specification_version: 2
  summary: "* Like TiVo for your data * A distributed rewindable multi-queue"
metadata.gz.sig CHANGED
Binary file