strokedb 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. data/CONTRIBUTORS +7 -0
  2. data/CREDITS +13 -0
  3. data/README +44 -0
  4. data/bin/sdbc +2 -0
  5. data/lib/config/config.rb +161 -0
  6. data/lib/data_structures/inverted_list.rb +297 -0
  7. data/lib/data_structures/point_query.rb +24 -0
  8. data/lib/data_structures/skiplist.rb +302 -0
  9. data/lib/document/associations.rb +107 -0
  10. data/lib/document/callback.rb +11 -0
  11. data/lib/document/coercions.rb +57 -0
  12. data/lib/document/delete.rb +28 -0
  13. data/lib/document/document.rb +684 -0
  14. data/lib/document/meta.rb +261 -0
  15. data/lib/document/slot.rb +199 -0
  16. data/lib/document/util.rb +27 -0
  17. data/lib/document/validations.rb +704 -0
  18. data/lib/document/versions.rb +106 -0
  19. data/lib/document/virtualize.rb +82 -0
  20. data/lib/init.rb +57 -0
  21. data/lib/stores/chainable_storage.rb +57 -0
  22. data/lib/stores/inverted_list_index/inverted_list_file_storage.rb +56 -0
  23. data/lib/stores/inverted_list_index/inverted_list_index.rb +49 -0
  24. data/lib/stores/remote_store.rb +172 -0
  25. data/lib/stores/skiplist_store/chunk.rb +119 -0
  26. data/lib/stores/skiplist_store/chunk_storage.rb +21 -0
  27. data/lib/stores/skiplist_store/file_chunk_storage.rb +44 -0
  28. data/lib/stores/skiplist_store/memory_chunk_storage.rb +37 -0
  29. data/lib/stores/skiplist_store/skiplist_store.rb +217 -0
  30. data/lib/stores/store.rb +5 -0
  31. data/lib/sync/chain_sync.rb +38 -0
  32. data/lib/sync/diff.rb +126 -0
  33. data/lib/sync/lamport_timestamp.rb +81 -0
  34. data/lib/sync/store_sync.rb +79 -0
  35. data/lib/sync/stroke_diff/array.rb +102 -0
  36. data/lib/sync/stroke_diff/default.rb +21 -0
  37. data/lib/sync/stroke_diff/hash.rb +186 -0
  38. data/lib/sync/stroke_diff/string.rb +116 -0
  39. data/lib/sync/stroke_diff/stroke_diff.rb +9 -0
  40. data/lib/util/blankslate.rb +42 -0
  41. data/lib/util/ext/blank.rb +50 -0
  42. data/lib/util/ext/enumerable.rb +36 -0
  43. data/lib/util/ext/fixnum.rb +16 -0
  44. data/lib/util/ext/hash.rb +22 -0
  45. data/lib/util/ext/object.rb +8 -0
  46. data/lib/util/ext/string.rb +35 -0
  47. data/lib/util/inflect.rb +217 -0
  48. data/lib/util/java_util.rb +9 -0
  49. data/lib/util/lazy_array.rb +54 -0
  50. data/lib/util/lazy_mapping_array.rb +64 -0
  51. data/lib/util/lazy_mapping_hash.rb +46 -0
  52. data/lib/util/serialization.rb +29 -0
  53. data/lib/util/trigger_partition.rb +136 -0
  54. data/lib/util/util.rb +38 -0
  55. data/lib/util/xml.rb +6 -0
  56. data/lib/view/view.rb +55 -0
  57. data/script/console +70 -0
  58. data/strokedb.rb +75 -0
  59. metadata +148 -0
data/lib/data_structures/skiplist.rb
@@ -0,0 +1,302 @@
+ module StrokeDB
+   class Skiplist
+     include Enumerable
+
+     attr_accessor :default, :head, :tail, :cut_level, :unique_keys
+
+     def initialize(data = {}, default = nil, cut_level = nil, unique_keys = true)
+       @default, @cut_level, @unique_keys = default, cut_level, unique_keys
+
+       @head = HeadNode.new
+       @tail = TailNode.new
+       @head.forward[0] = @tail
+       data.each{|k, v| insert(k, v) }
+     end
+
+     def insert(key, value, __cheaters_level = nil, __timestamp = nil)
+       @size_cache = nil
+       update = Array.new(@head.level)
+       x = @head
+       # We have to choose between < and <= only,
+       # but we go into different branches to keep things fast.
+       if @unique_keys
+         @head.level.downto(1) do |i|
+           x = x.forward[i-1] while x.forward[i-1] < key
+           update[i-1] = x
+         end
+       else
+         @head.level.downto(1) do |i|
+           x = x.forward[i-1] while x.forward[i-1] <= key
+           update[i-1] = x
+         end
+       end
+       x = x.forward[0]
+       if x.key == key && @unique_keys
+         x.value = value
+         x.timestamp = __timestamp
+         value.skiplist_node_container = x if value.respond_to? :skiplist_node_container=
+       else
+         newlevel = __cheaters_level || random_level
+         newlevel = 1 if empty?
+         if newlevel > @head.level
+           (@head.level + 1).upto(newlevel) do |i|
+             update[i-1] = @head
+           end
+         end
+
+         x = Node.new(newlevel, key, value, __timestamp)
+         value.skiplist_node_container = x if value.respond_to? :skiplist_node_container=
+
+         if cut?(newlevel, update[0])
+           return new_chunks!(x, update)
+         else
+           newlevel.times do |i|
+             x.forward[i] = update[i].forward[i] || @tail
+             update[i].forward[i] = x
+           end
+         end
+       end
+       return self
+     end
+
+     # Finders
+
+     def find_node(key = nil)
+       x = @head
+       @head.level.downto(1) do |i|
+         x = x.forward[i-1] while x.forward[i-1] < key
+       end
+       x = x.forward[0]
+       return (x.key && yield(x.key, key) ? x : nil) if block_given?
+       return x if x.key == key
+       nil
+     end
+
+     def find(key, default = nil)
+       (i = find_node(key)) && i.value || default || @default
+     end
+
+     def find_nearest_node(key)
+       x = @head
+       @head.level.downto(1) do |i|
+         x = x.forward[i-1] while x.forward[i-1] < key
+       end
+       x = x.forward[0] if (x.forward[0].key == key || x == @head)
+       x
+     end
+
+     def find_nearest(key, default = nil)
+       find_nearest_node(key).value || default || @default
+     end
+
+     def find_all_with_prefix(key)
+       results = []
+       x = @head
+       @head.level.downto(1) do |i|
+         x = x.forward[i-1] while x.forward[i-1] < key
+       end
+       x = x.forward[0]
+       # got the first matching node
+       while x.key && x.key[0, key.size] == key
+         results << x.value
+         x = x.forward[0]
+       end
+       results
+     end
+
+
+     def delete(key, default = nil)
+       @size_cache = nil
+       default ||= @default
+       update = Array.new(@head.level)
+       x = @head
+       @head.level.downto(1) do |i|
+         x = x.forward[i-1] while x.forward[i-1] < key
+         update[i-1] = x
+       end
+       x = x.forward[0]
+       if x.key == key
+         @head.level.times do |i|
+           break if update[i].forward[i] != x
+           update[i].forward[i] = x.forward[i]
+         end
+         true while (y = @head.forward.pop) == @tail
+         @head.forward.push(y || @tail)
+         x.free(self)
+         x.value
+       else
+         default
+       end
+     end
+
+     def first_node
+       @head.forward[0]
+     end
+
+     def size
+       @size_cache ||= inject(0){|c,k| c + 1}
+     end
+
+     def empty?
+       @head.forward[0] == @tail
+     end
+
+     # Returns a string representation of the Skiplist.
+     def to_s
+       "#<#{self.class.name} " +
+         [@head.to_s, map{|node| node.to_s }, @tail.to_s].flatten.join(', ') +
+         ">"
+     end
+     def to_s_levels
+       "#<#{self.class.name}:levels " +
+         [@head.to_s, map{|node| node.level.to_s }, @tail.to_s].flatten.join(', ') +
+         ">"
+     end
+
+     def eql?(skiplist)
+       zip(skiplist) {|a, b| return false unless a.key == b.key && a.value == b.value }
+       true
+     end
+
+     def each
+       n = @head.forward[0]
+       until TailNode === n
+         yield n
+         n = n.forward[0]
+       end
+     end
+
+     # Only for an empty list!
+     def raw_insert(data)
+       n = @head
+       sn = nil
+       update = []
+       data.each do |item|
+         key, value, level, timestamp = yield(item)
+         sn = Node.new(level, key, value, timestamp)
+         level.times do |i|
+           update[i] ||= @head
+           update[i].forward[i] = sn
+           sn.forward[i] = @tail
+           update[i] = sn
+         end
+       end
+     end
+
+     private
+
+     # 1/E is the fastest search value
+     PROBABILITY = 1/Math::E
+     MAX_LEVEL = 32
+
+     def random_level
+       l = 1
+       l += 1 while rand < PROBABILITY && l < MAX_LEVEL
+       return l
+     end
+
+     def cut?(l, prev)
+       @cut_level && !empty? && l >= @cut_level && prev != @head
+     end
+
+     def new_chunks!(newnode, update)
+       # Transposed picture:
+       #
+       # head level 8:    - - - - - - - -
+       # update.size 8:   - - - - - - - -
+       # ...
+       # newnode.level 5: - - - - -
+       # cut level 3:     - - -
+       # regular node:    -
+       # regular node:    - -
+       # ...
+       # tail node:       T T T T T T T T
+       # refs:            A B C D E F G H
+       #
+       # How to cut?
+       #
+       # 0) tail1 = TailNode.new; list2 = Skiplist.new
+       # 1) newnode.{A, B, C, D, E} := update{A,B,C,D,E}.forward
+       # 2) update.{all} := tail1 (for current chunk)
+       # 3) list2.head.{A, B, C, D, E} = new_node.{A, B, C, D, E}
+       # 4) tail1.next_list = list2
+
+       list2 = Skiplist.new({}, @default, @cut_level)
+       tail1 = TailNode.new
+
+       newnode.level.times do |i|
+         # add '|| @tail' because update[i] may be the head of a lower level
+         # without a forward ref to the tail.
+         newnode.forward[i] = update[i].forward[i] || @tail
+         list2.head.forward[i] = newnode
+       end
+       @head.level.times do |i|
+         update[i].forward[i] = tail1
+       end
+       tail1.next_list = list2
+       # return the current chunk and the next chunk
+       return self, list2
+     end
+
+     class Node
+       attr_accessor :key, :value, :forward, :timestamp
+       attr_accessor :_serialized_index
+       def initialize(level, key, value, timestamp = nil)
+         @key, @value, @timestamp = key, value, timestamp
+         @forward = Array.new(level)
+       end
+       # This is called when a node is thrown out of the list.
+       # Note that node.value is called immediately after node.free.
+       def free(list)
+         # do nothing
+       end
+       def level
+         @forward.size
+       end
+       def <(key)
+         @key < key
+       end
+       def <=(key)
+         @key <= key
+       end
+       def next
+         forward[0]
+       end
+       def to_s
+         "[#{level}]#{@key}: #{@value}"
+       end
+     end
+
+     class HeadNode < Node
+       def initialize
+         super 1, nil, nil
+       end
+       def <(key)
+         true
+       end
+       def <=(key)
+         true
+       end
+       def to_s
+         "head(#{level})"
+       end
+     end
+
+     # also a proxy-to-next-chunk node
+     class TailNode < Node
+       attr_accessor :next_list
+       def initialize
+         super 1, nil, nil
+       end
+       def <(key)
+         false
+       end
+       def <=(key)
+         false
+       end
+       def to_s
+         "tail(#{level})"
+       end
+     end
+   end
+ end
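As a quick orientation for the structure above, here is a minimal usage sketch. It relies only on the public methods visible in this hunk (insert, find, find_nearest, find_all_with_prefix, delete, size); the require path is an assumption about how the gem is loaded, not something shown in the diff.

    require 'strokedb'   # assumed entry point for the gem

    list = StrokeDB::Skiplist.new

    # Keys only need to support < / <= against each other.
    list.insert("alpha", 1)
    list.insert("bar",   2)
    list.insert("beta",  3)

    list.find("beta")                #=> 3
    list.find("missing", :none)      #=> :none (explicit default wins over @default)
    list.find_nearest("be")          #=> 2, the value of the closest preceding node ("bar")
    list.find_all_with_prefix("b")   #=> [2, 3] -- values whose keys start with "b", in key order
    list.delete("alpha")             #=> 1 (the removed value)
    list.size                        #=> 2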
data/lib/document/associations.rb
@@ -0,0 +1,107 @@
+ module StrokeDB
+
+   module Associations
+
+     module HasManyAssociation
+       attr_reader :association_owner, :association_slotname
+       def new(slots = {})
+         association_meta.constantize.new(association_owner.store, slots.merge({association_reference_slotname => association_owner}))
+       end
+       alias :build :new
+
+       def create!(slots = {})
+         new(slots).save!
+       end
+
+       def find(query = {})
+         association_owner._has_many_association(association_slotname, query)
+       end
+       def <<(doc)
+         doc.update_slots! association_reference_slotname => association_owner
+         self
+       end
+
+       private
+
+       def association_reference_slotname
+         association_owner.meta["has_many_#{association_slotname}"][:reference_slotname]
+       end
+
+       def association_meta
+         association_owner.meta["has_many_#{association_slotname}"][:meta]
+       end
+
+     end
+
+     def has_many(slotname, opts = {}, &block)
+       opts = opts.stringify_keys
+
+       reference_slotname = opts['foreign_reference']
+       through = opts['through'] || []
+       through = [through] unless through.is_a?(Array)
+       meta = (through.shift || slotname).to_s.singularize.camelize
+       query = opts['conditions'] || {}
+
+       extend_with = opts['extend'] || block
+
+       @meta_initialization_procs << Proc.new do
+         case extend_with
+         when Proc
+           extend_with_proc = extend_with
+           extend_with = "HasMany#{slotname.to_s.camelize}"
+           const_set(extend_with, Module.new(&extend_with_proc))
+           extend_with = "#{self.name}::HasMany#{slotname.to_s.camelize}"
+         when Module
+           extend_with = extend_with.name
+         when NilClass
+         else
+           raise "has_many extension should be either Module or Proc"
+         end
+         reference_slotname = reference_slotname || name.demodulize.tableize.singularize
+         if name.index('::') # we're in namespaced meta
+           _t = name.split('::')
+           _t.pop
+           _t << meta
+           meta = _t.join('::')
+         end
+         @args.last.reverse_merge!({"has_many_#{slotname}" => { :reference_slotname => reference_slotname, :through => through, :meta => meta, :query => query, :extend_with => extend_with } })
+         define_method(slotname) do
+           _has_many_association(slotname, {})
+         end
+
+       end
+
+     end
+
+     private
+
+     def initialize_associations
+       define_method(:_has_many_association) do |slotname, additional_query|
+         slot_has_many = meta["has_many_#{slotname}"]
+         reference_slotname = slot_has_many[:reference_slotname]
+         through = slot_has_many[:through]
+         meta = slot_has_many[:meta]
+         query = slot_has_many[:query]
+         effective_query = query.merge(:meta => meta.constantize.document, reference_slotname => self).merge(additional_query)
+
+         result = LazyArray.new.load_with do |lazy_array|
+           store.search(effective_query).map do |d|
+             begin
+               through.each { |t| d = d.send(t) }
+             rescue SlotNotFoundError
+               d = nil
+             end
+             d
+           end.compact
+         end
+         if extend_with = slot_has_many[:extend_with]
+           result.extend(extend_with.constantize)
+         end
+         result.instance_variable_set(:@association_owner, self)
+         result.instance_variable_set(:@association_slotname, slotname)
+         result.extend(HasManyAssociation)
+         result
+       end
+     end
+   end
+ end
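For readers new to this API, here is a hedged sketch of how has_many might be declared and used, based only on what this hunk shows. Playlist, Song, the slot names, and the store setup are illustrative assumptions rather than part of the package.

    # Hypothetical metas; a configured default store is assumed.
    Song = StrokeDB::Meta.new

    Playlist = StrokeDB::Meta.new do
      # Finds Song documents whose :playlist slot points back at this document
      # (the back-reference slot name defaults to the owner meta's name, singularized).
      has_many :songs
    end

    playlist = Playlist.create!
    playlist.songs.create!(:title => "Anthem")           # HasManyAssociation#create!: new Song with :playlist preset
    playlist.songs << Song.create!(:title => "B-side")   # writes the :playlist slot via update_slots!
    playlist.songs.find(:title => "Anthem")              # merges extra conditions into the association query
    playlist.songs.build(:title => "Demo")               # unsaved Song with :playlist preset (alias of #new)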
data/lib/document/callback.rb
@@ -0,0 +1,11 @@
+ module StrokeDB
+   class Callback
+     attr_reader :origin, :name, :uid
+     def initialize(origin, name, uid = nil, &block)
+       @origin, @name, @uid, @block = origin, name, uid, block
+     end
+     def call(*args)
+       @block.call(*args)
+     end
+   end
+ end
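Callback is just a named wrapper around a block. A minimal sketch; the origin object, callback name, and argument are arbitrary placeholders:

    cb = StrokeDB::Callback.new(self, :after_save) { |doc| puts "saved #{doc}" }
    cb.call("document #1")   # invokes the stored block with the given arguments
    cb.name                  #=> :after_save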
data/lib/document/coercions.rb
@@ -0,0 +1,57 @@
+ module StrokeDB
+   module Coercions
+     def coerces(slotnames, opts = {})
+       opts = opts.stringify_keys
+       raise ArgumentError, "coerces should have :to specified" unless opts['to']
+
+       check_condition(opts['if']) if opts['if']
+       check_condition(opts['unless']) if opts['unless']
+
+       slotnames = [slotnames] unless slotnames.is_a?(Array)
+       slotnames.each {|slotname| register_coercion(slotname, opts)}
+     end
+
+     private
+
+     def initialize_coercions
+       on_set_slot(:coerces) do |doc, slotname, value|
+         if coercion = doc.meta["coerces_#{slotname}"]
+           should_call = (!coercion[:if] || evaluate_condition(coercion[:if], doc)) &&
+             (!coercion[:unless] || !evaluate_condition(coercion[:unless], doc))
+           if should_call
+             case coercion[:to]
+             when 'number'
+               if value.to_i.to_s == value
+                 value.to_i
+               else
+                 value
+               end
+             when 'string'
+               value.to_s
+             end
+           end
+         end
+       end
+     end
+
+     def register_coercion(slotname, opts)
+       slotname = slotname.to_s
+       to = opts['to'].to_s
+
+       options_hash = {
+         :slotname => slotname,
+         :if => opts['if'],
+         :unless => opts['unless'],
+         :to => to
+       }
+
+       # options_hash.merge!(yield(opts)) if block_given?
+
+       coercion_slot = "coerces_#{slotname}"
+
+       @meta_initialization_procs << Proc.new do
+         @args.last.reverse_merge!(coercion_slot => { :meta => name }.merge(options_hash))
+       end
+     end
+   end
+ end
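A hedged sketch of declaring coercions inside a meta. Item and its slots are invented, a default store is assumed, and it presumes the surrounding slot machinery applies the value returned by the on_set_slot handler shown above.

    # Hypothetical meta; assumes a configured default store.
    Item = StrokeDB::Meta.new do
      coerces :quantity, :to => 'number'   # "42" -> 42 when the slot is set
      coerces :label,    :to => 'string'   # 10   -> "10"
    end

    item = Item.new(:quantity => "42", :label => 10)
    item[:quantity]   #=> 42  ("42.5" would pass through unchanged, since value.to_i.to_s != "42.5")
    item[:label]      #=> "10"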
data/lib/document/delete.rb
@@ -0,0 +1,28 @@
+ module StrokeDB
+
+   class DocumentDeletionError < StandardError
+   end
+
+   DeletedDocument = Meta.new(:uuid => DELETED_DOCUMENT_UUID) do
+     on_load do |doc|
+       doc.make_immutable!
+     end
+
+     def undelete!
+       deleted_version = versions.previous
+       store.save_as_head!(deleted_version)
+       self.class.find(uuid)
+     end
+   end
+
+   class Document
+
+     def delete!
+       raise DocumentDeletionError, "can't delete non-head document" unless head?
+       metas << DeletedDocument
+       save!
+       make_immutable!
+     end
+
+   end
+ end
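The flow above appends the DeletedDocument meta to the head version, saves it, and freezes the in-memory document; on reload, DeletedDocument's on_load hook makes the copy immutable and exposes undelete!. A hedged sketch, with Page as an invented meta and the store lookup as an assumption:

    page = Page.create!(:title => "Example")   # hypothetical meta, default store assumed
    page.delete!                               # appends DeletedDocument, saves, makes the document immutable

    deleted = page.store.find(page.uuid)       # reloaded head now carries the DeletedDocument meta
    deleted.undelete!                          # re-saves the pre-deletion version as head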