hashery 1.4.0 → 1.5.0
This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
- data/.ruby +57 -92
- data/.yardopts +8 -0
- data/COPYING.rdoc +45 -0
- data/HISTORY.rdoc +18 -0
- data/QED.rdoc +1 -0
- data/README.rdoc +42 -16
- data/lib/hashery.rb +16 -9
- data/lib/hashery.yml +57 -92
- data/lib/hashery/association.rb +3 -1
- data/lib/hashery/basic_object.rb +74 -0
- data/lib/hashery/basic_struct.rb +288 -1
- data/lib/hashery/basicobject.rb +1 -74
- data/lib/hashery/basicstruct.rb +1 -280
- data/lib/hashery/casting_hash.rb +171 -1
- data/lib/hashery/castinghash.rb +1 -171
- data/lib/hashery/core_ext.rb +82 -0
- data/lib/hashery/dictionary.rb +3 -0
- data/lib/hashery/fuzzy_hash.rb +154 -1
- data/lib/hashery/fuzzyhash.rb +1 -154
- data/lib/hashery/ini.rb +3 -2
- data/lib/hashery/key_hash.rb +186 -0
- data/lib/hashery/keyhash.rb +1 -0
- data/lib/hashery/linked_list.rb +195 -1
- data/lib/hashery/linkedlist.rb +1 -195
- data/lib/hashery/lru_hash.rb +273 -1
- data/lib/hashery/lruhash.rb +1 -273
- data/lib/hashery/open_cascade.rb +99 -1
- data/lib/hashery/open_hash.rb +77 -1
- data/lib/hashery/opencascade.rb +1 -99
- data/lib/hashery/openhash.rb +1 -77
- data/lib/hashery/ordered_hash.rb +168 -1
- data/lib/hashery/orderedhash.rb +1 -167
- data/lib/hashery/property_hash.rb +97 -1
- data/lib/hashery/propertyhash.rb +1 -97
- data/lib/hashery/query_hash.rb +35 -1
- data/lib/hashery/queryhash.rb +1 -35
- data/lib/hashery/stash.rb +3 -174
- data/lib/hashery/static_hash.rb +48 -1
- data/lib/hashery/statichash.rb +1 -48
- data/qed/06_opencascade.rdoc +12 -12
- data/test/case_association.rb +29 -15
- data/test/case_basicstruct.rb +192 -0
- data/test/case_dictionary.rb +149 -109
- data/test/case_keyhash.rb +175 -0
- data/test/case_opencascade.rb +89 -43
- data/test/case_openhash.rb +15 -11
- metadata +85 -78
- data/LICENSE +0 -206
- data/NOTICE +0 -11
- data/lib/hashery/sparse_array.rb +0 -1
- data/lib/hashery/sparsearray.rb +0 -577
- data/test/case_openobject.rb +0 -130
- data/test/case_sparsearray.rb +0 -316
- data/test/case_stash.rb +0 -131
data/lib/hashery/lru_hash.rb
CHANGED
@@ -1 +1,273 @@
-require '
+require 'enumerator'
+
+# Hash with LRU expiry policy. There are at most max_size elements in a
+# LRUHash. When adding more elements old elements are removed according
+# to LRU policy.
+#
+# http://github.com/rklemme/muppet-laboratories/blob/master/lib/lruhash.rb
+# Copyright (c) 2010 Robert Klemme
+
+class LRUHash
+
+  include Enumerable
+
+  attr_reader :max_size
+
+  attr_accessor :default
+  attr_accessor :default_proc
+  attr_accessor :release_proc
+
+  def initialize(max_size, default_value = nil, &block)
+    @max_size = normalize_max(max_size)
+    @default = default_value
+    @default_proc = block
+
+    @h = {}
+    @head = Node.new
+    @tail = front(Node.new)
+  end
+
+  def each_pair
+    if block_given?
+      each_node do |n|
+        yield [n.key, n.value]
+      end
+    else
+      enum_for :each_pair
+    end
+  end
+
+  alias each each_pair
+
+  def each_key
+    if block_given?
+      each_node do |n|
+        yield n.key
+      end
+    else
+      enum_for :each_key
+    end
+  end
+
+  def each_value
+    if block_given?
+      each_node do |n|
+        yield n.value
+      end
+    else
+      enum_for :each_value
+    end
+  end
+
+  def size
+    @h.size
+  end
+
+  def empty?
+    @head.succ.equal? @tail
+  end
+
+  def fetch(key, &b)
+    n = @h[key]
+
+    if n
+      front(n).value
+    else
+      (b || FETCH)[key]
+    end
+  end
+
+  def [](key)
+    fetch(key) do |k|
+      @default_proc ? @default_proc[self, k] : default
+    end
+  end
+
+  def keys
+    @h.keys
+  end
+
+  def values
+    @h.map {|k,n| n.value}
+  end
+
+  def has_key?(key)
+    @h.has_key? key
+  end
+
+  alias key? has_key?
+  alias member? has_key?
+  alias include? has_key?
+
+  def has_value?(value)
+    each_pair do |k, v|
+      return true if value.eql? v
+    end
+
+    false
+  end
+
+  alias value? has_value?
+
+  def values_at(*key_list)
+    key_list.map {|k| self[k]}
+  end
+
+  def assoc(key)
+    n = @h[key]
+
+    if n
+      front(n)
+      [n.key, n.value]
+    end
+  end
+
+  def rassoc(value)
+    each_node do |n|
+      if value.eql? n.value
+        front(n)
+        return [n.key, n.value]
+      end
+    end
+    nil
+  end
+
+  def key(value)
+    pair = rassoc(value) and pair.first
+  end
+
+  def store(key, value)
+    # same optimization as in Hash
+    key = key.dup.freeze if String === key && !key.frozen?
+
+    n = @h[key]
+
+    unless n
+      if size == max_size
+        # reuse node to optimize memory usage
+        n = delete_oldest
+        n.key = key
+        n.value = value
+      else
+        n = Node.new key, value
+      end
+
+      @h[key] = n
+    end
+
+    front(n).value = value
+  end
+
+  alias []= store
+
+  def delete(key)
+    n = @h[key] and remove_node(n).value
+  end
+
+  def delete_if
+    each_node do |n|
+      remove_node n if yield n.key, n.value
+    end
+  end
+
+  def max_size=(limit)
+    limit = normalize_max(limit)
+
+    while size > limit
+      delete_oldest
+    end
+
+    @max_size = limit
+  end
+
+  def clear
+    until empty?
+      delete_oldest
+    end
+
+    self
+  end
+
+  def to_s
+    s = nil
+    each_pair {|k, v| (s ? (s << ', ') : s = '{') << k.to_s << '=>' << v.to_s}
+    s ? (s << '}') : '{}'
+  end
+
+  alias inspect to_s
+
+  private
+
+  # iterate nodes
+  def each_node
+    n = @head.succ
+
+    until n.equal? @tail
+      succ = n.succ
+      yield n
+      n = succ
+    end
+
+    self
+  end
+
+  # move node to front
+  def front(node)
+    node.insert_after(@head)
+  end
+
+  # remove the node and invoke release_proc
+  # if set
+  def remove_node(node)
+    n = @h.delete(node.key)
+    n.unlink
+    release_proc and release_proc[n.key, n.value]
+    n
+  end
+
+  # remove the oldest node returning the node
+  def delete_oldest
+    n = @tail.pred
+    raise "Cannot delete from empty hash" if @head.equal? n
+    remove_node n
+  end
+
+  # Normalize the argument in order to be usable as max_size
+  # criterion is that n.to_i must be an Integer and it must
+  # be larger than zero.
+  def normalize_max(n)
+    n = n.to_i
+    raise ArgumentError, 'Invalid max_size: %p' % n unless Integer === n && n > 0
+    n
+  end
+
+  #
+  FETCH = Proc.new {|k| raise KeyError, 'key not found'}
+
+  # A single node in the doubly linked LRU list of nodes
+  Node = Struct.new :key, :value, :pred, :succ do
+    def unlink
+      pred.succ = succ if pred
+      succ.pred = pred if succ
+      self.succ = self.pred = nil
+      self
+    end
+
+    def insert_after(node)
+      raise 'Cannot insert after self' if equal? node
+      return self if node.succ.equal? self
+
+      unlink
+
+      self.succ = node.succ
+      self.pred = node
+
+      node.succ.pred = self if node.succ
+      node.succ = self
+
+      self
+    end
+  end
+
+end
+
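For orientation, here is a minimal usage sketch of the LRUHash class added above. It is illustrative only: the cache size, keys, and eviction message are made up, and it assumes the gem's lib directory is on the load path.

  require 'hashery/lru_hash'

  # Keep at most two entries; report evictions through release_proc.
  cache = LRUHash.new(2)
  cache.release_proc = lambda { |key, value| puts "evicted #{key}=#{value}" }

  cache[:a] = 1
  cache[:b] = 2
  cache[:a]        # reading :a marks it as most recently used
  cache[:c] = 3    # over max_size, so the least recently used entry (:b) is evicted
  cache.keys       # => [:a, :c]

Note that when the cache is full, store reuses the evicted Node rather than allocating a new one, so the doubly linked list and the backing hash stay at exactly max_size entries.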
data/lib/hashery/lruhash.rb
CHANGED
@@ -1,273 +1 @@
[273 lines removed: the full LRUHash implementation shown in the data/lib/hashery/lru_hash.rb hunk above, relocated to that file]
+require 'hashery/lru_hash'
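Taken together, the two hunks amount to a file rename with a backward-compatibility shim: the implementation now lives in the snake_case file, while the old camelcase path simply requires it. A small illustrative sketch of what that means for callers, assuming the gem's lib directory is on the load path (the require paths are exactly the ones shown in this diff):

  # Either path loads the same top-level LRUHash class in 1.5.0;
  # the legacy file's only line is `require 'hashery/lru_hash'`.
  require 'hashery/lru_hash'   # new canonical location
  require 'hashery/lruhash'    # legacy location, kept as a one-line shim

  cache = LRUHash.new(10)

The file manifest above shows the same pattern for the other renamed pairs, such as casting_hash.rb/castinghash.rb and open_cascade.rb/opencascade.rb.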