odba 1.1.2 → 1.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/.github/workflows/ruby.yml +35 -0
- data/Gemfile +4 -13
- data/History.txt +27 -1
- data/Rakefile +5 -6
- data/lib/odba/cache.rb +38 -35
- data/lib/odba/cache_entry.rb +3 -3
- data/lib/odba/index.rb +3 -7
- data/lib/odba/index_definition.rb +2 -3
- data/lib/odba/marshal.rb +5 -2
- data/lib/odba/persistable.rb +39 -36
- data/lib/odba/storage.rb +59 -46
- data/lib/odba/stub.rb +9 -6
- data/lib/odba/version.rb +1 -1
- data/odba.gemspec +8 -9
- data/test/test_array.rb +1 -0
- data/test/test_cache.rb +23 -22
- data/test/test_cache_entry.rb +1 -0
- data/test/test_connection_pool.rb +1 -0
- data/test/test_drbwrapper.rb +2 -1
- data/test/test_hash.rb +3 -2
- data/test/test_id_server.rb +1 -0
- data/test/test_index.rb +8 -8
- data/test/test_marshal.rb +2 -0
- data/test/test_persistable.rb +17 -16
- data/test/test_storage.rb +70 -123
- data/test/test_stub.rb +6 -4
- metadata +51 -12
- data/.travis.yml +0 -25
- data/test/suite.rb +0 -12
data/lib/odba/persistable.rb
CHANGED
@@ -45,7 +45,7 @@ module ODBA
 opts = keys.pop
 end
 if(keys.last.is_a?(Class))
-origin_klass = keys.pop
+origin_klass = keys.pop
 resolve = keys.pop
 resolve_origin = keys.pop
 elsif(keys.last.is_a?(Symbol))
@@ -58,7 +58,7 @@ module ODBA
 else
 resolve = keys.first
 end
-keys.each { |key|
+keys.each { |key|
 if RUBY_VERSION >= '1.9'
 key = key.to_sym
 else
@@ -85,8 +85,8 @@ module ODBA
 opts.each { |key, val| index_definition.send "#{key}=", val }
 ODBA.cache.ensure_index_deferred(index_definition)
 meta_eval {
-define_method(search_name) { |*vals|
-if(vals.size > 1)
+define_method(search_name) { |*vals|
+if(vals.size > 1)
 args = {}
 vals.each_with_index { |val, idx|
 cond = case val
@@ -95,7 +95,7 @@ module ODBA
 else
 'like'
 end
-args.store(keys.at(idx),
+args.store(keys.at(idx),
 { 'value' => val, 'condition' => cond })
 }
 ODBA.cache.retrieve_from_index(index_name, args)
@@ -104,12 +104,12 @@ module ODBA
 end
 }
 define_method(exact_name) { |*vals|
-if(vals.size > 1)
+if(vals.size > 1)
 args = {}
 vals.each_with_index { |val, idx|
 args.store(keys.at(idx), val)
 }
-ODBA.cache.retrieve_from_index(index_name, args,
+ODBA.cache.retrieve_from_index(index_name, args,
 ODBA::Persistable::Exact)
 else
 ODBA.cache.retrieve_from_index(index_name, vals.first,
@@ -117,7 +117,7 @@ module ODBA
 end
 }
 define_method(find_name) { |*vals|
-if(vals.size > 1)
+if(vals.size > 1)
 args = {}
 vals.each_with_index { |val, idx|
 cond = case val
@@ -126,7 +126,7 @@ module ODBA
 else
 'like'
 end
-args.store(keys.at(idx),
+args.store(keys.at(idx),
 { 'value' => val, 'condition' => cond })
 }
 ODBA.cache.retrieve_from_index(index_name, args,
@@ -145,7 +145,7 @@ module ODBA
 index_definition
 end
 def odba_extent
-all = ODBA.cache.extent(self)
+all = ODBA.cache.extent(self)
 if(block_given?)
 all.each { |instance| yield instance }
 nil
@@ -154,7 +154,7 @@ module ODBA
 end
 end
 def odba_count
-ODBA.cache.count(self)
+ODBA.cache.count(self)
 end
 end
 }
@@ -164,7 +164,7 @@ module ODBA
 name.gsub(@@sanitize_ptrn, '_')
 end
 attr_accessor :odba_name, :odba_prefetch
-# Classes which include Persistable may override ODBA_EXCLUDE_VARS to
+# Classes which include Persistable may override ODBA_EXCLUDE_VARS to
 # prevent data from being stored in the database (e.g. passwords, file
 # descriptors). Simply redefine: ODBA_EXCLUDE_VARS = ['@foo']
 ODBA_EXCLUDE_VARS = []
@@ -178,7 +178,7 @@ module ODBA
 ODBA_PREDEFINE_EXCLUDE_VARS = ['@odba_observers'] # :nodoc:
 ODBA_PREDEFINE_SERIALIZABLE = ['@odba_target_ids'] # :nodoc:, legacy
 end
-# If you want to prevent Persistables from being disconnected and stored
+# If you want to prevent Persistables from being disconnected and stored
 # separately (Array and Hash are Persistable by default), redefine:
 # ODBA_SERIALIZABLE = ['@bar']
 ODBA_SERIALIZABLE = []
@@ -188,7 +188,7 @@ module ODBA
 @@odba_id_name = RUBY_VERSION >= '1.9' ? :@odba_id : '@odba_id'
 def dup # :nodoc:
 twin = super
-## since twin may not be a Persistable, we need to do some magic here to
+## since twin may not be a Persistable, we need to do some magic here to
 # ensure that it does not have the same odba_id
 twin.instance_variable_set(@@odba_id_name, nil)
 twin
@@ -221,7 +221,7 @@ module ODBA
 def odba_delete
 ODBA.cache.delete(self)
 end
-# Delete _observer_ as an observer on this object.
+# Delete _observer_ as an observer on this object.
 # It will no longer receive notifications.
 def odba_delete_observer(observer)
 @odba_observers.delete(observer) if(@odba_observers)
@@ -251,7 +251,7 @@ module ODBA
 ODBA_PREDEFINE_EXCLUDE_VARS
 end
 if(defined?(self::class::ODBA_EXCLUDE_VARS))
-exc += self::class::ODBA_EXCLUDE_VARS
+exc += self::class::ODBA_EXCLUDE_VARS
 end
 if RUBY_VERSION >= '1.9'
 exc.map{|v| v.to_sym}
@@ -259,8 +259,8 @@ module ODBA
 exc
 end
 end
-# Returns the odba unique id of this Persistable.
-# If no id had been assigned, this is now done.
+# Returns the odba unique id of this Persistable.
+# If no id had been assigned, this is now done.
 # No attempt is made to store the Persistable in the db.
 def odba_id
 @odba_id ||= ODBA.cache.next_id
@@ -269,7 +269,7 @@ module ODBA
 ODBA.marshaller.dump(odba_isolated_twin)
 end
 # Convenience method equivalent to ODBA.cache.store(self)
-def odba_isolated_store
+def odba_isolated_store
 @odba_persistent = true
 ODBA.cache.store(self)
 end
@@ -278,7 +278,7 @@ module ODBA
 def odba_isolated_stub
 Stub.new(self.odba_id, nil, self)
 end
-# Returns a duplicate of this Persistable, for which all connected
+# Returns a duplicate of this Persistable, for which all connected
 # Persistables have been replaced by a Stub
 def odba_isolated_twin
 # ensure a valid odba_id
@@ -289,11 +289,12 @@ module ODBA
 twin
 end
 # A Persistable instance can be _prefetchable_. This means that the object
-# can be loaded at startup by calling ODBA.cache.prefetch, and that it will
-# never expire from the Cache. The prefetch status can be controlled per
-# instance by setting the instance variable @odba_prefetch, and per class by
+# can be loaded at startup by calling ODBA.cache.prefetch, and that it will
+# never expire from the Cache. The prefetch status can be controlled per
+# instance by setting the instance variable @odba_prefetch, and per class by
 # overriding the module constant ODBA_PREFETCH
 def odba_prefetch?
+@odba_prefetch ||= nil
 @odba_prefetch \
 || (defined?(self::class::ODBA_PREFETCH) && self::class::ODBA_PREFETCH)
 end
@@ -301,9 +302,9 @@ module ODBA
 @odba_indexable \
 || (defined?(self::class::ODBA_INDEXABLE) && self::class::ODBA_INDEXABLE)
 end
-# Invoke the update method in each currently associated observer
+# Invoke the update method in each currently associated observer
 # in turn, passing it the given arguments
-def odba_notify_observers(*args)
+def odba_notify_observers(*args)
 odba_observers.each { |obs| obs.odba_update(*args) }
 end
 def odba_observers
@@ -313,6 +314,7 @@ module ODBA
 instance_variables - odba_serializables - odba_exclude_vars
 end
 def odba_replace!(obj) # :nodoc:
+@odba_observers ||= []
 instance_variables.each { |name|
 instance_variable_set(name, obj.instance_variable_get(name))
 }
@@ -353,7 +355,7 @@ module ODBA
 ODBA_PREDEFINE_SERIALIZABLE
 end
 if(defined?(self::class::ODBA_SERIALIZABLE))
-srs += self::class::ODBA_SERIALIZABLE
+srs += self::class::ODBA_SERIALIZABLE
 end
 if RUBY_VERSION >= '1.9'
 srs.map{|s| s.to_sym}
@@ -369,7 +371,7 @@ module ODBA
 end
 # Stores this Persistable and recursively all connected unsaved persistables,
 # until no more direcly connected unsaved persistables can be found.
-# The optional parameter _name_ can be used later to retrieve this
+# The optional parameter _name_ can be used later to retrieve this
 # Persistable using Cache#fetch_named
 def odba_store(name = nil)
 begin
@@ -379,7 +381,7 @@ module ODBA
 end
 odba_store_unsaved
 self
-rescue
+rescue
 @odba_name = old_name
 raise
 end
@@ -406,14 +408,14 @@ module ODBA
 # must not be synchronized because of the following if
 # statement (if an object has already been replaced by
 # a stub, it will have the correct id and it
-# will be ignored)
+# will be ignored)
 case var
 when Stub
 # no need to make a new stub
 when Persistable
-if(var.odba_id == id)
+if(var.odba_id == id)
 stub = ODBA::Stub.new(id, self, obj)
-instance_variable_set(name, stub)
+instance_variable_set(name, stub)
 end
 end
 }
@@ -465,6 +467,7 @@ module ODBA
 !@odba_persistent
 #true
 else
+@odba_snapshot_level ||= 0
 @odba_snapshot_level.to_i < snapshot_level
 end
 end
@@ -482,7 +485,7 @@ class Array # :nodoc: all
 def odba_collection
 coll = []
 each_with_index { |item, index|
-coll.push([index, item])
+coll.push([index, item])
 }
 coll
 end
@@ -491,9 +494,9 @@ class Array # :nodoc: all
 delete_if { |val| val.eql?(remove_object) }
 end
 def odba_prefetch?
-super || any? { |item|
+super || any? { |item|
 item.respond_to?(:odba_prefetch?) \
-&& item.odba_prefetch?
+&& item.odba_prefetch?
 }
 end
 def odba_replace!(obj) # :nodoc:
@@ -505,7 +508,7 @@ class Array # :nodoc: all
 super
 end
 def odba_restore(collection=[])
-collection.each { |key, val|
+collection.each { |key, val|
 self[key] = val
 }
 end
@@ -579,7 +582,7 @@ class Hash # :nodoc: all
 super
 end
 def odba_restore(collection=[])
-collection.each { |key, val|
+collection.each { |key, val|
 self[key] = val
 }
 end
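
Several hunks above differ only in indentation, which the diff rendering does not preserve; the substantive changes are the added `||=` guards (`@odba_prefetch ||= nil`, `@odba_observers ||= []`, `@odba_snapshot_level ||= 0`) placed before an instance variable is first read. A minimal standalone sketch of what these guards do, assuming they exist to initialize the variable explicitly and to quiet the "instance variable not initialized" warning that Rubies before 3.0 emit under `-w`; the class and method names below are illustrative and not part of odba:

# illustrative_guards.rb -- hypothetical example, not code from the gem
class PrefetchExample
  def prefetch_unguarded?
    @odba_prefetch            # reading an uninitialized ivar returns nil
  end                         # (and warns under -w on Rubies before 3.0)

  def prefetch_guarded?
    @odba_prefetch ||= nil    # initializes the variable once, keeping nil as the default
    @odba_prefetch
  end

  def snapshot_outdated?(snapshot_level)
    @odba_snapshot_level ||= 0                  # explicit 0 default, as in the hunk above
    @odba_snapshot_level.to_i < snapshot_level
  end
end

e = PrefetchExample.new
p e.prefetch_guarded?       # => nil
p e.snapshot_outdated?(2)   # => true

Running the file with `ruby -w illustrative_guards.rb` on an older Ruby shows the warning only for the unguarded reader, which matches the pattern of the additions in this diff.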
data/lib/odba/storage.rb
CHANGED
@@ -15,31 +15,31 @@ module ODBA
 TABLES = [
 # in table 'object', the isolated dumps of all objects are stored
 ['object', <<-'SQL'],
-CREATE TABLE object (
+CREATE TABLE IF NOT EXISTS object (
 odba_id INTEGER NOT NULL, content TEXT,
 name TEXT, prefetchable BOOLEAN, extent TEXT,
 PRIMARY KEY(odba_id), UNIQUE(name)
 );
 SQL
 ['prefetchable_index', <<-SQL],
-CREATE INDEX prefetchable_index ON object(prefetchable);
+CREATE INDEX IF NOT EXISTS prefetchable_index ON object(prefetchable);
 SQL
 ['extent_index', <<-SQL],
-CREATE INDEX extent_index ON object(extent);
+CREATE INDEX IF NOT EXISTS extent_index ON object(extent);
 SQL
 # helper table 'object_connection'
 ['object_connection', <<-'SQL'],
-CREATE TABLE object_connection (
+CREATE TABLE IF NOT EXISTS object_connection (
 origin_id integer, target_id integer,
 PRIMARY KEY(origin_id, target_id)
 );
 SQL
 ['target_id_index', <<-SQL],
-CREATE INDEX target_id_index ON object_connection(target_id);
+CREATE INDEX IF NOT EXISTS target_id_index ON object_connection(target_id);
 SQL
 # helper table 'collection'
 ['collection', <<-'SQL'],
-CREATE TABLE collection (
+CREATE TABLE IF NOT EXISTS collection (
 odba_id integer NOT NULL, key text, value text,
 PRIMARY KEY(odba_id, key)
 );
@@ -135,7 +135,7 @@ CREATE TABLE collection (
 end
 def create_condition_index(table_name, definition)
 self.dbi.do <<-SQL
-CREATE TABLE #{table_name} (
+CREATE TABLE IF NOT EXISTS #{table_name} (
 origin_id INTEGER,
 #{definition.collect { |*pair| pair.join(' ') }.join(",\n ") },
 target_id INTEGER
@@ -143,62 +143,67 @@ CREATE TABLE #{table_name} (
 SQL
 #index origin_id
 self.dbi.do <<-SQL
-CREATE INDEX origin_id_#{table_name} ON #{table_name}(origin_id);
+CREATE INDEX IF NOT EXISTS origin_id_#{table_name} ON #{table_name}(origin_id);
 SQL
 #index search_term
 definition.each { |name, datatype|
 self.dbi.do <<-SQL
-CREATE INDEX #{name}_#{table_name} ON #{table_name}(#{name});
+CREATE INDEX IF NOT EXISTS #{name}_#{table_name} ON #{table_name}(#{name});
 SQL
 }
 #index target_id
 self.dbi.do <<-SQL
-CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
+CREATE INDEX IF NOT EXISTS target_id_#{table_name} ON #{table_name}(target_id);
 SQL
 end
 def create_fulltext_index(table_name)
 self.dbi.do <<-SQL
-CREATE TABLE #{table_name} (
+DROP TABLE IF EXISTS #{table_name};
+SQL
+self.dbi.do <<-SQL
+CREATE TABLE IF NOT EXISTS #{table_name} (
 origin_id INTEGER,
 search_term tsvector,
 target_id INTEGER
-);
+) WITH OIDS ;
 SQL
 #index origin_id
 self.dbi.do <<-SQL
-CREATE INDEX origin_id_#{table_name} ON #{table_name}(origin_id);
+CREATE INDEX IF NOT EXISTS origin_id_#{table_name} ON #{table_name}(origin_id);
 SQL
-#index search_term
 self.dbi.do <<-SQL
-CREATE INDEX search_term_#{table_name}
+CREATE INDEX IF NOT EXISTS search_term_#{table_name}
 ON #{table_name} USING gist(search_term);
 SQL
 #index target_id
 self.dbi.do <<-SQL
-CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
+CREATE INDEX IF NOT EXISTS target_id_#{table_name} ON #{table_name}(target_id);
 SQL
 end
 def create_index(table_name)
 self.dbi.do <<-SQL
-CREATE TABLE #{table_name} (
+DROP TABLE IF EXISTS #{table_name};
+SQL
+self.dbi.do <<-SQL
+CREATE TABLE IF NOT EXISTS #{table_name} (
 origin_id INTEGER,
 search_term TEXT,
 target_id INTEGER
-);
+) WITH OIDS;
 SQL
 #index origin_id
 self.dbi.do <<-SQL
-CREATE INDEX origin_id_#{table_name}
+CREATE INDEX IF NOT EXISTS origin_id_#{table_name}
 ON #{table_name}(origin_id)
 SQL
 #index search_term
 self.dbi.do <<-SQL
-CREATE INDEX search_term_#{table_name}
+CREATE INDEX IF NOT EXISTS search_term_#{table_name}
 ON #{table_name}(search_term)
 SQL
 #index target_id
 self.dbi.do <<-SQL
-CREATE INDEX target_id_#{table_name}
+CREATE INDEX IF NOT EXISTS target_id_#{table_name}
 ON #{table_name}(target_id)
 SQL
 end
@@ -206,7 +211,7 @@ CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
 Thread.current[:txn] || @dbi
 end
 def drop_index(index_name)
-self.dbi.do "DROP TABLE #{index_name}"
+self.dbi.do "DROP TABLE IF EXISTS #{index_name}"
 end
 def delete_index_element(index_name, odba_id, id_name)
 self.dbi.do <<-SQL, odba_id
@@ -267,7 +272,7 @@ CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
 def ensure_target_id_index(table_name)
 #index target_id
 self.dbi.do <<-SQL
-CREATE INDEX target_id_#{table_name}
+CREATE INDEX IF NOT EXISTS target_id_#{table_name}
 ON #{table_name}(target_id)
 SQL
 rescue
@@ -288,6 +293,9 @@ CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
 WHERE #{id_name} = ?
 SQL
 end
+def get_server_version
+/\s([\d\.]+)\s/.match(self.dbi.select_all("select version();").first.first)[1]
+end
 def fulltext_index_target_ids(index_name, origin_id)
 sql = <<-SQL
 SELECT DISTINCT target_id
@@ -296,22 +304,23 @@ CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
 SQL
 self.dbi.select_all(sql, origin_id)
 end
-def generate_dictionary(language
-
-
-
-
-
-puts "ERROR: \"#{filename}\" does not exist in #{data_path}."
-found = false
-end
-end
-return unless found
+def generate_dictionary(language)
+# postgres searches for the dictionary file in the directory share/tsearch_data of it installation location
+# By default under gentoo, this is /usr/share/postgresql/tsearch_data/
+# Use /usr/local/pgsql-10.1/bin/pg_config --sharedir to get the current value
+# As we have no way to get the current installation path, we do not check whether the files are present or not
+file='fulltext'
 # setup configuration
+self.dbi.do <<-SQL
+DROP TEXT SEARCH DICTIONARY IF EXISTS public.default_#{language};
+SQL
 self.dbi.do <<-SQL
 CREATE TEXT SEARCH CONFIGURATION public.default_#{language} ( COPY = pg_catalog.#{language} );
 SQL
 # ispell
+self.dbi.do <<-SQL
+DROP TEXT SEARCH DICTIONARY IF EXISTS #{language}_ispell;
+SQL
 self.dbi.do <<-SQL
 CREATE TEXT SEARCH DICTIONARY #{language}_ispell (
 TEMPLATE = ispell,
@@ -464,7 +473,7 @@ CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
 end
 self.dbi.select_all(sql, *values)
 end
-def retrieve_from_fulltext_index(index_name, search_term,
+def retrieve_from_fulltext_index(index_name, search_term, limit=nil)
 ## this combination of gsub statements solves the problem of
 # properly escaping strings of this form: "(2:1)" into
 # '\(2\:1\)' (see test_retrieve_from_fulltext_index)
@@ -472,19 +481,19 @@ CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
 .gsub(/[():]/i, '\\ \\&').gsub(/\s/, '')
 sql = <<-EOQ
 SELECT target_id,
-max(ts_rank(search_term, to_tsquery(
+max(ts_rank(search_term, to_tsquery(?))) AS relevance
 FROM #{index_name}
-WHERE search_term @@ to_tsquery(
+WHERE search_term @@ to_tsquery(?)
 GROUP BY target_id
 ORDER BY relevance DESC
 EOQ
 if(limit)
 sql << " LIMIT #{limit}"
 end
-self.dbi.select_all(sql,
+self.dbi.select_all(sql, term, term)
 rescue DBI::ProgrammingError => e
 warn("ODBA::Storage.retrieve_from_fulltext_index rescued a DBI::ProgrammingError(#{e.message}). Query:")
-warn("self.dbi.select_all(#{sql}, #{
+warn("self.dbi.select_all(#{sql}, #{term}, #{term})")
 warn("returning empty result")
 []
 end
@@ -526,7 +535,7 @@ CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
 unless(self.dbi.columns('object').any? { |col| col.name == 'extent' })
 self.dbi.do <<-EOS
 ALTER TABLE object ADD COLUMN extent TEXT;
-CREATE INDEX extent_index ON object(extent);
+CREATE INDEX IF NOT EXISTS extent_index ON object(extent);
 EOS
 end
 end
@@ -584,20 +593,24 @@ WHERE origin_id = ?
 SQL
 end
 end
-def update_fulltext_index(index_name, origin_id, search_term, target_id
+def update_fulltext_index(index_name, origin_id, search_term, target_id)
 search_term = search_term.gsub(/\s+/, ' ').strip
 if(target_id)
-
+value = <<-SQL, origin_id.to_s, search_term, target_id
 INSERT INTO #{index_name} (origin_id, search_term, target_id)
-VALUES (?, to_tsvector(
+VALUES (?, to_tsvector(?), ?)
+SQL
+result = self.dbi.do <<-SQL, origin_id.to_s, search_term, target_id
+INSERT INTO #{index_name} (origin_id, search_term, target_id)
+VALUES (?, to_tsvector(?), ?)
 SQL
 else
-self.dbi.do <<-SQL,
-UPDATE #{index_name} SET search_term=to_tsvector(
+result = self.dbi.do <<-SQL, search_term, origin_id
+UPDATE #{index_name} SET search_term=to_tsvector(?)
 WHERE origin_id=?
 SQL
 end
-
+end
 def update_index(index_name, origin_id, search_term, target_id)
 if(target_id)
 self.dbi.do <<-SQL, origin_id, search_term, target_id
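
Two patterns dominate the storage.rb changes: the DDL is made re-runnable by adding `IF NOT EXISTS` / `IF EXISTS` guards (and explicit `DROP ... IF EXISTS` statements before the fulltext table and text-search dictionaries), and the fulltext queries now bind the search term through `?` placeholders passed to `to_tsquery(?)` and `to_tsvector(?)` instead of building it into the SQL string. A simplified sketch of the parameterized-query pattern, not the gem's implementation: it assumes `dbh` is a ruby-dbi handle to a PostgreSQL database and that the index table has `(origin_id, search_term tsvector, target_id)` columns, and it reduces the gem's escaping to plain whitespace normalization.

# parameterized_fulltext.rb -- hypothetical sketch, not code from the gem
require 'dbi'

def fulltext_search(dbh, index_table, search_term, limit = nil)
  term = search_term.gsub(/\s+/, ' ').strip   # reduced escaping, for illustration only
  sql = <<-SQL
    SELECT target_id,
           max(ts_rank(search_term, to_tsquery(?))) AS relevance
      FROM #{index_table}
     WHERE search_term @@ to_tsquery(?)
     GROUP BY target_id
     ORDER BY relevance DESC
  SQL
  sql << " LIMIT #{limit}" if limit
  # the same term is bound twice, once per '?' placeholder, as in the diff above
  dbh.select_all(sql, term, term)
end

# usage (connection details are placeholders):
# dbh = DBI.connect('DBI:Pg:odba_test', 'user', 'secret')
# fulltext_search(dbh, 'fulltext_index_table', 'aspirin', 10)

Only the table name is interpolated into the SQL; the user-supplied term travels as a bind variable, which mirrors the intent of the rewritten retrieve_from_fulltext_index and update_fulltext_index methods.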