litestack 0.3.0 → 0.4.1
- checksums.yaml +4 -4
- data/BENCHMARKS.md +11 -0
- data/CHANGELOG.md +10 -0
- data/Gemfile +2 -0
- data/bench/bench_jobs_rails.rb +1 -1
- data/bench/bench_jobs_raw.rb +1 -1
- data/bench/uljob.rb +1 -1
- data/lib/active_support/cache/litecache.rb +1 -1
- data/lib/litestack/litecable.rb +3 -3
- data/lib/litestack/litecache.rb +1 -1
- data/lib/litestack/litedb.rb +6 -0
- data/lib/litestack/litejob.rb +2 -3
- data/lib/litestack/litejobqueue.rb +51 -48
- data/lib/litestack/litemetric.rb +3 -3
- data/lib/litestack/litescheduler.rb +84 -0
- data/lib/litestack/litesearch/index.rb +230 -0
- data/lib/litestack/litesearch/model.rb +178 -0
- data/lib/litestack/litesearch/schema.rb +193 -0
- data/lib/litestack/litesearch/schema_adapters/backed_adapter.rb +147 -0
- data/lib/litestack/litesearch/schema_adapters/basic_adapter.rb +128 -0
- data/lib/litestack/litesearch/schema_adapters/contentless_adapter.rb +17 -0
- data/lib/litestack/litesearch/schema_adapters/standalone_adapter.rb +33 -0
- data/lib/litestack/litesearch/schema_adapters.rb +9 -0
- data/lib/litestack/litesearch.rb +37 -0
- data/lib/litestack/litesupport.rb +16 -107
- data/lib/litestack/version.rb +1 -1
- data/lib/litestack.rb +1 -0
- data/lib/sequel/adapters/litedb.rb +3 -2
- metadata +13 -3
data/lib/litestack/litesearch/model.rb
@@ -0,0 +1,178 @@
module Litesearch::Model

  def self.included(klass)
    klass.include InstanceMethods
    klass.extend ClassMethods
    klass.attribute :search_rank, :float if klass.respond_to? :attribute
    if defined?(Sequel::Model) != nil && klass.ancestors.include?(Sequel::Model)
      klass.include Litesearch::Model::SequelInstanceMethods
      klass.extend Litesearch::Model::SequelClassMethods
      Sequel::Model.extend Litesearch::Model::BaseClassMethods
    elsif defined?(ActiveRecord::Base) != nil && klass.ancestors.include?(ActiveRecord::Base)
      klass.include Litesearch::Model::ActiveRecordInstanceMethods
      klass.extend Litesearch::Model::ActiveRecordClassMethods
      ActiveRecord::Base.extend Litesearch::Model::BaseClassMethods
    end
  end

  module BaseClassMethods
    def search_models
      @@models ||= {}
    end
  end

  module InstanceMethods

  end

  module ClassMethods

    def litesearch
      idx = get_connection.search_index(index_name) do |schema|
        schema.type :backed
        schema.table table_name.to_sym
        yield schema
        schema.post_init
        @schema = schema #save the schema
      end
      if defined?(Sequel::Model) != nil && self.ancestors.include?(Sequel::Model)
        Sequel::Model.search_models[self.name] = self
      elsif defined?(ActiveRecord::Base) != nil && self.ancestors.include?(ActiveRecord::Base)
        ActiveRecord::Base.search_models[self.name] = self
      end
      idx
    end

    def rebuild_index!
      get_connection.search_index(index_name).rebuild!
    end

    def drop_index!
      get_connection.search_index(index_name).drop!
    end

    def search_all(term, options={})
      options[:offset] ||= 0
      options[:limit] ||= 25
      selects = []
      if models = options[:models]
        models_hash = {}
        models.each do |model|
          models_hash[model.name] = model
        end
      else
        models_hash = search_models
      end
      models_hash.each do |name, klass|
        selects << "SELECT '#{name}' AS model, rowid, -rank AS search_rank FROM #{index_name_for_table(klass.table_name)}('#{term}')"
      end
      conn = get_connection
      sql = selects.join(" UNION ") << " ORDER BY search_rank DESC LIMIT #{options[:limit]} OFFSET #{options[:offset]}"
      result = []
      rs = conn.query(sql) #, options[:limit], options[:offset])
      rs.each_hash do |row|
        obj = models_hash[row["model"]].fetch_row(row["rowid"])
        obj.search_rank = row["search_rank"]
        result << obj
      end
      rs.close
      result
    end

    # AR specific

    private

    def index_name
      "#{table_name}_search_idx"
    end

    def index_name_for_table(table)
      "#{table}_search_idx"
    end

    # create a new instance of self with the row as an argument
    def create_instance(row)
      self.new(row)
    end

  end

  module ActiveRecordInstanceMethods;end

  module ActiveRecordClassMethods

    def get_connection
      connection.raw_connection
    end

    def fetch_row(id)
      find(id)
    end

    def search(term)
      self.select(
        "#{table_name}.*"
      ).joins(
        "INNER JOIN #{index_name} ON #{table_name}.id = #{index_name}.rowid AND rank != 0 AND #{index_name} MATCH ", Arel.sql("'#{term}'")
      ).select(
        "-#{index_name}.rank AS search_rank"
      ).order(
        Arel.sql("#{index_name}.rank")
      )
    end

    private

    def create_instance(row)
      instantiate(row)
    end
  end

  module SequelInstanceMethods

    def search_rank
      @values[:search_rank]
    end

    def search_rank=(rank)
      @values[:search_rank] = rank
    end

  end

  module SequelClassMethods

    def fetch_row(id)
      self[id]
    end

    def get_connection
      db.instance_variable_get(:@raw_db)
    end

    def search(term)
      dataset.select(
        Sequel.lit("#{table_name}.*, -#{index_name}.rank AS search_rank")
      ).inner_join(
        Sequel.lit("#{index_name}('#{term}') ON #{table_name}.id = #{index_name}.rowid AND rank != 0")
      ).order(
        Sequel.lit('rank')
      )
    end

    private

    def create_instance(row)
      # we need to convert keys to symbols first!
      row.keys.each do |k|
        next if k.is_a? Symbol
        row[k.to_sym] = row[k]
        row.delete(k)
      end
      self.call(row)
    end
  end

end
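Usage note (not part of the diff): the module above hooks itself into either ActiveRecord or Sequel when included. A minimal sketch, assuming a hypothetical Post ActiveRecord model with title and body columns (all names here are illustrative):

# Hypothetical model; Post, :title and :body are assumptions, not from this diff.
class Post < ActiveRecord::Base
  include Litesearch::Model

  # declares the backed FTS5 index posts_search_idx over the posts table
  litesearch do |schema|
    schema.fields [:title, :body]    # indexed with the default weight of 1
    schema.field :title, weight: 10  # give title matches a heavier bm25 weight
  end
end

# relation ordered by FTS5 rank; rows expose search_rank (see klass.attribute above)
Post.search("sqlite").limit(5).each { |post| puts [post.id, post.search_rank].inspect }

# query every model that called litesearch (class method from ClassMethods above)
Post.search_all("sqlite", limit: 10)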
data/lib/litestack/litesearch/schema.rb
@@ -0,0 +1,193 @@
require_relative './schema_adapters.rb'

class Litesearch::Schema

  TOKENIZERS = {
    porter: 'porter unicode61 remove_diacritics 2',
    unicode: 'unicode61 remove_diacritics 2',
    ascii: 'ascii',
    trigram: 'trigram'
  }

  INDEX_TYPES = {
    standalone: Litesearch::Schema::StandaloneAdapter,
    contentless: Litesearch::Schema::ContentlessAdapter,
    backed: Litesearch::Schema::BackedAdapter
  }

  DEFAULT_SCHEMA = {
    name: nil,
    type: :standalone,
    fields: nil,
    table: nil,
    filter_column: nil,
    tokenizer: :porter,
    auto_create: true,
    auto_modify: true,
    rebuild_on_create: false,
    rebuild_on_modify: false
  }

  attr_accessor :schema

  def initialize(schema = {})
    @schema = schema #DEFAULT_SCHEMA.merge(schema)
    @schema[:fields] = {} unless @schema[:fields]
  end

  # schema definition API
  def name(new_name)
    @schema[:name] = new_name
  end

  def type(new_type)
    raise "Unknown index type" if INDEX_TYPES[new_type].nil?
    @schema[:type] = new_type
  end

  def table(table_name)
    @schema[:table] = table_name
  end

  def fields(field_names)
    field_names.each {|f| field f }
  end

  def field(name, attributes = {})
    name = name.to_s.downcase.to_sym
    attributes = {weight: 1}.merge(attributes).select{|k, v| allowed_attributes.include?(k)} # only allow attributes we know, to ease schema comparison later
    @schema[:fields][name] = attributes
  end

  def tokenizer(new_tokenizer)
    raise "Unknown tokenizer" if TOKENIZERS[new_tokenizer].nil?
    @schema[:tokenizer] = new_tokenizer
  end

  def filter_column(filter_column)
    @schema[:filter_column] = filter_column
  end

  def auto_create(boolean)
    @schema[:auto_create] = boolean
  end

  def auto_modify(boolean)
    @schema[:auto_modify] = boolean
  end

  def rebuild_on_create(boolean)
    @schema[:rebuild_on_create] = boolean
  end

  def rebuild_on_modify(boolean)
    @schema[:rebuild_on_modify] = boolean
  end

  def post_init
    @schema = DEFAULT_SCHEMA.merge(@schema)
  end

  # schema sql generation API

  def sql_for(method, *args)
    adapter.sql_for(method, *args)
  end

  # schema data structure API
  def get(key)
    @schema[key]
  end

  def get_field(name)
    @schema[:fields][name]
  end

  def adapter
    @adapter ||= INDEX_TYPES[@schema[:type]].new(@schema)
  end

  def reset_sql
    adapter.generate_sql
  end

  def order_fields(old_schema)
    adapter.order_fields(old_schema)
  end

  # should we do this at the schema objects level?
  def compare(other_schema)
    other_schema = other_schema.schema
    # are the schemas identical?
    # 1 - same fields?
    [:type, :tokenizer, :name, :table].each do |key|
      other_schema[key] = @schema[key] if other_schema[key].nil?
    end
    if @schema[:type] != other_schema[:type]
      raise Litesearch::SchemaChangeException.new "Cannot change the index type, please drop the index before creating it again with the new type"
    end
    changes = { tokenizer: @schema[:tokenizer] != other_schema[:tokenizer], table: @schema[:table] != other_schema[:table], removed_fields_count: 0, filter_column: @schema[:filter_column] != other_schema[:filter_column] }
    #check tokenizer changes
    if changes[:tokenizer] && !other_schema[:rebuild_on_modify]
      raise Litesearch::SchemaChangeException.new "Cannot change the tokenizer without an index rebuild!"
    end

    # check field changes
    keys = @schema[:fields].keys.sort
    other_keys = other_schema[:fields].keys.sort

    extra_keys = other_keys - keys
    extra_keys.each do |key|
      if other_schema[:fields][key][:weight] == 0
        other_schema[:fields].delete(key)
      end
    end

    other_keys = other_schema[:fields].keys.sort

    changes[:fields] = keys != other_keys # only acceptable change is adding extra fields
    changes[:extra_fields_count] = other_keys.count - keys.count
    # check for missing fields (please note that adding fields can work without a rebuild)
    if keys - other_keys != []
      raise Litesearch::SchemaChangeException.new "Missing fields from existing schema, they have to exist with weight zero until the next rebuild!"
    end

    # check field weights
    weights = keys.collect{|key| @schema[:fields][key][:weight] }
    other_weights = other_keys.collect{|key| other_schema[:fields][key][:weight] }
    changes[:weights] = weights != other_weights # will always be true if fields are added
    if (removed_count = other_weights.select{|w| w == 0}.count) > 0
      changes[:removed_fields_count] = removed_count
    end
    # check field attributes, only backed tables have attributes
    attrs = keys.collect do |key|
      f = @schema[:fields][key].dup
      f.delete(:weight)
      f.select{|k,v| allowed_attributes.include? k }
    end
    other_attrs = other_keys.collect do |key|
      f = other_schema[:fields][key].dup
      f.delete(:weight)
      f.select{|k,v| allowed_attributes.include? k }
    end
    changes[:attributes] if other_attrs != attrs # this means that we will need to redefine the triggers if any are there and also the table definition if needed

    # return the changes
    changes
  end

  def clean
    removable = @schema[:fields].select{|name, f| f[:weight] == 0 }.collect{|name, f| name}
    removable.each{|name| @schema[:fields].delete(name)}
  end

  def allowed_attributes
    [:weight, :col, :target]
  end

end

class Litesearch::SchemaException < StandardError; end
class Litesearch::SchemaChangeException < StandardError; end
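Usage note (not part of the diff): the class above is a small DSL for declaring an index. A sketch of the options it accepts, using illustrative names that are assumptions, not from this diff:

# Illustrative values only; emails_idx, :subject and :body are assumptions.
schema = Litesearch::Schema.new
schema.name :emails_idx
schema.type :standalone          # :standalone, :contentless or :backed (see INDEX_TYPES)
schema.tokenizer :porter         # looked up in TOKENIZERS
schema.fields [:subject, :body]  # each field gets the default weight of 1
schema.field :body, weight: 5    # per-field bm25 weight
schema.rebuild_on_modify true
schema.post_init                 # fills in the remaining DEFAULT_SCHEMA values

schema.get(:tokenizer)           # => :porter
schema.sql_for(:create_index)    # delegated to the standalone adapter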
data/lib/litestack/litesearch/schema_adapters/backed_adapter.rb
@@ -0,0 +1,147 @@
class Litesearch::Schema::BackedAdapter < Litesearch::Schema::ContentlessAdapter

  private

  def table
    @schema[:table]
  end

  def generate_sql
    super
    @sql[:rebuild] = :rebuild_sql
    @sql[:drop_primary_triggers] = :drop_primary_triggers_sql
    @sql[:drop_secondary_triggers] = :drop_secondary_triggers_sql
    @sql[:create_primary_triggers] = :create_primary_triggers_sql
    @sql[:create_secondary_triggers] = :create_secondary_triggers_sql
  end

  def drop_primary_triggers_sql
    sql = <<-SQL
      DROP TRIGGER IF EXISTS #{name}_insert;
      DROP TRIGGER IF EXISTS #{name}_update;
      DROP TRIGGER IF EXISTS #{name}_update_not;
      DROP TRIGGER IF EXISTS #{name}_delete;
    SQL
  end

  def create_primary_triggers_sql(active=false)
    when_stmt = "TRUE"
    cols = active_cols_names
    if filter = @schema[:filter_column]
      when_stmt = "NEW.#{filter} = TRUE"
      cols << filter
    end

    sql = <<-SQL
      CREATE TRIGGER #{name}_insert AFTER INSERT ON #{table} WHEN #{when_stmt} BEGIN
        INSERT OR REPLACE INTO #{name}(rowid, #{active_field_names.join(', ')}) VALUES (NEW.rowid, #{trigger_cols_sql});
      END;
      CREATE TRIGGER #{name}_update AFTER UPDATE OF #{cols.join(', ')} ON #{table} WHEN #{when_stmt} BEGIN
        INSERT OR REPLACE INTO #{name}(rowid, #{active_field_names.join(', ')}) VALUES (NEW.rowid, #{trigger_cols_sql});
      END;
      CREATE TRIGGER #{name}_update_not AFTER UPDATE OF #{cols.join(', ')} ON #{table} WHEN NOT #{when_stmt} BEGIN
        DELETE FROM #{name} WHERE rowid = NEW.rowid;
      END;
      CREATE TRIGGER #{name}_delete AFTER DELETE ON #{table} BEGIN
        DELETE FROM #{name} WHERE rowid = OLD.id;
      END;
    SQL
  end

  def drop_secondary_trigger_sql(target_table, target_col, col)
    "DROP TRIGGER IF EXISTS #{target_table}_#{target_col}_#{col}_#{name}_update;"
  end

  def create_secondary_trigger_sql(target_table, target_col, col)
    sql = <<-SQL
      CREATE TRIGGER #{target_table}_#{target_col}_#{col}_#{name}_update AFTER UPDATE OF #{target_col} ON #{target_table} BEGIN
        #{rebuild_sql} AND #{table}.#{col} = NEW.id;
      END;
    SQL
  end

  def drop_secondary_triggers_sql
    sql = ""
    @schema[:fields].each do |name, field|
      if field[:trigger_sql]
        sql << drop_secondary_trigger_sql(field[:target_table], field[:target_col], field[:col])
      end
    end
    return sql.empty? ? nil : sql
  end

  def create_secondary_triggers_sql
    sql = ""
    @schema[:fields].each do |name, field|
      if field[:trigger_sql]
        sql << create_secondary_trigger_sql(field[:target_table], field[:target_col], field[:col])
      end
    end
    return sql.empty? ? nil : sql
  end

  def rebuild_sql
    conditions = ""
    jcs = join_conditions_sql
    fs = filter_sql
    conditions = " ON #{jcs} #{fs}" unless jcs.empty? && fs.empty?
    "INSERT OR REPLACE INTO #{name}(rowid, #{active_field_names.join(', ')}) SELECT #{table}.id, #{select_cols_sql} FROM #{join_tables_sql} #{conditions}"
  end

  def enrich_schema
    @schema[:fields].each do |name, field|
      if field[:target] && ! field[:target].start_with?("#{table}.")
        field[:target] = field[:target].downcase
        target_table, target_col = field[:target].split('.')
        field[:col] = "#{name}_id".to_sym unless field[:col]
        field[:target_table] = target_table.to_sym
        field[:target_col] = target_col.to_sym
        field[:sql] = "(SELECT #{field[:target_col]} FROM #{field[:target_table]} WHERE id = NEW.#{field[:col]})"
        field[:trigger_sql] = true # create_secondary_trigger_sql(field[:target_table], field[:target_col], field[:col])
        field[:target_table_alias] = "#{field[:target_table]}_#{name}"
      else
        field[:col] = name unless field[:col]
        field[:sql] = field[:col]
        field[:target_table] = @schema[:table]
        field[:target] = "#{@schema[:table]}.#{field[:sql]}"
      end
    end
  end

  def filter_sql
    sql = ""
    sql << " AND #{@schema[:filter_column]} = TRUE " if @schema[:filter_column]
    sql
  end

  def trigger_cols_sql
    active_fields.collect do |name, field|
      field[:trigger_sql] ? field[:sql] : "NEW.#{field[:sql]}"
    end.join(", ")
  end

  def select_cols_sql
    active_fields.collect do |name, field|
      field[:trigger_sql] != nil ? "#{field[:target_table_alias]}.#{field[:target_col]}" : field[:target]
    end.join(', ')
  end

  def join_tables_sql
    tables = [@schema[:table]]
    active_fields.each do |name, field|
      tables << "#{field[:target_table]} AS #{field[:target_table_alias]}" if field[:trigger_sql]
    end
    tables.uniq.join(", ")
  end

  def join_conditions_sql
    conditions = []
    active_fields.each do |name, field|
      conditions << "#{field[:target_table_alias]}.id = #{@schema[:table]}.#{field[:col]}" if field[:trigger_sql]
    end
    conditions.join(" AND ")
  end

end
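Usage note (not part of the diff): the backed adapter keeps the index in sync with its source table via triggers; a field whose target points at another table gets a secondary trigger and is joined in by rebuild_sql, while filter_column narrows the primary triggers. A hedged sketch, with all table and column names assumed for illustration:

# Illustrative backed index; posts, authors and published are assumed names.
schema = Litesearch::Schema.new
schema.type :backed
schema.name :posts_search_idx
schema.table :posts
schema.field :title                            # plain column on posts
schema.field :author, target: "authors.name"   # joined via posts.author_id (default col)
schema.filter_column :published                # index only rows where published = TRUE
schema.post_init

adapter = schema.adapter                          # Litesearch::Schema::BackedAdapter
puts adapter.sql_for(:create_primary_triggers)    # insert/update/update_not/delete triggers on posts
puts adapter.sql_for(:create_secondary_triggers)  # refresh rows when authors.name changes
puts adapter.sql_for(:rebuild)                    # INSERT OR REPLACE ... FROM posts, authors AS authors_author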
data/lib/litestack/litesearch/schema_adapters/basic_adapter.rb
@@ -0,0 +1,128 @@
class Litesearch::Schema::BasicAdapter

  def initialize(schema)
    @schema = schema
    @sql = {}
    enrich_schema
    generate_sql
  end

  def name
    @schema[:name]
  end

  def table
    @schema[:table]
  end

  def fields
    @schema[:fields]
  end

  def field_names
    @schema[:fields].keys
  end

  def active_fields
    @schema[:fields].select{|k, v| v[:weight] != 0 }
  end

  def active_field_names
    active_fields.keys
  end

  def active_cols_names
    active_fields.collect{|k, v| v[:col]}
  end

  def weights
    @schema[:fields].values.collect{|v| v[:weight].to_f }
  end

  def active_weights
    active_fields.values.collect{|v| v[:weight].to_f }
  end

  def tokenizer_sql
    Litesearch::Schema::TOKENIZERS[@schema[:tokenizer]]
  end

  def order_fields(old_schema)
    new_fields = {}
    old_field_names = old_schema.schema[:fields].keys
    old_field_names.each do |name|
      new_fields[name] = @schema[:fields].delete(name)
    end
    missing_field_names = field_names - old_field_names
    missing_field_names.each do |name|
      new_fields[name] = @schema[:fields].delete(name)
    end
    @schema[:fields] = new_fields # this should be in order now
    generate_sql
    enrich_schema
  end

  def sql_for(method, *args)
    if sql = @sql[method]
      if sql.is_a? String
        return sql
      elsif sql.is_a? Proc
        return sql.call(*args)
      elsif sql.is_a? Symbol
        return self.send(sql, *args)
      elsif sql.is_a? Litesearch::SchemaChangeException
        raise sql
      end
    end
  end

  def generate_sql
    @sql[:create_index] = :create_index_sql
    @sql[:insert] = "INSERT OR REPLACE INTO #{name}(rowid, #{active_col_names_sql}) VALUES (:id, #{active_col_names_var_sql}) RETURNING rowid"
    @sql[:delete] = "DELETE FROM #{name} WHERE rowid = :id"
    @sql[:count] = "SELECT count(*) FROM #{name}(:term)"
    @sql[:count_all] = "SELECT count(*) FROM #{name}"
    @sql[:delete_all] = "DELETE FROM #{name}"
    @sql[:drop] = "DROP TABLE #{name}"
    @sql[:expand_data] = "UPDATE #{name}_data SET block = block || zeroblob(:length) WHERE id = 1"
    @sql[:expand_docsize] = "UPDATE #{name}_docsize SET sz = sz || zeroblob(:length)"
    @sql[:ranks] = :ranks_sql
    @sql[:set_config_value] = "INSERT OR REPLACE INTO #{name}_config(k, v) VALUES (:key, :value)"
    @sql[:get_config_value] = "SELECT v FROM #{name}_config WHERE k = :key"
    @sql[:search] = "SELECT rowid AS id, -rank AS search_rank FROM #{name}(:term) WHERE rank !=0 ORDER BY rank LIMIT :limit OFFSET :offset"
    @sql[:update_index] = "UPDATE sqlite_schema SET sql = :sql WHERE name = '#{name}'"
    @sql[:update_content_table] = "UPDATE sqlite_schema SET sql = :sql WHERE name = '#{name}_content'"
  end

  private

  def ranks_sql(active=false)
    if active
      weights_sql = weights.join(', ')
    else
      weights_sql = active_weights.join(', ')
    end
    "INSERT INTO #{name}(#{name}, rank) VALUES ('rank', 'bm25(#{weights_sql})')"
  end

  def active_col_names_sql
    active_field_names.join(', ')
  end

  def active_col_names_var_sql
    ":#{active_field_names.join(', :')}"
  end

  def col_names_sql
    field_names.join(', ')
  end

  def col_names_var_sql
    ":#{field_names.join(', :')}"
  end

  def enrich_schema
  end

end
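Usage note (not part of the diff): sql_for above dispatches on what generate_sql stored — Strings are returned verbatim, Symbols are sent as method calls (so subclasses can plug in builders), Procs are called, and a stored SchemaChangeException is raised. A small sketch with assumed names; note that :create_index only resolves in the subclasses below, which define create_index_sql:

# Assumed schema hash; notes_idx, :title and :body are illustrative.
schema = { name: :notes_idx, tokenizer: :porter,
           fields: { title: { weight: 1 }, body: { weight: 5 } } }
adapter = Litesearch::Schema::BasicAdapter.new(schema)

adapter.sql_for(:delete)      # String -> "DELETE FROM notes_idx WHERE rowid = :id"
adapter.sql_for(:ranks, true) # Symbol -> ranks_sql(true), the bm25 weights statement
adapter.sql_for(:missing)     # unknown key -> nil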
data/lib/litestack/litesearch/schema_adapters/contentless_adapter.rb
@@ -0,0 +1,17 @@
class Litesearch::Schema::ContentlessAdapter < Litesearch::Schema::BasicAdapter

  private

  def generate_sql
    super
    #@sql[:rebuild_index] = Litesearch::SchemaChangeException.new("You cannot rebuild a contentless index")
    #@sql[:rebuild] = Litesearch::SchemaChangeException.new("You cannot rebuild a contentless index")
  end

  def create_index_sql(active = false)
    col_names = active ? active_col_names_sql : col_names_sql
    "CREATE VIRTUAL TABLE #{name} USING FTS5(#{col_names}, content='', contentless_delete=1, tokenize='#{tokenizer_sql}')"
  end

end
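Usage note (not part of the diff): a contentless index (content='') stores no copy of the documents, so it is populated through the basic adapter's insert/delete statements rather than rebuilt from a table. A sketch with assumed names:

# Assumed index name and field, for illustration only.
schema = Litesearch::Schema.new
schema.name :tags_idx
schema.type :contentless
schema.fields [:label]
schema.post_init

schema.sql_for(:create_index)
# => "CREATE VIRTUAL TABLE tags_idx USING FTS5(label, content='', contentless_delete=1,
#     tokenize='porter unicode61 remove_diacritics 2')"
schema.sql_for(:insert)
# => "INSERT OR REPLACE INTO tags_idx(rowid, label) VALUES (:id, :label) RETURNING rowid"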
data/lib/litestack/litesearch/schema_adapters/standalone_adapter.rb
@@ -0,0 +1,33 @@
class Litesearch::Schema::StandaloneAdapter < Litesearch::Schema::BasicAdapter

  def generate_sql
    super
    @sql[:move_content] = "ALTER TABLE #{name}_content RENAME TO #{name}_content_temp"
    @sql[:adjust_temp_content] = "UPDATE sqlite_schema SET sql (SELECT sql FROM sqlite_schema WHERE name = '#{name}_content') WHERE name = #{name}_content_temp"
    @sql[:restore_content] = "ALTER TABLE #{name}_content_temp RENAME TO #{name}_content"
    @sql[:rebuild] = "INSERT INTO #{name}(#{name}) VALUES ('rebuild')"
    @sql[:drop_content_table] = "DROP TABLE #{name}_content"
    @sql[:drop_content_col] = :drop_content_col_sql
    @sql[:create_content_table] = :create_content_table_sql
    @sql[:search] = "SELECT rowid AS id, *, -rank AS search_rank FROM #{name}(:term) WHERE rank !=0 ORDER BY rank LIMIT :limit OFFSET :offset"
  end

  private

  def create_index_sql(active = false)
    col_names = active ? active_col_names_sql : col_names_sql
    "CREATE VIRTUAL TABLE #{name} USING FTS5(#{col_names}, tokenize='#{tokenizer_sql}')"
  end

  def drop_content_col_sql(col_index)
    "ALTER TABLE #{name}_content DROP COLUMN c#{col_index}"
  end

  def create_content_table_sql(count)
    cols = []
    count.times{|i| cols << "c#{i}" }
    "CREATE TABLE #{name}_content(id INTEGER PRIMARY KEY, #{cols.join(', ')})"
  end

end
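Usage note (not part of the diff): the standalone adapter is the default type in DEFAULT_SCHEMA and keeps its own FTS5 content table, so it supports the FTS5 'rebuild' command. A closing sketch with assumed names:

# Assumed names; driving the default (standalone) adapter through the schema.
schema = Litesearch::Schema.new
schema.name :docs_idx
schema.fields [:title, :body]
schema.post_init                 # type defaults to :standalone, tokenizer to :porter

schema.sql_for(:create_index)
# => "CREATE VIRTUAL TABLE docs_idx USING FTS5(title, body,
#     tokenize='porter unicode61 remove_diacritics 2')"
schema.sql_for(:rebuild)  # => "INSERT INTO docs_idx(docs_idx) VALUES ('rebuild')"
schema.sql_for(:search)   # rowid, all columns and -rank AS search_rank, ordered by rank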