litestack 0.3.0 → 0.4.2
- checksums.yaml +4 -4
- data/.standard.yml +3 -0
- data/BENCHMARKS.md +34 -7
- data/CHANGELOG.md +21 -0
- data/Gemfile +1 -5
- data/Gemfile.lock +92 -0
- data/README.md +120 -6
- data/ROADMAP.md +45 -0
- data/Rakefile +3 -1
- data/WHYLITESTACK.md +1 -1
- data/assets/litecache_metrics.png +0 -0
- data/assets/litedb_metrics.png +0 -0
- data/assets/litemetric_logo_teal.png +0 -0
- data/assets/litesearch_logo_teal.png +0 -0
- data/bench/bench.rb +17 -10
- data/bench/bench_cache_rails.rb +10 -13
- data/bench/bench_cache_raw.rb +17 -22
- data/bench/bench_jobs_rails.rb +19 -13
- data/bench/bench_jobs_raw.rb +17 -10
- data/bench/bench_queue.rb +4 -6
- data/bench/rails_job.rb +5 -7
- data/bench/skjob.rb +4 -4
- data/bench/uljob.rb +6 -6
- data/lib/action_cable/subscription_adapter/litecable.rb +5 -8
- data/lib/active_job/queue_adapters/litejob_adapter.rb +6 -8
- data/lib/active_record/connection_adapters/litedb_adapter.rb +65 -75
- data/lib/active_support/cache/litecache.rb +38 -41
- data/lib/generators/litestack/install/install_generator.rb +3 -3
- data/lib/generators/litestack/install/templates/database.yml +7 -1
- data/lib/litestack/liteboard/liteboard.rb +269 -149
- data/lib/litestack/litecable.rb +44 -40
- data/lib/litestack/litecable.sql.yml +22 -11
- data/lib/litestack/litecache.rb +80 -89
- data/lib/litestack/litecache.sql.yml +81 -22
- data/lib/litestack/litecache.yml +1 -1
- data/lib/litestack/litedb.rb +39 -38
- data/lib/litestack/litejob.rb +31 -31
- data/lib/litestack/litejobqueue.rb +107 -106
- data/lib/litestack/litemetric.rb +83 -95
- data/lib/litestack/litemetric.sql.yml +244 -234
- data/lib/litestack/litemetric_collector.sql.yml +38 -41
- data/lib/litestack/litequeue.rb +39 -41
- data/lib/litestack/litequeue.sql.yml +39 -31
- data/lib/litestack/litescheduler.rb +84 -0
- data/lib/litestack/litesearch/index.rb +260 -0
- data/lib/litestack/litesearch/model.rb +179 -0
- data/lib/litestack/litesearch/schema.rb +190 -0
- data/lib/litestack/litesearch/schema_adapters/backed_adapter.rb +143 -0
- data/lib/litestack/litesearch/schema_adapters/basic_adapter.rb +137 -0
- data/lib/litestack/litesearch/schema_adapters/contentless_adapter.rb +14 -0
- data/lib/litestack/litesearch/schema_adapters/standalone_adapter.rb +31 -0
- data/lib/litestack/litesearch/schema_adapters.rb +4 -0
- data/lib/litestack/litesearch.rb +34 -0
- data/lib/litestack/litesupport.rb +85 -186
- data/lib/litestack/railtie.rb +1 -1
- data/lib/litestack/version.rb +2 -2
- data/lib/litestack.rb +7 -4
- data/lib/railties/rails/commands/dbconsole.rb +11 -15
- data/lib/sequel/adapters/litedb.rb +18 -22
- data/lib/sequel/adapters/shared/litedb.rb +168 -168
- data/scripts/build_metrics.rb +91 -0
- data/scripts/test_cable.rb +30 -0
- data/scripts/test_job_retry.rb +33 -0
- data/scripts/test_metrics.rb +60 -0
- data/template.rb +2 -2
- metadata +112 -7
data/lib/litestack/litesearch/model.rb
@@ -0,0 +1,179 @@
+module Litesearch::Model
+  def self.included(klass)
+    klass.include InstanceMethods
+    klass.extend ClassMethods
+    klass.attribute :search_rank, :float if klass.respond_to? :attribute
+    if !defined?(Sequel::Model).nil? && klass.ancestors.include?(Sequel::Model)
+      klass.include Litesearch::Model::SequelInstanceMethods
+      klass.extend Litesearch::Model::SequelClassMethods
+      Sequel::Model.extend Litesearch::Model::BaseClassMethods
+    elsif !defined?(ActiveRecord::Base).nil? && klass.ancestors.include?(ActiveRecord::Base)
+      klass.include Litesearch::Model::ActiveRecordInstanceMethods
+      klass.extend Litesearch::Model::ActiveRecordClassMethods
+      ActiveRecord::Base.extend Litesearch::Model::BaseClassMethods
+    end
+  end
+
+  module BaseClassMethods
+    def search_models
+      @@models ||= {}
+    end
+  end
+
+  module InstanceMethods
+    def similar(limit = 10)
+      conn = self.class.get_connection
+      idx = conn.search_index(self.class.send(:index_name))
+      r_a_h = conn.results_as_hash
+      conn.results_as_hash = true
+      rs = idx.similar(id, limit)
+      conn.results_as_hash = r_a_h
+      result = []
+      rs.each do |row|
+        obj = self.class.fetch_row(row["id"])
+        obj.search_rank = row["search_rank"]
+        result << obj
+      end
+      result
+    end
+
+  end
+
+  module ClassMethods
+    def litesearch
+      idx = get_connection.search_index(index_name) do |schema|
+        schema.type :backed
+        schema.table table_name.to_sym
+        yield schema
+        schema.post_init
+        @schema = schema # save the schema
+      end
+      if !defined?(Sequel::Model).nil? && ancestors.include?(Sequel::Model)
+        Sequel::Model.search_models[name] = self
+      elsif !defined?(ActiveRecord::Base).nil? && ancestors.include?(ActiveRecord::Base)
+        ActiveRecord::Base.search_models[name] = self
+      end
+      idx
+    end
+
+    def rebuild_index!
+      get_connection.search_index(index_name).rebuild!
+    end
+
+    def drop_index!
+      get_connection.search_index(index_name).drop!
+    end
+
+    def search_all(term, options = {})
+      options[:offset] ||= 0
+      options[:limit] ||= 25
+      options[:term] = term
+      selects = []
+      if (models = options[:models])
+        models_hash = {}
+        models.each do |model|
+          models_hash[model.name] = model
+        end
+      else
+        models_hash = search_models
+      end
+      # remove the models from the options hash before passing it to the query
+      options.delete(:models)
+      models_hash.each do |name, klass|
+        selects << "SELECT '#{name}' AS model, rowid, -rank AS search_rank FROM #{index_name_for_table(klass.table_name)}(:term)"
+      end
+      conn = get_connection
+      sql = selects.join(" UNION ") << " ORDER BY search_rank DESC LIMIT :limit OFFSET :offset"
+      result = []
+      rs = conn.query(sql, options) # , options[:limit], options[:offset])
+      rs.each_hash do |row|
+        obj = models_hash[row["model"]].fetch_row(row["rowid"])
+        obj.search_rank = row["search_rank"]
+        result << obj
+      end
+      rs.close
+      result
+    end
+
+    def index_name
+      "#{table_name}_search_idx"
+    end
+
+    def index_name_for_table(table)
+      "#{table}_search_idx"
+    end
+
+    # create a new instance of self with the row as an argument
+    def create_instance(row)
+      new(row)
+    end
+  end
+
+  module ActiveRecordInstanceMethods; end
+
+  module ActiveRecordClassMethods
+    def get_connection
+      connection.raw_connection
+    end
+
+    def fetch_row(id)
+      find(id)
+    end
+
+    def search(term)
+      self.select(
+        "#{table_name}.*"
+      ).joins(
+        "INNER JOIN #{index_name} ON #{table_name}.id = #{index_name}.rowid AND rank != 0 AND #{index_name} MATCH ", Arel.sql("'#{term}'")
+      ).select(
+        "-#{index_name}.rank AS search_rank"
+      ).order(
+        Arel.sql("#{index_name}.rank")
+      )
+    end
+
+    def create_instance(row)
+      instantiate(row)
+    end
+  end
+
+  module SequelInstanceMethods
+    def search_rank
+      @values[:search_rank]
+    end
+
+    def search_rank=(rank)
+      @values[:search_rank] = rank
+    end
+  end
+
+  module SequelClassMethods
+    def fetch_row(id)
+      self[id]
+    end
+
+    def get_connection
+      db.instance_variable_get(:@raw_db)
+    end
+
+    def search(term)
+      dataset.select(
+        Sequel.lit("#{table_name}.*, -#{index_name}.rank AS search_rank")
+      ).inner_join(
+        Sequel.lit("#{index_name}(:term) ON #{table_name}.id = #{index_name}.rowid AND rank != 0", {term: term})
+      ).order(
+        Sequel.lit("rank")
+      )
+    end
+
+    def create_instance(row)
+      # we need to convert keys to symbols first!
+      row.keys.each do |k|
+        next if k.is_a? Symbol
+        row[k.to_sym] = row[k]
+        row.delete(k)
+      end
+      call(row)
+    end
+  end
+end
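For orientation, a hedged usage sketch of the Litesearch::Model mixin above with ActiveRecord. The Post model, its title/body/user_id columns, and the users table are hypothetical placeholders; the litesearch DSL and the search, similar, and rebuild_index! calls are the ones defined in the file.

  # Hypothetical model; assumes posts(title, body, user_id) and users(id, name) tables
  class Post < ActiveRecord::Base
    include Litesearch::Model

    litesearch do |schema|
      schema.fields [:title, :body]
      schema.field :author, target: "users.name", col: :user_id # indexed from the joined users table
      schema.field :title, weight: 10 # title matches rank higher than body matches
      schema.tokenizer :porter
    end
  end

  Post.search("sqlite").limit(5).each { |post| puts [post.title, post.search_rank].inspect }
  Post.find(1).similar(5)  # records most similar to post 1, best match first
  Post.rebuild_index!      # repopulate the FTS5 index from the posts table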
data/lib/litestack/litesearch/schema.rb
@@ -0,0 +1,190 @@
+require_relative "./schema_adapters"
+
+class Litesearch::Schema
+  TOKENIZERS = {
+    porter: "porter unicode61 remove_diacritics 2",
+    unicode: "unicode61 remove_diacritics 2",
+    ascii: "ascii",
+    trigram: "trigram"
+  }
+
+  INDEX_TYPES = {
+    standalone: Litesearch::Schema::StandaloneAdapter,
+    contentless: Litesearch::Schema::ContentlessAdapter,
+    backed: Litesearch::Schema::BackedAdapter
+  }
+
+  DEFAULT_SCHEMA = {
+    name: nil,
+    type: :standalone,
+    fields: nil,
+    table: nil,
+    filter_column: nil,
+    tokenizer: :porter,
+    auto_create: true,
+    auto_modify: true,
+    rebuild_on_create: false,
+    rebuild_on_modify: false
+  }
+
+  attr_accessor :schema
+
+  def initialize(schema = {})
+    @schema = schema # DEFAULT_SCHEMA.merge(schema)
+    @schema[:fields] = {} unless @schema[:fields]
+  end
+
+  # schema definition API
+  def name(new_name)
+    @schema[:name] = new_name
+  end
+
+  def type(new_type)
+    raise "Unknown index type" if INDEX_TYPES[new_type].nil?
+    @schema[:type] = new_type
+  end
+
+  def table(table_name)
+    @schema[:table] = table_name
+  end
+
+  def fields(field_names)
+    field_names.each { |f| field f }
+  end
+
+  def field(name, attributes = {})
+    name = name.to_s.downcase.to_sym
+    attributes = {weight: 1}.merge(attributes).select { |k, v| allowed_attributes.include?(k) } # only allow attributes we know, to ease schema comparison later
+    @schema[:fields][name] = attributes
+  end
+
+  def tokenizer(new_tokenizer)
+    raise "Unknown tokenizer" if TOKENIZERS[new_tokenizer].nil?
+    @schema[:tokenizer] = new_tokenizer
+  end
+
+  def filter_column(filter_column)
+    @schema[:filter_column] = filter_column
+  end
+
+  def auto_create(boolean)
+    @schema[:auto_create] = boolean
+  end
+
+  def auto_modify(boolean)
+    @schema[:auto_modify] = boolean
+  end
+
+  def rebuild_on_create(boolean)
+    @schema[:rebuild_on_create] = boolean
+  end
+
+  def rebuild_on_modify(boolean)
+    @schema[:rebuild_on_modify] = boolean
+  end
+
+  def post_init
+    @schema = DEFAULT_SCHEMA.merge(@schema)
+  end
+
+  # schema sql generation API
+
+  def sql_for(method, *args)
+    adapter.sql_for(method, *args)
+  end
+
+  # schema data structure API
+  def get(key)
+    @schema[key]
+  end
+
+  def get_field(name)
+    @schema[:fields][name]
+  end
+
+  def adapter
+    @adapter ||= INDEX_TYPES[@schema[:type]].new(@schema)
+  end
+
+  def reset_sql
+    adapter.generate_sql
+  end
+
+  def order_fields(old_schema)
+    adapter.order_fields(old_schema)
+  end
+
+  # should we do this at the schema objects level?
+  def compare(other_schema)
+    other_schema = other_schema.schema
+    # are the schemas identical?
+    # 1 - same fields?
+    [:type, :tokenizer, :name, :table].each do |key|
+      other_schema[key] = @schema[key] if other_schema[key].nil?
+    end
+    if @schema[:type] != other_schema[:type]
+      raise Litesearch::SchemaChangeException.new "Cannot change the index type, please drop the index before creating it again with the new type"
+    end
+    changes = {tokenizer: @schema[:tokenizer] != other_schema[:tokenizer], table: @schema[:table] != other_schema[:table], removed_fields_count: 0, filter_column: @schema[:filter_column] != other_schema[:filter_column]}
+    # check tokenizer changes
+    if changes[:tokenizer] && !other_schema[:rebuild_on_modify]
+      raise Litesearch::SchemaChangeException.new "Cannot change the tokenizer without an index rebuild!"
+    end
+
+    # check field changes
+    keys = @schema[:fields].keys.sort
+    other_keys = other_schema[:fields].keys.sort
+
+    extra_keys = other_keys - keys
+    extra_keys.each do |key|
+      if other_schema[:fields][key][:weight] == 0
+        other_schema[:fields].delete(key)
+      end
+    end
+
+    other_keys = other_schema[:fields].keys.sort
+
+    changes[:fields] = keys != other_keys # only acceptable change is adding extra fields
+    changes[:extra_fields_count] = other_keys.count - keys.count
+    # check for missing fields (please note that adding fields can work without a rebuild)
+    if keys - other_keys != []
+      raise Litesearch::SchemaChangeException.new "Missing fields from existing schema, they have to exist with weight zero until the next rebuild!"
+    end
+
+    # check field weights
+    weights = keys.collect { |key| @schema[:fields][key][:weight] }
+    other_weights = other_keys.collect { |key| other_schema[:fields][key][:weight] }
+    changes[:weights] = weights != other_weights # will always be true if fields are added
+    if (removed_count = other_weights.count { |w| w == 0 }) > 0
+      changes[:removed_fields_count] = removed_count
+    end
+    # check field attributes, only backed tables have attributes
+    attrs = keys.collect do |key|
+      f = @schema[:fields][key].dup
+      f.delete(:weight)
+      f.select { |k, v| allowed_attributes.include? k }
+    end
+    other_attrs = other_keys.collect do |key|
+      f = other_schema[:fields][key].dup
+      f.delete(:weight)
+      f.select { |k, v| allowed_attributes.include? k }
+    end
+    changes[:attributes] if other_attrs != attrs # this means that we will need to redefine the triggers if any are there and also the table definition if needed
+
+    # return the changes
+    changes
+  end
+
+  def clean
+    removable = @schema[:fields].select { |name, f| f[:weight] == 0 }.collect { |name, f| name }
+    removable.each { |name| @schema[:fields].delete(name) }
+  end
+
+  def allowed_attributes
+    [:weight, :col, :target]
+  end
+end
+
+class Litesearch::SchemaException < StandardError; end
+
+class Litesearch::SchemaChangeException < StandardError; end
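The schema DSL above can also be driven directly to inspect the SQL an adapter will emit, which helps when debugging an index definition. A minimal sketch, assuming litestack is already required; the docs_idx name and the title/body fields are made up:

  schema = Litesearch::Schema.new
  schema.name :docs_idx
  schema.type :standalone
  schema.fields [:title, :body]
  schema.field :title, weight: 10
  schema.tokenizer :porter
  schema.post_init # merge in DEFAULT_SCHEMA before the adapter is instantiated

  # sql_for delegates to the StandaloneAdapter selected via INDEX_TYPES
  schema.sql_for(:create_index)
  # => "CREATE VIRTUAL TABLE docs_idx USING FTS5(title, body, tokenize='porter unicode61 remove_diacritics 2')"
  schema.sql_for(:rebuild)
  # => "INSERT INTO docs_idx(docs_idx) VALUES ('rebuild')"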
data/lib/litestack/litesearch/schema_adapters/backed_adapter.rb
@@ -0,0 +1,143 @@
+class Litesearch::Schema::BackedAdapter < Litesearch::Schema::ContentlessAdapter
+  private
+
+  def table
+    @schema[:table]
+  end
+
+  def generate_sql
+    super
+    @sql[:rebuild] = :rebuild_sql
+    @sql[:drop_primary_triggers] = :drop_primary_triggers_sql
+    @sql[:drop_secondary_triggers] = :drop_secondary_triggers_sql
+    @sql[:create_primary_triggers] = :create_primary_triggers_sql
+    @sql[:create_secondary_triggers] = :create_secondary_triggers_sql
+  end
+
+  def drop_primary_triggers_sql
+    <<~SQL
+      DROP TRIGGER IF EXISTS #{name}_insert;
+      DROP TRIGGER IF EXISTS #{name}_update;
+      DROP TRIGGER IF EXISTS #{name}_update_not;
+      DROP TRIGGER IF EXISTS #{name}_delete;
+    SQL
+  end
+
+  def create_primary_triggers_sql(active = false)
+    when_stmt = "TRUE"
+    cols = active_cols_names
+    if (filter = @schema[:filter_column])
+      when_stmt = "NEW.#{filter} = TRUE"
+      cols << filter
+    end
+
+    <<-SQL
+      CREATE TRIGGER #{name}_insert AFTER INSERT ON #{table} WHEN #{when_stmt} BEGIN
+        INSERT OR REPLACE INTO #{name}(rowid, #{active_field_names.join(", ")}) VALUES (NEW.rowid, #{trigger_cols_sql});
+      END;
+      CREATE TRIGGER #{name}_update AFTER UPDATE OF #{cols.join(", ")} ON #{table} WHEN #{when_stmt} BEGIN
+        INSERT OR REPLACE INTO #{name}(rowid, #{active_field_names.join(", ")}) VALUES (NEW.rowid, #{trigger_cols_sql});
+      END;
+      CREATE TRIGGER #{name}_update_not AFTER UPDATE OF #{cols.join(", ")} ON #{table} WHEN NOT #{when_stmt} BEGIN
+        DELETE FROM #{name} WHERE rowid = NEW.rowid;
+      END;
+      CREATE TRIGGER #{name}_delete AFTER DELETE ON #{table} BEGIN
+        DELETE FROM #{name} WHERE rowid = OLD.id;
+      END;
+    SQL
+  end
+
+  def drop_secondary_trigger_sql(target_table, target_col, col)
+    "DROP TRIGGER IF EXISTS #{target_table}_#{target_col}_#{col}_#{name}_update;"
+  end
+
+  def create_secondary_trigger_sql(target_table, target_col, col)
+    <<~SQL
+      CREATE TRIGGER #{target_table}_#{target_col}_#{col}_#{name}_update AFTER UPDATE OF #{target_col} ON #{target_table} BEGIN
+        #{rebuild_sql} AND #{table}.#{col} = NEW.id;
+      END;
+    SQL
+  end
+
+  def drop_secondary_triggers_sql
+    sql = ""
+    @schema[:fields].each do |name, field|
+      if field[:trigger_sql]
+        sql << drop_secondary_trigger_sql(field[:target_table], field[:target_col], field[:col])
+      end
+    end
+    sql.empty? ? nil : sql
+  end
+
+  def create_secondary_triggers_sql
+    sql = ""
+    @schema[:fields].each do |name, field|
+      if field[:trigger_sql]
+        sql << create_secondary_trigger_sql(field[:target_table], field[:target_col], field[:col])
+      end
+    end
+    sql.empty? ? nil : sql
+  end
+
+  def rebuild_sql
+    conditions = ""
+    jcs = join_conditions_sql
+    fs = filter_sql
+    conditions = " ON #{jcs} #{fs}" unless jcs.empty? && fs.empty?
+    "INSERT OR REPLACE INTO #{name}(rowid, #{active_field_names.join(", ")}) SELECT #{table}.id, #{select_cols_sql} FROM #{join_tables_sql} #{conditions}"
+  end
+
+  def enrich_schema
+    @schema[:fields].each do |name, field|
+      if field[:target] && !field[:target].start_with?("#{table}.")
+        field[:target] = field[:target].downcase
+        target_table, target_col = field[:target].split(".")
+        field[:col] = "#{name}_id".to_sym unless field[:col]
+        field[:target_table] = target_table.to_sym
+        field[:target_col] = target_col.to_sym
+        field[:sql] = "(SELECT #{field[:target_col]} FROM #{field[:target_table]} WHERE id = NEW.#{field[:col]})"
+        field[:trigger_sql] = true # create_secondary_trigger_sql(field[:target_table], field[:target_col], field[:col])
+        field[:target_table_alias] = "#{field[:target_table]}_#{name}"
+      else
+        field[:col] = name unless field[:col]
+        field[:sql] = field[:col]
+        field[:target_table] = @schema[:table]
+        field[:target] = "#{@schema[:table]}.#{field[:sql]}"
+      end
+    end
+  end
+
+  def filter_sql
+    sql = ""
+    sql << " AND #{@schema[:filter_column]} = TRUE " if @schema[:filter_column]
+    sql
+  end
+
+  def trigger_cols_sql
+    active_fields.collect do |name, field|
+      field[:trigger_sql] ? field[:sql] : "NEW.#{field[:sql]}"
+    end.join(", ")
+  end
+
+  def select_cols_sql
+    active_fields.collect do |name, field|
+      (!field[:trigger_sql].nil?) ? "#{field[:target_table_alias]}.#{field[:target_col]}" : field[:target]
+    end.join(", ")
+  end
+
+  def join_tables_sql
+    tables = [@schema[:table]]
+    active_fields.each do |name, field|
+      tables << "#{field[:target_table]} AS #{field[:target_table_alias]}" if field[:trigger_sql]
+    end
+    tables.uniq.join(", ")
+  end
+
+  def join_conditions_sql
+    conditions = []
+    active_fields.each do |name, field|
+      conditions << "#{field[:target_table_alias]}.id = #{@schema[:table]}.#{field[:col]}" if field[:trigger_sql]
+    end
+    conditions.join(" AND ")
+  end
+end
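To see what the backed adapter wires up for a source table, the same Schema object can be pointed at a table and asked for its trigger and rebuild statements. Again a sketch under the same assumptions; the posts table and its title/body columns are hypothetical:

  schema = Litesearch::Schema.new
  schema.name :posts_search_idx
  schema.type :backed
  schema.table :posts
  schema.fields [:title, :body]
  schema.post_init

  # AFTER INSERT/UPDATE/DELETE triggers that keep the FTS5 table in sync with posts
  puts schema.sql_for(:create_primary_triggers)

  # Bulk (re)population of the index straight from the source table, roughly:
  # INSERT OR REPLACE INTO posts_search_idx(rowid, title, body) SELECT posts.id, posts.title, posts.body FROM posts
  puts schema.sql_for(:rebuild)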
data/lib/litestack/litesearch/schema_adapters/basic_adapter.rb
@@ -0,0 +1,137 @@
+class Litesearch::Schema::BasicAdapter
+  def initialize(schema)
+    @schema = schema
+    @sql = {}
+    enrich_schema
+    generate_sql
+  end
+
+  def name
+    @schema[:name]
+  end
+
+  def table
+    @schema[:table]
+  end
+
+  def fields
+    @schema[:fields]
+  end
+
+  def field_names
+    @schema[:fields].keys
+  end
+
+  def active_fields
+    @schema[:fields].select { |k, v| v[:weight] != 0 }
+  end
+
+  def active_field_names
+    active_fields.keys
+  end
+
+  def active_cols_names
+    active_fields.collect { |k, v| v[:col] }
+  end
+
+  def weights
+    @schema[:fields].values.collect { |v| v[:weight].to_f }
+  end
+
+  def active_weights
+    active_fields.values.collect { |v| v[:weight].to_f }
+  end
+
+  def tokenizer_sql
+    Litesearch::Schema::TOKENIZERS[@schema[:tokenizer]]
+  end
+
+  def order_fields(old_schema)
+    new_fields = {}
+    old_field_names = old_schema.schema[:fields].keys
+    old_field_names.each do |name|
+      new_fields[name] = @schema[:fields].delete(name)
+    end
+    missing_field_names = field_names - old_field_names
+    missing_field_names.each do |name|
+      new_fields[name] = @schema[:fields].delete(name)
+    end
+    @schema[:fields] = new_fields # this should be in order now
+    generate_sql
+    enrich_schema
+  end
+
+  def sql_for(method, *args)
+    if (sql = @sql[method])
+      if sql.is_a? String
+        sql
+      elsif sql.is_a? Proc
+        sql.call(*args)
+      elsif sql.is_a? Symbol
+        send(sql, *args)
+      elsif sql.is_a? Litesearch::SchemaChangeException
+        raise sql
+      end
+    end
+  end
+
+  def generate_sql
+    @sql[:create_index] = :create_index_sql
+    @sql[:create_vocab_tables] = :create_vocab_tables_sql
+    @sql[:insert] = "INSERT OR REPLACE INTO #{name}(rowid, #{active_col_names_sql}) VALUES (:id, #{active_col_names_var_sql}) RETURNING rowid"
+    @sql[:delete] = "DELETE FROM #{name} WHERE rowid = :id"
+    @sql[:count] = "SELECT count(*) FROM #{name}(:term)"
+    @sql[:count_all] = "SELECT count(*) FROM #{name}"
+    @sql[:delete_all] = "DELETE FROM #{name}"
+    @sql[:drop] = "DROP TABLE #{name}"
+    @sql[:expand_data] = "UPDATE #{name}_data SET block = block || zeroblob(:length) WHERE id = 1"
+    @sql[:expand_docsize] = "UPDATE #{name}_docsize SET sz = sz || zeroblob(:length)"
+    @sql[:ranks] = :ranks_sql
+    @sql[:set_config_value] = "INSERT OR REPLACE INTO #{name}_config(k, v) VALUES (:key, :value)"
+    @sql[:get_config_value] = "SELECT v FROM #{name}_config WHERE k = :key"
+    @sql[:search] = "SELECT rowid AS id, -rank AS search_rank FROM #{name}(:term) WHERE rank !=0 ORDER BY rank LIMIT :limit OFFSET :offset"
+    @sql[:similarity_terms] = "SELECT DISTINCT term FROM #{name}_instance WHERE doc = :id AND FLOOR(term) IS NULL AND LENGTH(term) > 2 AND NOT instr(term, ' ') AND NOT instr(term, '-') AND NOT instr(term, ':') AND NOT instr(term, '#') AND NOT instr(term, '_') LIMIT 15"
+    @sql[:similarity_query] = "SELECT group_concat('\"' || term || '\"', ' OR ') FROM #{name}_row WHERE term IN (#{@sql[:similarity_terms]})"
+    @sql[:similarity_search] = "SELECT rowid AS id, -rank AS search_rank FROM #{name}(:term) WHERE rowid != :id ORDER BY rank LIMIT :limit"
+    @sql[:similar] = "SELECT rowid AS id, -rank AS search_rank FROM #{name} WHERE #{name} = (#{@sql[:similarity_query]}) AND rowid != :id ORDER BY rank LIMIT :limit"
+    @sql[:update_index] = "UPDATE sqlite_schema SET sql = :sql WHERE name = '#{name}'"
+    @sql[:update_content_table] = "UPDATE sqlite_schema SET sql = :sql WHERE name = '#{name}_content'"
+  end
+
+  private
+
+  def create_vocab_tables_sql
+    <<~SQL
+      CREATE VIRTUAL TABLE IF NOT EXISTS #{name}_row USING fts5vocab(#{name}, row);
+      CREATE VIRTUAL TABLE IF NOT EXISTS #{name}_instance USING fts5vocab(#{name}, instance);
+    SQL
+  end
+
+  def ranks_sql(active = false)
+    weights_sql = if active
+      weights.join(", ")
+    else
+      active_weights.join(", ")
+    end
+    "INSERT INTO #{name}(#{name}, rank) VALUES ('rank', 'bm25(#{weights_sql})')"
+  end
+
+  def active_col_names_sql
+    active_field_names.join(", ")
+  end
+
+  def active_col_names_var_sql
+    ":#{active_field_names.join(", :")}"
+  end
+
+  def col_names_sql
+    field_names.join(", ")
+  end
+
+  def col_names_var_sql
+    ":#{field_names.join(", :")}"
+  end
+
+  def enrich_schema
+  end
+end
data/lib/litestack/litesearch/schema_adapters/contentless_adapter.rb
@@ -0,0 +1,14 @@
+class Litesearch::Schema::ContentlessAdapter < Litesearch::Schema::BasicAdapter
+  private
+
+  def generate_sql
+    super
+    # @sql[:rebuild_index] = Litesearch::SchemaChangeException.new("You cannot rebuild a contentless index")
+    # @sql[:rebuild] = Litesearch::SchemaChangeException.new("You cannot rebuild a contentless index")
+  end
+
+  def create_index_sql(active = false)
+    col_names = active ? active_col_names_sql : col_names_sql
+    "CREATE VIRTUAL TABLE #{name} USING FTS5(#{col_names}, content='', contentless_delete=1, tokenize='#{tokenizer_sql}')"
+  end
+end
data/lib/litestack/litesearch/schema_adapters/standalone_adapter.rb
@@ -0,0 +1,31 @@
+class Litesearch::Schema::StandaloneAdapter < Litesearch::Schema::BasicAdapter
+  def generate_sql
+    super
+    @sql[:move_content] = "ALTER TABLE #{name}_content RENAME TO #{name}_content_temp"
+    @sql[:adjust_temp_content] = "UPDATE sqlite_schema SET sql = (SELECT sql FROM sqlite_schema WHERE name = '#{name}_content') WHERE name = '#{name}_content_temp'"
+    @sql[:restore_content] = "ALTER TABLE #{name}_content_temp RENAME TO #{name}_content"
+    @sql[:rebuild] = "INSERT INTO #{name}(#{name}) VALUES ('rebuild')"
+    @sql[:similar] = "SELECT rowid AS id, *, -rank AS search_rank FROM #{name} WHERE #{name} = (#{@sql[:similarity_query]}) AND rowid != :id ORDER BY rank LIMIT :limit"
+    @sql[:drop_content_table] = "DROP TABLE #{name}_content"
+    @sql[:drop_content_col] = :drop_content_col_sql
+    @sql[:create_content_table] = :create_content_table_sql
+    @sql[:search] = "SELECT rowid AS id, *, -rank AS search_rank FROM #{name}(:term) WHERE rank !=0 ORDER BY rank LIMIT :limit OFFSET :offset"
+  end
+
+  private
+
+  def create_index_sql(active = false)
+    col_names = active ? active_col_names_sql : col_names_sql
+    "CREATE VIRTUAL TABLE #{name} USING FTS5(#{col_names}, tokenize='#{tokenizer_sql}')"
+  end
+
+  def drop_content_col_sql(col_index)
+    "ALTER TABLE #{name}_content DROP COLUMN c#{col_index}"
+  end
+
+  def create_content_table_sql(count)
+    cols = []
+    count.times { |i| cols << "c#{i}" }
+    "CREATE TABLE #{name}_content(id INTEGER PRIMARY KEY, #{cols.join(", ")})"
+  end
+end