kozeki 0.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/CHANGELOG.md +5 -0
- data/Rakefile +8 -0
- data/bin/kozeki +5 -0
- data/lib/kozeki/build.rb +260 -0
- data/lib/kozeki/cli.rb +57 -0
- data/lib/kozeki/client.rb +48 -0
- data/lib/kozeki/collection.rb +136 -0
- data/lib/kozeki/collection_list.rb +27 -0
- data/lib/kozeki/config.rb +91 -0
- data/lib/kozeki/dsl.rb +65 -0
- data/lib/kozeki/filesystem.rb +46 -0
- data/lib/kozeki/item.rb +44 -0
- data/lib/kozeki/loader_chain.rb +20 -0
- data/lib/kozeki/local_filesystem.rb +83 -0
- data/lib/kozeki/markdown_loader.rb +52 -0
- data/lib/kozeki/queued_filesystem.rb +65 -0
- data/lib/kozeki/record.rb +48 -0
- data/lib/kozeki/source.rb +60 -0
- data/lib/kozeki/state.rb +326 -0
- data/lib/kozeki/version.rb +5 -0
- data/lib/kozeki.rb +8 -0
- data/sig/kozeki.rbs +4 -0
- metadata +140 -0
data/lib/kozeki/state.rb
ADDED
@@ -0,0 +1,326 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require 'sqlite3'
|
4
|
+
require 'fileutils'
|
5
|
+
|
6
|
+
require 'kozeki/record'
|
7
|
+
|
8
|
+
module Kozeki
  # State persists build bookkeeping — records, collection memberships,
  # item ids, and build history — in a SQLite database so incremental
  # builds can determine what changed since the last completed build.
  #
  # Rows carry a "pending_build_action" marker ('none', 'update', 'remove',
  # 'garbage_collection') that is set while a build is prepared and cleared
  # by #process_markers! once the build completes.
  class State
    # Schema version; all tables are dropped and recreated when this changes
    # (the state behaves like a cache, so losing it is safe).
    EPOCH = 1

    class NotFound < StandardError; end
    class DuplicatedItemIdError < StandardError; end

    # Open (or create) a state database at +path+ and ensure the schema
    # matches the current EPOCH.
    #
    # @param path [String, #to_path, nil] nil keeps the state in memory.
    # @return [State]
    def self.open(path:)
      FileUtils.mkdir_p File.dirname(path) if path
      state = new(path:)
      state.ensure_schema!
      state
    end

    # @param path [String, #to_path, nil] nil opens an in-memory database.
    def initialize(path:)
      @db = SQLite3::Database.new(
        path || ':memory:',
        {
          results_as_hash: true,
          strict: true, # Disable SQLITE_DBCONFIG_DQS_DDL, SQLITE_DBCONFIG_DQS_DML
        }
      )
    end

    attr_reader :db

    # Remove all rows while keeping the schema in place.
    def clear!
      @db.execute_batch <<~SQL
        delete from "records";
        delete from "collection_memberships";
        delete from "item_ids";
        delete from "builds";
      SQL
    end

    # @return [Boolean] true when at least one completed build exists.
    def build_exist?
      # Normalized to a real boolean; callers only rely on truthiness.
      !@db.execute(%{select * from "builds" where completed = 1 limit 1}).empty?
    end

    # Insert a new (incomplete) build row.
    #
    # @param t [Time] timestamp recorded as built_at (seconds since epoch)
    # @return [Integer] the rowid of the new build
    def create_build(t = Time.now)
      @db.execute(%{insert into "builds" ("built_at") values (?)}, [t.to_i])
      @db.last_insert_row_id
    end

    # @param id [Integer] build rowid returned by #create_build
    def mark_build_completed(id)
      @db.execute(%{update "builds" set completed = 1 where id = ?}, [id])
    end

    # Clear all markers and delete 'remove'd rows.
    # Deletes must run before the marker resets so 'remove' rows are not
    # resurrected as 'none'.
    def process_markers!
      @db.execute_batch <<~SQL
        delete from "records" where "pending_build_action" = 'remove';
        update "records" set "pending_build_action" = 'none', "id_was" = null where "pending_build_action" <> 'none';
        delete from "collection_memberships" where "pending_build_action" = 'remove';
        update "collection_memberships" set "pending_build_action" = 'none' where "pending_build_action" <> 'none';
        delete from "item_ids" where "pending_build_action" = 'remove';
        update "item_ids" set "pending_build_action" = 'none' where "pending_build_action" <> 'none';
      SQL
    end

    # Look up a record by its source path segments.
    # NOTE(review): unlike #find_record!, this does not exclude rows marked
    # 'remove' — presumably intentional, as path lookups happen while
    # processing filesystem events; confirm against callers.
    #
    # @param path [Array<String>] path segments, joined with '/' for storage
    # @return [Record]
    # @raise [NotFound] when no record exists for the path
    def find_record_by_path!(path)
      row = @db.execute(%{select * from "records" where "path" = ?}, [path.join('/')])[0]
      if row
        Record.from_row(row)
      else
        raise NotFound, "record not found for path=#{path.inspect}"
      end
    end

    # Look up the single live (not pending removal) record for an item id.
    #
    # @param id [String]
    # @return [Record]
    # @raise [NotFound] when no record exists for the id
    # @raise [DuplicatedItemIdError] when multiple records share the id
    def find_record!(id)
      rows = @db.execute(%{select * from "records" where "id" = ? and "pending_build_action" <> 'remove'}, [id])
      case rows.size
      when 0
        raise NotFound, "record not found for id=#{id.inspect}"
      when 1
        Record.from_row(rows[0])
      else
        raise DuplicatedItemIdError, "multiple records found for id=#{id.inspect}, resolve conflict first"
      end
    end

    # @param action [Symbol, String] pending_build_action value to match
    # @return [Array<Record>]
    def list_records_by_pending_build_action(action)
      rows = @db.execute(%{select * from "records" where "pending_build_action" = ?}, [action.to_s])
      rows.map { Record.from_row(_1) }
    end

    # @param id [String]
    # @return [Array<Record>] all records with the id, including duplicates
    def list_records_by_id(id)
      rows = @db.execute(%{select * from "records" where "id" = ?}, [id.to_s])
      rows.map { Record.from_row(_1) }
    end

    # @return [Array<Array<String>>] every known record path, as segments
    def list_record_paths
      rows = @db.execute(%{select "path" from "records"})
      rows.map { _1.fetch('path').split('/') } # XXX: consolidate with Record logic
    end

    # Upsert a record keyed by path. When the upsert changes the record's id,
    # the previous id is marked for garbage collection in "item_ids" so its
    # built output can be cleaned up.
    #
    # @param record [Record]
    # @return [Record] the saved record
    def save_record(record)
      new_row = @db.execute(<<~SQL, record.to_row)[0]
        insert into "records"
        ("path", "id", "timestamp", "mtime", "meta", "build", "pending_build_action")
        values
        (:path, :id, :timestamp, :mtime, :meta, :build, :pending_build_action)
        on conflict ("path") do update set
        "id" = excluded."id"
        , "timestamp" = excluded."timestamp"
        , "mtime" = excluded."mtime"
        , "meta" = excluded."meta"
        , "build" = excluded."build"
        , "pending_build_action" = excluded."pending_build_action"
        , "id_was" = "id"
        returning
        *
      SQL
      # "id_was" is only populated by the conflict branch: it holds the id the
      # row had before this upsert (nil on fresh insert).
      id_was = new_row['id_was']
      # The current id is (re)registered as live.
      @db.execute(<<~SQL, [record.id])
        insert into "item_ids" ("id") values (?)
        on conflict ("id") do update set
        "pending_build_action" = 'none'
      SQL
      case id_was
      when record.id
        record
      when nil
        record
      else
        # Id changed: schedule the old item id for garbage collection.
        @db.execute(<<~SQL, [id_was])
          insert into "item_ids" ("id") values (?)
          on conflict ("id") do update set
          "pending_build_action" = 'garbage_collection'
        SQL
        Record.from_row(new_row)
      end
    end

    # Update the pending_build_action marker of a single record (by path).
    # Marking a record for removal also schedules its item id for GC.
    #
    # @param record [Record]
    # @param pending_build_action [Symbol, String]
    # @raise [NotFound] when no row matched the record's path
    def set_record_pending_build_action(record, pending_build_action)
      path = record.path
      @db.execute(<<~SQL, {path: record.path_row, pending_build_action: pending_build_action.to_s})
        update "records"
        set "pending_build_action" = :pending_build_action
        where "path" = :path
      SQL
      raise NotFound, "record not found to update for path=#{path}" if @db.changes.zero?
      if pending_build_action == :remove
        # FIX: the placeholder was ":id" while the binding was positional;
        # use a positional placeholder to match.
        @db.execute(<<~SQL, [record.id])
          update "item_ids"
          set "pending_build_action" = 'garbage_collection'
          where "id" = ?
        SQL
      end
      nil
    end

    # Replace a record's collection set: mark all existing memberships as
    # 'remove', then upsert the given collections back as 'update'. Rows left
    # at 'remove' are purged by #process_markers!.
    #
    # @param record_id [String]
    # @param collections [Array<String>]
    def set_record_collections_pending(record_id, collections)
      @db.execute(%{update "collection_memberships" set pending_build_action = 'remove' where record_id = ?}, [record_id])
      return if collections.empty?
      # FIX: bind values flattened explicitly to match the flat (?,?,?) tuples.
      binds = collections.flat_map { [_1, record_id, 'update'] }
      @db.execute(<<~SQL, binds)
        insert into "collection_memberships"
        ("collection", "record_id", "pending_build_action")
        values
        #{collections.map { '(?,?,?)' }.join(',')}
        on conflict ("collection", "record_id") do update set
        "pending_build_action" = excluded."pending_build_action"
      SQL
    end

    # @return [Array<String>] item ids scheduled for garbage collection
    def list_item_ids_for_garbage_collection
      @db.execute(%{select "id" from "item_ids" where "pending_build_action" = 'garbage_collection'}).map do |row|
        row.fetch('id')
      end
    end

    # Mark an item id for deletion at the next #process_markers!.
    #
    # @param id [String]
    def mark_item_id_to_remove(id)
      @db.execute(%{update "item_ids" set "pending_build_action" = 'remove' where "id" = ?}, [id])
      nil
    end

    # @return [Array<String>] collections touched by any pending action
    def list_collection_names_pending
      @db.execute(%{select distinct "collection" from "collection_memberships" where "pending_build_action" <> 'none'}).map do |row|
        row.fetch('collection')
      end
    end

    # @return [Array<String>] all known collection names
    def list_collection_names
      @db.execute(%{select distinct "collection" from "collection_memberships"}).map do |row|
        row.fetch('collection')
      end
    end

    # List collection names starting with any of the given prefixes.
    # FIX: prefixes are now bound as parameters (the previous version
    # interpolated them through SQLite3::Database.quote, which escapes quotes
    # but is not a substitute for binding) and the conditions are joined with
    # ' or ' instead of a bare 'or'. Glob metacharacters in a prefix
    # (*, ?, [) keep their glob meaning, as before.
    #
    # @param prefixes [Array<String>]
    # @return [Array<String>]
    def list_collection_names_with_prefix(*prefixes)
      return list_collection_names() if prefixes.empty?
      conditions = prefixes.map { %{"collection" glob ?} }
      sql = %{select distinct "collection" from "collection_memberships" where (#{conditions.join(' or ')})}
      @db.execute(sql, prefixes.map { "#{_1}*" }).map do |row|
        row.fetch('collection')
      end
    end


    # Live records belonging to a collection; memberships or records pending
    # removal are excluded.
    #
    # @param collection [String]
    # @return [Array<Record>]
    def list_collection_records(collection)
      @db.execute(<<~SQL, [collection]).map { Record.from_row(_1) }
        select
          "records".*
        from "collection_memberships"
        inner join "records" on "collection_memberships"."record_id" = "records"."id"
        where
          "collection_memberships"."collection" = ?
          and "collection_memberships"."pending_build_action" <> 'remove'
          and "records"."pending_build_action" <> 'remove'
      SQL
    end

    # Count memberships of a collection.
    # NOTE(review): unlike #list_collection_records this counts rows pending
    # 'remove' too — confirm whether that asymmetry is intended by callers.
    #
    # @param collection [String]
    # @return [Integer]
    def count_collection_records(collection)
      @db.execute(<<~SQL, [collection])[0].fetch('cnt')
        select
          count(*) cnt
        from "collection_memberships"
        where
          "collection_memberships"."collection" = ?
      SQL
    end

    # Delegate to SQLite3::Database#transaction.
    def transaction(...)
      db.transaction(...)
    end

    def close
      db.close
    end

    # Ensure schema for the present version of Kozeki. As a state behaves like a cache, all tables will be removed
    # when version is different.
    def ensure_schema!
      return if current_epoch == EPOCH

      db.execute_batch <<~SQL
        drop table if exists "kozeki_schema_epoch";
        create table kozeki_schema_epoch (
          "epoch" integer not null
        ) strict;
      SQL

      db.execute_batch <<~SQL
        drop table if exists "records";
        create table "records" (
          path text not null unique,
          id text not null,
          timestamp integer not null,
          mtime integer not null,
          meta text not null,
          build text,
          pending_build_action text not null default 'none',
          id_was text
        ) strict;
      SQL
      # Non-unique index; during normal file operation we may see duplicated IDs while we process events one-by-one
      db.execute_batch <<~SQL
        drop index if exists "idx_records_id";
        create index "idx_records_id" on "records" ("id");
      SQL
      db.execute_batch <<~SQL
        drop index if exists "idx_records_pending";
        create index "idx_records_pending" on "records" ("pending_build_action");
      SQL

      db.execute_batch <<~SQL
        drop table if exists "item_ids";
        create table "item_ids" (
          id text unique not null,
          pending_build_action text not null default 'none'
        ) strict;
      SQL
      db.execute_batch <<~SQL
        drop index if exists "idx_item_ids_pending";
        create index "idx_item_ids_pending" on "item_ids" ("pending_build_action");
      SQL

      db.execute_batch <<~SQL
        drop table if exists "collection_memberships";
        create table "collection_memberships" (
          collection text not null,
          record_id text not null,
          pending_build_action text not null default 'none'
        ) strict;
      SQL
      db.execute_batch <<~SQL
        drop index if exists "idx_col_record";
        create unique index "idx_col_record" on "collection_memberships" ("collection", "record_id");
      SQL
      db.execute_batch <<~SQL
        drop index if exists "idx_col_pending";
        create index "idx_col_pending" on "collection_memberships" ("pending_build_action", "collection");
      SQL

      db.execute_batch <<~SQL
        drop table if exists "builds";
        create table "builds" (
          id integer primary key,
          built_at integer not null,
          completed integer not null default 0
        ) strict;
      SQL

      db.execute(%{delete from "kozeki_schema_epoch"})
      db.execute(%{insert into "kozeki_schema_epoch" values (?)}, [EPOCH])

      nil
    end

    # @return [Integer, nil] recorded schema epoch, or nil when the epoch
    #   table does not exist yet (fresh or pre-schema database)
    def current_epoch
      epoch_tables = @db.execute("select * from sqlite_schema where type = 'table' and name = 'kozeki_schema_epoch'")
      return nil if epoch_tables.empty?
      epoch = @db.execute(%{select "epoch" from "kozeki_schema_epoch" order by "epoch" desc limit 1})
      epoch&.dig(0, 'epoch')
    end
  end
end
|
data/lib/kozeki.rb
ADDED
data/sig/kozeki.rbs
ADDED
metadata
ADDED
@@ -0,0 +1,140 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: kozeki
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Sorah Fukumori
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2023-11-15 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: thor
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - "~>"
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '1.2'
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - "~>"
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '1.2'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: commonmarker
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - ">="
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: 1.0.0.pre11
|
34
|
+
type: :runtime
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - ">="
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: 1.0.0.pre11
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: sqlite3
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - ">="
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '0'
|
48
|
+
type: :runtime
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - ">="
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '0'
|
55
|
+
- !ruby/object:Gem::Dependency
|
56
|
+
name: listen
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - ">="
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
62
|
+
type: :runtime
|
63
|
+
prerelease: false
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - ">="
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '0'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: rspec
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - ">="
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '0'
|
76
|
+
type: :development
|
77
|
+
prerelease: false
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - ">="
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '0'
|
83
|
+
description:
|
84
|
+
email:
|
85
|
+
- her@sorah.jp
|
86
|
+
executables:
|
87
|
+
- kozeki
|
88
|
+
extensions: []
|
89
|
+
extra_rdoc_files: []
|
90
|
+
files:
|
91
|
+
- ".rspec"
|
92
|
+
- CHANGELOG.md
|
93
|
+
- Rakefile
|
94
|
+
- bin/kozeki
|
95
|
+
- lib/kozeki.rb
|
96
|
+
- lib/kozeki/build.rb
|
97
|
+
- lib/kozeki/cli.rb
|
98
|
+
- lib/kozeki/client.rb
|
99
|
+
- lib/kozeki/collection.rb
|
100
|
+
- lib/kozeki/collection_list.rb
|
101
|
+
- lib/kozeki/config.rb
|
102
|
+
- lib/kozeki/dsl.rb
|
103
|
+
- lib/kozeki/filesystem.rb
|
104
|
+
- lib/kozeki/item.rb
|
105
|
+
- lib/kozeki/loader_chain.rb
|
106
|
+
- lib/kozeki/local_filesystem.rb
|
107
|
+
- lib/kozeki/markdown_loader.rb
|
108
|
+
- lib/kozeki/queued_filesystem.rb
|
109
|
+
- lib/kozeki/record.rb
|
110
|
+
- lib/kozeki/source.rb
|
111
|
+
- lib/kozeki/state.rb
|
112
|
+
- lib/kozeki/version.rb
|
113
|
+
- sig/kozeki.rbs
|
114
|
+
homepage: https://github.com/sorah/kozeki
|
115
|
+
licenses:
|
116
|
+
- MIT
|
117
|
+
metadata:
|
118
|
+
homepage_uri: https://github.com/sorah/kozeki
|
119
|
+
source_code_uri: https://github.com/sorah/kozeki
|
120
|
+
post_install_message:
|
121
|
+
rdoc_options: []
|
122
|
+
require_paths:
|
123
|
+
- lib
|
124
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
125
|
+
requirements:
|
126
|
+
- - ">="
|
127
|
+
- !ruby/object:Gem::Version
|
128
|
+
version: 3.1.0
|
129
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
130
|
+
requirements:
|
131
|
+
- - ">="
|
132
|
+
- !ruby/object:Gem::Version
|
133
|
+
version: '0'
|
134
|
+
requirements: []
|
135
|
+
rubygems_version: 3.4.6
|
136
|
+
signing_key:
|
137
|
+
specification_version: 4
|
138
|
+
summary: Convert markdown files to rendered JSON files with index for static website
|
139
|
+
blogging
|
140
|
+
test_files: []
|