wax_tasks 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +2 -0
- data/lib/wax/branch.rb +70 -0
- data/lib/wax/collection.rb +49 -0
- data/lib/wax/iiif_collection.rb +63 -0
- data/lib/wax/index.rb +70 -0
- data/lib/wax/lunr_collection.rb +50 -0
- data/lib/wax/pagemaster_collection.rb +40 -0
- data/lib/wax/tasks/iiif.rake +9 -0
- data/lib/wax/tasks/jspackage.rake +26 -0
- data/lib/{tasks → wax/tasks}/lunr.rake +2 -2
- data/lib/wax/tasks/pagemaster.rake +10 -0
- data/lib/wax/tasks/push.rake +18 -0
- data/lib/{tasks → wax/tasks}/test.rake +1 -2
- data/lib/wax/utilities.rb +99 -0
- data/lib/wax_tasks.rb +20 -40
- data/spec/iiif.rb +42 -0
- data/spec/lunr.rb +24 -0
- data/spec/pagemaster.rb +41 -0
- data/spec/spec_helper.rb +24 -114
- metadata +21 -26
- data/lib/modules/iiif.rb +0 -39
- data/lib/modules/lunr.rb +0 -123
- data/lib/modules/pagemaster.rb +0 -73
- data/lib/tasks/iiif.rake +0 -11
- data/lib/tasks/jspackage.rake +0 -26
- data/lib/tasks/pagemaster.rake +0 -14
- data/lib/tasks/push_gh.rake +0 -43
- data/lib/tasks/push_static.rake +0 -35
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: dcf7d468f41464dd970e43b0f4a22b857b1e412872d9fb1a3467ab63d8482b80
+  data.tar.gz: 3d8b8640ec00cbf86ed1c06b39935745abf24001ee858d9e9a7dec2ee6d075f9
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f2315b406a0d970572b617571e140a29e82a09125dfbc5a700059f21da63b4b9f5e5e9bb4195dcdcdcb9d904d33f7319605c9b8c581a70fc1fb233eb41635e03
+  data.tar.gz: 1742b039101f3f9b7962dbe1685d0900f1da91a5bc50b263c4987bffae0ae5c6532e0ba61b473e90b9c72f687c8a529fa1bf0625df980e51bb3ba975e261b054

data/Gemfile
CHANGED
data/lib/wax/branch.rb
ADDED
@@ -0,0 +1,70 @@
+require 'colorize'
+require 'jekyll'
+require 'tmpdir'
+require 'time'
+require 'yaml'
+
+# methods for building/pushing git branches
+module Branch
+  def build(baseurl)
+    FileUtils.rm_rf('_site')
+    opts = {
+      source: '.',
+      destination: '_site',
+      config: '_config.yml',
+      baseurl: baseurl,
+      verbose: true
+    }
+    Jekyll::Site.new(Jekyll.configuration(opts)).process
+  end
+
+  def push
+    abort "Cannot find _site.".magenta unless Dir.exist? '_site'
+    Dir.chdir('./_site')
+    system 'git init && git add .'
+    system "git commit -m '#{@commit_msg}'"
+    system "git remote add origin #{@origin}"
+    system "git push origin master:refs/heads/#{TARGET} --force"
+  end
+end
+
+# configure git branches from travis info
+class TravisBranch
+  include Branch
+
+  def initialize
+    @repo_slug = ENV['TRAVIS_REPO_SLUG']
+    @user = @repo_slug.split('/')[0]
+    @repo_name = '1' + @repo_slug.split('/')[1]
+    @token = ENV['ACCESS_TOKEN']
+    @commit_msg = "Site updated via #{ENV['TRAVIS_COMMIT']} at #{Time.now.utc}"
+    @origin = "https://#{@user}:#{@token}@github.com/#{@repo_slug}.git"
+
+    puts "Deploying to #{TARGET} branch from Travis as #{@user}.".cyan
+  end
+
+  def build_gh_site
+    abort 'You must add the gh-baseurl to config.' if @repo_name.nil?
+    build(@repo_name)
+  end
+end
+
+# configure git branches from local info
+class LocalBranch
+  include Branch
+
+  attr_reader :origin, :commit_msg
+
+  def initialize
+    @origin = `git config --get remote.origin.url`
+    @commit_msg = "Site updated via local task at #{Time.now.utc}"
+    puts "Deploying to #{TARGET} branch from local task.".cyan
+  end
+
+  def build_gh_site
+    abort "Cannot load config.".magenta unless CONFIG
+    baseurl = CONFIG.fetch('gh-baseurl', false)
+    abort "You must add the gh-baseurl to config.".magenta unless baseurl
+    build(baseurl)
+  end
+end

data/lib/wax/collection.rb
ADDED
@@ -0,0 +1,49 @@
+# document
+class Collection
+  def initialize(name, opts = {})
+    @name = name
+    @s_conf = opts.fetch(:site_config, WaxTasks.site_config)
+    @c_conf = @s_conf.fetch(:collections).fetch(@name, nil)
+    @page_dir = "_#{@name}"
+    if @s_conf.fetch(:c_dir, false)
+      @page_dir = "#{@s_conf[:c_dir]}/#{@page_dir}"
+    end
+    assert_required(instance_variables)
+  end
+
+  def ingest(source)
+    Error.missing_key('source', @name) if source.nil?
+    src_path = "_data/#{source}"
+    data = hash_array(src_path)
+    Message.processing_source(source)
+    assert_pids(source, data)
+  rescue StandardError => e
+    Error.bad_source(source, @name) + "\n#{e}"
+  end
+
+  def hash_array(src)
+    opts = { headers: true, encoding: 'utf-8' }
+    ext = File.extname(src)
+    case ext
+    when '.csv' then data = CSV.read(src, opts).map(&:to_hash)
+    when '.json' then data = JSON.parse(File.read(src))
+    when '.yml' then data = YAML.load_file(src)
+    else Error.invalid_type(ext, @name)
+    end
+    data
+  end
+
+  def assert_pids(source, data)
+    pids = data.map { |d| d.fetch('pid', nil) }
+    Error.missing_pids(source, pids) unless pids.all?
+    duplicates = pids.select { |p| pids.count(p) > 1 }.uniq! || []
+    Error.duplicate_pids(duplicates, @name) unless duplicates.empty?
+    data
+  end
+
+  def assert_required(vars)
+    vars.each do |v|
+      Error.invalid_collection(@name) if instance_variable_get(v).nil?
+    end
+  end
+end

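Not part of the diff, but for orientation: `ingest` expects the named source to live under `_data/` and to resolve to an array of hashes, one per record, each with a unique `pid`. A sketch of what `hash_array` would return for a hypothetical `_data/objects.csv`:

    # _data/objects.csv (hypothetical):
    #   pid,label
    #   obj_1,First object
    #   obj_2,Second object
    # hash_array('_data/objects.csv') then yields
    [
      { 'pid' => 'obj_1', 'label' => 'First object' },
      { 'pid' => 'obj_2', 'label' => 'Second object' }
    ]

`assert_pids` aborts through `Error.missing_pids` or `Error.duplicate_pids` when a row lacks a `pid` or repeats one.
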
data/lib/wax/iiif_collection.rb
ADDED
@@ -0,0 +1,63 @@
+require 'wax_iiif'
+
+# document
+class IiifCollection < Collection
+  def initialize(name, opts = {})
+    super(name, opts)
+
+    @src_dir = "_data/iiif/#{@name}"
+    @target_dir = "./iiif/#{@name}"
+    @src_data = @c_conf.fetch('source', false)
+    @i_conf = @c_conf.fetch('iiif', {})
+    @meta = @i_conf.fetch('meta', false)
+    @variants = validated_variants
+  end
+
+  def process
+    Error.missing_iiif_src(@src_dir) unless Dir.exist?(@src_dir)
+    FileUtils.mkdir_p("./iiif/#{@name}", verbose: false)
+
+    builder = iiif_builder
+    builder.load(iiif_records)
+    builder.process_data(true)
+  end
+
+  def iiif_builder
+    build_opts = {
+      base_url: "#{@s_conf[:baseurl]}/iiif/#{@name}",
+      output_dir: @target_dir,
+      verbose: true,
+      variants: @variants
+    }
+    IiifS3::Builder.new(build_opts)
+  end
+
+  def validated_variants
+    vars = @i_conf.fetch('variants', false)
+    if vars.is_a?(Array) && vars.all? { |v| v.is_a?(Integer) }
+      variants = {}
+      vars.each_with_index { |v, i| variants["custom_variant_#{i}".to_sym] = v }
+    else
+      variants = { med: 600, lg: 1140 }
+    end
+    variants
+  end
+
+  def iiif_records
+    records = []
+    source_images = Dir["#{@src_dir}/*"].sort!
+    metadata = ingest(@src_data) if @meta && @src_data
+    source_images.each { |src_img| records << iiif_record(src_img, metadata) }
+    records
+  end
+
+  def iiif_record(src_img, metadata)
+    basename = File.basename(src_img, '.*').to_s
+    record_opts = { id: basename, path: src_img, label: basename }
+    if metadata
+      src_item = metadata.find { |i| i['pid'].to_s == basename }
+      @meta.each { |k, v| record_opts[k.to_sym] = src_item.fetch(v, '') }
+    end
+    IiifS3::ImageRecord.new(record_opts)
+  end
+end

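A usage sketch (the collection name `paintings` and its config are hypothetical): the class reads source images from `_data/iiif/<name>/` and writes derivatives and manifests under `./iiif/<name>/`.

    # assumes _config.yml lists a 'paintings' collection with an 'iiif' key
    # and that _data/iiif/paintings/ holds the source images
    IiifCollection.new('paintings').process

When the config supplies no integer `variants` array, `validated_variants` falls back to the `med: 600` / `lg: 1140` sizes shown above.
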
data/lib/wax/index.rb
ADDED
@@ -0,0 +1,70 @@
+# document
+class Index
+  attr_accessor :collections
+
+  def initialize(opts = {})
+    @s_conf = opts.fetch(:site_config, WaxTasks.site_config)
+    @collections = collections_to_index
+    @fields = total_fields
+    @path = opts.fetch(:path, 'js/lunr-index.json')
+    @ui = opts.fetch(:ui, ui)
+  end
+
+  def collections_to_index
+    to_index = @s_conf[:collections].find_all { |c| c[1].key?('lunr_index') }
+    to_index.map! { |c| c[0] }
+    Error.no_collections_to_index if to_index.nil?
+    lunr_collections = []
+    to_index.each { |c| lunr_collections << LunrCollection.new(c) }
+    lunr_collections
+  end
+
+  def total_fields
+    total_fields = ['pid']
+    @collections.each { |c| total_fields.concat(c.fields) unless c.fields.nil? }
+    total_fields.uniq
+  end
+
+  def write
+    docs = []
+    @collections.each { |c| docs.concat(c.data) }
+    docs = add_lunr_ids(docs)
+    FileUtils.mkdir_p(File.dirname(@path))
+    index = "---\nlayout: none\n---\n#{JSON.pretty_generate(docs)}"
+    File.open(@path, 'w') { |f| f.write(index) }
+    Message.writing_index(@path)
+    write_ui if @ui
+  end
+
+  def ui
+    ui = "$.getJSON(\"{{ site.baseurl }}/js/lunr-index.json\", function(index_json) {\nwindow.index = new elasticlunr.Index;\nwindow.store = index_json;\nindex.saveDocument(false);\nindex.setRef('lunr_id');"
+    @fields.each { |f| ui += "\nindex.addField('#{f}');" }
+    ui += "\n// add docs\nfor (i in store){index.addDoc(store[i]);}"
+    ui += "\n$('input#search').on('keyup', function() {\nvar results_div = $('#results');\nvar query = $(this).val();\nvar results = index.search(query, { boolean: 'AND', expand: true });\nresults_div.empty();\nif (results.length > 10) {\nresults_div.prepend(\"<p><small>Displaying 10 of \" + results.length + \" results.</small></p>\");\n}\nfor (var r in results.slice(0, 9)) {\nvar ref = results[r].ref;\nvar item = store[ref];"
+    @fields.each { |f| ui += "var #{f} = item.#{f};\n" }
+    ui += "var result = '<div class=\"result\"><b><a href=\"' + item.link + '\">' + title + '</a></b></p></div>';\nresults_div.append(result);\n}\n});\n});"
+    ui
+  end
+
+  def write_ui
+    ui = "---\nlayout: none\n---\n#{@ui}"
+    path = 'js/lunr-ui.js'
+    if File.exist?(path)
+      Message.ui_exists(path)
+    else
+      File.open(path, 'w') { |file| file.write(ui) }
+      Message.writing_ui(path)
+    end
+  end
+
+  def add_lunr_ids(documents)
+    count = 0
+    docs_with_ids = []
+    documents.each do |d|
+      d['lunr_id'] = count
+      docs_with_ids << d
+      count += 1
+    end
+    docs_with_ids
+  end
+end

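A minimal sketch of driving the indexer directly, with the default output path shown above:

    # gathers every collection whose config contains a 'lunr_index' key,
    # then writes js/lunr-index.json (and js/lunr-ui.js unless one already exists)
    Index.new.write

The relocated lunr rake task in the file list above presumably wraps this call.
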
data/lib/wax/lunr_collection.rb
ADDED
@@ -0,0 +1,50 @@
+# document
+class LunrCollection < Collection
+  attr_accessor :fields, :data
+
+  def initialize(name, opts = {})
+    super(name, opts)
+    @content = @c_conf['lunr_index'].fetch('content', false)
+    @fields = @c_conf['lunr_index'].fetch('fields', nil)
+    @data = pages_to_hash_array
+  end
+
+  def pages_to_hash_array
+    page_hashes = []
+    pages = Dir.glob(@page_dir + '/*.md')
+    # catch
+    abort "There are no pages in '#{@page_dir}'".magenta if pages.empty?
+    abort "There are no fields for #{@name}.".magenta if @fields.empty?
+    puts "Loading #{pages.length} pages from #{@page_dir}"
+    # index each page in collection
+    pages.each { |page| page_hashes << page_hash(page) }
+    page_hashes
+  end
+
+  def page_hash(page)
+    yaml = YAML.load_file(page)
+    hash = {
+      'link' => "{{'" + yaml.fetch('permalink') + "' | relative_url }}",
+      'collection' => @name
+    }
+    hash['content'] = rm_diacritics(clean(File.read(page))) if @content
+    add_data_fields(hash, yaml)
+  end
+
+  def add_data_fields(hash, yaml)
+    @fields.each { |f| hash[f] = normalize(yaml[f]) }
+    hash
+  end
+
+  def normalize(value)
+    case value
+    when Array
+      if value.first.is_a? Hash then value
+      else rm_diacritics(value.join(', '))
+      end
+    when String then rm_diacritics(value)
+    when Hash then value
+    else value.to_s
+    end
+  end
+end

data/lib/wax/pagemaster_collection.rb
ADDED
@@ -0,0 +1,40 @@
+# document
+class PagemasterCollection < Collection
+  attr_accessor :page_dir
+
+  def initialize(name, opts = {})
+    super(name, opts)
+    @data = ingest(@c_conf.fetch('source', nil))
+    @layout = @c_conf.fetch('layout', nil)
+    @ordered = @c_conf.fetch('keep_order', false)
+  end
+
+  def generate_pages
+    FileUtils.mkdir_p(@page_dir)
+    completed = 0
+    @data.each_with_index do |item, i|
+      page_slug = slug(item.fetch('pid').to_s)
+      path = "#{@page_dir}/#{page_slug}.md"
+      if File.exist?(path)
+        puts "#{page_slug}.md already exits. Skipping."
+      else
+        File.open(path, 'w') { |f| f.write("#{page(item, page_slug, i).to_yaml}---") }
+        completed += 1
+      end
+    end
+    puts Message.pagemaster_results(completed, @page_dir)
+  rescue StandardError => e
+    Error.page_generation_failure(completed) + "\n#{e}"
+  end
+
+  def page(item, page_slug, index)
+    item['permalink'] = "/#{@name}/#{page_slug}#{@s_conf[:permalink]}"
+    item['layout'] = @layout
+    item['order'] = padded_int(index, @data.length) if @ordered
+    item
+  end
+
+  def padded_int(index, max_idx)
+    index.to_s.rjust(Math.log10(max_idx).to_i + 1, '0')
+  end
+end

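A sketch of what a generated page looks like; the collection name, layout, and item are hypothetical:

    # with site permalink style '.html', a collection named 'objects' using
    # layout 'wax_item', and this record from the source file:
    item = { 'pid' => 'obj_1', 'label' => 'First object' }
    # page(item, 'obj_1', 0) returns the item with the added keys
    #   'permalink' => '/objects/obj_1.html'
    #   'layout'    => 'wax_item'
    # and generate_pages writes it as YAML front matter to _objects/obj_1.md
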
data/lib/wax/tasks/jspackage.rake
ADDED
@@ -0,0 +1,26 @@
+require 'wax_tasks'
+
+namespace :wax do
+  desc 'write a simple package.json'
+  task :jspackage do
+    s_conf = WaxTasks.site_config
+    if s_conf[:js]
+      names = []
+      package = {
+        'name' => s_conf['title'],
+        'version' => '1.0.0',
+        'dependencies' => {}
+      }
+      s_conf[:js].each do |dependency|
+        name = dependency[0]
+        names << name
+        version = dependency[1]['version']
+        package['dependencies'][name] = '^' + version
+      end
+      File.open('package.json', 'w') { |file| file.write(package.to_json) }
+      Message.writing_package_json(names)
+    else
+      Message.skipping_package_json
+    end
+  end
+end

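The shape the task expects under the `js` key of `_config.yml` is inferred from the loop above; the package names and versions here are hypothetical:

    # s_conf[:js] would look roughly like:
    {
      'jquery'      => { 'version' => '3.3.1' },
      'elasticlunr' => { 'version' => '0.9.5' }
    }
    # giving package.json dependencies of
    #   { "jquery": "^3.3.1", "elasticlunr": "^0.9.5" }
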
data/lib/wax/tasks/pagemaster.rake
ADDED
@@ -0,0 +1,10 @@
+require 'wax_tasks'
+
+namespace :wax do
+  desc 'generate collection md pages from yaml or csv data source'
+  task :pagemaster do
+    ARGS = ARGV.drop(1).each { |a| task a.to_sym }
+    abort "You must specify a collection after 'wax:pagemaster'" if ARGS.empty?
+    ARGS.each { |name| PagemasterCollection.new(name).generate_pages }
+  end
+end

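A usage sketch; `objects` is a hypothetical collection name that would need an entry under `collections` in `_config.yml`:

    # $ bundle exec rake wax:pagemaster objects
    # equivalent direct call:
    PagemasterCollection.new('objects').generate_pages
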
data/lib/wax/tasks/push.rake
ADDED
@@ -0,0 +1,18 @@
+require 'wax_tasks'
+
+namespace :wax do
+  desc 'push compiled site to git branch BRANCH'
+  task :push do
+    ARGS = ARGV.drop(1).each { |a| task a.to_sym }
+    TARGET = slug(ARGS.first)
+    TRAVIS = ENV.fetch('CI', false)
+    CONFIG = YAML.load_file('./_config.yml')
+    GH = TARGET == 'gh-pages'
+
+    abort "You must specify a branch after 'wax:push:branch'" if ARGS.empty?
+
+    branch = TRAVIS ? TravisBranch.new : LocalBranch.new
+    branch.build_gh_site if GH
+    branch.push
+  end
+end

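A usage sketch of the task: the branch name is whatever follows the task name, and `gh-pages` additionally triggers a Jekyll build via `build_gh_site` before pushing.

    # run from the site root:
    #   $ bundle exec rake wax:push gh-pages
    # with the CI env var set (Travis) this uses TravisBranch; otherwise LocalBranch
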
data/lib/{tasks → wax/tasks}/test.rake
CHANGED
@@ -1,4 +1,3 @@
-require 'colorized_string'
 require 'html-proofer'
 
 namespace :wax do
@@ -14,6 +13,6 @@ namespace :wax do
       verbose: true
     }
    HTMLProofer.check_directory('./_site', opts).run
-
+    system('bundle exec rspec') if File.exist?('.rspec')
  end
end

data/lib/wax/utilities.rb
ADDED
@@ -0,0 +1,99 @@
+require 'colorize'
+
+def rm_diacritics(str)
+  to_replace = 'ÀÁÂÃÄÅàáâãäåĀāĂ㥹ÇçĆćĈĉĊċČčÐðĎďĐđÈÉÊËèéêëĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħÌÍÎÏìíîïĨĩĪīĬĭĮįİıĴĵĶķĸĹĺĻļĽľĿŀŁłÑñŃńŅņŇňʼnŊŋÒÓÔÕÖØòóôõöøŌōŎŏŐőŔŕŖŗŘřŚśŜŝŞşŠšſŢţŤťŦŧÙÚÛÜùúûüŨũŪūŬŭŮůŰűŲųŴŵÝýÿŶŷŸŹźŻżŽž'
+  replaced_by = 'AAAAAAaaaaaaAaAaAaCcCcCcCcCcDdDdDdEEEEeeeeEeEeEeEeEeGgGgGgGgHhHhIIIIiiiiIiIiIiIiIiJjKkkLlLlLlLlLlNnNnNnNnnNnOOOOOOooooooOoOoOoRrRrRrSsSsSsSssTtTtTtUUUUuuuuUuUuUuUuUuUuWwYyyYyYZzZzZz'
+  str.tr(to_replace, replaced_by)
+end
+
+def clean(str)
+  str.gsub!(/\A---(.|\n)*?---/, '') # remove yaml front matter
+  str.gsub!(/{%(.*)%}/, '') # remove functional liquid
+  str.gsub!(%r{<\/?[^>]*>}, '') # remove html
+  str.gsub!('\\n', '') # remove newlines
+  str.gsub!(/\s+/, ' ') # remove extra space
+  str.tr!('"', "'") # replace double quotes with single
+  str
+end
+
+def slug(str)
+  str.downcase.tr(' ', '_').gsub(/[^:\w-]/, '')
+end
+
+# document
+module Error
+  def self.complain(error)
+    abort error.magenta
+  end
+
+  def self.missing_key(key, name)
+    complain("Key '#{key}' not found for '#{name}'. Check config and rerun.")
+  end
+
+  def self.invalid_type(ext, name)
+    complain("Source file for #{name} must be .csv .json or .yml. Found #{ext}.")
+  end
+
+  def self.duplicate_pids(duplicates, name)
+    complain("Fix the following duplicate pids for collection '#{name}': #{duplicates}")
+  end
+
+  def self.bad_source(source, name)
+    complain("Cannot load source '#{source}' for collection '#{name}'. Check for typos and rebuild.")
+  end
+
+  def self.missing_pids(source, pids)
+    complain("Source '#{source}' is missing #{pids.count(nil)} `pid` values.")
+  end
+
+  def self.invalid_collection(name)
+    complain("Configuration for the collection '#{name}' is invalid.")
+  end
+
+  def self.page_generation_failure(completed)
+    complain("Failure after #{completed} pages, likely from missing pid.")
+  end
+
+  def self.no_collections_to_index
+    complain('There are no valid collections to index.')
+  end
+
+  def self.missing_iiif_src(dir)
+    complain("Source path '#{dir}' does not exist. Exiting.")
+  end
+end
+
+# document
+class Message
+  def self.share(msg)
+    puts msg.cyan
+  end
+
+  def self.processing_source(source)
+    share("\nProcessing #{source}...")
+  end
+
+  def self.pagemaster_results(completed, dir)
+    share("\n#{completed} pages were generated to #{dir} directory.")
+  end
+
+  def self.writing_index(path)
+    share("Writing lunr index to #{path}")
+  end
+
+  def self.ui_exists(path)
+    share("Lunr UI already exists at #{path}. Skipping.")
+  end
+
+  def self.writing_ui(path)
+    share("Writing lunr ui to #{path}")
+  end
+
+  def self.writing_package_json(names)
+    share("Writing #{names} to simple package.json.")
+  end
+
+  def self.skipping_package_json
+    share('Cannot find js dependencies in config. Skipping package.json.')
+  end
+end

data/lib/wax_tasks.rb
CHANGED
@@ -1,47 +1,27 @@
 require 'yaml'
 
-require_relative '
-require_relative '
-require_relative '
-
-
+require_relative 'wax/branch'
+require_relative 'wax/collection'
+require_relative 'wax/index'
+require_relative 'wax/iiif_collection'
+require_relative 'wax/lunr_collection'
+require_relative 'wax/pagemaster_collection'
+require_relative 'wax/utilities'
+
+# document
 module WaxTasks
-  def self.pagemaster(collection_name, site_config)
-    collection = collection(collection_name, site_config)
-    Pagemaster.generate(collection, site_config)
-  end
-
-  def self.lunr(site_config)
-    Lunr.write_index(site_config)
-    Lunr.write_ui(site_config)
-  end
-
-  def self.iiif(collection_name, site_config)
-    Iiif.process(collection_name, site_config)
-  end
-
   def self.site_config
-    YAML.load_file('_config.yml')
-
-
-
-
-
-
-
-
-    {
-      name: collection_name,
-      source: conf['source'],
-      layout: conf['layout'],
-      keep_order: conf.key?('keep_order') ? conf['keep_order'] : false,
-      lunr_index: conf['lunr_index']
+    site_config = YAML.load_file('./_config.yml')
+    s_conf = {
+      title: site_config.fetch('title', ''),
+      url: site_config.fetch('url', ''),
+      baseurl: site_config.fetch('baseurl', ''),
+      permalink: site_config.fetch('permalink', false),
+      c_dir: site_config.fetch('collections_dir', false),
+      collections: site_config.fetch('collections', false),
+      js: site_config.fetch('js', false)
     }
-
-
-  end
-
-  def self.slug(str)
-    str.downcase.tr(' ', '_').gsub(/[^:\w-]/, '')
+    s_conf[:permalink] = s_conf[:permalink] == 'pretty' ? '/' : '.html'
+    s_conf
   end
 end

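A sketch of the hash the rewritten `WaxTasks.site_config` returns, traced from the code above for a hypothetical `_config.yml`:

    # given a _config.yml containing (hypothetically):
    #   title: Demo Site
    #   baseurl: '/demo'
    #   permalink: pretty
    #   collections:
    #     objects:
    #       source: objects.csv
    #       layout: wax_item
    # the method returns roughly:
    {
      title: 'Demo Site',
      url: '',
      baseurl: '/demo',
      permalink: '/',
      c_dir: false,
      collections: { 'objects' => { 'source' => 'objects.csv', 'layout' => 'wax_item' } },
      js: false
    }
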
data/spec/iiif.rb
ADDED
@@ -0,0 +1,42 @@
+describe 'wax:iiif' do
+  it 'constructs iiif collections' do
+    expect { quiet_stdout { IIIF_COLLECTIONS.all? } }
+  end
+  context 'when invoked as a task' do
+    it 'passes' do
+      passes = quiet_stdout { system("bundle exec rake wax:iiif #{ARGS.first}") }
+      expect(passes).to eq(true)
+    end
+    it 'generates collection json' do
+      expect(File.exist?("./iiif/#{ARGS.first}/collection/top.json")).to be true
+    end
+    it 'generates manifest json' do
+      expect(File.exist?("./iiif/#{ARGS.first}/0/manifest.json")).to be true
+    end
+    it 'adds manifest metadata fields from config + source' do
+      manifest = JSON.parse(File.read("./iiif/#{ARGS.first}/0/manifest.json"))
+      %w[label description].each do |k|
+        expect(manifest).to have_key(k)
+        expect(manifest[k]).not_to be_empty
+      end
+    end
+    it 'generates derivatives' do
+      expect(Dir.exist?("./iiif/#{ARGS.first}/images")).to be true
+    end
+    it 'generates custom image variants' do
+      [100, 900].each do |size|
+        expect(File.exist?("./iiif/#{ARGS.first}/images/1-1/full/#{size},/0/default.jpg")).to be true
+      end
+    end
+  end
+  context 'when processed directly' do
+    it 'still passes' do
+      expect { quiet_stdout { IIIF_COLLECTIONS.last.process } }.not_to raise_error
+    end
+  end
+  context 'when looking for a missing dir' do
+    it 'throws a configuration error' do
+      expect { quiet_stdout { IiifCollection.new('not_a_collection') } }.to raise_error(SystemExit)
+    end
+  end
+end