wax_tasks 0.0.2 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 823956054c121e807115077ba59f73c6e1a11bd8
- data.tar.gz: c3e1e49fa4f00782b45359857aa102382aa61828
+ metadata.gz: 5ac9e0a96963154d6625c7f0c4fc71251c86c017
+ data.tar.gz: 4e87c17755a3da5eb541176aaec2812c305c736f
  SHA512:
- metadata.gz: 9753ef5e8f16ad101cec17e5282dd8a0f6c374b678a0b3cb4492286002ca2645bfeaecedc87a9d61438d9788cc37b4e734f3181d754f97df8b887476de9c87a2
- data.tar.gz: edef1c3c3c8781a694efa195f31ca8c6724f8c4e8511d7acaecab66037eff1658f5dbb56c9335486bced3e6d84604e10cdd7fde24c7215fc55e71481a8784728
+ metadata.gz: 3c885e92b4245d48e44e7af8b7206bee18461b2b4add0d50b23537e562e135755fa1d227d9febb5003cfc942303270867dd455f0ac8508af7619d2717b9dd3ca
+ data.tar.gz: bca597f6cd84e70ae59a2052c258f10781689753b98ffbad8aaf2828b8906e490f02d58bbc6707aa42390c43c734f9a1c2fc3dc2c2ad61589981ffff28e27a33
data/lib/helpers.rb ADDED
@@ -0,0 +1,30 @@
+ require 'colorized_string'
+
+ def clean(str)
+ str.gsub!(/\A---(.|\n)*?---/, '') # remove yaml front matter
+ str.gsub!(/{%(.*)%}/, '') # remove functional liquid
+ str.gsub!(/<\/?[^>]*>/, '') # remove html
+ str.gsub!('\\n', '') # remove newlines
+ str.gsub!(/\s+/, ' ') # remove extra space
+ str.tr!('"', "'") # replace double quotes with single
+ str
+ end
+
+ def valid_pagemaster(collection_name)
+ c = $config['collections'][collection_name]
+ abort "Cannot find the collection '#{collection_name}' in _config.yml. Exiting.".magenta if c.nil?
+ abort "Cannot find 'source' for the collection '#{collection_name}' in _config.yml. Exiting.".magenta if c['source'].nil?
+ abort "Cannot find 'layout' for the collection '#{collection_name}' in _config.yml. Exiting.".magenta if c['layout'].nil?
+ abort "Cannot find the file '#{'_data/' + c['source']}'. Exiting.".magenta unless File.file?('_data/' + c['source'])
+ c
+ end
+
+ def rm_diacritics(str)
+ to_replace = "ÀÁÂÃÄÅàáâãäåĀāĂ㥹ÇçĆćĈĉĊċČčÐðĎďĐđÈÉÊËèéêëĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħÌÍÎÏìíîïĨĩĪīĬĭĮįİıĴĵĶķĸĹĺĻļĽľĿŀŁłÑñŃńŅņŇňʼnŊŋÒÓÔÕÖØòóôõöøŌōŎŏŐőŔŕŖŗŘřŚśŜŝŞşŠšſŢţŤťŦŧÙÚÛÜùúûüŨũŪūŬŭŮůŰűŲųŴŵÝýÿŶŷŸŹźŻżŽž"
+ replaced_by = "AAAAAAaaaaaaAaAaAaCcCcCcCcCcDdDdDdEEEEeeeeEeEeEeEeEeGgGgGgGgHhHhIIIIiiiiIiIiIiIiIiJjKkkLlLlLlLlLlNnNnNnNnnNnOOOOOOooooooOoOoOoRrRrRrSsSsSsSssTtTtTtUUUUuuuuUuUuUuUuUuUuWwYyyYyYZzZzZz"
+ str.tr(to_replace, replaced_by)
+ end
+
+ def slug(str)
+ str.downcase.tr(' ', '_').gsub(/[^\w-]/, '')
+ end
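A quick sketch of how the new helpers behave (hypothetical input strings; assumes lib/ is on the load path so require 'helpers' resolves). Note that clean mutates its argument in place with gsub!, so it needs an unfrozen string:

    require 'helpers'

    raw = +"---\ntitle: Example\n---\n{% include foo.html %}<p>Café  \"night\"</p>"
    clean(raw)              # strips front matter, Liquid tags, and HTML, then squeezes whitespace
    rm_diacritics('Café')   # => "Cafe"
    slug('Starry Night!')   # => "starry_night"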
data/lib/lunr_index.rb ADDED
@@ -0,0 +1,69 @@
+ require 'yaml'
+ require 'colorized_string'
+ require 'helpers'
+
+ # LunrIndex class
+ class LunrIndex
+ attr_reader :output
+
+ def initialize(collections, lunr_language)
+ @collections = collections
+ @lunr_language = lunr_language
+ @cdir = $config['collections_dir'].nil? ? '' : $config['collections_dir'] + '/'
+ @lunr_collections = []
+ @total_fields = []
+ @output = ''
+
+ pre_process
+ add_docs
+ end
+
+ def pre_process
+ @output += "---\nlayout: none\n---\nvar index = new elasticlunr.Index;\nindex.setRef('lunr_id');\nindex.saveDocument(false);"
+ @output += "\nindex.pipeline.remove(elasticlunr.trimmer);" if @lunr_language
+ @collections.each do |c|
+ if c[1].key?('lunr_index') && c[1]['lunr_index'].key?('fields')
+ @total_fields.concat c[1]['lunr_index']['fields']
+ @total_fields << 'content' if c[1]['lunr_index']['content']
+ @lunr_collections << c
+ end
+ end
+ @total_fields.uniq!
+ abort("Fields are not properly configured.".magenta) if @total_fields.empty?
+ @total_fields.each { |f| @output += "\nindex.addField(" + "'" + f + "'" + "); " }
+ end
+
+ def add_docs
+ count = 0
+ store_string = "\nvar store = ["
+
+ abort("There are no valid collections to index.".magenta) if @collections.nil?
+ @lunr_collections.each do |c|
+ dir = @cdir + '_' + c[0]
+ fields = c[1]['lunr_index']['fields'].uniq
+ pages = Dir.glob(dir + '/*.md')
+
+ abort "There are no markdown pages in directory '#{dir}'".magenta if pages.empty?
+ abort "There are no fields specified for #{c[0]}.".magenta if fields.empty?
+
+ puts "Loading #{pages.length} pages from #{dir}"
+ pages.each do |page|
+ begin
+ yaml = YAML.load_file(page)
+ hash = {
+ 'lunr_id' => count,
+ 'link' => "{{'" + yaml['permalink'] + "' | relative_url }}"
+ }
+ fields.each { |f| hash[f] = rm_diacritics(yaml[f]) }
+ hash['content'] = rm_diacritics(clean(File.read(page))) if c[1]['lunr_index']['content']
+ @output += "\nindex.addDoc(" + hash.to_json + "); "
+ store_string += "\n" + hash.to_json + ", "
+ count += 1
+ rescue StandardError
+ abort "Cannot load data from markdown pages in #{dir}.".magenta
+ end
+ end
+ end
+ @output += store_string.chomp(', ') + '];'
+ end
+ end
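LunrIndex only indexes a collection whose _config.yml entry carries a lunr_index key with fields. A minimal sketch of the structure it expects, written as the Ruby hash YAML.load_file produces (collection name and fields are hypothetical; assumes _paintings/ already holds generated markdown pages and that lib/ is on the load path):

    require 'json'
    require 'lunr_index'

    $config = { 'collections_dir' => nil }
    collections = {
      'paintings' => {
        'source' => 'paintings.csv',
        'layout' => 'default',
        'lunr_index' => { 'content' => true, 'fields' => ['title', 'artist'] }
      }
    }
    index = LunrIndex.new(collections, false) # second argument is lunr_language
    File.open('js/lunr-index.js', 'w') { |file| file.write(index.output) }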
data/lib/tasks/config.rake CHANGED
@@ -7,10 +7,9 @@ namespace :wax do
  begin
  $config = YAML.load_file('_config.yml')
  $argv = ARGV.drop(1)
- $argv.each { |a| task a.to_sym do; end }
+ $argv.each { |a| task a.to_sym }
  rescue StandardError
- puts('Cannot load _config.yml').magenta
- exit 1
+ abort 'Cannot load _config.yml'.magenta
  end
  end
  end
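The one-line change above keeps the same trick in a terser form: trailing words on the command line (collection names) would otherwise make Rake fail with a "Don't know how to build task" error, so wax:config registers each of them as an empty task while leaving the raw values in $argv for later tasks. A sketch of the effect for a hypothetical invocation:

    # bundle exec rake wax:pagemaster paintings   (inside the Rake DSL context of config.rake)
    $argv = ARGV.drop(1)             # => ["paintings"]
    $argv.each { |a| task a.to_sym } # defines a no-op task named :paintings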
data/lib/tasks/ghbranch.rake CHANGED
@@ -1,6 +1,7 @@
  require 'jekyll'
  require 'tmpdir'
  require 'fileutils'
+ require 'time'

  namespace :wax do
  desc 'build site with gh-baseurl and publish to gh-pages branch'
@@ -25,7 +26,7 @@ namespace :wax do
  Dir.chdir tmp

  system 'git init' # Init the repo.
- system "git add . && git commit -m 'Site updated at " + Time.now.utc + "'"
+ system "git add . && git commit -m 'Site updated at #{Time.now.utc}'"
  system 'git remote add origin ' + origin
  system 'git push origin master:refs/heads/gh-pages --force'
  end
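The commit-message change fixes a type error: String#+ refuses a Time operand, whereas interpolation calls to_s for you. A minimal illustration (timestamp is illustrative):

    'Site updated at ' + Time.now.utc  # raises TypeError: no implicit conversion of Time into String
    "Site updated at #{Time.now.utc}"  # => "Site updated at 2018-02-04 12:00:00 UTC", for example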
data/lib/tasks/iiif.rake CHANGED
@@ -3,50 +3,43 @@ require 'iiif_s3'

  namespace :wax do
  task :iiif => :config do
+ abort "You must specify a collections after 'bundle exec rake wax:iiif'.".magenta if $argv.empty?
  FileUtils.mkdir_p './_iiif/tiles'
- imagedata = []
+ all_records = []
  id_counter = 0
- if $argv.empty?
- puts("You must specify one or more collections after 'bundle exec rake wax:iiif' to generate.").magenta
- exit 1
- else
- build_opts = {
- :base_url => $config['baseurl'] + '/tiles',
- :output_dir => './_iiif/tiles',
- :tile_scale_factors => [1, 2],
- :verbose => true
- }
- $argv.each do |a|
- dirpath = './_iiif/source_images/' + a
- if Dir.exist?(dirpath)
- id_counter += 1
- imagefiles = Dir[dirpath + '/*'].sort!
- counter = 1
- imagefiles.each do |imagefile|
- begin
- basename = File.basename(imagefile, '.*').to_s
- record_opts = {
- :id => a + '-' + basename,
- :is_document => false,
- :path => imagefile,
- :label => $config['title'] + ' - ' + a + ' - ' + basename
- }
- i = IiifS3::ImageRecord.new(record_opts)
- counter += 1
- imagedata.push(i)
- rescue StandardError
- puts('Failed to convert image ' + imagefile + '.').magenta
- exit 1
- end
- end
- else
- puts("Source path '" + dirpath + "' does not exist. Exiting.").magenta
- exit 1
- end
- end
- builder = IiifS3::Builder.new(build_opts)
- builder.load(imagedata)
- builder.process_data()
+ build_opts = {
+ :base_url => $config['baseurl'] + '/tiles',
+ :output_dir => './_iiif/tiles',
+ :tile_scale_factors => [1, 2],
+ :verbose => true
+ }
+ $argv.each do |a|
+ id_counter += 1
+ dirpath = './_iiif/source_images/' + a
+ collection_records = make_records(a) if Dir.exist?(dirpath)
+ all_records.concat collection_records
+ abort "Source path '#{dirpath}' does not exist. Exiting.".magenta unless Dir.exist?(dirpath)
  end
+ builder = IiifS3::Builder.new(build_opts)
+ builder.load(all_records)
+ builder.process_data
  end
  end
+
+ def make_records(collection_name)
+ counter = 1
+ collection_records = []
+ imagefiles = Dir['./_iiif/source_images/' + collection_name + '/*'].sort!
+ imagefiles.each do |imagefile|
+ basename = File.basename(imagefile, '.*').to_s
+ record_opts = {
+ :id => collection_name + '-' + basename,
+ :is_document => false,
+ :path => imagefile,
+ :label => $config['title'] + ' - ' + collection_name + ' - ' + basename
+ }
+ counter += 1
+ collection_records << IiifS3::ImageRecord.new(record_opts)
+ end
+ collection_records
+ end
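For orientation, a hedged sketch of what one record built by make_records looks like, assuming a hypothetical 'paintings' collection with a source image at ./_iiif/source_images/paintings/starry_night.jpg and a site title of 'My Exhibit':

    # bundle exec rake wax:iiif paintings
    record = IiifS3::ImageRecord.new(
      :id          => 'paintings-starry_night',
      :is_document => false,
      :path        => './_iiif/source_images/paintings/starry_night.jpg',
      :label       => 'My Exhibit - paintings - starry_night'
    )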
data/lib/tasks/lunr.rake CHANGED
@@ -1,70 +1,14 @@
- require 'json'
- require 'yaml'
- require 'colorized_string'
+ require 'lunr_index'

  namespace :wax do
  desc 'build lunr search index'
  task :lunr => :config do
- total_fields = []
- count = 0
- front_matter = "---\nlayout: null\n---"
- store_string = "\nvar store = ["
- index_string = "\nvar index = new elasticlunr.Index;\nindex.setRef('lunr_id');\nindex.saveDocument(false);"
- index_string += "\nindex.pipeline.remove(elasticlunr.trimmer);" if $config['lunr_language']
  collections = $config['collections']
- has_content = false
- collections.each do |c|
- if c[1].key?('lunr_index') && c[1]['lunr_index'].key?('fields')
- total_fields.concat c[1]['lunr_index']['fields']
- end
- has_content = true if c[1]['lunr_index']['content']
- end
- total_fields << 'content' if has_content
- if total_fields.uniq.empty?
- puts "Fields are not properly configured.".magenta
- exit 1
- else
- total_fields.uniq.each { |f| index_string += "\nindex.addField(" + "'" + f + "'" + "); " }
- collections.each do |collection|
- name = collection[0]
- collection = collection[1]
- if collection.key?('lunr_index') && collection['lunr_index'].key?('fields')
- dir = collection['directory'] || '_' + name
- fields = collection['lunr_index']['fields']
- puts "Loading pages from #{dir}".cyan
- Dir.glob(dir + '/*.md').each do |md|
- begin
- yaml = YAML.load_file(md)
- hash = {}
- hash['lunr_id'] = count
- hash['link'] = '{{ site.baseurl }}' + yaml['permalink']
- fields.uniq.each { |f| hash[f] = yaml[f].to_s }
- if collection['lunr_index']['content']
- hash['content'] = clean(File.read(md))
- end
- index_string += "\nindex.addDoc(" + hash.to_json + "); "
- store_string += "\n" + hash.to_json + ", "
- count += 1
- rescue StandardError
- puts "Cannot load data from markdown pages in #{dir}.".magenta
- exit 1
- end
- end
- end
- end
- store_string = store_string.chomp(', ') + '];'
- Dir.mkdir('js') unless File.exist?('js')
- File.open('js/lunr-index.js', 'w') { |file| file.write(front_matter + index_string + store_string) }
- puts "Writing lunr index to js/lunr-index.js".cyan
- end
- end
- end
+ lunr_language = $config['lunr_language']
+ index = LunrIndex.new(collections, lunr_language)

- def clean(str)
- str = str.gsub(/\A---(.|\n)*?---/, '') # remove yaml front matter
- str = str.gsub(/{%(.*)%}/, '') # remove functional liquid
- str = str.gsub(/<\/?[^>]*>/, '') # remove html
- str = str.gsub('\\n', '').gsub(/\s+/, ' ') # remove newlines and extra space
- str = str.tr('"', "'").to_s # replace double quotes with single
- return str
+ Dir.mkdir('js') unless File.exist?('js')
+ File.open('js/lunr-index.js', 'w') { |file| file.write(index.output) }
+ puts "Writing lunr index to js/lunr-index.js".cyan
+ end
  end
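The task now simply hands $config['collections'] to LunrIndex and writes index.output to js/lunr-index.js. Each markdown page becomes one document in that file, roughly of this shape (hypothetical values):

    doc = {
      'lunr_id' => 0,
      'link'    => "{{'/paintings/starry_night.html' | relative_url }}",
      'title'   => 'Starry Night',
      'content' => 'cleaned page text ...'
    }
    # emitted as index.addDoc({...}); and appended to the store array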
data/lib/tasks/pagemaster.rake CHANGED
@@ -1,83 +1,20 @@
- require 'yaml'
- require 'csv'
- require 'json'
  require 'colorized_string'
+ require 'wax_collection'
+ require 'helpers'

  namespace :wax do
  desc 'generate collection md pages from yaml or csv data source'
  task :pagemaster => :config do
- collections = $config['collections']
  if $argv.empty?
  puts "You must specify one or more collections after 'bundle exec rake wax:pagemaster' to generate.".magenta
  exit 1
  else
- $argv.each do |a|
- collection = collections[a]
- if collection.nil?
- puts "The collection '#{a}' does not exist. Check for typos in terminal and _config.yml.".magenta
- exit 1
- else
- meta = {}
- meta['src'] = '_data/' + collection['source']
- meta['layout'] = File.basename(collection['layout'], '.*')
- meta['dir'] = collection['directory']
- if [meta['src'], meta['dir'], meta['layout']].all?
- FileUtils.mkdir_p meta['dir']
- data = ingest(meta['src'])
- info = generate_pages(meta, data)
- puts "\n#{info[:completed]} pages were generated to #{meta['dir']} directory.".cyan
- puts "\n#{info[:skipped]} pre-existing items were skipped.".cyan
- else
- puts "\nYour collection '#{a}' is missing one or more of the required parameters (source, directory, layout) in config. please fix and rerun.".magenta
- exit 1
- end
- end
+ $argv.each do |collection_name|
+ collection_config = valid_pagemaster(collection_name)
+ collections_dir = $config['collections_dir'].nil? ? '' : $config['collections_dir'].to_s + '/'
+ collection = WaxCollection.new(collection_name, collection_config, collections_dir)
+ collection.pagemaster
  end
  end
  end
  end
-
- def ingest(src)
- src_type = File.extname(src)
- if src_type == '.csv'
- data = CSV.read(src, :headers => true, :encoding => 'utf-8').map(&:to_hash)
- elsif src_type == '.json'
- data = JSON.parse(File.read(src).encode("UTF-8"))
- else
- puts "File type for #{src} must be .csv or .json. Please fix and rerun."
- exit 1
- end
- pids = []
- data.each { |d| pids << d['pid'] }
- duplicates = pids.detect { |p| pids.count(p) > 1 } || []
- unless duplicates.empty?
- puts "\nYour collection has the following duplicate ids. please fix and rerun.\n#{duplicates} \n".magenta
- exit 1
- end
- puts "Processing #{src}...."
- return data
- rescue standardError
- puts "Cannot load #{src}. check for typos and rebuild.".magenta
- exit 1
- end
-
- def generate_pages(meta, data)
- perma_ext = '.html'
- perma_ext = '/' if $config['permalink'] == 'pretty'
- info = { :completed => 0, :skipped => 0 }
- data.each do |item|
- pagename = item['pid']
- pagepath = meta['dir'] + '/' + pagename + '.md'
- if !File.exist?(pagepath)
- File.open(pagepath, 'w') { |file| file.write(item.to_yaml.to_s + 'permalink: /' + meta['dir'] + '/' + pagename + perma_ext + "\n" + 'layout: ' + meta['layout'] + "\n---") }
- info[:completed] += 1
- else
- puts "#{pagename}.md already exits. Skipping."
- info[:skipped] += 1
- end
- end
- return info
- rescue standardError
- puts "#{info[:completed]} pages were generated before failure, most likely a record is missing a valid 'pid' value.".magenta
- exit 1
- end
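The ingest and generate_pages logic removed here now lives in WaxCollection, with valid_pagemaster (lib/helpers.rb) doing the up-front checks. A sketch of the _config.yml entry it expects, shown as the Ruby hash YAML.load_file produces (collection name and values are hypothetical):

    # bundle exec rake wax:pagemaster paintings
    $config['collections']['paintings']
    # => {
    #      'source' => 'paintings.csv',   # must exist at _data/paintings.csv
    #      'layout' => 'painting_page'    # layout basename written into each generated page
    #    }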
data/lib/tasks/s3branch.rake CHANGED
@@ -1,6 +1,7 @@
  require 'jekyll'
  require 'tmpdir'
  require 'fileutils'
+ require 'time'

  namespace :wax do
  desc 'build site with baseurl and publish to s3 branch'
data/lib/wax_collection.rb ADDED
@@ -0,0 +1,75 @@
+ require 'yaml'
+ require 'csv'
+ require 'json'
+ require 'fileutils'
+ require 'colorized_string'
+ require 'helpers'
+
+ include FileUtils
+
+ # WaxCollection class
+ class WaxCollection
+ attr_reader :name, :config, :src, :layout, :dir, :data
+
+ def initialize(collection_name, collection_config, collections_dir)
+ @name = collection_name
+ @config = collection_config
+ @cdir = collections_dir
+ @src = '_data/' + @config['source']
+ @layout = File.basename(@config['layout'], '.*')
+ @dir = @cdir + '_' + @name
+ @data = []
+ @lunr = {}
+ end
+
+ def pagemaster
+ mkdir_p(@dir)
+ ingest
+ detect_duplicates
+ generate_pages
+ end
+
+ def ingest
+ if File.extname(@src) == '.csv'
+ @data = CSV.read(@src, :headers => true, :encoding => 'utf-8').map(&:to_hash)
+ elsif File.extname(@src) == '.json'
+ @data = JSON.parse(File.read(@src).encode("UTF-8"))
+ else
+ puts "File type for #{@src} must be .csv or .json. Please fix and rerun."
+ exit 1
+ end
+ puts "Processing #{src}...."
+ rescue StandardError
+ puts "Cannot load #{src}. check for typos and rebuild.".magenta
+ exit 1
+ end
+
+ def generate_pages
+ perma_ext = $config['permalink'] == 'pretty' ? '/' : '.html'
+ completed = 0
+ skipped = 0
+ data.each do |item|
+ pagename = slug(item['pid'])
+ pagepath = @dir + '/' + pagename + '.md'
+ permalink = '/' + @name + '/' + pagename + perma_ext
+ if File.exist?(pagepath)
+ puts "#{pagename}.md already exits. Skipping."
+ skipped += 1
+ else
+ File.open(pagepath, 'w') { |file| file.write(item.to_yaml.to_s + "permalink: #{permalink}\nlayout: #{@layout}\n---") }
+ completed += 1
+ end
+ end
+ puts "\n#{completed} pages were generated to #{@dir} directory.".cyan
+ puts "\n#{skipped} pre-existing items were skipped.".cyan
+ rescue StandardError
+ abort "#{completed} pages were generated before failure, most likely a record is missing a valid 'pid' value.".magenta
+ end
+
+ def detect_duplicates
+ pids = []
+ @data.each { |d| pids << d['pid'] }
+ duplicates = pids.detect { |p| pids.count(p) > 1 } || []
+ raise "\nYour collection has the following duplicate ids. please fix and rerun.\n#{duplicates}".magenta unless duplicates.empty?
+ end
+ end
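A minimal usage sketch of the new class, reusing the hypothetical 'paintings' configuration above and a record whose pid is 'starry_night' (the .html suffix assumes permalink is not set to 'pretty'):

    collection = WaxCollection.new('paintings', $config['collections']['paintings'], '')
    collection.pagemaster
    # writes _paintings/starry_night.md containing roughly:
    #   ---
    #   pid: starry_night
    #   title: Starry Night
    #   permalink: /paintings/starry_night.html
    #   layout: painting_page
    #   ---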
data/spec/fake_data.rb ADDED
@@ -0,0 +1,43 @@
+ require 'csv'
+ require 'faker'
+ require 'json'
+ require 'rake'
+
+ # make csvs
+ I18n.enforce_available_locales = false
+ Faker::Config.locale = 'zh-CN'
+ Dir.mkdir('_data') unless File.exist?('_data')
+
+ def fake_data(name, type)
+ data = []
+ keys = ['pid']
+ 5.times { keys << slug(Faker::Lovecraft.unique.word) } # keys = pid + 5
+ 5.times do # with 5 records
+ record = {
+ keys[0] => slug(Faker::Dune.unique.character),
+ keys[1] => Faker::Lorem.sentence,
+ keys[2] => Faker::TwinPeaks.quote,
+ keys[3] => Faker::Name.name,
+ keys[4] => Faker::Space.star,
+ keys[5] => Faker::Lovecraft.sentence
+ }
+ data << record
+ $collection_data[name] = { 'keys' => keys, 'type' => type }
+ end
+ data
+ end
+
+ 5.times do |i|
+ name = slug(Faker::RuPaul.unique.queen)
+ if i.even?
+ data = fake_data(name, '.csv')
+ path = '_data/' + name + '.csv'
+ write_csv(path, data)
+ else
+ data = fake_data(name, '.json')
+ path = '_data/' + name + '.json'
+ File.open(path, 'w') { |f| f.write(data.to_json) }
+ end
+ Faker::Dune.unique.clear
+ Faker::Lovecraft.unique.clear
+ end
data/spec/fake_helpers.rb ADDED
@@ -0,0 +1,22 @@
+ require 'csv'
+ require 'colorized_string'
+
+ # global
+ $collection_data = {}
+
+ # helper methods
+ def slug(str)
+ str.downcase.tr(' ', '_').gsub(/[^\w-]/, '')
+ end
+
+ def write_csv(path, hashes)
+ CSV.open(path, 'wb:UTF-8') do |csv|
+ csv << hashes.first.keys
+ hashes.each do |hash|
+ csv << hash.values
+ end
+ end
+ puts "Writing csv data to #{path}."
+ rescue StandardError
+ abort "Cannot write csv data to #{path} for some reason.".magenta
+ end
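write_csv uses the first hash's keys as the header row; a quick sketch with hypothetical rows:

    rows = [
      { 'pid' => 'a1', 'title' => 'First' },
      { 'pid' => 'a2', 'title' => 'Second' }
    ]
    write_csv('_data/example.csv', rows)
    # _data/example.csv then contains:
    #   pid,title
    #   a1,First
    #   a2,Second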
data/spec/fake_site.rb ADDED
@@ -0,0 +1,27 @@
+ require 'fileutils'
+ require 'jekyll'
+ require 'yaml'
+
+ include FileUtils
+
+ site_dir = 'faker_site'
+ mkdir_p(site_dir)
+ image_dir = Dir.glob('spec/data/_iiif')
+ cp_r(image_dir, site_dir)
+ cd(site_dir)
+
+ config_file = {
+ 'title' => 'faker',
+ 'url' => '',
+ 'baseurl' => '',
+ 'exclude' => ['Rakefile']
+ }
+ config_opts = {
+ 'source' => '.',
+ 'destination' => '_site',
+ 'config' => '_config.yml'
+ }
+
+ File.open('_config.yml', 'w') { |f| f.puts(config_file.to_yaml) }
+ File.open('Rakefile', 'w') { |f| f.puts('Dir.glob("../lib/tasks/*.rake").each { |r| load r }') }
+ Jekyll::Site.new(Jekyll.configuration(config_opts)).process
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,7 @@
+ require 'fake_helpers'
+ require 'fake_site'
+ require 'fake_data'
+ require 'test_config'
+ require 'test_pagemaster'
+ require 'test_lunr'
+ require 'test_iiif'
data/spec/test_config.rb ADDED
@@ -0,0 +1,28 @@
+ require 'rake'
+ require 'yaml'
+
+ # run + test wax:config
+ describe 'wax:config' do
+ it 'accesses _config.yml and argvs' do
+ load File.expand_path("../../lib/tasks/config.rake", __FILE__)
+ Rake::Task['wax:config'].invoke
+
+ $collection_data.each { |col| $argv << col[0] }
+ expect($config.length)
+ expect($argv.length)
+
+ # add collection data to config file
+ collection_hash = {}
+ $argv.each do |coll_name|
+ ext = $collection_data[coll_name]['type']
+ collection_hash[coll_name] = {
+ 'source' => coll_name + ext,
+ 'layout' => 'default'
+ }
+ end
+
+ $config['collections'] = collection_hash
+ output = YAML.dump $config
+ File.write('_config.yml', output)
+ end
+ end
data/spec/test_iiif.rb ADDED
@@ -0,0 +1,20 @@
+ require 'csv'
+ require 'fileutils'
+ require 'rake'
+
+ include FileUtils
+
+ describe 'wax:iiif' do
+ it 'generates iiif tiles and data' do
+ images = Dir.glob('./_iiif/source_images/*.jpg')
+ $collection_data.each do |coll|
+ new_dir = './_iiif/source_images/' + coll[0]
+ mkdir_p(new_dir)
+ images.each { |f| cp(File.expand_path(f), new_dir) }
+ end
+ rm_r(images)
+ $argv = [$argv.first] # make tests faster by only running on 1/3 collections
+ load File.expand_path("../../lib/tasks/iiif.rake", __FILE__)
+ Rake::Task['wax:iiif'].invoke
+ end
+ end
data/spec/test_lunr.rb ADDED
@@ -0,0 +1,24 @@
+ require 'rake'
+
+ describe 'wax:lunr' do
+ it 'generates a lunr index' do
+ $config['collections'].each do |collection|
+ name = collection[0]
+ # get info on what to index
+ lunr_hash = {
+ 'content' => false,
+ 'fields' => $collection_data[name]['keys']
+ }
+ # add it to config
+ $config['collections'][name]['lunr_index'] = lunr_hash
+ output = YAML.dump $config
+ File.write('_config.yml', output)
+ end
+ # invoke lunr task
+ load File.expand_path("../../lib/tasks/lunr.rake", __FILE__)
+ Rake::Task['wax:lunr'].invoke
+ # expect a populated index
+ index = File.open('js/lunr-index.js', 'r').read
+ expect(index.length > 1000)
+ end
+ end
data/spec/test_pagemaster.rb ADDED
@@ -0,0 +1,9 @@
+ require 'rake'
+ require 'yaml'
+
+ describe 'wax:pagemaster' do
+ it 'generates pages' do
+ load File.expand_path("../../lib/tasks/pagemaster.rake", __FILE__)
+ Rake::Task['wax:pagemaster'].invoke
+ end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: wax_tasks
  version: !ruby/object:Gem::Version
- version: 0.0.2
+ version: 0.0.3
  platform: ruby
  authors:
  - Marii Nyrop
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-01-29 00:00:00.000000000 Z
+ date: 2018-02-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -144,6 +144,8 @@ extensions: []
  extra_rdoc_files: []
  files:
  - Gemfile
+ - lib/helpers.rb
+ - lib/lunr_index.rb
  - lib/tasks/config.rake
  - lib/tasks/ghbranch.rake
  - lib/tasks/iiif.rake
@@ -151,6 +153,15 @@ files:
  - lib/tasks/pagemaster.rake
  - lib/tasks/s3branch.rake
  - lib/tasks/test.rake
+ - lib/wax_collection.rb
+ - spec/fake_data.rb
+ - spec/fake_helpers.rb
+ - spec/fake_site.rb
+ - spec/spec_helper.rb
+ - spec/test_config.rb
+ - spec/test_iiif.rb
+ - spec/test_lunr.rb
+ - spec/test_pagemaster.rb
  homepage: https://github.com/mnyrop/wax_tasks
  licenses:
  - MIT
@@ -171,8 +182,16 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.6.14
+ rubygems_version: 2.6.9
  signing_key:
  specification_version: 4
  summary: Rake tasks for minimal exhibitions.
- test_files: []
+ test_files:
+ - spec/spec_helper.rb
+ - spec/test_iiif.rb
+ - spec/test_config.rb
+ - spec/test_lunr.rb
+ - spec/fake_helpers.rb
+ - spec/fake_data.rb
+ - spec/fake_site.rb
+ - spec/test_pagemaster.rb