liquidoc 0.6.0 → 0.7.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: d93d2ba43725019138268b7cbc3e90df6a8edf7f
- data.tar.gz: 4ae07aca59730f64a1e9769d9c2c2c964027d0ba
+ metadata.gz: 6066fed122738d3eb82ad92365208357bba0fc97
+ data.tar.gz: 55acbe02d698fea8c72ebaae61daa8f3a23584cc
  SHA512:
- metadata.gz: 94f1db1b4a534a2ac0630290286ed98e133d4412fc2a42e4121c4733a751219f76713c7992e8bf4bb128d30667c5343edc658f1729b5dd48557652ec69bac2f0
- data.tar.gz: 0a5fbaf300323cc475ca61ab1b2ddb475b9a4ca91944bdadf34c10a83373e9a7f462efc4dc39c3fed3755fb575c680c7c408e607703e28e4127ceacd06ca5762
+ metadata.gz: 460472382d8369294a1857b16e1444610d540d5805ba95c7ceb72faa46962305720f45fdf7a9d5d65c724cfe541154a0cbf46853ca0a17d914dca62c9314ffac
+ data.tar.gz: d234c41558f62429e2d23f06bafc01bc9db7302933d9ca5e09f58b0d8679a0a5caf90c80a98564ddc053b47b3aae7ced2d1dd062887d7426747a508906759284
lib/liquidoc.rb CHANGED
@@ -1,4 +1,4 @@
- require "liquidoc"
+ require 'liquidoc'
  require 'yaml'
  require 'json'
  require 'optparse'
@@ -84,28 +84,29 @@ def iterate_build cfg
  builds = step.builds
  for bld in builds
  build = Build.new(bld, type) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type'
- liquify(data, build.template, build.output) # perform the liquify operation
+ if build.template
+ liquify(data, build.template, build.output) # perform the liquify operation
+ else
+ regurgidata(data, build.output)
+ end
  end
  when "migrate"
  inclusive = true
  inclusive = step.options['inclusive'] if defined?(step.options['inclusive'])
  copy_assets(step.source, step.target, inclusive)
  when "render"
- if defined?(step.data) # if we're passing attributes as a YAML file, let's ingest that up front
- attrs = ingest_attributes(step.data)
- else
- attrs = {}
- end
- validate_file_input(step.source, "source")
+ validate_file_input(step.source, "source") if step.source
  doc = AsciiDocument.new(step.source)
- doc.add_attrs!(attrs)
+ attrs = ingest_attributes(step.data) if step.data # Set attributes in from YAML files
+ doc.add_attrs!(attrs) # Set attributes from the action-level data file
  builds = step.builds
  for bld in builds
- build = Build.new(bld, type) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type'
- asciidocify(doc, build) # perform the liquify operation
+ build = Build.new(bld, type) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type' string
+ render_doc(doc, build) # perform the render operation
  end
  when "deploy"
108
- @logger.warn "Deploy actions not yet implemented."
108
+ @logger.warn "Deploy actions are limited and experimental experimental."
109
+ jekyll_serve(build)
109
110
  else
110
111
  @logger.warn "The action `#{type}` is not valid."
111
112
  end
@@ -221,7 +222,7 @@ class BuildConfigStep
  when "migrate"
  reqs = ["source,target"]
  when "render"
- reqs = ["source,builds"]
+ reqs = ["builds"]
  end
  for req in reqs
  if (defined?(@step[req])).nil?
@@ -236,6 +237,8 @@ end #class Action
  class Build

  def initialize build, type
+ build['attributes'] = Hash.new unless build['attributes']
+ build['props'] = build['properties'] if build['properties']
  @build = build
  @type = type
  end
@@ -260,14 +263,61 @@ class Build
  @build['backend']
  end

+ def props
+ @build['props']
+ end
+
+ def prop_files_array
+ if props
+ if props['files']
+ begin
+ props['files'].force_array if props['files']
+ rescue Exception => ex
+ raise "PropertiesFilesArrayError: #{ex}"
+ end
+ end
+ else
+ Array.new
+ end
+ end
+
+ # def prop_files_list # force the array back to a list of files (for CLI)
+ # props['files'].force_array if props['files']
+ # end
+
+ # NOTE this section repeats in Class.AsciiDocument
  def attributes
  @build['attributes']
  end

+ def add_attrs! attrs
+ begin
+ attrs.to_h unless attrs.is_a? Hash
+ self.attributes.merge!attrs
+ rescue
+ raise "InvalidAttributesFormat"
+ end
+ end
+
  def set key, val
  @build[key] = val
  end

+ def self.set key, val
+ @build[key] = val
+ end
+
+ def add_config_file config_file
+ @build['props'] = Hash.new unless @build['props']
+ @build['props']['files'] = Array.new unless @build['props']['files']
+ begin
+ files_array = @build['props']['files'].force_array
+ @build['props']['files'] = files_array.push(config_file)
+ rescue
+ raise "PropertiesFilesArrayError"
+ end
+ end
+
  def validate
  reqs = []
  case self.type
@@ -290,7 +340,7 @@ class DataSrc
  def initialize datasrc
  @datasrc = {}
  @datasrc['file'] = datasrc
- @datasrc['ext'] = File.extname(datasrc)
+ @datasrc['ext'] = ''
  @datasrc['type'] = false
  @datasrc['pattern'] = false
  if datasrc.is_a? Hash # data var is a hash, so add 'ext' to it by extracting it from filename
@@ -302,8 +352,12 @@ class DataSrc
  if (defined?(datasrc['type']))
  @datasrc['type'] = datasrc['type']
  end
- else # datasrc is neither String nor Hash
- raise "InvalidDataSource"
+ else
+ if datasrc.is_a? String
+ @datasrc['ext'] = File.extname(datasrc)
+ else # datasrc is neither string nor hash
+ raise "InvalidDataSource"
+ end
  end
  end

@@ -342,9 +396,9 @@ class DataSrc
  end

  class AsciiDocument
- def initialize map, type='article'
- @index = map
- @attributes = {}
+ def initialize index, type='article'
+ @index = index
+ @attributes = {} # We start with clean attributes to delay setting those in the config > build step
  @type = type
  end

@@ -352,6 +406,7 @@ class AsciiDocument
  @index
  end

+ # NOTE this section repeats in Class.AsciiDocument
  def add_attrs! attrs
  raise "InvalidAttributesFormat" unless attrs.is_a?(Hash)
  self.attributes.merge!attrs
@@ -378,6 +433,16 @@ end
  # PARSE-type build procs
  # ===

+ # Get data
+ def get_data datasrc
+ @logger.debug "Executing liquify parsing operation."
+ if datasrc.is_a? String
+ datasrc = DataSrc.new(datasrc)
+ end
+ validate_file_input(datasrc.file, "data")
+ return ingest_data(datasrc)
+ end
+
  # Pull in a semi-structured data file, converting contents to a Ruby hash
  def ingest_data datasrc
  # Must be passed a proper data object (there must be a better way to validate arg datatypes)
@@ -406,15 +471,13 @@ def ingest_data datasrc
  @logger.error "There was a problem with the data file. #{ex.message}"
  end
  when "csv"
- output = []
+ data = []
  i = 0
  begin
  CSV.foreach(datasrc.file, headers: true, skip_blanks: true) do |row|
- output[i] = row.to_hash
+ data[i] = row.to_hash
  i = i+1
  end
- output = {"data" => output}
- data = output
  rescue
  @logger.error "The CSV format is invalid."
  end
@@ -426,6 +489,10 @@ def ingest_data datasrc
  raise "MissingRegexPattern"
  end
  end
+ if data.is_a? Array
+ data = {"data" => data}
+ end
+ return data
  end

  def parse_regex data_file, pattern
@@ -456,13 +523,8 @@ end

  # Parse given data using given template, generating given output
  def liquify datasrc, template_file, output
- @logger.debug "Executing liquify parsing operation."
- if datasrc.is_a? String
- datasrc = DataSrc.new(datasrc)
- end
- validate_file_input(datasrc.file, "data")
+ data = get_data(datasrc)
  validate_file_input(template_file, "template")
- data = ingest_data(datasrc)
  begin
  template = File.read(template_file) # reads the template file
  template = Liquid::Template.parse(template) # compiles template
@@ -494,6 +556,29 @@ def liquify datasrc, template_file, output
  end
  end

+ def regurgidata datasrc, output
+ data = get_data(datasrc)
+ raise "UnrecognizedFileExtension" unless File.extname(output).match(/\.yml|\.json|\.xml|\.csv/)
+ case File.extname(output)
+ when ".yml"
+ new_data = data.to_yaml
+ when ".json"
+ new_data = data.to_json
+ when ".xml"
+ @logger.warn "XML output not yet implemented."
+ when ".csv"
+ @logger.warn "CSV output not yet implemented."
+ end
+ if new_data
+ begin
+ File.open(output, 'w') { |file| file.write(new_data) }
+ @logger.info "Data converted and saved to #{output}."
+ rescue
+ raise "FileWriteError"
+ end
+ end
+ end
+
  # ===
  # MIGRATE-type procs
  # ===
@@ -527,9 +612,9 @@ end

  # Gather attributes from one or more fixed attributes files
  def ingest_attributes attr_file
- file_array = attr_file.split(",")
+ attr_files_array = attr_file.force_array
  attrs = {}
- for f in file_array
+ attr_files_array.each do |f|
  if f.include? ":"
  file = f.split(":")
  filename = file[0]
@@ -571,38 +656,56 @@ def derive_backend type, out_file
  return backend
  end

+ def render_doc doc, build
+ build.set("backend", derive_backend(doc.type, build.output) ) unless build.backend
+ case build.backend
+ when "html5", "pdf"
+ asciidocify(doc, build)
+ when "jekyll"
+ generate_site(doc, build)
+ else
+ raise "UnrecognizedBackend"
+ end
+ end
+
  def asciidocify doc, build
  @logger.debug "Executing Asciidoctor render operation for #{build.output}."
  to_file = build.output
  unless doc.type == build.doctype
- if build.doctype.nil?
+ if build.doctype.nil? # set a default doctype equal to our LiquiDoc action doc type
  build.set("doctype", doc.type)
  end
  end
- back = derive_backend(doc.type, build.output)
- unless build.style.nil?
- case back
- when "pdf"
- doc.add_attrs!({"pdf-style"=>build.style})
- when "html5"
- doc.add_attrs!({"stylesheet"=>build.style})
+ # unfortunately we have to treat attributes accumilation differently for Jekyll vs Asciidoctor
+ attrs = doc.attributes # Start with attributes added at the action level; no more writing to doc obj
+ # Handle properties files array as attributes files and
+ # add the ingested attributes to local var
+ begin
+ if build.prop_files_array
+ ingested = ingest_attributes(build.prop_files_array)
+ attrs.merge!(ingested)
  else
- raise "UnrecognizedBackend"
+ puts build.prop_files_array
  end
+ rescue Exception => ex
+ @logger.warn "Attributes failed to merge. #{ex}" # Shd only trigger if build.props exists
+ raise
+ end
+ if build.backend == "html5" # Insert a stylesheet
+ attrs.merge!({"stylesheet"=>build.style}) if build.style
  end
  # Add attributes from config file build section
- doc.add_attrs!(build.attributes.to_h)
+ attrs.merge!(build.attributes) # Finally merge attributes from the build step
  # Add attributes from command-line -a args
- doc.add_attrs!(@passed_attrs)
- @logger.debug "Final pre-parse attributes: #{doc.attributes}"
+ @logger.debug "Final pre-parse attributes: #{attrs.to_yaml}"
  # Perform the aciidoctor convert
- unless back == "pdf"
+ unless build.backend == "pdf"
  Asciidoctor.convert_file(
  doc.index,
  to_file: to_file,
- attributes: doc.attributes,
+ attributes: attrs,
  require: "pdf",
- backend: back,
+ backend: build.backend,
  doctype: build.doctype,
  safe: "unsafe",
  sourcemap: true,
@@ -610,19 +713,102 @@ def asciidocify doc, build
  mkdirs: true
  )
  else # For PDFs, we're calling the asciidoctor-pdf CLI, as the main dependency doesn't seem to perform the same way
- attributes = '-a ' + doc.attributes.map{|k,v| "#{k}='#{v}'"}.join(' -a ')
- command = "asciidoctor-pdf -o #{to_file} -b pdf -d #{build.doctype} -S unsafe #{attributes} -a no-header-footer --trace #{doc.index}"
+ attrs = '-a ' + attrs.map{|k,v| "#{k}='#{v}'"}.join(' -a ')
+ command = "asciidoctor-pdf -o #{to_file} -b pdf -d #{build.doctype} -S unsafe #{attrs} -a no-header-footer --trace #{doc.index}"
+ @logger.info "Generating PDF. This can take some time..."
  @logger.debug "Running #{command}"
  system command
  end
+ @logger.debug "AsciiDoc attributes: #{doc.attributes}"
  @logger.info "Rendered file #{to_file}."
  end

+ def generate_site doc, build
+ case build.backend
+ when "jekyll"
+ attrs = doc.attributes
+ build.add_config_file("_config.yml") unless build.prop_files_array
+ jekyll_config = YAML.load_file(build.prop_files_array[0]) # load the first Jekyll config file locally
+ attrs.merge! ({"base_dir" => jekyll_config['source']}) # Sets default Asciidoctor base_dir to == Jekyll root
+ # write all AsciiDoc attributes to a config file for Jekyll to ingest
+ attrs.merge!(build.attributes) if build.attributes
+ attrs = {"asciidoctor" => {"attributes" => attrs} }
+ attrs_yaml = attrs.to_yaml # Convert it all back to Yaml, as we're going to write a file to feed back to Jekyll
+ FileUtils::mkdir_p("build/pre") unless File.exists?("build/pre")
+ File.open("build/pre/_attributes.yml", 'w') { |file| file.write(attrs_yaml) }
+ build.add_config_file("build/pre/_attributes.yml")
+ config_list = build.prop_files_array.join(',') # flatten the Array back down for the CLI
+ opts_args = ""
+ if build.props['arguments']
+ opts_args = build.props['arguments'].to_opts_args
+ end
+ command = "bundle exec jekyll build --config #{config_list} #{opts_args}"
+ end
+ @logger.info "Running #{command}"
+ @logger.debug "AsciiDoc attributes: #{doc.attributes.to_yaml} "
+ system command
+ jekyll_serve(build) if @jekyll_serve
+ end
+
  # ===
- # Text manipulation Classes, Modules, filters, etc
+ # DEPLOY procs
  # ===

+ def jekyll_serve build
+ # Locally serve Jekyll as per the primary Jekyll config file
+ config_file = build.props['files'][0]
+ if build.props['arguments']
+ opts_args = build.props['arguments'].to_opts_args
+ end
+ command = "bundle exec jekyll serve --config #{config_file} #{opts_args} --no-watch --skip-initial-build"
+ system command
+ end
+
+ # ===
+ # Text manipulation Classes, Modules, procs, etc
+ # ===
+
+ module HashMash
+
+ def to_opts_args
+ out = ''
+ if self.is_a? Hash # TODO Should also be testing for flatness
+ self.each do |opt,arg|
+ out = out + " --#{opt} #{arg}"
+ end
+ end
+ return out
+ end
+
+ end
+
+ class Hash
+ include HashMash
+ end
+
+ module ForceArray
+ # So we can accept a list string ("item1.yml,item2.yml") or a single item ("item1.yml")
+ # and convert to array as needed
+ def force_array
+ obj = self
+ unless obj.class == Array
+ if obj.class == String
+ if obj.include? ","
+ obj = obj.split(",") # Will even force a string with no commas to a 1-item array
+ else
+ obj = Array.new.push(obj)
+ end
+ else
+ raise "ForceArrayFail"
+ end
+ end
+ return obj.to_ary
+ end
+
+ end
+
  class String
+ include ForceArray
  # Adapted from Nikhil Gupta
  # http://nikhgupta.com/code/wrapping-long-lines-in-ruby-for-display-in-source-files/
  def wrap options = {}
@@ -647,6 +833,10 @@ class String

  end

+ class Array
+ include ForceArray
+ end
+
  # Extending Liquid filters/text manipulation
  module CustomFilters
  def plainwrap input
@@ -675,7 +865,6 @@ end
  # register custom Liquid filters
  Liquid::Template.register_filter(CustomFilters)

-
  # ===
  # Command/options parser
  # ===
@@ -731,6 +920,14 @@ command_parser = OptionParser.new do|opts|
  @output_type = "stdout"
  end

+ opts.on("--clean PATH", "Force deletes the designated directory and all its contents WITHOUT WARNING.") do |n|
+ @clean_dir = n
+ end
+
+ opts.on("--deploy", "EXPERIMENTAL: Trigger a jekyll serve operation against the destination dir of a Jekyll render step.") do
+ @jekyll_serve = true
+ end
+
  opts.on("-h", "--help", "Returns help.") do
  puts opts
  exit
@@ -747,7 +944,9 @@ command_parser.parse!
  # ===
  # Execute
  # ===
-
+ if @clean_dir
+ FileUtils.remove_dir(@clean_dir)
+ end
  unless @config_file
  if @data_file
  liquify(@data_file, @template_file, @output_file)
lib/liquidoc/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Liquidoc
- VERSION = "0.6.0"
+ VERSION = "0.7.0"
  end
lib/yaml_plus.rb ADDED
@@ -0,0 +1,60 @@
+ require 'safe_yaml/load'
+ module YamlPlus
+
+ def self.parse data
+ puts data
+ return
+ # self.new.load((::SafeYAML.load raw_data), theme_data)
+ # input_data ||= ::OpenStruct.new
+ # return input_data unless ::Hash === data
+ # data.inject(input_data) {|data, (key, val)| process_entry key, val, data }
+ # input_data.base_align ||= 'left'
+ # input_data
+ end
+
+ def process_entry key, val, data
+ if ::Hash === val
+ val.each do |key2, val2|
+ process_entry %(#{key}_#{key2.tr '-', '_'}), val2, data
+ end
+ else
+ data[key] = evaluate val, data
+ end
+ data
+ end
+
+ def evaluate expr, vars
+ case expr
+ when ::String
+ evaluate_math(expand_vars expr, vars)
+ when ::Array
+ expr.map {|e| evaluate e, vars }
+ else
+ expr
+ end
+ end
+
+ def expand_vars expr, vars
+ if (idx = (expr.index '$'))
+ if idx == 0 && expr =~ LoneVariableRx
+ if vars.respond_to? $1
+ vars[$1]
+ else
+ warn %(asciidoctor: WARNING: unknown variable reference: $#{$1})
+ expr
+ end
+ else
+ expr.gsub(VariableRx) {
+ if vars.respond_to? $1
+ vars[$1]
+ else
+ warn %(asciidoctor: WARNING: unknown variable reference: $#{$1})
+ $&
+ end
+ }
+ end
+ else
+ expr
+ end
+ end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: liquidoc
  version: !ruby/object:Gem::Version
- version: 0.6.0
+ version: 0.7.0
  platform: ruby
  authors:
  - Brian Dominick
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-01-27 00:00:00.000000000 Z
+ date: 2018-02-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -51,6 +51,7 @@ files:
  - bin/liquidoc
  - lib/liquidoc.rb
  - lib/liquidoc/version.rb
+ - lib/yaml_plus.rb
  homepage: https://github.com/scalingdata/liquidoc
  licenses:
  - MIT