liquidoc 0.10.0 → 0.11.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (4) hide show
  1. checksums.yaml +4 -4
  2. data/lib/liquidoc.rb +174 -85
  3. data/lib/liquidoc/version.rb +1 -1
  4. metadata +5 -6
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 8f251c6530c83d406052a8adc81a10048a65be63f7041bb3a8d9e6031514f69a
4
- data.tar.gz: 104ba3413ecc11bbee5afd5064ac818287a2a27164fb6943c65dbb41625359c3
3
+ metadata.gz: 6c893b7f373640a5bab823899f7236665074b94f2922bbb651fdddc859efffa9
4
+ data.tar.gz: b4574f73d3829fb88d005c4340917be3297d0bcc345e6a42870f959b9da69c55
5
5
  SHA512:
6
- metadata.gz: '09bacdea17f0eaa5e2f26db3705031801c05b3730d9ea82b37294351d1acc1a01110621ea301099d7f5b170e334c2aa3be5bace5d2e85f14c2819ffd12bdd5b2'
7
- data.tar.gz: 33da833e1f3b747e87747edf4a731a04aea4bcc4da6ca48a5af8c6d7f4a25049c9ebaf5cbf89f51476e1c107e63ea08b2b5d60fa27fa956b48fa48ab3f14815f
6
+ metadata.gz: 1144cb86dcc7644e54f974a6d303e348f23250aa2015bc44a4b24d6e7627218da5485ca0bf09711136047a1f43df8682fdd504d90bd38f34779c8e52a37954e0
7
+ data.tar.gz: 3914f320195b2e9fe2376ff29d5730a8f17c38518b4f66f5cd6a5ee1a02ed054efe48bc69c25165ed795db2c813532501356db9ca2900ca824136518de9efe82
@@ -40,6 +40,7 @@ require 'highline'
40
40
  @configs_dir = @base_dir + '_configs'
41
41
  @templates_dir = @base_dir + '_templates/'
42
42
  @data_dir = @base_dir + '_data/'
43
+ @data_file = nil
43
44
  @attributes_file_def = '_data/asciidoctor.yml'
44
45
  @attributes_file = @attributes_file_def
45
46
  @pdf_theme_file = 'theme/pdf-theme.yml'
@@ -82,7 +83,9 @@ def config_build config_file, config_vars={}, parse=false
82
83
  # If config variables are passed on the CLI, we want to parse the config file
83
84
  # and use the parsed version for the rest of this routine
84
85
  config_out = "#{@build_dir}/pre/#{File.basename(config_file)}"
85
- liquify(nil,config_file, config_out, config_vars)
86
+ vars = DataObj.new()
87
+ vars.add_data!("vars", config_vars)
88
+ liquify(vars, config_file, config_out)
86
89
  config_file = config_out
87
90
  @logger.debug "Config parsed! Using #{config_out} for build."
88
91
  validate_file_input(config_file, "config")
@@ -124,18 +127,46 @@ def iterate_build cfg
124
127
  type = step.type
125
128
  case type # a switch to evaluate the 'action' parameter for each step in the iteration...
126
129
  when "parse"
130
+ builds = step.builds
131
+ data_obj = DataObj.new()
127
132
  if step.data
128
- data = DataSrc.new(step.data)
133
+ data_files = DataFiles.new(step.data)
134
+ data_files.sources.each do |src|
135
+ begin
136
+ data = ingest_data(src) # Extract data from file
137
+ rescue Exception => ex
138
+ @logger.error "#{ex.class}: #{ex.message}"
139
+ raise "DataFileReadFail (#{src.file})"
140
+ end
141
+ begin # Create build.data
142
+ if data_files.sources.size == 1
143
+ data_obj.add_data!("", data) if data.is_a? Hash
144
+ # Insert arrays into the data. scope, and for backward compatibility, hashes as well
145
+ data_obj.add_data!("data", data)
146
+ else
147
+ data_obj.add_data!(src.name, data) # Insert object under self-named scope
148
+ end
149
+ rescue Exception => ex
150
+ @logger.error "#{ex.class}: #{ex.message}"
151
+ raise "DataIngestFail (#{src.file})"
152
+ end
153
+ end
129
154
  end
130
- builds = step.builds
131
155
  builds.each do |bld|
132
- build = Build.new(bld, type) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type'
156
+ build = Build.new(bld, type, data_obj) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type'
133
157
  if build.template
158
+ # Prep & perform a Liquid-parsed build build
134
159
  @explainer.info build.message
135
- build.add_vars!(@passed_vars) unless @passed_vars.empty?
136
- liquify(data, build.template, build.output, build.variables) # perform the liquify operation
137
- else
138
- regurgidata(data, build.output)
160
+ build.add_data!("vars", build.variables) if build.variables
161
+ liquify(build.data, build.template, build.output) # perform the liquify operation
162
+ else # Prep & perform a direct conversion
163
+ # Delete nested data and vars objects
164
+ build.data.remove_scope("data")
165
+ build.data.remove_scope("vars")
166
+ # Add vars from CLI or config args
167
+ build.data.add_data!("", build.variables) unless build.variables.empty?
168
+ build.data.add_data!("", @passed_vars) unless @passed_vars.empty?
169
+ regurgidata(build.data, build.output)
139
170
  end
140
171
  end
141
172
  when "migrate"
@@ -151,7 +182,7 @@ def iterate_build cfg
151
182
  builds = step.builds
152
183
  for bld in builds
153
184
  doc = AsciiDocument.new(step.source)
154
- attrs = ingest_attributes(step.data) if step.data # Set attributes from from YAML files
185
+ attrs = ingest_attributes(step.data) if step.data # Set attributes from YAML files
155
186
  doc.add_attrs!(attrs) # Set attributes from the action-level data file
156
187
  build = Build.new(bld, type) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type' string
157
188
  build.set("backend", derive_backend(doc.type, build.output) ) unless build.backend
@@ -170,6 +201,10 @@ def iterate_build cfg
170
201
  end
171
202
  end
172
203
 
204
+ # ===
205
+ # Helper procs
206
+ # ===
207
+
173
208
  # Verify files exist
174
209
  def validate_file_input file, type
175
210
  @logger.debug "Validating input file for #{type} file #{file}"
@@ -401,11 +436,12 @@ end #class Action
401
436
 
402
437
  class Build
403
438
 
404
- def initialize build, type
439
+ def initialize build, type, data=DataObj.new
405
440
  build['attributes'] = Hash.new unless build['attributes']
406
441
  build['props'] = build['properties'] if build['properties']
407
442
  @build = build
408
443
  @type = type
444
+ @data = data
409
445
  @build['variables'] = {} unless @build['variables']
410
446
  end
411
447
 
@@ -434,14 +470,23 @@ class Build
434
470
  end
435
471
 
436
472
  def variables
473
+ # Variables added in the config build:variables: param
474
+ # Not for manipulation
437
475
  @build['variables']
438
476
  end
439
477
 
440
- def add_vars! vars
441
- vars.to_h unless vars.is_a? Hash
442
- self.variables.merge!vars
478
+ def data
479
+ @data unless @data.nil?
480
+ end
481
+
482
+ def add_data! obj, scope
483
+ @data.add_data!(obj, scope)
443
484
  end
444
485
 
486
+ # def vars
487
+ # self.data['vars']
488
+ # end
489
+
445
490
  def message
446
491
  # dynamically build a message, possibly appending a reason
447
492
  unless @build['message']
@@ -504,10 +549,6 @@ class Build
504
549
  end
505
550
  end
506
551
 
507
- # def prop_files_list # force the array back to a list of files (for CLI)
508
- # props['files'].force_array if props['files']
509
- # end
510
-
511
552
  def search
512
553
  props['search']
513
554
  end
@@ -572,31 +613,28 @@ end # class Build
572
613
 
573
614
  class DataSrc
574
615
  # initialization means establishing a proper hash for the 'data' param
575
- def initialize datasrc
616
+ def initialize sources
576
617
  @datasrc = {}
577
- @datasrc['file'] = datasrc
618
+ @datasrc['file'] = sources
578
619
  @datasrc['ext'] = ''
579
- @datasrc['type'] = false
580
- @datasrc['pattern'] = false
581
- if datasrc.is_a? Hash # data var is a hash, so add 'ext' to it by extracting it from filename
582
- @datasrc['file'] = datasrc['file']
583
- @datasrc['ext'] = File.extname(datasrc['file'])
584
- if (defined?(datasrc['pattern']))
585
- @datasrc['pattern'] = datasrc['pattern']
620
+ @datasrc['pattern'] = nil
621
+ if sources.is_a? Hash # data var is a hash, so add 'ext' to it by extracting it from filename
622
+ @datasrc['file'] = sources['file']
623
+ @datasrc['ext'] = File.extname(sources['file'])
624
+ if (defined?(sources['pattern']))
625
+ @datasrc['pattern'] = sources['pattern']
586
626
  end
587
- if (defined?(datasrc['type']))
588
- @datasrc['type'] = datasrc['type']
627
+ if (defined?(sources['type']))
628
+ @datasrc['type'] = sources['type']
589
629
  end
590
- else
591
- if datasrc.is_a? String
592
- @datasrc['ext'] = File.extname(datasrc)
593
- else
594
- if datasrc.is_a? Array
595
-
596
- else
597
- raise "InvalidDataSource"
598
- end
630
+ elsif sources.is_a? String
631
+ @datasrc['ext'] = File.extname(sources)
632
+ elsif sources.is_a? Array
633
+ sources.each do |src|
634
+ @datasrc['name'] = File.basename(@datasrc['file'])
599
635
  end
636
+ else
637
+ raise "InvalidDataSource"
600
638
  end
601
639
  end
602
640
 
@@ -608,6 +646,10 @@ class DataSrc
608
646
  @datasrc['ext']
609
647
  end
610
648
 
649
+ def name
650
+ File.basename(self.file,File.extname(self.file))
651
+ end
652
+
611
653
  def type
612
654
  if @datasrc['type'] # if we're carrying a 'type' setting for data, pass it along
613
655
  datatype = @datasrc['type']
@@ -619,7 +661,7 @@ class DataSrc
619
661
  # @logger.error "Data file extension must be one of: .yml, .json, .xml, or .csv or else declared in config file."
620
662
  raise "FileExtensionUnknown"
621
663
  end
622
- datatype = @datasrc['ext']
664
+ datatype = self.ext
623
665
  datatype = datatype[1..-1] # removes leading dot char
624
666
  end
625
667
  unless datatype.downcase.match(/yml|json|xml|csv|regex/) # 'type' must be one of these permitted vals
@@ -632,6 +674,68 @@ class DataSrc
632
674
  def pattern
633
675
  @datasrc['pattern']
634
676
  end
677
+ end # class DataSrc
678
+
679
+ # DataFiles
680
+ class DataFiles
681
+ # Accepts a single String, Hash, or Array
682
+ # String must be a filename
683
+ # Hash must contain :file and optionally :type and :pattern
684
+ # Array must contain filenames as strings
685
+ # Returns array of DataSrc objects
686
+ def initialize data_sources
687
+ @data_sources = []
688
+ if data_sources.is_a? Array
689
+ data_sources.each do |src|
690
+ @data_sources << DataSrc.new(src)
691
+ end
692
+ else # data_sources is String or Hash
693
+ @data_sources[0] = DataSrc.new(data_sources)
694
+ end
695
+ @src_class = data_sources.class
696
+ end
697
+
698
+ def sources
699
+ @data_sources
700
+ end
701
+
702
+ def type
703
+ # returns the original class of the object used to init this obj
704
+ @src_class
705
+ end
706
+
707
+ end
708
+
709
+ class DataObj
710
+ # DataObj
711
+ #
712
+ # Scoped variables for feeding a Liquid parsing operation
713
+ def initialize
714
+ @data = {"vars" => {}}
715
+ end
716
+
717
+ def add_data! scope="", data
718
+ # Merges data into existing scope or creates a new scope
719
+ if scope.empty? # store new object at root of this object
720
+ self.data.merge!data
721
+ else # store new object as a subordinate, named object
722
+ if self.data.key?(scope) # merge into existing key
723
+ self.data[scope].merge!data
724
+ else # create a new key named after the scope
725
+ scoped_hash = { scope => data }
726
+ self.data.merge!scoped_hash
727
+ end
728
+ end
729
+ end
730
+
731
+ def data
732
+ @data
733
+ end
734
+
735
+ def remove_scope scope
736
+ self.data.delete(scope)
737
+ end
738
+
635
739
  end
636
740
 
637
741
  class AsciiDocument
@@ -660,31 +764,15 @@ class AsciiDocument
660
764
  end
661
765
  end
662
766
 
663
- class AsciiDoctorConfig
664
- def initialize out, type, back
665
-
666
- end
667
- end
668
-
669
767
  # ===
670
768
  # Action-specific procs
671
769
  # ===
672
770
  # PARSE-type build procs
673
771
  # ===
674
772
 
675
- # Get data
676
- def get_data datasrc
677
- @logger.debug "Executing liquify parsing operation."
678
- if datasrc.is_a? String
679
- datasrc = DataSrc.new(datasrc)
680
- end
681
- validate_file_input(datasrc.file, "data")
682
- return ingest_data(datasrc)
683
- end
684
-
685
773
  # Pull in a semi-structured data file, converting contents to a Ruby hash
686
774
  def ingest_data datasrc
687
- raise "InvalidDataObject" unless datasrc.is_a? Object
775
+ raise "InvalidDataSrcObject" unless datasrc.is_a? DataSrc
688
776
  case datasrc.type
689
777
  when "yml"
690
778
  begin
@@ -724,9 +812,6 @@ def ingest_data datasrc
724
812
  raise "MissingRegexPattern"
725
813
  end
726
814
  end
727
- if data.is_a? Array
728
- data = {"data" => data}
729
- end
730
815
  return data
731
816
  end
732
817
 
@@ -757,29 +842,12 @@ def parse_regex data_file, pattern
757
842
  end
758
843
 
759
844
  # Parse given data using given template, generating given output
760
- def liquify datasrc, template_file, output, variables=nil
761
- if datasrc
762
- input = get_data(datasrc)
763
- nested = { "data" => get_data(datasrc)}
764
- input.merge!nested
765
- end
766
- if variables
767
- if input
768
- input.merge!variables
769
- else
770
- input = variables
771
- end
772
- end
773
- @logger.error "Parse operations need at least a data file or variables." unless input
845
+ def liquify data_obj, template_file, output
774
846
  validate_file_input(template_file, "template")
775
- if variables
776
- vars = { "vars" => variables }
777
- input.merge!vars
778
- end
779
847
  begin
780
848
  template = File.read(template_file) # reads the template file
781
849
  template = Liquid::Template.parse(template) # compiles template
782
- rendered = template.render(input) # renders the output
850
+ rendered = template.render(data_obj.data) # renders the output
783
851
  rescue Exception => ex
784
852
  message = "Problem rendering Liquid template. #{template_file}\n" \
785
853
  "#{ex.class} thrown. #{ex.message}"
@@ -794,14 +862,33 @@ def liquify datasrc, template_file, output, variables=nil
794
862
  end
795
863
  end
796
864
 
797
- def regurgidata datasrc, output
798
- data = get_data(datasrc)
865
+ def cli_liquify data_file=nil, template_file=nil, output_file=nil, passed_vars
866
+ # converts command-line options into liquify or regurgidata inputs
867
+ data_obj = DataObj.new()
868
+ if data_file
869
+ df = DataFiles.new(data_file)
870
+ ingested = ingest_data(df.sources[0])
871
+ data_obj.add_data!("", ingested)
872
+ end
873
+ if template_file
874
+ data_obj.add_data!("data", ingested) if df
875
+ data_obj.add_data!("vars", passed_vars) if passed_vars
876
+ liquify(data_obj, template_file, output_file)
877
+ else
878
+ data_obj.remove_scope("vars")
879
+ data_obj.add_data!("", passed_vars) if passed_vars
880
+ regurgidata(data_obj, output_file)
881
+ end
882
+ end
883
+
884
+ def regurgidata data_obj, output
885
+ # converts data files from one format directly to another
799
886
  raise "UnrecognizedFileExtension" unless File.extname(output).match(/\.yml|\.json|\.xml|\.csv/)
800
887
  case File.extname(output)
801
888
  when ".yml"
802
- new_data = data.to_yaml
889
+ new_data = data_obj.data.to_yaml
803
890
  when ".json"
804
- new_data = data.to_json
891
+ new_data = data_obj.data.to_json
805
892
  when ".xml"
806
893
  @logger.warn "XML output not yet implemented."
807
894
  when ".csv"
@@ -809,9 +896,11 @@ def regurgidata datasrc, output
809
896
  end
810
897
  if new_data
811
898
  begin
812
- File.open(output, 'w') { |file| file.write(new_data) }
899
+ generate_file(new_data, output)
900
+ # File.open(output, 'w') { |file| file.write(new_data) }
813
901
  @logger.info "Data converted and saved to #{output}."
814
- rescue
902
+ rescue Exception => ex
903
+ @logger.error "#{ex.class}: #{ex.message}"
815
904
  raise "FileWriteError"
816
905
  end
817
906
  end
@@ -879,7 +968,7 @@ def ingest_attributes attr_file
879
968
  begin
880
969
  new_attrs = new_attrs[block_name]
881
970
  rescue
882
- raise "InvalidAttributesBlock"
971
+ raise "InvalidAttributesBlock (#{attr_file}:#{block_name})"
883
972
  end
884
973
  end
885
974
  rescue Exception => ex
@@ -1308,7 +1397,7 @@ explainer_init
1308
1397
  unless @config_file
1309
1398
  @logger.debug "Executing config-free build based on API/CLI arguments alone."
1310
1399
  if @data_file
1311
- liquify(@data_file, @template_file, @output_file, @passed_vars)
1400
+ cli_liquify(@data_file, @template_file, @output_file, @passed_vars)
1312
1401
  end
1313
1402
  if @index_file
1314
1403
  @logger.warn "Rendering via command line arguments is not yet implemented. Use a config file."
@@ -1,3 +1,3 @@
1
1
  module Liquidoc
2
- VERSION = "0.10.0"
2
+ VERSION = "0.11.0"
3
3
  end
metadata CHANGED
@@ -1,27 +1,27 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: liquidoc
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.10.0
4
+ version: 0.11.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Brian Dominick
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2019-03-28 00:00:00.000000000 Z
11
+ date: 2019-04-11 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: bundler
15
15
  requirement: !ruby/object:Gem::Requirement
16
16
  requirements:
17
- - - "~>"
17
+ - - ">="
18
18
  - !ruby/object:Gem::Version
19
19
  version: '1.15'
20
20
  type: :development
21
21
  prerelease: false
22
22
  version_requirements: !ruby/object:Gem::Requirement
23
23
  requirements:
24
- - - "~>"
24
+ - - ">="
25
25
  - !ruby/object:Gem::Version
26
26
  version: '1.15'
27
27
  - !ruby/object:Gem::Dependency
@@ -197,8 +197,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
197
197
  - !ruby/object:Gem::Version
198
198
  version: 2.7.0
199
199
  requirements: []
200
- rubyforge_project:
201
- rubygems_version: 2.7.6
200
+ rubygems_version: 3.0.3
202
201
  signing_key:
203
202
  specification_version: 4
204
203
  summary: A highly configurable command-line tool for parsing data and content in common