liquidoc 0.12.0.pre.rc2 → 0.12.0.pre.rc6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/liquid/filters/date_filters.rb +107 -0
- data/lib/liquid/filters/grouping_filters.rb +65 -0
- data/lib/liquid/filters/jekyll.rb +488 -0
- data/lib/liquid/filters/url_filters.rb +63 -0
- data/lib/liquid/tags/highlight.rb +131 -0
- data/lib/liquid/tags/jekyll.rb +219 -0
- data/lib/liquid/tags/link.rb +37 -0
- data/lib/liquid/tags/post_url.rb +103 -0
- data/lib/liquidoc/version.rb +1 -1
- data/lib/liquidoc.rb +409 -112
- metadata +39 -3
data/lib/liquidoc.rb
CHANGED
@@ -9,9 +9,11 @@ require 'logger'
 require 'csv'
 require 'crack/xml'
 require 'fileutils'
-require 'jekyll'
 require 'open3'
 require 'highline'
+require 'liquid/tags/jekyll'
+require 'liquid/filters/jekyll'
+require 'sterile'
 
 # ===
 # Table of Contents
@@ -25,7 +27,8 @@ require 'highline'
 # 5a. parse procs def
 # 5b. migrate procs def
 # 5c. render procs def
-#
+# 5d. execute procs def
+# 6. text manipulation filters
 # 7. command/option parser def
 # 8. executive proc calls
 
@@ -39,13 +42,14 @@ require 'highline'
 @build_dir = @build_dir_def
 @configs_dir = @base_dir + '_configs'
 @templates_dir = @base_dir + '_templates/'
+@includes_dirs_def = ['.','_templates','_templates/liquid','_templates/liquid/ops','_templates/ops','theme/_includes','_theme/layouts']
+@includes_dirs = @includes_dirs_def
 @data_dir = @base_dir + '_data/'
 @data_files = nil
 @attributes_file_def = '_data/asciidoctor.yml'
 @attributes_file = @attributes_file_def
 @pdf_theme_file = 'theme/pdf-theme.yml'
 @fonts_dir = 'theme/fonts/'
-@output_filename = 'index'
 @attributes = {}
 @passed_attrs = {}
 @passed_vars = {}
@@ -57,6 +61,7 @@ require 'highline'
 @search_index = false
 @search_index_dry = ''
 @safemode = true
+@render_count = 0
 
 # Instantiate the main Logger object, which is always running
 @logger = Logger.new(STDOUT)
@@ -67,6 +72,7 @@ end
 
 
 FileUtils::mkdir_p("#{@build_dir}") unless File.exists?("#{@build_dir}")
+FileUtils::rm_rf("#{@build_dir}/pre")
 FileUtils::mkdir_p("#{@build_dir}/pre") unless File.exists?("#{@build_dir}/pre")
 
 
@@ -78,7 +84,8 @@ FileUtils::mkdir_p("#{@build_dir}/pre") unless File.exists?("#{@build_dir}/pre")
 def config_build config_file, config_vars={}, data_files=nil, parse=false
 @logger.debug "Using config file #{config_file}."
 validate_file_input(config_file, "config")
-
+config_base = File.read(config_file)
+if config_vars.length > 0 or data_files or parse or config_base.contains_liquid?
 @logger.debug "Config_vars: #{config_vars.length}"
 # If config variables are passed on the CLI, we want to parse the config file
 # and use the parsed version for the rest fo this routine
@@ -92,8 +99,8 @@ def config_build config_file, config_vars={}, data_files=nil, parse=false
 liquify(data_obj, config_file, config_out)
 config_file = config_out
 @logger.debug "Config parsed! Using #{config_out} for build."
-validate_file_input(config_file, "config")
 end
+validate_file_input(config_file, "config")
 begin
 config = YAML.load_file(config_file)
 rescue Exception => ex
@@ -104,6 +111,8 @@ def config_build config_file, config_vars={}, data_files=nil, parse=false
 end
 raise "ConfigFileError"
 end
+# TESTS
+# puts config[0].argify
 cfg = BuildConfig.new(config) # convert the config file to a new object called 'cfg'
 if @safemode
 commands = ""
@@ -144,6 +153,9 @@ def iterate_build cfg
 # Prep & perform a Liquid-parsed build
 @explainer.info build.message
 build.add_data!(build.variables, "vars") if build.variables
+includes_dirs = @includes_dirs
+includes_dirs = build.includes_dirs if build.includes_dirs
+build.add_data!({:includes_dirs=>includes_dirs})
 liquify(build.data, build.template, build.output) # perform the liquify operation
 else # Prep & perform a direct conversion
 # Delete nested data and vars objects
@@ -151,7 +163,6 @@ def iterate_build cfg
 build.data.remove_scope("vars")
 # Add vars from CLI or config args
 build.data.add_data!(build.variables) unless build.variables.empty?
-build.data.add_data!(@passed_vars) unless @passed_vars.empty?
 regurgidata(build.data, build.output)
 end
 end
@@ -168,8 +179,10 @@ def iterate_build cfg
 builds = step.builds
 for bld in builds
 doc = AsciiDocument.new(step.source)
-
-
+if step.data
+attrs = ingest_attributes(step.data)
+doc.add_attrs!(attrs) # Set attributes from the action-level data file
+end
 build = Build.new(bld, type) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type' string
 build.set("backend", derive_backend(doc.type, build.output) ) unless build.backend
 @explainer.info build.message
@@ -208,31 +221,6 @@ def validate_file_input file, type
 end
 end
 
-def validate_config_structure config
-unless config.is_a? Array
-message = "The configuration file is not properly structured."
-@logger.error message
-raise "ConfigStructError"
-else
-if (defined?(config['action'])).nil?
-message = "Every listing in the configuration file needs an action type declaration."
-@logger.error message
-raise "ConfigStructError"
-end
-end
-# TODO More validation needed
-end
-
-def contains_liquid filename
-File.open(filename, "r") do |file_proc|
-file_proc.each_line do |row|
-if row.match(/.*\{\%.*\%\}.*|.*\{\{.*\}\}.*/)
-return true
-end
-end
-end
-end
-
 def explainer_init out=nil
 unless @explainer
 if out == "STDOUT"
@@ -283,12 +271,7 @@ class BuildConfig
 if (defined?(config['compile'][0])) # The config is formatted for vesions < 0.3.0; convert it
 config = deprecated_format(config)
 end
-
-# validations
-unless config.is_a? Array
-raise "ConfigStructError"
-end
-
+validate(config)
 @cfg = config
 end
 
@@ -305,6 +288,13 @@ class BuildConfig
 return config['compile']
 end
 
+def validate config
+unless config.is_a? Array
+raise "ConfigStructError"
+end
+# TODO More validation needed
+end
+
 end #class BuildConfig
 
 class BuildConfigStep
@@ -312,7 +302,7 @@ class BuildConfigStep
 def initialize step
 @step = step
 if (defined?(@step['action'])).nil?
-raise "
+raise "StepStructError"
 end
 @step['options'] = nil unless defined?(step['options'])
 validate()
@@ -413,7 +403,7 @@ class BuildConfigStep
 for req in reqs
 if (defined?(@step[req])).nil?
 @logger.error "Every #{@step['action']}-type in the configuration file needs a '#{req}' declaration."
-raise "
+raise "ConfigStepError"
 end
 end
 end
@@ -435,6 +425,10 @@ class Build
 @build['template']
 end
 
+def includes_dirs
+@build['includes_dirs']
+end
+
 def output
 @build['output']
 end
@@ -588,7 +582,7 @@ class Build
 when "render"
 reqs = ["output"]
 end
-for req in
+for req in reqs
 if (defined?(req)).nil?
 raise "ActionSettingMissing"
 end
@@ -644,14 +638,14 @@ class DataSrc
 datatype = "yml"
 end
 else # If there's no 'type' defined, extract it from the filename and validate it
-unless @datasrc['ext'].downcase.match(/\.yml|\.json|\.xml|\.csv/)
+unless @datasrc['ext'].downcase.match(/\.yml|\.json|\.xml|\.csv|\.adoc/)
 # @logger.error "Data file extension must be one of: .yml, .json, .xml, or .csv or else declared in config file."
 raise "FileExtensionUnknown"
 end
 datatype = self.ext
 datatype = datatype[1..-1] # removes leading dot char
 end
-unless datatype.downcase.match(/yml|json|xml|csv|regex/) # 'type' must be one of these permitted vals
+unless datatype.downcase.match(/yml|json|xml|csv|regex|adoc/) # 'type' must be one of these permitted vals
 # @logger.error "Declared data type must be one of: yaml, json, xml, csv, or regex."
 raise "DataTypeUnrecognized"
 end
@@ -832,6 +826,13 @@ def ingest_data datasrc
 @logger.error "You must supply a regex pattern with your free-form data file."
 raise "MissingRegexPattern"
 end
+when "adoc"
+begin
+doc = Asciidoctor.load_file(datasrc.file)
+data = doc.attributes
+rescue
+@logger.error "Problem with AsciiDoc source file. Attributes not ingested."
+end
 end
 return data
 end
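For context on the new `adoc` data type: Asciidoctor's `load_file` returns a document whose `attributes` hash is what gets ingested. A minimal sketch, assuming a hypothetical `_data/product.adoc` whose header sets two attributes:

    # _data/product.adoc (hypothetical) contains:
    #   :product-name: LiquiDoc
    #   :product-version: 0.12.0
    require 'asciidoctor'

    doc  = Asciidoctor.load_file('_data/product.adoc')  # what ingest_data does for type "adoc"
    data = doc.attributes
    # data includes {"product-name"=>"LiquiDoc", "product-version"=>"0.12.0", ...}
    # along with Asciidoctor's intrinsic attributes (backend, doctype, and so on)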
@@ -863,8 +864,10 @@ def parse_regex data_file, pattern
 end
 
 # Parse given data using given template, generating given output
-def liquify data_obj, template_file, output
+def liquify data_obj, template_file, output="stdout"
 validate_file_input(template_file, "template")
+# inject :includes_dirs as needed
+data_obj.add_data!({'includes_dirs' => @includes_dirs}) unless data_obj.data['includes_dirs']
 begin
 template = File.read(template_file) # reads the template file
 template = Liquid::Template.parse(template) # compiles template
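With this signature change, `output` now defaults to the string "stdout", and `liquify` injects the global `@includes_dirs` into the data object unless a build already supplied one. A rough sketch of the two call patterns, with an illustrative template path:

    data_obj = DataObj.new()
    data_obj.add_data!({"product" => "LiquiDoc"}, "vars")

    liquify(data_obj, '_templates/liquid/example.asciidoc')             # no output arg: rendered text goes to STDOUT
    liquify(data_obj, '_templates/liquid/example.asciidoc', 'out.adoc') # writes the rendered file via generate_file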
@@ -875,17 +878,22 @@ def liquify data_obj, template_file, output
 @logger.error message
 raise message
 end
-unless output
+unless output == "stdout" || @output_type == "stdout"
 output_file = output
 generate_file(rendered, output_file)
 else # if stdout
-puts
+puts rendered
 end
 end
 
 def cli_liquify data_files=nil, template_file=nil, output_file=nil, passed_vars
 # converts command-line options into liquify or regurgidata inputs
 data_obj = DataObj.new()
+if output_file
+output = output_file
+else
+output = "stdout"
+end
 if data_files
 payload = get_payload(data_files)
 data_obj.add_payload!(payload)
@@ -893,16 +901,16 @@ def cli_liquify data_files=nil, template_file=nil, output_file=nil, passed_vars
 if template_file
 # data_obj.add_data!(ingested, "data") if df
 data_obj.add_data!(passed_vars, "vars") if passed_vars
-liquify(data_obj, template_file,
+liquify(data_obj, template_file, output)
 else
 data_obj.remove_scope("vars")
 data_obj.add_data!(passed_vars) if passed_vars
-regurgidata(data_obj,
+regurgidata(data_obj, output)
 end
 end
 
 def regurgidata data_obj, output
-# converts data
+# converts data object from one format directly to another
 raise "UnrecognizedFileExtension" unless File.extname(output).match(/\.yml|\.json|\.xml|\.csv/)
 case File.extname(output)
 when ".yml"
@@ -917,7 +925,6 @@ def regurgidata data_obj, output
 if new_data
 begin
 generate_file(new_data, output)
-# File.open(output, 'w') { |file| file.write(new_data) }
 @logger.info "Data converted and saved to #{output}."
 rescue Exception => ex
 @logger.error "#{ex.class}: #{ex.message}"
@@ -1019,6 +1026,8 @@ def derive_backend type, out_file
 end
 
 def render_doc doc, build
+@render_count += 1
+@logger.info "### Build ##{@render_count}"
 case build.backend
 when "html5", "pdf"
 asciidocify(doc, build)
@@ -1062,6 +1071,7 @@ def asciidocify doc, build
 # Perform the aciidoctor convert
 if build.backend == "pdf"
 @logger.info "Generating PDF. This can take some time..."
+attrs.merge!({"pdf-theme"=>build.style}) if build.style
 end
 Asciidoctor.convert_file(
 doc.index,
@@ -1073,7 +1083,7 @@ def asciidocify doc, build
 safe: "unsafe",
 sourcemap: true,
 verbose: @verbose,
-mkdirs: true
+mkdirs: true,
 )
 @logger.info "Rendered file #{to_file}."
 end
@@ -1089,18 +1099,19 @@ def generate_site doc, build
 attrs.merge!(build.attributes) if build.attributes
 attrs = {"asciidoctor" => {"attributes" => attrs} }
 attrs_yaml = attrs.to_yaml # Convert it all back to Yaml, as we're going to write a file to feed back to Jekyll
-File.open("#{@build_dir}/pre/
-build.add_config_file("#{@build_dir}/pre/
+File.open("#{@build_dir}/pre/attributes_#{@render_count}.yml", 'w') { |file| file.write(attrs_yaml) }
+build.add_config_file("#{@build_dir}/pre/attributes_#{@render_count}.yml")
 config_list = build.prop_files_array.join(',') # flatten the Array back down for the CLI
 quiet = "--quiet" if @quiet || @explicit
 if build.props['arguments']
-opts_args_file = "#{@build_dir}/pre/
+opts_args_file = "#{@build_dir}/pre/jekyll_opts_args_#{@render_count}.yml"
 opts_args = build.props['arguments']
 File.open(opts_args_file, 'w') { |file|
 file.write(opts_args.to_yaml)}
 config_list << ",#{opts_args_file}"
 end
 base_args = "--config #{config_list}"
+base_args += " --trace" if @verbose
 command = "bundle exec jekyll build #{base_args} #{quiet}"
 if @search_index
 # TODO enable config-based admin api key ingest once config is dynamic
@@ -1134,7 +1145,7 @@ def jekyll_serve build
 @logger.debug "Attempting Jekyll serve operation."
 config_file = build.props['files'][0]
 if build.props['arguments']
-opts_args = build.props['arguments'].
+opts_args = build.props['arguments'].argify
 end
 command = "bundle exec jekyll serve --config #{config_file} #{opts_args} --no-watch --skip-initial-build"
 system command
@@ -1176,7 +1187,7 @@ def execute_command cmd
 contents = stdout
 if cmd.options['outfile']
 contents = "#{cmd.options['outfile']['prepend']}\n#{stdout}" if cmd.options['outfile']['prepend']
-contents = "#{stdout}
+contents = "#{stdout}\n#{cmd.options['outfile']['append']}" if cmd.options['outfile']['append']
 generate_file(contents, cmd.options['outfile']['path'])
 end
 if cmd.options['stdout']
@@ -1190,24 +1201,6 @@ end
 # Text manipulation Classes, Modules, procs, etc
 # ===
 
-module HashMash
-
-def to_opts_args
-out = ''
-if self.is_a? Hash # TODO Should also be testing for flatness
-self.each do |opt,arg|
-out = out + " --#{opt} #{arg}"
-end
-end
-return out
-end
-
-end
-
-class Hash
-include HashMash
-end
-
 module ForceArray
 # So we can accept a list string ("item1.yml,item2.yml") or a single item ("item1.yml")
 # and convert to array as needed
@@ -1221,77 +1214,375 @@ module ForceArray
 obj = Array.new.push(obj)
 end
 else
-
+if obj.class == Hash
+obj = obj.to_array
+else
+raise "ForceArrayFail"
+end
 end
 end
 return obj.to_ary
 end
 
+def force_array!
+self.force_array
+end
+
 end
 
 class String
 include ForceArray
-# Adapted from Nikhil Gupta
-# http://nikhgupta.com/code/wrapping-long-lines-in-ruby-for-display-in-source-files/
+# Adapted from Nikhil Gupta
+# http://nikhgupta.com/code/wrapping-long-lines-in-ruby-for-display-in-source-files/
 def wrap options = {}
-width = options.fetch(:width, 76)
-
+width = options.fetch(:width, 76) # length to wrap at
+pre = options.fetch(:prepend, '') # text to prepend
+app = options.fetch(:append, '') # text to append
+chars = pre.size + app.size
 self.strip.split("\n").collect do |line|
-line.length > width ? line.gsub(/(.{1,#{width}})(\s+|$)/, "\\1
-end.map(&:
+line.length + chars.size > width ? line.gsub(/(.{1,#{(width - chars)}})(\s+|$)/, "#{pre}\\1#{app}\n") : "#{pre}#{line}#{app}\n"
+end.map(&:rstrip).join("\n")
 end
 
 def indent options = {}
-
-
+# TODO: does not allow tabs; inserts explicit `\t` string
+syms = options.fetch(:sym, ' ') * options.fetch(:by, 2)
+self.gsub!(/^/m, "#{syms}")
+self.sub!("#{syms}", "") unless options.fetch(:line1, false)
 end
 
-def
-
-
-
-
+def contains_liquid?
+self.each_line do |row|
+if row.match(/.*\{\%.*\%\}.*|.*\{\{.*\}\}.*/)
+return true
+end
+end
+return false
+end
+
+def quote_wrap options = {}
+# When a string contains a certain pattern, wrap it in certain quotes
+# Pass '\s' as pattern to wrap any string that contains 1 or more spaces or tabs
+# pass '.' as pattern to always wrap.
+
+pattern = options.fetch(:pattern, '\s').to_s
+return self unless self.strip.match(/\s/)
+quotes = options.fetch(:quotes, "single")
+case quotes
+when "single"
+wrap = "''"
+when "double"
+wrap = '""'
+when "backtick"
+wrap = "``"
+when "bracket"
+wrap = "[]"
+else
+wrap = quotes
+end
+quotes << wrap[0] unless wrap[1]
+return wrap[0] + self.strip + wrap[1]
 end
 
 end
 
 class Array
 include ForceArray
+
+def to_hash
+struct = {}
+self.each do |p|
+struct.merge!p if p.is_a? Hash
+end
+return struct
+end
+
+# Get all unique values for each item in an array, or each unique value of a desigated
+# parameter in an array of hashes.
+#
+# @input : the object array
+# @property : (optional) parameter in which to select unique values (for hashes)
+def unique_property_values property=nil
+return self.uniq unless property
+new_ary = self.uniq { |i| i[property] }
+out = new_ary.map { |i| i[property] }.compact
+out
+end
+
+def concatenate_property_instances property=String
+# flattens the values of instances of a given property throughout an array of Hashes
+all_arrays = []
+self.each do |i|
+all_arrays << i[property]
+end
+return all_arrays.flatten
+end
+
+def repeated_property_values property=String
+# testing for uniqueness globally among all values in subarrays (list-formatted values) of all instances of the property across all nodes in the parent array
+# returns an array of duplicate items among all the tested arrays
+#
+# Example:
+# array_of_hashes[0]['cue'] = ['one','two','three']
+# array_of_hashes[1]['cue'] = ['three','four','five']
+# array_of_hashes.duplicate_property_values('cue')
+# #=> ['three']
+# Due to the apperance of 'three' in both instances of cue.
+firsts = []
+dupes = []
+self.each do |node|
+return ['non-array property value present'] unless node[property].is_a? Array
+node[property].each do |i|
+dupes << i if firsts.include? i
+firsts << i
+end
+end
+return dupes
+end
+
+end
+
+class Hash
+include ForceArray
+
+def to_array op=nil
+# Converts a hash of key-value pairs to a flat array based on the first tier
+out = []
+self.each do |k,v|
+v = "<RemovedObject>" if v.is_a? Enumerable and op == "flatten"
+out << {k => v}
+end
+return out
+end
+
+def argify options = {}
+# Converts a hash of key-value pairs to command-line option/argument listings
+# Can be called with optional arguments:
+# template :: Liquid-formatted parsing template string
+# Accepts:
+#
+# 'hyph' :: -<key> <value>
+# 'hyphhyph' :: --<key> <value> (default)
+# 'hyphchar' :: -<k> <value>
+# 'dump' :: <key> <value>
+# 'paramequal' :: <key>=<value>
+# 'valonly' :: <value>
+# delim :: Delimiter -- any ASCII characters that separate the arguments
+#
+# For template-based usage, express the variables:
+# opt (the keyname) as {{opt}}
+# arg (the value) as {{arg}}
+# EXAMPLES (my_hash = {"key1"=>"val1", "key2"=>"val2"})
+# my_hash.argify #=> key1 val1 key2 val2
+# my_hash.argify('hyphhyph') #=> --key1 val1 --key2 val2
+# my_hash.argify('paramequal') #=> key1=val1 key2=val2
+# my_hash.argify('-a {{opt}}={{arg}}')#=> -a key1=val1 -a key2=val2
+# my_hash.argify('valonly', '||') #=> val1||val2
+# my_hash.argify("{{opt}} `{{arg}}`") #=> key1 `val1` key2 `val2`
+raise "InvalidObject" unless self.is_a? Hash
+template = options.fetch(:template, 'hyphhyph')
+if template.contains_liquid?
+tp = template # use the passed Liquid template
+else
+case template # use a preset Liquid template by name
+when "dump"
+tp = "{{opt}} {{arg | quote_wrap: 'single', '\s|,' }}"
+when "hyph"
+tp = "-{{opt}} {{arg | quote_wrap: 'single', '\s|,' }}"
+when "hyphhyph"
+tp = "--{{opt}} {{arg | quote_wrap: 'single', '\s|,' }}"
+when "paramequal"
+tp = "{{opt}}={{arg | quote_wrap: 'single', '\s|,' }}"
+when "valonly"
+tp = "{{arg | quote_wrap: 'single', '\s|,' }}"
+else
+return "Liquid: Unrecognized argify template name: #{template}"
+end
+end
+begin
+tpl = Liquid::Template.parse(tp)
+first = true
+out = ''
+self.each do |k,v|
+# establish datasource
+v = "<Object>" if v.is_a? Hash
+v = v.join(',') if v.is_a? Array
+input = {"opt" => k.to_s, "arg" => v.to_s }
+if first
+dlm = ""
+first = false
+else
+dlm = options.fetch(:delim, ' ')
+end
+out += dlm + tpl.render(input)
+end
+rescue
+raise "Argify template processing failed"
+end
+return out
+end
+
+
 end
 
 # Extending Liquid filters/text manipulation
-module
+module LiquiDocFilters
 include Jekyll::Filters
+#
+# sterile-based filters
+#
+
+def to_slug input, delim='-'
+o = input.dup
+opts = {:delimiter=>delim}
+o.to_slug(opts)
+end
+
+def transliterate input
+o = input.dup
+o.transliterate
+end
+
+def smart_format input
+o = input.dup
+o.smart_format
+end
+
+def encode_entities input
+o = input.dup
+o.encode_entities
+end
 
-def
-input.
+def titlecase input
+o = input.dup
+o.titlecase
 end
-
-
+
+def strip_tags input
+o = input.dup
+o.strip_tags
+end
+
+def sterilize input
+o = input.dup
+o.sterilize
 end
-
+
+#
+# Custom Filters
+#
+
+def where_uniq input, property, value
+o = input.where(input, property, value)
+o[0] if o.size == 1
+"No result" unless o.size
+"Multiple results" if o.size > 1
+end
+
+def wrap input, width=80, prepend='', append='', vent=false
+input.wrap(:width => width, :prepend => prepend, :append => append)
+end
+
+def plainwrap input, width=80
+input.wrap(:width => width)
+end
+
+def commentwrap input, width=80, prepend='# '
+input.wrap(:width => width, :pre => prepend)
+end
+
+def unwrap input, token1='&g59h%j1k;', token2='&ru8sf%df;'
 if input
-
-input.gsub(/\n\n/, token).gsub(/\n/, ' ').gsub(token, "\n\n")
+input.gsub(/(.)\n\n/, "\\1#{token1}").gsub(/([\."'])$\n([A-Z\(\_"'])/,"\\1#{token2}\\2").gsub(/\n/, '').gsub(token2,"\n").gsub(token1, "\n\n")
 end
 end
 
-def
-#
-
+def indent_lines input, by=2, sym=' ', line1=false
+input.indent(:by => by, :sym => "#{sym}", :line1 => line1)
+end
+
+def slugify input, delim='-', snip=false
 s = input.to_s.downcase
-s.gsub!(/[^a-
+s.gsub!(/[^a-z0-9]/, delim)
+if snip
+while s.match("#{delim}#{delim}")
+s.gsub!("#{delim}#{delim}", "#{delim}")
+end
+s.gsub!(/^#{delim}+(.*)$/, "\\1")
+s.gsub!(/^(.*)#{delim}+$/, "\\1")
+end
 s
 end
 
+def asciidocify input
+Asciidoctor.convert(input, doctype: "inline")
+end
+
+def quote_wrap input, quotes="''", pattern="\s"
+input.quote_wrap(:quotes => quotes, :pattern => pattern)
+end
+
+def to_cli_args input, template="paramequal", delim=" "
+input.argify(:template => template, :delim => delim)
+end
+
+def hash_to_array input, op=nil
+o = input.dup
+o.to_array(op)
+end
+
+def holds_liquid input
+o = false
+o = true if input.contains_liquid?
+o
+end
+
+def store_list_uniq input, property=nil
+input.unique_property_values(property)
+end
+
+def store_list_concat input, property=String
+input.concatenate_property_instances(property)
+end
+
+def store_list_dupes input, property=String
+input.repeated_property_values(property)
+end
+
 def regexreplace input, regex, replacement=''
+# deprecated in favor of re_replace as of 0.12.0
 input.to_s.gsub(Regexp.new(regex), replacement.to_s)
 end
 
+def replace_regex input, regex, replacement='', multiline=true, global=true
+pattern = Regexp.new(regex, Regexp::MULTILINE) if multiline
+pattern = Regexp.new(regex) unless multiline
+o = input.to_s.gsub(pattern, replacement.to_s) if global
+o = input.to_s.sub(pattern, replacement.to_s) unless global
+o
+end
+
+def match input, regex, multiline=true, global=true
+pattern = Regexp.new(regex, Regexp::MULTILINE) if multiline
+pattern = Regexp.new(regex) unless multiline
+return true if input.to_s.match(pattern)
+return false
+end
+
+def to_yaml input
+o = input.to_yaml
+o = o.gsub(/^\-\-\-$\n/, "")
+o
+end
+
+def to_json input
+o = input.to_json
+o
+end
 end
 
-#
-Liquid::Template.register_filter(
+# Register custom Liquid filters
+Liquid::Template.register_filter(LiquiDocFilters)
 
 # ===
 # Command/options parser
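To illustrate the new Hash#argify options hash (the inline EXAMPLES comments above still show the older positional style) and the to_cli_args filter that exposes the same conversion inside Liquid templates, here is a hedged sketch built on the presets defined in this hunk; the option hash is hypothetical, and values containing whitespace get single-quoted by the quote_wrap filter used in the preset templates:

    opts = {"config" => "_configs/build-docs.yml", "destination" => "public docs"}

    opts.argify                                          #=> --config _configs/build-docs.yml --destination 'public docs'
    opts.argify(:template => "paramequal")               #=> config=_configs/build-docs.yml destination='public docs'
    opts.argify(:template => "valonly", :delim => "||")  #=> _configs/build-docs.yml||'public docs'

    # Inside a Liquid template, the equivalent filter call would be:
    #   {{ settings | to_cli_args: 'hyphhyph', ' ' }}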
@@ -1311,15 +1602,15 @@ command_parser = OptionParser.new do|opts|
 end
 
 # Global Options
-opts.on("-b PATH", "--base
+opts.on("-b PATH", "--base PATH", "The base directory, relative to this script. Defaults to `.`, or pwd." ) do |n|
 @base_dir = n
 end
 
-opts.on("-B PATH", "--build
+opts.on("-B PATH", "--build PATH", "The directory under which LiquiDoc should save automatically preprocessed files. Defaults to #{@base_dir}_build. Can be absolute or relative to the base path (-b/--base=). Do NOT append '/' to the build path." ) do |n|
 @build_dir = n
 end
 
-opts.on("-c", "--config
+opts.on("-c", "--config PATH", "Configuration file, enables preset source, template, and output.") do |n|
 @config_file = @base_dir + n
 end
 
@@ -1329,22 +1620,28 @@ command_parser = OptionParser.new do|opts|
 @data_files = DataFiles.new(data_files)
 end
 
-opts.on("-f PATH", "--from
+opts.on("-f PATH", "--from PATH", "Directory to copy assets from." ) do |n|
 @attributes_file = n
 end
 
-opts.on("-i PATH", "--index
+opts.on("-i PATH", "--index PATH", "An AsciiDoc index file for mapping an Asciidoctor build." ) do |n|
 @index_file = n
 end
 
 opts.on("-o PATH", "--output=PATH", "Output file path for generated content. Ex. path/to/file.adoc. Required unless --config is called.") do |n|
-@
+@output = @base_dir + n
 end
 
-opts.on("-t PATH", "--template
+opts.on("-t PATH", "--template PATH", "Path to liquid template. Required unless --configuration is called." ) do |n|
 @template_file = @base_dir + n
 end
 
+opts.on("--includes PATH[,PATH]", "Paths to directories where includes (partials) can be found." ) do |n|
+n = n.force_array
+# n.map { |p| @base_dir + p }
+@includes_dirs = @includes_dirs.concat n
+end
+
 opts.on("--verbose", "Run verbose debug logging.") do |n|
 @logger.level = Logger::DEBUG
 @verbose = true
@@ -1363,7 +1660,7 @@ command_parser = OptionParser.new do|opts|
 end
 
 opts.on("--stdout", "Puts the output in STDOUT instead of writing to a file.") do
-@
+@output = "stdout"
 end
 
 opts.on("--deploy", "EXPERIMENTAL: Trigger a jekyll serve operation against the destination dir of a Jekyll render step.") do
@@ -1419,7 +1716,7 @@ explainer_init
 unless @config_file
 @logger.debug "Executing config-free build based on API/CLI arguments alone."
 if @data_files
-cli_liquify(@data_files, @template_file, @
+cli_liquify(@data_files, @template_file, @output, @passed_vars)
 end
 if @index_file
 @logger.warn "Rendering via command line arguments is not yet implemented. Use a config file."