liquidoc 0.11.0 → 0.12.0.pre.rc5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/liquidoc.rb +104 -62
- data/lib/liquidoc/version.rb +1 -1
- metadata +17 -17
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 40a1c127ce84efe07e6762641c2cc16d8248770cf8c8b2e65f5e1ae299ac5fa4
+data.tar.gz: 35436583f8eb40766990bc1740ef60541160c7d2c4f67b1c71277e1f6f653d68
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 0ad1a76573b551921a54b4595a79621fbbef203f4506882ac30baa3fc71fb40a8a735981340e1f7c70755fb9f660a1aca71ebe4a04fcff56325fc7adbd62c0ee
+data.tar.gz: 60072de6a7cd07e61dcbf73c43d88d6d816c0509400247c0ba2326a0f4d6eb80c4ad9084563ac34cfad71b4bbf9a0eab2433af7f28aa4ba8fd7afca5fe67dd2f
data/lib/liquidoc.rb
CHANGED
@@ -40,12 +40,11 @@ require 'highline'
 @configs_dir = @base_dir + '_configs'
 @templates_dir = @base_dir + '_templates/'
 @data_dir = @base_dir + '_data/'
-@
+@data_files = nil
 @attributes_file_def = '_data/asciidoctor.yml'
 @attributes_file = @attributes_file_def
 @pdf_theme_file = 'theme/pdf-theme.yml'
 @fonts_dir = 'theme/fonts/'
-@output_filename = 'index'
 @attributes = {}
 @passed_attrs = {}
 @passed_vars = {}
@@ -57,6 +56,7 @@ require 'highline'
 @search_index = false
 @search_index_dry = ''
 @safemode = true
+@render_count = 0
 
 # Instantiate the main Logger object, which is always running
 @logger = Logger.new(STDOUT)
@@ -67,6 +67,7 @@ end
 
 
 FileUtils::mkdir_p("#{@build_dir}") unless File.exists?("#{@build_dir}")
+FileUtils::rm_rf("#{@build_dir}/pre")
 FileUtils::mkdir_p("#{@build_dir}/pre") unless File.exists?("#{@build_dir}/pre")
 
 
@@ -75,17 +76,21 @@ FileUtils::mkdir_p("#{@build_dir}/pre") unless File.exists?("#{@build_dir}/pre")
 # ===
 
 # Establish source, template, index, etc details for build jobs from a config file
-def config_build config_file, config_vars={}, parse=false
+def config_build config_file, config_vars={}, data_files=nil, parse=false
 @logger.debug "Using config file #{config_file}."
 validate_file_input(config_file, "config")
-if config_vars.length > 0 or parse or contains_liquid(config_file)
+if config_vars.length > 0 or data_files or parse or contains_liquid(config_file)
 @logger.debug "Config_vars: #{config_vars.length}"
 # If config variables are passed on the CLI, we want to parse the config file
 # and use the parsed version for the rest fo this routine
 config_out = "#{@build_dir}/pre/#{File.basename(config_file)}"
-
-
-
+data_obj = DataObj.new()
+if data_files
+payload = get_payload(data_files)
+data_obj.add_payload!(payload)
+end
+data_obj.add_data!(config_vars, "vars")
+liquify(data_obj, config_file, config_out)
 config_file = config_out
 @logger.debug "Config parsed! Using #{config_out} for build."
 validate_file_input(config_file, "config")
@@ -131,41 +136,23 @@ def iterate_build cfg
 data_obj = DataObj.new()
 if step.data
 data_files = DataFiles.new(step.data)
-
-
-data = ingest_data(src) # Extract data from file
-rescue Exception => ex
-@logger.error "#{ex.class}: #{ex.message}"
-raise "DataFileReadFail (#{src.file})"
-end
-begin # Create build.data
-if data_files.sources.size == 1
-data_obj.add_data!("", data) if data.is_a? Hash
-# Insert arrays into the data. scope, and for backward compatibility, hashes as well
-data_obj.add_data!("data", data)
-else
-data_obj.add_data!(src.name, data) # Insert object under self-named scope
-end
-rescue Exception => ex
-@logger.error "#{ex.class}: #{ex.message}"
-raise "DataIngestFail (#{src.file})"
-end
-end
+payload = get_payload(data_files)
+data_obj.add_payload!(payload)
 end
 builds.each do |bld|
 build = Build.new(bld, type, data_obj) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type'
 if build.template
-# Prep & perform a Liquid-parsed build
+# Prep & perform a Liquid-parsed build
 @explainer.info build.message
-build.add_data!("vars"
+build.add_data!(build.variables, "vars") if build.variables
 liquify(build.data, build.template, build.output) # perform the liquify operation
 else # Prep & perform a direct conversion
 # Delete nested data and vars objects
 build.data.remove_scope("data")
 build.data.remove_scope("vars")
 # Add vars from CLI or config args
-build.data.add_data!(
-build.data.add_data!(
+build.data.add_data!(build.variables) unless build.variables.empty?
+build.data.add_data!(@passed_vars) unless @passed_vars.empty?
 regurgidata(build.data, build.output)
 end
 end
@@ -479,8 +466,8 @@ class Build
 @data unless @data.nil?
 end
 
-def add_data!
-@data.add_data!(
+def add_data! data, scope=""
+@data.add_data!(data, scope)
 end
 
 # def vars
@@ -602,7 +589,7 @@ class Build
 when "render"
 reqs = ["output"]
 end
-for req in
+for req in reqs
 if (defined?(req)).nil?
 raise "ActionSettingMissing"
 end
@@ -612,6 +599,7 @@ class Build
 end # class Build
 
 class DataSrc
+# Organizes metadata about an ingestible data source
 # initialization means establishing a proper hash for the 'data' param
 def initialize sources
 @datasrc = {}
@@ -679,9 +667,9 @@ end # class DataSrc
 # DataFiles
 class DataFiles
 # Accepts a single String, Hash, or Array
-# String must be a filename
-# Hash must contain :
-# Array must contain filenames as strings
+# String must be a path/filename
+# Hash must contain file: and optionally type: and pattern:
+# Array must contain path/filenames as strings
 # Returns array of DataSrc objects
 def initialize data_sources
 @data_sources = []
@@ -696,6 +684,7 @@ class DataFiles
 end
 
 def sources
+# An Array of DataSrc objects
 @data_sources
 end
 
@@ -714,13 +703,14 @@ class DataObj
 @data = {"vars" => {}}
 end
 
-def add_data! scope=""
+def add_data! data, scope=""
 # Merges data into existing scope or creates a new scope
 if scope.empty? # store new object at root of this object
 self.data.merge!data
 else # store new object as a subordinate, named object
-if self.data.key?(scope) # merge into existing
-self.data[scope].merge!data
+if self.data.key?(scope) # merge/append into existing object
+self.data[scope].merge!data if self.data[scope].is_a? Hash
+self.data[scope] << data if self.data[scope].is_a? Array
 else # create a new key named after the scope
 scoped_hash = { scope => data }
 self.data.merge!scoped_hash
@@ -728,6 +718,20 @@ class DataObj
 end
 end
 
+def add_payload! payload
+# Expects an Array of Hashes ([{name=>String, data=>Object},...])
+if payload.size == 1
+# If payload is a single Hash, store it at the root level (no scope)
+self.add_data!(payload[0]['data']) if payload[0]['data'].is_a? Hash
+# Insert arrays into the data. scope, and for backward compatibility, hashes as well
+self.add_data!(payload[0]['data'], "data")
+end
+# For ALL payloads, create a self-named obj scope
+payload.each do |obj|
+self.add_data!(obj['data'], obj['name']) # Insert object under self-named scope
+end
+end
+
 def data
 @data
 end
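The new DataObj#add_payload! shown above picks scopes based on the payload's shape. A minimal sketch of the resulting structure, assuming the DataObj class above is loaded; the 'products' name and sample values are hypothetical, for illustration only:

  # Sketch only: with a single source, add_payload! stores the data at the
  # root, under "data", and under the source's own name, so all three
  # lookups below resolve to the same values.
  payload  = [{ 'name' => 'products', 'data' => { 'items' => ['widget', 'gadget'] } }]
  data_obj = DataObj.new
  data_obj.add_payload!(payload)
  data_obj.data['items']              #=> ["widget", "gadget"]
  data_obj.data['data']['items']      #=> ["widget", "gadget"]
  data_obj.data['products']['items']  #=> ["widget", "gadget"]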
@@ -770,7 +774,25 @@ end
 # PARSE-type build procs
 # ===
 
-
+def get_payload data_files
+# data_files: a proper DataFile object
+payload = []
+data_files.sources.each do |src|
+obj = {}
+begin
+data = ingest_data(src) # Extract data from file
+rescue Exception => ex
+@logger.error "#{ex.class}: #{ex.message}"
+raise "DataFileReadFail (#{src.file})"
+end
+obj['name'] = src.name
+obj['data'] = data
+payload << obj
+end
+return payload
+end
+
+# Pull in a semi-structured data file, converting contents to a Ruby object
 def ingest_data datasrc
 raise "InvalidDataSrcObject" unless datasrc.is_a? DataSrc
 case datasrc.type
@@ -862,22 +884,26 @@ def liquify data_obj, template_file, output
 end
 end
 
-def cli_liquify
+def cli_liquify data_files=nil, template_file=nil, output_file=nil, passed_vars
 # converts command-line options into liquify or regurgidata inputs
 data_obj = DataObj.new()
-if
-
-
-
+if output_file
+output = output_file
+else
+output = "stdout"
+end
+if data_files
+payload = get_payload(data_files)
+data_obj.add_payload!(payload)
 end
 if template_file
-data_obj.add_data!("data"
-data_obj.add_data!("vars"
-liquify(data_obj, template_file,
+# data_obj.add_data!(ingested, "data") if df
+data_obj.add_data!(passed_vars, "vars") if passed_vars
+liquify(data_obj, template_file, output)
 else
 data_obj.remove_scope("vars")
-data_obj.add_data!(
-regurgidata(data_obj,
+data_obj.add_data!(passed_vars) if passed_vars
+regurgidata(data_obj, output)
 end
 end
 
@@ -999,6 +1025,8 @@ def derive_backend type, out_file
 end
 
 def render_doc doc, build
+@render_count += 1
+@logger.info "### Build ##{@render_count}"
 case build.backend
 when "html5", "pdf"
 asciidocify(doc, build)
@@ -1042,6 +1070,7 @@ def asciidocify doc, build
 # Perform the aciidoctor convert
 if build.backend == "pdf"
 @logger.info "Generating PDF. This can take some time..."
+attrs.merge!({"pdf-theme"=>build.style}) if build.style
 end
 Asciidoctor.convert_file(
 doc.index,
@@ -1053,7 +1082,7 @@ def asciidocify doc, build
 safe: "unsafe",
 sourcemap: true,
 verbose: @verbose,
-mkdirs: true
+mkdirs: true,
 )
 @logger.info "Rendered file #{to_file}."
 end
@@ -1069,18 +1098,19 @@ def generate_site doc, build
 attrs.merge!(build.attributes) if build.attributes
 attrs = {"asciidoctor" => {"attributes" => attrs} }
 attrs_yaml = attrs.to_yaml # Convert it all back to Yaml, as we're going to write a file to feed back to Jekyll
-File.open("#{@build_dir}/pre/
-build.add_config_file("#{@build_dir}/pre/
+File.open("#{@build_dir}/pre/attributes_#{@render_count}.yml", 'w') { |file| file.write(attrs_yaml) }
+build.add_config_file("#{@build_dir}/pre/attributes_#{@render_count}.yml")
 config_list = build.prop_files_array.join(',') # flatten the Array back down for the CLI
 quiet = "--quiet" if @quiet || @explicit
 if build.props['arguments']
-opts_args_file = "#{@build_dir}/pre/
+opts_args_file = "#{@build_dir}/pre/jekyll_opts_args_#{@render_count}.yml"
 opts_args = build.props['arguments']
 File.open(opts_args_file, 'w') { |file|
 file.write(opts_args.to_yaml)}
 config_list << ",#{opts_args_file}"
 end
 base_args = "--config #{config_list}"
+base_args += " --trace" if @verbose
 command = "bundle exec jekyll build #{base_args} #{quiet}"
 if @search_index
 # TODO enable config-based admin api key ingest once config is dynamic
@@ -1268,6 +1298,16 @@ module CustomFilters
 input.to_s.gsub(Regexp.new(regex), replacement.to_s)
 end
 
+def to_yaml input
+o = input.to_yaml
+o = o.gsub(/^\-\-\-$\n/, "")
+o
+end
+
+def to_json input
+o = input.to_json
+o
+end
 end
 
 # register custom Liquid filters
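The two filters added above expose Ruby's YAML and JSON serialization to Liquid templates. A self-contained sketch of how they could be exercised; the module here only mirrors the two filters shown in the diff, and the template strings and sample data are illustrative, not from the package:

  require 'liquid'
  require 'json'
  require 'yaml'

  # Illustrative module mirroring the filters added in the diff above
  module CustomFilters
    def to_yaml input
      input.to_yaml.gsub(/^\-\-\-$\n/, "")  # drop the leading YAML document marker
    end

    def to_json input
      input.to_json
    end
  end

  Liquid::Template.register_filter(CustomFilters)

  data = { "site" => { "title" => "LiquiDoc", "toc" => true } }
  puts Liquid::Template.parse("{{ site | to_json }}").render(data)
  # {"title":"LiquiDoc","toc":true}
  puts Liquid::Template.parse("{{ site | to_yaml }}").render(data)
  # title: LiquiDoc
  # toc: true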
@@ -1303,8 +1343,10 @@ command_parser = OptionParser.new do|opts|
 @config_file = @base_dir + n
 end
 
-opts.on("-d PATH", "--data=PATH", "Semi-structured data source (input) path. Ex. path/to/data.yml. Required unless --config is called." ) do |n|
-
+opts.on("-d PATH[,PATH]", "--data=PATH[,PATH]", "Semi-structured data source (input) path or paths. Ex. path/to/data.yml or data/file1.yml,data/file2.json. Required unless --config is called; optional with config." ) do |n|
+data_files = n.split(',')
+data_files = data_files.map! {|file| @base_dir + file}
+@data_files = DataFiles.new(data_files)
 end
 
 opts.on("-f PATH", "--from=PATH", "Directory to copy assets from." ) do |n|
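The reworked -d/--data option now accepts a comma-separated list of paths, which the handler above splits, prefixes with the base directory, and wraps in a DataFiles object. A rough sketch of that handling in isolation, assuming lib/liquidoc.rb is loaded; the paths and base directory are hypothetical:

  # Sketch only: mirrors the --data handler with hypothetical paths
  n = "data/file1.yml,data/file2.json"             # raw value passed to -d/--data
  paths = n.split(',').map { |file| "./" + file }  # "./" stands in for @base_dir
  data_files = DataFiles.new(paths)                # DataFiles accepts an Array of path strings
  data_files.sources.map(&:name)                   # one DataSrc per file, each self-named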
@@ -1316,7 +1358,7 @@ command_parser = OptionParser.new do|opts|
 end
 
 opts.on("-o PATH", "--output=PATH", "Output file path for generated content. Ex. path/to/file.adoc. Required unless --config is called.") do |n|
-@
+@output = @base_dir + n
 end
 
 opts.on("-t PATH", "--template=PATH", "Path to liquid template. Required unless --configuration is called." ) do |n|
@@ -1341,7 +1383,7 @@ command_parser = OptionParser.new do|opts|
 end
 
 opts.on("--stdout", "Puts the output in STDOUT instead of writing to a file.") do
-@
+@output = "stdout"
 end
 
 opts.on("--deploy", "EXPERIMENTAL: Trigger a jekyll serve operation against the destination dir of a Jekyll render step.") do
@@ -1396,13 +1438,13 @@ explainer_init
 
 unless @config_file
 @logger.debug "Executing config-free build based on API/CLI arguments alone."
-if @
-cli_liquify(@
+if @data_files
+cli_liquify(@data_files, @template_file, @output, @passed_vars)
 end
 if @index_file
 @logger.warn "Rendering via command line arguments is not yet implemented. Use a config file."
 end
 else
 @logger.debug "Executing... config_build"
-config_build(@config_file, @passed_vars, @parseconfig)
+config_build(@config_file, @passed_vars, @data_files, @parseconfig)
 end
data/lib/liquidoc/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: liquidoc
 version: !ruby/object:Gem::Version
-version: 0.11.0
+version: 0.12.0.pre.rc5
 platform: ruby
 authors:
 - Brian Dominick
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-01-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 name: bundler
@@ -28,30 +28,30 @@ dependencies:
 name: rake
 requirement: !ruby/object:Gem::Requirement
 requirements:
-- - "
+- - ">="
 - !ruby/object:Gem::Version
-version:
+version: 12.3.3
 type: :development
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
-- - "
+- - ">="
 - !ruby/object:Gem::Version
-version:
+version: 12.3.3
 - !ruby/object:Gem::Dependency
 name: asciidoctor
 requirement: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '
+version: '2.0'
 type: :runtime
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '
+version: '2.0'
 - !ruby/object:Gem::Dependency
 name: json
 requirement: !ruby/object:Gem::Requirement
@@ -84,16 +84,16 @@ dependencies:
 name: asciidoctor-pdf
 requirement: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '='
 - !ruby/object:Gem::Version
-version: 1.5.
+version: 1.5.3
 type: :runtime
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '='
 - !ruby/object:Gem::Version
-version: 1.5.
+version: 1.5.3
 - !ruby/object:Gem::Dependency
 name: logger
 requirement: !ruby/object:Gem::Requirement
@@ -128,28 +128,28 @@ dependencies:
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '
+version: '4.0'
 type: :runtime
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '
+version: '4.0'
 - !ruby/object:Gem::Dependency
 name: jekyll-asciidoc
 requirement: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '
+version: '3.0'
 type: :runtime
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '
+version: '3.0'
 - !ruby/object:Gem::Dependency
 name: highline
 requirement: !ruby/object:Gem::Requirement
@@ -177,7 +177,7 @@ files:
 - bin/liquidoc
 - lib/liquidoc.rb
 - lib/liquidoc/version.rb
-homepage: https://github.com/
+homepage: https://github.com/DocOps/liquidoc
 licenses:
 - MIT
 metadata: