kramdown-rfc2629 1.6.12 → 1.6.15

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: ac3949475273a7c4f72acf86a47d2c9fb176c2a6f81c8d9cc9db03d595c61248
-   data.tar.gz: 165b94bb4a9306134e92515dae4eec7a9022b0264d95484d7f31a316982d3b01
+   metadata.gz: d667e6af185771592ba7f5b42b1671821b3f8c722aec85af0f9606d2d9857886
+   data.tar.gz: 43b32714ed1aa1be0666a78bf81587f49e4c34a42bc3236a12c5afcac7455ae8
  SHA512:
-   metadata.gz: 1686e35bd93c3e9ff63f42bb830845e3b7accf1b2b7621fb78d49b0fafc3c1edeaea53a645c6e81d8ee5c9f468c54fb0920cb91831325c210d9b98d810f64564
-   data.tar.gz: 5599b22f40492bbfffc8a5d34af3e9ddc69452757bc0c9eb95a97ce312bdc730d3858ffbe1b00bce3af8377d651a3277d3ec82acd0b62e65f7436c8051797bd7
+   metadata.gz: a6b1488ba454538d9a12c6c21fb35c8a916d8a5c5352b140934ae9efce9e87e929aaa6dc302fe6a0c6e3e4f2959860667119bddf734388cf707784529d31d158
+   data.tar.gz: 3f427762bfec7d57eb13038d442ca0f5752257dcfd45174bc74e53016307713acba005ecee53705f668a9957cd69173391164273894fc6272c33dd6ff475d42f
data/bin/doilit CHANGED
@@ -1,16 +1,12 @@
  #!/usr/bin/env ruby
- require 'open-uri'
  require 'yaml'
- require 'json'
  require 'kramdown-rfc2629'
  require 'kramdown-rfc/parameterset'
  require 'kramdown-rfc/refxml'
+ require 'kramdown-rfc/doi'

  # doilit -c 10.6028/NIST.SP.800-183 10.1016/j.adhoc.2015.04.007 10.1109/MIC.2012.29 10.1145/2507924.2507954

-
- ACCEPT_CITE_JSON = {"Accept" => "application/citeproc+json"}
-
  $verbose = false
  $fuzzy = false
  $handle = "a"
@@ -43,79 +39,9 @@ ARGV.each do |doi|
      warn "*** Usage: doilit [-c] [-f] [-v] [-h=handle|-x=xmlhandle] doi..."
      exit 1
    end
-   cite = JSON.parse(URI("https://dx.doi.org/#{doi}").open(ACCEPT_CITE_JSON).read)
-   puts cite.to_yaml if $verbose
-   lit = {}
-   ser = lit["seriesinfo"] = {}
-   lit["title"] = cite["title"]
-   if (st = cite["subtitle"]) && Array === st # defensive
-     st.delete('')
-     if st != []
-       lit["title"] << ": " << st.join("; ")
-     end
-   end
-   if authors = cite["author"]
-     lit["author"] = authors.map do |au|
-       lau = {}
-       if (f = au["family"])
-         if (g = au["given"])
-           lau["name"] = "#{g} #{f}"
-           lau["ins"] = "#{g[0]}. #{f}"
-         else
-           lau["name"] = "#{f}"
-           # lau["ins"] = "#{g[0]}. #{f}"
-         end
-       end
-       lau
-     end
-   end
-   if iss = cite["issued"]
-     if dp = iss["date-parts"]
-       if Integer === (dp = dp[0])[0]
-         lit["date"] = ["%04d" % dp[0], *dp[1..-1].map {|p| "%02d" % p}].join("-")
-       end
-     end
-   end
-   if !lit.key?("date") && $fuzzy && (iss = cite["created"])
-     if dp = iss["date-parts"]
-       if Integer === (dp = dp[0])[0]
-         lit["date"] = ["%04d" % dp[0], *dp[1..-1].map {|p| "%02d" % p}].join("-")
-       end
-     end
-   end
-   if (ct = cite["container-title"]) && ct != []
-     info = []
-     if v = cite["volume"]
-       vi = "vol. #{v}"
-       if (v = cite["journal-issue"]) && (issue = v["issue"])
-         vi << ", no. #{issue}"
-       end
-       info << vi
-     end
-     if p = cite["page"]
-       info << "pp. #{p}"
-     end
-     rhs = info.join(", ")
-     if info != []
-       ser[ct] = rhs
-     else
-       spl = ct.split(" ")
-       ser[spl[0..-2].join(" ")] = spl[-1]
-     end
-   elsif pub = cite["publisher"]
-     info = []
-     if t = cite["type"]
-       info << t
-     end
-     rhs = info.join(", ")
-     if info != []
-       ser[pub] = rhs
-     else
-       spl = pub.split(" ")
-       ser[spl[0..-2].join(" ")] = spl[-1]
-     end
-   end
-   lit["seriesinfo"]["DOI"] = cite["DOI"]
+
+   lit = doi_fetch_and_convert(doi, fuzzy: $fuzzy, verbose: $verbose)
+
    while litent[$handle]
      $handle.succ!
    end
@@ -102,11 +102,13 @@
  <% mail_subdomain, mail_domain = mail_host.split(".", 2) -%>
  <% group = venue[:group] || mail_local # XXX -%>
  <% arch = venue[:arch] || "https://mailarchive.ietf.org/arch/browse/#{mail_local}/" -%>
+ <% subscribe = venue[:subscribe] || "https://www.ietf.org/mailman/listinfo/#{mail_local}/" -%>
  <% GROUPS = {"ietf" => "Working ", "irtf" => "Research "} -%>
  <% gtype ||= "#{GROUPS[mail_subdomain]}Group" -%>
  Discussion of this document takes place on the
  <%=group%> <%=gtype%> mailing list (<eref target="mailto:<%=mail%>"/>),
  which is archived at <eref target="<%=arch%>"/>.
+ Subscribe at <eref target="<%=subscribe%>"/>.
  <% end -%>
  <% if homepage -%>
  <%=gtype%> information can be found at <eref target="<%=homepage%>"/>.
kramdown-rfc2629.gemspec CHANGED
@@ -1,6 +1,6 @@
  spec = Gem::Specification.new do |s|
    s.name = 'kramdown-rfc2629'
-   s.version = '1.6.12'
+   s.version = '1.6.15'
    s.summary = "Kramdown extension for generating RFCXML (RFC 799x)."
    s.description = %{An RFCXML (RFC 799x) generating backend for Thomas Leitner's
  "kramdown" markdown parser. Mostly useful for RFC writers.}
@@ -149,6 +149,7 @@ NMDTAGS = ["{:/nomarkdown}\n\n", "\n\n{::nomarkdown}\n"]
  NORMINFORM = { "!" => :normative, "?" => :informative }

  def yaml_load(input, *args)
+   begin
    if YAML.respond_to?(:safe_load)
      begin
        YAML.safe_load(input, *args)
@@ -158,6 +159,10 @@ def yaml_load(input, *args)
    else
      YAML.load(input)
    end
+   rescue Psych::SyntaxError => e
+     warn "*** YAML syntax error: #{e}"
+     exit 65 # EX_DATAERR
+   end
  end

  def process_kramdown_options(coding_override = nil,
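
The effect of the added rescue: a document whose YAML front matter is malformed now fails with a short diagnostic and exit status 65 (EX_DATAERR) instead of an unhandled Psych::SyntaxError backtrace. A minimal stand-alone sketch of the same pattern, using a made-up broken input:

    require 'yaml'

    broken = "title: [unterminated"   # malformed YAML, for illustration only
    begin
      YAML.safe_load(broken)
    rescue Psych::SyntaxError => e
      warn "*** YAML syntax error: #{e}"
      exit 65 # EX_DATAERR, as in the diff above
    end
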
lib/kramdown-rfc/doi.rb ADDED
@@ -0,0 +1,92 @@
+ require 'open-uri'
+ require 'json'
+ require 'yaml'
+
+ ACCEPT_CITE_JSON = {"Accept" => "application/citeproc+json"}
+
+ def doi_fetch_and_convert(doi, fuzzy: false, verbose: false)
+   cite = JSON.parse(URI("https://dx.doi.org/#{doi}").open(ACCEPT_CITE_JSON).read)
+   puts cite.to_yaml if verbose
+   lit = {}
+   ser = lit["seriesinfo"] = {}
+   lit["title"] = cite["title"]
+   if (st = cite["subtitle"]) && Array === st # defensive
+     st.delete('')
+     if st != []
+       lit["title"] << ": " << st.join("; ")
+     end
+   end
+   if authors = cite["author"]
+     lit["author"] = authors.map do |au|
+       lau = {}
+       if (f = au["family"])
+         if (g = au["given"])
+           lau["name"] = "#{g} #{f}"
+           lau["ins"] = "#{g[0]}. #{f}"
+         else
+           lau["name"] = "#{f}"
+           # lau["ins"] = "#{g[0]}. #{f}"
+         end
+       end
+       if (f = au["affiliation"]) && Array === f
+         names = f.map { |affn|
+           if Hash === affn && (n = affn["name"]) && String === n
+             n
+           end
+         }.compact
+         if names.size > 0
+           lau["org"] = names.join("; ")
+         end
+       end
+       lau
+     end
+   end
+   if iss = cite["issued"]
+     if dp = iss["date-parts"]
+       if Integer === (dp = dp[0])[0]
+         lit["date"] = ["%04d" % dp[0], *dp[1..-1].map {|p| "%02d" % p}].join("-")
+       end
+     end
+   end
+   if !lit.key?("date") && fuzzy && (iss = cite["created"])
+     if dp = iss["date-parts"]
+       if Integer === (dp = dp[0])[0]
+         lit["date"] = ["%04d" % dp[0], *dp[1..-1].map {|p| "%02d" % p}].join("-")
+       end
+     end
+   end
+   if (ct = cite["container-title"]) && ct != []
+     info = []
+     if v = cite["volume"]
+       vi = "vol. #{v}"
+       if (v = cite["journal-issue"]) && (issue = v["issue"])
+         vi << ", no. #{issue}"
+       end
+       info << vi
+     end
+     if p = cite["page"]
+       info << "pp. #{p}"
+     end
+     rhs = info.join(", ")
+     if info != []
+       ser[ct] = rhs
+     else
+       spl = ct.split(" ")
+       ser[spl[0..-2].join(" ")] = spl[-1]
+     end
+   elsif pub = cite["publisher"]
+     info = []
+     if t = cite["type"]
+       info << t
+     end
+     rhs = info.join(", ")
+     if info != []
+       ser[pub] = rhs
+     else
+       spl = pub.split(" ")
+       ser[spl[0..-2].join(" ")] = spl[-1]
+     end
+   end
+   ser["DOI"] = cite["DOI"]
+   lit
+ end
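
Both doilit (above) and the converter (below) now go through this helper. A hypothetical usage sketch, reusing a DOI from the doilit example comment; the output keys ("title", "author", "date", "seriesinfo") follow the code above:

    require 'kramdown-rfc/doi'
    require 'yaml'

    # Fetch citeproc JSON from doi.org and convert it to a kramdown-rfc
    # reference hash.
    lit = doi_fetch_and_convert("10.1109/MIC.2012.29", fuzzy: true, verbose: false)
    puts({"a" => lit}.to_yaml)   # print it under a handle, roughly as doilit does
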
@@ -22,6 +22,8 @@ require 'open3' # for math
  require 'json' # for math
  require 'rexml/document' # for SVG and bibxml acrobatics

+ require 'kramdown-rfc/doi' # for fetching information for a DOI
+
  class Object
    def deep_clone
      Marshal.load(Marshal.dump(self))
@@ -1002,6 +1004,12 @@ COLORS
      warn "(#{"%.3f" % (t2 - t1)} s)" if KRAMDOWN_PERSISTENT_VERBOSE
    end

+   def get_doi(refname)
+     lit = doi_fetch_and_convert(refname, fuzzy: true)
+     anchor = "DOI_#{refname.gsub("/", "_")}"
+     KramdownRFC::ref_to_xml(anchor, lit)
+   end
+
    # this is now slightly dangerous as multiple urls could map to the same cachefile
    def get_and_cache_resource(url, cachefile, tvalid = 7200, tn = Time.now)
      fn = "#{REFCACHEDIR}/#{cachefile}"
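
A small illustration of the anchor naming used by get_doi (hypothetical DOI taken from the doilit example); KramdownRFC::ref_to_xml then serializes the fetched entry as a <reference> element under that anchor:

    refname = "10.1109/MIC.2012.29"
    anchor  = "DOI_#{refname.gsub("/", "_")}"
    # => "DOI_10.1109_MIC.2012.29"
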
@@ -1016,7 +1024,17 @@
        fetch_timeout = 60 # seconds; long timeout needed for Travis
      end
      $stderr.puts "#{fn}: #{message} from #{url}"
-     if ENV["HAVE_WGET"]
+     if Array === url
+       begin
+         case url[0]
+         when :DOI
+           ref = get_doi(url[1])
+           File.write(fn, ref)
+         end
+       rescue Exception => e
+         warn "*** Error fetching #{url[0]} #{url[1].inspect}: #{e}"
+       end
+     elsif ENV["HAVE_WGET"]
        `cd #{REFCACHEDIR}; wget -t 3 -T #{fetch_timeout} -Nnv "#{url}"` # ignore errors if offline (hack)
        begin
          File.utime nil, nil, fn
@@ -1089,7 +1107,8 @@ COLORS
      "NIST" => "bibxml2",
      "OASIS" => "bibxml2",
      "PKCS" => "bibxml2",
-     "DOI" => ["bibxml7", 86400, true], # 24 h cache at source anyway
+     "DOI" => ["bibxml7", 86400, true, ->(fn, n){ ["computed-#{fn}", [:DOI, n] ] }, true # always_altproc
+              ], # emulate old 24 h cache
      "IANA" => ["bibxml8", 86400, true], # ditto
    }

@@ -1122,14 +1141,16 @@ COLORS
      anchor.gsub!('/', '_') # should take out all illegals
      to_insert = ""
      src.scan(/(W3C|3GPP|[A-Z-]+)[.]?([A-Za-z_0-9.\(\)\/\+-]+)/) do |t, n|
+       never_altproc = n.sub!(/^[.]/, "")
        fn = "reference.#{t}.#{n}.xml"
-       sub, ttl, _can_anchor, altproc = XML_RESOURCE_ORG_MAP[t]
+       sub, ttl, _can_anchor, altproc, always_altproc = XML_RESOURCE_ORG_MAP[t]
        ttl ||= KRAMDOWN_REFCACHETTL # everything but RFCs might change a lot
        puts "*** Huh: #{fn}" unless sub
-       if altproc && !KRAMDOWN_USE_TOOLS_SERVER
+       if altproc && !never_altproc && (!KRAMDOWN_USE_TOOLS_SERVER || always_altproc)
          fn, url = altproc.call(fn, n)
        else
          url = "#{XML_RESOURCE_ORG_PREFIX}/#{sub}/#{fn}"
+         fn = "alt-#{fn}" if never_altproc || KRAMDOWN_USE_TOOLS_SERVER
        end
        # if can_anchor # create anchor server-side for stand_alone: false
        # url << "?anchor=#{anchor}"
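
Together with the new "DOI" entry in XML_RESOURCE_ORG_MAP above, this routes DOI references through get_doi locally, even when the tools server is otherwise in use, unless the reference name starts with a dot. A self-contained sketch of what the altproc lambda hands back (the DOI value is illustrative):

    # Hypothetical walk-through of the "DOI" altproc plumbing shown in the diff.
    altproc = ->(fn, n){ ["computed-#{fn}", [:DOI, n] ] }   # copied from the map entry above
    fn, url = altproc.call("reference.DOI.10.1109/MIC.2012.29.xml", "10.1109/MIC.2012.29")
    fn   # => "computed-reference.DOI.10.1109/MIC.2012.29.xml"  (local cache file name)
    url  # => [:DOI, "10.1109/MIC.2012.29"] -- an Array, so get_and_cache_resource
         #    dispatches it to get_doi instead of fetching a URL
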
@@ -1141,6 +1162,7 @@ COLORS
      begin
        d = REXML::Document.new(to_insert)
        d.xml_decl.nowrite
+       d.delete d.doctype
        d.root.attributes["anchor"] = anchor
        if t == "RFC" or t == "I-D"
          if KRAMDOWN_NO_TARGETS
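
The added d.delete d.doctype drops any DOCTYPE declaration from a fetched reference file before it is inlined, since a DOCTYPE is only valid at the top of a document. A minimal stand-alone REXML sketch of the same steps, on made-up input:

    require 'rexml/document'

    xml = <<~XML
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE reference SYSTEM "rfc2629.dtd">
      <reference anchor="old"><front><title>Example</title></front></reference>
    XML

    d = REXML::Document.new(xml)
    d.xml_decl.nowrite              # do not re-emit the <?xml ...?> declaration
    d.delete d.doctype              # drop the DOCTYPE so it is not inlined
    d.root.attributes["anchor"] = "DOI_example"
    out = String.new
    d.write(out)
    puts out                        # declaration and DOCTYPE are gone from the output
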
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: kramdown-rfc2629
  version: !ruby/object:Gem::Version
-   version: 1.6.12
+   version: 1.6.15
  platform: ruby
  authors:
  - Carsten Bormann
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-06-27 00:00:00.000000000 Z
+ date: 2022-08-02 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: kramdown
@@ -100,6 +100,7 @@ files:
  - kramdown-rfc2629.gemspec
  - lib/kramdown-rfc/autolink-iref-cleanup.rb
  - lib/kramdown-rfc/command.rb
+ - lib/kramdown-rfc/doi.rb
  - lib/kramdown-rfc/erb.rb
  - lib/kramdown-rfc/gzip-clone.rb
  - lib/kramdown-rfc/kdrfc-processor.rb