metanorma-standoc 1.6.5 → 1.8.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/rake.yml +17 -0
  3. data/lib/asciidoctor/standoc/base.rb +31 -44
  4. data/lib/asciidoctor/standoc/basicdoc.rng +20 -3
  5. data/lib/asciidoctor/standoc/blocks.rb +7 -7
  6. data/lib/asciidoctor/standoc/blocks_notes.rb +2 -2
  7. data/lib/asciidoctor/standoc/cleanup.rb +5 -6
  8. data/lib/asciidoctor/standoc/cleanup_block.rb +3 -11
  9. data/lib/asciidoctor/standoc/cleanup_boilerplate.rb +89 -20
  10. data/lib/asciidoctor/standoc/cleanup_inline.rb +23 -25
  11. data/lib/asciidoctor/standoc/cleanup_ref.rb +0 -85
  12. data/lib/asciidoctor/standoc/cleanup_ref_dl.rb +94 -0
  13. data/lib/asciidoctor/standoc/cleanup_terms.rb +1 -6
  14. data/lib/asciidoctor/standoc/converter.rb +2 -51
  15. data/lib/asciidoctor/standoc/front.rb +2 -2
  16. data/lib/asciidoctor/standoc/front_contributor.rb +8 -4
  17. data/lib/asciidoctor/standoc/inline.rb +8 -6
  18. data/lib/asciidoctor/standoc/isodoc.rng +36 -3
  19. data/lib/asciidoctor/standoc/lists.rb +2 -2
  20. data/lib/asciidoctor/standoc/macros.rb +14 -1
  21. data/lib/asciidoctor/standoc/macros_plantuml.rb +1 -1
  22. data/lib/asciidoctor/standoc/ref_sect.rb +2 -2
  23. data/lib/asciidoctor/standoc/reqt.rb +6 -1
  24. data/lib/asciidoctor/standoc/section.rb +14 -89
  25. data/lib/asciidoctor/standoc/table.rb +1 -1
  26. data/lib/asciidoctor/standoc/terms.rb +125 -0
  27. data/lib/asciidoctor/standoc/utils.rb +2 -94
  28. data/lib/metanorma/standoc/version.rb +1 -1
  29. data/metanorma-standoc.gemspec +2 -3
  30. data/spec/asciidoctor-standoc/base_spec.rb +44 -9
  31. data/spec/asciidoctor-standoc/blocks_spec.rb +6 -1
  32. data/spec/asciidoctor-standoc/cleanup_sections_spec.rb +12 -7
  33. data/spec/asciidoctor-standoc/cleanup_spec.rb +116 -4
  34. data/spec/asciidoctor-standoc/inline_spec.rb +4 -5
  35. data/spec/asciidoctor-standoc/isobib_cache_spec.rb +4 -4
  36. data/spec/asciidoctor-standoc/macros_lutaml_spec.rb +1 -1
  37. data/spec/asciidoctor-standoc/macros_spec.rb +30 -0
  38. data/spec/asciidoctor-standoc/refs_dl_spec.rb +17 -5
  39. data/spec/asciidoctor-standoc/refs_spec.rb +12 -12
  40. data/spec/asciidoctor-standoc/section_spec.rb +149 -4
  41. data/spec/asciidoctor-standoc/table_spec.rb +60 -0
  42. data/spec/vcr_cassettes/dated_iso_ref_joint_iso_iec.yml +48 -48
  43. data/spec/vcr_cassettes/isobib_get_123.yml +12 -12
  44. data/spec/vcr_cassettes/isobib_get_123_1.yml +26 -26
  45. data/spec/vcr_cassettes/isobib_get_123_1_fr.yml +36 -36
  46. data/spec/vcr_cassettes/isobib_get_123_2001.yml +14 -14
  47. data/spec/vcr_cassettes/isobib_get_124.yml +14 -14
  48. data/spec/vcr_cassettes/rfcbib_get_rfc8341.yml +8 -8
  49. data/spec/vcr_cassettes/separates_iev_citations_by_top_level_clause.yml +65 -59
  50. metadata +18 -31
  51. data/lib/asciidoctor/standoc/log.rb +0 -59
@@ -1,3 +1,5 @@
1
+ require "metanorma-utils"
2
+
1
3
  module Asciidoctor
2
4
  module Standoc
3
5
  module Cleanup
@@ -82,6 +84,8 @@ module Asciidoctor
82
84
 
83
85
  def xref_cleanup(xmldoc)
84
86
  xmldoc.xpath("//xref").each do |x|
87
+ /:/.match(x["target"]) and xref_to_internal_eref(x)
88
+ next unless x.name == "xref"
85
89
  if refid? x["target"]
86
90
  x.name = "eref"
87
91
  xref_to_eref(x)
@@ -91,6 +95,18 @@ module Asciidoctor
91
95
  end
92
96
  end
93
97
 
98
+ def xref_to_internal_eref(x)
99
+ a = x["target"].split(":", 3)
100
+ unless a.size < 2 || a[0].empty? || a[1].empty?
101
+ x["target"] = "#{a[0]}_#{a[1]}"
102
+ a.size > 2 and x.children = %{anchor="#{a[2..-1].join("")}",#{x&.children&.text}}
103
+ x["type"] = a[0]
104
+ @internal_eref_namespaces << a[0]
105
+ x.name = "eref"
106
+ xref_to_eref(x)
107
+ end
108
+ end
109
+
94
110
  def quotesource_cleanup(xmldoc)
95
111
  xmldoc.xpath("//quote/source | //terms/source").each do |x|
96
112
  xref_to_eref(x)
@@ -125,7 +141,7 @@ module Asciidoctor
125
141
  def concept_termbase_cleanup(x)
126
142
  text = x&.children&.first&.remove&.text
127
143
  termbase, key = x["key"].split(/:/, 2)
128
- x.add_child(%(<termref base="#{termbase}" target="#{key}">) +
144
+ x.add_child(%(<termref base="#{termbase}" target="#{key}">) +
129
145
  "#{text}</termref>")
130
146
  end
131
147
 
@@ -139,27 +155,11 @@ module Asciidoctor
139
155
  extract_localities(x.first_element_child)
140
156
  end
141
157
 
142
- NAMECHAR = "\u0000-\u0022\u0024\u002c\u002f\u003a-\u0040\\u005b-\u005e"\
143
- "\u0060\u007b-\u00b6\u00b8-\u00bf\u00d7\u00f7\u037e\u2000-\u200b"\
144
- "\u200e-\u203e\u2041-\u206f\u2190-\u2bff\u2ff0-\u3000".freeze
145
- #"\ud800-\uf8ff\ufdd0-\ufdef\ufffe-\uffff".freeze
146
- NAMESTARTCHAR = "\\u002d\u002e\u0030-\u0039\u00b7\u0300-\u036f"\
147
- "\u203f-\u2040".freeze
148
-
149
- def to_ncname(s)
150
- start = s[0]
151
- ret1 = %r([#{NAMECHAR}#]).match(start) ? "_" :
152
- (%r([#{NAMESTARTCHAR}#]).match(start) ? "_#{start}" : start)
153
- ret2 = s[1..-1] || ""
154
- ret = (ret1 || "") + ret2.gsub(%r([#{NAMECHAR}#]), "_")
155
- ret
156
- end
157
-
158
158
  def to_xreftarget(s)
159
- return to_ncname(s) unless /^[^#]+#.+$/.match(s)
159
+ return Metanorma::Utils::to_ncname(s) unless /^[^#]+#.+$/.match(s)
160
160
  /^(?<pref>[^#]+)#(?<suff>.+)$/ =~ s
161
- pref = pref.gsub(%r([#{NAMECHAR}]), "_")
162
- suff = suff.gsub(%r([#{NAMECHAR}]), "_")
161
+ pref = pref.gsub(%r([#{Metanorma::Utils::NAMECHAR}]), "_")
162
+ suff = suff.gsub(%r([#{Metanorma::Utils::NAMECHAR}]), "_")
163
163
  "#{pref}##{suff}"
164
164
  end
165
165
 
@@ -173,12 +173,11 @@ module Asciidoctor
173
173
 
174
174
  def anchor_cleanup1(x)
175
175
  x.xpath(IDREF).each do |s|
176
- if (ret = to_ncname(s.value)) != (orig = s.value)
176
+ if (ret = Metanorma::Utils::to_ncname(s.value)) != (orig = s.value)
177
177
  s.value = ret
178
178
  output = s.parent.dup
179
179
  output.children.remove
180
- @log.add("Anchors", s.parent, "normalised identifier in #{output} "\
181
- "from #{orig}")
180
+ @log.add("Anchors", s.parent, "normalised identifier in #{output} from #{orig}")
182
181
  end
183
182
  end
184
183
  end
@@ -189,8 +188,7 @@ module Asciidoctor
189
188
  s.value = ret
190
189
  output = s.parent.dup
191
190
  output.children.remove
192
- @log.add("Anchors", s.parent, "normalised identifier in #{output} "\
193
- "from #{orig}")
191
+ @log.add("Anchors", s.parent, "normalised identifier in #{output} from #{orig}")
194
192
  end
195
193
  end
196
194
  end
@@ -120,91 +120,6 @@ module Asciidoctor
120
120
  end
121
121
  end
122
122
 
123
- def ref_dl_cleanup(xmldoc)
124
- xmldoc.xpath("//clause[@bibitem = 'true']").each do |c|
125
- bib = dl_bib_extract(c) or next
126
- validate_ref_dl(bib, c)
127
- bibitemxml = RelatonBib::BibliographicItem.new(RelatonBib::HashConverter::hash_to_bib(bib)).to_xml or next
128
- bibitem = Nokogiri::XML(bibitemxml)
129
- bibitem.root["id"] = c["id"] if c["id"] && !/^_/.match(c["id"])
130
- c.replace(bibitem.root)
131
- end
132
- end
133
-
134
- def validate_ref_dl(bib, c)
135
- id = bib["id"]
136
- id ||= c["id"] unless /^_/.match(c["id"]) # do not accept implicit id
137
- unless id
138
- @log.add("Anchors", c, "The following reference is missing an anchor:\n" + c.to_xml)
139
- return
140
- end
141
- bib["title"] or @log.add("Bibliography", c, "Reference #{id} is missing a title")
142
- bib["docid"] or @log.add("Bibliography", c, "Reference #{id} is missing a document identifier (docid)")
143
- end
144
-
145
- def extract_from_p(tag, bib, key)
146
- return unless bib[tag]
147
- "<#{key}>#{bib[tag].at('p').children}</#{key}>"
148
- end
149
-
150
- # if the content is a single paragraph, replace it with its children
151
- # single links replaced with uri
152
- def p_unwrap(p)
153
- elems = p.elements
154
- if elems.size == 1 && elems[0].name == "p"
155
- link_unwrap(elems[0]).children.to_xml.strip
156
- else
157
- p.to_xml.strip
158
- end
159
- end
160
-
161
- def link_unwrap(p)
162
- elems = p.elements
163
- if elems.size == 1 && elems[0].name == "link"
164
- p.at("./link").replace(elems[0]["target"].strip)
165
- end
166
- p
167
- end
168
-
169
- def dd_bib_extract(dtd)
170
- return nil if dtd.children.empty?
171
- dtd.at("./dl") and return dl_bib_extract(dtd)
172
- elems = dtd.remove.elements
173
- return p_unwrap(dtd) unless elems.size == 1 && %w(ol ul).include?(elems[0].name)
174
- ret = []
175
- elems[0].xpath("./li").each do |li|
176
- ret << p_unwrap(li)
177
- end
178
- ret
179
- end
180
-
181
- def add_to_hash(bib, key, val)
182
- Utils::set_nested_value(bib, key.split(/\./), val)
183
- end
184
-
185
- # definition list, with at most one level of unordered lists
186
- def dl_bib_extract(c, nested = false)
187
- dl = c.at("./dl") or return
188
- bib = {}
189
- key = ""
190
- dl.xpath("./dt | ./dd").each do |dtd|
191
- dtd.name == "dt" and key = dtd.text.sub(/:+$/, "") or add_to_hash(bib, key, dd_bib_extract(dtd))
192
- end
193
- c.xpath("./clause").each do |c1|
194
- key = c1&.at("./title")&.text&.downcase&.strip
195
- next unless %w(contributor relation series).include? key
196
- add_to_hash(bib, key, dl_bib_extract(c1, true))
197
- end
198
- if !nested and c.at("./title")
199
- title = c.at("./title").remove.children.to_xml
200
- bib["title"] = [bib["title"]] if bib["title"].is_a? Hash
201
- bib["title"] = [bib["title"]] if bib["title"].is_a? String
202
- bib["title"] = [] unless bib["title"]
203
- bib["title"] << title if !title.empty?
204
- end
205
- bib
206
- end
207
-
208
123
  def fetch_termbase(termbase, id)
209
124
  ""
210
125
  end
@@ -0,0 +1,94 @@
1
+ require "set"
2
+ require "relaton_bib"
3
+
4
+ module Asciidoctor
5
+ module Standoc
6
+ module Cleanup
7
+ def ref_dl_cleanup(xmldoc)
8
+ xmldoc.xpath("//clause[@bibitem = 'true']").each do |c|
9
+ bib = dl_bib_extract(c) or next
10
+ validate_ref_dl(bib, c)
11
+ bibitemxml = RelatonBib::BibliographicItem.new(RelatonBib::HashConverter::hash_to_bib(bib)).to_xml or next
12
+ bibitem = Nokogiri::XML(bibitemxml)
13
+ bibitem.root["id"] = c["id"] if c["id"] && !/^_/.match(c["id"])
14
+ c.replace(bibitem.root)
15
+ end
16
+ end
17
+
18
+ def validate_ref_dl(bib, c)
19
+ id = bib["id"]
20
+ id ||= c["id"] unless /^_/.match(c["id"]) # do not accept implicit id
21
+ unless id
22
+ @log.add("Anchors", c, "The following reference is missing an anchor:\n" + c.to_xml)
23
+ return
24
+ end
25
+ @refids << id
26
+ bib["title"] or @log.add("Bibliography", c, "Reference #{id} is missing a title")
27
+ bib["docid"] or @log.add("Bibliography", c, "Reference #{id} is missing a document identifier (docid)")
28
+ end
29
+
30
+ def extract_from_p(tag, bib, key)
31
+ return unless bib[tag]
32
+ "<#{key}>#{bib[tag].at('p').children}</#{key}>"
33
+ end
34
+
35
+ # if the content is a single paragraph, replace it with its children
36
+ # single links replaced with uri
37
+ def p_unwrap(p)
38
+ elems = p.elements
39
+ if elems.size == 1 && elems[0].name == "p"
40
+ link_unwrap(elems[0]).children.to_xml.strip
41
+ else
42
+ p.to_xml.strip
43
+ end
44
+ end
45
+
46
+ def link_unwrap(p)
47
+ elems = p.elements
48
+ if elems.size == 1 && elems[0].name == "link"
49
+ p.at("./link").replace(elems[0]["target"].strip)
50
+ end
51
+ p
52
+ end
53
+
54
+ def dd_bib_extract(dtd)
55
+ return nil if dtd.children.empty?
56
+ dtd.at("./dl") and return dl_bib_extract(dtd)
57
+ elems = dtd.remove.elements
58
+ return p_unwrap(dtd) unless elems.size == 1 && %w(ol ul).include?(elems[0].name)
59
+ ret = []
60
+ elems[0].xpath("./li").each do |li|
61
+ ret << p_unwrap(li)
62
+ end
63
+ ret
64
+ end
65
+
66
+ def add_to_hash(bib, key, val)
67
+ Metanorma::Utils::set_nested_value(bib, key.split(/\./), val)
68
+ end
69
+
70
+ # definition list, with at most one level of unordered lists
71
+ def dl_bib_extract(c, nested = false)
72
+ dl = c.at("./dl") or return
73
+ bib = {}
74
+ key = ""
75
+ dl.xpath("./dt | ./dd").each do |dtd|
76
+ dtd.name == "dt" and key = dtd.text.sub(/:+$/, "") or add_to_hash(bib, key, dd_bib_extract(dtd))
77
+ end
78
+ c.xpath("./clause").each do |c1|
79
+ key = c1&.at("./title")&.text&.downcase&.strip
80
+ next unless %w(contributor relation series).include? key
81
+ add_to_hash(bib, key, dl_bib_extract(c1, true))
82
+ end
83
+ if !nested and c.at("./title")
84
+ title = c.at("./title").remove.children.to_xml
85
+ bib["title"] = [bib["title"]] if bib["title"].is_a? Hash
86
+ bib["title"] = [bib["title"]] if bib["title"].is_a? String
87
+ bib["title"] = [] unless bib["title"]
88
+ bib["title"] << title if !title.empty?
89
+ end
90
+ bib
91
+ end
92
+ end
93
+ end
94
+ end
@@ -47,10 +47,6 @@ module Asciidoctor
47
47
  end
48
48
  end
49
49
 
50
- def termdef_boilerplate_cleanup(xmldoc)
51
- xmldoc.xpath("//terms/p | //terms/ul").each(&:remove)
52
- end
53
-
54
50
  def termdef_subclause_cleanup(xmldoc)
55
51
  xmldoc.xpath("//terms[terms]").each { |t| t.name = "clause" }
56
52
  end
@@ -83,7 +79,7 @@ module Asciidoctor
83
79
  x.name = "note"
84
80
  end
85
81
  xmldoc.xpath("//termexample[not(ancestor::term)]").each do |x|
86
- x.name = "note"
82
+ x.name = "example"
87
83
  end
88
84
  end
89
85
 
@@ -96,7 +92,6 @@ module Asciidoctor
96
92
  termdefinition_cleanup(xmldoc)
97
93
  termdomain1_cleanup(xmldoc)
98
94
  termnote_example_cleanup(xmldoc)
99
- termdef_boilerplate_cleanup(xmldoc)
100
95
  termdef_subclause_cleanup(xmldoc)
101
96
  term_children_cleanup(xmldoc)
102
97
  termdocsource_cleanup(xmldoc)
@@ -1,6 +1,4 @@
1
1
  require "asciidoctor"
2
- require "fontist"
3
- require "fontist/manifest/install"
4
2
  require "metanorma/util"
5
3
  require "metanorma/standoc/version"
6
4
  require "asciidoctor/standoc/base"
@@ -16,7 +14,6 @@ require "asciidoctor/standoc/utils"
16
14
  require "asciidoctor/standoc/cleanup"
17
15
  require "asciidoctor/standoc/reqt"
18
16
  require_relative "./macros.rb"
19
- require_relative "./log.rb"
20
17
 
21
18
  module Asciidoctor
22
19
  module Standoc
@@ -40,7 +37,8 @@ module Asciidoctor
40
37
  inline_macro Asciidoctor::Standoc::VariantInlineMacro
41
38
  inline_macro Asciidoctor::Standoc::FootnoteBlockInlineMacro
42
39
  inline_macro Asciidoctor::Standoc::TermRefInlineMacro
43
- inline_macro Asciidoctor::Standoc::IndexInlineMacro
40
+ inline_macro Asciidoctor::Standoc::IndexXrefInlineMacro
41
+ inline_macro Asciidoctor::Standoc::IndexRangeInlineMacro
44
42
  block Asciidoctor::Standoc::ToDoAdmonitionBlock
45
43
  treeprocessor Asciidoctor::Standoc::ToDoInlineAdmonitionBlock
46
44
  block Asciidoctor::Standoc::PlantUMLBlockMacro
@@ -72,8 +70,6 @@ module Asciidoctor
72
70
  basebackend "html"
73
71
  outfilesuffix ".xml"
74
72
  @libdir = File.dirname(self.class::_file || __FILE__)
75
-
76
- install_fonts(opts)
77
73
  end
78
74
 
79
75
  class << self
@@ -89,51 +85,6 @@ module Asciidoctor
89
85
  File.join(@libdir, "../../isodoc/html", file)
90
86
  end
91
87
 
92
- def flavor_name
93
- self.class.name.split("::")&.[](-2).downcase
94
- end
95
-
96
- def fonts_manifest
97
- File.expand_path(File.join(@libdir, "../../metanorma/", flavor_name, "fonts_manifest.yaml"))
98
- end
99
-
100
- def install_fonts(options={})
101
- if options[:no_install_fonts]
102
- Metanorma::Util.log("[fontist] Skip font installation because" \
103
- " --no-install-fonts argument passed", :debug)
104
- return
105
- end
106
-
107
- if fonts_manifest.nil? || !File.exist?(fonts_manifest)
108
- Metanorma::Util.log("[fontist] Skip font installation because" \
109
- " font manifest file doesn't exists/defined", :debug)
110
- return
111
- end
112
-
113
- begin
114
- Fontist::Manifest::Install.call(
115
- fonts_manifest,
116
- confirmation: options[:agree_to_terms] ? "yes" : "no"
117
- )
118
- rescue Fontist::Errors::LicensingError
119
- if !options[:agree_to_terms]
120
- Metanorma::Util.log("[fontist] --agree-to-terms option missing." \
121
- " You must accept font licenses to install fonts.", :debug)
122
- elsif options[:continue_without_fonts]
123
- Metanorma::Util.log("[fontist] Processing will continue without" \
124
- " fonts installed", :debug)
125
- else
126
- Metanorma::Util.log("[fontist] Aborting without proper fonts" \
127
- " installed", :fatal)
128
- end
129
- rescue Fontist::Errors::NonSupportedFontError
130
- flavor = flavor_name || "cli"
131
- Metanorma::Util.log("[fontist] '#{font}' font is not supported. " \
132
- "Please go to github.com/metanorma/metanorma-#{flavor}/issues" \
133
- " to report this issue.", :info)
134
- end
135
- end
136
-
137
88
  alias_method :embedded, :content
138
89
  alias_method :verse, :quote
139
90
  alias_method :audio, :skip
@@ -93,7 +93,7 @@ module Asciidoctor
93
93
  end
94
94
 
95
95
  def metadata_script(node, xml)
96
- xml.script (node.attr("script") || "Latn")
96
+ xml.script (node.attr("script") || default_script(node.attr("language")))
97
97
  end
98
98
 
99
99
  def relaton_relations
@@ -186,7 +186,7 @@ module Asciidoctor
186
186
  ["en"].each do |lang|
187
187
  at = { language: lang, format: "text/plain" }
188
188
  xml.title **attr_code(at) do |t|
189
- t << (Utils::asciidoc_sub(node.attr("title") ||
189
+ t << (Metanorma::Utils::asciidoc_sub(node.attr("title") ||
190
190
  node.attr("title-en")) || node.title)
191
191
  end
192
192
  end
@@ -46,8 +46,9 @@ module Asciidoctor
46
46
 
47
47
  # , " => ," : CSV definition does not deal with space followed by quote
48
48
  # at start of field
49
- def csv_split(s, delim = ",")
50
- CSV.parse_line(s&.gsub(/, "(?!")/, ',"'),
49
+ def csv_split(s, delim = ";")
50
+ return if s.nil?
51
+ CSV.parse_line(s&.gsub(/#{delim} "(?!")/, "#{delim}\""),
51
52
  liberal_parsing: true,
52
53
  col_sep: delim)&.compact&.map { |x| x.strip }
53
54
  end
@@ -115,8 +116,11 @@ module Asciidoctor
115
116
  node.attr("affiliation#{suffix}") and p.affiliation do |a|
116
117
  a.organization do |o|
117
118
  o.name node.attr("affiliation#{suffix}")
118
- abbr = node.attr("affiliation_abbrev#{suffix}") and
119
- o.abbreviation abbr
119
+ a = node.attr("affiliation_subdiv#{suffix}")
120
+ abbr = node.attr("affiliation_abbrev#{suffix}") and o.abbreviation abbr
121
+ csv_split(node.attr("affiliation_subdiv#{suffix}"))&.each do |s|
122
+ o.subdivision s
123
+ end
120
124
  node.attr("address#{suffix}") and o.address do |ad|
121
125
  ad.formattedAddress do |f|
122
126
  f << node.attr("address#{suffix}").gsub(/ \+\n/, "<br/>")
@@ -165,6 +165,7 @@ module Asciidoctor
165
165
  when "domain" then xml.domain { |a| a << node.text }
166
166
 
167
167
  when "strike" then xml.strike { |s| s << node.text }
168
+ when "underline" then xml.underline { |s| s << node.text }
168
169
  when "smallcap" then xml.smallcap { |s| s << node.text }
169
170
  when "keyword" then xml.keyword { |s| s << node.text }
170
171
  else
@@ -193,7 +194,7 @@ module Asciidoctor
193
194
  type = types.first.to_s
194
195
  uri = uri.sub(%r{^data:image/\*;}, "data:#{type};")
195
196
  attr_code(src: uri, #@datauriimage ? datauri(uri) : uri,
196
- id: Utils::anchor_or_uuid,
197
+ id: Metanorma::Utils::anchor_or_uuid,
197
198
  mimetype: type,
198
199
  height: node.attr("height") || "auto",
199
200
  width: node.attr("width") || "auto" ,
@@ -211,11 +212,12 @@ module Asciidoctor
211
212
  def inline_indexterm(node)
212
213
  noko do |xml|
213
214
  node.type == :visible and xml << node.text
214
- terms = node.attr("terms") ||
215
- [Nokogiri::XML("<a>#{node.text}</a>").xpath("//text()").text]
216
- xml.index nil, **attr_code(primary: terms[0],
217
- secondary: terms.dig(1),
218
- tertiary: terms.dig(2))
215
+ terms = (node.attr("terms") || [node.text]).map { |x| xml_encode(x) }
216
+ xml.index do |i|
217
+ i.primary { |x| x << terms[0] }
218
+ a = terms.dig(1) and i.secondary { |x| x << a }
219
+ a = terms.dig(2) and i.tertiary { |x| x << a }
220
+ end
219
221
  end.join
220
222
  end
221
223
  end