metanorma 1.6.2 → 1.6.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,10 +1,13 @@
  require "isodoc"
  require "htmlentities"
  require "metanorma-utils"
+ require_relative "files_lookup_sectionsplit"
 
  module Metanorma
  # XML collection renderer
  class FileLookup
+ attr_accessor :files_to_delete, :parent
+
  # hash for each document in collection of document identifier to:
  # document reference (fileref or id), type of document reference,
  # and bibdata entry for that file
@@ -18,12 +21,12 @@ module Metanorma
  @path = path
  @compile = parent.compile
  @documents = parent.documents
+ @files_to_delete = []
  read_files
  end
 
  def read_files
  @xml.xpath(ns("//docref")).each { |d| read_file(d) }
- add_section_split
  end
 
  def read_file(docref)
@@ -41,9 +44,10 @@ module Metanorma
  .attachment_bibitem(identifier).root
  else
  file, _filename = targetfile(entry, read: true)
- xml = Nokogiri::XML(file)
+ xml = Nokogiri::XML(file, &:huge)
  add_document_suffix(identifier, xml)
  entry[:anchors] = read_anchors(xml)
+ entry[:ids] = read_ids(xml)
  entry[:bibdata] = xml.at(ns("//bibdata"))
  end
  end
@@ -55,69 +59,6 @@ module Metanorma
  entry[:bibitem].at("./*[local-name() = 'ext']")&.remove
  end
 
- def add_section_split
- #require "debug"; binding.b
- ret = @files.keys.each_with_object({}) do |k, m|
- if @files[k][:sectionsplit] == "true" && !@files[k]["attachment"]
- s, manifest = sectionsplit(@files[k][:ref])
- s.each_with_index do |f1, i|
- add_section_split_instance(f1, m, k, i)
- end
- m["#{k}:index.html"] = add_section_split_cover(manifest, k)
- end
- m[k] = @files[k]
- #require "debug"; binding.b
- end
- @files = ret
- end
-
- def add_section_split_cover(manifest, ident)
- cover = section_split_cover(manifest, @parent.dir_name_cleanse(ident))
- @files[ident][:out_path] = cover
- { attachment: true, index: false, out_path: cover,
- ref: File.join(File.dirname(manifest.file), cover) }
- end
-
- def section_split_cover(col, ident)
- dir = File.dirname(col.file)
- @compile.collection_setup(nil, dir)
- #require "debug";binding.b
- CollectionRenderer.new(col, dir,
- output_folder: "#{ident}_collection",
- format: %i(html),
- coverpage: File.join(dir, "cover.html")).coverpage
- FileUtils.mv "#{ident}_collection/index.html",
- File.join(dir, "#{ident}_index.html")
- FileUtils.rm_rf "#{ident}_collection"
- "#{ident}_index.html"
- end
-
- def add_section_split_instance(file, manifest, key, idx)
- presfile = File.join(File.dirname(@files[key][:ref]),
- File.basename(file[:url]))
- newkey = key("#{key.strip} #{file[:title]}")
- manifest[newkey] =
- { parentid: key, presentationxml: true, type: "fileref",
- rel_path: file[:url], out_path: File.basename(file[:url]),
- anchors: read_anchors(Nokogiri::XML(File.read(presfile))),
- bibdata: @files[key][:bibdata], ref: presfile }
- manifest[newkey][:bare] = true unless idx.zero?
- end
-
- def sectionsplit(file)
- #require "debug"; binding.b
- @compile.compile(
- file, { format: :asciidoc, extension_keys: [:presentation] }
- .merge(@parent.compile_options)
- )
- r = file.sub(/\.xml$/, ".presentation.xml")
- xml = Nokogiri::XML(File.read(r))
- s = @compile.sectionsplit(xml, File.basename(r), File.dirname(r))
- .sort_by { |f| f[:order] }
- [s, @compile.collection_manifest(File.basename(r), s, xml, nil,
- File.dirname(r))]
- end
-
  # rel_path is the source file address, determined relative to the YAML.
  # out_path is the destination file address, with any references outside
  # the working directory (../../...) truncated
@@ -220,8 +161,19 @@ module Metanorma
  ret[val[:type]][val[:value]] = key if val[:value]
  end
 
+ # Also parse all ids in doc (including ones which won't be xref targets)
+ def read_ids(xml)
+ ret = {}
+ xml.traverse do |x|
+ x.text? and next
+ /^semantic__/.match?(x.name) and next
+ x["id"] and ret[x["id"]] = true
+ end
+ ret
+ end
+
  def key(ident)
- @c.decode(ident).gsub(/(\s|[ ])+/, " ")
+ @c.decode(ident).gsub(/(\p{Zs})+/, " ").sub(/^metanorma-collection /, "")
  end
 
  def keys
@@ -229,10 +181,8 @@ module Metanorma
  end
 
  def get(ident, attr = nil)
- if attr
- @files[key(ident)][attr]
- else
- @files[key(ident)]
+ if attr then @files[key(ident)][attr]
+ else @files[key(ident)]
  end
  end
 
@@ -0,0 +1,69 @@
+ module Metanorma
+ # XML collection renderer
+ class FileLookup
+ def add_section_split
+ ret = @files.keys.each_with_object({}) do |k, m|
+ if @files[k][:sectionsplit] == "true" && !@files[k]["attachment"]
+ s, manifest = sectionsplit(@files[k][:ref], k)
+ s.each_with_index { |f1, i| add_section_split_instance(f1, m, k, i) }
+ m["#{k}:index.html"] = add_section_split_cover(manifest, k)
+ @files_to_delete << m["#{k}:index.html"][:ref]
+ end
+ m[k] = @files[k]
+ end
+ @files = ret
+ end
+
+ def add_section_split_cover(manifest, ident)
+ cover = @sectionsplit
+ .section_split_cover(manifest, @parent.dir_name_cleanse(ident),
+ one_doc_collection?)
+ @files[ident][:out_path] = cover
+ { attachment: true, index: false, out_path: cover,
+ ref: File.join(File.dirname(manifest.file), cover) }
+ end
+
+ def one_doc_collection?
+ docs = 0
+ @files.each_value do |v|
+ v[:attachment] and next
+ v[:presentationxml] and next
+ docs += 1
+ end
+ docs > 1
+ end
+
+ def add_section_split_instance(file, manifest, key, idx)
+ presfile, newkey, xml =
+ add_section_split_instance_prep(file, key)
+ manifest[newkey] =
+ { parentid: key, presentationxml: true, type: "fileref",
+ rel_path: file[:url], out_path: File.basename(file[:url]),
+ anchors: read_anchors(xml), ids: read_ids(xml),
+ sectionsplit_output: true,
+ bibdata: @files[key][:bibdata], ref: presfile }
+ @files_to_delete << file[:url]
+ manifest[newkey][:bare] = true unless idx.zero?
+ end
+
+ def add_section_split_instance_prep(file, key)
+ presfile = File.join(File.dirname(@files[key][:ref]),
+ File.basename(file[:url]))
+ newkey = key("#{key.strip} #{file[:title]}")
+ xml = Nokogiri::XML(File.read(presfile), &:huge)
+ [presfile, newkey, xml]
+ end
+
+ def sectionsplit(file, ident)
+ @sectionsplit = Sectionsplit
+ .new(input: file, base: File.basename(file), dir: File.dirname(file),
+ output: file, compile_options: @parent.compile_options,
+ fileslookup: self, ident: ident, isodoc: @isodoc)
+ coll = @sectionsplit.sectionsplit.sort_by { |f| f[:order] }
+ xml = Nokogiri::XML(File.read(file, encoding: "UTF-8"), &:huge)
+ [coll, @sectionsplit
+ .collection_manifest(File.basename(file), coll, xml, nil,
+ File.dirname(file))]
+ end
+ end
+ end
@@ -69,7 +69,8 @@ module Metanorma
  pdf-allow-print pdf-allow-print-hq pdf-allow-fill-in-forms
  fonts font-license-agreement pdf-allow-access-content
  pdf-encrypt-metadata iso-word-template document-scheme
- localize-number iso-word-bg-strip-color modspec-identifier-base).freeze
+ localize-number iso-word-bg-strip-color modspec-identifier-base)
+ .freeze
 
  EMPTY_ADOC_OPTIONS_DEFAULT_TRUE =
  %w(data-uri-image suppress-asciimath-dup use-xinclude
@@ -80,7 +81,7 @@ module Metanorma
  toc-tables toc-recommendations).freeze
 
  def attr_name_normalise(name)
- name.gsub(/-/, "").sub(/override$/, "_override").sub(/pdf$/, "_pdf")
+ name.gsub("-", "").sub(/override$/, "_override").sub(/pdf$/, "_pdf")
  .to_sym
  end
 
@@ -1,31 +1,35 @@
  require "yaml"
+ require_relative "util"
+ require_relative "sectionsplit_links"
 
  module Metanorma
- class Compile
- # assume we pass in Presentation XML, but we want to recover Semantic XML
- def sectionsplit_convert(input_filename, file, output_filename = nil,
- opts = {})
- @isodoc = IsoDoc::Convert.new({})
- input_filename += ".xml" unless input_filename.match?(/\.xml$/)
- File.exist?(input_filename) or
- File.open(input_filename, "w:UTF-8") { |f| f.write(file) }
- presxml = File.read(input_filename, encoding: "utf-8")
- @openmathdelim, @closemathdelim = @isodoc.extract_delims(presxml)
- xml, filename, dir = @isodoc.convert_init(presxml, input_filename, false)
- build_collection(xml, presxml, output_filename || filename, dir, opts)
+ class Sectionsplit
+ attr_accessor :filecache
+
+ def initialize(opts)
+ @input_filename = opts[:input]
+ @base = opts[:base]
+ @output_filename = opts[:output]
+ @xml = opts[:xml]
+ @dir = opts[:dir]
+ @compile_opts = opts[:compile_opts] || {}
+ @fileslookup = opts[:fileslookup]
+ @ident = opts[:ident]
+ @isodoc = opts[:isodoc]
  end
 
  def ns(xpath)
  @isodoc.ns(xpath)
  end
 
- def build_collection(xml, presxml, filename, dir, opts = {})
- base = File.basename(filename)
- collection_setup(base, dir)
- files = sectionsplit(xml, base, dir)
- collection_manifest(base, files, xml, presxml, dir).render(
- { format: %i(html), output_folder: "#{filename}_collection",
- coverpage: File.join(dir, "cover.html") }.merge(opts),
+ def build_collection
+ collection_setup(@base, @dir)
+ files = sectionsplit #(@input_filename, @base, @dir, @compile_opts)
+ input_xml = Nokogiri::XML(File.read(@input_filename,
+ encoding: "UTF-8"), &:huge)
+ collection_manifest(@base, files, input_xml, @xml, @dir).render(
+ { format: %i(html), output_folder: "#{@output_filename}_collection",
+ coverpage: File.join(@dir, "cover.html") }.merge(@compile_opts),
  )
  end
 
@@ -46,13 +50,11 @@ module Metanorma
 
  def coll_cover
  <<~COVER
- <html><head/>
- <body>
+ <html><head/><body>
  <h1>{{ doctitle }}</h1>
  <h2>{{ docnumber }}</h2>
  <nav>{{ navigation }}</nav>
- </body>
- </html>
+ </body></html>
  COVER
  end
 
@@ -62,161 +64,92 @@ module Metanorma
  ["//bibliography/*[not(@hidden = 'true')]", "bibliography"],
  ["//indexsect", nil], ["//colophon", nil]].freeze
 
- def sectionsplit(xml, filename, dir)
- @key = xref_preprocess(xml)
- @splitdir = dir
- out = emptydoc(xml)
+ # Input XML is Semantic
+ # def sectionsplit(filename, basename, dir, compile_options, fileslookup = nil, ident = nil)
+ def sectionsplit
+ xml = sectionsplit_prep(File.read(@input_filename), @base, @dir)
+ @key = xref_preprocess(xml, @fileslookup, @ident)
  SPLITSECTIONS.each_with_object([]) do |n, ret|
- xml.xpath(ns(n[0])).each do |s|
- ret << sectionfile(xml, out, "#{filename}.#{ret.size}", s, n[1])
- end
- end
- end
-
- def emptydoc(xml)
- out = xml.dup
- out.xpath(
- ns("//preface | //sections | //annex | //bibliography/clause | " \
- "//bibliography/references[not(@hidden = 'true')] | //indexsect" \
- "//colophon"),
- ).each(&:remove)
- out
- end
-
- def sectionfile(fulldoc, xml, file, chunk, parentnode)
- fname = create_sectionfile(fulldoc, xml.dup, file, chunk, parentnode)
- { order: chunk["displayorder"].to_i, url: fname,
- title: titlerender(chunk) }
- end
-
- def create_sectionfile(xml, out, file, chunk, parentnode)
- ins = out.at(ns("//misccontainer")) || out.at(ns("//bibdata"))
- if parentnode
- ins.next = "<#{parentnode}/>"
- ins.next.add_child(chunk.dup)
- else ins.next = chunk.dup
- end
- xref_process(out, xml, @key)
- outname = "#{file}.xml"
- File.open(File.join(@splitdir, outname), "w:UTF-8") { |f| f.write(out) }
- outname
- end
-
- def xref_preprocess(xml)
- svg_preprocess(xml)
- key = (0...8).map { rand(65..90).chr }.join # random string
- xml.root["type"] = key # to force recognition of internal refs
- key
- end
-
- def xref_process(section, xml, key)
- refs = eref_to_internal_eref(section, xml, key)
- refs += xref_to_internal_eref(section, key)
- ins = new_hidden_ref(section)
- copied_refs = copy_repo_items_biblio(ins, section, xml)
- insert_indirect_biblio(ins, refs - copied_refs, key)
- end
-
- def svg_preprocess(xml)
- xml.xpath("//m:svg", "m" => "http://www.w3.org/2000/svg").each do |s|
- m = svgmap_wrap(s)
- s.xpath(".//m:a", "m" => "http://www.w3.org/2000/svg").each do |a|
- next unless /^#/.match? a["href"]
-
- a["href"] = a["href"].sub(/^#/, "")
- m << "<target href='#{a['href']}'>" \
- "<xref target='#{a['href']}'/></target>"
+ conflate_floatingtitles(xml.xpath(ns(n[0]))).each do |s|
+ ret << sectionfile(xml, emptydoc(xml), "#{@base}.#{ret.size}", s, n[1])
  end
  end
  end
 
- def svgmap_wrap(svg)
- ret = svg.at("./ancestor::xmlns:svgmap") and return ret
- ret = svg.at("./ancestor::xmlns:figure")
- ret.wrap("<svgmap/>")
- svg.at("./ancestor::xmlns:svgmap")
+ def block?(node)
+ %w(p table formula admonition ol ul dl figure quote sourcecode example
+ pre note pagebrreak hr bookmark requirement recommendation permission
+ svgmap inputform toc passthrough review imagemap).include?(node.name)
  end
 
- def make_anchor(anchor)
- "<localityStack><locality type='anchor'><referenceFrom>" \
- "#{anchor}</referenceFrom></locality></localityStack>"
- end
-
- def xref_to_internal_eref(xml, key)
- xml.xpath(ns("//xref")).each_with_object({}) do |x, m|
- x["bibitemid"] = "#{key}_#{x['target']}"
- x << make_anchor(x["target"])
- m[x["bibitemid"]] = true
- x.delete("target")
- x["type"] = key
- x.name = "eref"
- end.keys
- end
-
- def eref_to_internal_eref(section, xml, key)
- eref_to_internal_eref_select(section, xml).each_with_object([]) do |x, m|
- url = xml.at(ns("//bibitem[@id = '#{x}']/uri[@type = 'citation']"))
- section.xpath("//*[@bibitemid = '#{x}']").each do |e|
- id = eref_to_internal_eref1(e, key, url)
- id and m << id
+ def conflate_floatingtitles(nodes)
+ holdover = false
+ nodes.each_with_object([]) do |x, m|
+ if holdover then m.last << x
+ else m << [x]
  end
+ holdover = block?(x)
  end
  end
 
- def eref_to_internal_eref1(elem, key, url)
- if url
- elem.name = "link"
- elem["target"] = url
- nil
- else
- elem["bibitemid"] = "#{key}_#{elem['bibitemid']}"
- elem << make_anchor(elem["bibitemid"])
- elem["type"] = key
- elem["bibitemid"]
- end
- end
-
- def eref_to_internal_eref_select(section, xml)
- refs = section.xpath("//*/@bibitemid").map { |x| x.text } # rubocop:disable Style/SymbolProc
- refs.uniq.reject do |x|
- xml.at(ns("//bibitem[@id = '#{x}'][@type = 'internal']")) ||
- xml.at(ns("//bibitem[@id = '#{x}']" \
- "[docidentifier/@type = 'repository']"))
+ def sectionsplit_prep(file, filename, dir)
+ @splitdir = dir
+ xml1filename, type = sectionsplit_preprocess_semxml(file, filename)
+ Compile.new.compile(
+ xml1filename,
+ { format: :asciidoc, extension_keys: [:presentation], type: type }
+ .merge(@compile_opts),
+ )
+ Nokogiri::XML(File.read(xml1filename.sub(/\.xml$/, ".presentation.xml"),
+ encoding: "utf-8"), &:huge)
+ end
+
+ def sectionsplit_preprocess_semxml(file, filename)
+ xml = Nokogiri::XML(file, &:huge)
+ type = xml.root.name.sub("-standard", "").to_sym
+ @fileslookup&.parent&.update_xrefs(xml, @ident, {})
+ xml1 = Tempfile.open([filename, ".xml"], encoding: "utf-8") do |f|
+ #f.write(@isodoc.to_xml(svg_preprocess(xml)))
+ f.write(@isodoc.to_xml((xml)))
+ f
  end
+ @filecache ||= []
+ @filecache << xml1
+ [xml1.path, type]
  end
 
- # from standoc
- def new_hidden_ref(xmldoc)
- ins = xmldoc.at("bibliography") or
- xmldoc.root << "<bibliography/>" and ins = xmldoc.at("bibliography")
- ins.add_child("<references hidden='true' normative='false'/>").first
+ def emptydoc(xml)
+ out = xml.dup
+ out.xpath(
+ ns("//preface | //sections | //annex | //bibliography/clause | " \
+ "//bibliography/references[not(@hidden = 'true')] | //indexsect | " \
+ "//colophon"),
+ ).each(&:remove)
+ out
  end
 
- def copy_repo_items_biblio(ins, section, xml)
- xml.xpath(ns("//references/bibitem[docidentifier/@type = 'repository']"))
- .each_with_object([]) do |b, m|
- section.at("//*[@bibitemid = '#{b['id']}']") or next
- ins << b.dup
- m << b["id"]
- end
+ def sectionfile(fulldoc, xml, file, chunks, parentnode)
+ fname = create_sectionfile(fulldoc, xml.dup, file, chunks, parentnode)
+ { order: chunks.last["displayorder"].to_i, url: fname,
+ title: titlerender(chunks.last) }
  end
 
- def insert_indirect_biblio(ins, refs, prefix)
- refs.each do |x|
- ins << <<~BIBENTRY
- <bibitem id="#{x}" type="internal">
- <docidentifier type="repository">#{x.sub(/^#{prefix}_/, "#{prefix}/")}</docidentifier>
- </bibitem>
- BIBENTRY
+ def create_sectionfile(xml, out, file, chunks, parentnode)
+ ins = out.at(ns("//metanorma-extension")) || out.at(ns("//bibdata"))
+ sectionfile_insert(ins, chunks, parentnode)
+ xref_process(out, xml, @key)
+ outname = "#{file}.xml"
+ File.open(File.join(@splitdir, outname), "w:UTF-8") do |f|
+ f.write(out)
  end
+ outname
  end
 
- def recursive_string_keys(hash)
- case hash
- when Hash then hash.map { |k, v| [k.to_s, recursive_string_keys(v)] }.to_h
- when Enumerable then hash.map { |v| recursive_string_keys(v) }
- else
- hash
+ def sectionfile_insert(ins, chunks, parentnode)
+ if parentnode
+ ins.next = "<#{parentnode}/>"
+ chunks.each { |c| ins.next.add_child(c.dup) }
+ else chunks.each { |c| ins.next = c.dup }
  end
  end
 
@@ -249,7 +182,20 @@ module Metanorma
  end
  },
  }
- recursive_string_keys(ret).to_yaml
+ Util::recursive_string_keys(ret).to_yaml
+ end
+
+ def section_split_cover(col, ident, one_doc_coll)
+ dir = File.dirname(col.file)
+ collection_setup(nil, dir)
+ CollectionRenderer.new(col, dir,
+ output_folder: "#{ident}_collection",
+ format: %i(html),
+ coverpage: File.join(dir, "cover.html")).coverpage
+ filename = one_doc_coll ? "#{ident}_index.html" : "index.html"
+ FileUtils.mv "#{ident}_collection/index.html", File.join(dir, filename)
+ FileUtils.rm_rf "#{ident}_collection"
+ filename
  end
  end
  end
@@ -0,0 +1,116 @@
+ module Metanorma
+ class Sectionsplit
+ def xref_preprocess(xml, _fileslookup, _identifier)
+ key = (0...8).map { rand(65..90).chr }.join # random string
+ xml.root["type"] = key # to force recognition of internal refs
+ key
+ end
+
+ def xref_process(section, xml, key)
+ svg_preprocess(section, Metanorma::Utils::to_ncname(@ident))
+ refs = eref_to_internal_eref(section, xml, key)
+ refs += xref_to_internal_eref(section, key)
+ ins = new_hidden_ref(section)
+ copied_refs = copy_repo_items_biblio(ins, section, xml)
+ insert_indirect_biblio(ins, refs - copied_refs, key)
+ end
+
+ def svg_preprocess(xml, document_suffix)
+ xml.xpath("//m:svg", "m" => "http://www.w3.org/2000/svg").each do |s|
+ m = svgmap_wrap(s)
+ s.xpath(".//m:a", "m" => "http://www.w3.org/2000/svg").each do |a|
+ /^#/.match? a["href"] or next
+ a["href"] = a["href"].sub(/^#/, "")
+ m << "<target href='#{a['href']}'>" \
+ "<xref target='#{a['href']}_#{document_suffix}'/></target>"
+ end
+ end
+ xml
+ end
+
+ def svgmap_wrap(svg)
+ ret = svg.at("./ancestor::xmlns:svgmap") and return ret
+ ret = svg.at("./ancestor::xmlns:figure")
+ ret.wrap("<svgmap/>")
+ svg.at("./ancestor::xmlns:svgmap")
+ end
+
+ def make_anchor(anchor)
+ "<localityStack><locality type='anchor'><referenceFrom>" \
+ "#{anchor}</referenceFrom></locality></localityStack>"
+ end
+
+ def xref_to_internal_eref(xml, key)
+ xml.xpath(ns("//xref")).each_with_object({}) do |x, m|
+ x["bibitemid"] = "#{key}_#{x['target']}"
+ x << make_anchor(x["target"])
+ m[x["bibitemid"]] = true
+ x.delete("target")
+ x["type"] = key
+ x.name = "eref"
+ end.keys
+ end
+
+ def eref_to_internal_eref(section, xml, key)
+ bibitems = Util::gather_bibitems(xml)
+ bibitemids = Util::gather_bibitemids(section)
+ eref_to_internal_eref_select(section, xml, bibitems)
+ .each_with_object([]) do |x, m|
+ url = bibitems[x]&.at(ns("./uri[@type = 'citation']"))
+ bibitemids[x]&.each do |e|
+ id = eref_to_internal_eref1(e, key, url)
+ id and m << id
+ end
+ end
+ end
+
+ def eref_to_internal_eref1(elem, key, url)
+ if url
+ elem.name = "link"
+ elem["target"] = url
+ nil
+ else
+ elem["bibitemid"] = "#{key}_#{elem['bibitemid']}"
+ elem << make_anchor(elem["bibitemid"])
+ elem["type"] = key
+ elem["bibitemid"]
+ end
+ end
+
+ def eref_to_internal_eref_select(section, _xml, bibitems)
+ refs = Util::gather_bibitemids(section).keys
+ refs.uniq.reject do |x|
+ b = bibitems[x] and (b["type"] == "internal" ||
+ b.at(ns("./docidentifier/@type = 'repository']")))
+ end
+ end
+
+ # from standoc
+ def new_hidden_ref(xmldoc)
+ ins = xmldoc.at("bibliography") or
+ xmldoc.root << "<bibliography/>" and ins = xmldoc.at("bibliography")
+ ins.add_child("<references hidden='true' normative='false'/>").first
+ end
+
+ def copy_repo_items_biblio(ins, section, xml)
+ bibitems = Util::gather_bibitems(section)
+ xml.xpath(ns("//references/bibitem[docidentifier/@type = 'repository']"))
+ .each_with_object([]) do |b, m|
+ bibitems[b["id"]] or next
+ # section.at("//*[@bibitemid = '#{b['id']}']") or next
+ ins << b.dup
+ m << b["id"]
+ end
+ end
+
+ def insert_indirect_biblio(ins, refs, prefix)
+ refs.each do |x|
+ ins << <<~BIBENTRY
+ <bibitem id="#{x}" type="internal">
+ <docidentifier type="repository">#{x.sub(/^#{prefix}_/, "#{prefix}/")}</docidentifier>
+ </bibitem>
+ BIBENTRY
+ end
+ end
+ end
+ end