metanorma-mpfd 0.0.2
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +1 -0
- data/.travis.yml +17 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Gemfile +4 -0
- data/LICENSE +25 -0
- data/README.adoc +1 -0
- data/Rakefile +6 -0
- data/bin/console +14 -0
- data/bin/rspec +18 -0
- data/bin/setup +8 -0
- data/lib/asciidoctor/mpfd/biblio.rng +836 -0
- data/lib/asciidoctor/mpfd/converter.rb +204 -0
- data/lib/asciidoctor/mpfd/isodoc.rng +1041 -0
- data/lib/asciidoctor/mpfd/isostandard.rng +1001 -0
- data/lib/asciidoctor/mpfd/pdf.js +31 -0
- data/lib/asciidoctor/mpfd/rsd.rng +212 -0
- data/lib/asciidoctor/mpfd/section.rb +94 -0
- data/lib/asciidoctor/mpfd/version.rb +5 -0
- data/lib/asciidoctor/mpfd.rb +9 -0
- data/lib/isodoc/mpfd/html/header.html +184 -0
- data/lib/isodoc/mpfd/html/html_rsd_intro.html +8 -0
- data/lib/isodoc/mpfd/html/html_rsd_titlepage.html +58 -0
- data/lib/isodoc/mpfd/html/htmlstyle.scss +1094 -0
- data/lib/isodoc/mpfd/html/logo.jpg +0 -0
- data/lib/isodoc/mpfd/html/logo.svg +1 -0
- data/lib/isodoc/mpfd/html/mpfa-logo-no-text@4x.png +0 -0
- data/lib/isodoc/mpfd/html/mpfa-logo@4x.png +0 -0
- data/lib/isodoc/mpfd/html/rsd.scss +564 -0
- data/lib/isodoc/mpfd/html/scripts.html +82 -0
- data/lib/isodoc/mpfd/html/word_rsd_intro.html +3 -0
- data/lib/isodoc/mpfd/html/word_rsd_titlepage.html +42 -0
- data/lib/isodoc/mpfd/html/wordstyle.scss +1096 -0
- data/lib/isodoc/mpfd/html_convert.rb +370 -0
- data/lib/isodoc/mpfd/i18n-en.yaml +1 -0
- data/lib/isodoc/mpfd/metadata.rb +98 -0
- data/lib/isodoc/mpfd/pdf_convert.rb +367 -0
- data/lib/isodoc/mpfd/word_convert.rb +347 -0
- data/lib/metanorma/mpfd/processor.rb +43 -0
- data/lib/metanorma/mpfd.rb +7 -0
- data/lib/metanorma-mpfd.rb +11 -0
- data/metanorma-mpfd.gemspec +46 -0
- metadata +326 -0
+++ data/lib/isodoc/mpfd/html_convert.rb
@@ -0,0 +1,370 @@
require "isodoc"
require_relative "metadata"

module IsoDoc
  module Mpfd

    # A {Converter} implementation that generates HTML output, and a document
    # schema encapsulation of the document for validation
    #
    class HtmlConvert < IsoDoc::HtmlConvert
      def rsd_html_path(file)
        File.join(File.dirname(__FILE__), File.join("html", file))
      end

      def initialize(options)
        super
        @htmlstylesheet = generate_css(rsd_html_path("htmlstyle.scss"), true, default_fonts(options))
        @htmlcoverpage = rsd_html_path("html_rsd_titlepage.html")
        @htmlintropage = rsd_html_path("html_rsd_intro.html")
        @scripts = rsd_html_path("scripts.html")
        system "cp #{rsd_html_path('logo.jpg')} logo.jpg"
        system "cp #{rsd_html_path('mpfa-logo-no-text@4x.png')} mpfa-logo-no-text@4x.png"
        @files_to_delete << "logo.jpg"
        @files_to_delete << "mpfa-logo-no-text@4x.png"
      end

      def default_fonts(options)
        b = options[:bodyfont] ||
          (options[:script] == "Hans" ? '"SimSun",serif' :
           '"Titillium Web",sans-serif')
        h = options[:headerfont] ||
          (options[:script] == "Hans" ? '"SimHei",sans-serif' :
           '"Titillium Web",sans-serif')
        m = options[:monospacefont] || '"Space Mono",monospace'
        "$bodyfont: #{b};\n$headerfont: #{h};\n$monospacefont: #{m};\n"
      end

      def metadata_init(lang, script, labels)
        @meta = Metadata.new(lang, script, labels)
      end

      def html_head
        <<~HEAD.freeze
          <script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>

          <!--TOC script import-->
          <script type="text/javascript" src="https://cdn.rawgit.com/jgallen23/toc/0.3.2/dist/toc.min.js"></script>

          <!--Google fonts-->
          <link href="https://fonts.googleapis.com/css?family=Open+Sans:300,300i,400,400i,600,600i|Space+Mono:400,700" rel="stylesheet">
          <link href="https://fonts.googleapis.com/css?family=Overpass:300,300i,600,900" rel="stylesheet">
          <link href="https://fonts.googleapis.com/css?family=Titillium+Web:400,400i,700,700i" rel="stylesheet">
          <!--Font awesome import for the link icon-->
          <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.0.8/css/solid.css" integrity="sha384-v2Tw72dyUXeU3y4aM2Y0tBJQkGfplr39mxZqlTBDUZAb9BGoC40+rdFCG0m10lXk" crossorigin="anonymous">
          <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.0.8/css/fontawesome.css" integrity="sha384-q3jl8XQu1OpdLgGFvNRnPdj5VIlCvgsDQTQB6owSOHWlAurxul7f+JpUOVdAiJ5P" crossorigin="anonymous">
          <style class="anchorjs"></style>
        HEAD
      end

      def make_body(xml, docxml)
        body_attr = { lang: "EN-US", link: "blue", vlink: "#954F72", "xml:lang": "EN-US", class: "container" }
        xml.body **body_attr do |body|
          make_body1(body, docxml)
          make_body2(body, docxml)
          make_body3(body, docxml)
        end
      end

      def html_toc(docxml)
        docxml
      end

      def annex_name(annex, name, div)
        div.h1 **{ class: "Annex" } do |t|
          t << "#{get_anchors[annex['id']][:label]} "
          t << "<b>#{name.text}</b>"
        end
      end

      def annex_name_lbl(clause, num)
        obl = l10n("(#{@inform_annex_lbl})")
        obl = l10n("(#{@norm_annex_lbl})") if clause["obligation"] == "normative"
        l10n("<b>#{@annex_lbl} #{num}</b> #{obl}")
      end

      def pre_parse(node, out)
        out.pre node.text # content.gsub(/</, "&lt;").gsub(/>/, "&gt;")
      end

      def term_defs_boilerplate(div, source, term, preface)
        if source.empty? && term.nil?
          div << @no_terms_boilerplate
        else
          div << term_defs_boilerplate_cont(source, term)
        end
      end

      def i18n_init(lang, script)
        super
        @annex_lbl = "Appendix"
      end

      def error_parse(node, out)
        # catch elements not defined in ISO
        case node.name
        when "pre"
          pre_parse(node, out)
        when "keyword"
          out.span node.text, **{ class: "keyword" }
        else
          super
        end
      end

      def fileloc(loc)
        File.join(File.dirname(__FILE__), loc)
      end

      def info(isoxml, out)
        @meta.security isoxml, out
        super
      end

      def annex_name(annex, name, div)
        div.h1 **{ class: "Annex" } do |t|
          t << "#{get_anchors[annex['id']][:label]} "
          t << "<b>#{name.text}</b>"
        end
      end

      def annex_name_lbl(clause, num)
        obl = l10n("(#{@inform_annex_lbl})")
        obl = l10n("(#{@norm_annex_lbl})") if clause["obligation"] == "normative"
        l10n("<b>#{@annex_lbl} #{num}</b> #{obl}")
      end

      def pre_parse(node, out)
        out.pre node.text # content.gsub(/</, "&lt;").gsub(/>/, "&gt;")
      end

      def term_defs_boilerplate(div, source, term, preface)
        if source.empty? && term.nil?
          div << @no_terms_boilerplate
        else
          div << term_defs_boilerplate_cont(source, term)
        end
      end

      def i18n_init(lang, script)
        super
        @annex_lbl = "Appendix"
      end

      def error_parse(node, out)
        # catch elements not defined in ISO
        case node.name
        when "pre"
          pre_parse(node, out)
        when "keyword"
          out.span node.text, **{ class: "keyword" }
        else
          super
        end
      end

      def fileloc(loc)
        File.join(File.dirname(__FILE__), loc)
      end

      def i18n_init(lang, script)
        super
        y = if lang == "en"
              YAML.load_file(File.join(File.dirname(__FILE__), "i18n-en.yaml"))
            elsif lang == "zh" && script == "Hans"
              YAML.load_file(File.join(File.dirname(__FILE__),
                                       "i18n-zh-Hans.yaml"))
            else
              YAML.load_file(File.join(File.dirname(__FILE__), "i18n-zh-Hans.yaml"))
            end
        @labels = @labels.merge(y)
        @clause_lbl = y["clause"]
      end

      def terms_defs_title(f)
        return f&.at(ns("./title"))&.content
      end

      TERM_CLAUSE = "//preface/terms | "\
        "//preface/clause[descendant::terms]".freeze

      def terms_defs(isoxml, out, num)
        f = isoxml.at(ns(TERM_CLAUSE)) or return num
        out.div **attr_code(id: f["id"]) do |div|
          clause_name(nil, terms_defs_title(f), div, nil)
          f.elements.each do |e|
            parse(e, div) unless %w{title source}.include? e.name
          end
        end
        num
      end

      FRONT_CLAUSE = "//*[parent::preface]".freeze

      def preface(isoxml, out)
        isoxml.xpath(ns(FRONT_CLAUSE)).each do |c|
          if c.name == "terms" then terms_defs isoxml, out, 0
          else
            out.div **attr_code(id: c["id"]) do |s|
              clause_name(get_anchors[c['id']][:label],
                          c&.at(ns("./title"))&.content, s, nil)
              c.elements.reject { |c1| c1.name == "title" }.each do |c1|
                parse(c1, s)
              end
            end
          end
        end
      end

      def make_body3(body, docxml)
        body.div **{ class: "main-section" } do |div3|
          preface docxml, div3
          middle docxml, div3
          footnotes div3
          comments div3
        end
      end

      def middle(isoxml, out)
        middle_title(out)
        clause isoxml, out
        annex isoxml, out
        bibliography isoxml, out
      end

      def termdef_parse(node, out)
        set_termdomain("")
        node.children.each { |n| parse(n, out) }
      end

      def initial_anchor_names(d)
        d.xpath(ns(FRONT_CLAUSE)).each do |c|
          preface_names(c)
          sequential_asset_names(c)
        end
        middle_section_asset_names(d)
        clause_names(d, 0)
        termnote_anchor_names(d)
        termexample_anchor_names(d)
      end

      def annex_name_lbl(clause, num)
        l10n("<b>#{@annex_lbl} #{num}</b>")
      end

      def xclause_names(docxml, _sect_num)
        q = "//clause[parent::sections]"
        @topnum = nil
        docxml.xpath(ns(q)).each do |c|
          section_names(c, @topnum, 1)
        end
      end

      def clause_names(docxml, sect_num)
        q = "//clause[parent::sections]"
        @topnum = nil
        lvl = 0
        docxml.xpath(ns(q)).each do |c|
          container_names(c, 0)
          sect_num, lvl = sect_names(c, nil, sect_num, 0, lvl)
        end
      end

      def container_names(clause, lvl)
        if clause["container"]
          @anchors[clause["id"]] =
            { label: nil, xref: clause.at(ns("./title"))&.text, level: lvl+1 }
        end
        clause.xpath(ns("./clause | ./term | ./terms | "\
                        "./definitions")).each do |c|
          container_names(c, clause["container"] ? lvl+1 : lvl)
        end
      end

      def sect_names(clause, num, i, lvl, prev_lvl)
        return i if clause.nil?
        curr = i
        if clause["container"]
          retlvl = lvl+1
        else
          retlvl = lvl
          i+=1
          curr = i
          name = num.nil? ? i.to_s : "#{num}.#{i}"
          @anchors[clause["id"]] = { label: name, xref: l10n("#{@clause_lbl} #{name}"), level: lvl+1 }
        end
        prev = lvl
        j = 0
        clause.xpath(ns("./clause | ./term | ./terms | "\
                        "./definitions")).each do |c|
          if clause["container"]
            i, lvl = sect_names(c, num, i, lvl, lvl)
          else
            j, prev = sect_names(c, name, j, lvl+1, prev)
          end
        end
        i = j if j >0
        i = curr if lvl < prev
        [i, prev]
      end

      def annex_naming(c, num, lvl, i)
        if c["guidance"] then annex_names1(c, "#{num}E", lvl + 1)
        else
          i+= 1
          annex_names1(c, "#{num}.#{i}", lvl + 1)
        end
        i
      end

      def annex_names(clause, num)
        @anchors[clause["id"]] = { label: annex_name_lbl(clause, num),
                                   xref: "#{@annex_lbl} #{num}", level: 1 }
        i = 0
        clause.xpath(ns("./clause")).each do |c|
          i = annex_naming(c, num, 1, i)
        end
        hierarchical_asset_names(clause, num)
      end

      def annex_names1(clause, num, level)
        @anchors[clause["id"]] = { label: num, xref: "#{@annex_lbl} #{num}",
                                   level: level }
        i = 0
        clause.xpath(ns("./clause")).each do |c|
          i = annex_naming(c, num, level, i)
        end
      end

      def clause(isoxml, out)
        isoxml.xpath(ns(MIDDLE_CLAUSE)).each do |c|
          out.div **attr_code(id: c["id"]) do |s|
            clause_name(get_anchors[c['id']][:label],
                        c&.at(ns("./title"))&.content, s, class: c["container"] ? "containerhdr" : nil )
            c.elements.reject { |c1| c1.name == "title" }.each do |c1|
              parse(c1, s)
            end
          end
        end
      end

      def clause_parse_title(node, div, c1, out)
        if node["inline-header"] == "true"
          inline_header_title(out, node, c1)
        else
          attrs = { class: node["container"] ? "containerhdr" : nil }
          div.send "h#{get_anchors[node['id']][:level]}", **attr_code(attrs) do |h|
            lbl = get_anchors[node['id']][:label]
            h << "#{lbl}. " if lbl
            c1&.children&.each { |c2| parse(c2, h) }
          end
        end
      end

      def ol_depth(node)
        ol_style(node["type"])
      end
    end
  end
end
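For orientation, the sketch below shows how a converter of this kind is typically driven. It is not part of the diff: the convert call follows the usual IsoDoc::Convert API, and the option keys are the ones read by default_fonts above; treat the file name and option values as placeholders.

# Hypothetical usage sketch (not in this package): drive the converter the
# way Metanorma normally does, via IsoDoc::Convert#convert on a Metanorma
# XML file. Option keys mirror those read by default_fonts above.
require "isodoc/mpfd/html_convert"

options = {
  script: "Hans",                          # selects the SimSun/SimHei font stack
  monospacefont: '"Space Mono",monospace'  # otherwise the defaults above apply
}

converter = IsoDoc::Mpfd::HtmlConvert.new(options)
converter.convert("mpfd-document.xml")     # expected to emit mpfd-document.html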
+++ data/lib/isodoc/mpfd/i18n-en.yaml
@@ -0,0 +1 @@
clause: Paragraph
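This single entry is what the i18n_init override in html_convert.rb merges into @labels and assigns to @clause_lbl, so clause cross-references read "Paragraph 1", "Paragraph 1.2", and so on. A minimal illustration with invented values:

# Illustration only: the effect of the label override on anchor text,
# following sect_names/l10n in html_convert.rb above.
clause_lbl = "Paragraph"          # value of @clause_lbl after i18n_init
name = "3.1"                      # a computed clause number
xref = "#{clause_lbl} #{name}"    # => "Paragraph 3.1"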
+++ data/lib/isodoc/mpfd/metadata.rb
@@ -0,0 +1,98 @@
require "isodoc"
require "twitter_cldr"

module IsoDoc
  module Mpfd

    class Metadata < IsoDoc::Metadata
      def initialize(lang, script, labels)
        super
        set(:status, "XXX")
      end

      def title(isoxml, _out)
        main = isoxml&.at(ns("//title[@language='en']"))&.text
        set(:doctitle, main)
      end

      def subtitle(_isoxml, _out)
        nil
      end

      def author(isoxml, _out)
        tc = isoxml.at(ns("//editorialgroup/committee"))
        set(:tc, tc.text) if tc
      end

      def docid(isoxml, _out)
        docnumber = isoxml.at(ns("//bibdata/docidentifier"))
        docstatus = isoxml.at(ns("//bibdata/status"))
        dn = docnumber&.text
        if docstatus
          set(:status, status_print(docstatus.text))
          abbr = status_abbr(docstatus.text)
          dn = "#{dn}(#{abbr})" unless abbr.empty?
        end
        set(:docnumber, dn)
      end

      def doctype(isoxml, _out)
        b = isoxml.at(ns("//bibdata")) || return
        return unless b["type"]
        t = b["type"].split(/[- ]/).
          map{ |w| w.capitalize unless w == "MPF" }.join(" ")
        set(:doctype, t)
      end

      def status_print(status)
        status.split(/-/).map{ |w| w.capitalize }.join(" ")
      end

      def status_abbr(status)
        case status
        when "working-draft" then "wd"
        when "committee-draft" then "cd"
        when "draft-standard" then "d"
        else
          ""
        end
      end

      def version(isoxml, _out)
        super
        revdate = get[:revdate]
        set(:revdate_monthyear, monthyr(revdate))
        edition = isoxml.at(ns("//version/edition")) and
          set(:edition, edition.text.to_i.localize.
              to_rbnf_s("SpelloutRules", "spellout-ordinal").
              split(/(\W)/).map(&:capitalize).join)
      end

      MONTHS = {
        "01": "January",
        "02": "February",
        "03": "March",
        "04": "April",
        "05": "May",
        "06": "June",
        "07": "July",
        "08": "August",
        "09": "September",
        "10": "October",
        "11": "November",
        "12": "December",
      }.freeze

      def monthyr(isodate)
        m = /(?<yr>\d\d\d\d)-(?<mo>\d\d)/.match isodate
        return isodate unless m && m[:yr] && m[:mo]
        return "#{MONTHS[m[:mo].to_sym]} #{m[:yr]}"
      end

      def security(isoxml, _out)
        security = isoxml.at(ns("//bibdata/security")) || return
        set(:security, security.text)
      end
    end
  end
end
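As a quick sanity check on the helpers above, the hedged sketch below instantiates the Metadata class directly and exercises monthyr, status_print, and status_abbr. The input values are invented, and standalone instantiation is an assumption; in normal use the class is created through metadata_init in html_convert.rb.

# Hypothetical sketch (not in this package): exercising the formatting
# helpers defined above with invented inputs.
require "isodoc/mpfd/metadata"

meta = IsoDoc::Mpfd::Metadata.new("en", "Latn", {})  # (lang, script, labels)

meta.monthyr("2018-07-15")           # => "July 2018"
meta.monthyr("undated")              # => "undated" (returned unchanged)
meta.status_print("working-draft")   # => "Working Draft"
meta.status_abbr("committee-draft")  # => "cd"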