metanorma-nist 0.0.1

Files changed (42)
  1. checksums.yaml +7 -0
  2. data/.hound.yml +3 -0
  3. data/.rubocop.yml +10 -0
  4. data/.travis.yml +16 -0
  5. data/CODE_OF_CONDUCT.md +74 -0
  6. data/Gemfile +4 -0
  7. data/LICENSE +25 -0
  8. data/README.adoc +358 -0
  9. data/bin/console +14 -0
  10. data/bin/rspec +18 -0
  11. data/bin/setup +8 -0
  12. data/lib/asciidoctor/nist.rb +7 -0
  13. data/lib/asciidoctor/nist/biblio.rng +921 -0
  14. data/lib/asciidoctor/nist/converter.rb +271 -0
  15. data/lib/asciidoctor/nist/front.rb +150 -0
  16. data/lib/asciidoctor/nist/isodoc.rng +1063 -0
  17. data/lib/asciidoctor/nist/isostandard.rng +1071 -0
  18. data/lib/asciidoctor/nist/nist.rng +196 -0
  19. data/lib/isodoc/nist/html/commerce-logo-color.png +0 -0
  20. data/lib/isodoc/nist/html/deptofcommerce.png +0 -0
  21. data/lib/isodoc/nist/html/header.html +163 -0
  22. data/lib/isodoc/nist/html/html_nist_intro.html +46 -0
  23. data/lib/isodoc/nist/html/html_nist_titlepage.html +140 -0
  24. data/lib/isodoc/nist/html/htmlstyle.scss +1160 -0
  25. data/lib/isodoc/nist/html/logo.png +0 -0
  26. data/lib/isodoc/nist/html/nist.scss +749 -0
  27. data/lib/isodoc/nist/html/scripts.html +82 -0
  28. data/lib/isodoc/nist/html/scripts.pdf.html +70 -0
  29. data/lib/isodoc/nist/html/word_nist_intro.html +142 -0
  30. data/lib/isodoc/nist/html/word_nist_titlepage.html +247 -0
  31. data/lib/isodoc/nist/html/wordstyle.scss +1134 -0
  32. data/lib/isodoc/nist/html_convert.rb +454 -0
  33. data/lib/isodoc/nist/i18n-en.yaml +3 -0
  34. data/lib/isodoc/nist/metadata.rb +116 -0
  35. data/lib/isodoc/nist/pdf_convert.rb +456 -0
  36. data/lib/isodoc/nist/word_convert.rb +472 -0
  37. data/lib/metanorma-nist.rb +11 -0
  38. data/lib/metanorma/nist.rb +7 -0
  39. data/lib/metanorma/nist/processor.rb +43 -0
  40. data/lib/metanorma/nist/version.rb +5 -0
  41. data/metanorma-nist.gemspec +44 -0
  42. metadata +310 -0
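
The file listing follows the usual Metanorma layout: an Asciidoctor backend under lib/asciidoctor/nist, IsoDoc HTML/PDF/Word converters under lib/isodoc/nist, and a Metanorma::NIST::Processor that wires them into the toolchain. As rough orientation, usage would look something like the sketch below; the :nist backend name and the option keys are assumptions inferred from the file names in this release, not confirmed by it.

# Hypothetical usage sketch for metanorma-nist 0.0.1. The backend name and
# options are assumptions inferred from the file layout above.
require "metanorma-nist"
require "asciidoctor"

adoc = File.read("nist-document.adoc")

# lib/asciidoctor/nist/converter.rb is expected to register an Asciidoctor
# backend; converting with it should yield Metanorma NIST XML, which the
# IsoDoc converters (such as the WordConvert shown below) then render.
xml = Asciidoctor.convert(adoc, backend: :nist, header_footer: true, safe: :safe)
File.write("nist-document.xml", xml)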
data/lib/isodoc/nist/word_convert.rb
@@ -0,0 +1,472 @@
require "isodoc"
require_relative "metadata"
require "fileutils"

module IsoDoc
  module NIST
    # A {Converter} implementation that generates Word output, and a document
    # schema encapsulation of the document for validation

    class WordConvert < IsoDoc::WordConvert
      def initialize(options)
        @libdir = File.dirname(__FILE__)
        super
      end

      def convert1(docxml, filename, dir)
        FileUtils.cp html_doc_path("logo.png"), "#{@localdir}/logo.png"
        FileUtils.cp html_doc_path("deptofcommerce.png"), "#{@localdir}/deptofcommerce.png"
        super
      end

      def default_fonts(options)
        {
          bodyfont: (options[:script] == "Hans" ? '"SimSun",serif' : '"Times New Roman",serif'),
          headerfont: (options[:script] == "Hans" ? '"SimHei",sans-serif' : '"Arial",sans-serif'),
          monospacefont: '"Courier New",monospace'
        }
      end

      def default_file_locations(_options)
        {
          wordstylesheet: html_doc_path("wordstyle.scss"),
          standardstylesheet: html_doc_path("nist.scss"),
          header: html_doc_path("header.html"),
          wordcoverpage: html_doc_path("word_nist_titlepage.html"),
          wordintropage: html_doc_path("word_nist_intro.html"),
          ulstyle: "l3",
          olstyle: "l2",
        }
      end

      def metadata_init(lang, script, labels)
        @meta = Metadata.new(lang, script, labels)
      end

      def make_body(xml, docxml)
        body_attr = { lang: "EN-US", link: "blue", vlink: "#954F72" }
        xml.body **body_attr do |body|
          make_body1(body, docxml)
          make_body2(body, docxml)
          make_body3(body, docxml)
        end
      end

      def make_body2(body, docxml)
        body.div **{ class: "WordSection2" } do |div2|
          @prefacenum = 0
          info docxml, div2
          abstract docxml, div2
          keywords docxml, div2
          preface docxml, div2
          div2.p { |p| p << "&nbsp;" } # placeholder
        end
        section_break(body)
      end

      def cleanup(docxml)
        super
        term_cleanup(docxml)
        requirement_cleanup(docxml)
        h1_cleanup(docxml)
        toc_insert(docxml)
      end

      # Create a fallback h1 class to deal with page breaks
      def h1_cleanup(docxml)
        docxml.xpath("//h1[not(@class)]").each do |h|
          h["class"] = "NormalTitle"
        end
      end

      def toc_insert(docxml)
        insertion = docxml.at("//div[h1 = 'Executive Summary']/preceding-sibling::div[h1][1]") ||
          docxml.at("//div[@class = 'WordSection2']/child::*[last()]")
        insertion.next = make_WordToC(docxml)
        insertion.next = %{<p class="TOCTitle" style="page-break-before: always;">Table of Contents</p>}
        docxml
      end

      def make_WordToC(docxml)
        toc = ""
        docxml.xpath("//h1[not(ancestor::*[@class = 'WordSection2'])] |"\
                     "//h1[contains(., 'Executive Summary')] |"\
                     "//h2[not(ancestor::*[@class = 'WordSection2'])] |"\
                     "//h3[not(ancestor::*[@class = 'WordSection2'])]").each do |h|
          toc += word_toc_entry(h.name[1].to_i, header_strip(h))
        end
        toc.sub(/(<p class="MsoToc1">)/,
                %{\\1#{WORD_TOC_PREFACE1}}) + WORD_TOC_SUFFIX1
      end

      # Henceforth identical to the HTML converter

      def abstract(isoxml, out)
        f = isoxml.at(ns("//preface/abstract")) || return
        out.div **attr_code(id: f["id"]) do |s|
          clause_name(nil, @abstract_lbl, s, class: "AbstractTitle")
          f.elements.each { |e| parse(e, s) unless e.name == "title" }
        end
      end

      def keywords(_docxml, out)
        kw = @meta.get[:keywords]
        kw.empty? and return
        out.div **{ class: "Section3" } do |div|
          clause_name(nil, "Keywords", div, class: "IntroTitle")
          div.p kw.sort.join("; ")
        end
      end

      FRONT_CLAUSE = "//*[parent::preface][not(local-name() = 'abstract')]".freeze

      # All "[preface]" sections get class "IntroTitle" to prevent page breaks,
      # except for the Executive Summary
      def preface(isoxml, out)
        isoxml.xpath(ns(FRONT_CLAUSE)).each do |c|
          foreword(isoxml, out) and next if c.name == "foreword"
          next if skip_render(c, isoxml)
          out.div **attr_code(id: c["id"]) do |s|
            clause_name(get_anchors[c['id']][:label],
                        c&.at(ns("./title"))&.content, s,
                        class: c.name == "executivesummary" ? "NormalTitle" : "IntroTitle")
            c.elements.reject { |c1| c1.name == "title" }.each do |c1|
              parse(c1, s)
            end
          end
        end
      end

      def skip_render(c, isoxml)
        return false unless c.name == "reviewernote"
        status = isoxml&.at(ns("//bibdata/status"))&.text
        return true if status.nil?
        return ["published", "withdrawn"].include? status
      end

      def term_defs_boilerplate(div, source, term, preface)
        if source.empty? && term.nil?
          div << @no_terms_boilerplate
        else
          div << term_defs_boilerplate_cont(source, term)
        end
      end

      def i18n_init(lang, script)
        super
      end

      def fileloc(loc)
        File.join(File.dirname(__FILE__), loc)
      end

      def term_cleanup(docxml)
        docxml.xpath("//p[@class = 'Terms']").each do |d|
          h2 = d.at("./preceding-sibling::*[@class = 'TermNum'][1]")
          h2.add_child("&nbsp;")
          h2.add_child(d.remove)
        end
        docxml
      end

      def requirement_cleanup(docxml)
        docxml.xpath("//div[@class = 'recommend'][title]").each do |d|
          title = d.at("./title")
          title.name = "b"
          n = title.next_element
          n&.children&.first&.add_previous_sibling(" ")
          n&.children&.first&.add_previous_sibling(title.remove)
        end
        docxml
      end

      def figure_parse(node, out)
        return pseudocode_parse(node, out) if node["type"] == "pseudocode"
        super
      end

      def pseudocode_parse(node, out)
        @in_figure = true
        name = node.at(ns("./name"))
        out.table **attr_code(id: node["id"], class: "pseudocode") do |div|
          div.tr do |tr|
            tr.td do |td|
              node.children.each do |n|
                parse(n, td) unless n.name == "name"
              end
            end
          end
          figure_name_parse(node, div, name) if name
        end
        @in_figure = false
      end

      def dl_parse(node, out)
        return glossary_parse(node, out) if node["type"] == "glossary"
        super
      end

      def glossary_parse(node, out)
        out.dl **attr_code(id: node["id"], class: "glossary") do |v|
          node.elements.select { |n| dt_dd? n }.each_slice(2) do |dt, dd|
            v.dt **attr_code(id: dt["id"]) do |term|
              dt_parse(dt, term)
            end
            v.dd **attr_code(id: dd["id"]) do |listitem|
              dd.children.each { |n| parse(n, listitem) }
            end
          end
        end
        node.elements.reject { |n| dt_dd? n }.each { |n| parse(n, out) }
      end

      def error_parse(node, out)
        case node.name
        when "nistvariable" then nistvariable_parse(node, out)
        when "recommendation" then recommendation_parse(node, out)
        when "requirement" then requirement_parse(node, out)
        when "permission" then permission_parse(node, out)
        when "errata" then errata_parse(node, out)
        else
          super
        end
      end

      def nistvariable_parse(node, out)
        out.span **{ class: "nistvariable" } do |s|
          node.children.each { |n| parse(n, s) }
        end
      end

      def recommendation_parse(node, out)
        name = node["type"]
        out.div **{ class: "recommend" } do |t|
          t.title { |b| b << "Recommendation #{get_anchors[node['id']][:label]}:" }
          node.children.each do |n|
            parse(n, t)
          end
        end
      end

      def requirement_parse(node, out)
        name = node["type"]
        out.div **{ class: "recommend" } do |t|
          t.title { |b| b << "Requirement #{get_anchors[node['id']][:label]}:" }
          node.children.each do |n|
            parse(n, t)
          end
        end
      end

      def permission_parse(node, out)
        name = node["type"]
        out.div **{ class: "recommend" } do |t|
          t.title { |b| b << "Permission #{get_anchors[node['id']][:label]}:" }
          node.children.each do |n|
            parse(n, t)
          end
        end
      end

      def errata_parse(node, out)
        out.table **make_table_attr(node) do |t|
          t.thead do |h|
            h.tr do |tr|
              %w(Date Type Change Pages).each do |hdr|
                tr.th hdr
              end
            end
          end
          t.tbody do |b|
            node.xpath(ns("./row")).each do |row|
              b.tr do |tr|
                tr.td do |td|
                  row&.at(ns("./date"))&.children&.each do |n|
                    parse(n, td)
                  end
                end
                tr.td do |td|
                  row&.at(ns("./type"))&.children&.each do |n|
                    parse(n, td)
                  end
                end
                tr.td do |td|
                  row&.at(ns("./change"))&.children&.each do |n|
                    parse(n, td)
                  end
                end
                tr.td do |td|
                  row&.at(ns("./pages"))&.children&.each do |n|
                    parse(n, td)
                  end
                end
              end
            end
          end
        end
      end

      MIDDLE_CLAUSE = "//clause[parent::sections]|//terms[parent::sections]".freeze

      def middle(isoxml, out)
        # NIST documents don't repeat the title
        # middle_title(out)
        clause isoxml, out
        annex isoxml, out
        bibliography isoxml, out
      end

      def bibliography(isoxml, out)
        f = isoxml.at(ns("//bibliography/clause | //bibliography/references")) || return
        page_break(out)
        isoxml.xpath(ns("//bibliography/clause | //bibliography/references")).each do |f|
          out.div do |div|
            div.h1 **{ class: "Section3" } do |h1|
              f&.at(ns("./title"))&.children&.each { |n| parse(n, h1) }
            end
            f.elements.reject do |e|
              ["reference", "title", "bibitem"].include? e.name
            end.each { |e| parse(e, div) }
            biblio_list(f, div, false)
          end
        end
      end

      def info(isoxml, out)
        @meta.keywords isoxml, out
        super
      end

      SECTIONS_XPATH =
        "//foreword | //introduction | //reviewnote | //executivesummary | //annex | "\
        "//sections/clause | //bibliography/references | "\
        "//bibliography/clause".freeze

      def initial_anchor_names(d)
        d.xpath("//xmlns:preface/child::*").each do |c|
          preface_names(c)
        end
        sequential_asset_names(d.xpath("//xmlns:preface/child::*"))
        clause_names(d, 0)
        middle_section_asset_names(d)
        termnote_anchor_names(d)
        termexample_anchor_names(d)
      end

      def back_anchor_names(docxml)
        docxml.xpath(ns("//annex")).each_with_index do |c, i|
          annex_names(c, (65 + i).chr.to_s)
        end
        docxml.xpath(ns("//bibliography/clause | "\
                        "//bibliography/references")).each do |b|
          preface_names(b)
        end
        docxml.xpath(ns("//bibitem[not(ancestor::bibitem)]")).each do |ref|
          reference_names(ref)
        end
      end

      def prefaceprefix(nodes)
        i = 0
        nodes.each do |n|
          case n.name
          when "executivesummary" then @anchors[n["id"]][:prefix] = "ES"
          when "abstract" then @anchors[n["id"]][:prefix] = "ABS"
          when "reviewernote" then @anchors[n["id"]][:prefix] = "NTR"
          else
            @anchors[n["id"]][:prefix] = "PR" + i.to_s
            i += 1
          end
        end
      end

      def middle_section_asset_names(d)
        prefaceprefix(d.xpath("//xmlns:preface/child::*"))
        d.xpath("//xmlns:preface/child::*").each do |s|
          hierarchical_asset_names(s, @anchors[s["id"]][:prefix])
        end
        d.xpath("//xmlns:sections/child::*").each do |s|
          hierarchical_asset_names(s, @anchors[s["id"]][:label])
        end
      end

      def hierarchical_asset_names(clause, num)
        super
        hierarchical_permission_names(clause, num)
        hierarchical_requirement_names(clause, num)
        hierarchical_recommendation_names(clause, num)
      end

      def hierarchical_permission_names(clause, num)
        clause.xpath(ns(".//permission")).each_with_index do |t, i|
          next if t["id"].nil? || t["id"].empty?
          @anchors[t["id"]] = anchor_struct("#{num}.#{i + 1}",
                                            t, "Permission", "permission")
        end
      end

      def hierarchical_requirement_names(clause, num)
        clause.xpath(ns(".//requirement")).each_with_index do |t, i|
          next if t["id"].nil? || t["id"].empty?
          @anchors[t["id"]] = anchor_struct("#{num}.#{i + 1}",
                                            t, "Requirement", "requirement")
        end
      end

      def hierarchical_recommendation_names(clause, num)
        clause.xpath(ns(".//recommendation")).each_with_index do |t, i|
          next if t["id"].nil? || t["id"].empty?
          @anchors[t["id"]] = anchor_struct("#{num}.#{i + 1}",
                                            t, "Recommendation", "recommendation")
        end
      end

      def clause_names(docxml, sect_num)
        q = "//xmlns:sections/child::*"
        docxml.xpath(q).each_with_index do |c, i|
          section_names(c, (i + sect_num), 1)
        end
      end

      def get_linkend(node)
        link = anchor_linkend(node, docid_l10n(node["target"] || "[#{node['citeas']}]"))
        link += eref_localities(node.xpath(ns("./locality")), link)
        contents = node.children.select { |c| c.name != "locality" }
        return link if contents.nil? || contents.empty?
        Nokogiri::XML::NodeSet.new(node.document, contents).to_xml
        # so not <origin bibitemid="ISO7301" citeas="ISO 7301">
        # <locality type="section"><reference>3.1</reference></locality></origin>
      end

      def load_yaml(lang, script)
        y = if @i18nyaml then YAML.load_file(@i18nyaml)
            elsif lang == "en"
              YAML.load_file(File.join(File.dirname(__FILE__), "i18n-en.yaml"))
            else
              YAML.load_file(File.join(File.dirname(__FILE__), "i18n-en.yaml"))
            end
        super.merge(y)
      end

      def annex_name_lbl(clause, num)
        l10n("<b>#{@annex_lbl} #{num}</b>")
      end

      def annex_name(annex, name, div)
        div.h1 **{ class: "Annex" } do |t|
          t << "#{get_anchors[annex['id']][:label]} &mdash; "
          t.b do |b|
            name&.children&.each { |c2| parse(c2, b) }
          end
        end
      end

      def hiersep
        "-"
      end
    end
  end
end
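
For the Word converter added above, a direct invocation would look roughly like the sketch below. The option key and the convert entry point are inherited IsoDoc conventions and are assumptions on my part, not something shown in this diff.

# Hypothetical direct use of IsoDoc::NIST::WordConvert; the convert call
# signature is inherited from IsoDoc and assumed here, not defined in this file.
require "metanorma-nist"

options = { script: "Latn" } # default_fonts above switches fonts when script == "Hans"
IsoDoc::NIST::WordConvert.new(options).convert("nist-document.xml")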