kramdown-latexish 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,170 @@
+module Kramdown::Latexish
+  # Lexical tools, including localisation
+  #
+  # Instances are constructed with the language in use:
+  #
+  #   language:
+  #     One of :english or :french
+  class Lexical
+
+    # The active language
+    attr_reader :language
+
+    # All supported languages
+    attr_reader :languages
+
+    def initialize(language)
+      @language = language
+
+      # The specifications from which everything else is derived
+      @localisation = {
+        :abstract => {
+          :english => 'abstract(s)',
+          :french => 'abstract(s)',
+        },
+        :definition => {
+          :english => 'definition(s)',
+          :french => 'définition(s)',
+        },
+        :postulate => {
+          :english => 'postulate(s)',
+          :french => 'postulat(s)',
+        },
+        :property => {
+          :english => 'property(<ies)',
+          :french => 'propriété(s)',
+        },
+        :lemma => {
+          :english => 'lemma(s)',
+          :french => 'lemme(s)',
+        },
+        :theorem => {
+          :english => 'theorem(s)',
+          :french => 'théorème(s)',
+        },
+        :corollary => {
+          :english => 'corollary(<ies)',
+          :french => 'corollaire(s)',
+        },
+        :section => {
+          :english => 'section(s)',
+          :french => 'section(s)',
+        },
+        :reference => {
+          :english => 'reference(s)',
+          :french => 'référence(s)',
+        },
+        :eqn => {
+          :english => 'eqn(s)',
+          :french => 'éqn(s)'
+        },
+        :and => {
+          :english => 'and',
+          :french => 'et',
+        },
+      }
+
+      # The list of languages, computed from @localisation
+      @languages = @localisation.values.map(&:keys).reduce(:&)
+
+      # Associate e.g. "property" and "properties" to :property
+      @reverse_localisation = Hash[@languages.map {|lang|
+        h = {}
+        @localisation.keys.map do |category|
+          h[localise(category, :singular)] = category
+          h[localise(category, :plural)] = category
+        end
+        [lang, h]
+      }]
+    end
+
+    # The word in the current language and specified singular/plural form
+    # corresponding to the given symbol.
+    # E.g. :property => "propriété" for form=:singular in French
+    def localise(symbol, form=:singular)
+      %r{^ (?<singular> [[:alpha:]]+)
+           (
+             \(
+             (?<back><+)?
+             (?<plural_ending> [[:alpha:]]+)
+             \)
+           )?
+      }x =~ @localisation[symbol][language]
+      return singular if plural_ending.nil?
+      stop = back.nil? ? -1 : -back.length - 1
+      case form
+      when :singular
+        singular
+      when :plural
+        singular[..stop] + plural_ending
+      else
+        raise "Unknown form: #{form}"
+      end
+    end
+
+    # The symbol corresponding to the given word,
+    # i.e. the reverse of `localise`.
+    # E.g. "properties" => :property in English
+    def symbolise(word)
+      @reverse_localisation[language][word.downcase]
+    end
+
+    # The lexical conjunction, with commas and the word "and", of the given words
+    #
+    # This method is versatile as it can take an array of strings, or of more
+    # complex objects, and return an array or a joined string.
+    #
+    # SYNOPSIS
+    #
+    #   and(%w(apples pears)) is "apples and pears"
+    #
+    #   and(%w(apples pears), joined:false) is ["apples", " and ", "pears"]
+    #
+    #   and([E("apples"), E("pears")], joined:false) is
+    #   [E("apples"), E(" and "), E("pears")]
+    #
+    # Passing `joined:true` in this case would most likely not make sense and
+    # lead to an error, unless the objects returned by E(…) support enough of
+    # the String API.
+    #
+    # Commas appear with three elements or more
+    #
+    #   and(%w(apples bananas pears)) is "apples, bananas, and pears"
+    #
+    # The method knows that in some languages the equivalent of that final "and"
+    # is not preceded by a comma. For example, in French
+    #
+    #   and(%w(pommes bananes poires)) is "pommes, bananes et poires"
+    #
+    def and(array, joined: true, nbsp: false)
+      and_ = localise(:and, language)
+      comma_sep = ', '
+      and_sep_2 = ' ' + and_
+      # Some languages put a comma before "and", others don't
+      and_sep_n = ([:english].include?(language) ? ', ' : ' ') + and_
+      space = nbsp ? '&nbsp;' : ' '
+      and_sep_2 += space
+      and_sep_n += space
+
+      if block_given?
+        comma_sep = yield(comma_sep)
+        and_sep_2 = yield(and_sep_2)
+        and_sep_n = yield(and_sep_n)
+      end
+      output = case array.size
+      when 1
+        array
+      when 2
+        [array[0], and_sep_2, array[1]]
+      else
+        seps = Array.new(array.size - 2).fill(comma_sep) + [and_sep_n]
+        array.zip(seps).flatten.compact
+      end
+      if joined
+        output.join
+      else
+        output
+      end
+    end
+  end
+end
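For orientation, here is a short usage sketch of the Lexical helper above. It is not part of the package; it assumes the gem and its dependencies are installed and that everything is loaded through the gem's main entry point.

    require 'kramdown/latexish'

    lex = Kramdown::Latexish::Lexical.new(:french)
    lex.localise(:property)              # => "propriété"
    lex.localise(:property, :plural)     # => "propriétés"
    lex.symbolise("propriétés")          # => :property
    lex.and(%w(pommes bananes poires))   # => "pommes, bananes et poires"

    # The "(<ies)" spec backtracks one character before appending the plural ending
    Kramdown::Latexish::Lexical.new(:english).localise(:corollary, :plural)
    # => "corollaries"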
@@ -0,0 +1,5 @@
+module Kramdown
+  module Latexish
+    VERSION = "1.0.0"
+  end
+end
@@ -0,0 +1,496 @@
+require "kramdown/latexish/version"
+
+require 'kramdown/parser/kramdown'
+require 'kramdown/converter/html'
+require 'kramdown/document'
+
+require 'bibtex'
+require 'citeproc'
+require 'csl/styles'
+
+require 'kramdown/latexish/bibliographical'
+require 'kramdown/latexish/lexical'
+
+# An extension of the Kramdown parser aimed at mathematical articles
+#
+# The way the kramdown library is structured, the parser class must be in
+# module Kramdown::Parser, so that the option `:input => "Latexish"` can be
+# passed to Kramdown::Document to make it use that parser.
+class Kramdown::Parser::Latexish < Kramdown::Parser::Kramdown
+
+  include Kramdown::Latexish::Bibliographical
+
+  # Tags we support for theorem-like environments
+  THEOREM_LIKE_TAGS = [:definition, :postulate, :property, :lemma,
+                       :theorem, :corollary]
+
+  # All our special tags defined above
+  SPECIAL_TAGS = THEOREM_LIKE_TAGS + [:section]
+
+  # Initialise the parser
+  #
+  # This supports the following options in addition to those supported by
+  # the base class
+  #
+  #   :language
+  #     A symbol identifying the language. Currently supported are :english
+  #     and :french (default :english)
+  #   :theorem_header_level
+  #     A theorem-like environment starts with a header: this option is the level
+  #     of that header (default 5)
+  #   :auto_number_headers
+  #     Whether to automatically number headers (default true)
+  #   :no_number
+  #     A list of header titles which should not be automatically numbered
+  #     (default [reference_section_name], i.e. the localised title of the
+  #     reference section)
+  #   :bibliography
+  #     A `BibTeX::Bibliography` object containing the references to appear
+  #     at the end of the document, and which may be cited in the rest of it.
+  #     (default nil)
+  #   :bibliography_style
+  #     A symbol designating the CSL style used to format the reference section.
+  #     A complete list can be found
+  #     [here](https://github.com/citation-style-language/styles)
+  #     where the basename without the extension is the symbol to be passed.
+  #     (default :apa, for the APA style)
+  #   :latex_macros
+  #     A list of LaTeX macros that all equations in the document shall be able
+  #     to use. To do so, they are put in a math block at the beginning of the
+  #     document.
+  #     (default [])
+  #   :hide_latex_macros?
+  #     Whether the math block containing the LaTeX macros is completely hidden
+  #     when converted to HTML
+  #     (default true)
+  def initialize(source, options)
+    super
+
+    # Initialise language and lexical delegate
+    @lex = Kramdown::Latexish::Lexical.new(@options[:language] ||= :english)
+
+    # Initialise the rest of our custom options
+    @options[:theorem_header_level] ||= 5
+    @options[:auto_number_headers] = true if @options[:auto_number_headers].nil?
+    @options[:no_number] ||= [reference_section_name]
+    @options[:bibliography_style] ||= :apa
+    @options[:latex_macros] ||= []
+    @options[:hide_latex_macros?] = true if @options[:hide_latex_macros?].nil?
+
+    # Add our new parsers
+    @span_parsers.unshift(:latex_inline_math)
+
+    # For parsing theorem environments
+    rx = THEOREM_LIKE_TAGS
+      .map{|tag| @lex.localise(tag)}
+      .map(&:capitalize)
+      .join('|')
+    rx = rx + '|' + @lex.localise(:abstract).capitalize
+    @environment_start_rx = / \A (#{rx}) (?: [ \t] ( \( .+? \) ) )? \s*? \Z /xm
+    @environment_end_rx = / \A \\ (#{rx}) \s*? \Z /xm
+
+    # Last encountered theorem header
+    @th = nil
+
+    # For assigning a number to each header
+    @next_section_number = []
+    @last_header_level = 0
+
+    # For tracking references to our special constructs
+    @number_for = {}
+    @category_for = {}
+
+    # For numbering theorem-like environments
+    @next_theorem_like_number = Hash[THEOREM_LIKE_TAGS.map{|tag| [tag, 0]}]
+
+    # BibTeX keys found in citations
+    @cited_bibkeys = Set[]
+  end
+
+  def language
+    @options[:language]
+  end
+
+  def bibliography
+    @options[:bibliography]
+  end
+
+  def reference_section_name
+    @lex.localise(:reference, :plural).capitalize
+  end
+
+  # Redefine a parser previously added with `define_parser`
+  def self.redefine_parser(name, start_re, span_start = nil,
+                           meth_name = "parse_#{name}")
+    @@parsers.delete(name)
+    define_parser(name, start_re, span_start, meth_name)
+  end
+
+  # Parse $...$ which do not make a block
+  # We do not need to start the regex with (?<!\$) because the scanner
+  # is placed at the first $ it encounters.
+  LATEX_INLINE_MATH_RX = /\$ (?!\$) (.*?) (?<!\$) \$ (?!\$) /xm
+  def parse_latex_inline_math
+    parse_inline_math
+  end
+  define_parser(:latex_inline_math, LATEX_INLINE_MATH_RX, '\$')
+
+  # Parsing of environments
+  #
+  # We override the parsing of paragraphs, by detecting the start and end
+  # markers of an environment, then reshuffling the elements parsed by super.
+  def parse_paragraph
+    return false unless super
+
+    # We do indeed have a paragraph: we will return true in any case
+    # but we may do some processing beforehand if we find one of our
+    # environments
+    els = @tree.children
+    case els.last.children[0].value
+    when @environment_start_rx
+      # We have an environment header: keep necessary info
+      @th = [els.size - 1, $1, $2, @src.current_line_number]
+    when @environment_end_rx
+      # We have an end tag: do we have a starting one?
+      end_tag = $1
+      end_loc = @src.current_line_number
+      unless @th
+        warning(
+          "`\\#{end_tag}` on line #{end_loc} without " \
+          "any `#{end_tag}` earlier on")
+      else
+        # We have a beginning tag: does it match the end tag?
+        start_idx, start_tag, start_label, start_loc = @th
+        unless end_tag == start_tag
+          warning("\\#{end_tag} on line #{end_loc} does not match " \
+                  "#{start_tag} on line #{start_loc}")
+        else
+          # We have a valid environment: discriminate
+          if @lex.symbolise(start_tag) == :abstract
+            add_abstract(start_tag, start_idx, start_label,
+                         start_loc, end_loc)
+          else
+            add_theorem_like(start_tag, start_idx, start_label,
+                             start_loc, end_loc)
+          end
+          # Prepare for a new paragraph
+          @th = nil
+        end
+      end
+    end
+    true
+  end
+
+  # Add a theorem-like environment (internal helper method)
+  def add_theorem_like(tag, start_idx, start_label,
+                       start_loc, end_loc)
+    category = @lex.symbolise(tag)
+    els = @tree.children
+    header = els[start_idx]
+
+    # Merge header ial's with .theorem-like
+    ial = header.options[:ial] || {}
+    update_ial_with_ial(ial, {'class' => 'theorem-like'})
+
+    # Increment number
+    nb = @next_theorem_like_number[category] += 1
+
+    # Process id
+    unless (id = ial['id']).nil?
+      @number_for[id] = nb
+      @category_for[id] = category
+    end
+
+    # Create a <section> for the theorem with those ial's
+    el = new_block_el(:html_element, 'section', ial,
+                      :category => :block, :content_model => :block)
+
+    # Create header and add it in the section
+    elh = new_block_el(:header, nil, nil,
+                       :level => @options[:theorem_header_level])
+    # We can add kramdown markup here as it has yet to be seen by the span parsers
+    add_text("**#{tag} #{nb}** #{start_label}".rstrip, elh)
+    el.children << elh
+
+    # Add all the other elements processed after the header paragraph
+    el.children += els[start_idx + 1 .. -2]
+
+    # Replace all the elements processed since the header paragraph
+    # by our section
+    els[start_idx ..] = el
+  end
+
+  # Add an abstract (internal helper method)
+  def add_abstract(tag, start_idx, start_label,
+                   start_loc, end_loc)
+    els = @tree.children
+    header = els[start_idx]
+
+    # Merge header ial's with .abstract
+    ial = header.options[:ial] || {}
+    update_ial_with_ial(ial, {'class' => 'abstract'})
+
+    # Create a <div> for the abstract
+    el = new_block_el(:html_element, 'div', ial,
+                      :category => :block, :content_model => :block)
+
+    # Add all the other elements processed after the header paragraph
+    el.children += els[start_idx + 1 .. -2]
+
+    # Replace all the elements processed since the header paragraph
+    # by our div
+    els[start_idx ..] = el
+  end
+
+  # Auto-numbering of headers
+  #
+  # We override this method so that it will work with both setext and atx
+  # headers out of the box
+  def add_header(level, text, id)
+    # Only h2, h3, … as h1 is for the title
+    lvl = level - 1
+    if lvl > 0
+      if @options[:auto_number_headers] && !@options[:no_number].include?(text)
+        # Compute the number a la 2.1.3
+        if lvl == @last_header_level
+          @next_section_number[-1] += 1
+        elsif lvl > @last_header_level
+          ones = [1]*(lvl - @last_header_level)
+          @next_section_number.push(*ones)
+        else
+          @next_section_number.pop(@last_header_level - lvl)
+          @next_section_number[-1] += 1
+        end
+        @last_header_level = lvl
+        nb = @next_section_number.join('.')
+
+        # Prepend it to the header text, removing a leading number if any
+        text.gsub!(/^\s*[\d.]*\s*/, '')
+        text = "#{nb} #{text}"
+
+        # If it has an id, keep track of the association with its number
+        @number_for[id] = nb if id
+        @category_for[id] = :section
+      end
+    end
+
+    # Let Kramdown handle it now
+    super(level, text, id)
+  end
+
+  # Parse reference links to sections
+  #
+  # We override parse_link: if we find one of our special reference links or
+  # one of our bibliographical citations we process it, otherwise we let super
+  # handle it. This method is called by Kramdown when it is ready to handle
+  # links, so we can assume that all id's, and therefore all their associated
+  # numbers, are known by then.
+  def parse_link
+    start_pos = @src.save_pos
+    parsed = false
+    # Nothing to do if it is an image link
+    if @src.peek(1) != '!'
+      if @src.scan(SPECIAL_REF_RX)
+        parsed = handle_special_ref_link
+      elsif @src.scan(BIB_CITE_RX)
+        parsed = handle_bibliographic_citation_link
+      end
+    end
+    unless parsed
+      @src.revert_pos(start_pos)
+      super
+    end
+  end
+
+  # Regexes for reference links to sections
+  SPECIAL_REF_RX = /\[ \s* (C|c)ref : \s* ( [^\]]+ ) \s* \]/x
+
+  def handle_special_ref_link
+    loc = @src.current_line_number
+    capital = @src[1] == 'C'
+    if @src[2].nil?
+      warning("No reference specified at line #{loc}")
+      @tree.children << Element.new(:text, @src[0], nil, location: loc)
+    else
+      # Group the keys by header category
+      ids_for = {}
+      @src[2].split(/\s*,\s*/).map do |id|
+        (ids_for[@category_for[id] || :undefined] ||= []) << id
+      end
+      # For each category, and each id of that category...
+      ref_chunks = ids_for.each_with_index.map do |(category, ids), i|
+        # Generate the reference for each id
+        nums = ids.map do |id|
+          case category
+          when :undefined
+            warning("No element with id '#{id}' at line #{loc}")
+            el = Element.new(:text, "¿#{id}?", nil, location: loc)
+          when :eqn
+            # Referencing equations is delegated to MathJax by client code
+            el = Element.new(:text, "\\eqref{#{id}}", nil, location: loc)
+          else
+            nb = @number_for[id]
+            el = Element.new(:a, nil, nil, location: loc)
+            el.attr['href'] = "##{id}"
+            el.attr['title'] = "#{@lex.localise(category).capitalize} #{nb}"
+            el.children << Element.new(:text, nb.to_s, nil, location: loc)
+          end
+          el
+        end
+        # Join all the references and put the title in front
+        # We don't want "and" to be separated from the following link
+        refs = @lex.and(nums, joined: false, nbsp: true) {|word|
+          Element.new(:text, word, nil, location: loc)
+        }
+        if category != :undefined
+          form = ids.size == 1 ? :singular : :plural
+          label = @lex.localise(category, form)
+          label = label.capitalize if capital and i == 0
+          label = Element.new(:text, label + '&nbsp;', nil, location: loc)
+          [label] + refs
+        else
+          refs
+        end
+      end
+      # Conjoin again and append all that to the tree
+      # This time "and" should get separated from the following label so as
+      # not to stress the layout engine when it wraps lines
+      references = @lex.and(ref_chunks, joined:false) {|word|
+        [Element.new(:text, word, nil, location: loc)]
+      }
+      .flatten(1)
+      @tree.children += references
+    end
+    true
+  end
+
+  # Regex for bibliographic citations
+  BIB_CITE_RX = / \[ \s* cite(p|t) : \s+ ( [^\]]+ ) \s* \]/x
+
+  def handle_bibliographic_citation_link
+    return false if bibliography.nil?
+    loc = @src.current_line_number
+    style = @src[1] == 'p' ? :parenthetical : :textual
+    bibkeys = @src[2].split /\s*,\s*/
+    unless bibkeys.empty?
+      # Array of Elements, one for each key
+      elements = bibkeys.map do |key|
+        et_al = false
+        if key[0] == '*'
+          et_al = true
+          key = key[1..]
+        end
+        # Keep track of the keys that have been cited
+        @cited_bibkeys << key if bibliography.key?(key)
+
+        el = Element.new(:a, nil, nil, location: loc)
+        el.attr['href'] = "##{key}"
+        el.children << Element.new(:text,
+                                    citation_for(key, style, et_al, loc),
+                                    nil,
+                                    location: loc)
+        el
+      end
+      # Then we put them together with commas and the word "and"
+      conjunction = @lex.and(elements, joined: false) do |word|
+        Element.new(:text, word, nil, location: loc)
+      end
+      # Then output that array of Elements
+      @tree.children += conjunction
+      # Done
+      true
+    else
+      warning("Empty bibliographic citation at line #{loc}")
+      false
+    end
+  end
+
+  # Override parse to produce the Reference section
+  def parse
+    super
+    produce_latex_macros
+    produce_reference_section
+  end
+
+  # Override parsing of block math to gather \label's
+  def parse_block_math
+    result = super
+    @tree.children.last.value.scan(/\\label\s*\{(.*?)\}/) do
+      @category_for[$~[1]] = :eqn
+    end
+    result
+  end
+
+  # Produce the section containing the bibliographic references at the end
+  # of the document
+  def produce_reference_section
+    unless @cited_bibkeys.empty?
+      cp = CiteProc::Processor.new(style: @options[:bibliography_style],
+                                   format: 'html')
+      cp.import(bibliography.to_citeproc)
+      references = @cited_bibkeys.map {|key|
+        html = cp.render(:bibliography, id: key)[0]
+        html = clean_bibtex(html)
+        html += "\n{: .bibliography-item ##{key}}"
+      }
+      .join("\n\n")
+      biblio = <<~"MD"
+
+        ## #{reference_section_name}
+
+        #{references}
+      MD
+      # Since @options selects this parser via :input, this will use this parser
+      # and not the default one. In particular, $...$ will produce
+      # inline equations
+      bib_doc = Kramdown::Document.new(biblio, @options)
+      # TODO: fix line numbers
+      @root.children += bib_doc.root.children
+    end
+  end
+
+  # Produce math block with LaTeX macros
+  def produce_latex_macros
+    macros = @options[:latex_macros]
+    unless macros.empty?
+      opts = {
+        :style => "display:#{@options[:hide_latex_macros?] ? 'none' : 'block'}"
+      }
+      el = Element.new(
+        :html_element, 'div', opts,
+        category: :block, content_model: :block)
+      macros = (['\text{\LaTeX Macros:}'] + macros).join("\n")
+      el.children << Element.new(:math, macros, nil, category: :block)
+      # TODO: fix line numbers
+      @root.children.prepend(el)
+    end
+  end
+end
+
+
+module Kramdown::Latexish
+  # The extra options to pass to Kramdown::Document to make it correctly parse
+  # and convert the mathematical articles we target. The instantiation should
+  # therefore always be done as the equivalent of
+  #   options = { ... }
+  #   ...
+  #   options = Kramdown::Latexish::taylor_options(options)
+  #   doc = Kramdown::Document.new(source, options)
+  #
+  # It will override :input and :auto_ids, so setting those in `options`
+  # is useless, and potentially confusing.
+  #
+  # Why this design instead of creating a document class inheriting
+  # `Kramdown::Document`? The reason stems from a common use case,
+  # exemplified by static website generators such as Nanoc or Middleman.
+  # User code never directly instantiates a document. Instead it
+  # calls a method from Nanoc or Middleman, which will in turn instantiate a
+  # document. The problem is that this object is not visible to the client
+  # code. However, Nanoc and Middleman let client code pass options to
+  # initialise the document. Hence the present design. The only alternative
+  # would have been to monkeypatch Kramdown::Document, but we think it is
+  # cleaner to avoid doing that.
+  def self.taylor_options(options)
+    options.merge({:input => 'Latexish', :auto_ids => false})
+  end
+end
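To put the pieces together, here is a hypothetical end-to-end sketch (not part of the package) following the usage pattern described in the comment above. The file refs.bib and the key knuth1984 are made-up placeholders, the citation rendering relies on the Bibliographical module which is not shown in this diff, and the whole thing assumes the gem and its dependencies are installed.

    require 'kramdown/latexish'

    source = <<~MD
      ## Introduction

      Theorem (Pythagoras)
      {: #pyth}

      In a right triangle, $a^2 + b^2 = c^2$.

      \\Theorem

      [Cref: pyth] is discussed by [citet: knuth1984].
    MD

    options = Kramdown::Latexish.taylor_options({
      :language     => :english,
      :bibliography => BibTeX.open('refs.bib'),            # hypothetical .bib file
      :latex_macros => ['\newcommand{\R}{\mathbb{R}}'],
    })

    # :input => 'Latexish', set by taylor_options, routes parsing through the
    # parser defined above; headers get numbered, the theorem becomes a
    # <section class="theorem-like">, and a reference section is appended for
    # cited keys found in the bibliography.
    puts Kramdown::Document.new(source, options).to_html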