wortsammler 0.0.2
Sign up to get free protection for your applications and to get access to all the features.
- data/.gitignore +17 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/README.md +131 -0
- data/README.pdf +0 -0
- data/Rakefile +34 -0
- data/bin/wortsammler +4 -0
- data/lib/wortsammler/class.Traceable.md.rb +104 -0
- data/lib/wortsammler/class.Traceable.rb +387 -0
- data/lib/wortsammler/class.proolib.rb +747 -0
- data/lib/wortsammler/class.treetopHelper.rb +117 -0
- data/lib/wortsammler/exe.wortsammler.rb +428 -0
- data/lib/wortsammler/log_helper.rb +8 -0
- data/lib/wortsammler/mdTraceParser.treetop +55 -0
- data/lib/wortsammler/rake_helper.rb +28 -0
- data/lib/wortsammler/version.rb +3 -0
- data/lib/wortsammler.rb +5 -0
- data/pkg/wortsammler-0.0.1.gem +0 -0
- data/resources/default.latex +225 -0
- data/resources/logo.jpg +0 -0
- data/resources/main.md +268 -0
- data/resources/rakefile.rb +5 -0
- data/resources/requirementsSynopsis.graphml +17 -0
- data/resources/sample_the-sample-document.yaml +51 -0
- data/spec/test.graphml +74 -0
- data/spec/traceable_spec.rb +299 -0
- data/spec/wortsammler_spec.rb +168 -0
- data/testresults/wortsammler_testresults.html +408 -0
- data/testresults/wortsammler_testresults.log +59 -0
- data/wortsammler.gemspec +53 -0
- metadata +282 -0
@@ -0,0 +1,747 @@
|
|
1
|
+
#
|
2
|
+
# This script converts the trace-References in a markdown file
|
3
|
+
# to hot references.
|
4
|
+
#
|
5
|
+
# usage prepareTracingInPandoc <infile> <format> <outfile>
|
6
|
+
#
|
7
|
+
# Traces are formatted according to [RS_DM_008].
|
8
|
+
#
|
9
|
+
# Trace itself becomes the target, uptraces are converted to references.
|
10
|
+
#
|
11
|
+
# Traces can also be referenced by
|
12
|
+
#
|
13
|
+
#
|
14
|
+
require 'rubygems'
|
15
|
+
require 'yaml'
|
16
|
+
require 'tmpdir'
|
17
|
+
require 'nokogiri'
|
18
|
+
require "rubyXL"
|
19
|
+
|
20
|
+
|
21
|
+
#require 'ruby-debug' #if not RUBY_PLATFORM=="i386-mingw32"
|
22
|
+
|
23
|
+
# TODO: make these patterns part of the configuration

# matches an empty html anchor, e.g. <a id="target"/>; captures the id
ANY_ANCHOR_PATTERN = /<a\s+id=\"([^\"]+)\"\/>/
# matches an html reference, e.g. <a href="#target">label</a>;
# captures the target (without '#') and the label text
ANY_REF_PATTERN = /<a\s+href=\"#([^\"]+)\"\>([^<]*)<\/a>/

# matches a trace definition such as [RS_DM_008] followed by ' **';
# captures the trace id and the trailing bold marker
TRACE_ANCHOR_PATTERN = /\[(\w+_\w+_\w+)\](\s*\*\*)/
# matches an uptrace list such as }(RS_DM_008, RS_DM_009);
# captures the whole comma separated list of trace ids
UPTRACE_REF_PATTERN = /\}\( ((\w+_\w+_\w+) (,\s*\w+_\w+_\w+)*)\)/x
# matches an informal trace reference such as ->[RS_DM_008]
TRACE_REF_PATTERN = /->\[(\w+_\w+_\w+)\]/

# the PDF include directive ~~PDF "file" "heading" level pages clearance~~
# captured groups:
# filename
# heading
# level
# pages to include
# pageclearance
INCLUDE_PDF_PATTERN = /^\s+~~PDF\s+"(.+)" \s+ "(.+)" \s* (\d*) \s* (\d+-\d+)? \s* (clearpage|cleardoublepage)?~~/x

# the markdown include directive; same capture groups as INCLUDE_PDF_PATTERN
INCLUDE_MD_PATTERN = /^\s+~~MD\s+"(.+)" \s+ "(.+)" \s* (\d*) \s* (\d+-\d+)? \s* (clearpage|cleardoublepage)?~~/x

# matches a snippet placeholder ~~SN name~~; captures the snippet name
SNIPPET_PATTERN = /~~SN \s+ (\w+)~~/x
|
42
|
+
|
43
|
+
|
44
|
+
#
|
45
|
+
# This mixin convertes a file path to the os Path representation
|
46
|
+
# todo maybe replace this by a builtin ruby stuff such as "pathname"
|
47
|
+
#
|
48
|
+
#
# This mixin converts a file path between the OS and the Ruby path
# notation.
# todo maybe replace this by builtin ruby stuff such as "pathname"
#
class String
  # Convert the string to the path notation of the current operating
  # system (uses the platform's alternative separator when one exists).
  def to_osPath
    native = File::ALT_SEPARATOR || File::SEPARATOR
    gsub(File::SEPARATOR, native)
  end

  # Convert the string to Ruby's canonical path notation
  # (forward slashes).
  def to_rubyPath
    foreign = File::ALT_SEPARATOR || File::SEPARATOR
    gsub(foreign, File::SEPARATOR)
  end

  # Wrap the string in double quotes. Main purpose is to escape
  # blanks in file paths passed to shell commands.
  def esc
    %Q{"#{self}"}
  end
end
|
65
|
+
|
66
|
+
|
67
|
+
|
68
|
+
#
|
69
|
+
# This class provides methods to tweak the reference according to the
|
70
|
+
# target document format
|
71
|
+
#
|
72
|
+
#
|
73
|
+
#
# This class provides methods to tweak the reference according to the
# target document format
#
#
class ReferenceTweaker

  # This attribute keeps the target format ("pdf" produces TeX markup,
  # anything else produces markdown/html markup); :log is the logger.
  attr_accessor :target, :log

  private

  # this prepares the reference in the target format
  #
  # :string: the Id of the referenced Traceable
  # Splits a comma separated list of trace ids, strips all whitespace,
  # and renders each id as a hyperlink in the target format.
  # Returns the links re-joined with ", ".
  def prepareTraceReferences(string)
    result=string.gsub(/\s*/,"").split(",").map{|trace|
      itrace = mkInternalTraceId(trace)
      texTrace = mkTexTraceDisplay(trace)
      if @target == "pdf" then
        # TeX hyperlink to the \hypertarget created for the trace
        "\\hyperlink{#{itrace}}{#{texTrace}}"
      else
        # markdown link to the in-document anchor ('#' is escaped to
        # suppress string interpolation, not part of the output)
        "[#{trace}](\##{itrace})"
      end
    }
    result.join(", ")
  end


  # this tweaks the reference-Id to be compatible as TeX label
  # (underscores are not allowed in TeX labels)
  # private method
  def mkInternalTraceId(string)
    string.gsub("_","-")
  end

  # this tweaks the reference-id to be displayed in TeX
  # (underscores need escaping in TeX text)
  # private method
  def mkTexTraceDisplay(trace)
    trace.gsub("_", "\\_")
  end

  public

  # constructor
  # :target: the target format
  #          in which the references shall be represented
  # :logger: optional logger; falls back to the global $logger or a
  #          default STDOUT logger at INFO level.
  # NOTE(review): Logger is used without an explicit require 'logger'
  # in this file - presumably loaded elsewhere; verify.
  # todo: improve logger approach
  def initialize(target, logger=nil)
    @target=target

    @log=logger || $logger || nil

    if @log == nil
      @log = Logger.new(STDOUT)
      @log.level = Logger::INFO
      @log.datetime_format = "%Y-%m-%d %H:%M:%S"
      # custom formatter renders only timestamp and message
      @log.formatter = proc do |severity, datetime, progname, msg|
        "#{datetime}: #{msg}\n"
      end
    end
  end

  # this does the postprocessing
  # of the file
  # in particular handle wortsammler's specific syntax:
  # PDF includes, trace anchors, html anchors/references and
  # uptrace/informal trace references.
  # :infile:  path of the file to read
  # :outfile: path the tweaked text is written to
  def prepareFile(infile, outfile)

    infileIo=File.new(infile)
    text = infileIo.readlines.join
    infileIo.close

    #include pdf files
    if @target == "pdf"
      text.gsub!(INCLUDE_PDF_PATTERN){|m|

        # $4: optional page range -> \includepdf option
        if $4
          pages="[pages=#{$4}]"
        else
          pages=""
        end

        # $5: optional clearance command, defaults to cleardoublepage
        if $5
          clearpage=$5
        else
          clearpage="cleardoublepage"
        end

        # $3: optional bookmark level, defaults to 9
        if $3.length > 0
          level=$3
        else
          level=9
        end

        "\n\n\\#{clearpage}\n\\bookmark[level=#{level},page=\\thepage]{#{$2}}\n\\includepdf#{pages}{#{$1}}"
      }
    else #if not pdf then it gets a regular external link
      text.gsub!(INCLUDE_PDF_PATTERN){|m|
        "[#{$2}](#{$1})"
      }
    end

    #inject the anchors for references to traces ->[traceid]
    if @target == "pdf" then
      text.gsub!(TRACE_ANCHOR_PATTERN){|m| "[#{$1}]#{$2}\\hypertarget{#{mkInternalTraceId($1)}}{}"}
    else
      text.gsub!(TRACE_ANCHOR_PATTERN){|m| "<a id=\"#{mkInternalTraceId($1)}\">[#{$1}]</a>#{$2}"}
    end

    #substitute arbitrary anchors for arbitrary targets <a id="">
    if @target == "pdf" then
      text.gsub!(ANY_ANCHOR_PATTERN){|m| "\\hypertarget{#{mkInternalTraceId($1)}}{}"}
    else
      # it is already html
    end

    #substitute arbitrary document internal references <a href=""></a>
    if @target == "pdf" then
      text.gsub!(ANY_REF_PATTERN){|m| "\\hyperlink{#{$1}}{#{mkTexTraceDisplay($2)}}"}
    else
      # it is already html
    end

    # substitute the uptrace references
    text.gsub!(UPTRACE_REF_PATTERN){|m| "}(#{prepareTraceReferences($1)})"}

    # substitute the informal trace references
    text.gsub!(TRACE_REF_PATTERN){|m| "[#{prepareTraceReferences($1)}]"}

    File.open(outfile, "w"){|f| f.puts(text)}
  end
end
|
201
|
+
|
202
|
+
|
203
|
+
|
204
|
+
#
|
205
|
+
# This class handles the configuration of WortSammler framework
|
206
|
+
#
|
207
|
+
|
208
|
+
#
# This class handles the configuration of WortSammler framework
#

class ProoConfig
  attr_reader :input, # An array with the input filenames
              :outdir, # directory where to place the output files
              :outname, # basis to determine the output files
              :format, # array of output formats
              :traceSortOrder, # Array of strings to determine the sort ord
              :vars, # hash of variables for pandoc
              :editions, # hash of editions for pandoc
              :snippets, # Array of strings to determine snippet filenames
              :upstream_tracefiles, # Array of strings to determine upstream tracefile names
              :downstream_tracefile, # String to save downstram filename
              :reqtracefile_base, # string to determine the requirements tracing results
              :rootdir, # String directory of the configuration file
              :stylefiles # Hash of stylefiles path to pandoc latex style file


  # constructor
  # @param [String] configFileName name of the configfile (without .yaml)
  # @param [Symbol] configSelect Default configuration. If not specified
  #                 the very first entry in the config file
  #                 will apply.
  #                 TODO: not yet implemented.
  # @return [ProoConfig] instance
  def initialize(configFileName, configSelect=nil)
    begin
      config = YAML.load(File.new(configFileName))
    # NOTE(review): rescue Exception is overly broad (also catches
    # interrupts/SystemExit); StandardError would suffice here.
    rescue Exception => e
      # distinguish "file missing" from "file unparsable" in the log,
      # then terminate with a failure exit code
      unless File.exist?(configFileName) then
        $log.error "config file not found '#{configFileName}'"
      else
        $log.error "config file could not be loaded '#{configFileName}'"
        $log.error "reason '#{e.message}'"
      end
      exit(false)
    end

    # all relative paths in the config are resolved against the
    # directory of the configuration file
    basePath = File.dirname(configFileName)

    # this makes an absolute path based on the absolute path
    # of the configuration file
    expand_path=lambda do |lf|
      File.expand_path("#{basePath}/#{lf}")
    end


    #activeConfigs=config.select{|x| [x[:name]] & ConfigSelet}

    # configSelect is not yet honored; always take the first entry
    selectedConfig=config.first
    #TODO: check the config file
    @input = selectedConfig[:input].map{|file| File.expand_path("#{basePath}/#{file}")}
    @outdir = File.expand_path("#{basePath}/#{selectedConfig[:outdir]}")
    @outname = selectedConfig[:outname]
    @format = selectedConfig[:format]
    @traceSortOrder = selectedConfig[:traceSortOrder]
    @vars = selectedConfig[:vars] || {}
    @editions = selectedConfig[:editions] || nil

    @downstream_tracefile = selectedConfig[:downstream_tracefile] || nil

    @reqtracefile_base = selectedConfig[:reqtracefile_base] #todo expand path

    @upstream_tracefiles = selectedConfig[:upstream_tracefiles] || nil
    @upstream_tracefiles = @upstream_tracefiles.map{|file| File.expand_path("#{basePath}/#{file}")} unless @upstream_tracefiles.nil?
    @rootdir = basePath

    # style files default to ../ZSUPP_Styles relative to the config
    # file unless explicitly configured
    stylefiles = selectedConfig[:stylefiles] || nil
    if stylefiles.nil?
      @stylefiles = {
        :latex => expand_path.call("../ZSUPP_Styles/default.latex"),
        :docx => expand_path.call("../ZSUPP_Styles/default.docx"),
        :html => expand_path.call("../ZSUPP_Styles/default.css")
      }
    else
      # expand each configured style path; reduce(:merge) folds the
      # one-entry hashes back into a single hash
      @stylefiles = stylefiles.map{ |key,value| {key => expand_path.call(value)} }.reduce(:merge)
    end

    # snippet files are optional; expand their paths when present
    snippets = selectedConfig[:snippets]
    if snippets.nil?
      @snippets = nil
    else
      @snippets = snippets.map{|file| File.expand_path("#{basePath}/#{file}")}
    end
  end


end
|
294
|
+
|
295
|
+
|
296
|
+
|
297
|
+
#
|
298
|
+
# This class provides the major functionalites
|
299
|
+
# Note that it is called PandocBeautifier for historical reasons
|
300
|
+
# provides methods to Process a pandoc file
|
301
|
+
#
|
302
|
+
|
303
|
+
#
# This class provides the major functionalities
# Note that it is called PandocBeautifier for historical reasons
# provides methods to Process a pandoc file (combine, clean, filter
# by edition and render to the various output formats via pandoc)
#

class PandocBeautifier

  # the logger used by this instance
  attr_accessor :log

  # the constructor
  # @param [Logger] logger logger object to be applied.
  #                 if none is specified, a default logger
  #                 will be implemented

  def initialize(logger=nil)

    # pattern which marks an edition (view) switch, e.g. ~~ED intern~~
    @view_pattern = /~~ED((\s*(\w+))*)~~/
    # @view_pattern = /<\?ED((\s*(\w+))*)\?>/
    @tempdir = Dir.mktmpdir

    @log=logger || $logger || nil

    if @log == nil
      @log = Logger.new(STDOUT)
      @log.level = Logger::INFO
      @log.datetime_format = "%Y-%m-%d %H:%M:%S"
      # custom formatter renders only timestamp and message
      @log.formatter = proc do |severity, datetime, progname, msg|
        "#{datetime}: #{msg}\n"
      end

    end
  end

  # perform the beautify
  # * process the file with pandoc
  # * revoke some quotes introduced by pandoc
  # @param [String] file the name of the file to be beautified
  def beautify(file)

    @log.debug(" Cleaning: \"#{file}\"")

    docfile = File.new(file)
    olddoc = docfile.readlines.join
    docfile.close

    # process the file in pandoc
    cmd="pandoc -s #{file.esc} -f markdown -t markdown --atx-headers --reference-links "
    newdoc = `#{cmd}`
    @log.debug "beautify #{file.esc}: #{$?}"
    @log.debug(" finished: \"#{file}\"")

    # tweak the quoting
    if $?.success? then
      # do this twice since the replacement
      # does not work on e.g. 2\_3\_4\_5.
      #
      newdoc.gsub!(/(\w)\\_(\w)/, '\1_\2')
      newdoc.gsub!(/(\w)\\_(\w)/, '\1_\2')

      # fix more quoting
      newdoc.gsub!('-\\>[', '->[')

      # (RS_Mdc)
      # TODO: fix Table width toggles sometimes
      if (not olddoc == newdoc) then ##only touch the file if it is really changed
        File.open(file, "w"){|f| f.puts(newdoc)}
        File.open(file+".bak", "w"){|f| f.puts(olddoc)} # (RS_Mdc_) # remove this if needed
        @log.debug(" cleaned: \"#{file}\"")
      else
        @log.debug("was clean: \"#{file}\"")
      end
      #TODO: error handling here
    else
      @log.error("error calling pandoc - please watch the screen output")
    end
  end


  # this replaces the text snippets in files
  # @param [String] infile file to process (rewritten in place when
  #                 any snippet was substituted)
  # @param [Hash] snippets snippet name (Symbol) => replacement text
  def replace_snippets_in_file(infile, snippets)
    input_data = File.open(infile){|f| f.readlines.join}
    output_data=input_data.clone

    @log.debug("replacing snippets in #{infile}")

    replace_snippets_in_text(output_data, snippets)

    # only rewrite the file if a snippet was actually replaced
    if (not input_data == output_data)
      File.open(infile, "w"){|f| f.puts output_data}
    end
  end

  # this replaces the snippets in a text (mutates text in place);
  # recurses to resolve snippets nested within snippet replacements
  # @param [String] text the text to process
  # @param [Hash] snippets snippet name (Symbol) => replacement text
  def replace_snippets_in_text(text, snippets)
    changed=false
    text.gsub!(SNIPPET_PATTERN){|m|
      replacetext=snippets[$1.to_sym]
      if replacetext
        changed=true
        @log.debug("replaced snippet #{$1} with #{replacetext}")
      else
        # unknown snippet: keep the placeholder and warn
        replacetext=m
        @log.warn("Snippet not found: #{$1}")
      end
      replacetext
    }
    #recursively process nested snippets
    #todo: this approach might raise undefined snippets twice if there are defined and undefined ones
    replace_snippets_in_text(text, snippets) if changed==true
  end


  #
  # This determines the view filter
  #
  # @param [String] line - the current input line
  # @param [String] view - the currently selected view
  #
  # @return true/false if a view-command is found, else nil
  def get_filter_command(line, view)
    r = line.match(@view_pattern)

    if not r.nil?
      # the line carries an edition switch; it activates output when
      # the switch mentions the selected view or "all"
      found = r[1].split(" ")
      result = (found & [view, "all"].flatten).any?
    else
      result = nil
    end

    result
  end

  #
  # This filters the document according to the target audience
  #
  # @param [String] inputfile name of inputfile
  # @param [String] outputfile name of outputfile
  # @param [String] view - name of intended view

  def filter_document_variant(inputfile, outputfile, view)

    input_data = File.open(inputfile){|f| f.readlines}

    output_data = Array.new
    is_active = true
    input_data.each{|l|
      switch=self.get_filter_command(l, view)
      # remove the edition marker itself from the output
      l.gsub!(@view_pattern, "")
      # a line without marker keeps the previous active/inactive state
      is_active = switch unless switch.nil?
      @log.debug "select edtiion #{view}: #{is_active}: #{l.strip}"

      output_data << l if is_active
    }

    File.open(outputfile, "w"){|f| f.puts output_data.join }
  end

  #
  # This renders the debug edition: edition markers are turned into
  # visible margin notes instead of filtering lines
  #
  # @param [String] inputfile name of inputfile
  # @param [String] outputfile name of outputfile
  # @param [String] view - name of intended view

  def process_debug_info(inputfile, outputfile, view)

    input_data = File.open(inputfile){|f| f.readlines }

    output_data = Array.new

    input_data.each{|l|
      l.gsub!(@view_pattern){|p|
        # "all" sections are rendered black, view-specific ones red
        if $1.strip == "all" then
          color="black"
        else
          color="red"
        end

        "\\color{#{color}}\\rule{2cm}{0.5mm}\\marginpar{#{$1.strip}}"

      }

      # flag open todos in the margin as well
      l.gsub!(/todo:|TODO:/){|p| "#{p}\\marginpar{TODO}"}

      output_data << l
    }

    File.open(outputfile, "w"){|f| f.puts output_data.join }
  end


  # This compiles the input documents to one single file
  # it also beautifies the input files
  #
  # @param [Array of String] input - the input files to be processed in the given sequence
  # @param [String] output - the the name of the output file
  def collect_document(input, output)
    inputs=input.map{|xx| xx.esc.to_osPath }.join(" ") # qoute cond combine the inputs

    #now combine the input files
    @log.info("combining the input files")
    cmd="pandoc -s -S -o #{output} --ascii #{inputs}" # note that inputs is already quoted
    system(cmd)
    if $?.success? then
      PandocBeautifier.new().beautify(output)
    end
  end

  #
  # This loads snippets from xlsx file
  # @param file [String] Filename of the xlsx file
  #
  # @return [Hash] a hash with the snippets
  def load_snippets_from_xlsx(file)
    # copy to the tempdir first so an open/locked workbook can still
    # be read
    temp_filename = "#{@tempdir}/snippett.xlsx"
    FileUtils::copy(file, temp_filename)
    wb=RubyXL::Parser.parse(temp_filename)
    result={}
    # first worksheet; column 1 = snippet key, column 2 = snippet text
    wb.first.each{|row|
      key, the_value = row
      unless key.nil?
        unless the_value.nil?
          result[key.value.to_sym] = resolve_xml_entities(the_value.value) rescue ""
        end
      end
    }
    result
  end

  #
  # this resolves xml entities in Text (lt, gt, amp)
  # note: mutates its argument in place via gsub!
  # @param [String] text with entities
  # @return [String] text with replaced entities
  def resolve_xml_entities(text)
    result=text
    result.gsub!("&lt;", "<")
    result.gsub!("&gt;", ">")
    result.gsub!("&amp;", "&")
    result
  end

  #
  # This generates the final document
  # @param [Array of String] input the input files to be processed in the given sequence
  # @param [String] outdir the output directory
  # @param [String] outname the base name of the output file. It is a basename in case the
  #                 output format requires multiple files
  # @param [Array of String] format list of formats which shall be generated.
  #                          supported formats: "pdf", "latex", "html", "docx", "rtf", txt
  # @param [Hash] vars - the variables passed to pandoc
  # @param [Hash] editions - the editions to process; default nil - no edition processing
  # @param [Array of String] snippetfiles the list of files containing snippets
  # @param [ProoConfig] config the entire config object (optional)
  def generateDocument(input, outdir, outname, format, vars, editions=nil, snippetfiles=nil, config=nil)


    # combine the input files

    temp_filename = "#{@tempdir}/x.md".to_osPath
    collect_document(input, temp_filename)

    # process the snippets

    if not snippetfiles.nil?
      snippets={}
      snippetfiles.each{|f|
        # File.exist? (File.exists? was removed in Ruby 3.2)
        if File.exist?(f)
          # snippet source format is selected by file extension
          type=File.extname(f)
          case type
          when ".yaml"
            x=YAML.load(File.new(f))
          when ".xlsx"
            x=load_snippets_from_xlsx(f)
          else
            @log.error("Unsupported File format for snipptets: #{type}")
            x={}
          end
          snippets.merge!(x)
        else
          @log.error("Snippet file not found: #{f}")
        end
      }

      replace_snippets_in_file(temp_filename, snippets)
    end


    if editions.nil?
      # there are no editions
      render_document(temp_filename, outdir, outname, format, vars, config)
    else
      # process the editions
      editions.each{|edition_name, properties|
        edition_out_filename = "#{outname}_#{properties[:filepart]}"
        edition_temp_filename = "#{@tempdir}/#{edition_out_filename}.md"
        vars[:title] = properties[:title]

        if properties[:debug]
          # debug editions render markers visibly, with line numbers,
          # and only as pdf/latex
          process_debug_info(temp_filename, edition_temp_filename, edition_name.to_s)
          lvars=vars.clone
          lvars[:linenumbers] = "true"
          render_document(edition_temp_filename, outdir, edition_out_filename, ["pdf", "latex"], lvars, config)
        else
          filter_document_variant(temp_filename, edition_temp_filename, edition_name.to_s)
          render_document(edition_temp_filename, outdir, edition_out_filename, format, vars, config)
        end
      }
    end
  end


  #
  # render a single file
  # @param input [String] path to the inputfile
  # @param outdir [String] path to the output directory
  # @param format [Array of String] formats
  #
  # @return [nil] no useful return value
  def render_single_document(input, outdir, format)
    outname=File.basename(input, ".*")
    render_document(input, outdir, outname, format, Hash.new)
  end

  #
  # This renders the final document
  # @param [String] input the input file
  # @param [String] outdir the output directory
  # @param [String] outname the base name of the output file. It is a basename in case the
  #                 output format requires multiple files
  # @param [Array of String] format list of formats which shall be generated.
  #                          supported formats: "pdf", "latex", "html", "docx", "rtf", txt
  # @param [Hash] vars - the variables passed to pandoc
  # @param config [ProoConfig] the entire config object (for future extensions)
  # @return nil

  def render_document(input, outdir, outname, format, vars, config=nil)


    #TODO: Clarify the following
    # on Windows, Tempdir contains a drive letter. But drive letter
    # seems not to work in pandoc -> pdf if the path separator ist forward
    # slash. There are two options to overcome this
    #
    # 1. set tempdir such that it does not contain a drive letter
    # 2. use Dir.mktempdir but ensure that all provided file names
    #    use the platform specific SEPARATOR
    #
    # for whatever Reason, I decided for 2.

    tempfile = input
    tempfilePdf = "#{@tempdir}/x.TeX.md".to_osPath
    tempfileHtml = "#{@tempdir}/x.html.md".to_osPath
    outfilePdf = "#{outdir}/#{outname}.pdf".to_osPath
    outfileDocx = "#{outdir}/#{outname}.docx".to_osPath
    outfileHtml = "#{outdir}/#{outname}.html".to_osPath
    outfileRtf = "#{outdir}/#{outname}.rtf".to_osPath
    outfileLatex = "#{outdir}/#{outname}.latex".to_osPath
    outfileText = "#{outdir}/#{outname}.txt".to_osPath
    outfileSlide = "#{outdir}/#{outname}.slide.html".to_osPath


    localvars=vars.clone

    #todo: make config required, so it can be reduced to the else part
    if config.nil? then
      latexStyleFile = File.dirname(File.expand_path(__FILE__))+"/../../resources/default.latex"
      latexStyleFile = File.expand_path(latexStyleFile).to_osPath
    else
      latexStyleFile = config.stylefiles[:latex]
    end



    vars_string=localvars.map{|key, value| "-V #{key}=#{value.esc}"}.join(" ")

    @log.info("rendering #{outname} as [#{format.join(', ')}]")

    begin

      if format.include?("pdf") then
        ReferenceTweaker.new("pdf").prepareFile(tempfile, tempfilePdf)

        cmd="pandoc -S #{tempfilePdf.esc} --toc --standalone --latex-engine xelatex --number-sections #{vars_string}" +
            " --template #{latexStyleFile.esc} --ascii -o #{outfilePdf.esc}"
        `#{cmd}`
      end

      if format.include?("latex") then

        ReferenceTweaker.new("pdf").prepareFile(tempfile, tempfilePdf)

        cmd="pandoc -S #{tempfilePdf.esc} --toc --standalone --latex-engine xelatex --number-sections #{vars_string}" +
            " --template #{latexStyleFile.esc} --ascii -o #{outfileLatex.esc}"
        `#{cmd}`
      end

      if format.include?("html") then
        #todo: handle css
        ReferenceTweaker.new("html").prepareFile(tempfile, tempfileHtml)

        cmd="pandoc -S #{tempfileHtml.esc} --toc --standalone --self-contained --ascii --number-sections #{vars_string}" +
            " -o #{outfileHtml.esc}"

        `#{cmd}`
      end

      if format.include?("docx") then
        #todo: handle style file
        ReferenceTweaker.new("html").prepareFile(tempfile, tempfileHtml)

        cmd="pandoc -S #{tempfileHtml.esc} --toc --standalone --self-contained --ascii --number-sections #{vars_string}" +
            " -o #{outfileDocx.esc}"
        `#{cmd}`
      end

      if format.include?("rtf") then

        ReferenceTweaker.new("html").prepareFile(tempfile, tempfileHtml)

        cmd="pandoc -S #{tempfileHtml.esc} --toc --standalone --self-contained --ascii --number-sections #{vars_string}" +
            " -o #{outfileRtf.esc}"
        `#{cmd}`
      end

      if format.include?("txt") then

        ReferenceTweaker.new("pdf").prepareFile(tempfile, tempfileHtml)

        cmd="pandoc -S #{tempfileHtml.esc} --toc --standalone --self-contained --ascii --number-sections #{vars_string}" +
            " -t plain -o #{outfileText.esc}"
        `#{cmd}`
      end

      if format.include?("slide") then

        # NOTE: fixed - the tweaked file was previously written to
        # tempfilePdf while the pandoc command below reads
        # tempfileHtml; write to tempfileHtml so the command sees the
        # tweaked content.
        ReferenceTweaker.new("slide").prepareFile(tempfile, tempfileHtml)
        #todo: hanlde stylefile
        cmd="pandoc -S #{tempfileHtml.esc} --toc --standalone --number #{vars_string}" +
            " --ascii -t dzslides --slide-level 2 -o #{outfileSlide.esc}"
        `#{cmd}`
      end
    rescue

      @log.error "failed to perform #{cmd}"
      #TODO make a try catch block kere

    end
    nil
  end

end
|