wortsammler 2.0.0.dev1 → 2.0.2.pre.dev3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/.gitpod.Dockerfile +32 -0
- data/.gitpod.yml +1 -0
- data/README.md +2 -0
- data/lib/wortsammler/class.Traceable.md.rb +9 -7
- data/lib/wortsammler/class.Traceable.rb +74 -60
- data/lib/wortsammler/class.proolib.rb +19 -25
- data/lib/wortsammler/mdTraceParser.treetop +2 -2
- data/lib/wortsammler/version.rb +1 -1
- data/resources/default.wortsammler.latex +19 -2
- data/spec/tc_exp_003_reference.txt +9 -25
- data/spec/test_beautify.md +13 -0
- data/spec/test_beautify_reference.md +11 -0
- data/spec/wortsammler_spec.rb +131 -125
- data/testresults/wortsammler_testresults.html +37 -203
- data/wortsammler.gemspec +5 -2
- metadata +38 -10
- data/testresults/wortsammler_testresults.log +0 -108
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2428bb7e9beed06c7d1988f3ad4292b5fb1d5c1fbf9bbfef16abae9eb11664cc
+  data.tar.gz: df9990288e9841109da60f4ed3b081dd0aa3d40c018d06b6285bdfa8969fca51
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e6c5abe51379ae71360a618542004def4b58217c58558b4818928e74a7ce1cfbe2eed984e834de72c04ff6234b16f2862588461d00c5889079bdada5c18d7930
+  data.tar.gz: ac7224f1f41ca6f045727b4342353b5e255b670743e91fa140ebc49f352776c80c3e96183aaeae15764a5538d92001c64c6dc748662399a2f87e1d526dfdb25e
data/.gitignore
CHANGED
data/.gitpod.Dockerfile
ADDED
@@ -0,0 +1,32 @@
+FROM alpine:latest
+
+
+RUN apk update &&\
+    apk add texlive-xetex texmf-dist-latexextra less
+
+# from https://github.com/cybercode/alpine-ruby/blob/master/Dockerfile
+# guess ist is to install native extensions
+
+RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing" >> /etc/apk/repositories \
+    && apk update \
+    && apk add --update-cache postgresql-client nodejs \
+       libffi-dev readline sqlite build-base postgresql-dev \
+       libc-dev linux-headers libxml2-dev libxslt-dev readline-dev gcc libc-dev \
+    && rm -rf /var/cache/apk/*
+
+RUN apk update && apk upgrade && apk --update add \
+    ruby ruby ruby-dev ruby-irb ruby-rake ruby-io-console ruby-bigdecimal ruby-json ruby-bundler ruby-rspec \
+    libstdc++ tzdata bash ca-certificates \
+    && echo 'gem: --no-document' > /etc/gemrc\
+# from https://hub.docker.com/r/ciandt/docker-alpine-pandoc/dockerfile^\
+# install pandoc
+RUN \
+    apk add ca-certificates wget \
+    && wget -O /tmp/pandoc.tar.gz https://github.com/jgm/pandoc/releases/download/2.5/pandoc-2.5-linux.tar.gz \
+    && tar xvzf /tmp/pandoc.tar.gz --strip-components 1 -C /usr/local/ \
+    && ln /usr/local/bin/pandoc /usr/local/bin/pandoc_2.5 \
+    && update-ca-certificates \
+    && apk del wget ca-certificates\
+    && rm /tmp/pandoc.tar.gz
+
+RUN apk update && apk add ghostscript
data/.gitpod.yml
ADDED
@@ -0,0 +1 @@
+{ image: { file: .gitpod.Dockerfile } }
data/README.md
CHANGED
data/lib/wortsammler/class.Traceable.md.rb
CHANGED

@@ -13,6 +13,10 @@ Treetop.load File.dirname(__FILE__) + "/mdTraceParser.treetop"
 class TraceableSet


+  def mk_hyperlink(id)
+    idm = id.gsub("_","-")
+    "[\[#{id}\]](#RT-#{idm})"
+  end

   # this generates a synopsis of traces in markdown Format
   # @param [Symbol] selectedCategory the the category of the Traceables
@@ -23,20 +27,18 @@ class TraceableSet
     map{|t|
       tidm=t.id.gsub("_","-")

-      lContributes=t.contributes_to.
-
-        map{|c| cm=c.gsub("_","-"); "<a href=\"#RT-#{cm}\">\[#{c}\]</a>"}
+      lContributes = t.contributes_to.
+        map { |c| mk_hyperlink(c) }

       luptraces = [uptrace_ids[t.id]].flatten.compact.map{|x| self[x]}

       luptraces=luptraces.
         sort_by{|x| trace_order_index(x.id)}.
         map{|u|
-
-          " - <a href=\"#RT-#{um}\">[#{u.id}]</a> #{u.header_orig}"
+          " - #{mk_hyperlink(u.id)} #{u.header_orig}"
         }

-      ["-
+      ["- #{mk_hyperlink(t.id)} <!-- --> <a id=\"RT-#{tidm}\"/>**#{t.header_orig}**" +
       # " (#{t.contributes_to.join(', ')})", "",
       " (#{lContributes.join(', ')})", "",
       luptraces
@@ -50,7 +52,7 @@ class TraceableSet
     all_traces(selectedCategory).
       sort_by{|x| trace_order_index(x.id) }.
       map{|t|
-        "\n\n[#{t.id}] **#{t.header_orig}** { }()"
+        "\n\n\\[#{t.id}\\] **#{t.header_orig}** { }()"
       }.join("\n\n")
   end

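The synopsis generator above now builds its links through the new mk_hyperlink helper instead of hand-crafted <a href> tags. A standalone sketch of what the helper produces, outside of wortsammler and with a made-up trace id:

# Same one-liner as the mk_hyperlink added above, repeated here only for illustration.
def mk_hyperlink(id)
  idm = id.gsub("_","-")            # anchors use dashes instead of underscores
  "[\[#{id}\]](#RT-#{idm})"         # markdown link pointing at the #RT-... anchor
end

puts mk_hyperlink("RS_DM_001")      # RS_DM_001 is a made-up id
# => [[RS_DM_001]](#RT-RS-DM-001)   (Ruby drops the \ before [ in double-quoted strings)

The visible id keeps its underscores while the anchor part is dash-separated, which is what the three call sites in this diff rely on.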
data/lib/wortsammler/class.Traceable.rb
CHANGED

@@ -23,16 +23,16 @@ class TraceableSet
   # @return [type] [description]
   def initialize()
     # the traces
-    @traces={}
+    @traces = {}

     # the list of supporters
     # supporters for foo 0 @@supported_by["foo"]
-    @supported_by={}
+    @supported_by = {}


     # define the sort order policy
     # it is the same for all slots
-    @sortOrder=[]
+    @sortOrder = []

   end

@@ -63,7 +63,7 @@ class TraceableSet
   # @param category [Symbol] Restrict the comparison to a particlar category
   #
   # @return [Array] the ids of the added traces (list of trace_id which are not in @referece_set)
-  def added_trace_ids(reference_set, category=nil)
+  def added_trace_ids(reference_set, category = nil)
     self.all_trace_ids(category) - reference_set.all_trace_ids(category)
   end

@@ -74,9 +74,9 @@ class TraceableSet
   # @param category [Symbol] Restrict the operation to traceables of this category.
   #
   # @return [Array] List of trace_id which changed not in reference_set
-  def changed_trace_ids(reference_set, category=nil)
-    candidates=self.all_trace_ids(category) & reference_set.all_trace_ids(category)
-    candidates.map{|candidate|
+  def changed_trace_ids(reference_set, category = nil)
+    candidates = self.all_trace_ids(category) & reference_set.all_trace_ids(category)
+    candidates.map { |candidate|
       self[candidate].get_diff(reference_set[candidate])
     }.compact
   end
@@ -87,9 +87,9 @@ class TraceableSet
   # @param category [Symbol] Restrict the operation to traceables of this category.
   #
   # @return [Array] List of trace_id which unchanged
-  def unchanged_trace_ids(reference_set, category=nil)
-    candidates=self.all_trace_ids(category) & reference_set.all_trace_ids(category)
-    candidates.select{|candidate|
+  def unchanged_trace_ids(reference_set, category = nil)
+    candidates = self.all_trace_ids(category) & reference_set.all_trace_ids(category)
+    candidates.select { |candidate|
       self[candidate].get_diff(reference_set[candidate]).nil?
     }.compact
   end
@@ -101,7 +101,7 @@ class TraceableSet
   # @param category [Symbol] Restrict the operation to traceables of this category.
   #
   # @return [Array] List of trace_id which are deleted (not in current set)
-  def deleted_trace_ids(reference_set, category=nil)
+  def deleted_trace_ids(reference_set, category = nil)
     reference_set.all_trace_ids(category) - self.all_trace_ids(category)
   end

@@ -109,31 +109,31 @@ class TraceableSet
   # export the trace as graphml for yed
   # @return - the requirements tree in graphml
   def to_graphml
-    f
+    f = File.open("#{File.dirname(__FILE__)}/../../resources/requirementsSynopsis.graphml")
     doc = Nokogiri::XML(f)
     f.close

-    graph=doc.xpath("//xmlns:graph").first
+    graph = doc.xpath("//xmlns:graph").first

     # generate all nodes
-    self.all_traces(nil).each{|theTrace|
-      n_node
-      n_node["id"]
-      n_data
-      n_data["key"]= "d6"
-      n_ShapeNode
-      n_NodeLabel
+    self.all_traces(nil).each { |theTrace|
+      n_node = Nokogiri::XML::Node.new "node", doc
+      n_node["id"] = theTrace.id
+      n_data = Nokogiri::XML::Node.new "data", doc
+      n_data["key"] = "d6"
+      n_ShapeNode = Nokogiri::XML::Node.new "y:ShapeNode", doc
+      n_NodeLabel = Nokogiri::XML::Node.new "y:NodeLabel", doc
       n_NodeLabel.content = "[#{theTrace.id}] #{theTrace.header_orig}"
       n_ShapeNode << n_NodeLabel
       n_data << n_ShapeNode
       n_node << n_data
       graph << n_node

-      theTrace.contributes_to.each{|up|
-        n_edge=Nokogiri::XML::Node.new "edge", doc
-        n_edge["source"
-        n_edge["target"
-        n_edge["id"
+      theTrace.contributes_to.each { |up|
+        n_edge = Nokogiri::XML::Node.new "edge", doc
+        n_edge["source"] = theTrace.id
+        n_edge["target"] = up
+        n_edge["id"] = "#{up}_#{theTrace.id}"
         graph << n_edge
       }
     }
@@ -146,7 +146,7 @@ class TraceableSet
   # @return [Array of String] an array of the registered Traceables
   # of the selectedCategory
   def all_trace_ids(selected_category = nil)
-    @traces.keys.select{|x|
+    @traces.keys.select { |x|
       y = @traces[x].first
       selected_category.nil? or y.category == selected_category
     }.sort
@@ -161,7 +161,7 @@ class TraceableSet
   #
   # @return [Array of Traceable] The array of traceables
   def all_traces(selected_category = nil)
-    all_trace_ids(selected_category).map{|t| @traces[t].first}
+    all_trace_ids(selected_category).map { |t| @traces[t].first }
   end


@@ -187,43 +187,43 @@ class TraceableSet
   # this lists duplicate traces
   # @return [Array of String] the list of the id of duplicate Traces
   def duplicate_ids()
-    @traces.select{|id, traceables| traceables.length > 1}.map{|id, traceable| id}.sort
+    @traces.select { |id, traceables| traceables.length > 1 }.map { |id, traceable| id }.sort
   end

   # this lists duplicate traces
   # @return [Array of Traceable] the list duplicate Traces.
   def duplicate_traces()
-    @traces.select{|id, traceables| traceables.length > 1}.map{|id, traceable| traceable}.sort
+    @traces.select { |id, traceables| traceables.length > 1 }.map { |id, traceable| traceable }.sort
   end


   # this serializes a particular slot for caching
   # @param file [String] name of the cachefile
   def dump_to_marshal(file)
-    File.open(file, "wb"){|f|
+    File.open(file, "wb") { |f|
       Marshal.dump(self, f)
     }
   end

   # this loads cached information into a particular slot
   # @param file [String] name of the cachefile
-  def
-    a=nil
-    File.open(file, "rb"){|f| a=Marshal.load(f)}
+  def self.load_from_marshal(file)
+    a = nil
+    File.open(file, "rb") { |f| a = Marshal.load(f) }
     a
   end

   # this merges a TraceableSet
   # @return [Treaceable] the current traceable set
   def merge(set)
-    set.all_traces_as_arrays.values.flatten.each{|t| self.add(t)}
+    set.all_traces_as_arrays.values.flatten.each { |t| self.add(t) }
   end

   # this retunrs traces marked as supported but not being defined
   # @return [Array of String] the list of the id of undefined Traces
   # traces which are marked as uptraces but do not exist.
   def undefined_ids
-    @supported_by.keys.select{|t| not @traces.has_key?(t)}.sort
+    @supported_by.keys.select { |t| not @traces.has_key?(t) }.sort
   end

   #
@@ -243,7 +243,7 @@ class TraceableSet
   # it is placed according to the sequence
   # in the array. Otherwise it is sorted at the end
   def sort_order= (sort_order)
-    @sort_order=sort_order
+    @sort_order = sort_order
   end

   # this determines the sort order index of a trace
@@ -252,11 +252,11 @@ class TraceableSet
   # the sort order index shall be coumputed.
   # @return [String] the sort key of the given id.
   def trace_order_index(trace_id)
-    global
-
+    global = @sort_order.index { |x| trace_id.start_with? x } ||
+             (@sort_order.length + 1)

     # add the {index} of the trace to
-    orderId = [global.to_s.rjust(5,"0"),trace_id].join("_")
+    orderId = [global.to_s.rjust(5, "0"), trace_id].join("_")
     orderId
   end

@@ -272,11 +272,10 @@ class TraceableSet
   #
   # @return [type] [description]
   def to_compareEntries
-    all_traces.sort.map{|t| "\n\n[#{t.id}]\n#{t.as_oneline}" }.join("\n")
+    all_traces.sort.map { |t| "\n\n[#{t.id}]\n#{t.as_oneline}" }.join("\n")
   end


-
   #############################

   private
@@ -286,7 +285,7 @@ class TraceableSet
   # @param [Nokogiri::XML::Document] doc - the document
   # @return [Nokogiri::XML::Document] the beautified document
   def xp(doc)
-    xsl
+    xsl = <<-XSL
     <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
       <xsl:output method="xml" encoding="UTF-8" indent="yes"/>
       <xsl:strip-space elements="*"/>
@@ -307,16 +306,19 @@ class TraceableSet
 end


+class Traceable

+  def clear_trace_id(id)
+    id.gsub("\\_", "_")
+  end

-class Traceable
   include Comparable


   # String: The trace-Id
-
+  attr_reader :id
   # string: the alternative Id, used e.g. for the constraint number
-
+  attr_reader :alternative_id
   # String: The header in plain text
   attr_accessor :header_plain
   # String: The header in original format
@@ -326,7 +328,7 @@ class Traceable
   # String: he body in original format
   attr_accessor :body_orig
   # Array of Strings: The uplink as an array of Trace-ids
-
+  attr_reader :contributes_to
   # String: the Traceable in its original format
   attr_accessor :trace_orig
   # String: origin of the entry
@@ -338,18 +340,30 @@ class Traceable


   def initialize()
-    @id
+    @id = ""
     @alternative_id = ""
-    @header_orig
-    @body_plain
-    @body_orig
+    @header_orig = ""
+    @body_plain = ""
+    @body_orig = ""
     @contributes_to = []
-    @trace_orig
-    @category
-    @info
+    @trace_orig = ""
+    @category = ""
+    @info = ""
+  end
+
+  def id=(id)
+    @id = clear_trace_id(id)
+  end
+
+  def alternative_id=()
+    @alternative_id = clear_trace_id(id)
+  end
+
+  def contributes_to=(list)
+    @contributes_to = list.map { |id| id = clear_trace_id(id) }
   end

-  # define the comparison to makeit really
+  # define the comparison to makeit really comparable
   # @param [Traceable] other the other traceable for comparison.
   def <=> (other)
     @id <=> other.id
@@ -366,19 +380,19 @@ class Traceable
     if newval == oldval
       result = nil
     else
-      diff_as_html= "<pre>#{other.trace_orig}</pre><hr/><pre>#{self.trace_orig}</pre>"#Diffy::Diff.new(other.trace_orig, self.trace_orig).to_s(:text)
-      rawDiff
-      diff_as_html=rawDiff.to_s(:html)
+      diff_as_html = "<pre>#{other.trace_orig}</pre><hr/><pre>#{self.trace_orig}</pre>" #Diffy::Diff.new(other.trace_orig, self.trace_orig).to_s(:text)
+      rawDiff = Diffy::Diff.new(self.trace_orig, other.trace_orig)
+      diff_as_html = rawDiff.to_s(:html)

-      result
-      diff_as_html=nil
+      result = [self.id, similarity, diff_as_html]
+      diff_as_html = nil
     end
     result
   end


   def get_comparison_string
-    "#{header_orig};#{body_orig};#{contributes_to.sort}".gsub(/\s+/," ")
+    "#{header_orig};#{body_orig};#{contributes_to.sort}".gsub(/\s+/, " ")
   end

   def as_oneline
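The Traceable class gains clear_trace_id plus id=, alternative_id= and contributes_to= writers so that trace ids coming out of pandoc-processed markdown lose their \_ escaping. A minimal standalone sketch of that unescaping step, with a made-up id:

# Mirrors the clear_trace_id added above; shown on its own only for illustration.
def clear_trace_id(id)
  id.gsub("\\_", "_")               # turn every escaped \_ back into a plain _
end

puts clear_trace_id('RS\_DM\_001')  # single quotes keep the literal backslashes
# => RS_DM_001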
data/lib/wortsammler/class.proolib.rb
CHANGED

@@ -42,15 +42,15 @@ TRACE_REF_PATTERN = /->\[(\w+_\w+_\w+)\]/
 # pageclearance
 INCLUDE_PDF_PATTERN = /^\s+~~PDF\s+"(.+)" \s+ "(.+)" \s* (\d*) \s* (\d+-\d+)? \s* (clearpage|cleardoublepage)?~~/x

-INCLUDE_MD_PATTERN = /(\s*)~~MD\s
+INCLUDE_MD_PATTERN = /(\s*)~~MD\s+\\?"(.+)\\?"~~/x

 SNIPPET_PATTERN = /(\s*)~~SN \s+ (\w+)~~/x

 EMBEDDED_IMAGE_PATTERN = /~~EMBED\s+ "(.+)" \s+ (r|l|i|o) \s+ (.+) \s+ (.+)~~/x

-EXPECTED_RESULT_PATTERN = /(^\s*)
+EXPECTED_RESULT_PATTERN = /(^\s*)[~`]{3,}\s*\{.expectedResult\s+label=\"([A-Za-z]+\\?_[A-Za-z]+\\?_[0-9]+)\"}\s([^~`]+)[~`]{3,}/x

-PLANTUML_PATTERN = /[~`]{3,}\s+{\.plantuml}\s+@startuml\s+([^\n]+)(\s+title\s+([^\n]+))?[^~`]+
+PLANTUML_PATTERN = /[~`]{3,}\s+{\.plantuml}\s+@startuml\s+([^\n]+)(\s+title\s+([^\n]+))?[^~`]+ v/x

 #
 # This mixin convertes a file path to the os Path representation
@@ -161,19 +161,22 @@ class ReferenceTweaker
   # @return [String] The resulting text
   def replace_md_inlay(text)
     text.gsub!(INCLUDE_MD_PATTERN) { |m|
-
-
+      infile = $2.gsub("\\_", "_")
+      if File.exist?(infile) then
+        replacetext_raw = File.open(infile, :encoding => 'bom|utf-8').read
       unless $1.nil? then
-        leading_whitespace
-        leading_lines
-        leading_spaces
-        replacetext
+        leading_whitespace = $1.split("\n", 100)
+        leading_lines = leading_whitespace[0 .. -1].join("\n")
+        leading_spaces = leading_whitespace.last || ""
+        replacetext = leading_lines + replacetext_raw.gsub("\n", "\n#{leading_spaces}")
+      else
+        replacetext = replacetext_raw
       end
       else
-      replacetext=""
+      replacetext = ""
       @log.warn("File not found: #{$2}")
       end
-      result=replace_md_inlay(replacetext)
+      result = replace_md_inlay(replacetext)
       result
     }
     text
@@ -450,8 +453,8 @@ class PandocBeautifier
   def initialize(logger = nil)

     @markdown_output_switches = %w{
-
-
+      +backtick_code_blocks
+      -fenced_code_blocks
       +compact_definition_lists
       +space_in_atx_header
       +yaml_metadata_block
@@ -459,6 +462,7 @@ class PandocBeautifier

     @markdown_input_switches = %w{
       +smart
+      +backtick_code_blocks
       +fenced_code_blocks
       +compact_definition_lists
       -space_in_atx_header
@@ -531,15 +535,6 @@ class PandocBeautifier

       # tweak the quoting
       if $?.success? then
-        # do this twice since the replacement
-        # does not work on e.g. 2\_3\_4\_5.
-        #
-        newdoc.gsub!(/(\w)\\_(\w)/, '\1_\2')
-        newdoc.gsub!(/(\w)\\_(\w)/, '\1_\2')
-
-        # fix more quoting
-        newdoc.gsub!('-\\>[', '->[')
-
         # (RS_Mdc)
         # TODO: fix Table width toggles sometimes
         if (not olddoc == newdoc) then ##only touch the file if it is really changed
@@ -687,7 +682,7 @@ class PandocBeautifier

       #now combine the input files
       @log.debug("combining the input files #{inputname} et al")
-      cmd="#{PANDOC_EXE} --standalone -o #{output} --ascii #{inputs}" # note that inputs is already quoted
+      cmd="#{PANDOC_EXE} -f markdown#{@markdown_input_switches} --standalone -t markdown#{@markdown_output_switches} -o #{output} --ascii #{inputs}" # note that inputs is already quoted
       system(cmd)
       if $?.success? then
         PandocBeautifier.new().beautify(output)
@@ -959,7 +954,7 @@ class PandocBeautifier
       begin
         vars_string=vars.map.map { |key, value| "-V #{key}=#{value.esc}" }.join(" ")
       rescue
-
+        #todo require 'pry'; binding.pry
       end

       @log.info("rendering #{outname} as [#{format.join(', ')}]")
@@ -1057,7 +1052,6 @@ class PandocBeautifier

       cmd="#{PANDOC_EXE} -f markdown#{@markdown_input_switches} #{tempfileHtml.esc} --toc --standalone --self-contained --ascii --number-sections #{vars_string}" +
           " -t plain+smart -o #{outfileText.esc}"
-      puts cmd
       `#{cmd}`
     end

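The loosened INCLUDE_MD_PATTERN above also accepts file names whose quotes pandoc has escaped as \". A quick standalone check of both forms; snippet.md is a made-up file name and nothing here touches the file system:

# Copy of the revised pattern from the diff above, exercised on two sample lines.
INCLUDE_MD_PATTERN = /(\s*)~~MD\s+\\?"(.+)\\?"~~/x

['~~MD "snippet.md"~~', '~~MD \"snippet.md\"~~'].each do |line|
  puts line[INCLUDE_MD_PATTERN, 2]  # capture 2 is the file name, => snippet.md
end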