rolfedh-doc-utils 0.1.29__tar.gz → 0.1.30__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {rolfedh_doc_utils-0.1.29/rolfedh_doc_utils.egg-info → rolfedh_doc_utils-0.1.30}/PKG-INFO +1 -1
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/converter_deflist.py +27 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/detector.py +17 -13
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/table_parser.py +147 -50
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/version.py +1 -1
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/pyproject.toml +1 -1
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30/rolfedh_doc_utils.egg-info}/PKG-INFO +1 -1
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/LICENSE +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/README.md +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/__init__.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/converter_bullets.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/converter_comments.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/convert_callouts_interactive.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/convert_callouts_to_deflist.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/__init__.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/extract_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/file_utils.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/format_asciidoc_spacing.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/replace_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/scannability.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/spinner.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/topic_map_parser.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/unused_adoc.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/validate_links.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils/version_check.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/doc_utils_cli.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/extract_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/find_unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/format_asciidoc_spacing.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/replace_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/rolfedh_doc_utils.egg-info/SOURCES.txt +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/rolfedh_doc_utils.egg-info/dependency_links.txt +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/rolfedh_doc_utils.egg-info/entry_points.txt +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/rolfedh_doc_utils.egg-info/requires.txt +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/rolfedh_doc_utils.egg-info/top_level.txt +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/setup.cfg +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/setup.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_auto_discovery.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_cli_entry_points.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_extract_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_file_utils.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_fixture_archive_unused_files.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_fixture_archive_unused_images.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_fixture_check_scannability.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_parse_exclude_list.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_replace_link_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_symlink_handling.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_table_callout_conversion.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_table_parser.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_topic_map_parser.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_unused_attributes.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_validate_links.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/tests/test_version_check.py +0 -0
- {rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/validate_links.py +0 -0
{rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/converter_deflist.py (+27 -0)

@@ -70,10 +70,37 @@ class DefListConverter:
             lines.append('+')
 
         # Add explanation lines, prepending "Optional. " to first line if needed
+        # Handle blank lines and conditionals by inserting continuation markers
+        need_continuation = False
+        had_content = False  # Track if we've output any non-conditional content
+
         for line_idx, line in enumerate(explanation.lines):
+            stripped = line.strip()
+
+            # Check if this is a blank line
+            if stripped == '':
+                # Next non-blank line will need a continuation marker
+                need_continuation = True
+                continue  # Skip blank lines
+
+            # Check if this is a conditional directive
+            is_conditional = stripped.startswith(('ifdef::', 'ifndef::', 'endif::'))
+
+            # Add continuation marker if:
+            # 1. Previous line was blank (need_continuation=True), OR
+            # 2. This is a conditional and we've had content before (need separator)
+            if need_continuation or (is_conditional and had_content and line_idx > 0):
+                lines.append('+')
+                need_continuation = False
+
+            # Add the line
             if line_idx == 0 and explanation.is_optional:
                 lines.append(f'Optional. {line}')
            else:
                 lines.append(line)
 
+            # Track that we've output content (not just conditionals)
+            if not is_conditional:
+                had_content = True
+
         return lines
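What the new logic produces, in brief: when a callout explanation pulled from a table cell contains blank lines or `ifdef::`/`ifndef::`/`endif::` directives, the converter now emits AsciiDoc list-continuation markers (`+`) so the extra paragraphs and the conditionals stay attached to the definition-list entry. A minimal standalone sketch of that loop (the helper name `build_deflist_lines` is hypothetical; the packaged code lives in `DefListConverter` and also handles the `Optional.` prefix):

```python
# Minimal sketch of the continuation-marker logic above; not the packaged API.
def build_deflist_lines(explanation_lines):
    lines = []
    need_continuation = False   # a blank line was seen, next content needs '+'
    had_content = False         # at least one non-conditional line was emitted
    for line_idx, line in enumerate(explanation_lines):
        stripped = line.strip()
        if stripped == '':
            need_continuation = True
            continue
        is_conditional = stripped.startswith(('ifdef::', 'ifndef::', 'endif::'))
        if need_continuation or (is_conditional and had_content and line_idx > 0):
            lines.append('+')
            need_continuation = False
        lines.append(line)
        if not is_conditional:
            had_content = True
    return lines

# A blank line inside the explanation becomes a '+' continuation marker:
print(build_deflist_lines(['First paragraph.', '', 'Second paragraph.']))
# -> ['First paragraph.', '+', 'Second paragraph.']
```

In AsciiDoc, that `+` keeps the second paragraph attached to the same definition-list term instead of starting a new block.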
{rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/detector.py (+17 -13)

@@ -184,14 +184,17 @@ class CalloutDetector
         explanations = {}
         table_data = self.table_parser.extract_callout_explanations_from_table(table)
 
-        for callout_num, (explanation_lines,
-            #
+        for callout_num, (explanation_lines, row_conditionals) in table_data.items():
+            # explanation_lines now includes blank lines and conditionals inline
+            # row_conditionals are before/after the entire row (rarely used)
             all_lines = []
-            for line in explanation_lines:
-                all_lines.append(line)
 
-            # Add
-
+            # Add any row-level conditionals before
+            if row_conditionals:
+                all_lines.extend(row_conditionals)
+
+            # Add explanation lines (already includes inline conditionals and blank lines)
+            all_lines.extend(explanation_lines)
 
             # Check if marked as optional
             is_optional = False

@@ -215,11 +218,15 @@ class CalloutDetector
         explanations = {}
         table_data = self.table_parser.extract_3column_callout_explanations(table)
 
-        for callout_num, (value_lines, description_lines,
+        for callout_num, (value_lines, description_lines, row_conditionals) in table_data.items():
             # Combine value and description into explanation lines
-            #
+            # Both value_lines and description_lines now include conditionals and blank lines inline
             all_lines = []
 
+            # Add any row-level conditionals before
+            if row_conditionals:
+                all_lines.extend(row_conditionals)
+
             # Add value lines with context
             if value_lines:
                 # Format: "`value`:"

@@ -228,16 +235,13 @@ class CalloutDetector
                 if value_text:
                     all_lines.append(f"{value_text}:")
 
-                # Add additional value lines if multi-line
+                # Add additional value lines if multi-line (includes conditionals and blank lines)
                 for line in value_lines[1:]:
                     all_lines.append(line)
 
-            # Add description lines
+            # Add description lines (already includes conditionals and blank lines)
             all_lines.extend(description_lines)
 
-            # Add conditionals as separate lines (they'll be preserved in output)
-            all_lines.extend(conditionals)
-
             # Check if marked as optional
             is_optional = False
             if all_lines and (all_lines[0].lower().startswith('optional.') or
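Taken together, the detector now consumes richer tuples from the table parser: `(explanation_lines, row_conditionals)` for two-column callout tables and `(value_lines, description_lines, row_conditionals)` for three-column tables, with conditionals and blank lines kept inline rather than stripped out and re-appended. A sketch of the new shape and how it is folded into `all_lines` (the sample data is hypothetical):

```python
# Illustrative shape of the data the detector now receives (hypothetical values).
table_data = {
    1: (
        ['Name of the resource.',
         '',
         'ifdef::downstream[]',
         'Only shown in the downstream build.',
         'endif::downstream[]'],   # explanation_lines: blanks and conditionals inline
        [],                        # row_conditionals: before/after the whole row
    ),
}

for callout_num, (explanation_lines, row_conditionals) in table_data.items():
    all_lines = []
    if row_conditionals:
        all_lines.extend(row_conditionals)
    all_lines.extend(explanation_lines)
    # all_lines is then checked for an "Optional." prefix and handed to the converter
```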
{rolfedh_doc_utils-0.1.29 → rolfedh_doc_utils-0.1.30}/callout_lib/table_parser.py (+147 -50)

@@ -57,6 +57,44 @@ class TableParser:
     # Pattern for callout number (used for callout table detection)
     CALLOUT_NUMBER = re.compile(r'^<(\d+)>\s*$')
 
+    def _finalize_row_if_complete(self, current_row_cells, conditionals_before_row,
+                                  conditionals_after_row, expected_columns, rows):
+        """
+        Check if we have enough cells for a complete row, and if so, save it.
+
+        Returns: (new_current_row_cells, new_conditionals_before, new_conditionals_after)
+        """
+        if expected_columns > 0 and len(current_row_cells) >= expected_columns:
+            # Row is complete - save it
+            rows.append(TableRow(
+                cells=current_row_cells.copy(),
+                conditionals_before=conditionals_before_row.copy(),
+                conditionals_after=conditionals_after_row.copy()
+            ))
+            return [], [], []  # Reset for next row
+
+        # Row not complete yet
+        return current_row_cells, conditionals_before_row, conditionals_after_row
+
+    def _parse_column_count(self, attributes: str) -> int:
+        """
+        Parse the cols attribute to determine number of columns.
+
+        Example: '[cols="1,7a"]' returns 2
+                 '[cols="1,2,3"]' returns 3
+        """
+        import re
+        # Match cols="..." or cols='...'
+        match = re.search(r'cols=["\']([^"\']+)["\']', attributes)
+        if not match:
+            return 0  # Unknown column count
+
+        cols_spec = match.group(1)
+        # Count comma-separated values
+        # Handle formats like: "1,2", "1a,2a", "1,2,3", etc.
+        columns = cols_spec.split(',')
+        return len(columns)
+
     def find_tables(self, lines: List[str]) -> List[AsciiDocTable]:
         """Find all tables in the document."""
         tables = []
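The two new helpers work as a pair: `_parse_column_count` reads the table's `cols` attribute, and `_finalize_row_if_complete` flushes a row as soon as that many cells have been collected, without waiting for a blank separator line. A quick standalone check of the column-count regex (same pattern as above, shown outside the class):

```python
import re

def parse_column_count(attributes: str) -> int:
    # Same regex as TableParser._parse_column_count above (standalone sketch).
    match = re.search(r'cols=["\']([^"\']+)["\']', attributes)
    if not match:
        return 0  # unknown column count
    return len(match.group(1).split(','))

assert parse_column_count('[cols="1,7a"]') == 2
assert parse_column_count("[cols='1,2,3']") == 3
assert parse_column_count('[options="header"]') == 0  # no cols attribute
```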
@@ -96,12 +134,20 @@ class TableParser:
         |Cell4
         |===
         """
+        # Get attributes and parse column count
+        attributes = ""
+        if start_line < delimiter_line:
+            attributes = lines[start_line]
+
+        expected_columns = self._parse_column_count(attributes)
+
         i = delimiter_line + 1
         rows = []
         current_row_cells = []
         current_cell_lines = []
         conditionals_before_row = []
         conditionals_after_row = []
+        in_asciidoc_cell = False  # Track if we're in an a| (AsciiDoc) cell
 
         while i < len(lines):
             line = lines[i]

@@ -138,22 +184,29 @@ class TableParser:
 
             # Check for conditional directives
             if self.IFDEF_PATTERN.match(line) or self.ENDIF_PATTERN.match(line):
-
+                # If we're building a cell (current_cell_lines is not empty) OR
+                # we're in an AsciiDoc cell, add conditional to cell content
+                if current_cell_lines or in_asciidoc_cell:
+                    # Inside a cell - conditional is part of cell content
+                    current_cell_lines.append(line)
+                elif current_row_cells:
+                    # Between cells in the same row
+                    conditionals_after_row.append(line)
+                else:
                     # Conditional before any cells in this row
                     conditionals_before_row.append(line)
-                else:
-                    # Conditional after cells started - treat as part of current context
-                    if current_cell_lines:
-                        # Inside a cell
-                        current_cell_lines.append(line)
-                    else:
-                        # Between cells in the same row
-                        conditionals_after_row.append(line)
                 i += 1
                 continue
 
-            # Blank line
+            # Blank line handling
             if not line.strip():
+                # In AsciiDoc cells (a|), blank lines are part of cell content
+                if in_asciidoc_cell:
+                    current_cell_lines.append(line)
+                    i += 1
+                    continue
+
+                # Otherwise, blank line separates rows
                 # Save pending cell if exists
                 if current_cell_lines:
                     current_row_cells.append(TableCell(

@@ -161,6 +214,7 @@ class TableParser:
                         conditionals=[]
                     ))
                     current_cell_lines = []
+                    in_asciidoc_cell = False
 
                 # Save row if we have cells
                 if current_row_cells:

@@ -178,6 +232,18 @@ class TableParser:
 
             # Check for cell separator (|)
             if self.CELL_SEPARATOR.match(line):
+                # Check if this is a cell type specifier on its own line (e.g., "a|" or "s|")
+                cell_content = line[1:].strip()  # Remove leading | and whitespace
+
+                # If line is just "a|", "s|", "h|", etc. (cell type specifier alone)
+                if len(cell_content) == 2 and cell_content[0] in 'ashdmev' and cell_content[1] == '|':
+                    # This is a cell type specifier on its own line
+                    if cell_content[0] == 'a':
+                        in_asciidoc_cell = True
+                    # Don't create a cell yet - content comes on following lines
+                    i += 1
+                    continue
+
                 # Save previous cell if exists
                 if current_cell_lines:
                     current_row_cells.append(TableCell(

@@ -185,9 +251,27 @@ class TableParser:
                        conditionals=[]
                    ))
                    current_cell_lines = []
+                   in_asciidoc_cell = False  # Reset for next cell
+
+                   # Check if row is complete (have enough cells based on cols attribute)
+                   current_row_cells, conditionals_before_row, conditionals_after_row = \
+                       self._finalize_row_if_complete(
+                           current_row_cells, conditionals_before_row,
+                           conditionals_after_row, expected_columns, rows
+                       )
 
                # Extract cell content from this line (text after |)
-               cell_content = line[1:]
+               cell_content = line[1:]  # Remove leading |
+
+               # Check for inline cell type specifier (a|text, s|text, etc.)
+               # Cell type specifiers are single characters followed by |
+               if len(cell_content) > 0 and cell_content[0] in 'ashdmev' and len(cell_content) > 1 and cell_content[1] == '|':
+                   # Track if this is an AsciiDoc cell (a|)
+                   if cell_content[0] == 'a':
+                       in_asciidoc_cell = True
+                   cell_content = cell_content[2:]  # Remove type specifier and second |
+
+               cell_content = cell_content.strip()
 
                # Check if there are multiple cells on the same line (e.g., |Cell1 |Cell2 |Cell3)
                if '|' in cell_content:

@@ -220,6 +304,39 @@ class TableParser:
                 i += 1
                 continue
 
+            # Check for cell type specifier on its own line (e.g., "a|", "s|", "h|")
+            # This is actually a cell SEPARATOR with type specifier
+            # Example:
+            #   |<1>        ← Cell 1
+            #   a|          ← Start cell 2 with type 'a' (AsciiDoc)
+            #   content...  ← Cell 2 content
+            stripped_line = line.strip()
+            if (len(stripped_line) == 2 and
+                    stripped_line[0] in 'ashdmev' and
+                    stripped_line[1] == '|' and
+                    (current_cell_lines or current_row_cells)):
+                # Save previous cell if we have one
+                if current_cell_lines:
+                    current_row_cells.append(TableCell(
+                        content=current_cell_lines.copy(),
+                        conditionals=[]
+                    ))
+                    current_cell_lines = []
+
+                # Check if row is complete
+                current_row_cells, conditionals_before_row, conditionals_after_row = \
+                    self._finalize_row_if_complete(
+                        current_row_cells, conditionals_before_row,
+                        conditionals_after_row, expected_columns, rows
+                    )
+
+                # Set cell type for the NEW cell we're starting
+                if stripped_line[0] == 'a':
+                    in_asciidoc_cell = True
+                # Start collecting content for the new cell (no content on this line)
+                i += 1
+                continue
+
             # Regular content line (continuation of current cell)
             if current_cell_lines or current_row_cells:
                 current_cell_lines.append(line)
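The additions above teach the parser about AsciiDoc cell-type specifiers in both forms, inline (`|a|content`) and on a line of their own (`a|` with the content on the following lines), and they keep blank lines inside an `a|` cell as cell content. A condensed sketch of the two detection checks, applied to hypothetical input lines:

```python
CELL_TYPES = 'ashdmev'  # AsciiDoc cell specifiers handled above: a, s, h, d, m, e, v

def split_inline_specifier(line: str):
    """Mirror of the inline check: '|a|text' -> ('a', 'text'); '|text' -> (None, 'text')."""
    cell_content = line[1:]  # text after the leading '|'
    if len(cell_content) > 1 and cell_content[0] in CELL_TYPES and cell_content[1] == '|':
        return cell_content[0], cell_content[2:].strip()
    return None, cell_content.strip()

def is_standalone_specifier(line: str) -> bool:
    """Mirror of the standalone check: a bare 'a|' line starts a new typed cell."""
    stripped = line.strip()
    return len(stripped) == 2 and stripped[0] in CELL_TYPES and stripped[1] == '|'

print(split_inline_specifier('|a|`metadata.name`'))   # ('a', '`metadata.name`')
print(is_standalone_specifier('a|'))                  # True: following lines belong to an a| cell
```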
@@ -338,25 +455,20 @@ class TableParser:
 
                 callout_num = int(match.group(1))
 
-                # Collect explanation lines
+                # Collect explanation lines, preserving blank lines and conditionals inline
+                # Blank lines will need to become continuation markers (+) in definition lists
                 explanation_lines = []
                 for line in explanation_cell.content:
-                    #
-
-
+                    # Preserve ALL lines including conditionals and blank lines
+                    # Empty lines will be marked as '' which signals need for continuation marker
+                    explanation_lines.append(line)
 
-                # Collect
-
-
+                # Collect conditionals that appear before/after the row
+                row_conditionals = []
+                row_conditionals.extend(row.conditionals_before)
+                row_conditionals.extend(row.conditionals_after)
 
-
-                for line in explanation_cell.content:
-                    if self.IFDEF_PATTERN.match(line) or self.ENDIF_PATTERN.match(line):
-                        all_conditionals.append(line)
-
-                all_conditionals.extend(row.conditionals_after)
-
-                explanations[callout_num] = (explanation_lines, all_conditionals)
+                explanations[callout_num] = (explanation_lines, row_conditionals)
 
         return explanations
 

@@ -397,37 +509,22 @@ class TableParser:
 
             callout_num = int(item_num_str)
 
-            # Collect value lines (column 2)
+            # Collect value lines (column 2), preserving all content including conditionals
             value_lines = []
             for line in value_cell.content:
-
-                if not (self.IFDEF_PATTERN.match(line) or self.ENDIF_PATTERN.match(line)):
-                    value_lines.append(line)
+                value_lines.append(line)
 
-            # Collect description lines (column 3)
+            # Collect description lines (column 3), preserving all content including conditionals
             description_lines = []
             for line in desc_cell.content:
-
-                if not (self.IFDEF_PATTERN.match(line) or self.ENDIF_PATTERN.match(line)):
-                    description_lines.append(line)
-
-            # Collect all conditionals for this row
-            all_conditionals = []
-            all_conditionals.extend(row.conditionals_before)
-
-            # Extract conditionals from value cell
-            for line in value_cell.content:
-                if self.IFDEF_PATTERN.match(line) or self.ENDIF_PATTERN.match(line):
-                    all_conditionals.append(line)
-
-            # Extract conditionals from description cell
-            for line in desc_cell.content:
-                if self.IFDEF_PATTERN.match(line) or self.ENDIF_PATTERN.match(line):
-                    all_conditionals.append(line)
+                description_lines.append(line)
 
-
+            # Collect conditionals that appear before/after the row
+            row_conditionals = []
+            row_conditionals.extend(row.conditionals_before)
+            row_conditionals.extend(row.conditionals_after)
 
-            explanations[callout_num] = (value_lines, description_lines,
+            explanations[callout_num] = (value_lines, description_lines, row_conditionals)
 
         return explanations