markdown-merge 1.0.2 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/lib/markdown/merge/version.rb +3 -4
- data/lib/markdown/merge.rb +538 -137
- data/lib/markdown-merge.rb +3 -4
- data.tar.gz.sig +0 -0
- metadata +28 -283
- metadata.gz.sig +0 -0
- data/CHANGELOG.md +0 -283
- data/CITATION.cff +0 -20
- data/CODE_OF_CONDUCT.md +0 -134
- data/CONTRIBUTING.md +0 -227
- data/FUNDING.md +0 -74
- data/LICENSE.txt +0 -21
- data/README.md +0 -1090
- data/REEK +0 -0
- data/RUBOCOP.md +0 -71
- data/SECURITY.md +0 -21
- data/lib/markdown/merge/cleanse/block_spacing.rb +0 -253
- data/lib/markdown/merge/cleanse/code_fence_spacing.rb +0 -294
- data/lib/markdown/merge/cleanse/condensed_link_refs.rb +0 -405
- data/lib/markdown/merge/cleanse.rb +0 -42
- data/lib/markdown/merge/code_block_merger.rb +0 -300
- data/lib/markdown/merge/conflict_resolver.rb +0 -128
- data/lib/markdown/merge/debug_logger.rb +0 -26
- data/lib/markdown/merge/document_problems.rb +0 -190
- data/lib/markdown/merge/file_aligner.rb +0 -196
- data/lib/markdown/merge/file_analysis.rb +0 -353
- data/lib/markdown/merge/file_analysis_base.rb +0 -629
- data/lib/markdown/merge/freeze_node.rb +0 -93
- data/lib/markdown/merge/gap_line_node.rb +0 -136
- data/lib/markdown/merge/link_definition_formatter.rb +0 -49
- data/lib/markdown/merge/link_definition_node.rb +0 -157
- data/lib/markdown/merge/link_parser.rb +0 -421
- data/lib/markdown/merge/link_reference_rehydrator.rb +0 -320
- data/lib/markdown/merge/markdown_structure.rb +0 -123
- data/lib/markdown/merge/merge_result.rb +0 -166
- data/lib/markdown/merge/node_type_normalizer.rb +0 -126
- data/lib/markdown/merge/output_builder.rb +0 -166
- data/lib/markdown/merge/partial_template_merger.rb +0 -334
- data/lib/markdown/merge/smart_merger.rb +0 -221
- data/lib/markdown/merge/smart_merger_base.rb +0 -621
- data/lib/markdown/merge/table_match_algorithm.rb +0 -504
- data/lib/markdown/merge/table_match_refiner.rb +0 -136
- data/lib/markdown/merge/whitespace_normalizer.rb +0 -251
- data/sig/markdown/merge.rbs +0 -341
data/lib/markdown/merge.rb
CHANGED
|
@@ -1,149 +1,550 @@
|
|
|
1
1
|
# frozen_string_literal: true
|
|
2
2
|
|
|
3
|
-
# External gems
|
|
4
|
-
require "version_gem"
|
|
5
|
-
require "set"
|
|
6
|
-
|
|
7
|
-
# Shared merge infrastructure
|
|
8
|
-
require "ast/merge"
|
|
9
|
-
|
|
10
|
-
# tree_haver provides unified markdown parsing via multiple backends
|
|
11
3
|
require "tree_haver"
|
|
12
4
|
|
|
13
|
-
# This gem - only require version
|
|
14
|
-
require_relative "merge/version"
|
|
15
|
-
|
|
16
5
|
module Markdown
|
|
17
|
-
# Smart merging for Markdown files using AST-based parsers via tree_haver.
|
|
18
|
-
#
|
|
19
|
-
# Markdown::Merge provides intelligent Markdown merging with support for
|
|
20
|
-
# multiple parsing backends (Commonmarker, Markly) through tree_haver:
|
|
21
|
-
# - Standalone SmartMerger that works with any available backend
|
|
22
|
-
# - Matching structural elements (headings, paragraphs, lists, etc.) between files
|
|
23
|
-
# - Preserving frozen sections marked with HTML comments
|
|
24
|
-
# - Resolving conflicts based on configurable preferences
|
|
25
|
-
# - Node type normalization for portable merge rules across backends
|
|
26
|
-
#
|
|
27
|
-
# Can be used directly or through parser-specific wrappers
|
|
28
|
-
# (commonmarker-merge, markly-merge) that provide hard dependencies
|
|
29
|
-
# and backend-specific defaults.
|
|
30
|
-
#
|
|
31
|
-
# @example Direct usage with auto backend detection
|
|
32
|
-
# require "markdown/merge"
|
|
33
|
-
# merger = Markdown::Merge::SmartMerger.new(template, destination)
|
|
34
|
-
# result = merger.merge
|
|
35
|
-
#
|
|
36
|
-
# @example With specific backend
|
|
37
|
-
# merger = Markdown::Merge::SmartMerger.new(
|
|
38
|
-
# template,
|
|
39
|
-
# destination,
|
|
40
|
-
# backend: :markly,
|
|
41
|
-
# flags: Markly::DEFAULT,
|
|
42
|
-
# extensions: [:table, :strikethrough]
|
|
43
|
-
# )
|
|
44
|
-
# result = merger.merge
|
|
45
|
-
#
|
|
46
|
-
# @example Using via commonmarker-merge
|
|
47
|
-
# require "commonmarker/merge"
|
|
48
|
-
# merger = Commonmarker::Merge::SmartMerger.new(template, destination)
|
|
49
|
-
# result = merger.merge
|
|
50
|
-
#
|
|
51
|
-
# @see SmartMerger Main entry point for merging
|
|
52
|
-
# @see FileAnalysis For parsing and analyzing Markdown files
|
|
53
|
-
# @see NodeTypeNormalizer For type normalization across backends
|
|
54
6
|
module Merge
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
7
|
+
PACKAGE_NAME = "markdown-merge"
|
|
8
|
+
BACKEND_REFERENCES = {
|
|
9
|
+
"kreuzberg-language-pack" => TreeHaver::KREUZBERG_LANGUAGE_PACK_BACKEND
|
|
10
|
+
}.freeze
|
|
11
|
+
|
|
12
|
+
# Describes the markdown family capabilities advertised by this gem.
#
# @return [Hash] family descriptor with :family, :supported_dialects,
#   and :supported_policies keys
def markdown_feature_profile
  profile = { family: "markdown" }
  profile[:supported_dialects] = ["markdown"]
  profile[:supported_policies] = []
  profile
end
|
|
19
|
+
|
|
20
|
+
# Lists every backend reference this gem can parse markdown with.
#
# @return [Array] backend reference objects from BACKEND_REFERENCES
def available_markdown_backends
  BACKEND_REFERENCES.map { |_name, reference| reference }
end

# Builds the feature profile for a specific backend.
#
# @param backend [String, Symbol, nil] backend name; nil selects the default
# @return [Hash] the family profile augmented with :backend and :backend_ref,
#   or an unsupported-feature error result for unknown backends
def markdown_backend_feature_profile(backend: nil)
  name = resolve_backend(backend)
  unless BACKEND_REFERENCES.key?(name)
    return unsupported_feature_result("Unsupported Markdown backend #{name}.")
  end

  extras = { backend: name, backend_ref: BACKEND_REFERENCES.fetch(name).to_h }
  markdown_feature_profile.merge(extras)
end
|
|
33
|
+
|
|
34
|
+
# Assembles the planning context handed to merge planners.
#
# @param backend [String, Symbol, nil] backend name; nil selects the default
# @return [Hash] :family_profile / :feature_profile pair, or the error result
#   returned by the backend lookup when the backend is unsupported
def markdown_plan_context(backend: nil)
  profile = markdown_backend_feature_profile(backend: backend)
  return profile if profile[:ok] == false

  feature = {
    backend: profile[:backend],
    supports_dialects: false,
    supported_policies: profile[:supported_policies]
  }
  { family_profile: markdown_feature_profile, feature_profile: feature }
end
|
|
47
|
+
|
|
48
|
+
# Parses markdown source into the analysis envelope used by merge planning.
#
# @param source [String] markdown text
# @param dialect [String] must be "markdown"
# @param backend [String, Symbol, nil] backend name; nil selects the default
# @return [Hash] { ok:, diagnostics:, analysis:, policies: } on success, or an
#   error result; parse failures become diagnostics rather than exceptions
def parse_markdown(source, dialect, backend: nil)
  return unsupported_feature_result("Unsupported Markdown dialect #{dialect}.") unless dialect == "markdown"

  resolved_backend = resolve_backend(backend)
  # Consistency fix: validate against the backend registry, exactly as
  # markdown_backend_feature_profile does, instead of comparing against a
  # hard-coded backend name that would silently drift if the registry grows.
  unless BACKEND_REFERENCES.key?(resolved_backend)
    return unsupported_feature_result("Unsupported Markdown backend #{resolved_backend}.")
  end

  syntax = TreeHaver.parse_with_language_pack(
    TreeHaver::ParserRequest.new(source: source, language: "markdown", dialect: dialect)
  )
  return { ok: false, diagnostics: syntax[:diagnostics], policies: [] } unless syntax[:ok]

  normalized_source = normalize_source(source)
  {
    ok: true,
    diagnostics: [],
    analysis: {
      kind: "markdown",
      dialect: dialect,
      normalized_source: normalized_source,
      root_kind: "document",
      owners: collect_markdown_owners(normalized_source)
    },
    policies: []
  }
rescue StandardError => e
  # Any parser blow-up is reported as a structured diagnostic.
  {
    ok: false,
    diagnostics: [{ severity: "error", category: "parse_error", message: e.message }],
    policies: []
  }
end
|
|
77
81
|
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
# begin
|
|
82
|
-
# merger = SmartMerger.new(template, destination)
|
|
83
|
-
# result = merger.merge
|
|
84
|
-
# rescue TemplateParseError => e
|
|
85
|
-
# puts "Template syntax error: #{e.message}"
|
|
86
|
-
# e.errors.each { |error| puts " #{error.message}" }
|
|
87
|
-
# end
|
|
88
|
-
class TemplateParseError < ParseError; end
|
|
89
|
-
|
|
90
|
-
# Raised when the destination file has syntax errors.
|
|
91
|
-
#
|
|
92
|
-
# @example Handling destination parse errors
|
|
93
|
-
# begin
|
|
94
|
-
# merger = SmartMerger.new(template, destination)
|
|
95
|
-
# result = merger.merge
|
|
96
|
-
# rescue DestinationParseError => e
|
|
97
|
-
# puts "Destination syntax error: #{e.message}"
|
|
98
|
-
# e.errors.each { |error| puts " #{error.message}" }
|
|
99
|
-
# end
|
|
100
|
-
class DestinationParseError < ParseError; end
|
|
101
|
-
|
|
102
|
-
# Autoload all components - base classes
|
|
103
|
-
autoload :Cleanse, "markdown/merge/cleanse"
|
|
104
|
-
autoload :DebugLogger, "markdown/merge/debug_logger"
|
|
105
|
-
autoload :FreezeNode, "markdown/merge/freeze_node"
|
|
106
|
-
autoload :FileAnalysisBase, "markdown/merge/file_analysis_base"
|
|
107
|
-
autoload :FileAligner, "markdown/merge/file_aligner"
|
|
108
|
-
autoload :ConflictResolver, "markdown/merge/conflict_resolver"
|
|
109
|
-
autoload :MergeResult, "markdown/merge/merge_result"
|
|
110
|
-
autoload :TableMatchAlgorithm, "markdown/merge/table_match_algorithm"
|
|
111
|
-
autoload :TableMatchRefiner, "markdown/merge/table_match_refiner"
|
|
112
|
-
autoload :CodeBlockMerger, "markdown/merge/code_block_merger"
|
|
113
|
-
autoload :SmartMergerBase, "markdown/merge/smart_merger_base"
|
|
114
|
-
autoload :LinkDefinitionNode, "markdown/merge/link_definition_node"
|
|
115
|
-
autoload :GapLineNode, "markdown/merge/gap_line_node"
|
|
116
|
-
autoload :OutputBuilder, "markdown/merge/output_builder"
|
|
117
|
-
autoload :LinkDefinitionFormatter, "markdown/merge/link_definition_formatter"
|
|
118
|
-
autoload :MarkdownStructure, "markdown/merge/markdown_structure"
|
|
119
|
-
autoload :DocumentProblems, "markdown/merge/document_problems"
|
|
120
|
-
autoload :WhitespaceNormalizer, "markdown/merge/whitespace_normalizer"
|
|
121
|
-
autoload :LinkParser, "markdown/merge/link_parser"
|
|
122
|
-
autoload :LinkReferenceRehydrator, "markdown/merge/link_reference_rehydrator"
|
|
123
|
-
|
|
124
|
-
# Autoload concrete implementations (tree_haver-based)
|
|
125
|
-
autoload :NodeTypeNormalizer, "markdown/merge/node_type_normalizer"
|
|
126
|
-
autoload :FileAnalysis, "markdown/merge/file_analysis"
|
|
127
|
-
autoload :SmartMerger, "markdown/merge/smart_merger"
|
|
128
|
-
autoload :PartialTemplateMerger, "markdown/merge/partial_template_merger"
|
|
129
|
-
end
|
|
130
|
-
end
|
|
82
|
+
# Pairs template owners with destination owners that share the same path.
#
# @param template [Hash] parse analysis containing an :owners array
# @param destination [Hash] parse analysis containing an :owners array
# @return [Hash] :matched path pairs plus :unmatched_template and
#   :unmatched_destination path lists
def match_markdown_owners(template, destination)
  in_destination = {}
  destination[:owners].each { |owner| in_destination[owner[:path]] = true }
  in_template = {}
  template[:owners].each { |owner| in_template[owner[:path]] = true }

  matched = template[:owners].each_with_object([]) do |owner, pairs|
    next unless in_destination[owner[:path]]

    pairs << { template_path: owner[:path], destination_path: owner[:path] }
  end

  {
    matched: matched,
    unmatched_template: template[:owners].filter_map { |owner| owner[:path] unless in_destination[owner[:path]] },
    unmatched_destination: destination[:owners].filter_map { |owner| owner[:path] unless in_template[owner[:path]] }
  }
end
|
|
94
|
+
|
|
95
|
+
# Merges two markdown documents section-by-section: every destination section
# is kept in its original order, then template-only sections are appended.
#
# @return [Hash] { ok:, diagnostics:, output:, policies: } or a parse error
def merge_markdown(template_source, destination_source, dialect, backend: nil)
  template = parse_markdown(template_source, dialect, backend: backend)
  return template unless template[:ok]

  destination = parse_markdown(destination_source, dialect, backend: backend)
  return destination unless destination[:ok]

  sections_for = lambda do |parsed|
    collect_markdown_sections(
      parsed.dig(:analysis, :normalized_source),
      parsed.dig(:analysis, :owners)
    )
  end
  destination_sections = sections_for.call(destination)
  template_sections = sections_for.call(template)

  known_paths = destination_sections.to_h { |section| [section[:path], true] }
  kept = destination_sections.filter_map { |section| section[:text] unless section[:text].empty? }
  appended = template_sections.filter_map do |section|
    next if known_paths[section[:path]] || section[:text].empty?

    section[:text]
  end

  {
    ok: true,
    diagnostics: [],
    output: "#{(kept + appended).join("\n\n").strip}\n",
    policies: []
  }
end
|
|
123
|
+
|
|
124
|
+
# Finds fenced code blocks whose info string maps to a known language family.
#
# @param analysis [Hash] parse analysis containing an :owners array
# @return [Array<Hash>] one { path:, language:, family:, dialect: } per fence
def markdown_embedded_families(analysis)
  analysis[:owners].filter_map do |owner|
    next unless owner[:owner_kind] == "code_fence"

    info = owner[:info_string].to_s
    next if info.empty?

    family = code_fence_family(info)
    dialect = code_fence_dialect(info, family)
    next unless family && dialect

    { path: owner[:path], language: owner[:info_string], family: family, dialect: dialect }
  end
end
|
|
141
|
+
|
|
142
|
+
# Wraps each embedded code fence in an Ast::Merge discovered-surface record.
#
# @param analysis [Hash] parse analysis containing an :owners array
# @return [Array] discovered-surface records from Ast::Merge
def markdown_discovered_surfaces(analysis)
  markdown_embedded_families(analysis).map do |fence|
    owner_ref = Ast::Merge.surface_owner_ref(kind: "structural_owner", address: fence[:path])
    Ast::Merge.discovered_surface(
      surface_kind: "markdown_fenced_code_block",
      declared_language: fence[:language],
      effective_language: fence[:dialect],
      address: "document[0] > fenced_code_block[#{fence[:path]}]",
      parent_address: "document[0]",
      owner: owner_ref,
      reconstruction_strategy: "portable_write",
      metadata: {
        family: fence[:family],
        dialect: fence[:dialect],
        path: fence[:path]
      }
    )
  end
end
|
|
160
|
+
|
|
161
|
+
# Builds one delegated child operation per discovered fenced-code surface.
#
# @param parent_operation_id [String] id the children are attached to
# @return [Array] delegated-child-operation records from Ast::Merge
def markdown_delegated_child_operations(analysis, parent_operation_id: "markdown-document-0")
  operations = []
  markdown_discovered_surfaces(analysis).each_with_index do |surface, index|
    operations << Ast::Merge.delegated_child_operation(
      operation_id: "markdown-fence-#{index}",
      parent_operation_id: parent_operation_id,
      requested_strategy: "delegate_child_surface",
      language_chain: ["markdown", surface[:effective_language]],
      surface: surface
    )
  end
  operations
end
|
|
172
|
+
|
|
173
|
+
# Splices resolved child outputs back into their fenced code blocks.
# Replacements are applied bottom-up so earlier line ranges stay valid.
#
# @return [Hash] { ok:, diagnostics:, output:, policies: }; errors out if a
#   plan entry references a fence with no computed line range
def apply_markdown_delegated_child_outputs(source, delegated_operations, apply_plan, applied_children)
  lines = normalize_source(source).split("\n")
  ranges = markdown_fence_ranges(source)
  operations_by_id = delegated_operations.to_h { |operation| [operation[:operation_id], operation] }
  outputs_by_id = applied_children.to_h { |entry| [entry[:operation_id], entry[:output]] }

  replacements = []
  apply_plan[:entries].each do |entry|
    child_id = entry.dig(:delegated_group, :child_operation_id)
    operation = operations_by_id[child_id]
    output = outputs_by_id[child_id]
    next if operation.nil? || output.nil?

    owner_path = operation.dig(:surface, :owner, :address)
    range = ranges[owner_path]
    if range.nil?
      return {
        ok: false,
        diagnostics: [{ severity: "error", category: "configuration_error", message: "missing fenced-code range for #{owner_path}" }],
        policies: []
      }
    end

    replacements << { range: range, output: output }
  end

  # Bottom-up: replacing later fences first keeps earlier indices correct.
  replacements.sort_by { |entry| -entry[:range][:start] }.each do |entry|
    body_lines = entry[:output].empty? ? [] : entry[:output].sub(/\n\z/, "").split("\n")
    lines[entry[:range][:start] + 1...entry[:range][:end]] = body_lines
  end

  {
    ok: true,
    diagnostics: [],
    output: "#{lines.join("\n").sub(/\n+\z/, "")}\n",
    policies: []
  }
end
|
|
207
|
+
|
|
208
|
+
# Runs a nested merge: merges the parent markdown documents, discovers fenced
# code blocks as delegated child surfaces, and splices child outputs back in.
#
# @param nested_outputs [Object] child outputs passed to Ast::Merge
# @param backend [String, Symbol, nil] backend name; nil selects the default
# @return [Hash] result envelope from Ast::Merge.execute_nested_merge
def merge_markdown_with_nested_outputs(template_source, destination_source, dialect, nested_outputs, backend: nil)
  Ast::Merge.execute_nested_merge(
    nested_outputs,
    default_family: "markdown",
    request_id_prefix: "nested_markdown_child",
    merge_parent: -> { merge_markdown(template_source, destination_source, dialect, backend: backend) },
    discover_operations: lambda { |merged_output|
      analysis = parse_markdown(merged_output, dialect, backend: backend)
      # Parenthesized (matching the reviewed variant) so the hash literal is
      # unambiguously an argument to `next`, not a block on the keyword.
      next({ ok: false, diagnostics: analysis[:diagnostics] || [] }) unless analysis[:ok]

      {
        ok: true,
        diagnostics: [],
        operations: markdown_delegated_child_operations(analysis[:analysis])
      }
    },
    apply_resolved_outputs: lambda { |merged_output, operations, apply_plan, applied_children|
      apply_markdown_delegated_child_outputs(
        merged_output,
        operations,
        apply_plan,
        applied_children
      )
    }
  )
end
|
|
234
|
+
|
|
235
|
+
# Re-runs a nested merge under an existing human review: the review state
# supplies the decisions and applied_children supplies the child outputs.
#
# @return [Hash] result envelope from Ast::Merge.execute_reviewed_nested_merge
def merge_markdown_with_reviewed_nested_outputs(template_source, destination_source, dialect, review_state, applied_children, backend: nil)
  discover = lambda do |merged_output|
    analysis = parse_markdown(merged_output, dialect, backend: backend)
    next({ ok: false, diagnostics: analysis[:diagnostics] || [] }) unless analysis[:ok]

    {
      ok: true,
      diagnostics: [],
      operations: markdown_delegated_child_operations(analysis[:analysis])
    }
  end
  apply = lambda do |merged_output, operations, apply_plan, resolved_children|
    apply_markdown_delegated_child_outputs(
      merged_output,
      operations,
      apply_plan,
      resolved_children
    )
  end

  Ast::Merge.execute_reviewed_nested_merge(
    review_state,
    "markdown",
    applied_children,
    merge_parent: -> { merge_markdown(template_source, destination_source, dialect, backend: backend) },
    discover_operations: discover,
    apply_resolved_outputs: apply
  )
end
|
|
261
|
+
|
|
262
|
+
# Replays a reviewed nested merge from a persisted replay bundle.
def merge_markdown_with_reviewed_nested_outputs_from_replay_bundle(template_source, destination_source, dialect, replay_bundle, backend: nil)
  execution = Array(replay_bundle[:reviewed_nested_executions]).find { |entry| entry[:family] == "markdown" }
  unless execution
    return {
      ok: false,
      diagnostics: [{ severity: "error", category: "configuration_error", message: "review replay bundle does not include a reviewed nested execution for markdown." }],
      policies: []
    }
  end

  merge_markdown_with_reviewed_nested_outputs(
    template_source,
    destination_source,
    dialect,
    execution[:review_state],
    execution[:applied_children],
    backend: backend
  )
end

# Replays a reviewed nested merge straight from an in-memory review state.
def merge_markdown_with_reviewed_nested_outputs_from_review_state(template_source, destination_source, dialect, review_state, backend: nil)
  execution = Array(review_state[:reviewed_nested_executions]).find { |entry| entry[:family] == "markdown" }
  unless execution
    return {
      ok: false,
      diagnostics: [{ severity: "error", category: "configuration_error", message: "review state does not include a reviewed nested execution for markdown." }],
      policies: []
    }
  end

  merge_markdown_with_reviewed_nested_outputs(
    template_source,
    destination_source,
    dialect,
    execution[:review_state],
    execution[:applied_children],
    backend: backend
  )
end

# Imports a replay-bundle envelope, then replays the reviewed nested merge.
def merge_markdown_with_reviewed_nested_outputs_from_replay_bundle_envelope(template_source, destination_source, dialect, envelope, backend: nil)
  replay_bundle, import_error = Ast::Merge.import_review_replay_bundle_envelope(envelope)
  if import_error
    return {
      ok: false,
      diagnostics: [{ severity: "error", category: import_error[:category], message: import_error[:message] }],
      policies: []
    }
  end

  merge_markdown_with_reviewed_nested_outputs_from_replay_bundle(
    template_source,
    destination_source,
    dialect,
    replay_bundle,
    backend: backend
  )
end

# Imports a review-state envelope, then replays the reviewed nested merge.
def merge_markdown_with_reviewed_nested_outputs_from_review_state_envelope(template_source, destination_source, dialect, envelope, backend: nil)
  review_state, import_error = Ast::Merge.import_conformance_manifest_review_state_envelope(envelope)
  if import_error
    return {
      ok: false,
      diagnostics: [{ severity: "error", category: import_error[:category], message: import_error[:message] }],
      policies: []
    }
  end

  merge_markdown_with_reviewed_nested_outputs_from_review_state(
    template_source,
    destination_source,
    dialect,
    review_state,
    backend: backend
  )
end
|
|
315
|
+
|
|
316
|
+
# Normalizes Windows (\r\n) and old-Mac (\r) line endings to plain \n.
#
# @param source [String] raw text
# @return [String] a new string with uniform \n line endings
def normalize_source(source)
  source.gsub("\r\n", "\n").tr("\r", "\n")
end
|
|
319
|
+
|
|
320
|
+
# Converts heading text into a stable lowercase anchor slug. Inline markdown
# punctuation is stripped, other non-alphanumeric runs become single hyphens,
# and an all-punctuation heading falls back to "section".
#
# @param value [String] heading text
# @return [String] slug, never empty
def slugify(value)
  cleaned = value.strip.downcase
  cleaned = cleaned.gsub(/[`*_~\[\]()<>]/, "")
  cleaned = cleaned.gsub(/[^a-z0-9]+/, "-")
  cleaned = cleaned.gsub(/\A-+|-+\z/, "")
  cleaned.empty? ? "section" : cleaned
end
|
|
329
|
+
|
|
330
|
+
# Scans markdown line-by-line and records structural owners: ATX headings
# (levels 1-6) and fenced code blocks (3+ backticks or tildes). Fence bodies
# are skipped so heading-like lines inside code are not counted.
#
# @param source [String] markdown text (already \n-normalized by the caller)
# @return [Array<Hash>] owner records with :path, :owner_kind, :match_key,
#   plus :level for headings and :info_string for non-plain fences
def collect_markdown_owners(source)
  owners = []
  counters = Hash.new(0)
  lines = source.split("\n")
  cursor = 0

  while cursor < lines.length
    line = lines[cursor]

    heading = line.match(/^(#+)\s+(.+?)\s*#*\s*$/)
    if heading && heading[1].length.between?(1, 6)
      level = heading[1].length
      owners << {
        path: "/heading/#{counters["heading"]}",
        owner_kind: "heading",
        match_key: "h#{level}:#{slugify(heading[2])}",
        level: level
      }
      counters["heading"] += 1
      cursor += 1
      next
    end

    fence = line.match(/^\s*(`{3,}|~{3,})\s*(.*?)\s*$/)
    if fence
      marker = fence[1]
      info_string = fence[2].strip.split(/\s+/).first.to_s
      owner = {
        path: "/code_fence/#{counters["code_fence"]}",
        owner_kind: "code_fence",
        match_key: "fence:#{info_string.empty? ? "plain" : info_string}"
      }
      owner[:info_string] = info_string unless info_string.empty?
      owners << owner
      counters["code_fence"] += 1

      # Skip the fence body until a closing run of the same marker character,
      # at least as long as the opener and containing nothing else.
      cursor += 1
      while cursor < lines.length
        trimmed = lines[cursor].strip
        closes = trimmed.length >= marker.length &&
          trimmed.start_with?(marker[0] * marker.length) &&
          trimmed.delete(marker[0]).empty?
        break if closes

        cursor += 1
      end
      cursor += 1
      next
    end

    cursor += 1
  end

  owners
end
|
|
383
|
+
|
|
384
|
+
# Maps each owner path ("/heading/N", "/code_fence/N") to the 0-based line
# index where that owner begins. Uses the same scan rules as
# collect_markdown_owners so the two path sequences line up.
#
# @param source [String] markdown text (any line endings)
# @return [Hash{String => Integer}] owner path => starting line index
def markdown_owner_start_indices(source)
  starts = {}
  counters = Hash.new(0)
  lines = normalize_source(source).split("\n")
  cursor = 0

  while cursor < lines.length
    line = lines[cursor]

    heading = line.match(/^(#+)\s+(.+?)\s*#*\s*$/)
    if heading && heading[1].length.between?(1, 6)
      starts["/heading/#{counters["heading"]}"] = cursor
      counters["heading"] += 1
      cursor += 1
      next
    end

    fence = line.match(/^\s*(`{3,}|~{3,})\s*(.*?)\s*$/)
    if fence
      starts["/code_fence/#{counters["code_fence"]}"] = cursor
      counters["code_fence"] += 1
      marker = fence[1]
      # Skip the fence body so interior lines are never treated as owners.
      cursor += 1
      while cursor < lines.length
        trimmed = lines[cursor].strip
        closes = trimmed.length >= marker.length &&
          trimmed.start_with?(marker[0] * marker.length) &&
          trimmed.delete(marker[0]).empty?
        break if closes

        cursor += 1
      end
      cursor += 1
      next
    end

    cursor += 1
  end

  starts
end
|
|
424
|
+
|
|
425
|
+
# Slices the document into per-owner text sections ordered by start line.
# Each section spans from its owner's first line up to (not including) the
# next owner's start; the final section runs to end of document.
#
# @param source [String] markdown text
# @param owners [Array<Hash>] owner records with :path keys
# @return [Array<Hash>] { path:, text: } entries (text is stripped)
def collect_markdown_sections(source, owners)
  lines = normalize_source(source).split("\n")
  starts = markdown_owner_start_indices(source)

  ordered = []
  owners.each do |owner|
    start = starts[owner[:path]]
    ordered << { owner: owner, start: start } unless start.nil?
  end
  ordered.sort_by! { |entry| entry[:start] }

  ordered.map.with_index do |entry, index|
    following = ordered[index + 1]
    finish = following ? following[:start] : lines.length
    {
      path: entry[:owner][:path],
      text: lines[entry[:start]...finish].join("\n").strip
    }
  end
end
|
|
443
|
+
|
|
444
|
+
# Computes { "/code_fence/N" => { start:, end: } } 0-based line ranges for
# every fenced code block, inclusive of both fence lines. An unclosed fence
# extends to the last line of the document.
#
# @param source [String] markdown text
# @return [Hash{String => Hash}] fence path => { start:, end: }
def markdown_fence_ranges(source)
  ranges = {}
  fence_count = 0
  lines = normalize_source(source).split("\n")
  cursor = 0

  while cursor < lines.length
    fence = lines[cursor].match(/^\s*(`{3,}|~{3,})\s*(.*?)\s*$/)
    unless fence
      cursor += 1
      next
    end

    marker = fence[1]
    closing_index = cursor
    probe = cursor + 1
    while probe < lines.length
      trimmed = lines[probe].strip
      if trimmed.length >= marker.length &&
          trimmed.start_with?(marker[0] * marker.length) &&
          trimmed.delete(marker[0]).empty?
        closing_index = probe
        break
      end
      # No closer found before EOF: the fence runs to the final line.
      closing_index = probe if probe == lines.length - 1
      probe += 1
    end

    ranges["/code_fence/#{fence_count}"] = { start: cursor, end: closing_index }
    fence_count += 1
    cursor = closing_index + 1
  end

  ranges
end
|
|
481
|
+
|
|
482
|
+
# Maps a fence info string to its language family; nil for unknown languages.
#
# @param info_string [String, nil] fence info string (e.g. "ts", "yml")
# @return [String, nil] family name or nil
def code_fence_family(info_string)
  lookup = {
    "ts" => "typescript", "typescript" => "typescript",
    "rust" => "rust", "rs" => "rust",
    "go" => "go",
    "json" => "json", "jsonc" => "json",
    "yaml" => "yaml", "yml" => "yaml",
    "toml" => "toml"
  }
  lookup[info_string.to_s.downcase]
end
|
|
498
|
+
|
|
499
|
+
# Picks the concrete dialect for a language family. Only the json family
# distinguishes dialects (json vs jsonc); unknown families yield nil.
#
# @param info_string [String, nil] original fence info string
# @param family [String, nil] family from code_fence_family
# @return [String, nil] dialect name or nil
def code_fence_dialect(info_string, family)
  if %w[typescript rust go yaml toml].include?(family)
    family
  elsif family == "json"
    info_string.to_s.downcase == "jsonc" ? "jsonc" : "json"
  end
end
|
|
507
|
+
|
|
508
|
+
# Resolves the effective backend name; nil/blank selects the default pack.
#
# @param backend [String, Symbol, nil]
# @return [String] backend name, defaulting to "kreuzberg-language-pack"
def resolve_backend(backend)
  name = backend.to_s
  name.empty? ? "kreuzberg-language-pack" : name
end
|
|
511
|
+
|
|
512
|
+
# Builds the standard error envelope for an unsupported dialect or backend.
#
# @param message [String] human-readable description of the unsupported thing
# @return [Hash] { ok: false, diagnostics: [...], policies: [] }
def unsupported_feature_result(message)
  diagnostic = { severity: "error", category: "unsupported_feature", message: message }
  { ok: false, diagnostics: [diagnostic], policies: [] }
end
|
|
519
|
+
|
|
520
|
+
# Expose every helper both as a module-level function and as a private
# instance method for includers.
%i[
  markdown_feature_profile
  available_markdown_backends
  markdown_backend_feature_profile
  markdown_plan_context
  parse_markdown
  match_markdown_owners
  merge_markdown
  markdown_embedded_families
  markdown_discovered_surfaces
  markdown_delegated_child_operations
  apply_markdown_delegated_child_outputs
  merge_markdown_with_reviewed_nested_outputs
  merge_markdown_with_reviewed_nested_outputs_from_replay_bundle
  merge_markdown_with_reviewed_nested_outputs_from_replay_bundle_envelope
  merge_markdown_with_reviewed_nested_outputs_from_review_state
  merge_markdown_with_reviewed_nested_outputs_from_review_state_envelope
  merge_markdown_with_nested_outputs
  normalize_source
  slugify
  collect_markdown_owners
  markdown_owner_start_indices
  collect_markdown_sections
  markdown_fence_ranges
  code_fence_family
  code_fence_dialect
  resolve_backend
  unsupported_feature_result
].each { |helper| module_function(helper) }
|
|
549
|
+
end
|
|
149
550
|
end
|
data/lib/markdown-merge.rb
CHANGED
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
#
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
require "markdown/merge"
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require_relative "markdown/merge"
|
data.tar.gz.sig
CHANGED
|
Binary file
|