ace-support-core 0.29.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.ace-defaults/core/settings.yml +36 -0
- data/CHANGELOG.md +460 -0
- data/LICENSE +21 -0
- data/README.md +34 -0
- data/Rakefile +14 -0
- data/lib/ace/core/atoms/command_executor.rb +239 -0
- data/lib/ace/core/atoms/config_summary.rb +220 -0
- data/lib/ace/core/atoms/env_parser.rb +76 -0
- data/lib/ace/core/atoms/file_reader.rb +184 -0
- data/lib/ace/core/atoms/glob_expander.rb +175 -0
- data/lib/ace/core/atoms/process_terminator.rb +39 -0
- data/lib/ace/core/atoms/template_parser.rb +222 -0
- data/lib/ace/core/cli/config_summary_mixin.rb +55 -0
- data/lib/ace/core/cli.rb +192 -0
- data/lib/ace/core/config_discovery.rb +176 -0
- data/lib/ace/core/errors.rb +14 -0
- data/lib/ace/core/models/config_templates.rb +87 -0
- data/lib/ace/core/molecules/env_loader.rb +128 -0
- data/lib/ace/core/molecules/file_aggregator.rb +196 -0
- data/lib/ace/core/molecules/frontmatter_free_policy.rb +34 -0
- data/lib/ace/core/molecules/output_formatter.rb +433 -0
- data/lib/ace/core/molecules/prompt_cache_manager.rb +141 -0
- data/lib/ace/core/organisms/config_diff.rb +187 -0
- data/lib/ace/core/organisms/config_initializer.rb +125 -0
- data/lib/ace/core/organisms/environment_manager.rb +142 -0
- data/lib/ace/core/version.rb +7 -0
- data/lib/ace/core.rb +144 -0
- metadata +115 -0
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "pathname"

require_relative "../atoms/file_reader"
require_relative "../atoms/glob_expander"
require "ace/support/fs"
module Ace
  module Core
    module Molecules
      # Aggregates file contents from multiple sources using atoms.
      #
      # Every public entry point returns a hash of the shape
      # {files: Array<Hash>, errors: Array<String>, stats: Hash} where
      # :stats tracks total_size, file_count, error_count and skipped_count.
      class FileAggregator
        # @param options [Hash]
        # @option options [Integer] :max_size Maximum bytes to read per file
        #   (defaults to Atoms::FileReader::MAX_FILE_SIZE)
        # @option options [String] :base_dir Base directory for pattern
        #   expansion and relative-path display (defaults to the detected
        #   project root)
        # @option options [Array<String>] :exclude Glob patterns to skip
        def initialize(options = {})
          @max_size = options[:max_size] || Atoms::FileReader::MAX_FILE_SIZE
          @base_dir = options[:base_dir] || Ace::Support::Fs::Molecules::ProjectRootFinder.find_or_current
          @exclude_patterns = options[:exclude] || []
        end

        # Aggregate files from glob patterns.
        # @param patterns [Array<String>] File patterns to aggregate
        # @return [Hash] {files: Array, errors: Array, stats: Hash}
        def aggregate(patterns)
          result = empty_result

          patterns = Array(patterns).compact
          return result if patterns.empty?

          # Expand all patterns, then drop anything matching the exclusions.
          all_files = expand_patterns(patterns)
          filtered_files = filter_exclusions(all_files)

          # Everything removed by the exclusion filter counts as skipped.
          result[:stats][:skipped_count] = all_files.size - filtered_files.size

          filtered_files.each do |file_path|
            process_file(file_path, result)
          end

          result
        end

        # Aggregate specific files (no pattern expansion).
        # @param file_paths [Array<String>] Direct file paths
        # @return [Hash] {files: Array, errors: Array, stats: Hash}
        def aggregate_files(file_paths)
          result = empty_result

          file_paths = Array(file_paths).compact
          return result if file_paths.empty?

          file_paths.each do |file_path|
            # Apply exclusions even to direct paths.
            if excluded?(file_path)
              result[:stats][:skipped_count] += 1
              next
            end

            process_file(file_path, result)
          end

          result
        end

        # Find files by name pattern and aggregate them.
        # @param pattern [String] File name pattern
        # @param max_depth [Integer, nil] Maximum directory depth
        # @return [Hash] {files: Array, errors: Array, stats: Hash}
        def find_and_aggregate(pattern, max_depth: nil)
          files = Atoms::GlobExpander.find_files(
            pattern,
            base_dir: @base_dir,
            max_depth: max_depth
          )

          aggregate_files(files)
        end

        private

        # Build a fresh, zeroed result structure shared by the public
        # aggregation entry points (avoids duplicating the literal).
        # @return [Hash] {files: [], errors: [], stats: Hash}
        def empty_result
          {
            files: [],
            errors: [],
            stats: {
              total_size: 0,
              file_count: 0,
              error_count: 0,
              skipped_count: 0
            }
          }
        end

        # Expand glob patterns to concrete file paths.
        # @param patterns [Array<String>] Patterns to expand
        # @return [Array<String>] Expanded file paths
        def expand_patterns(patterns)
          Atoms::GlobExpander.expand_multiple(
            patterns,
            base_dir: @base_dir
          )
        end

        # Filter out excluded files.
        # @param files [Array<String>] Files to filter
        # @return [Array<String>] Files not matching any exclude pattern
        def filter_exclusions(files)
          return files if @exclude_patterns.empty?

          Atoms::GlobExpander.filter_excluded(
            files,
            @exclude_patterns
          )
        end

        # Check whether a single file matches any exclude pattern.
        # @param file_path [String] File path to check
        # @return [Boolean] true if excluded
        def excluded?(file_path)
          return false if @exclude_patterns.empty?

          Atoms::GlobExpander.matches?(
            file_path,
            @exclude_patterns
          )
        end

        # Read one file and fold the outcome into +result+ (mutated in place):
        # success appends to :files and bumps counters; unreadable or failed
        # reads append to :errors; binary files are counted as skipped.
        # @param file_path [String] File path (absolute or base_dir-relative)
        # @param result [Hash] Result hash to update
        def process_file(file_path, result)
          # Resolve file path relative to base directory if not absolute.
          resolved_path = File.absolute_path?(file_path) ? file_path : File.join(@base_dir, file_path)

          # Base-dir-relative form used in messages and the :path field.
          display_path = make_relative_path(resolved_path)

          unless Atoms::FileReader.readable?(resolved_path)
            result[:errors] << "File not readable: #{display_path}"
            result[:stats][:error_count] += 1
            return
          end

          if Atoms::FileReader.binary?(resolved_path)
            result[:errors] << "Binary file skipped: #{display_path}"
            result[:stats][:skipped_count] += 1
            return
          end

          read_result = Atoms::FileReader.read(resolved_path, max_size: @max_size)

          if read_result[:success]
            result[:files] << {
              path: display_path,
              absolute_path: File.expand_path(resolved_path),
              content: read_result[:content],
              size: read_result[:size]
            }

            result[:stats][:file_count] += 1
            result[:stats][:total_size] += read_result[:size]
          else
            result[:errors] << "Failed to read #{display_path}: #{read_result[:error]}"
            result[:stats][:error_count] += 1
          end
        end

        # Make a path relative to the base directory when it lives under it;
        # otherwise (or on any error) return the path unchanged.
        # @param path [String] Path to make relative
        # @return [String] Relative path if possible
        def make_relative_path(path)
          absolute = File.expand_path(path)
          base_absolute = File.expand_path(@base_dir)

          if absolute.start_with?(base_absolute)
            Pathname.new(absolute).relative_path_from(Pathname.new(base_absolute)).to_s
          else
            path
          end
        rescue
          # Best effort only: any failure (e.g. cross-volume paths) falls
          # back to the original string.
          path
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Ace
  module Core
    module Molecules
      # Shared frontmatter-free policy for path matching and config defaults.
      class FrontmatterFreePolicy
        # fnmatch flags: path-aware globbing ("*" never crosses "/"),
        # with extglob braces and dotfile matching enabled.
        MATCH_FLAGS = File::FNM_PATHNAME | File::FNM_EXTGLOB | File::FNM_DOTMATCH
        DEFAULT_PATTERNS = ["README.md", "*/README.md"].freeze

        # Resolve the configured frontmatter-free patterns.
        # @param config [Hash] configuration hash
        # @param key [String] config key holding the pattern list
        # @param default_patterns [Array<String>] fallback patterns
        # @return [Array<String>] the configured patterns, or the defaults
        #   when the config value is missing, not an Array, or empty
        def self.patterns(config:, key: "frontmatter_free", default_patterns: DEFAULT_PATTERNS)
          patterns = config[key]
          return patterns if patterns.is_a?(Array) && !patterns.empty?

          default_patterns
        end

        # Check whether a path matches any of the given glob patterns,
        # testing both its project-relative and absolute forms.
        # @param path [String, nil] path to test
        # @param patterns [Array<String>, nil] glob patterns
        # @param project_root [String] root used to relativize the path
        # @return [Boolean] true when any pattern matches
        def self.match?(path, patterns:, project_root: Dir.pwd)
          return false if path.nil? || path.to_s.empty?
          return false if patterns.nil? || patterns.empty?

          absolute_path = File.expand_path(path)
          root = File.expand_path(project_root || Dir.pwd)
          # \A anchors at the start of the whole string; "^" only anchors a
          # line and could strip a "root" appearing mid-string after a newline.
          relative_path = absolute_path.sub(/\A#{Regexp.escape(root)}\/?/, "")

          patterns.any? do |pattern|
            File.fnmatch?(pattern, relative_path, MATCH_FLAGS) ||
              File.fnmatch?(pattern, absolute_path, MATCH_FLAGS)
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,433 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "yaml"
|
|
4
|
+
|
|
5
|
+
module Ace
  module Core
    module Molecules
      # Formats aggregated content (files, command outputs, git diffs,
      # errors, metadata) into one of several textual output formats.
      class OutputFormatter
        # Supported output formats; anything else falls back to markdown.
        FORMATS = %w[markdown yaml xml markdown-xml json].freeze

        # @param format [String] desired output format (see FORMATS)
        def initialize(format = "markdown")
          @format = validate_format(format)
        end

        # Format aggregated content according to the configured format.
        # @param data [Hash] Aggregated data with files and commands
        # @return [String] Formatted output
        def format(data)
          case @format
          when "markdown"
            format_markdown(data)
          when "yaml"
            format_yaml(data)
          when "xml"
            format_xml(data)
          when "markdown-xml"
            format_markdown_xml(data)
          when "json"
            format_json(data)
          else
            format_markdown(data)
          end
        end

        # Format as pure markdown.
        # @param data [Hash] Data to format
        # @return [String] Markdown formatted output
        def format_markdown(data)
          # Raw-source mode (embed_document_source): emit the document
          # verbatim with XML blocks appended instead of building sections.
          if data[:content] && !data[:content].to_s.empty?
            return format_embedded_source(data)
          end

          output = []

          output << "# Context"
          output << ""

          append_metadata(output, data[:metadata])

          if data[:files] && !data[:files].empty?
            output << "## Files"
            output << ""

            data[:files].each do |file|
              output << "### #{file[:path]}"
              output << ""
              output << "```"
              output << file[:content]
              output << "```"
              output << ""
            end
          end

          if data[:commands] && !data[:commands].empty?
            output << "## Commands"
            output << ""

            data[:commands].each do |cmd|
              output << "### Command: `#{cmd[:command]}`"
              output << ""

              if cmd[:success]
                output << "**Output:**"
                output << "```"
                output << cmd[:output]
                output << "```"
              else
                output << "**Error:** #{cmd[:error]}"
              end
              output << ""
            end
          end

          if data[:diffs] && !data[:diffs].empty?
            output << "## Git Diffs"
            output << ""

            data[:diffs].each do |diff|
              output << "### Diff: `#{diff[:range]}`"
              output << ""

              if diff[:success]
                output << "```diff"
                output << diff[:output]
                output << "```"
              else
                output << "**Error:** #{diff[:error]}"
              end
              output << ""
            end
          end

          if data[:errors] && !data[:errors].empty?
            output << "## Errors"
            output << ""
            data[:errors].each do |error|
              output << "- #{error}"
            end
            output << ""
          end

          output.join("\n").strip
        end

        # Format as YAML.
        # @param data [Hash] Data to format
        # @return [String] YAML formatted output
        def format_yaml(data)
          clean_data = prepare_for_serialization(data)
          YAML.dump(clean_data)
        end

        # Format as JSON.
        # @param data [Hash] Data to format
        # @return [String] JSON formatted output
        def format_json(data)
          require "json"
          clean_data = prepare_for_serialization(data)
          JSON.pretty_generate(clean_data)
        end

        # Format as an XML document.
        # @param data [Hash] Data to format
        # @return [String] XML formatted output
        def format_xml(data)
          output = []
          output << '<?xml version="1.0" encoding="UTF-8"?>'
          output << "<context>"

          if data[:metadata]
            output << "  <metadata>"
            data[:metadata].each do |key, value|
              output << "    <#{key}>#{escape_xml(value.to_s)}</#{key}>"
            end
            output << "  </metadata>"
          end

          if data[:files] && !data[:files].empty?
            output << "  <files>"
            data[:files].each do |file|
              output << "    <file path=\"#{escape_xml(file[:path])}\" size=\"#{file[:size]}\">"
              # NOTE(review): CDATA cannot legally contain "]]>"; content
              # including that sequence would break the document — confirm
              # upstream sanitization.
              output << "      <content><![CDATA[#{file[:content]}]]></content>"
              output << "    </file>"
            end
            output << "  </files>"
          end

          if data[:commands] && !data[:commands].empty?
            output << "  <commands>"
            data[:commands].each do |cmd|
              output << "    <command name=\"#{escape_xml(cmd[:command])}\" success=\"#{cmd[:success]}\">"
              if cmd[:output]
                output << "      <output><![CDATA[#{cmd[:output]}]]></output>"
              end
              if cmd[:error]
                output << "      <error>#{escape_xml(cmd[:error])}</error>"
              end
              output << "    </command>"
            end
            output << "  </commands>"
          end

          if data[:diffs] && !data[:diffs].empty?
            output << "  <diffs>"
            data[:diffs].each do |diff|
              output << "    <diff range=\"#{escape_xml(diff[:range])}\" success=\"#{diff[:success]}\">"
              if diff[:output]
                output << "      <output><![CDATA[#{diff[:output]}]]></output>"
              end
              if diff[:error]
                output << "      <error>#{escape_xml(diff[:error])}</error>"
              end
              output << "    </diff>"
            end
            output << "  </diffs>"
          end

          if data[:errors] && !data[:errors].empty?
            output << "  <errors>"
            data[:errors].each do |error|
              output << "    <error>#{escape_xml(error)}</error>"
            end
            output << "  </errors>"
          end

          output << "</context>"
          output.join("\n")
        end

        # Format as markdown with embedded XML blocks (hybrid format).
        # @param data [Hash] Data to format
        # @return [String] Markdown-XML formatted output
        def format_markdown_xml(data)
          # Raw-source mode (embed_document_source): emit the document
          # verbatim with XML blocks appended instead of building sections.
          if data[:content] && !data[:content].to_s.empty?
            return format_embedded_source(data)
          end

          output = []

          output << "# Context"
          output << ""

          append_metadata(output, data[:metadata])

          # Files rendered as bare XML blocks inside the markdown.
          if data[:files] && !data[:files].empty?
            output << "## Files"
            output << ""

            data[:files].each do |file|
              size_info = file[:size] ? " size=\"#{file[:size]}\"" : ""
              output << "<file path=\"#{escape_xml(file[:path])}\"#{size_info}>"
              output << file[:content]
              output << "</file>"
              output << ""
            end
          end

          if data[:commands] && !data[:commands].empty?
            output << "## Commands"
            output << ""

            data[:commands].each do |cmd|
              success_attr = cmd[:success] ? "true" : "false"
              error_attr = cmd[:error] ? " error=\"#{escape_xml(cmd[:error])}\"" : ""

              output << "<output command=\"#{escape_xml(cmd[:command])}\" success=\"#{success_attr}\"#{error_attr}>"
              output << ""
              output << cmd[:output] if cmd[:output]
              output << "</output>"
              output << ""
            end
          end

          if data[:diffs] && !data[:diffs].empty?
            output << "## Git Diffs"
            output << ""

            data[:diffs].each do |diff|
              success_attr = diff[:success] ? "true" : "false"
              error_attr = diff[:error] ? " error=\"#{escape_xml(diff[:error])}\"" : ""

              output << "<diff range=\"#{escape_xml(diff[:range])}\" success=\"#{success_attr}\"#{error_attr}>"
              output << ""
              output << diff[:output] if diff[:output]
              output << "</diff>"
              output << ""
            end
          end

          if data[:errors] && !data[:errors].empty?
            output << "## Errors"
            output << ""
            data[:errors].each do |error|
              output << "- #{error}"
            end
          end

          output.join("\n").strip
        end

        private

        # Validate and normalize the requested format.
        # @param format [String] Format to validate
        # @return [String] A member of FORMATS (falls back to "markdown")
        def validate_format(format)
          normalized = format.to_s.downcase
          FORMATS.include?(normalized) ? normalized : "markdown"
        end

        # Append the metadata section shared by the markdown-based formats.
        # Original frontmatter YAML is re-emitted verbatim between ---
        # fences; otherwise metadata is rendered as a bulleted list.
        # @param output [Array<String>] output lines (mutated in place)
        # @param metadata [Hash, nil] metadata to render
        def append_metadata(output, metadata)
          return unless metadata

          if metadata[:frontmatter_yaml]
            output << "---"
            output << metadata[:frontmatter_yaml]
            output << "---"
            output << ""
          else
            output << "## Metadata"
            output << ""
            metadata.each do |key, value|
              output << "- **#{key}**: #{value}"
            end
            output << ""
          end
        end

        # Prepare data for serialization (YAML/JSON): string keys, nil
        # values removed.
        # @param data [Hash] Data to prepare
        # @return [Hash] Clean data for serialization
        def prepare_for_serialization(data)
          {
            "metadata" => data[:metadata],
            "files" => data[:files]&.map do |f|
              {
                "path" => f[:path],
                "content" => f[:content],
                "size" => f[:size]
              }.compact
            end,
            "commands" => data[:commands]&.map do |c|
              {
                "command" => c[:command],
                "output" => c[:output],
                "success" => c[:success],
                "error" => c[:error]
              }.compact
            end,
            "errors" => data[:errors],
            "stats" => data[:stats]
          }.compact
        end

        # Format with embedded source content: the raw document (including
        # frontmatter and markdown body) followed by XML blocks for files,
        # commands and diffs.
        # @param data [Hash] Data to format
        # @return [String] Raw content with appended XML blocks
        def format_embedded_source(data)
          output = []

          output << data[:content]
          output << ""

          if data[:files] && !data[:files].empty?
            output << "<files>"
            data[:files].each do |file|
              output << "<file path=\"#{escape_xml(file[:path])}\">"
              output << file[:content]
              output << "</file>"
              output << ""
            end
            output << "</files>"
            output << ""
          end

          if data[:commands] && !data[:commands].empty?
            output << "<commands>"
            data[:commands].each do |cmd|
              success_attr = cmd[:success] ? "true" : "false"
              output << "<command name=\"#{escape_xml(cmd[:command])}\" success=\"#{success_attr}\">"
              output << cmd[:output] if cmd[:output]
              if cmd[:error]
                output << "<error>#{escape_xml(cmd[:error])}</error>"
              end
              output << "</command>"
              output << ""
            end
            output << "</commands>"
            output << ""
          end

          if data[:diffs] && !data[:diffs].empty?
            output << "<diffs>"
            data[:diffs].each do |diff|
              success_attr = diff[:success] ? "true" : "false"
              output << "<diff range=\"#{escape_xml(diff[:range])}\" success=\"#{success_attr}\">"
              output << diff[:output] if diff[:output]
              if diff[:error]
                output << "<error>#{escape_xml(diff[:error])}</error>"
              end
              output << "</diff>"
              output << ""
            end
            output << "</diffs>"
            output << ""
          end

          output.join("\n").strip
        end

        # Escape the five XML special characters as entity references.
        # "&" must be replaced first so already-produced entities are not
        # double-escaped.
        # @param text [String] Text to escape
        # @return [String] Escaped text
        def escape_xml(text)
          text.to_s
              .gsub("&", "&amp;")
              .gsub("<", "&lt;")
              .gsub(">", "&gt;")
              .gsub('"', "&quot;")
              .gsub("'", "&apos;")
        end
      end
    end
  end
end
|