ace-support-core 0.29.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.ace-defaults/core/settings.yml +36 -0
- data/CHANGELOG.md +460 -0
- data/LICENSE +21 -0
- data/README.md +34 -0
- data/Rakefile +14 -0
- data/lib/ace/core/atoms/command_executor.rb +239 -0
- data/lib/ace/core/atoms/config_summary.rb +220 -0
- data/lib/ace/core/atoms/env_parser.rb +76 -0
- data/lib/ace/core/atoms/file_reader.rb +184 -0
- data/lib/ace/core/atoms/glob_expander.rb +175 -0
- data/lib/ace/core/atoms/process_terminator.rb +39 -0
- data/lib/ace/core/atoms/template_parser.rb +222 -0
- data/lib/ace/core/cli/config_summary_mixin.rb +55 -0
- data/lib/ace/core/cli.rb +192 -0
- data/lib/ace/core/config_discovery.rb +176 -0
- data/lib/ace/core/errors.rb +14 -0
- data/lib/ace/core/models/config_templates.rb +87 -0
- data/lib/ace/core/molecules/env_loader.rb +128 -0
- data/lib/ace/core/molecules/file_aggregator.rb +196 -0
- data/lib/ace/core/molecules/frontmatter_free_policy.rb +34 -0
- data/lib/ace/core/molecules/output_formatter.rb +433 -0
- data/lib/ace/core/molecules/prompt_cache_manager.rb +141 -0
- data/lib/ace/core/organisms/config_diff.rb +187 -0
- data/lib/ace/core/organisms/config_initializer.rb +125 -0
- data/lib/ace/core/organisms/environment_manager.rb +142 -0
- data/lib/ace/core/version.rb +7 -0
- data/lib/ace/core.rb +144 -0
- metadata +115 -0
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "pathname"

module Ace
  module Core
    module Atoms
      # Pure file reading functions with safety checks: size limits,
      # binary-content detection, and UTF-8 validation. All functions
      # return result hashes instead of raising, so callers can treat
      # failures as data.
      module FileReader
        # Default maximum file size (1MB)
        MAX_FILE_SIZE = 1_048_576

        # Extensions short-circuited as binary without sampling content.
        # NOTE(review): .svg is XML text; it is kept here so image assets
        # stay excluded, matching the original behavior.
        BINARY_EXTENSIONS = %w[
          .jpg .jpeg .png .gif .bmp .ico .webp .svg
          .pdf .doc .docx .xls .xlsx .ppt .pptx
          .zip .tar .gz .bz2 .7z .rar
          .exe .dll .so .dylib .app
          .mp3 .mp4 .avi .mov .wmv .flv
          .ttf .otf .woff .woff2 .eot
          .class .jar .war .ear
          .pyc .pyo .o .a
        ].freeze

        module_function

        # Read file with size limit.
        # @param path [String] Path to file
        # @param max_size [Integer] Maximum file size in bytes
        # @return [Hash] {success: true, content: String, size: Integer} on
        #   success, {success: false, error: String} on failure
        def read(path, max_size: MAX_FILE_SIZE)
          return {success: false, error: "Path cannot be nil"} if path.nil?

          expanded_path = File.expand_path(path)

          unless File.exist?(expanded_path)
            return {success: false, error: "File not found: #{path}"}
          end

          unless File.file?(expanded_path)
            return {success: false, error: "Not a file: #{path}"}
          end

          file_size = File.size(expanded_path)
          if file_size > max_size
            return {
              success: false,
              error: "File too large: #{file_size} bytes (max: #{max_size})"
            }
          end

          if binary?(expanded_path)
            return {
              success: false,
              error: "Binary file detected: #{path}"
            }
          end

          content = File.read(expanded_path, encoding: "UTF-8")

          # BUG FIX: File.read only *tags* the string as UTF-8; it does not
          # validate the bytes, so the Encoding rescue below never fired for
          # malformed input and invalid content was returned as success.
          unless content.valid_encoding?
            return {success: false, error: "File contains invalid UTF-8: #{path}"}
          end

          {success: true, content: content, size: file_size}
        rescue Encoding::InvalidByteSequenceError, Encoding::UndefinedConversionError
          {success: false, error: "File contains invalid UTF-8: #{path}"}
        rescue => e
          {success: false, error: "Failed to read file: #{e.message}"}
        end

        # Check if file exists and is readable
        # @param path [String] Path to file
        # @return [Boolean] true if file exists, is a regular file, and is readable
        def readable?(path)
          return false if path.nil?

          expanded_path = File.expand_path(path)
          File.exist?(expanded_path) && File.file?(expanded_path) && File.readable?(expanded_path)
        end

        # Check if file appears to be binary
        # @param path [String] Path to file
        # @return [Boolean] true if file appears to be binary
        def binary?(path)
          return false if path.nil?

          # Check extension first (cheap, no I/O)
          ext = File.extname(path).downcase
          return true if BINARY_EXTENSIONS.include?(ext)

          # Sample first 8KB of file for null bytes
          expanded_path = File.expand_path(path)
          return false unless File.exist?(expanded_path)

          sample_size = [File.size(expanded_path), 8192].min
          sample = File.read(expanded_path, sample_size, mode: "rb")

          # BUG FIX: empty files are not binary; this guard also avoids the
          # NaN produced by 0.0 / 0 in the ratio computation below.
          return false if sample.nil? || sample.empty?

          # Null bytes are a strong binary indicator
          return true if sample.include?("\x00")

          # Otherwise classify by the share of non-printable bytes
          non_printable = sample.bytes.count { |b| b < 32 || b > 126 }
          printable = sample.bytes.count { |b| b >= 32 && b <= 126 }

          # If more than 30% non-printable, consider it binary
          non_printable.to_f / (non_printable + printable) > 0.3
        rescue
          # If we can't read it, assume it might be binary
          true
        end

        # Get file metadata
        # @param path [String] Path to file
        # @return [Hash] File metadata; {exists: false, ...} when the path
        #   is nil, missing, or stat fails
        def metadata(path)
          return {exists: false} if path.nil?

          expanded_path = File.expand_path(path)

          unless File.exist?(expanded_path)
            return {exists: false, path: path}
          end

          stat = File.stat(expanded_path)

          {
            exists: true,
            path: path,
            absolute_path: expanded_path,
            size: stat.size,
            modified: stat.mtime,
            # NOTE(review): ctime is the inode *change* time on POSIX, not
            # creation time, despite the key name (kept for compatibility).
            created: stat.ctime,
            readable: File.readable?(expanded_path),
            writable: File.writable?(expanded_path),
            directory: stat.directory?,
            file: stat.file?,
            binary: binary?(expanded_path)
          }
        rescue => e
          {exists: false, path: path, error: e.message}
        end

        # Read lines from file with line limit
        # @param path [String] Path to file
        # @param limit [Integer] Maximum number of lines
        # @param offset [Integer] Starting line number (0-based)
        # @return [Hash] {success: Boolean, lines: Array, total_lines: Integer, error: String}
        def read_lines(path, limit: 100, offset: 0)
          return {success: false, error: "Path cannot be nil"} if path.nil?

          expanded_path = File.expand_path(path)

          unless File.exist?(expanded_path)
            return {success: false, error: "File not found: #{path}"}
          end

          if binary?(expanded_path)
            return {success: false, error: "Binary file detected: #{path}"}
          end

          lines = []
          total_lines = 0

          File.foreach(expanded_path, encoding: "UTF-8").with_index do |line, index|
            # BUG FIX: foreach does not validate the tagged encoding either;
            # raise into the rescue below so invalid files fail consistently
            # with #read instead of returning garbled lines as success.
            raise Encoding::InvalidByteSequenceError unless line.valid_encoding?

            total_lines = index + 1
            if index >= offset && lines.size < limit
              lines << line.chomp
            end
          end

          {
            success: true,
            lines: lines,
            total_lines: total_lines,
            offset: offset,
            limit: limit
          }
        rescue Encoding::InvalidByteSequenceError, Encoding::UndefinedConversionError
          {success: false, error: "File contains invalid UTF-8: #{path}"}
        rescue => e
          {success: false, error: "Failed to read lines: #{e.message}"}
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "pathname"

module Ace
  module Core
    module Atoms
      # Pure glob pattern expansion and file matching functions
      module GlobExpander
        module_function

        # Expand glob pattern to file paths
        # @param pattern [String] Glob pattern
        # @param base_dir [String] Base directory for relative patterns
        # @param flags [Integer] File::FNM_* flags for matching
        # @return [Array<String>] Matched file paths, sorted; relative
        #   patterns yield paths relative to base_dir
        def expand(pattern, base_dir: Dir.pwd, flags: 0)
          return [] if pattern.nil? || pattern.empty?

          # Ensure base_dir is absolute
          base_dir = File.expand_path(base_dir)

          if pattern.start_with?("/")
            # Absolute patterns are expanded and returned as-is
            Dir.glob(pattern, flags).sort
          else
            full_pattern = File.join(base_dir, pattern)
            relativize(Dir.glob(full_pattern, flags), base_dir).sort
          end
        rescue
          # Best-effort: malformed patterns simply yield no matches
          []
        end

        # Expand multiple glob patterns
        # @param patterns [Array<String>] Array of glob patterns
        # @param base_dir [String] Base directory for relative patterns
        # @param flags [Integer] File::FNM_* flags for matching
        # @return [Array<String>] Unique sorted file paths
        def expand_multiple(patterns, base_dir: Dir.pwd, flags: 0)
          return [] if patterns.nil? || patterns.empty?

          Array(patterns)
            .flat_map { |pattern| expand(pattern, base_dir: base_dir, flags: flags) }
            .uniq
            .sort
        end

        # Check if path matches any of the patterns
        # @param path [String] File path to check
        # @param patterns [Array<String>] Patterns to match against
        # @param flags [Integer] File::FNM_* flags for matching
        # @return [Boolean] true if path matches any pattern
        def matches?(path, patterns, flags: File::FNM_PATHNAME)
          return false if path.nil? || patterns.nil?

          Array(patterns).any? { |pattern| File.fnmatch(pattern, path, flags) }
        end

        # Filter paths by exclusion patterns
        # @param paths [Array<String>] Paths to filter
        # @param exclude_patterns [Array<String>] Patterns to exclude
        # @param flags [Integer] File::FNM_* flags for matching
        # @return [Array<String>] Paths not matching any exclusion pattern
        def filter_excluded(paths, exclude_patterns, flags: File::FNM_PATHNAME)
          return paths if paths.nil? || exclude_patterns.nil? || exclude_patterns.empty?

          paths.reject { |path| matches?(path, exclude_patterns, flags: flags) }
        end

        # Expand pattern with exclusions
        # @param pattern [String] Glob pattern to expand
        # @param exclude [Array<String>] Patterns to exclude
        # @param base_dir [String] Base directory
        # @return [Array<String>] Matched paths excluding excluded ones
        def expand_with_exclusions(pattern, exclude: [], base_dir: Dir.pwd)
          expanded = expand(pattern, base_dir: base_dir)
          return expanded if exclude.nil? || exclude.empty?

          filter_excluded(expanded, exclude)
        end

        # Find files recursively with pattern
        # @param pattern [String] File name pattern
        # @param base_dir [String] Starting directory
        # @param max_depth [Integer] Maximum directory depth (nil for unlimited)
        # @return [Array<String>] Found file paths, relative to base_dir
        def find_files(pattern, base_dir: Dir.pwd, max_depth: nil)
          return [] if pattern.nil?

          base_dir = File.expand_path(base_dir)

          if max_depth.nil?
            matches = Dir.glob(File.join(base_dir, "**", pattern))
            relativize(matches, base_dir).sort
          else
            # One glob per depth level: depth 0 is base_dir itself,
            # depth k inserts k "*" path components.
            matches = (0..max_depth).flat_map do |depth|
              Dir.glob(File.join(base_dir, *(["*"] * depth), pattern))
            end
            relativize(matches, base_dir).uniq.sort
          end
        end

        # Check if pattern is a glob pattern
        # @param pattern [String] Pattern to check
        # @return [Boolean] true if pattern contains glob characters
        def glob_pattern?(pattern)
          return false if pattern.nil?

          pattern.match?(/[*?\[{]/)
        end

        # Normalize path separators for current OS
        # @param path [String] Path to normalize
        # @return [String, nil] Normalized path (runs of / or \ collapsed)
        def normalize_separators(path)
          return nil if path.nil?

          path.gsub(/[\\\/]+/, File::SEPARATOR)
        end

        # Convert glob pattern to regex
        # @param pattern [String] Glob pattern
        # @return [Regexp, nil] Regular expression equivalent
        def to_regex(pattern)
          return nil if pattern.nil?

          # Escape regex metacharacters, leaving glob characters (* ?) alone
          escaped = pattern.gsub(/[.+^$()|\[\]{}\\]/) { |m| "\\#{m}" }

          # BUG FIX: replace "**/" via a placeholder so the "*" inserted for
          # it is not mangled by the single-star substitution (the original
          # chained gsubs turned "**/" into ".[^/]*/", which never matched).
          # "**/" matches zero or more whole directory components.
          placeholder = "\0"
          regex_pattern = escaped
            .gsub("**/", placeholder)
            .gsub("*", "[^/]*")        # * matches within a directory
            .tr("?", ".")              # ? matches a single character
            .gsub(placeholder, "(?:[^/]+/)*")

          # \A/\z anchor the whole string; ^/$ would match per-line
          Regexp.new("\\A#{regex_pattern}\\z")
        end

        # Map absolute paths to paths relative to base_dir, falling back to
        # the absolute path when no relative form exists (different root).
        # @api private — shared by #expand and #find_files
        def relativize(paths, base_dir)
          base = Pathname.new(base_dir)
          paths.map do |path|
            Pathname.new(path).relative_path_from(base).to_s
          rescue ArgumentError
            path
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Ace
  module Core
    module Atoms
      # Pure functions for process termination with graceful fallback to force kill
      module ProcessTerminator
        module_function

        # Terminate a process: SIGTERM first, then SIGKILL if it is still
        # alive after a short grace period. Processes that have already
        # exited or that we cannot signal are silently treated as done.
        #
        # @param pid [Integer, nil] Process ID to terminate
        # @param grace_period [Float] Seconds to wait between TERM and KILL (default: 0.1)
        # @return [Boolean] true if termination was attempted, false if pid was nil
        def terminate(pid, grace_period: 0.1)
          return false unless pid

          Process.kill("TERM", pid)  # polite request: allow graceful shutdown
          sleep(grace_period)        # give the process a moment to exit
          Process.kill(0, pid)       # probe: raises ESRCH once it is gone
          Process.kill("KILL", pid)  # still alive — force termination
          true
        rescue Errno::ESRCH, Errno::EPERM
          # Already exited, or not ours to signal — nothing more to do.
          true
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,222 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "yaml"
|
|
4
|
+
|
|
5
|
+
module Ace
|
|
6
|
+
module Core
|
|
7
|
+
module Atoms
|
|
8
|
+
# Pure template parsing functions for context configurations
|
|
9
|
+
module TemplateParser
|
|
10
|
+
# Valid template configuration keys
|
|
11
|
+
VALID_KEYS = %w[
|
|
12
|
+
files commands format embed_document_source
|
|
13
|
+
include exclude output max_lines max_size timeout
|
|
14
|
+
].freeze
|
|
15
|
+
|
|
16
|
+
module_function
|
|
17
|
+
|
|
18
|
+
# Parse template configuration from string
|
|
19
|
+
# @param content [String] Template content (YAML or markdown with embedded YAML)
|
|
20
|
+
# @return [Hash] {success: Boolean, config: Hash, error: String}
|
|
21
|
+
def parse(content)
|
|
22
|
+
return {success: false, error: "Content cannot be nil"} if content.nil?
|
|
23
|
+
return {success: false, error: "Content cannot be empty"} if content.strip.empty?
|
|
24
|
+
|
|
25
|
+
# Try to extract from markdown with tags first
|
|
26
|
+
config = extract_from_markdown(content)
|
|
27
|
+
|
|
28
|
+
# If not found, try to parse as direct YAML
|
|
29
|
+
config ||= parse_yaml(content)
|
|
30
|
+
|
|
31
|
+
if config.nil?
|
|
32
|
+
return {success: false, error: "No valid configuration found"}
|
|
33
|
+
end
|
|
34
|
+
|
|
35
|
+
validate_config(config)
|
|
36
|
+
rescue => e
|
|
37
|
+
{success: false, error: "Failed to parse template: #{e.message}"}
|
|
38
|
+
end
|
|
39
|
+
|
|
40
|
+
# Extract configuration from markdown with <context-tool-config> tags
|
|
41
|
+
# @param content [String] Markdown content
|
|
42
|
+
# @return [Hash, nil] Extracted configuration or nil
|
|
43
|
+
def extract_from_markdown(content)
|
|
44
|
+
return nil if content.nil?
|
|
45
|
+
|
|
46
|
+
# Look for <context-tool-config> block
|
|
47
|
+
pattern = /<context-tool-config>\s*\n(.*?)\n<\/context-tool-config>/m
|
|
48
|
+
match = content.match(pattern)
|
|
49
|
+
|
|
50
|
+
return nil unless match
|
|
51
|
+
|
|
52
|
+
yaml_content = match[1]
|
|
53
|
+
parse_yaml(yaml_content)
|
|
54
|
+
end
|
|
55
|
+
|
|
56
|
+
# Extract configuration from agent markdown files
|
|
57
|
+
# @param content [String] Agent markdown content
|
|
58
|
+
# @return [Hash, nil] Extracted configuration or nil
|
|
59
|
+
def extract_from_agent(content)
|
|
60
|
+
return nil if content.nil?
|
|
61
|
+
|
|
62
|
+
# Look for Context Definition section
|
|
63
|
+
context_match = content.match(/^## Context Definition\s*\n(.*?)(?=^## |\z)/m)
|
|
64
|
+
return nil unless context_match
|
|
65
|
+
|
|
66
|
+
context_section = context_match[1].strip
|
|
67
|
+
|
|
68
|
+
# Extract YAML from code block
|
|
69
|
+
yaml_match = context_section.match(/```(?:yaml|yml)?\s*\n(.*?)\n```/m)
|
|
70
|
+
return nil unless yaml_match
|
|
71
|
+
|
|
72
|
+
yaml_content = yaml_match[1]
|
|
73
|
+
parse_yaml(yaml_content)
|
|
74
|
+
end
|
|
75
|
+
|
|
76
|
+
# Parse YAML string to hash
|
|
77
|
+
# @param yaml_string [String] YAML content
|
|
78
|
+
# @return [Hash, nil] Parsed configuration or nil
|
|
79
|
+
def parse_yaml(yaml_string)
|
|
80
|
+
return nil if yaml_string.nil? || yaml_string.strip.empty?
|
|
81
|
+
|
|
82
|
+
result = YAML.safe_load(yaml_string, permitted_classes: [Symbol])
|
|
83
|
+
|
|
84
|
+
# Ensure it's a hash
|
|
85
|
+
result.is_a?(Hash) ? stringify_keys(result) : nil
|
|
86
|
+
rescue Psych::SyntaxError
|
|
87
|
+
nil
|
|
88
|
+
end
|
|
89
|
+
|
|
90
|
+
# Validate configuration structure
|
|
91
|
+
# @param config [Hash] Configuration to validate
|
|
92
|
+
# @return [Hash] {success: Boolean, config: Hash, error: String}
|
|
93
|
+
def validate_config(config)
|
|
94
|
+
return {success: false, error: "Config must be a Hash"} unless config.is_a?(Hash)
|
|
95
|
+
|
|
96
|
+
# Check for unknown keys
|
|
97
|
+
unknown_keys = config.keys - VALID_KEYS
|
|
98
|
+
unless unknown_keys.empty?
|
|
99
|
+
return {
|
|
100
|
+
success: false,
|
|
101
|
+
error: "Unknown configuration keys: #{unknown_keys.join(", ")}"
|
|
102
|
+
}
|
|
103
|
+
end
|
|
104
|
+
|
|
105
|
+
# Normalize arrays
|
|
106
|
+
normalized = normalize_config(config)
|
|
107
|
+
|
|
108
|
+
# Validate required content
|
|
109
|
+
if normalized["files"].empty? && normalized["commands"].empty? &&
|
|
110
|
+
normalized["include"].empty?
|
|
111
|
+
return {
|
|
112
|
+
success: false,
|
|
113
|
+
error: "Configuration must specify 'files', 'commands', or 'include'"
|
|
114
|
+
}
|
|
115
|
+
end
|
|
116
|
+
|
|
117
|
+
{success: true, config: normalized}
|
|
118
|
+
end
|
|
119
|
+
|
|
120
|
+
# Normalize configuration values
|
|
121
|
+
# @param config [Hash] Configuration to normalize
|
|
122
|
+
# @return [Hash] Normalized configuration
|
|
123
|
+
def normalize_config(config)
|
|
124
|
+
{
|
|
125
|
+
"files" => to_array(config["files"]),
|
|
126
|
+
"commands" => to_array(config["commands"]),
|
|
127
|
+
"include" => to_array(config["include"]),
|
|
128
|
+
"exclude" => to_array(config["exclude"]),
|
|
129
|
+
"format" => config["format"],
|
|
130
|
+
"embed_document_source" => config["embed_document_source"],
|
|
131
|
+
"output" => config["output"],
|
|
132
|
+
"max_lines" => config["max_lines"],
|
|
133
|
+
"max_size" => config["max_size"],
|
|
134
|
+
"timeout" => config["timeout"]
|
|
135
|
+
}.compact
|
|
136
|
+
end
|
|
137
|
+
|
|
138
|
+
# Convert value to array
|
|
139
|
+
# @param value [nil, String, Array] Value to convert
|
|
140
|
+
# @return [Array] Array of strings
|
|
141
|
+
def to_array(value)
|
|
142
|
+
case value
|
|
143
|
+
when nil
|
|
144
|
+
[]
|
|
145
|
+
when Array
|
|
146
|
+
value.map(&:to_s)
|
|
147
|
+
when String
|
|
148
|
+
[value]
|
|
149
|
+
else
|
|
150
|
+
[value.to_s]
|
|
151
|
+
end
|
|
152
|
+
end
|
|
153
|
+
|
|
154
|
+
# Merge multiple configurations
|
|
155
|
+
# @param configs [Array<Hash>] Configurations to merge
|
|
156
|
+
# @return [Hash] Merged configuration
|
|
157
|
+
def merge_configs(*configs)
|
|
158
|
+
configs = configs.flatten.compact
|
|
159
|
+
|
|
160
|
+
return {} if configs.empty?
|
|
161
|
+
|
|
162
|
+
result = {
|
|
163
|
+
"files" => [],
|
|
164
|
+
"commands" => [],
|
|
165
|
+
"include" => [],
|
|
166
|
+
"exclude" => []
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
configs.each do |config|
|
|
170
|
+
next unless config.is_a?(Hash)
|
|
171
|
+
|
|
172
|
+
# Concatenate arrays
|
|
173
|
+
%w[files commands include exclude].each do |key|
|
|
174
|
+
result[key].concat(to_array(config[key]))
|
|
175
|
+
end
|
|
176
|
+
|
|
177
|
+
# Take last non-nil value for other keys
|
|
178
|
+
%w[format embed_document_source output max_lines max_size timeout].each do |key|
|
|
179
|
+
result[key] = config[key] if config.key?(key)
|
|
180
|
+
end
|
|
181
|
+
end
|
|
182
|
+
|
|
183
|
+
# Remove duplicates from arrays
|
|
184
|
+
%w[files commands include exclude].each do |key|
|
|
185
|
+
result[key].uniq!
|
|
186
|
+
end
|
|
187
|
+
|
|
188
|
+
result.compact
|
|
189
|
+
end
|
|
190
|
+
|
|
191
|
+
# Check if content appears to be a template
|
|
192
|
+
# @param content [String] Content to check
|
|
193
|
+
# @return [Boolean] true if content looks like a template
|
|
194
|
+
def template?(content)
|
|
195
|
+
return false if content.nil?
|
|
196
|
+
|
|
197
|
+
# Check for various template indicators
|
|
198
|
+
content.include?("<context-tool-config>") ||
|
|
199
|
+
content.match?(/^files:\s*$/m) ||
|
|
200
|
+
content.match?(/^commands:\s*$/m) ||
|
|
201
|
+
content.match?(/^include:\s*$/m) ||
|
|
202
|
+
content.match?(/^## Context Definition/m)
|
|
203
|
+
end
|
|
204
|
+
|
|
205
|
+
private
|
|
206
|
+
|
|
207
|
+
# Convert all keys to strings recursively
|
|
208
|
+
# @param hash [Hash] Hash with potentially mixed keys
|
|
209
|
+
# @return [Hash] Hash with string keys
|
|
210
|
+
module_function
|
|
211
|
+
|
|
212
|
+
def stringify_keys(hash)
|
|
213
|
+
return hash unless hash.is_a?(Hash)
|
|
214
|
+
|
|
215
|
+
hash.each_with_object({}) do |(key, value), result|
|
|
216
|
+
result[key.to_s] = value.is_a?(Hash) ? stringify_keys(value) : value
|
|
217
|
+
end
|
|
218
|
+
end
|
|
219
|
+
end
|
|
220
|
+
end
|
|
221
|
+
end
|
|
222
|
+
end
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "ace/support/cli"
require_relative "../atoms/config_summary"

module Ace
  module Core
    module CLI
      # Mixin for CLI commands that want to print a summary of the
      # effective configuration (defaults, config file, CLI options).
      # Including classes supply the config sources via #gem_config and
      # #gem_defaults hooks.
      module ConfigSummaryMixin
        include Ace::Support::Cli::Base

        # Print the configuration summary for +command_name+, unless the
        # user asked for quiet output.
        # @param command_name [String] Name of the running command
        # @param options [Hash] Parsed CLI options
        # @param summary_keys [Array, nil] Optional subset of keys to show
        def display_config_summary(command_name, options, summary_keys: nil)
          return if quiet?(options)

          Ace::Core::Atoms::ConfigSummary.display(
            command: command_name,
            options: options,
            config: gem_config,
            defaults: gem_defaults,
            summary_keys: summary_keys,
            quiet: false
          )
        end

        # @return [Boolean] true when the user passed a help flag
        def help_requested?(options)
          help?(options)
        end

        private

        # Hook: including class must return its configuration.
        def gem_config
          raise NotImplementedError, "#{self.class} must implement #gem_config"
        end

        # Hook: including class must return its default configuration.
        def gem_defaults
          raise NotImplementedError, "#{self.class} must implement #gem_defaults"
        end

        # Variant for CLI classes whose class exposes a +gem_class+ that
        # owns the configuration.
        module GemClassMixin
          include ConfigSummaryMixin

          private

          def gem_config
            self.class.gem_class.config
          end

          def gem_defaults
            self.class.gem_class.default_config
          end
        end
      end
    end
  end
end
|