agent-harness 0.11.1 → 0.11.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.release-please-manifest.json +1 -1
- data/CHANGELOG.md +14 -0
- data/lib/agent_harness/configuration.rb +68 -1
- data/lib/agent_harness/errors.rb +11 -0
- data/lib/agent_harness/extensions.rb +644 -0
- data/lib/agent_harness/mcp_config_loader.rb +62 -0
- data/lib/agent_harness/mcp_config_translator.rb +85 -0
- data/lib/agent_harness/mcp_server.rb +40 -14
- data/lib/agent_harness/providers/anthropic.rb +3 -79
- data/lib/agent_harness/providers/base.rb +212 -9
- data/lib/agent_harness/providers/codex.rb +32 -1
- data/lib/agent_harness/providers/mcp_config_file_support.rb +76 -0
- data/lib/agent_harness/version.rb +1 -1
- data/lib/agent_harness.rb +45 -0
- metadata +5 -1
|
@@ -0,0 +1,644 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "json"
|
|
4
|
+
|
|
5
|
+
module AgentHarness
|
|
6
|
+
module Extensions
|
|
7
|
+
# Mixin providing a recursive duplication helper for nested Array/Hash
# structures. Included privately by adapter extension classes so frozen
# internal state can be handed out as independent copies.
module DeepDupable
  private

  # Recursively copy +value+. Arrays and Hashes are rebuilt entry by
  # entry; everything else is shallow-duped. Objects that refuse to be
  # duplicated (raising TypeError) are returned unchanged.
  def deep_dup(value)
    if value.is_a?(Array)
      value.map { |item| deep_dup(item) }
    elsif value.is_a?(Hash)
      value.to_h { |key, item| [key, deep_dup(item)] }
    else
      value.dup
    end
  rescue TypeError
    value
  end
end
|
|
23
|
+
|
|
24
|
+
# Abstract superclass for harness extensions. Subclasses override the
# hook and capability methods below; the defaults are inert no-ops so a
# minimal extension need only override what it uses.
class Base
  # Extension identifier derived from the demodulized class name,
  # e.g. MyExt -> :myext.
  def name
    self.class.name.split("::").last&.downcase&.to_sym
  end

  # Optional human-readable summary.
  def description = nil

  # Optional semantic version string.
  def version = nil

  # Hook invoked before a message is sent; must return the context.
  def on_message_before(context) = context

  # Hook invoked after a response arrives; must return the context.
  def on_message_after(context) = context

  # Hook invoked when the tool list is assembled; must return the context.
  def on_tools_available(context) = context

  # Tool definitions contributed by this extension.
  def tools = []

  # MCP server definitions contributed by this extension.
  def mcp_servers = []

  # Extra system-prompt fragments contributed by this extension.
  def system_prompt_additions = []

  # Features the extension declares it cannot support.
  def unsupported_features = []

  # Provider capabilities implied by the declared tools/servers.
  def required_provider_capabilities
    [].tap do |caps|
      caps << :tool_use unless tools.empty?
      caps << :mcp unless mcp_servers.empty?
    end
  end
end
|
|
72
|
+
|
|
73
|
+
# Mutable carrier object passed through extension hooks for a single
# message exchange.
class MessageContext
  # State that hooks may rewrite in place.
  attr_accessor :prompt, :messages, :tools, :options, :response, :metadata
  # Fixed for the lifetime of the context.
  attr_reader :provider, :extensions, :mode

  def initialize(provider:, extensions:, mode:, options:, prompt: nil, messages: nil, tools: nil, response: nil,
                 metadata: {})
    # Frozen so hooks cannot alter the extension set mid-exchange.
    @extensions = extensions.freeze
    {
      provider: provider, mode: mode, options: options, prompt: prompt,
      messages: messages, tools: tools, response: response, metadata: metadata
    }.each { |ivar, value| instance_variable_set(:"@#{ivar}", value) }
  end
end
|
|
90
|
+
|
|
91
|
+
# Immutable result of checking one extension against one provider.
class CompatibilityReport
  attr_reader :extension, :provider, :missing_provider_capabilities, :unsupported_features

  def initialize(extension:, provider:, missing_provider_capabilities:, unsupported_features:)
    @extension = extension
    @provider = provider
    @missing_provider_capabilities = missing_provider_capabilities.freeze
    @unsupported_features = unsupported_features.freeze
  end

  # True when the provider offers every capability the extension needs.
  def compatible? = @missing_provider_capabilities.empty?

  # True when additionally no declared feature goes unsupported.
  def fully_supported? = compatible? && @unsupported_features.empty?

  # Plain-hash summary suitable for logging or error payloads.
  def to_h
    {
      extension: extension.name,
      provider: provider.class.provider_name,
      compatible: compatible?,
      fully_supported: fully_supported?,
      missing_provider_capabilities: missing_provider_capabilities.dup,
      unsupported_features: unsupported_features.dup
    }
  end
end
|
|
120
|
+
|
|
121
|
+
# Compatibility checks between extensions and providers.
module Compatibility
  # Capabilities implemented by the harness itself, independent of the
  # underlying provider.
  HARNESS_CAPABILITIES = {
    message_hooks: true,
    response_hooks: true,
    system_prompt_additions: true
  }.freeze

  module_function

  # Build a CompatibilityReport for +extension+ against +provider+.
  def report(provider:, extension:)
    required = Array(extension.required_provider_capabilities).map(&:to_sym)
    missing = required.reject { |capability| capability_supported?(provider, capability) }

    CompatibilityReport.new(
      extension: extension,
      provider: provider,
      missing_provider_capabilities: missing,
      unsupported_features: Array(extension.unsupported_features).map(&:to_sym)
    )
  end

  # As #report, but raises ExtensionCompatibilityError when +strict+ and
  # the extension is incompatible. Always returns the report otherwise.
  def check!(provider:, extension:, strict: true)
    compatibility = report(provider: provider, extension: extension)
    return compatibility if compatibility.compatible? || !strict

    raise ExtensionCompatibilityError.new(
      "Extension '#{extension.name}' is not compatible with provider '#{provider.class.provider_name}': " \
      "missing provider capabilities: #{compatibility.missing_provider_capabilities.inspect}",
      provider: provider.class.provider_name,
      extension: extension.name,
      report: compatibility.to_h
    )
  end

  # Whether +provider+ supports +capability+. Harness-level capabilities
  # short-circuit; :chat/:text_mode map to predicate methods; anything
  # else is looked up in provider.capabilities (coerced to boolean).
  def capability_supported?(provider, capability)
    return HARNESS_CAPABILITIES.fetch(capability) if HARNESS_CAPABILITIES.key?(capability)

    case capability
    when :chat then provider.supports_chat?
    when :text_mode then provider.supports_text_mode?
    else !!provider.capabilities[capability]
    end
  end
end
|
|
170
|
+
|
|
171
|
+
# Helpers for combining the contributions of multiple extensions.
module Composition
  module_function

  # Validate and return the extension list. Tool-conflict detection is
  # currently the only composition-time check.
  def compose(extensions)
    return [] if extensions.nil? || extensions.empty?

    detect_tool_conflicts(extensions)
    extensions
  end

  # Raise ConfigurationError when two extensions declare a tool with the
  # same name. Tools without a name are skipped.
  def detect_tool_conflicts(extensions)
    owners = {}

    extensions.each do |extension|
      extension.tools.each do |tool|
        key = tool[:name] || tool["name"]
        next if key.nil?

        if owners.key?(key)
          raise ConfigurationError,
                "Tool name conflict: '#{key}' is provided by both " \
                "'#{owners[key]}' and '#{extension.name}'"
        end

        owners[key] = extension.name
      end
    end
  end

  # All non-empty prompt fragments, in extension order.
  def merge_system_prompts(extensions)
    extensions.flat_map(&:system_prompt_additions).reject { |addition| addition.nil? || addition.empty? }
  end

  # All tool definitions, in extension order.
  def merge_tools(extensions)
    extensions.flat_map(&:tools)
  end

  # Concatenate MCP servers, rejecting duplicate server names.
  def merge_mcp_servers(extensions)
    servers = extensions.flat_map(&:mcp_servers)
    duplicates = servers.filter_map { |server| server[:name] || server["name"] }
                        .tally.select { |_, count| count > 1 }.keys

    unless duplicates.empty?
      raise ConfigurationError,
            "MCP server name conflict across extensions: #{duplicates.join(", ")}"
    end

    servers
  end
end
|
|
221
|
+
|
|
222
|
+
# Simple name-keyed store for extension instances.
class Registry
  def initialize
    @extensions = {}
  end

  # Store +extension+ under +as+ (or the extension's own name). Only
  # Base instances are accepted.
  def register(extension, as: nil)
    unless extension.is_a?(Base)
      raise ConfigurationError, "Extension must be an AgentHarness::Extensions::Base instance"
    end

    @extensions[(as || extension.name).to_sym] = extension
  end

  # Look up an extension by name; raises ConfigurationError when absent.
  def fetch(name)
    @extensions.fetch(name.to_sym) { raise ConfigurationError, "Unknown extension: #{name}" }
  end

  # Whether an extension is registered under +name+.
  def registered?(name)
    @extensions.key?(name.to_sym)
  end

  # A defensive copy of every registered extension.
  def all
    @extensions.values.dup
  end
end
|
|
250
|
+
|
|
251
|
+
# Entry point for loading extensions from disk, dispatching to the
# pi (TS/JS package) or skill (Markdown) adapter.
module Loader
  module_function

  # Load extensions from +path+, inferring the adapter unless one is
  # given explicitly. Returns the adapter's array of extensions.
  def load(path, adapter: nil)
    resolved_path = File.expand_path(path)
    adapter_name = normalize_adapter(adapter, resolved_path)

    case adapter_name
    when :pi then Adapters::Pi.load(resolved_path)
    when :skill then Adapters::Skill.load(resolved_path)
    else raise ConfigurationError, "Unknown extension adapter: #{adapter_name.inspect}"
    end
  end

  # Resolve the adapter symbol for +path+. An explicit +adapter+ wins;
  # otherwise the file type / directory layout decides.
  def normalize_adapter(adapter, path)
    return adapter.to_sym if adapter

    if File.directory?(path)
      return :pi if pi_directory?(path)

      raise ConfigurationError, "Cannot infer extension adapter for directory: #{path}"
    end

    if File.file?(path)
      extension = File.extname(path)
      return :pi if extension.match?(/\A\.(?:[jt]s|json)\z/i)
      return :skill if extension == ".md"
    end

    raise ConfigurationError, "Could not infer adapter for extension source: #{path}"
  end

  # Heuristics for recognizing a Pi-style extension directory.
  def pi_directory?(path)
    File.exist?(File.join(path, "package.json")) ||
      File.exist?(File.join(path, "index.ts")) ||
      File.exist?(File.join(path, "index.js")) ||
      File.directory?(File.join(path, "extensions")) ||
      # Detect bare Pi-style directories containing .ts/.js source files
      # even without package.json or index.ts, since Pi.load can resolve them.
      !Dir.glob(File.join(path, "*.{ts,js}")).empty?
  end

  # Best-effort scan of +directory+: load every child that looks like an
  # extension source, silently skipping unrecognized entries.
  def discover(directory)
    resolved = File.expand_path(directory)
    return [] unless File.directory?(resolved)

    Dir.glob(File.join(resolved, "*")).sort.each_with_object([]) do |child, found|
      next unless File.directory?(child) || File.file?(child)

      begin
        found.concat(load(child))
      rescue ConfigurationError
        # Not a recognizable extension source; keep scanning.
      end
    end
  end
end
|
|
310
|
+
|
|
311
|
+
module Adapters
|
|
312
|
+
# Extension implementation backed by a Pi-style TS/JS package. All
# collections are frozen at construction and duplicated on access so
# callers cannot mutate internal state.
class PiExtension < Base
  include DeepDupable

  attr_reader :name, :description, :version, :entry_paths, :source_path

  def initialize(name:, source_path:, entry_paths:, description: nil, version: nil, tools: [],
                 system_prompt_additions: [], mcp_servers: [], required_provider_capabilities: [],
                 unsupported_features: [])
    # Slug the name down to a lowercase underscore-separated symbol.
    @name = name.to_s.strip.gsub(/[^a-zA-Z0-9]+/, "_").gsub(/\A_+|_+\z/, "").downcase.to_sym
    @description = description
    @version = version
    @source_path = source_path
    @entry_paths = entry_paths.freeze
    @tools = tools.freeze
    @system_prompt_additions = system_prompt_additions.freeze
    @mcp_servers = mcp_servers.freeze
    @required_provider_capabilities = required_provider_capabilities.freeze
    @unsupported_features = unsupported_features.freeze
  end

  # Shallow copies of the tool hashes.
  def tools
    @tools.map(&:dup)
  end

  # Deep copies, since MCP server configs nest hashes/arrays.
  def mcp_servers
    @mcp_servers.map { |server| deep_dup(server) }
  end

  def system_prompt_additions
    @system_prompt_additions.dup
  end

  # Declared capabilities plus those implied by tools/servers.
  def required_provider_capabilities
    implied = []
    implied << :tool_use unless @tools.empty?
    implied << :mcp unless @mcp_servers.empty?
    (@required_provider_capabilities + implied).uniq
  end

  def unsupported_features
    @unsupported_features.dup
  end
end
|
|
355
|
+
|
|
356
|
+
# Adapter that loads Pi-style TypeScript/JavaScript extension packages.
# Metadata comes from package.json (optionally its "agent_harness"
# section); tool definitions are statically extracted from the sources.
module Pi
  module_function

  # Build a PiExtension from +path+ — a directory, a package.json, or a
  # single .ts/.js file. Returns a one-element array for Loader.
  def load(path)
    resolved_path = File.expand_path(path)
    single_file = File.file?(resolved_path) &&
                  %w[.ts .js].include?(File.extname(resolved_path)) &&
                  File.basename(resolved_path) != "package.json"

    root = resolve_root(resolved_path)
    package = load_package_json(root)
    # When a specific script file is requested, scope to that file only
    # instead of discovering all siblings in the parent directory.
    entry_paths = single_file ? [resolved_path] : discover_entry_paths(root, package)

    ext_config = package.fetch("agent_harness", {})
    tools = ext_config["tools"] || discover_tools(entry_paths)
    system_prompt_additions = Array(ext_config["system_prompt_additions"])
    mcp_servers = Array(ext_config["mcp_servers"])
    required_provider_capabilities = Array(ext_config["required_provider_capabilities"])
    # Conservatively require :tool_use when registerTool is called with
    # non-inline arguments that static extraction cannot parse.
    if !ext_config["tools"] && has_non_inline_register_tool_calls?(entry_paths)
      required_provider_capabilities |= ["tool_use"]
    end
    unsupported_features = Array(ext_config["unsupported_features"])
    unsupported_features |= infer_unsupported_features(entry_paths)

    default_name = single_file ? File.basename(resolved_path, File.extname(resolved_path)) : File.basename(root)

    [
      PiExtension.new(
        name: ext_config["name"] || package["name"] || default_name,
        description: ext_config["description"] || package["description"],
        version: ext_config["version"] || package["version"],
        tools: tools.map { |tool| normalize_tool(tool) },
        system_prompt_additions: system_prompt_additions,
        mcp_servers: mcp_servers.map { |server| normalize_mcp_server(server) },
        required_provider_capabilities: required_provider_capabilities.map(&:to_sym),
        unsupported_features: unsupported_features.map(&:to_sym),
        source_path: root,
        entry_paths: entry_paths
      )
    ]
  end

  # Map a source path to its package root directory.
  def resolve_root(path)
    if File.file?(path)
      return File.dirname(path) if File.basename(path) == "package.json" ||
                                   %w[.ts .js].include?(File.extname(path))
    end

    return path if File.directory?(path)

    raise ConfigurationError, "Unsupported pi extension source: #{path}"
  end

  # Parse package.json at +root+, returning {} when absent.
  def load_package_json(root)
    package_path = File.join(root, "package.json")
    return {} unless File.exist?(package_path)

    JSON.parse(File.read(package_path))
  rescue JSON::ParserError => e
    raise ConfigurationError, "Invalid package.json for pi extension at #{root}: #{e.message}"
  end

  # Resolve entry-point scripts, preferring an explicit pi.extensions
  # manifest over directory conventions.
  def discover_entry_paths(root, package)
    manifest_entries = Array(package.dig("pi", "extensions"))
    candidates =
      if manifest_entries.empty?
        convention_extension_paths(root)
      else
        manifest_entries.flat_map { |entry| expand_manifest_entry(root, entry) }
      end

    paths = candidates.select { |candidate| File.file?(candidate) }
    raise ConfigurationError, "No pi extension entry points found in #{root}" if paths.empty?

    paths.uniq.sort
  end

  # Conventional layout: entries live under extensions/ when it exists,
  # otherwise directly under the root.
  def convention_extension_paths(root)
    extensions_dir = File.join(root, "extensions")
    scan_root = File.directory?(extensions_dir) ? extensions_dir : root
    direct_extension_entry_paths(scan_root)
  end

  # Expand one pi.extensions manifest entry (file, directory, or glob).
  def expand_manifest_entry(root, entry)
    absolute = File.expand_path(entry, root)
    return direct_extension_entry_paths(absolute) if File.directory?(absolute)
    return Dir.glob(absolute).flat_map { |match| direct_extension_entry_paths(match) } unless File.exist?(absolute)

    direct_extension_entry_paths(absolute)
  end

  # Entry scripts found directly at +path+: top-level *.ts/*.js plus
  # index.ts/index.js inside immediate subdirectories.
  def direct_extension_entry_paths(path)
    return extension_script?(path) ? [path] : [] if File.file?(path)
    return [] unless File.directory?(path)

    entries = Dir.glob(File.join(path, "*.{ts,js}"))
    Dir.glob(File.join(path, "*")).sort.each do |child|
      next unless File.directory?(child)

      %w[index.ts index.js].each do |candidate|
        candidate_path = File.join(child, candidate)
        entries << candidate_path if File.file?(candidate_path)
      end
    end
    entries
  end

  # Whether +path+ looks like a loadable extension script.
  def extension_script?(path)
    %w[.ts .js].include?(File.extname(path))
  end

  # Statically extract inline registerTool({...}) definitions (name and
  # optional description only).
  def discover_tools(entry_paths)
    entry_paths.flat_map do |entry_path|
      source = File.read(entry_path)
      source.scan(/registerTool\s*\(\s*\{(.*?)\}\s*\)/m).filter_map do |capture|
        block = capture.first
        name = block[/name:\s*["']([^"']+)["']/, 1]
        next unless name

        description = block[/description:\s*["']([^"']+)["']/, 1]
        {name: name, description: description}.compact
      end
    end.uniq
  end

  # Detect whether any entry path contains registerTool calls that could
  # not be statically extracted as inline object literals. When true, the
  # extension should conservatively require :tool_use even if discover_tools
  # returned an empty list.
  def has_non_inline_register_tool_calls?(entry_paths)
    entry_paths.any? do |entry_path|
      source = File.read(entry_path)
      source.scan(/registerTool\s*\(/).length > source.scan(/registerTool\s*\(\s*\{/).length
    end
  end

  # Infer features the harness cannot emulate from Pi API usage.
  def infer_unsupported_features(entry_paths)
    entry_paths.each_with_object([]) do |entry_path, features|
      source = File.read(entry_path)
      features << :commands if source.include?("registerCommand")
      features << :shortcuts if source.include?("registerShortcut")
      features << :ui if source.match?(/ctx\.ui\.|setWidget|setStatus|setTitle/)
      features << :session_persistence if source.match?(/appendEntry|session_start|session_end/)
    end.uniq
  end

  # Accept hash/string/symbol tool declarations; anything else is invalid.
  def normalize_tool(tool)
    case tool
    when Hash
      tool.transform_keys(&:to_sym)
    when String, Symbol
      {name: tool.to_s}
    else
      raise ConfigurationError, "Unsupported tool definition in pi adapter: #{tool.inspect}"
    end
  end

  # MCP servers must be declared as hashes.
  def normalize_mcp_server(server)
    case server
    when Hash
      server.transform_keys(&:to_sym)
    else
      raise ConfigurationError, "Unsupported MCP server definition in pi adapter: #{server.inspect}"
    end
  end
end
|
|
539
|
+
|
|
540
|
+
# Extension implementation backed by a Markdown skill file. Internal
# collections are frozen and duplicated on read so callers cannot mutate
# state shared across reads.
class SkillExtension < Base
  include DeepDupable

  attr_reader :name, :description, :version, :source_path

  def initialize(name:, source_path:, description: nil, version: nil, tools: [],
                 system_prompt_additions: [], mcp_servers: [], required_provider_capabilities: [])
    # Slug the name down to a lowercase underscore-separated symbol.
    @name = name.to_s.strip.gsub(/[^a-zA-Z0-9]+/, "_").gsub(/\A_+|_+\z/, "").downcase.to_sym
    @description = description
    @version = version
    @source_path = source_path
    @tools = tools.freeze
    @system_prompt_additions = system_prompt_additions.freeze
    @mcp_servers = mcp_servers.freeze
    @required_provider_capabilities = required_provider_capabilities.freeze
  end

  # Shallow copies of the tool hashes.
  def tools
    @tools.map(&:dup)
  end

  # Deep copies, since MCP server configs nest hashes/arrays.
  def mcp_servers
    @mcp_servers.map { |server| deep_dup(server) }
  end

  def system_prompt_additions
    @system_prompt_additions.dup
  end

  # Declared capabilities plus those implied by tools/servers.
  def required_provider_capabilities
    implied = []
    implied << :tool_use unless @tools.empty?
    implied << :mcp unless @mcp_servers.empty?
    (@required_provider_capabilities + implied).uniq
  end
end
|
|
576
|
+
|
|
577
|
+
# Adapter that loads a single Markdown "skill" file. YAML frontmatter
# supplies metadata/tools; the Markdown body becomes a system prompt
# addition.
module Skill
  module_function

  # Build a SkillExtension from the Markdown file at +path+. Returns a
  # one-element array for Loader.
  def load(path)
    resolved = File.expand_path(path)
    raise ConfigurationError, "Skill file not found: #{resolved}" unless File.file?(resolved)
    unless File.extname(resolved) == ".md"
      raise ConfigurationError, "Skill file must be a Markdown file: #{resolved}"
    end

    frontmatter, body = parse_frontmatter(File.read(resolved))
    instructions = extract_instructions(body)

    [
      SkillExtension.new(
        name: frontmatter["name"] || File.basename(resolved, ".md"),
        description: frontmatter["description"],
        version: frontmatter["version"],
        tools: Array(frontmatter["tools"]).map { |tool| normalize_tool(tool) },
        system_prompt_additions: instructions.empty? ? [] : [instructions],
        mcp_servers: Array(frontmatter["mcp_servers"]).map { |server| normalize_mcp_server(server) },
        required_provider_capabilities: Array(frontmatter["required_provider_capabilities"]).map(&:to_sym),
        source_path: resolved
      )
    ]
  end

  # Split optional leading `--- ... ---` YAML frontmatter from the body.
  # Returns [frontmatter_hash, body]; frontmatter is {} when absent.
  def parse_frontmatter(content)
    match = content.match(/\A---\s*\n(.*?\n)---\s*\n(.*)\z/m)
    return [{}, content] unless match

    require "yaml"
    [YAML.safe_load(match[1], permitted_classes: [Symbol]) || {}, match[2]]
  rescue Psych::SyntaxError => e
    raise ConfigurationError, "Invalid YAML frontmatter in skill file: #{e.message}"
  end

  # The whole trimmed Markdown body serves as the instruction text.
  def extract_instructions(body)
    body.to_s.strip
  end

  # Accept hash/string/symbol tool declarations; anything else is invalid.
  def normalize_tool(tool)
    case tool
    when Hash
      tool.transform_keys(&:to_sym)
    when String, Symbol
      {name: tool.to_s}
    else
      raise ConfigurationError, "Unsupported tool definition in skill adapter: #{tool.inspect}"
    end
  end

  # MCP servers must be declared as hashes.
  def normalize_mcp_server(server)
    return server.transform_keys(&:to_sym) if server.is_a?(Hash)

    raise ConfigurationError, "Unsupported MCP server definition in skill adapter: #{server.inspect}"
  end
end
|
|
642
|
+
end
|
|
643
|
+
end
|
|
644
|
+
end
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "json"
|
|
4
|
+
require "yaml"
|
|
5
|
+
|
|
6
|
+
module AgentHarness
|
|
7
|
+
# Loads MCP server definitions from a JSON or YAML config file,
# interpolating ${ENV_VAR} placeholders in string values.
class McpConfigLoader
  # Matches ${NAME} placeholders (uppercase letters, digits, underscore).
  ENV_VAR_PATTERN = /\$\{([A-Z0-9_]+)\}/

  class << self
    # Parse +path+ and return an array of McpServer instances. Accepts
    # either a top-level array or a hash with a "servers"/:servers key.
    # Raises McpConfigurationError for unreadable or invalid files.
    def load_file(path)
      parsed = parse_file(path)
      servers = parsed.is_a?(Hash) ? (parsed["servers"] || parsed[:servers] || parsed) : parsed

      unless servers.is_a?(Array)
        raise McpConfigurationError,
              "MCP config file must contain a top-level servers array"
      end

      servers.map { |server| McpServer.from_hash(interpolate_env(server)) }
    end

    private

    # Read and decode the file based on its extension.
    def parse_file(path)
      extension = File.extname(path).downcase
      content = File.read(path)

      case extension
      when ".json"
        JSON.parse(content)
      when ".yml", ".yaml"
        YAML.safe_load(content, aliases: false) || {}
      else
        raise McpConfigurationError,
              "Unsupported MCP config file format '#{extension}'. Use .json, .yml, or .yaml"
      end
    rescue Errno::ENOENT => e
      raise McpConfigurationError, "MCP config file not found: #{e.message}"
    rescue JSON::ParserError, Psych::SyntaxError => e
      raise McpConfigurationError, "Failed to parse MCP config file: #{e.message}"
    end

    # Recursively substitute ${VAR} in strings; missing vars become "".
    def interpolate_env(value)
      case value
      when Hash
        value.transform_values { |entry| interpolate_env(entry) }
      when Array
        value.map { |entry| interpolate_env(entry) }
      when String
        value.gsub(ENV_VAR_PATTERN) { ENV.fetch(Regexp.last_match(1), "") }
      else
        value
      end
    end
  end
end
|
|
62
|
+
end
|