aidp 0.26.0 → 0.27.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/aidp/cli/checkpoint_command.rb +198 -0
- data/lib/aidp/cli/config_command.rb +71 -0
- data/lib/aidp/cli/enhanced_input.rb +2 -0
- data/lib/aidp/cli/first_run_wizard.rb +8 -7
- data/lib/aidp/cli/harness_command.rb +102 -0
- data/lib/aidp/cli/jobs_command.rb +3 -3
- data/lib/aidp/cli/mcp_dashboard.rb +4 -3
- data/lib/aidp/cli/models_command.rb +662 -0
- data/lib/aidp/cli/providers_command.rb +223 -0
- data/lib/aidp/cli.rb +35 -456
- data/lib/aidp/daemon/runner.rb +2 -2
- data/lib/aidp/debug_mixin.rb +2 -9
- data/lib/aidp/execute/async_work_loop_runner.rb +2 -1
- data/lib/aidp/execute/checkpoint_display.rb +38 -37
- data/lib/aidp/execute/interactive_repl.rb +2 -1
- data/lib/aidp/execute/prompt_manager.rb +4 -4
- data/lib/aidp/execute/work_loop_runner.rb +29 -2
- data/lib/aidp/execute/workflow_selector.rb +2 -2
- data/lib/aidp/harness/config_manager.rb +5 -5
- data/lib/aidp/harness/configuration.rb +32 -2
- data/lib/aidp/harness/enhanced_runner.rb +24 -15
- data/lib/aidp/harness/error_handler.rb +26 -5
- data/lib/aidp/harness/model_cache.rb +269 -0
- data/lib/aidp/harness/model_discovery_service.rb +259 -0
- data/lib/aidp/harness/model_registry.rb +201 -0
- data/lib/aidp/harness/runner.rb +5 -0
- data/lib/aidp/harness/thinking_depth_manager.rb +223 -7
- data/lib/aidp/message_display.rb +0 -46
- data/lib/aidp/providers/adapter.rb +2 -4
- data/lib/aidp/providers/anthropic.rb +141 -128
- data/lib/aidp/providers/base.rb +98 -2
- data/lib/aidp/providers/capability_registry.rb +0 -1
- data/lib/aidp/providers/codex.rb +49 -67
- data/lib/aidp/providers/cursor.rb +71 -59
- data/lib/aidp/providers/gemini.rb +44 -60
- data/lib/aidp/providers/github_copilot.rb +2 -66
- data/lib/aidp/providers/kilocode.rb +24 -80
- data/lib/aidp/providers/opencode.rb +24 -80
- data/lib/aidp/setup/wizard.rb +345 -8
- data/lib/aidp/version.rb +1 -1
- data/lib/aidp/watch/plan_generator.rb +93 -14
- data/lib/aidp/watch/review_processor.rb +3 -3
- data/lib/aidp/workflows/guided_agent.rb +3 -3
- data/templates/aidp-development.yml.example +2 -2
- data/templates/aidp-production.yml.example +3 -3
- metadata +9 -1
|
# frozen_string_literal: true

require "json"
require "fileutils"
require "time"   # Time.parse / Time#iso8601 are defined here, not in core Time
require "tmpdir" # Dir.tmpdir, used for the fallback cache location

module Aidp
  module Harness
    # Manages caching of discovered models with TTL support
    #
    # Cache is stored in ~/.aidp/cache/models.json
    # Each provider's models are cached separately with timestamps
    #
    # Usage:
    #   cache = ModelCache.new
    #   cache.cache_models("anthropic", models_array)
    #   cached = cache.get_cached_models("anthropic")
    #   cache.invalidate("anthropic")
    class ModelCache
      class CacheError < StandardError; end

      DEFAULT_TTL = 86400 # 24 hours in seconds

      attr_reader :cache_file

      # @param cache_file [String, nil] explicit path to the cache JSON file
      # @param cache_dir [String, nil] directory to place "models.json" in
      #   (ignored when cache_file is given)
      def initialize(cache_file: nil, cache_dir: nil)
        @cache_file = determine_cache_file(cache_file, cache_dir)
        # Caching is silently disabled (all writes become no-ops) when no
        # writable directory can be found.
        @cache_enabled = ensure_cache_directory

        if @cache_enabled
          Aidp.log_debug("model_cache", "initialized", cache_file: @cache_file)
        else
          Aidp.log_warn("model_cache", "cache disabled due to permission issues")
        end
      end

      # Get cached models for a provider if not expired
      #
      # @param provider [String] Provider name
      # @return [Array<Hash>, nil] Cached models or nil if expired/not found
      def get_cached_models(provider)
        cache_data = load_cache
        provider_cache = cache_data[provider]

        return nil unless provider_cache

        # A missing or unparseable timestamp is treated as a cache miss.
        cached_at = begin
          Time.parse(provider_cache["cached_at"])
        rescue
          nil
        end
        return nil unless cached_at

        ttl = provider_cache["ttl"] || DEFAULT_TTL
        expires_at = cached_at + ttl

        if Time.now > expires_at
          Aidp.log_debug("model_cache", "cache expired",
            provider: provider, cached_at: cached_at, expires_at: expires_at)
          return nil
        end

        models = provider_cache["models"]
        # Convert string keys (from JSON round-trip) to symbols for
        # consistency with fresh discovery results
        models = models.map { |m| m.transform_keys(&:to_sym) } if models
        Aidp.log_debug("model_cache", "cache hit",
          provider: provider, count: models&.size || 0)
        models
      rescue => e
        Aidp.log_error("model_cache", "failed to read cache",
          provider: provider, error: e.message)
        nil
      end

      # Cache models for a provider with TTL
      #
      # @param provider [String] Provider name
      # @param models [Array<Hash>] Models to cache
      # @param ttl [Integer] Time to live in seconds (default: 24 hours)
      # @return [Boolean] true if the cache file was written
      def cache_models(provider, models, ttl: DEFAULT_TTL)
        unless @cache_enabled
          Aidp.log_debug("model_cache", "caching disabled, skipping",
            provider: provider)
          return false
        end

        cache_data = load_cache

        cache_data[provider] = {
          "cached_at" => Time.now.iso8601,
          "ttl" => ttl,
          "models" => models
        }

        if save_cache(cache_data)
          Aidp.log_info("model_cache", "cached models",
            provider: provider, count: models.size, ttl: ttl)
          true
        else
          Aidp.log_warn("model_cache", "failed to cache models",
            provider: provider)
          false
        end
      rescue => e
        Aidp.log_error("model_cache", "error caching models",
          provider: provider, error: e.message)
        false
      end

      # Invalidate cache for a specific provider
      #
      # @param provider [String] Provider name
      # @return [Boolean] true on success
      def invalidate(provider)
        return false unless @cache_enabled

        cache_data = load_cache
        cache_data.delete(provider)
        save_cache(cache_data)
        Aidp.log_info("model_cache", "invalidated cache", provider: provider)
        true
      rescue => e
        Aidp.log_error("model_cache", "failed to invalidate cache",
          provider: provider, error: e.message)
        false
      end

      # Invalidate all cached models
      #
      # @return [Boolean] true on success
      def invalidate_all
        return false unless @cache_enabled

        save_cache({})
        Aidp.log_info("model_cache", "invalidated all caches")
        true
      rescue => e
        Aidp.log_error("model_cache", "failed to invalidate all",
          error: e.message)
        false
      end

      # Get list of providers with cached models
      #
      # @return [Array<String>] Provider names with valid (unexpired) caches
      def cached_providers
        cache_data = load_cache
        providers = []

        cache_data.each do |provider, data|
          cached_at = begin
            Time.parse(data["cached_at"])
          rescue
            nil
          end
          next unless cached_at

          ttl = data["ttl"] || DEFAULT_TTL
          expires_at = cached_at + ttl

          providers << provider if Time.now <= expires_at
        end

        providers
      rescue => e
        Aidp.log_error("model_cache", "failed to get cached providers",
          error: e.message)
        []
      end

      # Get cache statistics
      #
      # @return [Hash] Statistics about the cache (:total_providers,
      #   :cached_providers, :cache_file_size)
      def stats
        cache_data = load_cache
        file_size = begin
          File.size(@cache_file)
        rescue
          0
        end

        {
          total_providers: cache_data.size,
          cached_providers: cached_providers,
          cache_file_size: file_size
        }
      rescue => e
        Aidp.log_error("model_cache", "failed to get stats",
          error: e.message)
        {total_providers: 0, cached_providers: [], cache_file_size: 0}
      end

      private

      # Resolve the cache file path from the constructor options.
      def determine_cache_file(cache_file, cache_dir)
        return cache_file if cache_file

        if cache_dir
          File.join(cache_dir, "models.json")
        else
          default_cache_file
        end
      end

      def default_cache_file
        File.join(Dir.home, ".aidp", "cache", "models.json")
      rescue => e
        # Fallback to temp directory if home directory is not accessible
        Aidp.log_debug("model_cache", "home directory not accessible, using temp",
          error: e.message)
        File.join(Dir.tmpdir, "aidp_cache", "models.json")
      end

      # Create the cache directory, falling back to a temp location on
      # permission errors. Returns false (caching disabled) when neither
      # location is writable.
      def ensure_cache_directory
        cache_dir = File.dirname(@cache_file)
        return true if File.directory?(cache_dir)

        FileUtils.mkdir_p(cache_dir)
        true
      rescue Errno::EACCES, Errno::EPERM => e
        Aidp.log_warn("model_cache", "permission denied creating cache directory",
          cache_dir: cache_dir, error: e.message)

        # Try fallback to temp directory
        @cache_file = File.join(Dir.tmpdir, "aidp_cache", "models.json")
        fallback_dir = File.dirname(@cache_file)

        begin
          FileUtils.mkdir_p(fallback_dir) unless File.directory?(fallback_dir)
          Aidp.log_info("model_cache", "using fallback cache directory",
            cache_file: @cache_file)
          true
        rescue => fallback_error
          Aidp.log_error("model_cache", "failed to create fallback cache directory",
            error: fallback_error.message)
          false
        end
      rescue => e
        Aidp.log_error("model_cache", "failed to create cache directory",
          cache_dir: cache_dir, error: e.message)
        false
      end

      # Load the entire cache hash from disk. A corrupted or unreadable
      # file resets to an empty cache rather than raising.
      def load_cache
        return {} unless File.exist?(@cache_file)

        content = File.read(@cache_file)
        JSON.parse(content)
      rescue JSON::ParserError => e
        Aidp.log_warn("model_cache", "corrupted cache file, resetting",
          error: e.message)
        # Reset corrupted cache
        {}
      rescue => e
        Aidp.log_error("model_cache", "failed to load cache",
          error: e.message)
        {}
      end

      def save_cache(data)
        return false unless @cache_enabled

        ensure_cache_directory
        File.write(@cache_file, JSON.pretty_generate(data))
        true
      rescue => e
        Aidp.log_error("model_cache", "failed to save cache",
          error: e.message, cache_file: @cache_file)
        false
      end
    end
  end
end
# frozen_string_literal: true

require_relative "model_cache"
require_relative "model_registry"

module Aidp
  module Harness
    # Service for discovering available models from providers
    #
    # Orchestrates model discovery across multiple providers:
    # 1. Checks cache first (with TTL)
    # 2. Falls back to dynamic discovery via provider.discover_models
    # 3. Merges with static registry for comprehensive results
    # 4. Caches results for future use
    #
    # Usage:
    #   service = ModelDiscoveryService.new
    #   models = service.discover_models("anthropic")
    #   all_models = service.discover_all_models
    class ModelDiscoveryService
      attr_reader :cache, :registry

      # @param cache [ModelCache, nil] injectable cache (defaults to a new one)
      # @param registry [ModelRegistry, nil] injectable static registry
      def initialize(cache: nil, registry: nil)
        @cache = cache || ModelCache.new
        @registry = registry || ModelRegistry.new
        @provider_classes = discover_provider_classes
        Aidp.log_debug("model_discovery_service", "initialized",
          providers: @provider_classes.keys)
      end

      # Discover models for a specific provider
      #
      # @param provider [String] Provider name (e.g., "anthropic", "cursor")
      # @param use_cache [Boolean] Whether to use cached results (default: true)
      # @return [Array<Hash>] Discovered models (empty array on failure)
      def discover_models(provider, use_cache: true)
        Aidp.log_info("model_discovery_service", "discovering models",
          provider: provider, use_cache: use_cache)

        # Check cache first
        if use_cache
          cached = @cache.get_cached_models(provider)
          if cached
            Aidp.log_debug("model_discovery_service", "using cached models",
              provider: provider, count: cached.size)
            return cached
          end
        end

        # Perform discovery
        models = perform_discovery(provider)

        # Cache the results (only non-empty, so transient failures don't
        # poison the cache for a full TTL)
        @cache.cache_models(provider, models) if models.any?

        models
      rescue => e
        Aidp.log_error("model_discovery_service", "discovery failed",
          provider: provider, error: e.message, backtrace: e.backtrace.first(3))
        []
      end

      # Discover models from all available providers
      #
      # @param use_cache [Boolean] Whether to use cached results
      # @return [Hash] Hash of provider => models array (providers with no
      #   models are omitted)
      def discover_all_models(use_cache: true)
        results = {}

        @provider_classes.each_key do |provider|
          models = discover_models(provider, use_cache: use_cache)
          results[provider] = models if models.any?
        end

        Aidp.log_info("model_discovery_service", "discovered all models",
          providers: results.keys, total_models: results.values.flatten.size)
        results
      end

      # Discover models concurrently from multiple providers
      #
      # @param providers [Array<String>] List of provider names
      # @param use_cache [Boolean] Whether to use cached results
      # @return [Hash] Hash of provider => models array
      def discover_concurrent(providers, use_cache: true)
        # Concurrent::FixedThreadPool requires at least one thread; an empty
        # provider list would raise ArgumentError (previously swallowed by the
        # generic rescue below, logging a spurious error). Short-circuit.
        return {} if providers.empty?

        require "concurrent"

        results = {}
        mutex = Mutex.new

        # Create a thread pool sized to the number of providers
        pool = Concurrent::FixedThreadPool.new(providers.size)

        # Submit discovery tasks
        futures = providers.map do |provider|
          Concurrent::Future.execute(executor: pool) do
            models = discover_models(provider, use_cache: use_cache)
            mutex.synchronize { results[provider] = models }
          end
        end

        # Wait for all to complete
        futures.each(&:wait)

        pool.shutdown
        pool.wait_for_termination(30)

        Aidp.log_info("model_discovery_service", "concurrent discovery complete",
          providers: results.keys, total_models: results.values.flatten.size)
        results
      rescue LoadError => e
        # Fallback to sequential if concurrent gem not available
        Aidp.log_warn("model_discovery_service", "concurrent gem not available, using sequential",
          error: e.message)
        providers.each_with_object({}) do |provider, hash|
          hash[provider] = discover_models(provider, use_cache: use_cache)
        end
      rescue => e
        Aidp.log_error("model_discovery_service", "concurrent discovery failed",
          error: e.message)
        {}
      end

      # Get all available models (discovery + static registry)
      #
      # Combines dynamically discovered models with static registry
      #
      # @param use_cache [Boolean] Whether to use cached results
      # @return [Hash] Hash with :discovered and :registry keys
      def all_available_models(use_cache: true)
        discovered = discover_all_models(use_cache: use_cache)
        registry_families = @registry.all_families

        {
          discovered: discovered,
          registry: registry_families.map { |family| @registry.get_model_info(family) }.compact
        }
      end

      # Find which providers support a given model family
      #
      # @param family_name [String] Model family name
      # @return [Array<String>] List of provider names
      def providers_supporting(family_name)
        providers = []

        @provider_classes.each do |provider_name, class_name|
          provider_class = constantize_provider(class_name)
          next unless provider_class

          if provider_class.respond_to?(:supports_model_family?)
            providers << provider_name if provider_class.supports_model_family?(family_name)
          end
        end

        providers
      end

      # Refresh cache for all providers
      def refresh_all_caches
        @cache.invalidate_all
        discover_all_models(use_cache: false)
      end

      # Refresh cache for specific provider
      #
      # @param provider [String] Provider name
      def refresh_cache(provider)
        @cache.invalidate(provider)
        discover_models(provider, use_cache: false)
      end

      private

      # Run dynamic discovery against a single provider class, guarding each
      # precondition (known, available, implements discover_models).
      def perform_discovery(provider)
        provider_class = get_provider_class(provider)
        unless provider_class
          Aidp.log_warn("model_discovery_service", "unknown provider",
            provider: provider)
          return []
        end

        unless provider_class.respond_to?(:available?) && provider_class.available?
          Aidp.log_debug("model_discovery_service", "provider not available",
            provider: provider)
          return []
        end

        unless provider_class.respond_to?(:discover_models)
          Aidp.log_warn("model_discovery_service", "provider missing discover_models",
            provider: provider)
          return []
        end

        models = provider_class.discover_models
        Aidp.log_info("model_discovery_service", "discovered models",
          provider: provider, count: models.size)
        models
      end

      def get_provider_class(provider)
        class_name = @provider_classes[provider]
        return nil unless class_name

        constantize_provider(class_name)
      end

      # Safely resolve a fully-qualified class name; nil if not defined.
      def constantize_provider(class_name)
        parts = class_name.split("::")
        parts.reduce(Object) { |mod, name| mod.const_get(name) }
      rescue NameError => e
        Aidp.log_debug("model_discovery_service", "provider class not found",
          class: class_name, error: e.message)
        nil
      end

      # Dynamically discover all provider classes from the providers directory
      #
      # @return [Hash] Hash of provider_name => class_name
      def discover_provider_classes
        providers_dir = File.join(__dir__, "../providers")
        provider_files = Dir.glob("*.rb", base: providers_dir)

        # Exclude base classes and utility files
        excluded_files = ["base.rb", "adapter.rb", "error_taxonomy.rb", "capability_registry.rb"]
        provider_files -= excluded_files

        providers = {}

        provider_files.each do |file|
          provider_name = File.basename(file, ".rb")
          # Convert to class name (e.g., "anthropic" -> "Anthropic", "github_copilot" -> "GithubCopilot")
          class_name = provider_name.split("_").map(&:capitalize).join
          full_class_name = "Aidp::Providers::#{class_name}"

          # Try to load and verify the provider class exists
          begin
            require_relative "../providers/#{provider_name}"
            provider_class = constantize_provider(full_class_name)
            if provider_class&.respond_to?(:discover_models)
              providers[provider_name] = full_class_name
            end
          rescue => e
            # Skip providers that can't be loaded or don't implement discover_models
            if ENV["DEBUG"]
              Aidp.log_debug("model_discovery_service", "skipping provider",
                provider: provider_name, reason: e.message)
            end
          end
        end

        Aidp.log_debug("model_discovery_service", "discovered provider classes",
          count: providers.size, providers: providers.keys)
        providers
      end
    end
  end
end