aidp 0.29.0 → 0.31.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/aidp/config.rb +2 -8
- data/lib/aidp/harness/configuration.rb +1 -1
- data/lib/aidp/harness/deprecation_cache.rb +177 -0
- data/lib/aidp/harness/provider_manager.rb +36 -5
- data/lib/aidp/harness/ruby_llm_registry.rb +93 -5
- data/lib/aidp/harness/thinking_depth_manager.rb +47 -5
- data/lib/aidp/providers/anthropic.rb +186 -9
- data/lib/aidp/providers/cursor.rb +7 -1
- data/lib/aidp/setup/wizard.rb +65 -0
- data/lib/aidp/version.rb +1 -1
- data/lib/aidp/watch/auto_pr_processor.rb +86 -0
- data/lib/aidp/watch/auto_processor.rb +78 -0
- data/lib/aidp/watch/change_request_processor.rb +105 -27
- data/lib/aidp/watch/runner.rb +104 -0
- data/lib/aidp/watch/state_store.rb +37 -0
- data/lib/aidp/worktree_branch_manager.rb +147 -0
- metadata +5 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: de5176199a74d1e4992451708e3830fff889cffd526c8bc0887a0aa73a4946a9
+  data.tar.gz: 399593a4d2b8d6991d37d22f383c3cd816b5b276b42e1aa45b0a2df064a470da
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1d290891c58232da0b32ca1a8560ba211f5a5805c8aa7b24d78f1536cd4e32750ca9ee11dbb8c1b25f018ed50082c5fd52e9412f7b871981f0ff8eacef6f5e15
+  data.tar.gz: c10ee2a196880353991978cdf7a92775e13828b82452ba323c152853740a402a07bad29dad8984da1f9f0e2f650e5075bde9308ac8cc326b89de72af66652b2a
data/lib/aidp/config.rb
CHANGED
@@ -111,11 +111,10 @@ module Aidp
           model_family: "claude",
           max_tokens: 100_000,
           default_flags: ["--dangerously-skip-permissions"],
-          models: ["claude-3-5-sonnet-20241022", "claude-3-5-haiku-20241022"
+          models: ["claude-3-5-sonnet-20241022", "claude-3-5-haiku-20241022"],
           model_weights: {
             "claude-3-5-sonnet-20241022" => 3,
-            "claude-3-5-haiku-20241022" => 2
-            "claude-3-opus-20240229" => 1
+            "claude-3-5-haiku-20241022" => 2
           },
           models_config: {
             "claude-3-5-sonnet-20241022" => {
@@ -127,11 +126,6 @@ module Aidp
               flags: ["--dangerously-skip-permissions"],
               max_tokens: 200_000,
               timeout: 180
-            },
-            "claude-3-opus-20240229" => {
-              flags: ["--dangerously-skip-permissions"],
-              max_tokens: 200_000,
-              timeout: 600
             }
           },
           auth: {
@@ -1057,7 +1057,7 @@ module Aidp
       def default_thinking_config
         {
           default_tier: "mini", # Use mini tier by default for cost optimization
-          max_tier: "
+          max_tier: "pro", # Max tier rarely needed; pro is sufficient for most tasks
           allow_provider_switch: true,
           auto_escalate: true,
           escalation_threshold: 2,
data/lib/aidp/harness/deprecation_cache.rb
ADDED
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+require "json"
+require "fileutils"
+
+module Aidp
+  module Harness
+    # Manages a dynamic cache of deprecated models detected at runtime
+    # When deprecation errors are detected from provider APIs, models are
+    # added to this cache with metadata (replacement, detected date, etc.)
+    class DeprecationCache
+      class CacheError < StandardError; end
+
+      attr_reader :cache_path
+
+      def initialize(cache_path: nil, root_dir: nil)
+        @root_dir = root_dir || safe_root_dir
+        @cache_path = cache_path || default_cache_path
+        @cache_data = nil
+        ensure_cache_directory
+      end
+
+      # Add a deprecated model to the cache
+      # @param provider [String] Provider name (e.g., "anthropic")
+      # @param model_id [String] Deprecated model ID
+      # @param replacement [String, nil] Replacement model ID (if known)
+      # @param reason [String, nil] Deprecation reason/message
+      def add_deprecated_model(provider:, model_id:, replacement: nil, reason: nil)
+        load_cache unless @cache_data
+
+        @cache_data["providers"][provider] ||= {}
+        @cache_data["providers"][provider][model_id] = {
+          "deprecated_at" => Time.now.iso8601,
+          "replacement" => replacement,
+          "reason" => reason
+        }.compact
+
+        save_cache
+        Aidp.log_info("deprecation_cache", "Added deprecated model",
+          provider: provider, model: model_id, replacement: replacement)
+      end
+
+      # Check if a model is deprecated
+      # @param provider [String] Provider name
+      # @param model_id [String] Model ID to check
+      # @return [Boolean]
+      def deprecated?(provider:, model_id:)
+        load_cache unless @cache_data
+        @cache_data.dig("providers", provider, model_id) != nil
+      end
+
+      # Get replacement model for a deprecated model
+      # @param provider [String] Provider name
+      # @param model_id [String] Deprecated model ID
+      # @return [String, nil] Replacement model ID or nil
+      def replacement_for(provider:, model_id:)
+        load_cache unless @cache_data
+        @cache_data.dig("providers", provider, model_id, "replacement")
+      end
+
+      # Get all deprecated models for a provider
+      # @param provider [String] Provider name
+      # @return [Array<String>] List of deprecated model IDs
+      def deprecated_models(provider:)
+        load_cache unless @cache_data
+        (@cache_data.dig("providers", provider) || {}).keys
+      end
+
+      # Remove a model from the deprecated cache
+      # Useful if a model comes back or was incorrectly marked
+      # @param provider [String] Provider name
+      # @param model_id [String] Model ID to remove
+      def remove_deprecated_model(provider:, model_id:)
+        load_cache unless @cache_data
+        return unless @cache_data.dig("providers", provider, model_id)
+
+        @cache_data["providers"][provider].delete(model_id)
+        @cache_data["providers"].delete(provider) if @cache_data["providers"][provider].empty?
+
+        save_cache
+        Aidp.log_info("deprecation_cache", "Removed deprecated model",
+          provider: provider, model: model_id)
+      end
+
+      # Get full deprecation info for a model
+      # @param provider [String] Provider name
+      # @param model_id [String] Model ID
+      # @return [Hash, nil] Deprecation metadata or nil
+      def info(provider:, model_id:)
+        load_cache unless @cache_data
+        @cache_data.dig("providers", provider, model_id)
+      end
+
+      # Clear all cached deprecations
+      def clear!
+        @cache_data = default_cache_structure
+        save_cache
+        Aidp.log_info("deprecation_cache", "Cleared all deprecations")
+      end
+
+      # Get cache statistics
+      # @return [Hash] Statistics about cached deprecations
+      def stats
+        load_cache unless @cache_data
+        {
+          providers: @cache_data["providers"].keys.sort,
+          total_deprecated: @cache_data["providers"].sum { |_, models| models.size },
+          by_provider: @cache_data["providers"].transform_values(&:size)
+        }
+      end
+
+      private
+
+      # Get a safe root directory for the cache
+      # Uses Dir.pwd if writable, otherwise falls back to tmpdir
+      def safe_root_dir
+        pwd = Dir.pwd
+        aidp_dir = File.join(pwd, ".aidp")
+
+        # Try to create the directory to test writability
+        begin
+          FileUtils.mkdir_p(aidp_dir) unless File.exist?(aidp_dir)
+          pwd
+        rescue Errno::EACCES, Errno::EROFS, Errno::EPERM
+          # Permission denied or read-only filesystem - use temp directory
+          require "tmpdir"
+          Dir.tmpdir
+        end
+      end
+
+      def default_cache_path
+        File.join(@root_dir, ".aidp", "deprecated_models.json")
+      end
+
+      def ensure_cache_directory
+        dir = File.dirname(@cache_path)
+        FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+      end
+
+      def load_cache
+        if File.exist?(@cache_path)
+          @cache_data = JSON.parse(File.read(@cache_path))
+          validate_cache_structure
+        else
+          @cache_data = default_cache_structure
+        end
+      rescue JSON::ParserError => e
+        Aidp.log_warn("deprecation_cache", "Invalid cache file, resetting",
+          error: e.message, path: @cache_path)
+        @cache_data = default_cache_structure
+      end
+
+      def save_cache
+        File.write(@cache_path, JSON.pretty_generate(@cache_data))
+      rescue => e
+        Aidp.log_error("deprecation_cache", "Failed to save cache",
+          error: e.message, path: @cache_path)
+        raise CacheError, "Failed to save deprecation cache: #{e.message}"
+      end
+
+      def default_cache_structure
+        {
+          "version" => "1.0",
+          "updated_at" => Time.now.iso8601,
+          "providers" => {}
+        }
+      end
+
+      def validate_cache_structure
+        unless @cache_data.is_a?(Hash) && @cache_data["providers"].is_a?(Hash)
+          Aidp.log_warn("deprecation_cache", "Invalid cache structure, resetting")
+          @cache_data = default_cache_structure
+        end
+      end
+    end
+  end
+end
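The new DeprecationCache persists runtime deprecation detections to .aidp/deprecated_models.json. A minimal usage sketch, assuming the gem is installed; the require path and explicit cache_path are illustrative choices for the example:

# Sketch only: assumes aidp is installed and the require path matches the file location above.
require "aidp/harness/deprecation_cache"

cache = Aidp::Harness::DeprecationCache.new(cache_path: "/tmp/deprecated_models.json")

# Record a deprecation detected from a provider API error
cache.add_deprecated_model(
  provider: "anthropic",
  model_id: "claude-3-opus-20240229",
  replacement: "claude-3-5-sonnet-20241022",
  reason: "provider returned a deprecation error"
)

cache.deprecated?(provider: "anthropic", model_id: "claude-3-opus-20240229")      # => true
cache.replacement_for(provider: "anthropic", model_id: "claude-3-opus-20240229")  # => "claude-3-5-sonnet-20241022"
cache.stats  # => {providers: ["anthropic"], total_deprecated: 1, by_provider: {"anthropic" => 1}}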
data/lib/aidp/harness/provider_manager.rb
CHANGED
@@ -1394,9 +1394,9 @@ module Aidp
 
     # Execute a prompt with a specific provider
     def execute_with_provider(provider_type, prompt, options = {})
-      # Extract model
+      # Extract model from options if provided
       model_name = options.delete(:model)
-
+      retry_on_rate_limit = options.delete(:retry_on_rate_limit) != false # Default true
 
       # Create provider factory instance
       provider_factory = ProviderFactory.new
@@ -1415,11 +1415,10 @@ module Aidp
       Aidp.logger.debug("provider_manager", "Executing with provider",
         provider: provider_type,
         model: model_name,
-        tier: tier,
         prompt_length: prompt.length)
 
-      # Execute the prompt with the provider
-      result = provider.send_message(prompt: prompt, session: nil
+      # Execute the prompt with the provider
+      result = provider.send_message(prompt: prompt, session: nil)
 
       # Return structured result
       {
@@ -1436,6 +1435,38 @@ module Aidp
       }
     rescue => e
       log_rescue(e, component: "provider_manager", action: "execute_with_provider", fallback: "error_result", provider: provider_type, model: model_name, prompt_length: prompt.length)
+
+      # Detect rate limit / quota errors and attempt fallback
+      error_message = e.message.to_s.downcase
+      is_rate_limit = error_message.include?("rate limit") ||
+        error_message.include?("quota") ||
+        error_message.include?("limit reached") ||
+        error_message.include?("resource exhausted") ||
+        error_message.include?("too many requests")
+
+      if is_rate_limit && retry_on_rate_limit
+        Aidp.logger.warn("provider_manager", "Rate limit detected, attempting fallback",
+          provider: provider_type,
+          model: model_name,
+          error: e.message)
+
+        # Attempt to switch to fallback provider
+        fallback_provider = switch_provider_for_error("rate_limit", {
+          original_provider: provider_type,
+          model: model_name,
+          error_message: e.message
+        })
+
+        if fallback_provider && fallback_provider != provider_type
+          Aidp.logger.info("provider_manager", "Retrying with fallback provider",
+            original: provider_type,
+            fallback: fallback_provider)
+
+          # Retry with fallback provider (disable retry to prevent infinite loop)
+          return execute_with_provider(fallback_provider, prompt, options.merge(retry_on_rate_limit: false))
+        end
+      end
+
       # Return error result
       {
         status: "error",
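The rate-limit fallback above keys off a substring heuristic over the exception message before asking switch_provider_for_error for an alternative. A standalone sketch of that classification step (the helper name is hypothetical, not part of the gem's API):

# Hypothetical helper reproducing the detection markers used in execute_with_provider.
RATE_LIMIT_MARKERS = ["rate limit", "quota", "limit reached", "resource exhausted", "too many requests"].freeze

def rate_limit_error?(error)
  message = error.message.to_s.downcase
  RATE_LIMIT_MARKERS.any? { |marker| message.include?(marker) }
end

rate_limit_error?(StandardError.new("429 Too Many Requests"))  # => true
rate_limit_error?(StandardError.new("invalid api key"))        # => false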
data/lib/aidp/harness/ruby_llm_registry.rb
CHANGED
@@ -1,6 +1,7 @@
 # frozen_string_literal: true
 
 require "ruby_llm"
+require_relative "deprecation_cache"
 
 module Aidp
   module Harness
@@ -26,6 +27,11 @@ module Aidp
         "openrouter" => "openrouter"
       }.freeze
 
+      # Get deprecation cache instance (lazy loaded)
+      def deprecation_cache
+        @deprecation_cache ||= Aidp::Harness::DeprecationCache.new
+      end
+
       # Tier classification based on model characteristics
       # These are heuristics since ruby_llm doesn't classify tiers
       TIER_CLASSIFICATION = {
@@ -59,7 +65,8 @@ module Aidp
         standard: ->(model) { true }
       }.freeze
 
-      def initialize
+      def initialize(deprecation_cache: nil)
+        @deprecation_cache = deprecation_cache
         @models = RubyLLM::Models.instance.instance_variable_get(:@models)
         @index_by_id = @models.to_h { |m| [m.id, m] }
 
@@ -73,11 +80,18 @@ module Aidp
       #
       # @param model_name [String] Model name (e.g., "claude-3-5-haiku" or "claude-3-5-haiku-20241022")
       # @param provider [String, nil] Optional AIDP provider filter
+      # @param skip_deprecated [Boolean] Skip deprecated models (default: true)
       # @return [String, nil] Canonical model ID for API calls, or nil if not found
-      def resolve_model(model_name, provider: nil)
+      def resolve_model(model_name, provider: nil, skip_deprecated: true)
         # Map AIDP provider to registry provider if filtering
         registry_provider = provider ? PROVIDER_NAME_MAPPING[provider] : nil
 
+        # Check if model is deprecated
+        if skip_deprecated && model_deprecated?(model_name, registry_provider)
+          Aidp.log_warn("ruby_llm_registry", "skipping deprecated model", model: model_name, provider: provider)
+          return nil
+        end
+
         # Try exact match first
         model = @index_by_id[model_name]
         return model.id if model && (registry_provider.nil? || model.provider.to_s == registry_provider)
@@ -88,13 +102,20 @@ module Aidp
           # Filter by provider if specified
           family_models = family_models.select { |m| m.provider.to_s == registry_provider } if registry_provider
 
+          # Filter out deprecated models if requested
+          if skip_deprecated
+            family_models = family_models.reject do |m|
+              deprecation_cache.deprecated?(provider: registry_provider, model_id: m.id.to_s)
+            end
+          end
+
           # Return the latest version (first non-"latest" model, or the latest one)
           model = family_models.reject { |m| m.id.to_s.include?("-latest") }.first || family_models.first
           return model.id if model
         end
 
         # Try fuzzy matching for common patterns
-        fuzzy_match = find_fuzzy_match(model_name, registry_provider)
+        fuzzy_match = find_fuzzy_match(model_name, registry_provider, skip_deprecated: skip_deprecated)
         return fuzzy_match.id if fuzzy_match
 
         Aidp.log_warn("ruby_llm_registry", "model not found", model: model_name, provider: provider)
@@ -124,8 +145,9 @@ module Aidp
       #
       # @param tier [String, Symbol] The tier name (mini, standard, advanced)
       # @param provider [String, nil] Optional AIDP provider filter
+      # @param skip_deprecated [Boolean] Skip deprecated models (default: true)
       # @return [Array<String>] List of model IDs for the tier
-      def models_for_tier(tier, provider: nil)
+      def models_for_tier(tier, provider: nil, skip_deprecated: true)
         tier_sym = tier.to_sym
         classifier = TIER_CLASSIFICATION[tier_sym]
 
@@ -152,6 +174,11 @@ module Aidp
           end
         end
 
+        # Filter out deprecated models if requested
+        if skip_deprecated
+          models.reject! { |m| deprecation_cache.deprecated?(provider: registry_provider, model_id: m.id.to_s) }
+        end
+
         model_ids = models.map(&:id).uniq
         Aidp.log_debug("ruby_llm_registry", "found models for tier",
           tier: tier, provider: provider, count: model_ids.size)
@@ -191,6 +218,51 @@ module Aidp
         Aidp.log_info("ruby_llm_registry", "refreshed", models: @models.size)
       end
 
+      # Check if a model is deprecated
+      # @param model_id [String] The model ID to check
+      # @param provider [String, nil] The provider name (registry format)
+      # @return [Boolean] True if model is deprecated
+      def model_deprecated?(model_id, provider = nil)
+        return false unless provider
+
+        deprecation_cache.deprecated?(provider: provider, model_id: model_id.to_s)
+      end
+
+      # Find replacement for a deprecated model
+      # Returns the latest non-deprecated model in the same family/tier
+      # @param deprecated_model [String] The deprecated model ID
+      # @param provider [String, nil] The provider name (AIDP format)
+      # @return [String, nil] Replacement model ID or nil
+      def find_replacement_model(deprecated_model, provider: nil)
+        registry_provider = provider ? PROVIDER_NAME_MAPPING[provider] : nil
+        return nil unless registry_provider
+
+        # Determine tier of deprecated model
+        deprecated_info = @index_by_id[deprecated_model]
+        return nil unless deprecated_info
+
+        tier = classify_tier(deprecated_info)
+
+        # Get all non-deprecated models for this tier and provider
+        candidates = models_for_tier(tier, provider: provider, skip_deprecated: true)
+
+        # Prefer models in the same family (e.g., both "sonnet")
+        family_keyword = extract_family_keyword(deprecated_model)
+        same_family = candidates.select { |m| m.to_s.include?(family_keyword) } if family_keyword
+
+        # Return first match from same family, or first candidate overall
+        replacement = same_family&.first || candidates.first
+
+        if replacement
+          Aidp.log_info("ruby_llm_registry", "found replacement",
+            deprecated: deprecated_model,
+            replacement: replacement,
+            tier: tier)
+        end
+
+        replacement
+      end
+
       private
 
       # Build an index mapping family names to model objects
@@ -208,13 +280,18 @@ module Aidp
       end
 
       # Find a model by fuzzy matching
-      def find_fuzzy_match(model_name, provider)
+      def find_fuzzy_match(model_name, provider, skip_deprecated: true)
         # Normalize the search term
         normalized = model_name.downcase.gsub(/[^a-z0-9]/, "")
 
         candidates = @models.select do |m|
           next false if provider && m.provider.to_s != provider
 
+          # Skip deprecated if requested
+          if skip_deprecated
+            next false if deprecation_cache.deprecated?(provider: provider, model_id: m.id.to_s)
+          end
+
           # Check if model ID contains the search term
           m.id.to_s.downcase.gsub(/[^a-z0-9]/, "").include?(normalized) ||
             m.name.to_s.downcase.gsub(/[^a-z0-9]/, "").include?(normalized)
@@ -224,6 +301,17 @@ module Aidp
         candidates.min_by { |m| m.id.to_s.length }
       end
 
+      # Extract family keyword from model ID (e.g., "sonnet", "haiku", "opus")
+      def extract_family_keyword(model_id)
+        case model_id.to_s
+        when /sonnet/i then "sonnet"
+        when /haiku/i then "haiku"
+        when /opus/i then "opus"
+        when /gpt-4/i then "gpt-4"
+        when /gpt-3/i then "gpt-3"
+        end
+      end
+
       # Extract capabilities from model info
       def extract_capabilities(model)
         caps = []
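With deprecation awareness threaded through the registry, resolution can skip flagged models and fall back to the newest non-deprecated entry in a family. A sketch under the assumptions that ruby_llm's model catalog is available and that PROVIDER_NAME_MAPPING covers "anthropic"; the cache path and resolved IDs are illustrative:

# Sketch only: model IDs returned depend on the ruby_llm catalog actually loaded.
require "aidp/harness/ruby_llm_registry"

cache = Aidp::Harness::DeprecationCache.new(cache_path: "/tmp/deprecated_models.json")
cache.add_deprecated_model(provider: "anthropic", model_id: "claude-3-opus-20240229")

registry = Aidp::Harness::RubyLLMRegistry.new(deprecation_cache: cache)

# A deprecated ID resolves to nil; a family name resolves to a current version instead
registry.resolve_model("claude-3-opus-20240229", provider: "anthropic")  # => nil
registry.resolve_model("claude-3-5-haiku", provider: "anthropic")        # => e.g. "claude-3-5-haiku-20241022"

# Tier listings apply the same deprecation filter
registry.models_for_tier(:standard, provider: "anthropic", skip_deprecated: true)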
data/lib/aidp/harness/thinking_depth_manager.rb
CHANGED
@@ -160,11 +160,53 @@ module Aidp
         if configured_models.any?
           # Use first configured model for this provider and tier
           model_name = configured_models.first
-
-
-
-
-
+
+          # Check if model is deprecated and try to upgrade
+          require_relative "ruby_llm_registry" unless defined?(Aidp::Harness::RubyLLMRegistry)
+          llm_registry = Aidp::Harness::RubyLLMRegistry.new
+
+          if llm_registry.model_deprecated?(model_name, provider)
+            Aidp.log_warn("thinking_depth_manager", "Configured model is deprecated",
+              tier: tier,
+              provider: provider,
+              model: model_name)
+
+            # Try to find replacement
+            replacement = llm_registry.find_replacement_model(model_name, provider: provider)
+            if replacement
+              Aidp.log_info("thinking_depth_manager", "Auto-upgrading to non-deprecated model",
+                tier: tier,
+                provider: provider,
+                old_model: model_name,
+                new_model: replacement)
+              model_name = replacement
+            else
+              # Try next model in config list
+              non_deprecated = configured_models.find { |m| !llm_registry.model_deprecated?(m, provider) }
+              if non_deprecated
+                Aidp.log_info("thinking_depth_manager", "Using alternate configured model",
+                  tier: tier,
+                  provider: provider,
+                  skipped: model_name,
+                  selected: non_deprecated)
+                model_name = non_deprecated
+              else
+                Aidp.log_warn("thinking_depth_manager", "All configured models deprecated, falling back to catalog",
+                  tier: tier,
+                  provider: provider)
+                # Fall through to catalog selection
+                model_name = nil
+              end
+            end
+          end
+
+          if model_name
+            Aidp.log_debug("thinking_depth_manager", "Selected model from user config",
+              tier: tier,
+              provider: provider,
+              model: model_name)
+            return [provider, model_name, {}]
+          end
         end
 
         # Provider specified but has no models for this tier in config
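The selection logic above works through a cascade: keep the configured model unless it is deprecated, otherwise prefer a registry-suggested replacement, then any non-deprecated configured model, and finally return nothing so catalog selection takes over. A condensed, hypothetical restatement of that cascade (names are illustrative, not the gem's API):

# `registry` is any object responding to model_deprecated?(model, provider)
# and find_replacement_model(model, provider:), as RubyLLMRegistry does above.
def pick_configured_model(configured_models, provider, registry)
  model = configured_models.first
  return model unless registry.model_deprecated?(model, provider)

  registry.find_replacement_model(model, provider: provider) ||
    configured_models.find { |m| !registry.model_deprecated?(m, provider) }
  # nil here means "fall through to catalog selection"
end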