ace-support-models 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +162 -0
- data/LICENSE +21 -0
- data/README.md +39 -0
- data/Rakefile +13 -0
- data/exe/ace-llm-providers +19 -0
- data/exe/ace-models +23 -0
- data/lib/ace/support/models/atoms/api_fetcher.rb +76 -0
- data/lib/ace/support/models/atoms/cache_path_resolver.rb +38 -0
- data/lib/ace/support/models/atoms/file_reader.rb +43 -0
- data/lib/ace/support/models/atoms/file_writer.rb +63 -0
- data/lib/ace/support/models/atoms/json_parser.rb +38 -0
- data/lib/ace/support/models/atoms/model_filter.rb +107 -0
- data/lib/ace/support/models/atoms/model_name_canonicalizer.rb +119 -0
- data/lib/ace/support/models/atoms/provider_config_reader.rb +218 -0
- data/lib/ace/support/models/atoms/provider_config_writer.rb +230 -0
- data/lib/ace/support/models/cli/commands/cache/clear.rb +43 -0
- data/lib/ace/support/models/cli/commands/cache/diff.rb +74 -0
- data/lib/ace/support/models/cli/commands/cache/status.rb +54 -0
- data/lib/ace/support/models/cli/commands/cache/sync.rb +51 -0
- data/lib/ace/support/models/cli/commands/info.rb +33 -0
- data/lib/ace/support/models/cli/commands/models/cost.rb +54 -0
- data/lib/ace/support/models/cli/commands/models/info.rb +136 -0
- data/lib/ace/support/models/cli/commands/models/search.rb +101 -0
- data/lib/ace/support/models/cli/commands/providers/list.rb +46 -0
- data/lib/ace/support/models/cli/commands/providers/show.rb +54 -0
- data/lib/ace/support/models/cli/commands/providers/sync.rb +66 -0
- data/lib/ace/support/models/cli/commands/search.rb +35 -0
- data/lib/ace/support/models/cli/commands/sync_shortcut.rb +32 -0
- data/lib/ace/support/models/cli/providers_cli.rb +72 -0
- data/lib/ace/support/models/cli.rb +84 -0
- data/lib/ace/support/models/errors.rb +55 -0
- data/lib/ace/support/models/models/diff_result.rb +94 -0
- data/lib/ace/support/models/models/model_info.rb +129 -0
- data/lib/ace/support/models/models/pricing_info.rb +74 -0
- data/lib/ace/support/models/models/provider_info.rb +81 -0
- data/lib/ace/support/models/models.rb +97 -0
- data/lib/ace/support/models/molecules/cache_manager.rb +237 -0
- data/lib/ace/support/models/molecules/cost_calculator.rb +135 -0
- data/lib/ace/support/models/molecules/diff_generator.rb +171 -0
- data/lib/ace/support/models/molecules/model_searcher.rb +176 -0
- data/lib/ace/support/models/molecules/model_validator.rb +177 -0
- data/lib/ace/support/models/molecules/provider_sync_diff.rb +291 -0
- data/lib/ace/support/models/organisms/provider_sync_orchestrator.rb +278 -0
- data/lib/ace/support/models/organisms/sync_orchestrator.rb +108 -0
- data/lib/ace/support/models/version.rb +9 -0
- data/lib/ace/support/models.rb +3 -0
- metadata +149 -0
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "time"

module Ace
  module Support
    module Models
      module Molecules
        # Manages the local on-disk cache of API data: the current snapshot,
        # the previous snapshot (kept around for diffing), and a small
        # metadata file recording the last sync time.
        class CacheManager
          API_CACHE_FILE = "api.json"
          PREVIOUS_CACHE_FILE = "api.previous.json"
          METADATA_FILE = "metadata.json"

          attr_reader :cache_dir

          # Initialize cache manager
          # @param cache_dir [String, nil] Cache directory (default: from CachePathResolver)
          def initialize(cache_dir: nil)
            @cache_dir = cache_dir || Atoms::CachePathResolver.resolve
          end

          # Read current API cache
          # @return [Hash, nil] Parsed API data or nil
          def read
            read_json(api_cache_path)
          end

          # Read previous API cache (used when computing diffs)
          # @return [Hash, nil] Parsed previous API data or nil
          def read_previous
            read_json(previous_cache_path)
          end

          # Write API data to cache, rotating the current file to "previous"
          # @param data [Hash] API data
          # @return [Boolean] true on success
          def write(data)
            rotate_current_to_previous

            payload = Atoms::JsonParser.to_json(data, pretty: false)
            Atoms::FileWriter.write(api_cache_path, payload)

            update_metadata
            true
          end

          # Read cache metadata, falling back to defaults when missing or unparsable
          # @return [Hash] Metadata hash
          def metadata
            raw = Atoms::FileReader.read(metadata_path)
            return default_metadata unless raw

            Atoms::JsonParser.parse(raw)
          rescue ApiError
            default_metadata
          end

          # Check if cache exists
          # @return [Boolean]
          def exists?
            Atoms::FileReader.exist?(api_cache_path)
          end

          # Alias for exists? for clearer CLI usage
          # @return [Boolean]
          def cached?
            exists?
          end

          # List all providers with model counts
          # @return [Array<Hash>] Provider info hashes
          def list_providers
            data = read
            return [] unless data

            normalize_providers(data).map do |pid, pdata|
              { id: pid, model_count: normalize_models(pdata).size }
            end
          end

          # Get provider details with models
          # @param provider_id [String] Provider ID
          # @return [Hash, nil] Provider data or nil if not found
          def get_provider(provider_id)
            data = read
            return nil unless data

            pdata = normalize_providers(data)[provider_id]
            return nil unless pdata

            entries = normalize_models(pdata).map do |mid, mdata|
              {
                id: mid,
                name: mdata["name"] || mid,
                deprecated: mdata["deprecated"] == true
              }
            end

            { id: provider_id, models: entries.sort_by { |entry| entry[:id] } }
          end

          # Check if cache is fresh (less than max_age old)
          # @param max_age [Integer] Max age in seconds (default: 24 hours)
          # @return [Boolean]
          def fresh?(max_age: 86_400)
            mtime = Atoms::FileReader.mtime(api_cache_path)
            !mtime.nil? && (Time.now - mtime) < max_age
          end

          # Get last sync time
          # @return [Time, nil]
          def last_sync_at
            stamp = metadata["last_sync_at"]
            return nil unless stamp

            Time.parse(stamp)
          rescue ArgumentError
            nil
          end

          # Clear cache
          # @return [Hash] Result with status and deleted files
          def clear
            removed = [api_cache_path, previous_cache_path, metadata_path]
                      .select { |path| Atoms::FileReader.exist?(path) }
                      .map do |path|
                        Atoms::FileWriter.delete(path)
                        File.basename(path)
                      end

            {
              status: :success,
              deleted_files: removed,
              message: removed.empty? ? "Cache was already empty" : "Deleted #{removed.size} files"
            }
          end

          private

          def api_cache_path
            File.join(cache_dir, API_CACHE_FILE)
          end

          def previous_cache_path
            File.join(cache_dir, PREVIOUS_CACHE_FILE)
          end

          def metadata_path
            File.join(cache_dir, METADATA_FILE)
          end

          # Read and parse a JSON file; nil when the file does not exist
          def read_json(path)
            raw = Atoms::FileReader.read(path)
            raw && Atoms::JsonParser.parse(raw)
          end

          # Move the current snapshot aside so a diff baseline is preserved
          def rotate_current_to_previous
            return unless Atoms::FileReader.exist?(api_cache_path)

            Atoms::FileWriter.rename(api_cache_path, previous_cache_path)
          end

          def update_metadata
            meta = {
              "last_sync_at" => Time.now.utc.iso8601,
              "version" => VERSION
            }
            Atoms::FileWriter.write(metadata_path, Atoms::JsonParser.to_json(meta, pretty: true))
          end

          def default_metadata
            {
              "last_sync_at" => nil,
              "version" => VERSION
            }
          end

          # Accept either a bare provider map or data wrapped in a "providers" key
          def normalize_providers(data)
            return {} unless data.is_a?(Hash)

            wrapped = data["providers"]
            wrapped.nil? ? data : normalize_provider_collection(wrapped)
          end

          # Coerce an Array of provider hashes into an id-keyed Hash
          def normalize_provider_collection(providers)
            case providers
            when Hash
              providers
            when Array
              providers.each_with_object({}) do |entry, acc|
                next unless entry.is_a?(Hash)

                id = entry["id"]
                acc[id] = entry if id
              end
            else
              {}
            end
          end

          # Coerce a provider's models (Hash or Array form) into an id-keyed Hash
          def normalize_models(provider_data)
            collection = provider_data["models"]

            case collection
            when Hash
              collection
            when Array
              collection.each_with_object({}) do |entry, acc|
                next unless entry.is_a?(Hash)

                id = entry["id"]
                acc[id] = entry if id
              end
            else
              {}
            end
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Ace
  module Support
    module Models
      module Molecules
        # Calculates query costs based on token usage and per-model pricing
        # rates (expressed in dollars per million tokens).
        class CostCalculator
          # Initialize calculator
          # @param validator [ModelValidator, nil] Validator instance used to
          #   resolve/validate model IDs (defaults to a new ModelValidator)
          def initialize(validator: nil)
            @validator = validator || ModelValidator.new
          end

          # Calculate cost for a query
          # @param model_id [String] Model ID
          # @param input_tokens [Integer] Input token count
          # @param output_tokens [Integer] Output token count
          # @param reasoning_tokens [Integer] Reasoning token count
          # @return [Hash] Cost breakdown; when the model has no pricing data,
          #   an error hash with available: false instead
          def calculate(model_id, input_tokens:, output_tokens:, reasoning_tokens: 0)
            model = @validator.validate(model_id)
            pricing = model.pricing

            unless pricing.available?
              return {
                model_id: model.full_id,
                model_name: model.name,
                error: "No pricing data available for this model",
                available: false
              }
            end

            input_cost = calculate_component(input_tokens, pricing.input)
            output_cost = calculate_component(output_tokens, pricing.output)
            reasoning_cost = calculate_component(reasoning_tokens, pricing.reasoning)

            total = input_cost + output_cost + reasoning_cost

            {
              model_id: model.full_id,
              model_name: model.name,
              available: true,
              tokens: {
                input: input_tokens,
                output: output_tokens,
                reasoning: reasoning_tokens,
                total: input_tokens + output_tokens + reasoning_tokens
              },
              rates: {
                input: pricing.input,
                output: pricing.output,
                reasoning: pricing.reasoning
              },
              costs: {
                input: input_cost,
                output: output_cost,
                reasoning: reasoning_cost,
                total: total
              },
              formatted: {
                input: format_cost(input_cost),
                output: format_cost(output_cost),
                reasoning: format_cost(reasoning_cost),
                total: format_cost(total)
              }
            }
          end

          # Format a cost breakdown as human-readable string
          # @param result [Hash] Result from calculate
          # @return [String] Formatted multi-line summary, or the error message
          #   when the result is not available
          def format(result)
            return result[:error] unless result[:available]

            lines = []
            lines << "Model: #{result[:model_name]} (#{result[:model_id]})"
            lines << ""

            tokens = result[:tokens]
            rates = result[:rates]
            # NOTE: a stray `result[:costs]` no-op statement was removed here;
            # this method only reads :tokens, :rates and :formatted.

            if tokens[:input] > 0
              lines << "Input: #{format_tokens(tokens[:input])} tokens × $#{rates[:input]}/M = #{result[:formatted][:input]}"
            end

            if tokens[:output] > 0
              lines << "Output: #{format_tokens(tokens[:output])} tokens × $#{rates[:output]}/M = #{result[:formatted][:output]}"
            end

            # Reasoning line only when tokens were used AND the model has a rate
            if tokens[:reasoning] > 0 && rates[:reasoning]
              lines << "Reasoning: #{format_tokens(tokens[:reasoning])} tokens × $#{rates[:reasoning]}/M = #{result[:formatted][:reasoning]}"
            end

            lines << ""
            lines << "Total: #{result[:formatted][:total]}"

            lines.join("\n")
          end

          private

          # Cost of one component: tokens priced per million at the given rate.
          # Returns 0.0 when the rate is missing (nil) or no tokens were used.
          def calculate_component(tokens, rate)
            return 0.0 unless rate && tokens > 0

            (tokens / 1_000_000.0) * rate
          end

          # Format a dollar amount with precision scaled to its magnitude
          # (4 decimals below a cent, 3 below a dollar, else 2).
          def format_cost(cost)
            return "$0.00" if cost.zero?

            if cost < 0.01
              "$#{sprintf("%.4f", cost)}"
            elsif cost < 1
              "$#{sprintf("%.3f", cost)}"
            else
              "$#{sprintf("%.2f", cost)}"
            end
          end

          # Abbreviate a token count (e.g. 1500 -> "1.5K", 2_000_000 -> "2.0M")
          def format_tokens(count)
            if count >= 1_000_000
              "#{sprintf("%.1f", count / 1_000_000.0)}M"
            elsif count >= 1_000
              "#{sprintf("%.1f", count / 1_000.0)}K"
            else
              count.to_s
            end
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Set is only autoloaded on Ruby >= 3.2; require it explicitly since this
# class builds Set instances directly.
require "set"
require "time"

module Ace
  module Support
    module Models
      module Molecules
        # Generates diff between cached API versions (current vs. previous
        # snapshot as stored by CacheManager).
        class DiffGenerator
          # Model fields whose changes are reported in the diff
          TRACKED_FIELDS = %w[name cost limit status modalities].freeze

          # Initialize diff generator
          # @param cache_manager [CacheManager, nil] Cache manager instance
          def initialize(cache_manager: nil)
            @cache_manager = cache_manager || CacheManager.new
          end

          # Generate diff between current and previous cache
          # @return [Models::DiffResult] Diff result
          def generate
            current = @cache_manager.read
            previous = @cache_manager.read_previous

            # No current cache at all: empty diff stamped with "now"
            unless current
              return Models::DiffResult.new(
                current_sync_at: Time.now
              )
            end

            unless previous
              # No previous snapshot - everything is "new".
              # NOTE(review): this assumes the cached data is keyed by provider
              # id at the top level (no "providers" wrapper) - confirm against
              # CacheManager's normalization logic.
              all_models = extract_all_models(current)
              return Models::DiffResult.new(
                added_models: all_models,
                added_providers: current.keys,
                current_sync_at: @cache_manager.last_sync_at
              )
            end

            compare(previous, current)
          end

          private

          # Compare two snapshots and build a DiffResult with added/removed
          # providers and models, plus field-level updates for shared models.
          def compare(previous, current)
            added_models = []
            removed_models = []
            updated_models = []

            prev_providers = Set.new(previous.keys)
            curr_providers = Set.new(current.keys)

            # New providers
            added_providers = (curr_providers - prev_providers).to_a

            # Removed providers
            removed_providers = (prev_providers - curr_providers).to_a

            # All models from removed providers are removed
            removed_providers.each do |provider_id|
              extract_provider_models(previous, provider_id).each do |model_id|
                removed_models << model_id
              end
            end

            # All models from new providers are added
            added_providers.each do |provider_id|
              extract_provider_models(current, provider_id).each do |model_id|
                added_models << model_id
              end
            end

            # Compare models in shared providers
            (prev_providers & curr_providers).each do |provider_id|
              prev_models = previous.dig(provider_id, "models") || {}
              curr_models = current.dig(provider_id, "models") || {}

              prev_model_ids = Set.new(prev_models.keys)
              curr_model_ids = Set.new(curr_models.keys)

              # Added models
              (curr_model_ids - prev_model_ids).each do |model_id|
                added_models << "#{provider_id}:#{model_id}"
              end

              # Removed models
              (prev_model_ids - curr_model_ids).each do |model_id|
                removed_models << "#{provider_id}:#{model_id}"
              end

              # Check for updates in shared models
              (prev_model_ids & curr_model_ids).each do |model_id|
                changes = detect_changes(prev_models[model_id], curr_models[model_id])
                if changes.any?
                  updated_models << Models::ModelUpdate.new(
                    model_id: "#{provider_id}:#{model_id}",
                    changes: changes
                  )
                end
              end
            end

            Models::DiffResult.new(
              added_models: added_models.sort,
              removed_models: removed_models.sort,
              updated_models: updated_models,
              added_providers: added_providers.sort,
              removed_providers: removed_providers.sort,
              # NOTE(review): metadata records the *latest* sync time; using it
              # as previous_sync_at may mislabel the timestamp - confirm intent.
              previous_sync_at: @cache_manager.metadata["last_sync_at"] ? Time.parse(@cache_manager.metadata["last_sync_at"]) : nil,
              current_sync_at: @cache_manager.last_sync_at
            )
          end

          # All "provider:model" ids in a snapshot, sorted
          def extract_all_models(data)
            models = []
            data.each do |provider_id, provider_data|
              (provider_data["models"] || {}).each_key do |model_id|
                models << "#{provider_id}:#{model_id}"
              end
            end
            models.sort
          end

          # "provider:model" ids for a single provider ([] when unknown)
          def extract_provider_models(data, provider_id)
            provider_data = data[provider_id]
            return [] unless provider_data

            (provider_data["models"] || {}).keys.map { |m| "#{provider_id}:#{m}" }
          end

          # Compare two model hashes over TRACKED_FIELDS
          # @return [Hash{String => Array(old, new)}] e.g. {"cost.input" => [1, 2]}
          def detect_changes(prev_model, curr_model)
            changes = {}

            TRACKED_FIELDS.each do |field|
              prev_val = prev_model[field]
              curr_val = curr_model[field]

              next if prev_val == curr_val

              # Deep compare for nested objects so only changed keys are reported
              if prev_val.is_a?(Hash) && curr_val.is_a?(Hash)
                nested = detect_nested_changes(prev_val, curr_val)
                nested.each do |key, (old_v, new_v)|
                  changes["#{field}.#{key}"] = [old_v, new_v]
                end
              else
                changes[field] = [prev_val, curr_val]
              end
            end

            changes
          end

          # Key-by-key comparison over the union of both hashes' keys
          def detect_nested_changes(prev_hash, curr_hash)
            changes = {}
            all_keys = (prev_hash.keys + curr_hash.keys).uniq

            all_keys.each do |key|
              prev_val = prev_hash[key]
              curr_val = curr_hash[key]
              changes[key] = [prev_val, curr_val] unless prev_val == curr_val
            end

            changes
          end
        end
      end
    end
  end
end
|