ai_client 0.4.3 → 0.4.5

This diff reflects the changes between publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 602b3197f09d1a0cb0525073ee456c54e6de6f835488d36c0ebf56259b6a72e9
-  data.tar.gz: 59c7d27f2c56fff21041cdd68ed09e3f3e624b7dead6059871dfe54cfcd46e58
+  metadata.gz: 1e78ebb6a087ea0b92522d4aa7824c94aa77a11b71eddb006432f0a4e8f33551
+  data.tar.gz: 91f315cd8908f85b0e7d05b2f6744d46e962c5b752e12b804d020f8fbe11ebb7
 SHA512:
-  metadata.gz: f98d17165dc760a2a4f81293a344b4edf65521e22228e8d908c809bd0c941c7c102c4b9de71779f7b9e2587278ded7e440b9b018fd4ef7054c4ab6d0ce2c563d
-  data.tar.gz: be3e0798c3894579417907732a9fd867d6d1eeae3de9dbe2dad1e89b2eb29146978fe015784920186773373b57dfff7fa8434a853e4e60668336066e2a9e1e81
+  metadata.gz: 80c91ad5846061ac425ab8fe292b626ce5ee1515b8beeaec7daa0e036cd8fce7f19dc20bf235a9cb0c0486d92564a695eb1ca5f2a1b406e7334644f085c4e158
+  data.tar.gz: 88ab64ac1f7b6e77a5d5b44fc13ea3b592a5e032006101d5e0dfa595128aef2c83f3594582c7c74fda832c7781cb0a719a2fce677b85a135779b89a001033df7
data/CHANGELOG.md CHANGED
@@ -1,5 +1,19 @@
 ## [Unreleased]
 
+## Released
+
+### [0.4.5] - 2025-03-02
+- Added ability to obtain a list of available models from an Ollama server.
+- Added `ollama_model_exists?` method to check if a specific model is available on an Ollama server.
+- Added command-line utility `bin/list_ollama_models` to display available Ollama models.
+- Added support for configuring custom Ollama hosts via the providers configuration.
+
+### [0.4.4] - 2025-03-02
+- Added ability to obtain a list of available models from an Ollama server.
+- Added `ollama_model_exists?` method to check if a specific model is available on an Ollama server.
+- Added command-line utility `bin/list_ollama_models` to display available Ollama models.
+- Added support for configuring custom Ollama hosts via the providers configuration.
+
 ### [0.4.3] - 2025-01-05
 - updated models.yml file with latest dump from open_router
 
@@ -10,9 +24,6 @@
 - increasing test coverage
 - tweaking stuff
 
-
-## Released
-
 ### [0.4.1] - 2024-10-21
 - fixed the context problem. the chatbot method works now.
 
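The 0.4.4/0.4.5 changelog entries above describe an `ollama_model_exists?` check plus a way to list the models an Ollama server exposes. A minimal usage sketch follows; only `ollama_model_exists?` and `bin/list_ollama_models` are named in this diff, so the class-level receiver, the listing method name `ollama_available_models`, and the `chat` call are assumptions, not something this diff confirms.

```ruby
require 'ai_client'

# Hypothetical sketch: apart from ollama_model_exists?, the method names here
# are assumptions. The Ollama host defaults to http://localhost:11434 per the
# providers configuration added in this release.

# List the models the local Ollama server reports (method name assumed).
puts AiClient.ollama_available_models.inspect

# Check whether a specific model has already been pulled before using it.
if AiClient.ollama_model_exists?('llama3.3')
  client = AiClient.new('llama3.3')
  puts client.chat('Say hello from ai_client.')
else
  warn 'Model not found; run `ollama pull llama3.3` on the server first.'
end
```

The same listing is available from the command line via the new `bin/list_ollama_models` utility.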
data/Rakefile CHANGED
@@ -9,6 +9,7 @@ rescue LoadError => error
   puts error.message
 end
 
+# Register the rake tasks
 Tocer::Rake::Register.call
 
 Minitest::TestTask.create
@@ -156,13 +156,22 @@ class AiClient
       timeout: nil,
       return_raw: false,
       context_length: 5, # number of responses to add as context
-      providers: {},
+      providers: {
+        ollama: {
+          host: 'http://localhost:11434'
+        },
+        localai: {
+          host: 'http://localhost:8080'
+        }
+      },
       envar_api_key_names: {
         anthropic: ['ANTHROPIC_API_KEY'],
         google: ['GOOGLE_API_KEY'],
         mistral: ['MISTRAL_API_KEY'],
+        ollama: ['OLLAMA_API_KEY'],
         open_router: ['OPEN_ROUTER_API_KEY', 'OPENROUTER_API_KEY'],
-        openai: ['OPENAI_API_KEY']
+        openai: ['OPENAI_API_KEY'],
+        xai: ['XAI_API_KEY'],
       },
       provider_patterns: {
         anthropic: /^claude/i,
@@ -177,10 +186,10 @@ class AiClient
       default_model: {
         anthropic: 'claude-3-5-sonnet-20240620',
         openai: 'gpt-4o',
-        google: 'gemini-pro-1.5',
+        google: 'gemini-2.0-flash',
         mistral: 'mistral-large',
-        localai: 'llama3.2',
-        ollama: 'llama3.2',
+        localai: 'llama3.3',
+        ollama: 'llama3.3',
         open_router: 'auto'
       }
     )
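The configuration hunk above gives the `providers` key non-empty defaults (Ollama on `http://localhost:11434`, LocalAI on `http://localhost:8080`), adds `OLLAMA_API_KEY` and `XAI_API_KEY` to the recognized environment variables, and moves the default local models to `llama3.3` and the default Google model to `gemini-2.0-flash`. A sketch of pointing the client at a non-default Ollama host follows; the block-style `AiClient.configure` call and the `providers` accessor are assumptions about the gem's configuration API, since this diff only shows the defaults.

```ruby
require 'ai_client'

# Hypothetical override of the new provider defaults; the configure block and
# providers accessor are assumed, only the default values come from this diff.
AiClient.configure do |config|
  config.providers[:ollama][:host] = 'http://gpu-box.local:11434' # hypothetical remote host
end

# llama3.3 is the new default Ollama/LocalAI model in this release.
client = AiClient.new('llama3.3')
```

An xAI key can likewise be supplied through the `XAI_API_KEY` environment variable now that `xai` appears in `envar_api_key_names`.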