rails_ai 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec_status +96 -0
- data/AGENT_GUIDE.md +513 -0
- data/Appraisals +49 -0
- data/COMMERCIAL_LICENSE_TEMPLATE.md +92 -0
- data/FEATURES.md +204 -0
- data/LEGAL_PROTECTION_GUIDE.md +222 -0
- data/LICENSE +62 -0
- data/LICENSE_SUMMARY.md +74 -0
- data/MIT-LICENSE +62 -0
- data/PERFORMANCE.md +300 -0
- data/PROVIDERS.md +495 -0
- data/README.md +454 -0
- data/Rakefile +11 -0
- data/SPEED_OPTIMIZATIONS.md +217 -0
- data/STRUCTURE.md +139 -0
- data/USAGE_GUIDE.md +288 -0
- data/app/channels/ai_stream_channel.rb +33 -0
- data/app/components/ai/prompt_component.rb +25 -0
- data/app/controllers/concerns/ai/context_aware.rb +77 -0
- data/app/controllers/concerns/ai/streaming.rb +41 -0
- data/app/helpers/ai_helper.rb +164 -0
- data/app/jobs/ai/generate_embedding_job.rb +25 -0
- data/app/jobs/ai/generate_summary_job.rb +25 -0
- data/app/models/concerns/ai/embeddable.rb +38 -0
- data/app/views/rails_ai/dashboard/index.html.erb +51 -0
- data/config/routes.rb +19 -0
- data/lib/generators/rails_ai/install/install_generator.rb +38 -0
- data/lib/rails_ai/agents/agent_manager.rb +258 -0
- data/lib/rails_ai/agents/agent_team.rb +243 -0
- data/lib/rails_ai/agents/base_agent.rb +331 -0
- data/lib/rails_ai/agents/collaboration.rb +238 -0
- data/lib/rails_ai/agents/memory.rb +116 -0
- data/lib/rails_ai/agents/message_bus.rb +95 -0
- data/lib/rails_ai/agents/specialized_agents.rb +391 -0
- data/lib/rails_ai/agents/task_queue.rb +111 -0
- data/lib/rails_ai/cache.rb +14 -0
- data/lib/rails_ai/config.rb +40 -0
- data/lib/rails_ai/context.rb +7 -0
- data/lib/rails_ai/context_analyzer.rb +86 -0
- data/lib/rails_ai/engine.rb +48 -0
- data/lib/rails_ai/events.rb +9 -0
- data/lib/rails_ai/image_context.rb +110 -0
- data/lib/rails_ai/performance.rb +231 -0
- data/lib/rails_ai/provider.rb +8 -0
- data/lib/rails_ai/providers/anthropic_adapter.rb +256 -0
- data/lib/rails_ai/providers/base.rb +60 -0
- data/lib/rails_ai/providers/dummy_adapter.rb +29 -0
- data/lib/rails_ai/providers/gemini_adapter.rb +509 -0
- data/lib/rails_ai/providers/openai_adapter.rb +535 -0
- data/lib/rails_ai/providers/secure_anthropic_adapter.rb +206 -0
- data/lib/rails_ai/providers/secure_openai_adapter.rb +284 -0
- data/lib/rails_ai/railtie.rb +48 -0
- data/lib/rails_ai/redactor.rb +12 -0
- data/lib/rails_ai/security/api_key_manager.rb +82 -0
- data/lib/rails_ai/security/audit_logger.rb +46 -0
- data/lib/rails_ai/security/error_handler.rb +62 -0
- data/lib/rails_ai/security/input_validator.rb +176 -0
- data/lib/rails_ai/security/secure_file_handler.rb +45 -0
- data/lib/rails_ai/security/secure_http_client.rb +177 -0
- data/lib/rails_ai/security.rb +0 -0
- data/lib/rails_ai/version.rb +5 -0
- data/lib/rails_ai/window_context.rb +103 -0
- data/lib/rails_ai.rb +502 -0
- data/monitoring/ci_setup_guide.md +214 -0
- data/monitoring/enhanced_monitoring_script.rb +237 -0
- data/monitoring/google_alerts_setup.md +42 -0
- data/monitoring_log_20250921.txt +0 -0
- data/monitoring_script.rb +161 -0
- data/rails_ai.gemspec +54 -0
- data/scripts/security_scanner.rb +353 -0
- data/setup_monitoring.sh +163 -0
- data/wiki/API-Documentation.md +734 -0
- data/wiki/Architecture-Overview.md +672 -0
- data/wiki/Contributing-Guide.md +407 -0
- data/wiki/Development-Setup.md +532 -0
- data/wiki/Home.md +278 -0
- data/wiki/Installation-Guide.md +527 -0
- data/wiki/Quick-Start.md +186 -0
- data/wiki/README.md +135 -0
- data/wiki/Release-Process.md +467 -0
- metadata +385 -0
data/wiki/API-Documentation.md @@ -0,0 +1,734 @@
# API Documentation

Complete API reference for Rails AI.

## 📚 Table of Contents

- [Core Methods](#core-methods)
- [Text Operations](#text-operations)
- [Image Operations](#image-operations)
- [Video Operations](#video-operations)
- [Audio Operations](#audio-operations)
- [Context-Aware Operations](#context-aware-operations)
- [Performance Methods](#performance-methods)
- [Provider Configuration](#provider-configuration)
- [Error Handling](#error-handling)

## 🔧 Core Methods

### `RailsAi.chat(prompt_or_messages, **opts)`

Generate text using AI.

**Parameters:**
- `prompt_or_messages` (String|Array) - Text prompt or conversation messages
- `model` (String, optional) - AI model to use (default: `config.default_model`)
- `**opts` (Hash) - Additional options

**Returns:** String - AI-generated text

**Example:**
```ruby
# Simple prompt
RailsAi.chat("Write a blog post about Ruby")

# Conversation
RailsAi.chat([
  { role: "user", content: "Hello" },
  { role: "assistant", content: "Hi there!" },
  { role: "user", content: "How are you?" }
])

# Provider-specific models
RailsAi.chat("Hello", model: "gpt-4o-mini")              # OpenAI
RailsAi.chat("Hello", model: "claude-3-sonnet-20240229") # Anthropic
RailsAi.chat("Hello", model: "gemini-1.5-pro")           # Gemini
```
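
The same messages format works for conversations stored in your own tables. A minimal sketch; `Conversation` and `Message` (with `sender` and `body` columns) are hypothetical application models, not part of the gem:

```ruby
# Hypothetical app models, shown only to illustrate the messages format
messages = conversation.messages.order(:created_at).map do |message|
  {
    role: message.sender == "user" ? "user" : "assistant",
    content: message.body
  }
end

reply = RailsAi.chat(messages)
```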

### `RailsAi.stream(prompt_or_messages, **opts, &block)`

Generate text with a streaming response.

**Parameters:**
- `prompt_or_messages` (String|Array) - Text prompt or conversation messages
- `model` (String, optional) - AI model to use
- `**opts` (Hash) - Additional options
- `&block` (Proc) - Block to handle streaming tokens

**Returns:** Nil

**Example:**
```ruby
RailsAi.stream("Write a long story") do |token|
  puts token
end
```
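
One way to surface streamed tokens in the browser is plain Rails `ActionController::Live` server-sent events. The gem also ships an `Ai::Streaming` concern and an `AiStreamChannel`, whose APIs are not covered here; the sketch below uses only `RailsAi.stream` and standard Rails:

```ruby
class StoriesController < ApplicationController
  include ActionController::Live

  def create
    response.headers["Content-Type"] = "text/event-stream"

    RailsAi.stream(params[:prompt]) do |token|
      # Flush each token to the client as an SSE data frame
      response.stream.write("data: #{token}\n\n")
    end
  ensure
    response.stream.close
  end
end
```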

### `RailsAi.embed(texts, **opts)`

Generate embeddings for text.

**Parameters:**
- `texts` (String|Array) - Text(s) to embed
- `model` (String, optional) - Embedding model to use
- `**opts` (Hash) - Additional options

**Returns:** Array - Embedding vectors

**Example:**
```ruby
embeddings = RailsAi.embed(["Ruby on Rails", "Django", "Express.js"])
```
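
Embeddings are mostly useful when compared. A small sketch of cosine similarity over the returned vectors, assuming `RailsAi.embed` returns one numeric vector per input text:

```ruby
def cosine_similarity(a, b)
  dot = a.zip(b).sum { |x, y| x * y }
  dot / (Math.sqrt(a.sum { |x| x * x }) * Math.sqrt(b.sum { |x| x * x }))
end

rails_vec, django_vec = RailsAi.embed(["Ruby on Rails", "Django"])
cosine_similarity(rails_vec, django_vec) # => Float between -1.0 and 1.0
```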

## 📝 Text Operations

### `RailsAi.summarize(content, **opts)`

Summarize text content.

**Parameters:**
- `content` (String) - Text to summarize
- `**opts` (Hash) - Additional options

**Returns:** String - Summary

**Example:**
```ruby
summary = RailsAi.summarize(long_article)
```

### `RailsAi.translate(content, target_language, **opts)`

Translate text to another language.

**Parameters:**
- `content` (String) - Text to translate
- `target_language` (String) - Target language
- `**opts` (Hash) - Additional options

**Returns:** String - Translated text

**Example:**
```ruby
translation = RailsAi.translate("Hello world", "Spanish")
```

### `RailsAi.classify(content, categories, **opts)`

Classify text into categories.

**Parameters:**
- `content` (String) - Text to classify
- `categories` (Array) - Available categories
- `**opts` (Hash) - Additional options

**Returns:** String - Classification result

**Example:**
```ruby
classification = RailsAi.classify("I love this product!", ["positive", "negative", "neutral"])
```

### `RailsAi.extract_entities(content, **opts)`

Extract named entities from text.

**Parameters:**
- `content` (String) - Text to analyze
- `**opts` (Hash) - Additional options

**Returns:** String - Extracted entities

**Example:**
```ruby
entities = RailsAi.extract_entities("Apple Inc. was founded by Steve Jobs in Cupertino, California")
```

### `RailsAi.generate_code(prompt, language: "ruby", **opts)`

Generate code from a description.

**Parameters:**
- `prompt` (String) - Code description
- `language` (String, optional) - Programming language (default: "ruby")
- `**opts` (Hash) - Additional options

**Returns:** String - Generated code

**Example:**
```ruby
code = RailsAi.generate_code("Create a user authentication system", language: "ruby")
```

### `RailsAi.explain_code(code, language: "ruby", **opts)`

Explain code functionality.

**Parameters:**
- `code` (String) - Code to explain
- `language` (String, optional) - Programming language (default: "ruby")
- `**opts` (Hash) - Additional options

**Returns:** String - Code explanation

**Example:**
```ruby
explanation = RailsAi.explain_code("def hello; puts 'world'; end")
```

## 🖼️ Image Operations

### `RailsAi.generate_image(prompt, **opts)`

Generate an image from a text description.

**Parameters:**
- `prompt` (String) - Image description
- `model` (String, optional) - Image model to use (default: "dall-e-3")
- `size` (String, optional) - Image size (default: "1024x1024")
- `quality` (String, optional) - Image quality (default: "standard")
- `**opts` (Hash) - Additional options

**Returns:** String - Base64-encoded image data

**Example:**
```ruby
image = RailsAi.generate_image("A beautiful sunset over mountains")
# => "data:image/png;base64,..."

# Note: Only supported by OpenAI provider
RailsAi.configure { |c| c.provider = :openai }
RailsAi.generate_image("A cat playing with a ball")
```
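
Because the return value is a Base64 data URL, persisting it is mostly a matter of decoding it. A sketch using Active Storage; `Post#hero_image` is a hypothetical attachment, not part of the gem:

```ruby
require "base64"
require "stringio"

data_url = RailsAi.generate_image("A beautiful sunset over mountains")
encoded  = data_url.sub(%r{\Adata:image/\w+;base64,}, "")

# Attach the decoded bytes to a hypothetical Active Storage attachment
post.hero_image.attach(
  io: StringIO.new(Base64.decode64(encoded)),
  filename: "sunset.png",
  content_type: "image/png"
)
```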

### `RailsAi.edit_image(image, prompt, **opts)`

Edit an existing image.

**Parameters:**
- `image` (String|File) - Image to edit
- `prompt` (String) - Edit description
- `mask` (String|File, optional) - Mask for editing
- `size` (String, optional) - Output size (default: "1024x1024")
- `**opts` (Hash) - Additional options

**Returns:** String - Base64-encoded edited image

**Example:**
```ruby
edited = RailsAi.edit_image(image_file, "Add a rainbow in the sky")
# Note: Only supported by OpenAI provider
```

### `RailsAi.create_variation(image, **opts)`

Create variations of an image.

**Parameters:**
- `image` (String|File) - Base image
- `size` (String, optional) - Output size (default: "1024x1024")
- `**opts` (Hash) - Additional options

**Returns:** String - Base64-encoded variation

**Example:**
```ruby
variation = RailsAi.create_variation(image_file)
# Note: Only supported by OpenAI provider
```

### `RailsAi.analyze_image(image, prompt, **opts)`

Analyze an image with AI.

**Parameters:**
- `image` (String|File) - Image to analyze
- `prompt` (String) - Analysis prompt
- `model` (String, optional) - Analysis model (default: "gpt-4-vision-preview")
- `**opts` (Hash) - Additional options

**Returns:** String - Analysis result

**Example:**
```ruby
# Works with OpenAI, Anthropic, and Gemini
analysis = RailsAi.analyze_image(image_file, "What objects do you see?")

# Provider-specific models
RailsAi.configure { |c| c.provider = :openai }
RailsAi.analyze_image(image_file, "What do you see?", model: "gpt-4-vision-preview")

RailsAi.configure { |c| c.provider = :anthropic }
RailsAi.analyze_image(image_file, "What do you see?", model: "claude-3-sonnet-20240229")

RailsAi.configure { |c| c.provider = :gemini }
RailsAi.analyze_image(image_file, "What do you see?", model: "gemini-1.5-pro")
```

## 🎥 Video Operations

### `RailsAi.generate_video(prompt, **opts)`

Generate a video from a text description.

**Parameters:**
- `prompt` (String) - Video description
- `model` (String, optional) - Video model to use (default: "sora")
- `duration` (Integer, optional) - Video duration in seconds (default: 5)
- `**opts` (Hash) - Additional options

**Returns:** String - Base64-encoded video data

**Example:**
```ruby
video = RailsAi.generate_video("A cat playing with a ball", duration: 10)
# => "data:video/mp4;base64,..."

# Note: Only supported by OpenAI provider
RailsAi.configure { |c| c.provider = :openai }
RailsAi.generate_video("A sunset over the ocean")
```

### `RailsAi.edit_video(video, prompt, **opts)`

Edit an existing video.

**Parameters:**
- `video` (String|File) - Video to edit
- `prompt` (String) - Edit description
- `**opts` (Hash) - Additional options

**Returns:** String - Base64-encoded edited video

**Example:**
```ruby
edited = RailsAi.edit_video(video_file, "Add background music")
# Note: Only supported by OpenAI provider
```

### `RailsAi.analyze_video(video, prompt, **opts)`

Analyze a video with AI.

**Parameters:**
- `video` (String|File) - Video to analyze
- `prompt` (String) - Analysis prompt
- `model` (String, optional) - Analysis model (default: "gpt-4-vision-preview")
- `**opts` (Hash) - Additional options

**Returns:** String - Analysis result

**Example:**
```ruby
analysis = RailsAi.analyze_video(video_file, "What's happening in this video?")
# Note: Limited support across providers
```

## 🎵 Audio Operations

### `RailsAi.generate_speech(text, **opts)`

Generate speech from text.

**Parameters:**
- `text` (String) - Text to convert to speech
- `model` (String, optional) - Speech model to use (default: "tts-1")
- `voice` (String, optional) - Voice to use (default: "alloy")
- `**opts` (Hash) - Additional options

**Returns:** String - Base64-encoded audio data

**Example:**
```ruby
speech = RailsAi.generate_speech("Hello, welcome to our application!", voice: "alloy")
# => "data:audio/mp3;base64,..."

# Note: Only supported by OpenAI provider
RailsAi.configure { |c| c.provider = :openai }
RailsAi.generate_speech("Welcome to our app!")
```

### `RailsAi.transcribe_audio(audio, **opts)`

Transcribe audio to text.

**Parameters:**
- `audio` (String|File) - Audio to transcribe
- `model` (String, optional) - Transcription model (default: "whisper-1")
- `**opts` (Hash) - Additional options

**Returns:** String - Transcribed text

**Example:**
```ruby
transcription = RailsAi.transcribe_audio(audio_file)
# Note: Only supported by OpenAI provider
```

## 🧠 Context-Aware Operations

### `RailsAi.analyze_image_with_context(image, prompt, **contexts)`

Analyze an image with context information.

**Parameters:**
- `image` (String|File) - Image to analyze
- `prompt` (String) - Analysis prompt
- `user_context` (Hash, optional) - User context information
- `window_context` (Hash, optional) - Application context information
- `image_context` (Hash, optional) - Image context information
- `**opts` (Hash) - Additional options

**Returns:** String - Context-aware analysis result

**Example:**
```ruby
result = RailsAi.analyze_image_with_context(
  image_file,
  "What do you see?",
  user_context: { id: 1, role: "admin" },
  window_context: { controller: "PostsController" },
  image_context: { format: "png" }
)
```

### `RailsAi.generate_with_context(prompt, **contexts)`

Generate text with context information.

**Parameters:**
- `prompt` (String) - Text prompt
- `user_context` (Hash, optional) - User context information
- `window_context` (Hash, optional) - Application context information
- `**opts` (Hash) - Additional options

**Returns:** String - Context-aware generated text

**Example:**
```ruby
result = RailsAi.generate_with_context(
  "Write a summary",
  user_context: { id: 1, role: "admin" },
  window_context: { controller: "PostsController" }
)
```

### `RailsAi.generate_image_with_context(prompt, **contexts)`

Generate an image with context information.

**Parameters:**
- `prompt` (String) - Image description
- `user_context` (Hash, optional) - User context information
- `window_context` (Hash, optional) - Application context information
- `**opts` (Hash) - Additional options

**Returns:** String - Context-aware generated image

**Example:**
```ruby
result = RailsAi.generate_image_with_context(
  "Create an image for this blog post",
  user_context: { id: 1, role: "admin" },
  window_context: { controller: "PostsController" }
)
```

## ⚡ Performance Methods

### `RailsAi.batch_chat(requests)`

Process multiple chat requests in batch.

**Parameters:**
- `requests` (Array) - Array of request hashes

**Returns:** Array - Array of responses

**Example:**
```ruby
requests = [
  { prompt: "Write a blog post" },
  { prompt: "Generate a summary" },
  { prompt: "Create a title" }
]
results = RailsAi.batch_chat(requests)
```
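
Responses come back as a plain array, so pairing them with their requests is straightforward, assuming results are returned in request order:

```ruby
requests.zip(results).each do |request, response|
  Rails.logger.info("#{request[:prompt]} => #{response}")
end
```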

### `RailsAi.batch_embed(texts_array)`

Process multiple embedding requests in batch.

**Parameters:**
- `texts_array` (Array) - Array of text arrays

**Returns:** Array - Array of embedding arrays

**Example:**
```ruby
texts_array = [
  ["Ruby on Rails", "Django"],
  ["Express.js", "FastAPI"],
  ["Laravel", "Symfony"]
]
results = RailsAi.batch_embed(texts_array)
```

### `RailsAi.metrics`

Get performance metrics.

**Returns:** Hash - Performance metrics

**Example:**
```ruby
metrics = RailsAi.metrics
# => {
#   chat: { count: 100, total_duration: 5.2, avg_duration: 0.052 },
#   generate_image: { count: 50, total_duration: 12.3, avg_duration: 0.246 }
# }
```
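
A small sketch of reporting these numbers, assuming the hash shape shown above:

```ruby
RailsAi.metrics.each do |operation, stats|
  Rails.logger.info("[rails_ai] #{operation}: #{stats[:count]} calls, avg #{stats[:avg_duration]}s")
end
```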

### `RailsAi.warmup!`

Warm up components for faster first requests.

**Returns:** Nil

**Example:**
```ruby
RailsAi.warmup!
```

### `RailsAi.clear_cache!`

Clear all cached responses.

**Returns:** Nil

**Example:**
```ruby
RailsAi.clear_cache!
```

### `RailsAi.reset_performance_metrics!`

Reset performance metrics.

**Returns:** Nil

**Example:**
```ruby
RailsAi.reset_performance_metrics!
```

## 🔧 Provider Configuration

### `RailsAi.configure(&block)`

Configure Rails AI settings.

**Parameters:**
- `&block` (Proc) - Configuration block

**Example:**
```ruby
RailsAi.configure do |config|
  config.provider = :openai
  config.default_model = "gpt-4o-mini"
  config.cache_ttl = 1.hour
  config.enable_performance_monitoring = true
end
```

### Provider Selection

```ruby
# OpenAI
RailsAi.configure { |c| c.provider = :openai }
RailsAi.chat("Hello") # Uses OpenAI

# Anthropic (Claude)
RailsAi.configure { |c| c.provider = :anthropic }
RailsAi.chat("Hello") # Uses Anthropic

# Google Gemini
RailsAi.configure { |c| c.provider = :gemini }
RailsAi.chat("Hello") # Uses Gemini

# Dummy (Testing)
RailsAi.configure { |c| c.provider = :dummy }
RailsAi.chat("Hello") # Uses dummy provider
```

### Configuration Options

| Option | Type | Default | Description |
|--------|------|---------|-------------|
| `provider` | Symbol | `:openai` | AI provider to use |
| `default_model` | String | `"gpt-4o-mini"` | Default AI model |
| `token_limit` | Integer | `4000` | Token limit for requests |
| `cache_ttl` | Integer | `3600` | Cache time-to-live in seconds |
| `stub_responses` | Boolean | `false` | Stub responses for testing |
| `connection_pool_size` | Integer | `10` | HTTP connection pool size |
| `compression_threshold` | Integer | `1024` | Compression threshold in bytes |
| `batch_size` | Integer | `10` | Batch processing size |
| `flush_interval` | Float | `0.1` | Batch flush interval in seconds |
| `enable_performance_monitoring` | Boolean | `true` | Enable performance monitoring |
| `enable_request_deduplication` | Boolean | `true` | Enable request deduplication |
| `enable_compression` | Boolean | `true` | Enable response compression |
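
Put together, an initializer might set several of these at once. A sketch of `config/initializers/rails_ai.rb` using only option names from the table above; the values are examples, not recommendations:

```ruby
RailsAi.configure do |config|
  config.provider = :openai
  config.default_model = "gpt-4o-mini"
  config.token_limit = 4000
  config.cache_ttl = 3600
  config.connection_pool_size = 10
  config.batch_size = 10
  config.enable_performance_monitoring = true
  config.stub_responses = Rails.env.test?
end
```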

### Environment Variables

```bash
# OpenAI
OPENAI_API_KEY=your_openai_api_key

# Anthropic (Claude)
ANTHROPIC_API_KEY=your_anthropic_api_key

# Google Gemini
GEMINI_API_KEY=your_gemini_api_key
```
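
If the keys live in Rails encrypted credentials rather than the shell environment, an initializer can bridge them into the variables above. A sketch; the credential key names are assumptions:

```ruby
# config/initializers/rails_ai_keys.rb
{
  "OPENAI_API_KEY"    => Rails.application.credentials.dig(:openai, :api_key),
  "ANTHROPIC_API_KEY" => Rails.application.credentials.dig(:anthropic, :api_key),
  "GEMINI_API_KEY"    => Rails.application.credentials.dig(:gemini, :api_key)
}.each { |name, key| ENV[name] ||= key if key }
```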

### Provider-Specific Models

#### OpenAI Models
```ruby
text_models = ["gpt-4o-mini", "gpt-4o", "gpt-3.5-turbo"]
image_models = ["dall-e-3", "dall-e-2"]
audio_models = ["tts-1", "tts-1-hd"]
embedding_models = ["text-embedding-3-small", "text-embedding-3-large"]
```

#### Anthropic Models
```ruby
text_models = ["claude-3-sonnet-20240229", "claude-3-haiku-20240307", "claude-3-opus-20240229"]
vision_models = ["claude-3-sonnet-20240229", "claude-3-opus-20240229"]
```

#### Gemini Models
```ruby
text_models = ["gemini-1.5-pro", "gemini-1.5-flash", "gemini-1.0-pro"]
vision_models = ["gemini-1.5-pro", "gemini-1.5-flash"]
```

## 🚨 Error Handling

### Common Exceptions

#### `RailsAi::Provider::RateLimited`
Raised when the API rate limit is exceeded.

```ruby
begin
  RailsAi.chat("Hello")
rescue RailsAi::Provider::RateLimited => e
  # Handle rate limiting
  sleep(1)
  retry
end
```

#### `RailsAi::Provider::UnsafeInputError`
Raised when input contains unsafe content.

```ruby
begin
  RailsAi.chat("Unsafe content")
rescue RailsAi::Provider::UnsafeInputError => e
  # Handle unsafe input
  Rails.logger.warn("Unsafe input detected: #{e.message}")
end
```

#### `NotImplementedError`
Raised when an operation is not supported by the current provider.

```ruby
begin
  RailsAi.configure { |c| c.provider = :gemini }
  RailsAi.generate_image("A sunset")
rescue NotImplementedError => e
  # Handle unsupported operation
  Rails.logger.warn("Operation not supported: #{e.message}")
  # Switch to a supported provider
  RailsAi.configure { |c| c.provider = :openai }
  RailsAi.generate_image("A sunset")
end
```

#### `LoadError`
Raised when a required gem is not installed.

```ruby
begin
  RailsAi.configure { |c| c.provider = :anthropic }
  RailsAi.chat("Hello")
rescue LoadError => e
  # Handle missing gem
  Rails.logger.error("Missing gem: #{e.message}")
  # Install the gem or use a different provider
end
```

#### `StandardError`
General errors from AI providers.

```ruby
begin
  RailsAi.chat("Hello")
rescue StandardError => e
  # Handle general errors
  Rails.logger.error("AI operation failed: #{e.message}")
end
```

### Error Handling Best Practices

```ruby
def safe_ai_operation
  retries ||= 0
  RailsAi.chat("Hello")
rescue RailsAi::Provider::RateLimited => e
  # Retry with exponential backoff, giving up after three attempts
  retries += 1
  raise if retries > 3
  sleep(2**retries)
  retry
rescue RailsAi::Provider::UnsafeInputError => e
  # Log and return a safe response
  Rails.logger.warn("Unsafe input: #{e.message}")
  "I cannot process that request."
rescue NotImplementedError => e
  # Switch to a supported provider
  Rails.logger.warn("Operation not supported: #{e.message}")
  RailsAi.configure { |c| c.provider = :openai }
  RailsAi.chat("Hello")
rescue StandardError => e
  # Log and return a fallback
  Rails.logger.error("AI error: #{e.message}")
  "Sorry, I'm having trouble right now."
end
```

### Provider Fallback Strategy

```ruby
def robust_ai_operation(prompt)
  providers = [:openai, :anthropic, :gemini]

  providers.each do |provider|
    begin
      RailsAi.configure { |c| c.provider = provider }
      return RailsAi.chat(prompt)
    rescue => e
      Rails.logger.warn("#{provider} failed: #{e.message}")
      next
    end
  end

  raise "All providers failed"
end
```
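
Since `RailsAi.configure` changes global state, a fallback like this leaves the last attempted provider configured. A sketch of restoring your application default afterwards; the `:openai` default is an assumption, substitute your own:

```ruby
def with_provider(provider, default: :openai)
  RailsAi.configure { |c| c.provider = provider }
  yield
ensure
  # Put the global configuration back the way the rest of the app expects it
  RailsAi.configure { |c| c.provider = default }
end

with_provider(:anthropic) { RailsAi.chat("Hello") }
```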

---

This API documentation provides comprehensive coverage of all Rails AI methods and their usage across multiple providers. For more examples and advanced usage, see the [Basic Usage](Basic-Usage.md) and [Advanced Topics](Advanced-Topics.md) guides. 🚀