ruby_llm_community 0.0.1 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. checksums.yaml +4 -4
  2. data/LICENSE +22 -0
  3. data/README.md +172 -0
  4. data/lib/generators/ruby_llm/install/templates/INSTALL_INFO.md.tt +108 -0
  5. data/lib/generators/ruby_llm/install/templates/chat_model.rb.tt +3 -0
  6. data/lib/generators/ruby_llm/install/templates/create_chats_migration.rb.tt +8 -0
  7. data/lib/generators/ruby_llm/install/templates/create_messages_migration.rb.tt +15 -0
  8. data/lib/generators/ruby_llm/install/templates/create_tool_calls_migration.rb.tt +14 -0
  9. data/lib/generators/ruby_llm/install/templates/initializer.rb.tt +6 -0
  10. data/lib/generators/ruby_llm/install/templates/message_model.rb.tt +3 -0
  11. data/lib/generators/ruby_llm/install/templates/tool_call_model.rb.tt +3 -0
  12. data/lib/generators/ruby_llm/install_generator.rb +121 -0
  13. data/lib/ruby_llm/active_record/acts_as.rb +382 -0
  14. data/lib/ruby_llm/aliases.json +217 -0
  15. data/lib/ruby_llm/aliases.rb +56 -0
  16. data/lib/ruby_llm/attachment.rb +164 -0
  17. data/lib/ruby_llm/chat.rb +226 -0
  18. data/lib/ruby_llm/chunk.rb +6 -0
  19. data/lib/ruby_llm/configuration.rb +73 -0
  20. data/lib/ruby_llm/connection.rb +126 -0
  21. data/lib/ruby_llm/content.rb +52 -0
  22. data/lib/ruby_llm/context.rb +29 -0
  23. data/lib/ruby_llm/embedding.rb +30 -0
  24. data/lib/ruby_llm/error.rb +84 -0
  25. data/lib/ruby_llm/image.rb +53 -0
  26. data/lib/ruby_llm/message.rb +81 -0
  27. data/lib/ruby_llm/mime_type.rb +67 -0
  28. data/lib/ruby_llm/model/info.rb +101 -0
  29. data/lib/ruby_llm/model/modalities.rb +22 -0
  30. data/lib/ruby_llm/model/pricing.rb +51 -0
  31. data/lib/ruby_llm/model/pricing_category.rb +48 -0
  32. data/lib/ruby_llm/model/pricing_tier.rb +34 -0
  33. data/lib/ruby_llm/model.rb +7 -0
  34. data/lib/ruby_llm/models.json +29924 -0
  35. data/lib/ruby_llm/models.rb +214 -0
  36. data/lib/ruby_llm/models_schema.json +168 -0
  37. data/lib/ruby_llm/provider.rb +221 -0
  38. data/lib/ruby_llm/providers/anthropic/capabilities.rb +179 -0
  39. data/lib/ruby_llm/providers/anthropic/chat.rb +120 -0
  40. data/lib/ruby_llm/providers/anthropic/embeddings.rb +20 -0
  41. data/lib/ruby_llm/providers/anthropic/media.rb +116 -0
  42. data/lib/ruby_llm/providers/anthropic/models.rb +56 -0
  43. data/lib/ruby_llm/providers/anthropic/streaming.rb +45 -0
  44. data/lib/ruby_llm/providers/anthropic/tools.rb +108 -0
  45. data/lib/ruby_llm/providers/anthropic.rb +37 -0
  46. data/lib/ruby_llm/providers/bedrock/capabilities.rb +167 -0
  47. data/lib/ruby_llm/providers/bedrock/chat.rb +76 -0
  48. data/lib/ruby_llm/providers/bedrock/media.rb +73 -0
  49. data/lib/ruby_llm/providers/bedrock/models.rb +82 -0
  50. data/lib/ruby_llm/providers/bedrock/signing.rb +831 -0
  51. data/lib/ruby_llm/providers/bedrock/streaming/base.rb +63 -0
  52. data/lib/ruby_llm/providers/bedrock/streaming/content_extraction.rb +71 -0
  53. data/lib/ruby_llm/providers/bedrock/streaming/message_processing.rb +79 -0
  54. data/lib/ruby_llm/providers/bedrock/streaming/payload_processing.rb +92 -0
  55. data/lib/ruby_llm/providers/bedrock/streaming/prelude_handling.rb +91 -0
  56. data/lib/ruby_llm/providers/bedrock/streaming.rb +36 -0
  57. data/lib/ruby_llm/providers/bedrock.rb +83 -0
  58. data/lib/ruby_llm/providers/deepseek/capabilities.rb +131 -0
  59. data/lib/ruby_llm/providers/deepseek/chat.rb +17 -0
  60. data/lib/ruby_llm/providers/deepseek.rb +30 -0
  61. data/lib/ruby_llm/providers/gemini/capabilities.rb +351 -0
  62. data/lib/ruby_llm/providers/gemini/chat.rb +146 -0
  63. data/lib/ruby_llm/providers/gemini/embeddings.rb +39 -0
  64. data/lib/ruby_llm/providers/gemini/images.rb +48 -0
  65. data/lib/ruby_llm/providers/gemini/media.rb +55 -0
  66. data/lib/ruby_llm/providers/gemini/models.rb +41 -0
  67. data/lib/ruby_llm/providers/gemini/streaming.rb +66 -0
  68. data/lib/ruby_llm/providers/gemini/tools.rb +82 -0
  69. data/lib/ruby_llm/providers/gemini.rb +36 -0
  70. data/lib/ruby_llm/providers/gpustack/chat.rb +17 -0
  71. data/lib/ruby_llm/providers/gpustack/models.rb +55 -0
  72. data/lib/ruby_llm/providers/gpustack.rb +33 -0
  73. data/lib/ruby_llm/providers/mistral/capabilities.rb +163 -0
  74. data/lib/ruby_llm/providers/mistral/chat.rb +26 -0
  75. data/lib/ruby_llm/providers/mistral/embeddings.rb +36 -0
  76. data/lib/ruby_llm/providers/mistral/models.rb +49 -0
  77. data/lib/ruby_llm/providers/mistral.rb +32 -0
  78. data/lib/ruby_llm/providers/ollama/chat.rb +28 -0
  79. data/lib/ruby_llm/providers/ollama/media.rb +50 -0
  80. data/lib/ruby_llm/providers/ollama.rb +29 -0
  81. data/lib/ruby_llm/providers/openai/capabilities.rb +306 -0
  82. data/lib/ruby_llm/providers/openai/chat.rb +87 -0
  83. data/lib/ruby_llm/providers/openai/embeddings.rb +36 -0
  84. data/lib/ruby_llm/providers/openai/images.rb +38 -0
  85. data/lib/ruby_llm/providers/openai/media.rb +81 -0
  86. data/lib/ruby_llm/providers/openai/models.rb +39 -0
  87. data/lib/ruby_llm/providers/openai/response.rb +116 -0
  88. data/lib/ruby_llm/providers/openai/response_media.rb +76 -0
  89. data/lib/ruby_llm/providers/openai/streaming.rb +191 -0
  90. data/lib/ruby_llm/providers/openai/tools.rb +100 -0
  91. data/lib/ruby_llm/providers/openai.rb +44 -0
  92. data/lib/ruby_llm/providers/openai_base.rb +44 -0
  93. data/lib/ruby_llm/providers/openrouter/models.rb +88 -0
  94. data/lib/ruby_llm/providers/openrouter.rb +26 -0
  95. data/lib/ruby_llm/providers/perplexity/capabilities.rb +138 -0
  96. data/lib/ruby_llm/providers/perplexity/chat.rb +17 -0
  97. data/lib/ruby_llm/providers/perplexity/models.rb +42 -0
  98. data/lib/ruby_llm/providers/perplexity.rb +52 -0
  99. data/lib/ruby_llm/railtie.rb +17 -0
  100. data/lib/ruby_llm/stream_accumulator.rb +103 -0
  101. data/lib/ruby_llm/streaming.rb +162 -0
  102. data/lib/ruby_llm/tool.rb +100 -0
  103. data/lib/ruby_llm/tool_call.rb +31 -0
  104. data/lib/ruby_llm/utils.rb +49 -0
  105. data/lib/ruby_llm/version.rb +5 -0
  106. data/lib/ruby_llm.rb +98 -0
  107. data/lib/tasks/aliases.rake +235 -0
  108. data/lib/tasks/models_docs.rake +224 -0
  109. data/lib/tasks/models_update.rake +108 -0
  110. data/lib/tasks/release.rake +32 -0
  111. data/lib/tasks/vcr.rake +99 -0
  112. metadata +128 -7
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: bd050c159cde376efb60a5f9c5847687ff9a1f44a8d98f5ee93440ea65bc1f38
- data.tar.gz: dae5536d6035f67546136f12ffe1fc52d602bdffc8dd5eeba120fec2313c4cd7
+ metadata.gz: 2a8638f12f0c6d0e811f078bdf311d3dde1bc969f6881ed64cf1a5133256b574
+ data.tar.gz: 4bba1ef73b4624fca8ef83c4cd661d9bdb3a15f209da11a3a962cab24b4fdb80
  SHA512:
- metadata.gz: 55181d506b358aa4cc88a9e1c749694cc5d1a29c1116f0639c0a7b8d68a60599a7911dac90a1073da1d5b5188a5a519ae73907935fc558aebc1a2d0059bfa072
- data.tar.gz: 8cf8d948c812075b068479e25737a0d43999831f2dea87a018d6d62fe8c1049d96bb27076f88173ed91d4bd4ae1939669d768402ee9e8ce8e5c215c14014d909
+ metadata.gz: e083eaa68a50b78854d780b3ddd2dc2467652aceb62fd9633b1b9a75f48b89bb8ad885800f65cb376b4f2997336284d27baf3508f3fe2bbfe920cac064f85df5
+ data.tar.gz: c068573609da8e4755201536aea1084b08b819e6d928c8624090f3a20133e253a86688d4ad5db206254be86c5795d66e3f12ffe945e308dc5cf24af479c8d403
data/LICENSE ADDED
@@ -0,0 +1,22 @@
+ MIT License
+
+ Copyright (c) 2025 Carmine Paolino
+ Copyright (c) 2025 Paul Shippy
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,172 @@
+ <picture>
+   <source media="(prefers-color-scheme: dark)" srcset="/docs/assets/images/logotype_dark.svg">
+   <img src="/docs/assets/images/logotype.svg" alt="RubyLLM" height="120" width="250">
+ </picture>
+
+ **One *beautiful* Ruby API for GPT, Claude, Gemini, and more.** Easily build chatbots, AI agents, RAG applications, and content generators. Features chat (text, images, audio, PDFs), image generation, embeddings, tools (function calling), structured output, Rails integration, and streaming. Works with OpenAI, Anthropic, Google Gemini, AWS Bedrock, DeepSeek, Mistral, Ollama (local models), OpenRouter, Perplexity, GPUStack, and any OpenAI-compatible API.
+
+ <div class="badge-container">
+   <a href="https://badge.fury.io/rb/ruby_llm"><img src="https://badge.fury.io/rb/ruby_llm.svg?a=5" alt="Gem Version" /></a>
+   <a href="https://github.com/testdouble/standard"><img src="https://img.shields.io/badge/code_style-standard-brightgreen.svg" alt="Ruby Style Guide" /></a>
+   <a href="https://rubygems.org/gems/ruby_llm"><img alt="Gem Downloads" src="https://img.shields.io/gem/dt/ruby_llm"></a>
+   <a href="https://codecov.io/gh/crmne/ruby_llm"><img src="https://codecov.io/gh/crmne/ruby_llm/branch/main/graph/badge.svg" alt="codecov" /></a>
+ </div>
+
+ Battle tested at [<picture><source media="(prefers-color-scheme: dark)" srcset="https://chatwithwork.com/logotype-dark.svg"><img src="https://chatwithwork.com/logotype.svg" alt="Chat with Work" height="30" align="absmiddle"></picture>](https://chatwithwork.com) — *Claude Code for your documents*
+
+ ## The problem with AI libraries
+
+ Every AI provider comes with its own client library, its own response format, its own conventions for streaming, and its own way of handling errors. Want to use multiple providers? Prepare to juggle incompatible APIs and bloated dependencies.
+
+ RubyLLM fixes all that. One beautiful API for everything. One consistent format. Minimal dependencies — just Faraday, Zeitwerk, and [Marcel](https://github.com/rails/marcel). Because working with AI should be a joy, not a chore.
+
+ ## What makes it great
+
+ ```ruby
+ # Just ask questions
+ chat = RubyLLM.chat
+ chat.ask "What's the best way to learn Ruby?"
+
+ # Analyze images, audio, documents, and text files
+ chat.ask "What's in this image?", with: "ruby_conf.jpg"
+ chat.ask "Describe this meeting", with: "meeting.wav"
+ chat.ask "Summarize this document", with: "contract.pdf"
+ chat.ask "Explain this code", with: "app.rb"
+
+ # Multiple files at once - types automatically detected
+ chat.ask "Analyze these files", with: ["diagram.png", "report.pdf", "notes.txt"]
+
+ # Stream responses in real-time
+ chat.ask "Tell me a story about a Ruby programmer" do |chunk|
+   print chunk.content
+ end
+
+ # Generate images
+ RubyLLM.paint "a sunset over mountains in watercolor style"
+
+ # Create vector embeddings
+ RubyLLM.embed "Ruby is elegant and expressive"
+
+ # Let AI use your code
+ class Weather < RubyLLM::Tool
+   description "Gets current weather for a location"
+   param :latitude, desc: "Latitude (e.g., 52.5200)"
+   param :longitude, desc: "Longitude (e.g., 13.4050)"
+
+   def execute(latitude:, longitude:)
+     url = "https://api.open-meteo.com/v1/forecast?latitude=#{latitude}&longitude=#{longitude}&current=temperature_2m,wind_speed_10m"
+
+     response = Faraday.get(url)
+     data = JSON.parse(response.body)
+   rescue => e
+     { error: e.message }
+   end
+ end
+
+ chat.with_tool(Weather).ask "What's the weather in Berlin? (52.5200, 13.4050)"
+
+ # Get structured output with JSON schemas
+ class ProductSchema < RubyLLM::Schema
+   string :name, description: "Product name"
+   number :price, description: "Price in USD"
+   array :features, description: "Key features" do
+     string description: "Feature description"
+   end
+ end
+
+ response = chat.with_schema(ProductSchema)
+                .ask "Analyze this product description", with: "product.txt"
+ # response.content => { "name" => "...", "price" => 99.99, "features" => [...] }
+ ```
+
+ ## Core Capabilities
+
+ * 💬 **Unified Chat:** Converse with models from OpenAI, Anthropic, Gemini, Bedrock, OpenRouter, DeepSeek, Perplexity, Mistral, Ollama, or any OpenAI-compatible API using `RubyLLM.chat`.
+ * 👁️ **Vision:** Analyze images within chats.
+ * 🔊 **Audio:** Transcribe and understand audio content.
+ * 📄 **Document Analysis:** Extract information from PDFs, text files, CSV, JSON, XML, Markdown, and code files.
+ * 🖼️ **Image Generation:** Create images with `RubyLLM.paint`.
+ * 📊 **Embeddings:** Generate text embeddings for vector search with `RubyLLM.embed`.
+ * 🔧 **Tools (Function Calling):** Let AI models call your Ruby code using `RubyLLM::Tool`.
+ * 📋 **Structured Output:** Guarantee responses conform to JSON schemas with `RubyLLM::Schema`.
+ * 🚂 **Rails Integration:** Easily persist chats, messages, and tool calls using `acts_as_chat` and `acts_as_message`.
+ * 🌊 **Streaming:** Process responses in real-time with idiomatic Ruby blocks.
+ * ⚡ **Async Support:** Built-in fiber-based concurrency for high-performance operations.
+ * 🎯 **Smart Configuration:** Global and scoped configs with automatic retries and proxy support.
+ * 📚 **Model Registry:** Access 500+ models with capability detection and pricing info.
+
+ ## Installation
+
+ Add to your Gemfile:
+ ```ruby
+ gem 'ruby_llm'
+ ```
+ Then `bundle install`.
+
+ Configure your API keys (using environment variables is recommended):
+ ```ruby
+ # config/initializers/ruby_llm.rb or similar
+ RubyLLM.configure do |config|
+   config.openai_api_key = ENV.fetch('OPENAI_API_KEY', nil)
+   # Add keys ONLY for providers you intend to use
+   # config.anthropic_api_key = ENV.fetch('ANTHROPIC_API_KEY', nil)
+   # ... see Configuration guide for all options ...
+ end
+ ```
+ See the [Installation Guide](https://rubyllm.com/installation) for full details.
+
+ ## Rails Integration
+
+ Add persistence to your chat models effortlessly:
+
+ ```bash
+ # Generate models and migrations
+ rails generate ruby_llm:install
+ ```
+
+ ```ruby
+ # Or add to existing models
+ class Chat < ApplicationRecord
+   acts_as_chat # Automatically saves messages & tool calls
+ end
+
+ class Message < ApplicationRecord
+   acts_as_message
+ end
+
+ class ToolCall < ApplicationRecord
+   acts_as_tool_call
+ end
+
+ # Now chats persist automatically
+ chat = Chat.create!(model_id: "gpt-4.1-nano")
+ chat.ask("What's in this file?", with: "report.pdf")
+ ```
+
+ See the [Rails Integration Guide](https://rubyllm.com/guides/rails) for details.
+
+ ## Learn More
+
+ Dive deeper with the official documentation:
+
+ - [Installation](https://rubyllm.com/installation)
+ - [Configuration](https://rubyllm.com/configuration)
+ - **Guides:**
+   - [Getting Started](https://rubyllm.com/guides/getting-started)
+   - [Chatting with AI Models](https://rubyllm.com/guides/chat)
+   - [Using Tools](https://rubyllm.com/guides/tools)
+   - [Streaming Responses](https://rubyllm.com/guides/streaming)
+   - [Rails Integration](https://rubyllm.com/guides/rails)
+   - [Image Generation](https://rubyllm.com/guides/image-generation)
+   - [Embeddings](https://rubyllm.com/guides/embeddings)
+   - [Working with Models](https://rubyllm.com/guides/models)
+   - [Error Handling](https://rubyllm.com/guides/error-handling)
+   - [Available Models](https://rubyllm.com/guides/available-models)
+
+ ## Contributing
+
+ We welcome contributions! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for details on setup, testing, and contribution guidelines.
+
+ ## License
+
+ Released under the MIT License.
data/lib/generators/ruby_llm/install/templates/INSTALL_INFO.md.tt ADDED
@@ -0,0 +1,108 @@
+ # RubyLLM Rails Setup Complete!
+
+ Thanks for installing RubyLLM in your Rails application. Here's what was created:
+
+ ## Models
+
+ - `<%= options[:chat_model_name] %>` - Stores chat sessions and their associated model ID
+ - `<%= options[:message_model_name] %>` - Stores individual messages in a chat
+ - `<%= options[:tool_call_model_name] %>` - Stores tool calls made by language models
+
+ **Note:** Do not add `validates :content, presence: true` to your Message model - RubyLLM creates empty assistant messages before API calls for streaming support.
+
+ ## Configuration Options
+
+ The generator supports the following options to customize model names:
+
+ ```bash
+ rails generate ruby_llm:install \
+   --chat-model-name=Conversation \
+   --message-model-name=ChatMessage \
+   --tool-call-model-name=FunctionCall
+ ```
+
+ This is useful when you need to avoid namespace collisions with existing models in your application. Table names will be automatically derived from the model names following Rails conventions.
+
+ ## Next Steps
+
+ 1. **Run migrations:**
+    ```bash
+    rails db:migrate
+    ```
+
+    **Database Note:** The migrations use `jsonb` for PostgreSQL and `json` for MySQL/SQLite automatically.
+
+ 2. **Set your API keys** in `config/initializers/ruby_llm.rb` or using environment variables:
+    ```ruby
+    # config/initializers/ruby_llm.rb
+    RubyLLM.configure do |config|
+      config.openai_api_key = ENV['OPENAI_API_KEY']
+      config.anthropic_api_key = ENV['ANTHROPIC_API_KEY']
+      config.gemini_api_key = ENV['GEMINI_API_KEY']
+      # ... add other providers as needed
+    end
+    ```
+
+ 3. **Start using RubyLLM in your code:**
+    ```ruby
+    # Basic usage
+    chat = <%= options[:chat_model_name] %>.create!(model_id: 'gpt-4.1-nano')
+    response = chat.ask("What is Ruby on Rails?")
+
+    # With file attachments (requires ActiveStorage setup)
+    chat.ask("What's in this file?", with: "report.pdf")
+    chat.ask("Analyze these files", with: ["image.jpg", "data.csv", "notes.txt"])
+    ```
+
+ 4. **For streaming responses** with Hotwire/Turbo:
+    ```ruby
+    # app/models/<%= options[:message_model_name].underscore %>.rb
+    class <%= options[:message_model_name] %> < ApplicationRecord
+      acts_as_message
+
+      # Helper to broadcast chunks during streaming
+      def broadcast_append_chunk(chunk_content)
+        broadcast_append_to [ chat, "messages" ],
+                            target: dom_id(self, "content"),
+                            html: chunk_content
+      end
+    end
+
+    # app/jobs/chat_stream_job.rb
+    class ChatStreamJob < ApplicationJob
+      def perform(chat_id, user_content)
+        chat = <%= options[:chat_model_name] %>.find(chat_id)
+        chat.ask(user_content) do |chunk|
+          assistant_message = chat.messages.last
+          if chunk.content && assistant_message
+            assistant_message.broadcast_append_chunk(chunk.content)
+          end
+        end
+      end
+    end
+
+    # In your controller
+    ChatStreamJob.perform_later(@chat.id, params[:content])
+    ```
+
+ ## Optional: ActiveStorage for Attachments
+
+ If you want to use file attachments (PDFs, images, etc.), set up ActiveStorage:
+
+ ```bash
+ rails active_storage:install
+ rails db:migrate
+ ```
+
+ Then add to your Message model:
+ ```ruby
+ class <%= options[:message_model_name] %> < ApplicationRecord
+   acts_as_message
+   has_many_attached :attachments
+ end
+ ```
+
+ ## Learn More
+
+ - See the [Rails Integration Guide](https://rubyllm.com/guides/rails) for detailed examples
+ - Visit the [RubyLLM Documentation](https://rubyllm.com) for full API reference
data/lib/generators/ruby_llm/install/templates/chat_model.rb.tt ADDED
@@ -0,0 +1,3 @@
+ class <%= options[:chat_model_name] %> < ApplicationRecord
+   <%= acts_as_chat_declaration %>
+ end
data/lib/generators/ruby_llm/install/templates/create_chats_migration.rb.tt ADDED
@@ -0,0 +1,8 @@
+ class Create<%= options[:chat_model_name].pluralize %> < ActiveRecord::Migration<%= migration_version %>
+   def change
+     create_table :<%= options[:chat_model_name].tableize %> do |t|
+       t.string :model_id
+       t.timestamps
+     end
+   end
+ end
data/lib/generators/ruby_llm/install/templates/create_messages_migration.rb.tt ADDED
@@ -0,0 +1,15 @@
+ # Migration for creating messages table with references to chats and tool_calls
+ class Create<%= options[:message_model_name].pluralize %> < ActiveRecord::Migration<%= migration_version %>
+   def change
+     create_table :<%= options[:message_model_name].tableize %> do |t|
+       t.references :<%= options[:chat_model_name].tableize.singularize %>, null: false, foreign_key: true
+       t.string :role
+       t.text :content
+       t.string :model_id
+       t.integer :input_tokens
+       t.integer :output_tokens
+       t.references :<%= options[:tool_call_model_name].tableize.singularize %>
+       t.timestamps
+     end
+   end
+ end
data/lib/generators/ruby_llm/install/templates/create_tool_calls_migration.rb.tt ADDED
@@ -0,0 +1,14 @@
+ <%#- # Migration for creating tool_calls table with database-specific JSON handling -%>
+ class Create<%= options[:tool_call_model_name].pluralize %> < ActiveRecord::Migration<%= migration_version %>
+   def change
+     create_table :<%= options[:tool_call_model_name].tableize %> do |t|
+       t.references :<%= options[:message_model_name].tableize.singularize %>, null: false, foreign_key: true
+       t.string :tool_call_id, null: false
+       t.string :name, null: false
+       t.<%= postgresql? ? 'jsonb' : 'json' %> :arguments, default: {}
+       t.timestamps
+     end
+
+     add_index :<%= options[:tool_call_model_name].tableize %>, :tool_call_id
+   end
+ end
data/lib/generators/ruby_llm/install/templates/initializer.rb.tt ADDED
@@ -0,0 +1,6 @@
+ RubyLLM.configure do |config|
+   config.openai_api_key = ENV["OPENAI_API_KEY"]
+   config.anthropic_api_key = ENV["ANTHROPIC_API_KEY"]
+
+   # config.default_model = "gpt-4.1-nano"
+ end
data/lib/generators/ruby_llm/install/templates/message_model.rb.tt ADDED
@@ -0,0 +1,3 @@
+ class <%= options[:message_model_name] %> < ApplicationRecord
+   <%= acts_as_message_declaration %>
+ end
data/lib/generators/ruby_llm/install/templates/tool_call_model.rb.tt ADDED
@@ -0,0 +1,3 @@
+ class <%= options[:tool_call_model_name] %> < ApplicationRecord
+   <%= acts_as_tool_call_declaration %>
+ end
data/lib/generators/ruby_llm/install_generator.rb ADDED
@@ -0,0 +1,121 @@
+ # frozen_string_literal: true
+
+ require 'rails/generators'
+ require 'rails/generators/active_record'
+
+ module RubyLLM
+   # Generator for RubyLLM Rails models and migrations
+   class InstallGenerator < Rails::Generators::Base
+     include Rails::Generators::Migration
+
+     namespace 'ruby_llm:install'
+
+     source_root File.expand_path('install/templates', __dir__)
+
+     class_option :chat_model_name, type: :string, default: 'Chat',
+                  desc: 'Name of the Chat model class'
+     class_option :message_model_name, type: :string, default: 'Message',
+                  desc: 'Name of the Message model class'
+     class_option :tool_call_model_name, type: :string, default: 'ToolCall',
+                  desc: 'Name of the ToolCall model class'
+
+     desc 'Creates model files for Chat, Message, and ToolCall, and creates migrations for RubyLLM Rails integration'
+
+     def self.next_migration_number(dirname)
+       ::ActiveRecord::Generators::Base.next_migration_number(dirname)
+     end
+
+     def migration_version
+       "[#{Rails::VERSION::MAJOR}.#{Rails::VERSION::MINOR}]"
+     end
+
+     def postgresql?
+       ::ActiveRecord::Base.connection.adapter_name.downcase.include?('postgresql')
+     rescue StandardError
+       false
+     end
+
+     def acts_as_chat_declaration
+       acts_as_chat_params = []
+       if options[:message_model_name] != 'Message'
+         acts_as_chat_params << "message_class: \"#{options[:message_model_name]}\""
+       end
+       if options[:tool_call_model_name] != 'ToolCall'
+         acts_as_chat_params << "tool_call_class: \"#{options[:tool_call_model_name]}\""
+       end
+       if acts_as_chat_params.any?
+         "acts_as_chat #{acts_as_chat_params.join(', ')}"
+       else
+         'acts_as_chat'
+       end
+     end
+
+     def acts_as_message_declaration
+       acts_as_message_params = []
+       acts_as_message_params << "chat_class: \"#{options[:chat_model_name]}\"" if options[:chat_model_name] != 'Chat'
+       if options[:tool_call_model_name] != 'ToolCall'
+         acts_as_message_params << "tool_call_class: \"#{options[:tool_call_model_name]}\""
+       end
+       if acts_as_message_params.any?
+         "acts_as_message #{acts_as_message_params.join(', ')}"
+       else
+         'acts_as_message'
+       end
+     end
+
+     def acts_as_tool_call_declaration
+       acts_as_tool_call_params = []
+       if options[:message_model_name] != 'Message'
+         acts_as_tool_call_params << "message_class: \"#{options[:message_model_name]}\""
+       end
+       if acts_as_tool_call_params.any?
+         "acts_as_tool_call #{acts_as_tool_call_params.join(', ')}"
+       else
+         'acts_as_tool_call'
+       end
+     end
+
+     def create_migration_files
+       # Create migrations with timestamps to ensure proper order
+       # First create chats table
+       migration_template 'create_chats_migration.rb.tt',
+                          "db/migrate/create_#{options[:chat_model_name].tableize}.rb"
+
+       # Then create messages table (must come before tool_calls due to foreign key)
+       sleep 1 # Ensure different timestamp
+       migration_template 'create_messages_migration.rb.tt',
+                          "db/migrate/create_#{options[:message_model_name].tableize}.rb"
+
+       # Finally create tool_calls table (references messages)
+       sleep 1 # Ensure different timestamp
+       migration_template 'create_tool_calls_migration.rb.tt',
+                          "db/migrate/create_#{options[:tool_call_model_name].tableize}.rb"
+     end
+
+     def create_model_files
+       template 'chat_model.rb.tt', "app/models/#{options[:chat_model_name].underscore}.rb"
+       template 'message_model.rb.tt', "app/models/#{options[:message_model_name].underscore}.rb"
+       template 'tool_call_model.rb.tt', "app/models/#{options[:tool_call_model_name].underscore}.rb"
+     end
+
+     def create_initializer
+       template 'initializer.rb.tt', 'config/initializers/ruby_llm.rb'
+     end
+
+     def show_install_info
+       say "\n ✅ RubyLLM installed!", :green
+
+       say "\n Next steps:", :yellow
+       say ' 1. Run: rails db:migrate'
+       say ' 2. Set your API keys in config/initializers/ruby_llm.rb'
+       say " 3. Start chatting: #{options[:chat_model_name]}.create!(model_id: 'gpt-4.1-nano').ask('Hello!')"
+
+       say "\n 📚 Full docs: https://rubyllm.com", :cyan
+
+       say "\n ❤️ Love RubyLLM?", :magenta
+       say ' • ⭐ Star on GitHub: https://github.com/crmne/ruby_llm'
+       say ' • 💖 Sponsor: https://github.com/sponsors/crmne'
+       say "\n"
+     end
+   end
+ end