raif 1.0.0 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116) hide show
  1. checksums.yaml +4 -4
  2. data/README.md +346 -43
  3. data/app/assets/builds/raif.css +26 -1
  4. data/app/assets/stylesheets/raif/admin/stats.scss +12 -0
  5. data/app/assets/stylesheets/raif/loader.scss +27 -1
  6. data/app/controllers/raif/admin/application_controller.rb +14 -0
  7. data/app/controllers/raif/admin/stats/tasks_controller.rb +25 -0
  8. data/app/controllers/raif/admin/stats_controller.rb +19 -0
  9. data/app/controllers/raif/admin/tasks_controller.rb +18 -2
  10. data/app/controllers/raif/conversations_controller.rb +5 -1
  11. data/app/models/raif/agent.rb +11 -9
  12. data/app/models/raif/agents/native_tool_calling_agent.rb +11 -1
  13. data/app/models/raif/agents/re_act_agent.rb +6 -0
  14. data/app/models/raif/concerns/has_available_model_tools.rb +1 -1
  15. data/app/models/raif/concerns/json_schema_definition.rb +28 -0
  16. data/app/models/raif/concerns/llm_response_parsing.rb +42 -14
  17. data/app/models/raif/concerns/llm_temperature.rb +17 -0
  18. data/app/models/raif/concerns/llms/anthropic/message_formatting.rb +51 -0
  19. data/app/models/raif/concerns/llms/anthropic/tool_formatting.rb +56 -0
  20. data/app/models/raif/concerns/llms/bedrock/message_formatting.rb +70 -0
  21. data/app/models/raif/concerns/llms/bedrock/tool_formatting.rb +37 -0
  22. data/app/models/raif/concerns/llms/message_formatting.rb +42 -0
  23. data/app/models/raif/concerns/llms/open_ai/json_schema_validation.rb +138 -0
  24. data/app/models/raif/concerns/llms/open_ai_completions/message_formatting.rb +41 -0
  25. data/app/models/raif/concerns/llms/open_ai_completions/tool_formatting.rb +26 -0
  26. data/app/models/raif/concerns/llms/open_ai_responses/message_formatting.rb +43 -0
  27. data/app/models/raif/concerns/llms/open_ai_responses/tool_formatting.rb +42 -0
  28. data/app/models/raif/conversation.rb +28 -7
  29. data/app/models/raif/conversation_entry.rb +40 -8
  30. data/app/models/raif/embedding_model.rb +22 -0
  31. data/app/models/raif/embedding_models/bedrock.rb +34 -0
  32. data/app/models/raif/embedding_models/open_ai.rb +40 -0
  33. data/app/models/raif/llm.rb +108 -9
  34. data/app/models/raif/llms/anthropic.rb +72 -57
  35. data/app/models/raif/llms/bedrock.rb +165 -0
  36. data/app/models/raif/llms/open_ai_base.rb +66 -0
  37. data/app/models/raif/llms/open_ai_completions.rb +100 -0
  38. data/app/models/raif/llms/open_ai_responses.rb +144 -0
  39. data/app/models/raif/llms/open_router.rb +88 -0
  40. data/app/models/raif/model_completion.rb +23 -2
  41. data/app/models/raif/model_file_input.rb +113 -0
  42. data/app/models/raif/model_image_input.rb +4 -0
  43. data/app/models/raif/model_tool.rb +82 -52
  44. data/app/models/raif/model_tool_invocation.rb +8 -6
  45. data/app/models/raif/model_tools/agent_final_answer.rb +18 -27
  46. data/app/models/raif/model_tools/fetch_url.rb +27 -36
  47. data/app/models/raif/model_tools/provider_managed/base.rb +9 -0
  48. data/app/models/raif/model_tools/provider_managed/code_execution.rb +5 -0
  49. data/app/models/raif/model_tools/provider_managed/image_generation.rb +5 -0
  50. data/app/models/raif/model_tools/provider_managed/web_search.rb +5 -0
  51. data/app/models/raif/model_tools/wikipedia_search.rb +46 -55
  52. data/app/models/raif/streaming_responses/anthropic.rb +63 -0
  53. data/app/models/raif/streaming_responses/bedrock.rb +89 -0
  54. data/app/models/raif/streaming_responses/open_ai_completions.rb +76 -0
  55. data/app/models/raif/streaming_responses/open_ai_responses.rb +54 -0
  56. data/app/models/raif/task.rb +71 -16
  57. data/app/views/layouts/raif/admin.html.erb +10 -0
  58. data/app/views/raif/admin/agents/show.html.erb +3 -1
  59. data/app/views/raif/admin/conversations/_conversation.html.erb +1 -1
  60. data/app/views/raif/admin/conversations/_conversation_entry.html.erb +48 -0
  61. data/app/views/raif/admin/conversations/show.html.erb +4 -2
  62. data/app/views/raif/admin/model_completions/_model_completion.html.erb +8 -0
  63. data/app/views/raif/admin/model_completions/index.html.erb +2 -0
  64. data/app/views/raif/admin/model_completions/show.html.erb +58 -3
  65. data/app/views/raif/admin/stats/index.html.erb +128 -0
  66. data/app/views/raif/admin/stats/tasks/index.html.erb +45 -0
  67. data/app/views/raif/admin/tasks/_task.html.erb +5 -4
  68. data/app/views/raif/admin/tasks/index.html.erb +20 -2
  69. data/app/views/raif/admin/tasks/show.html.erb +3 -1
  70. data/app/views/raif/conversation_entries/_citations.html.erb +9 -0
  71. data/app/views/raif/conversation_entries/_conversation_entry.html.erb +22 -14
  72. data/app/views/raif/conversation_entries/_form.html.erb +1 -1
  73. data/app/views/raif/conversation_entries/_form_with_available_tools.html.erb +4 -4
  74. data/app/views/raif/conversation_entries/_message.html.erb +14 -3
  75. data/config/locales/admin.en.yml +16 -0
  76. data/config/locales/en.yml +47 -3
  77. data/config/routes.rb +6 -0
  78. data/db/migrate/20250224234252_create_raif_tables.rb +1 -1
  79. data/db/migrate/20250421202149_add_response_format_to_raif_conversations.rb +7 -0
  80. data/db/migrate/20250424200755_add_cost_columns_to_raif_model_completions.rb +14 -0
  81. data/db/migrate/20250424232946_add_created_at_indexes.rb +11 -0
  82. data/db/migrate/20250502155330_add_status_indexes_to_raif_tasks.rb +14 -0
  83. data/db/migrate/20250507155314_add_retry_count_to_raif_model_completions.rb +7 -0
  84. data/db/migrate/20250527213016_add_response_id_and_response_array_to_model_completions.rb +14 -0
  85. data/db/migrate/20250603140622_add_citations_to_raif_model_completions.rb +13 -0
  86. data/db/migrate/20250603202013_add_stream_response_to_raif_model_completions.rb +7 -0
  87. data/lib/generators/raif/agent/agent_generator.rb +22 -12
  88. data/lib/generators/raif/agent/templates/agent.rb.tt +3 -3
  89. data/lib/generators/raif/agent/templates/application_agent.rb.tt +7 -0
  90. data/lib/generators/raif/conversation/conversation_generator.rb +10 -0
  91. data/lib/generators/raif/conversation/templates/application_conversation.rb.tt +7 -0
  92. data/lib/generators/raif/conversation/templates/conversation.rb.tt +16 -14
  93. data/lib/generators/raif/install/templates/initializer.rb +62 -6
  94. data/lib/generators/raif/model_tool/model_tool_generator.rb +0 -5
  95. data/lib/generators/raif/model_tool/templates/model_tool.rb.tt +69 -56
  96. data/lib/generators/raif/task/templates/task.rb.tt +34 -23
  97. data/lib/raif/configuration.rb +63 -4
  98. data/lib/raif/embedding_model_registry.rb +83 -0
  99. data/lib/raif/engine.rb +56 -7
  100. data/lib/raif/errors/{open_ai/api_error.rb → invalid_model_file_input_error.rb} +1 -3
  101. data/lib/raif/errors/{anthropic/api_error.rb → invalid_model_image_input_error.rb} +1 -3
  102. data/lib/raif/errors/streaming_error.rb +18 -0
  103. data/lib/raif/errors/unsupported_feature_error.rb +8 -0
  104. data/lib/raif/errors.rb +4 -2
  105. data/lib/raif/json_schema_builder.rb +104 -0
  106. data/lib/raif/llm_registry.rb +315 -0
  107. data/lib/raif/migration_checker.rb +74 -0
  108. data/lib/raif/utils/html_fragment_processor.rb +169 -0
  109. data/lib/raif/utils.rb +1 -0
  110. data/lib/raif/version.rb +1 -1
  111. data/lib/raif.rb +7 -32
  112. data/lib/tasks/raif_tasks.rake +9 -4
  113. metadata +62 -12
  114. data/app/models/raif/llms/bedrock_claude.rb +0 -134
  115. data/app/models/raif/llms/open_ai.rb +0 -259
  116. data/lib/raif/default_llms.rb +0 -37
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 232a25681c254ff213ce9565506bffc805df041361e145016eee17f0ca3514f3
4
- data.tar.gz: 24c54f8c2d5ea95664a55a57b7ed8c3e22d3040f79f28311eb60083aa2d9f47e
3
+ metadata.gz: 93d75d28d64da2a5559de740890291cb3a125d981bb58aa56ff4c45be0169954
4
+ data.tar.gz: 9f3ca2e5142f43be17c9fe27e3572cd08a4241e72d6e43893b74e6ba954169ae
5
5
  SHA512:
6
- metadata.gz: c07aa30bcc9b040f75876124fec474bf48f13222ccbb053b7f0dba2313dbd459965d37fcd2b305d399dd503495cfadc2ca4f2b5c8e48f604528f6555d46e404e
7
- data.tar.gz: 9d33cca29d90ed9dda3d7fd93c988b4b48999db4394be62044350648f11083ed03138201f76bb16569fbad6c61c2ff8b917af47d1f4a6e687c527bb9cd32b4b0
6
+ metadata.gz: 90726f18f49a312f0e8d2ceb3913c1bb45c50ad237f0fceeefa825e28374214a8c61ae84c4a83e6d3188a27adea3dc0162001792d867babadca5e30f5f736398
7
+ data.tar.gz: e053003fd9c510509cbc749d5f20d9d2596396224499fe7cc40467920a216f4da6efb3c281ed451ff7e29bced98ff17476c4779b7760a3e361c4a09285667deb
data/README.md CHANGED
@@ -6,30 +6,41 @@
6
6
  [![Documentation](https://img.shields.io/badge/docs-YARD-blue.svg)](https://cultivatelabs.github.io/raif/)
7
7
 
8
8
 
9
- Raif (Ruby AI Framework) is a Rails engine that helps you add AI-powered features to your Rails apps, such as [tasks](#tasks), [conversations](#conversations), and [agents](#agents). It supports for multiple LLM providers including [OpenAI](#openai), [Anthropic Claude](#anthropic-claude), and [AWS Bedrock](#aws-bedrock).
9
+ Raif (Ruby AI Framework) is a Rails engine that helps you add AI-powered features to your Rails apps, such as [tasks](#tasks), [conversations](#conversations), and [agents](#agents). It supports multiple LLM providers including [OpenAI](#openai), [Anthropic Claude](#anthropic-claude), [AWS Bedrock](#aws-bedrock), and [OpenRouter](#openrouter).
10
10
 
11
11
  Raif is built by [Cultivate Labs](https://www.cultivatelabs.com) and is used to power [ARC](https://www.arcanalysis.ai), an AI-powered research & analysis platform.
12
12
 
13
13
  ## Table of Contents
14
14
  - [Setup](#setup)
15
15
  - [OpenAI](#openai)
16
+ - [OpenAI Completions API](#openai-completions-api)
17
+ - [OpenAI Responses API](#openai-responses-api)
16
18
  - [Anthropic Claude](#anthropic-claude)
17
19
  - [AWS Bedrock (Claude)](#aws-bedrock-claude)
20
+ - [OpenRouter](#openrouter)
18
21
  - [Chatting with the LLM](#chatting-with-the-llm)
22
+ - [Streaming Responses](#streaming-responses)
19
23
  - [Key Raif Concepts](#key-raif-concepts)
20
24
  - [Tasks](#tasks)
21
25
  - [Conversations](#conversations)
26
+ - [Real-time Streaming Responses](#real-time-streaming-responses)
22
27
  - [Conversation Types](#conversation-types)
23
28
  - [Agents](#agents)
24
29
  - [Model Tools](#model-tools)
30
+ - [Provider-Managed Tools](#provider-managed-tools)
31
+ - [Images/Files/PDF's](#imagesfilespdfs)
32
+ - [Images/Files/PDF's in Tasks](#imagesfilespdfs-in-tasks)
33
+ - [Embedding Models](#embedding-models)
25
34
  - [Web Admin](#web-admin)
26
35
  - [Customization](#customization)
27
36
  - [Controllers](#controllers)
28
37
  - [Models](#models)
29
38
  - [Views](#views)
30
39
  - [System Prompts](#system-prompts)
40
+ - [Adding LLM Models](#adding-llm-models)
31
41
  - [Testing](#testing)
32
42
  - [Demo App](#demo-app)
43
+ - [Contributing](#contributing)
33
44
  - [License](#license)
34
45
 
35
46
  # Setup
@@ -55,6 +66,8 @@ This will:
55
66
  - Copy Raif's database migrations to your application
56
67
  - Mount Raif's engine at `/raif` in your application's `config/routes.rb` file
57
68
 
69
+ You must configure at least one API key for your LLM provider ([OpenAI](#openai), [Anthropic Claude](#anthropic-claude), [AWS Bedrock](#aws-bedrock-claude), [OpenRouter](#openrouter)). By default, the initializer will load them from environment variables (e.g. `ENV["OPENAI_API_KEY"]`, `ENV["ANTHROPIC_API_KEY"]`, `ENV["OPENROUTER_API_KEY"]`). Alternatively, you can set them directly in `config/initializers/raif.rb`.
70
+
58
71
  Run the migrations. Raif is compatible with both PostgreSQL and MySQL databases.
59
72
  ```bash
60
73
  rails db:migrate
@@ -77,6 +90,10 @@ end
77
90
  Configure your LLM providers. You'll need at least one of:
78
91
 
79
92
  ## OpenAI
93
+
94
+ Raif supports both OpenAI's [Completions API](https://platform.openai.com/docs/api-reference/chat) and the newer [Responses API](https://platform.openai.com/docs/api-reference/responses), which provides access to provider-managed tools like web search, code execution, and image generation.
95
+
96
+ ### OpenAI Completions API
80
97
  ```ruby
81
98
  Raif.configure do |config|
82
99
  config.open_ai_models_enabled = true
@@ -85,10 +102,44 @@ Raif.configure do |config|
85
102
  end
86
103
  ```
87
104
 
88
- Currently supported OpenAI models:
105
+ Currently supported OpenAI Completions API models:
89
106
  - `open_ai_gpt_4o_mini`
90
107
  - `open_ai_gpt_4o`
91
108
  - `open_ai_gpt_3_5_turbo`
109
+ - `open_ai_gpt_4_1`
110
+ - `open_ai_gpt_4_1_mini`
111
+ - `open_ai_gpt_4_1_nano`
112
+ - `open_ai_o1`
113
+ - `open_ai_o1_mini`
114
+ - `open_ai_o3`
115
+ - `open_ai_o3_mini`
116
+ - `open_ai_o4_mini`
117
+
118
+ ### OpenAI Responses API
119
+ ```ruby
120
+ Raif.configure do |config|
121
+ config.open_ai_models_enabled = true
122
+ config.open_ai_api_key = ENV["OPENAI_API_KEY"]
123
+ config.default_llm_model_key = "open_ai_responses_gpt_4o"
124
+ end
125
+ ```
126
+
127
+ Currently supported OpenAI Responses API models:
128
+ - `open_ai_responses_gpt_4o_mini`
129
+ - `open_ai_responses_gpt_4o`
130
+ - `open_ai_responses_gpt_3_5_turbo`
131
+ - `open_ai_responses_gpt_4_1`
132
+ - `open_ai_responses_gpt_4_1_mini`
133
+ - `open_ai_responses_gpt_4_1_nano`
134
+ - `open_ai_responses_o1`
135
+ - `open_ai_responses_o1_mini`
136
+ - `open_ai_responses_o1_pro`
137
+ - `open_ai_responses_o3`
138
+ - `open_ai_responses_o3_mini`
139
+ - `open_ai_responses_o3_pro`
140
+ - `open_ai_responses_o4_mini`
141
+
142
+ The Responses API provides access to [provider-managed tools](#provider-managed-tools), including web search, code execution, and image generation.
92
143
 
93
144
  ## Anthropic Claude
94
145
  ```ruby
@@ -105,10 +156,12 @@ Currently supported Anthropic models:
105
156
  - `anthropic_claude_3_5_haiku`
106
157
  - `anthropic_claude_3_opus`
107
158
 
159
+ The Anthropic adapter provides access to [provider-managed tools](#provider-managed-tools) for web search and code execution.
160
+
108
161
  ## AWS Bedrock (Claude)
109
162
  ```ruby
110
163
  Raif.configure do |config|
111
- config.anthropic_bedrock_models_enabled = true
164
+ config.bedrock_models_enabled = true
112
165
  config.aws_bedrock_region = "us-east-1"
113
166
  config.default_llm_model_key = "bedrock_claude_3_5_sonnet"
114
167
  end
@@ -119,9 +172,34 @@ Currently supported Bedrock models:
119
172
  - `bedrock_claude_3_7_sonnet`
120
173
  - `bedrock_claude_3_5_haiku`
121
174
  - `bedrock_claude_3_opus`
175
+ - `bedrock_amazon_nova_micro`
176
+ - `bedrock_amazon_nova_lite`
177
+ - `bedrock_amazon_nova_pro`
122
178
 
123
179
  Note: Raif utilizes the [AWS Bedrock gem](https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/BedrockRuntime/Client.html) and AWS credentials should be configured via the AWS SDK (environment variables, IAM role, etc.)
124
180
 
181
+ ## OpenRouter
182
+ [OpenRouter](https://openrouter.ai/) is a unified API that provides access to multiple AI models from different providers including Anthropic, Meta, Google, and more.
183
+
184
+ ```ruby
185
+ Raif.configure do |config|
186
+ config.open_router_models_enabled = true
187
+ config.open_router_api_key = ENV["OPENROUTER_API_KEY"]
188
+ config.open_router_app_name = "Your App Name" # Optional
189
+ config.open_router_site_url = "https://yourdomain.com" # Optional
190
+ config.default_llm_model_key = "open_router_claude_3_7_sonnet"
191
+ end
192
+ ```
193
+
194
+ Currently included OpenRouter models:
195
+ - `open_router_claude_3_7_sonnet`
196
+ - `open_router_llama_3_3_70b_instruct`
197
+ - `open_router_llama_3_1_8b_instruct`
198
+ - `open_router_gemini_2_0_flash`
199
+ - `open_router_deepseek_chat_v3`
200
+
201
+ See [Adding LLM Models](#adding-llm-models) for more information on adding new OpenRouter models.
202
+
125
203
  # Chatting with the LLM
126
204
 
127
205
  When using Raif, it's often useful to use one of the [higher level abstractions](#key-raif-concepts) in your application. But when needed, you can utilize `Raif::Llm` to chat with the model directly. All calls to the LLM will create and return a `Raif::ModelCompletion` record, providing you a log of all interactions with the LLM which can be viewed in the [web admin](#web-admin).
@@ -157,10 +235,42 @@ puts model_completion.parsed_response # will strip backticks, parse the JSON, an
157
235
  # => {"joke" => "Why don't skeletons fight each other? They don't have the guts."}
158
236
  ```
159
237
 
238
+ ## Streaming Responses
239
+
240
+ You can enable streaming for any chat call by passing a block to the `chat` method. When streaming is enabled, the block will be called with partial responses as they're received from the LLM:
241
+
242
+ ```ruby
243
+ llm = Raif.llm(:open_ai_gpt_4o)
244
+ model_completion = llm.chat(message: "Tell me a story") do |model_completion, delta, sse_event|
245
+ # This block is called multiple times as the response streams in.
246
+ # You could broadcast these updates via Turbo Streams, WebSockets, etc.
247
+ Turbo::StreamsChannel.broadcast_replace_to(
248
+ :my_channel,
249
+ target: "chat-response",
250
+ partial: "my_partial_displaying_chat_response",
251
+ locals: { model_completion: model_completion, delta: delta, sse_event: sse_event }
252
+ )
253
+ end
254
+
255
+ # The final complete response is available in the model_completion
256
+ puts model_completion.raw_response
257
+ ```
258
+
259
+ You can configure the streaming update frequency by adjusting the chunk size threshold in your Raif configuration:
260
+
261
+ ```ruby
262
+ Raif.configure do |config|
263
+ # Control how often the model completion is updated & the block is called when streaming.
264
+ # Lower values = more frequent updates but more database writes.
265
+ # Higher values = less frequent updates but fewer database writes.
266
+ config.streaming_update_chunk_size_threshold = 50 # default is 25
267
+ end
268
+ ```
269
+
160
270
  # Key Raif Concepts
161
271
 
162
272
  ## Tasks
163
- If you have a single-shot task that you want an LLM to do in your application, you should create a `Raif::Task` subclass (see the end of this section for an example of using the task generator), where you'll define the prompt and response format for the task and call via `Raif::Task.run`. For example, say you have a `Document` model in your app and want to have a summarization task for the LLM:
273
+ If you have a single-shot task that you want an LLM to do in your application, you should create a `Raif::Task` subclass, where you'll define the prompt and response format for the task and call via `Raif::Task.run`. For example, say you have a `Document` model in your app and want to have a summarization task for the LLM:
164
274
 
165
275
  ```bash
166
276
  rails generate raif:task DocumentSummarization --response-format html
@@ -171,7 +281,10 @@ This will create a new task in `app/models/raif/tasks/document_summarization.rb`
171
281
  ```ruby
172
282
  class Raif::Tasks::DocumentSummarization < Raif::ApplicationTask
173
283
  llm_response_format :html # options are :html, :text, :json
174
-
284
+ llm_temperature 0.8 # optional, defaults to 0.7
285
+ llm_response_allowed_tags %w[p b i div strong] # optional, defaults to Rails::HTML5::SafeListSanitizer.allowed_tags
286
+ llm_response_allowed_attributes %w[style] # optional, defaults to Rails::HTML5::SafeListSanitizer.allowed_attributes
287
+
175
288
  # Any attr_accessor you define can be included as an argument when calling `run`.
176
289
  # E.g. Raif::Tasks::DocumentSummarization.run(document: document, creator: user)
177
290
  attr_accessor :document
@@ -229,20 +342,10 @@ module Raif
229
342
 
230
343
  attr_accessor :topic
231
344
 
232
- def self.json_response_schema
233
- {
234
- type: "object",
235
- additionalProperties: false,
236
- required: ["queries"],
237
- properties: {
238
- queries: {
239
- type: "array",
240
- items: {
241
- type: "string"
242
- }
243
- }
244
- }
245
- }
345
+ json_response_schema do
346
+ array :queries do
347
+ items type: "string"
348
+ end
246
349
  end
247
350
 
248
351
  def build_prompt
@@ -271,6 +374,8 @@ You are an assistant with expertise in summarizing detailed articles into clear
271
374
  You're collaborating with teammate who speaks Spanish. Please respond in Spanish.
272
375
  ```
273
376
 
377
+ The current list of valid language keys can be found [here](https://github.com/CultivateLabs/raif/blob/main/lib/raif/languages.rb).
378
+
274
379
  ## Conversations
275
380
 
276
381
  Raif provides `Raif::Conversation` and `Raif::ConversationEntry` models that you can use to provide an LLM-powered chat interface. It also provides controllers and views for the conversation interface.
@@ -312,6 +417,10 @@ If your app already includes Bootstrap styles, this will render a conversation i
312
417
 
313
418
  If your app does not include Bootstrap, you can [override the views](#views) to update styles.
314
419
 
420
+ ### Real-time Streaming Responses
421
+
422
+ Raif conversations have built-in support for streaming responses, where the LLM's response is displayed progressively as it's being generated. Each time a conversation entry is updated during the streaming response, Raif will call `broadcast_replace_to(conversation)` (where `conversation` is the `Raif::Conversation` associated with the conversation entry). When using the `raif_conversation` view helper, it will automatically set up the subscription for you.
423
+
315
424
  ### Conversation Types
316
425
 
317
426
  If your application has a specific type of conversation that you use frequently, you can create a custom conversation type by running the generator. For example, say you are implementing a customer support chatbot in your application and want to have a custom conversation type for doing this with the LLM:
@@ -434,39 +543,31 @@ This will create a new model tool in `app/models/raif/model_tools/google_search.
434
543
  ```ruby
435
544
  class Raif::ModelTools::GoogleSearch < Raif::ModelTool
436
545
  # For example tool implementations, see:
437
- # Wikipedia Search Tool: https://github.com/CultivateLabs/raif/blob/main/app/models/raif/model_tools/wikipedia_search_tool.rb
438
- # Fetch URL Tool: https://github.com/CultivateLabs/raif/blob/main/app/models/raif/model_tools/fetch_url_tool.rb
546
+ # Wikipedia Search Tool: https://github.com/CultivateLabs/raif/blob/main/app/models/raif/model_tools/wikipedia_search.rb
547
+ # Fetch URL Tool: https://github.com/CultivateLabs/raif/blob/main/app/models/raif/model_tools/fetch_url.rb
548
+
549
+ # Define the schema for the arguments that the LLM should use when invoking your tool.
550
+ # It should be a valid JSON schema. When the model invokes your tool,
551
+ # the arguments it provides will be validated against this schema using JSON::Validator from the json-schema gem.
552
+ #
553
+ # All attributes will be required and additionalProperties will be set to false.
554
+ #
555
+ # This schema would expect the model to invoke your tool with an arguments JSON object like:
556
+ # { "query" : "some query here" }
557
+ tool_arguments_schema do
558
+ string :query, description: "The query to search for"
559
+ end
439
560
 
440
561
  # An example of how the LLM should invoke your tool. This should return a hash with name and arguments keys.
441
562
  # `to_json` will be called on it and provided to the LLM as an example of how to invoke your tool.
442
- def self.example_model_invocation
563
+ example_model_invocation do
443
564
  {
444
565
  "name": tool_name,
445
566
  "arguments": { "query": "example query here" }
446
567
  }
447
568
  end
448
569
 
449
- # Define your tool's argument schema here. It should be a valid JSON schema.
450
- # When the model invokes your tool, the arguments it provides will be validated
451
- # against this schema using JSON::Validator from the json-schema gem.
452
- def self.tool_arguments_schema
453
- # For example:
454
- # {
455
- # type: "object",
456
- # additionalProperties: false,
457
- # required: ["query"],
458
- # properties: {
459
- # query: {
460
- # type: "string",
461
- # description: "The query to search for"
462
- # }
463
- # }
464
- # }
465
- # Would expect the model to invoke your tool with an arguments JSON object like:
466
- # { "query" : "some query here" }
467
- end
468
-
469
- def self.tool_description
570
+ tool_description do
470
571
  "Description of your tool that will be provided to the LLM so it knows when to invoke it"
471
572
  end
472
573
 
@@ -506,6 +607,172 @@ class Raif::ModelTools::GoogleSearch < Raif::ModelTool
506
607
  end
507
608
  ```
508
609
 
610
+ ### Provider-Managed Tools
611
+
612
+ In addition to the ability to create your own model tools, Raif supports provider-managed tools. These are tools that are built into certain LLM providers and run on the provider's infrastructure:
613
+
614
+ - **`Raif::ModelTools::ProviderManaged::WebSearch`**: Performs real-time web searches and returns relevant results
615
+ - **`Raif::ModelTools::ProviderManaged::CodeExecution`**: Executes code in a secure sandboxed environment (e.g. Python)
616
+ - **`Raif::ModelTools::ProviderManaged::ImageGeneration`**: Generates images based on text descriptions
617
+
618
+ Current provider-managed tool support:
619
+ | Provider | WebSearch | CodeExecution | ImageGeneration |
620
+ |----------|-----------|---------------|-----------------|
621
+ | OpenAI Responses API | ✅ | ✅ | ✅ |
622
+ | OpenAI Completions API | ❌ | ❌ | ❌ |
623
+ | Anthropic Claude | ✅ | ✅ | ❌ |
624
+ | AWS Bedrock (Claude) | ❌ | ❌ | ❌ |
625
+ | OpenRouter | ❌ | ❌ | ❌ |
626
+
627
+ To use provider-managed tools, include them in the `available_model_tools` array:
628
+
629
+ ```ruby
630
+ # In a conversation
631
+ conversation = Raif::Conversation.create!(
632
+ creator: current_user,
633
+ available_model_tools: [
634
+ "Raif::ModelTools::ProviderManaged::WebSearch",
635
+ "Raif::ModelTools::ProviderManaged::CodeExecution"
636
+ ]
637
+ )
638
+
639
+ # In an agent
640
+ agent = Raif::Agents::ReActAgent.new(
641
+ task: "Search for recent news about AI and create a summary chart",
642
+ available_model_tools: [
643
+ "Raif::ModelTools::ProviderManaged::WebSearch",
644
+ "Raif::ModelTools::ProviderManaged::CodeExecution"
645
+ ],
646
+ creator: current_user
647
+ )
648
+
649
+ # Directly in a chat
650
+ llm = Raif.llm(:open_ai_responses_gpt_4_1)
651
+ model_completion = llm.chat(
652
+ messages: [{ role: "user", content: "What are the latest developments in Ruby on Rails?" }],
653
+ available_model_tools: [Raif::ModelTools::ProviderManaged::WebSearch]
654
+ )
655
+ ```
656
+
657
+ ## Sending Images/Files/PDF's to the LLM
658
+
659
+ Raif supports images, files, and PDF's in the messages sent to the LLM.
660
+
661
+ To include an image, file/PDF in a message, you can use the `Raif::ModelImageInput` and `Raif::ModelFileInput`.
662
+
663
+ To include an image:
664
+ ```ruby
665
+ # From a local file
666
+ image = Raif::ModelImageInput.new(input: "path/to/image.png")
667
+
668
+ # From a URL
669
+ image = Raif::ModelImageInput.new(url: "https://example.com/image.png")
670
+
671
+ # From an ActiveStorage attachment (assumes you have a User model with an avatar attachment)
672
+ image = Raif::ModelImageInput.new(input: user.avatar)
673
+
674
+ # Then chat with the LLM
675
+ llm = Raif.llm(:open_ai_gpt_4o)
676
+ model_completion = llm.chat(messages: [
677
+ { role: "user", content: ["What's in this image?", image]}
678
+ ])
679
+ ```
680
+
681
+ To include a file/PDF:
682
+ ```ruby
683
+ # From a local file
684
+ file = Raif::ModelFileInput.new(input: "path/to/file.pdf")
685
+
686
+ # From a URL
687
+ file = Raif::ModelFileInput.new(url: "https://example.com/file.pdf")
688
+
689
+ # From an ActiveStorage attachment (assumes you have a Document model with a pdf attachment)
690
+ file = Raif::ModelFileInput.new(input: document.pdf)
691
+
692
+ # Then chat with the LLM
693
+ llm = Raif.llm(:open_ai_gpt_4o)
694
+ model_completion = llm.chat(messages: [
695
+ { role: "user", content: ["What's in this file?", file]}
696
+ ])
697
+ ```
698
+
699
+ ### Images/Files/PDF's in Tasks
700
+
701
+ You can include images and files/PDF's when running a `Raif::Task`:
702
+
703
+ To include a file/PDF:
704
+ ```ruby
705
+ file = Raif::ModelFileInput.new(input: "path/to/file.pdf")
706
+
707
+ # Assumes you've created a PdfContentExtraction task
708
+ task = Raif::Tasks::PdfContentExtraction.run(
709
+ creator: current_user,
710
+ files: [file]
711
+ )
712
+ ```
713
+
714
+ To include an image:
715
+ ```ruby
716
+ image = Raif::ModelImageInput.new(input: "path/to/image.png")
717
+
718
+ # Assumes you've created a ImageDescriptionGeneration task
719
+ task = Raif::Tasks::ImageDescriptionGeneration.run(
720
+ creator: current_user,
721
+ images: [image]
722
+ )
723
+ ```
724
+
725
+
726
+ # Embedding Models
727
+
728
+ Raif supports generation of vector embeddings. You can enable and configure embedding models in your Raif configuration:
729
+
730
+ ```ruby
731
+ Raif.configure do |config|
732
+ config.open_ai_embedding_models_enabled = true
733
+ config.bedrock_embedding_models_enabled = true
734
+
735
+ config.default_embedding_model_key = "open_ai_text_embedding_3_small"
736
+ end
737
+ ```
738
+
739
+ ## Supported Embedding Models
740
+
741
+ Raif currently supports the following embedding models:
742
+
743
+ ### OpenAI
744
+ - `open_ai_text_embedding_3_small`
745
+ - `open_ai_text_embedding_3_large`
746
+ - `open_ai_text_embedding_ada_002`
747
+
748
+ ### AWS Bedrock
749
+ - `bedrock_titan_embed_text_v2`
750
+
751
+ ## Creating Embeddings
752
+
753
+ By default, Raif will use `Raif.config.default_embedding_model_key` to create embeddings. To create an embedding for a piece of text:
754
+
755
+ ```ruby
756
+ # Generate an embedding for a piece of text
757
+ embedding = Raif.generate_embedding!("Your text here")
758
+
759
+ # Generate an embedding for a piece of text with a specific number of dimensions
760
+ embedding = Raif.generate_embedding!("Your text here", dimensions: 1024)
761
+
762
+ # If you're using an OpenAI embedding model, you can pass an array of strings to embed multiple texts at once
763
+ embeddings = Raif.generate_embedding!([
764
+ "Your text here",
765
+ "Your other text here"
766
+ ])
767
+ ```
768
+
769
+ Or to generate embeddings for a piece of text with a specific model:
770
+
771
+ ```ruby
772
+ model = Raif.embedding_model(:open_ai_text_embedding_3_small)
773
+ embedding = model.generate_embedding!("Your text here")
774
+ ```
775
+
509
776
  # Web Admin
510
777
 
511
778
  Raif includes a web admin interface for viewing all interactions with the LLM. Assuming you have the engine mounted at `/raif`, you can access the admin interface at `/raif/admin`.
@@ -516,6 +783,7 @@ The admin interface contains sections for:
516
783
  - Conversations
517
784
  - Agents
518
785
  - Model Tool Invocations
786
+ - Stats
519
787
 
520
788
 
521
789
  ### Model Completions
@@ -537,6 +805,9 @@ The admin interface contains sections for:
537
805
  ![Model Tool Invocations Index](./screenshots/admin-model-tool-invocations-index.png)
538
806
  ![Model Tool Invocation Detail](./screenshots/admin-model-tool-invocation-show.png)
539
807
 
808
+ ### Stats
809
+ ![Stats](./screenshots/admin-stats.png)
810
+
540
811
  # Customization
541
812
 
542
813
  ## Controllers
@@ -593,6 +864,32 @@ If you don't want to override the system prompt entirely in your task/conversati
593
864
  Raif.configure do |config|
594
865
  config.conversation_system_prompt_intro = "You are a helpful assistant who specializes in customer support."
595
866
  config.task_system_prompt_intro = "You are a helpful assistant who specializes in data analysis."
867
+ # or with a lambda
868
+ config.task_system_prompt_intro = ->(task) { "You are a helpful assistant who specializes in #{task.name}." }
869
+ config.conversation_system_prompt_intro = ->(conversation) { "You are a helpful assistant talking to #{conversation.creator.email}. Today's date is #{Date.today.strftime('%B %d, %Y')}." }
870
+ end
871
+ ```
872
+
873
+ ## Adding LLM Models
874
+
875
+ You can easily add new LLM models to Raif:
876
+
877
+ ```ruby
878
+ # Register the model in Raif's LLM registry
879
+ Raif.register_llm(Raif::Llms::OpenRouter, {
880
+ key: :open_router_gemini_flash_1_5_8b, # a unique key for the model
881
+ api_name: "google/gemini-flash-1.5-8b", # name of the model to be used in API calls - needs to match the provider's API name
882
+ input_token_cost: 0.038 / 1_000_000, # the cost per input token
883
+ output_token_cost: 0.15 / 1_000_000, # the cost per output token
884
+ })
885
+
886
+ # Then use the model
887
+ llm = Raif.llm(:open_router_gemini_flash_1_5_8b)
888
+ llm.chat(message: "Hello, world!")
889
+
890
+ # Or set it as the default LLM model in your initializer
891
+ Raif.configure do |config|
892
+ config.default_llm_model_key = "open_router_gemini_flash_1_5_8b"
596
893
  end
597
894
  ```
598
895
 
@@ -673,6 +970,12 @@ You can then access the app at [http://localhost:3000](http://localhost:3000).
673
970
 
674
971
  ![Demo App Screenshot](./screenshots/demo-app.png)
675
972
 
973
+ # Contributing
974
+
975
+ We welcome contributions to Raif! Please see our [Contributing Guide](CONTRIBUTING.md) for details.
976
+
977
+ **Important**: All PRs should be made against the `dev` branch.
978
+
676
979
  # License
677
980
 
678
981
  The gem is available as open source under the terms of the MIT License.
@@ -28,6 +28,31 @@
28
28
  animation-delay: 0.4s;
29
29
  }
30
30
 
31
+ .raif-streaming-cursor {
32
+ display: inline-block;
33
+ width: 2px;
34
+ height: 1.1em;
35
+ margin-bottom: -2px;
36
+ background-color: currentColor;
37
+ animation: blink 1s infinite;
38
+ transform: none;
39
+ border-radius: 0;
40
+ position: relative;
41
+ }
42
+
43
+ .raif-streaming-cursor:before,
44
+ .raif-streaming-cursor:after {
45
+ display: none;
46
+ }
47
+
48
+ @keyframes blink {
49
+ 0%, 50% {
50
+ opacity: 1;
51
+ }
52
+ 51%, 100% {
53
+ opacity: 0;
54
+ }
55
+ }
31
56
  @keyframes rotate {
32
57
  0% {
33
58
  transform: translate(-50%, -50%) rotateZ(0deg);
@@ -71,4 +96,4 @@
71
96
  }
72
97
  }
73
98
 
74
- /*# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbInJhaWYuY3NzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBO0VBQ0UseUJBQXlCO0VBQ3pCLG1CQUFtQjtFQUNuQixrQkFBa0I7RUFDbEIsV0FBVztFQUNYLFlBQVk7RUFDWixjQUFjO0VBQ2QscUJBQXFCO0FBQ3ZCOztBQUVBOztFQUVFLFdBQVc7RUFDWCxjQUFjO0VBQ2Qsa0JBQWtCO0VBQ2xCLE1BQU07RUFDTixPQUFPO0VBQ1AsY0FBYztFQUNkLGVBQWU7RUFDZixrQkFBa0I7RUFDbEIseUJBQXlCO0VBQ3pCLGtDQUFrQztBQUNwQzs7QUFFQTtFQUNFLGNBQWM7RUFDZCx5QkFBeUI7RUFDekIscUJBQXFCO0FBQ3ZCOztBQUVBO0VBQ0U7SUFDRSw4Q0FBOEM7RUFDaEQ7RUFDQTtJQUNFLGdEQUFnRDtFQUNsRDtBQUNGO0FBQ0E7RUFDRTtJQUNFLDZDQUE2QztFQUMvQztFQUNBO0lBQ0UsZ0RBQWdEO0VBQ2xEO0FBQ0Y7QUFDQTtFQUNFO0lBQ0Usd0NBQXdDO0VBQzFDO0VBQ0E7SUFDRSx3Q0FBd0M7RUFDMUM7RUFDQTtJQUNFLHNDQUFzQztFQUN4QztFQUNBO0lBQ0UseUNBQXlDO0VBQzNDO0VBQ0E7SUFDRSxxQ0FBcUM7RUFDdkM7RUFDQTtJQUNFLDBDQUEwQztFQUM1QztFQUNBO0lBQ0UsdUNBQXVDO0VBQ3pDO0VBQ0E7SUFDRSx5Q0FBeUM7RUFDM0M7QUFDRiIsImZpbGUiOiJyYWlmLmNzcyIsInNvdXJjZXNDb250ZW50IjpbIi5yYWlmLWxvYWRlciB7XG4gIHRyYW5zZm9ybTogcm90YXRlWig0NWRlZyk7XG4gIHBlcnNwZWN0aXZlOiAxMDAwcHg7XG4gIGJvcmRlci1yYWRpdXM6IDUwJTtcbiAgd2lkdGg6IDI1cHg7XG4gIGhlaWdodDogMjVweDtcbiAgY29sb3I6ICMzODc0ZmY7XG4gIGRpc3BsYXk6IGlubGluZS1ibG9jaztcbn1cblxuLnJhaWYtbG9hZGVyOmJlZm9yZSxcbi5yYWlmLWxvYWRlcjphZnRlciB7XG4gIGNvbnRlbnQ6IFwiXCI7XG4gIGRpc3BsYXk6IGJsb2NrO1xuICBwb3NpdGlvbjogYWJzb2x1dGU7XG4gIHRvcDogMDtcbiAgbGVmdDogMDtcbiAgd2lkdGg6IGluaGVyaXQ7XG4gIGhlaWdodDogaW5oZXJpdDtcbiAgYm9yZGVyLXJhZGl1czogNTAlO1xuICB0cmFuc2Zvcm06IHJvdGF0ZVgoNzBkZWcpO1xuICBhbmltYXRpb246IDFzIHNwaW4gbGluZWFyIGluZmluaXRlO1xufVxuXG4ucmFpZi1sb2FkZXI6YWZ0ZXIge1xuICBjb2xvcjogIzI1YjAwMztcbiAgdHJhbnNmb3JtOiByb3RhdGVZKDcwZGVnKTtcbiAgYW5pbWF0aW9uLWRlbGF5OiAwLjRzO1xufVxuXG5Aa2V5ZnJhbWVzIHJvdGF0ZSB7XG4gIDAlIHtcbiAgICB0cmFuc2Zvcm06IHRyYW5zbGF0ZSgtNTAlLCAtNTAlKSByb3RhdGVaKDBkZWcpO1xuICB9XG4gIDEwMCUge1xuICAgIHRyYW5zZm9ybTogdHJhbnNsYXRlKC01MCUsIC01MCUpIHJvdGF0ZVooMzYwZGVnKTtcbiAgfVxufVxuQGtleWZyYW1lcyByb3RhdGVjY3cge1xuICAwJSB7XG4gICAgdHJhbnNmb3JtOiB0cmFuc2xhdGUoLTUwJSwgLTUwJSkgcm90YXRlKDBkZWcpO1xuICB9XG4gIDEw
MCUge1xuICAgIHRyYW5zZm9ybTogdHJhbnNsYXRlKC01MCUsIC01MCUpIHJvdGF0ZSgtMzYwZGVnKTtcbiAgfVxufVxuQGtleWZyYW1lcyBzcGluIHtcbiAgMCUsIDEwMCUge1xuICAgIGJveC1zaGFkb3c6IDAuM2VtIDBweCAwIDBweCBjdXJyZW50Y29sb3I7XG4gIH1cbiAgMTIlIHtcbiAgICBib3gtc2hhZG93OiAwLjNlbSAwLjNlbSAwIDAgY3VycmVudGNvbG9yO1xuICB9XG4gIDI1JSB7XG4gICAgYm94LXNoYWRvdzogMCAwLjNlbSAwIDBweCBjdXJyZW50Y29sb3I7XG4gIH1cbiAgMzclIHtcbiAgICBib3gtc2hhZG93OiAtMC4zZW0gMC4zZW0gMCAwIGN1cnJlbnRjb2xvcjtcbiAgfVxuICA1MCUge1xuICAgIGJveC1zaGFkb3c6IC0wLjNlbSAwIDAgMCBjdXJyZW50Y29sb3I7XG4gIH1cbiAgNjIlIHtcbiAgICBib3gtc2hhZG93OiAtMC4zZW0gLTAuM2VtIDAgMCBjdXJyZW50Y29sb3I7XG4gIH1cbiAgNzUlIHtcbiAgICBib3gtc2hhZG93OiAwcHggLTAuM2VtIDAgMCBjdXJyZW50Y29sb3I7XG4gIH1cbiAgODclIHtcbiAgICBib3gtc2hhZG93OiAwLjNlbSAtMC4zZW0gMCAwIGN1cnJlbnRjb2xvcjtcbiAgfVxufVxuIl19 */
99
+ /*# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbInJhaWYuY3NzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBO0VBQ0UseUJBQXlCO0VBQ3pCLG1CQUFtQjtFQUNuQixrQkFBa0I7RUFDbEIsV0FBVztFQUNYLFlBQVk7RUFDWixjQUFjO0VBQ2QscUJBQXFCO0FBQ3ZCOztBQUVBOztFQUVFLFdBQVc7RUFDWCxjQUFjO0VBQ2Qsa0JBQWtCO0VBQ2xCLE1BQU07RUFDTixPQUFPO0VBQ1AsY0FBYztFQUNkLGVBQWU7RUFDZixrQkFBa0I7RUFDbEIseUJBQXlCO0VBQ3pCLGtDQUFrQztBQUNwQzs7QUFFQTtFQUNFLGNBQWM7RUFDZCx5QkFBeUI7RUFDekIscUJBQXFCO0FBQ3ZCOztBQUVBO0VBQ0UscUJBQXFCO0VBQ3JCLFVBQVU7RUFDVixhQUFhO0VBQ2IsbUJBQW1CO0VBQ25CLDhCQUE4QjtFQUM5Qiw0QkFBNEI7RUFDNUIsZUFBZTtFQUNmLGdCQUFnQjtFQUNoQixrQkFBa0I7QUFDcEI7O0FBRUE7O0VBRUUsYUFBYTtBQUNmOztBQUVBO0VBQ0U7SUFDRSxVQUFVO0VBQ1o7RUFDQTtJQUNFLFVBQVU7RUFDWjtBQUNGO0FBQ0E7RUFDRTtJQUNFLDhDQUE4QztFQUNoRDtFQUNBO0lBQ0UsZ0RBQWdEO0VBQ2xEO0FBQ0Y7QUFDQTtFQUNFO0lBQ0UsNkNBQTZDO0VBQy9DO0VBQ0E7SUFDRSxnREFBZ0Q7RUFDbEQ7QUFDRjtBQUNBO0VBQ0U7SUFDRSx3Q0FBd0M7RUFDMUM7RUFDQTtJQUNFLHdDQUF3QztFQUMxQztFQUNBO0lBQ0Usc0NBQXNDO0VBQ3hDO0VBQ0E7SUFDRSx5Q0FBeUM7RUFDM0M7RUFDQTtJQUNFLHFDQUFxQztFQUN2QztFQUNBO0lBQ0UsMENBQTBDO0VBQzVDO0VBQ0E7SUFDRSx1Q0FBdUM7RUFDekM7RUFDQTtJQUNFLHlDQUF5QztFQUMzQztBQUNGIiwiZmlsZSI6InJhaWYuY3NzIiwic291cmNlc0NvbnRlbnQiOlsiLnJhaWYtbG9hZGVyIHtcbiAgdHJhbnNmb3JtOiByb3RhdGVaKDQ1ZGVnKTtcbiAgcGVyc3BlY3RpdmU6IDEwMDBweDtcbiAgYm9yZGVyLXJhZGl1czogNTAlO1xuICB3aWR0aDogMjVweDtcbiAgaGVpZ2h0OiAyNXB4O1xuICBjb2xvcjogIzM4NzRmZjtcbiAgZGlzcGxheTogaW5saW5lLWJsb2NrO1xufVxuXG4ucmFpZi1sb2FkZXI6YmVmb3JlLFxuLnJhaWYtbG9hZGVyOmFmdGVyIHtcbiAgY29udGVudDogXCJcIjtcbiAgZGlzcGxheTogYmxvY2s7XG4gIHBvc2l0aW9uOiBhYnNvbHV0ZTtcbiAgdG9wOiAwO1xuICBsZWZ0OiAwO1xuICB3aWR0aDogaW5oZXJpdDtcbiAgaGVpZ2h0OiBpbmhlcml0O1xuICBib3JkZXItcmFkaXVzOiA1MCU7XG4gIHRyYW5zZm9ybTogcm90YXRlWCg3MGRlZyk7XG4gIGFuaW1hdGlvbjogMXMgc3BpbiBsaW5lYXIgaW5maW5pdGU7XG59XG5cbi5yYWlmLWxvYWRlcjphZnRlciB7XG4gIGNvbG9yOiAjMjViMDAzO1xuICB0cmFuc2Zvcm06IHJvdGF0ZVkoNzBkZWcpO1xuICBhbmltYXRpb24tZGVsYXk6IDAuNHM7XG59XG5cbi5yYWlmLXN0cmVhbWluZy1jdXJzb3Ige1xuICBkaXNwbGF5OiBpbmxpbmUtYmxvY2s7XG4gIHdpZHRoOiAy
cHg7XG4gIGhlaWdodDogMS4xZW07XG4gIG1hcmdpbi1ib3R0b206IC0ycHg7XG4gIGJhY2tncm91bmQtY29sb3I6IGN1cnJlbnRDb2xvcjtcbiAgYW5pbWF0aW9uOiBibGluayAxcyBpbmZpbml0ZTtcbiAgdHJhbnNmb3JtOiBub25lO1xuICBib3JkZXItcmFkaXVzOiAwO1xuICBwb3NpdGlvbjogcmVsYXRpdmU7XG59XG5cbi5yYWlmLXN0cmVhbWluZy1jdXJzb3I6YmVmb3JlLFxuLnJhaWYtc3RyZWFtaW5nLWN1cnNvcjphZnRlciB7XG4gIGRpc3BsYXk6IG5vbmU7XG59XG5cbkBrZXlmcmFtZXMgYmxpbmsge1xuICAwJSwgNTAlIHtcbiAgICBvcGFjaXR5OiAxO1xuICB9XG4gIDUxJSwgMTAwJSB7XG4gICAgb3BhY2l0eTogMDtcbiAgfVxufVxuQGtleWZyYW1lcyByb3RhdGUge1xuICAwJSB7XG4gICAgdHJhbnNmb3JtOiB0cmFuc2xhdGUoLTUwJSwgLTUwJSkgcm90YXRlWigwZGVnKTtcbiAgfVxuICAxMDAlIHtcbiAgICB0cmFuc2Zvcm06IHRyYW5zbGF0ZSgtNTAlLCAtNTAlKSByb3RhdGVaKDM2MGRlZyk7XG4gIH1cbn1cbkBrZXlmcmFtZXMgcm90YXRlY2N3IHtcbiAgMCUge1xuICAgIHRyYW5zZm9ybTogdHJhbnNsYXRlKC01MCUsIC01MCUpIHJvdGF0ZSgwZGVnKTtcbiAgfVxuICAxMDAlIHtcbiAgICB0cmFuc2Zvcm06IHRyYW5zbGF0ZSgtNTAlLCAtNTAlKSByb3RhdGUoLTM2MGRlZyk7XG4gIH1cbn1cbkBrZXlmcmFtZXMgc3BpbiB7XG4gIDAlLCAxMDAlIHtcbiAgICBib3gtc2hhZG93OiAwLjNlbSAwcHggMCAwcHggY3VycmVudGNvbG9yO1xuICB9XG4gIDEyJSB7XG4gICAgYm94LXNoYWRvdzogMC4zZW0gMC4zZW0gMCAwIGN1cnJlbnRjb2xvcjtcbiAgfVxuICAyNSUge1xuICAgIGJveC1zaGFkb3c6IDAgMC4zZW0gMCAwcHggY3VycmVudGNvbG9yO1xuICB9XG4gIDM3JSB7XG4gICAgYm94LXNoYWRvdzogLTAuM2VtIDAuM2VtIDAgMCBjdXJyZW50Y29sb3I7XG4gIH1cbiAgNTAlIHtcbiAgICBib3gtc2hhZG93OiAtMC4zZW0gMCAwIDAgY3VycmVudGNvbG9yO1xuICB9XG4gIDYyJSB7XG4gICAgYm94LXNoYWRvdzogLTAuM2VtIC0wLjNlbSAwIDAgY3VycmVudGNvbG9yO1xuICB9XG4gIDc1JSB7XG4gICAgYm94LXNoYWRvdzogMHB4IC0wLjNlbSAwIDAgY3VycmVudGNvbG9yO1xuICB9XG4gIDg3JSB7XG4gICAgYm94LXNoYWRvdzogMC4zZW0gLTAuM2VtIDAgMCBjdXJyZW50Y29sb3I7XG4gIH1cbn1cbiJdfQ== */
@@ -0,0 +1,12 @@
1
+ .stats-icon {
2
+ min-width: 46px;
3
+ text-align: center;
4
+ }
5
+
6
+ .stats-card {
7
+ transition: transform 0.2s;
8
+ }
9
+
10
+ .stats-card:hover {
11
+ transform: translateY(-5px);
12
+ }
@@ -28,6 +28,33 @@
28
28
  animation-delay: .4s;
29
29
  }
30
30
 
31
+ // Streaming cursor - a simple blinking cursor
32
+ .raif-streaming-cursor {
33
+ display: inline-block;
34
+ width: 2px;
35
+ height: 1.1em;
36
+ margin-bottom: -2px;
37
+ background-color: currentColor;
38
+ animation: blink 1s infinite;
39
+ transform: none;
40
+ border-radius: 0;
41
+ position: relative;
42
+ }
43
+
44
+ .raif-streaming-cursor:before,
45
+ .raif-streaming-cursor:after {
46
+ display: none;
47
+ }
48
+
49
+ @keyframes blink {
50
+ 0%, 50% {
51
+ opacity: 1;
52
+ }
53
+ 51%, 100% {
54
+ opacity: 0;
55
+ }
56
+ }
57
+
31
58
  @keyframes rotate {
32
59
  0% {
33
60
  transform: translate(-50%, -50%) rotateZ(0deg);
@@ -49,7 +76,6 @@
49
76
  }
50
77
 
51
78
  @keyframes spin {
52
-
53
79
  0%,
54
80
  100% {
55
81
  box-shadow: .3em 0px 0 0px currentcolor;
@@ -15,6 +15,20 @@ module Raif
15
15
  end
16
16
  end
17
17
 
18
+ def get_time_range(period)
19
+ case period
20
+ when "day"
21
+ 24.hours.ago..Time.current
22
+ when "week"
23
+ 1.week.ago..Time.current
24
+ when "month"
25
+ 1.month.ago..Time.current
26
+ when "all"
27
+ Time.at(0)..Time.current
28
+ else
29
+ 24.hours.ago..Time.current
30
+ end
31
+ end
18
32
  end
19
33
  end
20
34
  end